mirror of
https://github.com/astral-sh/ruff.git
synced 2025-07-07 21:25:08 +00:00
Add support for configuring knot in pyproject.toml files (#15493)
## Summary

This PR adds support for configuring Red Knot in the `tool.knot` section of the project's `pyproject.toml`. Options specified on the CLI take precedence over the options in the configuration file.

This PR only supports the `environment` and the `src.root` options for now. Other options will be added in separate PRs. There are also a few concerns that I intentionally ignored as part of this PR:

* Handling of relative paths: we need to anchor paths relative to the current working directory (CLI) or the project (`pyproject.toml` or `knot.toml`).
* Tracking the source of a value: diagnostics would benefit from knowing which configuration a value comes from, so that we can point the user to the right configuration file (or the CLI) if the configuration is invalid.
* Schema generation, and there's a lot more; see https://github.com/astral-sh/ruff/issues/15491

This PR changes the default for first-party code: our existing default was to only add the project root; now, Red Knot adds the project root and `src` (if such a directory exists). Theoretically, we'd have to add a file-watcher event that changes the first-party search paths if a user later creates a `src` directory. I think this is pretty uncommon, which is why I ignored the complexity for now, but I can be persuaded to handle it if it's considered important.

Part of https://github.com/astral-sh/ruff/issues/15491

## Test Plan

Existing tests, plus a new file-watching test demonstrating that changing the Python version and platform is correctly reflected.
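The precedence rule above can be illustrated with a small sketch (not part of this diff) that uses the `Options`, `EnvironmentOptions`, and `Combine` APIs introduced below; the module paths are assumed from the new `red_knot_workspace::project` layout:

```rust
use red_knot_python_semantic::PythonVersion;
use red_knot_workspace::project::combine::Combine;
use red_knot_workspace::project::options::{EnvironmentOptions, Options};

fn main() {
    // Options as they would be parsed from `[tool.knot]` in pyproject.toml.
    let project_options = Options {
        environment: Some(EnvironmentOptions {
            python_version: Some(PythonVersion::PY311),
            ..EnvironmentOptions::default()
        }),
        ..Options::default()
    };

    // Options built from CLI arguments (e.g. `--python-version 3.12`).
    let cli_options = Options {
        environment: Some(EnvironmentOptions {
            python_version: Some(PythonVersion::PY312),
            ..EnvironmentOptions::default()
        }),
        ..Options::default()
    };

    // `Combine` prefers the values in `self`, so combining the CLI options
    // over the project options lets the CLI win for scalar settings.
    let resolved = cli_options.combine(project_options);
    assert_eq!(
        resolved.environment.and_then(|env| env.python_version),
        Some(PythonVersion::PY312)
    );
}
```

This mirrors what `ProjectMetadata::apply_cli_options` does internally by combining the CLI options over the options parsed from `pyproject.toml`.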
This commit is contained in:
parent 9ed67ba33e
commit eb47a6634d
36 changed files with 820 additions and 413 deletions
5
Cargo.lock
generated
@@ -2336,13 +2336,17 @@ dependencies = [
"crossbeam",
"ctrlc",
"filetime",
"insta",
"insta-cmd",
"rayon",
"red_knot_python_semantic",
"red_knot_server",
"red_knot_workspace",
"regex",
"ruff_db",
"salsa",
"tempfile",
"toml",
"tracing",
"tracing-flame",
"tracing-subscriber",

@@ -2479,6 +2483,7 @@ dependencies = [
"red_knot_vendored",
"ruff_cache",
"ruff_db",
"ruff_macros",
"ruff_python_ast",
"ruff_text_size",
"rustc-hash 2.1.0",
@@ -32,9 +32,14 @@ tracing-flame = { workspace = true }
tracing-tree = { workspace = true }

[dev-dependencies]
filetime = { workspace = true }
tempfile = { workspace = true }
ruff_db = { workspace = true, features = ["testing"] }

insta = { workspace = true, features = ["filters"] }
insta-cmd = { workspace = true }
filetime = { workspace = true }
regex = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }

[lints]
workspace = true
@@ -9,7 +9,7 @@ use python_version::PythonVersion;
use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server;
use red_knot_workspace::db::ProjectDatabase;
use red_knot_workspace::project::settings::Configuration;
use red_knot_workspace::project::options::{EnvironmentOptions, Options};
use red_knot_workspace::project::ProjectMetadata;
use red_knot_workspace::watch;
use red_knot_workspace::watch::ProjectWatcher;

@@ -71,31 +71,30 @@ struct Args {
}

impl Args {
fn to_configuration(&self, cli_cwd: &SystemPath) -> Configuration {
let mut configuration = Configuration::default();

if let Some(python_version) = self.python_version {
configuration.python_version = Some(python_version.into());
fn to_options(&self, cli_cwd: &SystemPath) -> Options {
Options {
environment: Some(EnvironmentOptions {
python_version: self.python_version.map(Into::into),
venv_path: self
.venv_path
.as_ref()
.map(|venv_path| SitePackages::Derived {
venv_path: SystemPath::absolute(venv_path, cli_cwd),
}),
typeshed: self
.typeshed
.as_ref()
.map(|typeshed| SystemPath::absolute(typeshed, cli_cwd)),
extra_paths: self.extra_search_path.as_ref().map(|extra_search_paths| {
extra_search_paths
.iter()
.map(|path| SystemPath::absolute(path, cli_cwd))
.collect()
}),
..EnvironmentOptions::default()
}),
..Default::default()
}

if let Some(venv_path) = &self.venv_path {
configuration.search_paths.site_packages = Some(SitePackages::Derived {
venv_path: SystemPath::absolute(venv_path, cli_cwd),
});
}

if let Some(typeshed) = &self.typeshed {
configuration.search_paths.typeshed = Some(SystemPath::absolute(typeshed, cli_cwd));
}

if let Some(extra_search_paths) = &self.extra_search_path {
configuration.search_paths.extra_paths = extra_search_paths
.iter()
.map(|path| Some(SystemPath::absolute(path, cli_cwd)))
.collect();
}

configuration
}
}

@@ -164,18 +163,13 @@ fn run() -> anyhow::Result<ExitStatus> {
.unwrap_or_else(|| cli_base_path.clone());

let system = OsSystem::new(cwd.clone());
let cli_configuration = args.to_configuration(&cwd);
let workspace_metadata = ProjectMetadata::discover(
system.current_directory(),
&system,
Some(&cli_configuration),
)?;
let cli_options = args.to_options(&cwd);
let mut workspace_metadata = ProjectMetadata::discover(system.current_directory(), &system)?;
workspace_metadata.apply_cli_options(cli_options.clone());

// TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists.
let mut db = ProjectDatabase::new(workspace_metadata, system)?;

let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_options);

// Listen to Ctrl+C and abort the watch mode.
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));

@@ -228,11 +222,11 @@ struct MainLoop {
/// The file system watcher, if running in watch mode.
watcher: Option<ProjectWatcher>,

cli_configuration: Configuration,
cli_options: Options,
}

impl MainLoop {
fn new(cli_configuration: Configuration) -> (Self, MainLoopCancellationToken) {
fn new(cli_options: Options) -> (Self, MainLoopCancellationToken) {
let (sender, receiver) = crossbeam_channel::bounded(10);

(

@@ -240,7 +234,7 @@ impl MainLoop {
sender: sender.clone(),
receiver,
watcher: None,
cli_configuration,
cli_options,
},
MainLoopCancellationToken { sender },
)

@@ -324,7 +318,7 @@ impl MainLoop {
MainLoopMessage::ApplyChanges(changes) => {
revision += 1;
// Automatically cancels any pending queries and waits for them to complete.
db.apply_changes(changes, Some(&self.cli_configuration));
db.apply_changes(changes, Some(&self.cli_options));
if let Some(watcher) = self.watcher.as_mut() {
watcher.update(db);
}
60
crates/red_knot/tests/cli.rs
Normal file
@@ -0,0 +1,60 @@
use anyhow::Context;
use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
use std::process::Command;
use tempfile::TempDir;

/// Specifying an option on the CLI should take precedence over the same setting in the
/// project's configuration.
#[test]
fn test_config_override() -> anyhow::Result<()> {
let tempdir = TempDir::new()?;

std::fs::write(
tempdir.path().join("pyproject.toml"),
r#"
[tool.knot.environment]
python-version = "3.11"
"#,
)
.context("Failed to write settings")?;

std::fs::write(
tempdir.path().join("test.py"),
r#"
import sys

# Access `sys.last_exc` that was only added in Python 3.12
print(sys.last_exc)
"#,
)
.context("Failed to write test.py")?;

insta::with_settings!({filters => vec![(&*tempdir_filter(&tempdir), "<temp_dir>/")]}, {
assert_cmd_snapshot!(knot().arg("--project").arg(tempdir.path()), @r"
success: false
exit_code: 1
----- stdout -----
error[lint:unresolved-attribute] <temp_dir>/test.py:5:7 Type `<module 'sys'>` has no attribute `last_exc`

----- stderr -----
");
});

assert_cmd_snapshot!(knot().arg("--project").arg(tempdir.path()).arg("--python-version").arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----

----- stderr -----
");

Ok(())
}

fn knot() -> Command {
Command::new(get_cargo_bin("red_knot"))
}

fn tempdir_filter(tempdir: &TempDir) -> String {
format!(r"{}\\?/?", regex::escape(tempdir.path().to_str().unwrap()))
}
@ -4,9 +4,12 @@ use std::io::Write;
|
|||
use std::time::{Duration, Instant};
|
||||
|
||||
use anyhow::{anyhow, Context};
|
||||
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
|
||||
use red_knot_python_semantic::{
|
||||
resolve_module, ModuleName, PythonPlatform, PythonVersion, SitePackages,
|
||||
};
|
||||
use red_knot_workspace::db::{Db, ProjectDatabase};
|
||||
use red_knot_workspace::project::settings::{Configuration, SearchPathConfiguration};
|
||||
use red_knot_workspace::project::options::{EnvironmentOptions, Options};
|
||||
use red_knot_workspace::project::pyproject::{PyProject, Tool};
|
||||
use red_knot_workspace::project::ProjectMetadata;
|
||||
use red_knot_workspace::watch::{directory_watcher, ChangeEvent, ProjectWatcher};
|
||||
use ruff_db::files::{system_path_to_file, File, FileError};
|
||||
|
@ -22,7 +25,6 @@ struct TestCase {
|
|||
/// We need to hold on to it in the test case or the temp files get deleted.
|
||||
_temp_dir: tempfile::TempDir,
|
||||
root_dir: SystemPathBuf,
|
||||
configuration: Configuration,
|
||||
}
|
||||
|
||||
impl TestCase {
|
||||
|
@ -112,19 +114,44 @@ impl TestCase {
|
|||
Ok(all_events)
|
||||
}
|
||||
|
||||
fn take_watch_changes(&self) -> Vec<ChangeEvent> {
|
||||
self.try_take_watch_changes(Duration::from_secs(10))
|
||||
fn take_watch_changes<M: MatchEvent>(&self, matcher: M) -> Vec<ChangeEvent> {
|
||||
self.try_take_watch_changes(matcher, Duration::from_secs(10))
|
||||
.expect("Expected watch changes but observed none")
|
||||
}
|
||||
|
||||
fn try_take_watch_changes(&self, timeout: Duration) -> Option<Vec<ChangeEvent>> {
|
||||
let watcher = self.watcher.as_ref()?;
|
||||
fn try_take_watch_changes<M: MatchEvent>(
|
||||
&self,
|
||||
mut matcher: M,
|
||||
timeout: Duration,
|
||||
) -> Result<Vec<ChangeEvent>, Vec<ChangeEvent>> {
|
||||
let watcher = self
|
||||
.watcher
|
||||
.as_ref()
|
||||
.expect("Cannot call `try_take_watch_changes` after `stop_watch`");
|
||||
|
||||
let mut all_events = self
|
||||
.changes_receiver
|
||||
.recv_timeout(timeout)
|
||||
.unwrap_or_default();
|
||||
watcher.flush();
|
||||
let start = Instant::now();
|
||||
let mut all_events = Vec::new();
|
||||
|
||||
loop {
|
||||
let events = self
|
||||
.changes_receiver
|
||||
.recv_timeout(Duration::from_millis(100))
|
||||
.unwrap_or_default();
|
||||
|
||||
if events
|
||||
.iter()
|
||||
.any(|event| matcher.match_event(event) || event.is_rescan())
|
||||
{
|
||||
all_events.extend(events);
|
||||
break;
|
||||
}
|
||||
|
||||
all_events.extend(events);
|
||||
|
||||
if start.elapsed() > timeout {
|
||||
return Err(all_events);
|
||||
}
|
||||
}
|
||||
|
||||
while let Ok(event) = self
|
||||
.changes_receiver
|
||||
|
@ -134,26 +161,28 @@ impl TestCase {
|
|||
watcher.flush();
|
||||
}
|
||||
|
||||
if all_events.is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some(all_events)
|
||||
Ok(all_events)
|
||||
}
|
||||
|
||||
fn apply_changes(&mut self, changes: Vec<ChangeEvent>) {
|
||||
self.db.apply_changes(changes, Some(&self.configuration));
|
||||
self.db.apply_changes(changes, None);
|
||||
}
|
||||
|
||||
fn update_search_path_settings(
|
||||
&mut self,
|
||||
configuration: SearchPathConfiguration,
|
||||
) -> anyhow::Result<()> {
|
||||
let program = Program::get(self.db());
|
||||
fn update_options(&mut self, options: Options) -> anyhow::Result<()> {
|
||||
std::fs::write(
|
||||
self.project_path("pyproject.toml").as_std_path(),
|
||||
toml::to_string(&PyProject {
|
||||
project: None,
|
||||
tool: Some(Tool {
|
||||
knot: Some(options),
|
||||
}),
|
||||
})
|
||||
.context("Failed to serialize options")?,
|
||||
)
|
||||
.context("Failed to write configuration")?;
|
||||
|
||||
let new_settings = configuration.to_settings(self.db.project().root(&self.db));
|
||||
self.configuration.search_paths = configuration;
|
||||
|
||||
program.update_search_paths(&mut self.db, &new_settings)?;
|
||||
let changes = self.take_watch_changes(event_for_file("pyproject.toml"));
|
||||
self.apply_changes(changes);
|
||||
|
||||
if let Some(watcher) = &mut self.watcher {
|
||||
watcher.update(&self.db);
|
||||
|
@ -234,14 +263,13 @@ fn setup<F>(setup_files: F) -> anyhow::Result<TestCase>
|
|||
where
|
||||
F: SetupFiles,
|
||||
{
|
||||
setup_with_search_paths(setup_files, |_root, _project_path| {
|
||||
SearchPathConfiguration::default()
|
||||
})
|
||||
setup_with_options(setup_files, |_root, _project_path| None)
|
||||
}
|
||||
|
||||
fn setup_with_search_paths<F>(
|
||||
// TODO: Replace with configuration?
|
||||
fn setup_with_options<F>(
|
||||
setup_files: F,
|
||||
create_search_paths: impl FnOnce(&SystemPath, &SystemPath) -> SearchPathConfiguration,
|
||||
create_options: impl FnOnce(&SystemPath, &SystemPath) -> Option<Options>,
|
||||
) -> anyhow::Result<TestCase>
|
||||
where
|
||||
F: SetupFiles,
|
||||
|
@ -275,32 +303,34 @@ where
|
|||
|
||||
let system = OsSystem::new(&project_path);
|
||||
|
||||
let search_paths = create_search_paths(&root_path, &project_path);
|
||||
if let Some(options) = create_options(&root_path, &project_path) {
|
||||
std::fs::write(
|
||||
project_path.join("pyproject.toml").as_std_path(),
|
||||
toml::to_string(&PyProject {
|
||||
project: None,
|
||||
tool: Some(Tool {
|
||||
knot: Some(options),
|
||||
}),
|
||||
})
|
||||
.context("Failed to serialize options")?,
|
||||
)
|
||||
.context("Failed to write configuration")?;
|
||||
}
|
||||
|
||||
for path in search_paths
|
||||
let project = ProjectMetadata::discover(&project_path, &system)?;
|
||||
let program_settings = project.to_program_settings(&system);
|
||||
|
||||
for path in program_settings
|
||||
.search_paths
|
||||
.extra_paths
|
||||
.iter()
|
||||
.flatten()
|
||||
.chain(search_paths.typeshed.iter())
|
||||
.chain(search_paths.site_packages.iter().flat_map(|site_packages| {
|
||||
if let SitePackages::Known(path) = site_packages {
|
||||
path.as_slice()
|
||||
} else {
|
||||
&[]
|
||||
}
|
||||
}))
|
||||
.chain(program_settings.search_paths.typeshed.as_ref())
|
||||
.chain(program_settings.search_paths.site_packages.paths())
|
||||
{
|
||||
std::fs::create_dir_all(path.as_std_path())
|
||||
.with_context(|| format!("Failed to create search path `{path}`"))?;
|
||||
}
|
||||
|
||||
let configuration = Configuration {
|
||||
python_version: Some(PythonVersion::PY312),
|
||||
search_paths,
|
||||
};
|
||||
|
||||
let project = ProjectMetadata::discover(&project_path, &system, Some(&configuration))?;
|
||||
|
||||
let db = ProjectDatabase::new(project, system)?;
|
||||
|
||||
let (sender, receiver) = crossbeam::channel::unbounded();
|
||||
|
@ -316,12 +346,12 @@ where
|
|||
watcher: Some(watcher),
|
||||
_temp_dir: temp_dir,
|
||||
root_dir: root_path,
|
||||
configuration,
|
||||
};
|
||||
|
||||
// Sometimes the file watcher reports changes for events that happened before the watcher was started.
|
||||
// Do a best effort at dropping these events.
|
||||
test_case.try_take_watch_changes(Duration::from_millis(100));
|
||||
let _ =
|
||||
test_case.try_take_watch_changes(|_event: &ChangeEvent| true, Duration::from_millis(100));
|
||||
|
||||
Ok(test_case)
|
||||
}
|
||||
|
@ -762,13 +792,15 @@ fn directory_deleted() -> anyhow::Result<()> {
|
|||
|
||||
#[test]
|
||||
fn search_path() -> anyhow::Result<()> {
|
||||
let mut case =
|
||||
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, _project_path| {
|
||||
SearchPathConfiguration {
|
||||
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
|
||||
..SearchPathConfiguration::default()
|
||||
}
|
||||
})?;
|
||||
let mut case = setup_with_options([("bar.py", "import sub.a")], |root_path, _project_path| {
|
||||
Some(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
venv_path: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
})?;
|
||||
|
||||
let site_packages = case.root_path().join("site_packages");
|
||||
|
||||
|
@ -802,9 +834,12 @@ fn add_search_path() -> anyhow::Result<()> {
|
|||
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_none());
|
||||
|
||||
// Register site-packages as a search path.
|
||||
case.update_search_path_settings(SearchPathConfiguration {
|
||||
site_packages: Some(SitePackages::Known(vec![site_packages.clone()])),
|
||||
..SearchPathConfiguration::default()
|
||||
case.update_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
venv_path: Some(SitePackages::Known(vec![site_packages.clone()])),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
.expect("Search path settings to be valid");
|
||||
|
||||
|
@ -821,19 +856,22 @@ fn add_search_path() -> anyhow::Result<()> {
|
|||
|
||||
#[test]
|
||||
fn remove_search_path() -> anyhow::Result<()> {
|
||||
let mut case =
|
||||
setup_with_search_paths([("bar.py", "import sub.a")], |root_path, _project_path| {
|
||||
SearchPathConfiguration {
|
||||
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
|
||||
..SearchPathConfiguration::default()
|
||||
}
|
||||
})?;
|
||||
let mut case = setup_with_options([("bar.py", "import sub.a")], |root_path, _project_path| {
|
||||
Some(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
venv_path: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
})?;
|
||||
|
||||
// Remove site packages from the search path settings.
|
||||
let site_packages = case.root_path().join("site_packages");
|
||||
case.update_search_path_settings(SearchPathConfiguration {
|
||||
site_packages: None,
|
||||
..SearchPathConfiguration::default()
|
||||
|
||||
case.update_options(Options {
|
||||
environment: None,
|
||||
..Options::default()
|
||||
})
|
||||
.expect("Search path settings to be valid");
|
||||
|
||||
|
@ -846,9 +884,63 @@ fn remove_search_path() -> anyhow::Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn change_python_version_and_platform() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_options(
|
||||
// `sys.last_exc` is a Python 3.12 only feature
|
||||
// `os.getegid()` is Unix only
|
||||
[(
|
||||
"bar.py",
|
||||
r#"
|
||||
import sys
|
||||
import os
|
||||
print(sys.last_exc, os.getegid())
|
||||
"#,
|
||||
)],
|
||||
|_root_path, _project_path| {
|
||||
Some(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: Some(PythonVersion::PY311),
|
||||
python_platform: Some(PythonPlatform::Identifier("win32".to_string())),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
},
|
||||
)?;
|
||||
|
||||
let diagnostics = case.db.check().context("Failed to check project.")?;
|
||||
|
||||
assert_eq!(diagnostics.len(), 2);
|
||||
assert_eq!(
|
||||
diagnostics[0].message(),
|
||||
"Type `<module 'sys'>` has no attribute `last_exc`"
|
||||
);
|
||||
assert_eq!(
|
||||
diagnostics[1].message(),
|
||||
"Type `<module 'os'>` has no attribute `getegid`"
|
||||
);
|
||||
|
||||
// Change the python version
|
||||
case.update_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: Some(PythonVersion::PY312),
|
||||
python_platform: Some(PythonPlatform::Identifier("linux".to_string())),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
.expect("Search path settings to be valid");
|
||||
|
||||
let diagnostics = case.db.check().context("Failed to check project.")?;
|
||||
assert!(diagnostics.is_empty());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn changed_versions_file() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_search_paths(
|
||||
let mut case = setup_with_options(
|
||||
|root_path: &SystemPath, project_path: &SystemPath| {
|
||||
std::fs::write(project_path.join("bar.py").as_std_path(), "import sub.a")?;
|
||||
std::fs::create_dir_all(root_path.join("typeshed/stdlib").as_std_path())?;
|
||||
|
@ -860,9 +952,14 @@ fn changed_versions_file() -> anyhow::Result<()> {
|
|||
|
||||
Ok(())
|
||||
},
|
||||
|root_path, _project_path| SearchPathConfiguration {
|
||||
typeshed: Some(root_path.join("typeshed")),
|
||||
..SearchPathConfiguration::default()
|
||||
|root_path, _project_path| {
|
||||
Some(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
typeshed: Some(root_path.join("typeshed")),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
},
|
||||
)?;
|
||||
|
||||
|
@ -1127,7 +1224,7 @@ mod unix {
|
|||
update_file(baz_original, "def baz(): print('Version 2')")
|
||||
.context("Failed to update bar/baz.py")?;
|
||||
|
||||
let changes = case.take_watch_changes();
|
||||
let changes = case.take_watch_changes(event_for_file("baz.py"));
|
||||
|
||||
case.apply_changes(changes);
|
||||
|
||||
|
@ -1259,7 +1356,7 @@ mod unix {
|
|||
/// ```
|
||||
#[test]
|
||||
fn symlinked_module_search_path() -> anyhow::Result<()> {
|
||||
let mut case = setup_with_search_paths(
|
||||
let mut case = setup_with_options(
|
||||
|root: &SystemPath, project: &SystemPath| {
|
||||
// Set up the symlink target.
|
||||
let site_packages = root.join("site-packages");
|
||||
|
@ -1282,11 +1379,16 @@ mod unix {
|
|||
|
||||
Ok(())
|
||||
},
|
||||
|_root, project| SearchPathConfiguration {
|
||||
site_packages: Some(SitePackages::Known(vec![
|
||||
project.join(".venv/lib/python3.12/site-packages")
|
||||
])),
|
||||
..SearchPathConfiguration::default()
|
||||
|_root, project| {
|
||||
Some(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
venv_path: Some(SitePackages::Known(vec![
|
||||
project.join(".venv/lib/python3.12/site-packages")
|
||||
])),
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
..Options::default()
|
||||
})
|
||||
},
|
||||
)?;
|
||||
|
||||
|
|
|
@ -175,7 +175,7 @@ pub(crate) mod tests {
|
|||
db.write_files(self.files)
|
||||
.context("Failed to write test files")?;
|
||||
|
||||
let mut search_paths = SearchPathSettings::new(src_root);
|
||||
let mut search_paths = SearchPathSettings::new(vec![src_root]);
|
||||
search_paths.typeshed = self.custom_typeshed;
|
||||
|
||||
Program::from_settings(
|
||||
|
|
|
@ -168,7 +168,7 @@ impl SearchPaths {
|
|||
|
||||
let SearchPathSettings {
|
||||
extra_paths,
|
||||
src_root,
|
||||
src_roots,
|
||||
typeshed,
|
||||
site_packages: site_packages_paths,
|
||||
} = settings;
|
||||
|
@ -186,8 +186,10 @@ impl SearchPaths {
|
|||
static_paths.push(SearchPath::extra(system, path)?);
|
||||
}
|
||||
|
||||
tracing::debug!("Adding first-party search path '{src_root}'");
|
||||
static_paths.push(SearchPath::first_party(system, src_root.to_path_buf())?);
|
||||
for src_root in src_roots {
|
||||
tracing::debug!("Adding first-party search path '{src_root}'");
|
||||
static_paths.push(SearchPath::first_party(system, src_root.to_path_buf())?);
|
||||
}
|
||||
|
||||
let (typeshed_versions, stdlib_path) = if let Some(typeshed) = typeshed {
|
||||
let typeshed = canonicalize(typeshed, system);
|
||||
|
@ -1299,7 +1301,7 @@ mod tests {
|
|||
python_platform: PythonPlatform::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
src_roots: vec![src.clone()],
|
||||
typeshed: Some(custom_typeshed),
|
||||
site_packages: SitePackages::Known(vec![site_packages]),
|
||||
},
|
||||
|
@ -1805,7 +1807,7 @@ not_a_directory
|
|||
python_platform: PythonPlatform::default(),
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: SystemPathBuf::from("/src"),
|
||||
src_roots: vec![SystemPathBuf::from("/src")],
|
||||
typeshed: None,
|
||||
site_packages: SitePackages::Known(vec![
|
||||
venv_site_packages,
|
||||
|
|
|
@ -237,7 +237,7 @@ impl TestCaseBuilder<MockedTypeshed> {
|
|||
python_platform,
|
||||
search_paths: SearchPathSettings {
|
||||
extra_paths: vec![],
|
||||
src_root: src.clone(),
|
||||
src_roots: vec![src.clone()],
|
||||
typeshed: Some(typeshed.clone()),
|
||||
site_packages: SitePackages::Known(vec![site_packages.clone()]),
|
||||
},
|
||||
|
@ -295,7 +295,7 @@ impl TestCaseBuilder<VendoredTypeshed> {
|
|||
python_platform,
|
||||
search_paths: SearchPathSettings {
|
||||
site_packages: SitePackages::Known(vec![site_packages.clone()]),
|
||||
..SearchPathSettings::new(src.clone())
|
||||
..SearchPathSettings::new(vec![src.clone()])
|
||||
},
|
||||
},
|
||||
)
|
||||
|
|
|
@ -103,7 +103,7 @@ pub struct SearchPathSettings {
|
|||
pub extra_paths: Vec<SystemPathBuf>,
|
||||
|
||||
/// The root of the project, used for finding first-party modules.
|
||||
pub src_root: SystemPathBuf,
|
||||
pub src_roots: Vec<SystemPathBuf>,
|
||||
|
||||
/// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.
|
||||
/// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
|
||||
|
@ -115,9 +115,9 @@ pub struct SearchPathSettings {
|
|||
}
|
||||
|
||||
impl SearchPathSettings {
|
||||
pub fn new(src_root: SystemPathBuf) -> Self {
|
||||
pub fn new(src_roots: Vec<SystemPathBuf>) -> Self {
|
||||
Self {
|
||||
src_root,
|
||||
src_roots,
|
||||
extra_paths: vec![],
|
||||
typeshed: None,
|
||||
site_packages: SitePackages::Known(vec![]),
|
||||
|
@ -126,7 +126,7 @@ impl SearchPathSettings {
|
|||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
|
||||
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
|
||||
pub enum SitePackages {
|
||||
Derived {
|
||||
venv_path: SystemPathBuf,
|
||||
|
@ -134,3 +134,12 @@ pub enum SitePackages {
|
|||
/// Resolved site packages paths
|
||||
Known(Vec<SystemPathBuf>),
|
||||
}
|
||||
|
||||
impl SitePackages {
|
||||
pub fn paths(&self) -> &[SystemPathBuf] {
|
||||
match self {
|
||||
SitePackages::Derived { venv_path } => std::slice::from_ref(venv_path),
|
||||
SitePackages::Known(paths) => paths,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -69,7 +69,7 @@ impl Session {
|
|||
let system = LSPSystem::new(index.clone());
|
||||
|
||||
// TODO(dhruvmanila): Get the values from the client settings
|
||||
let metadata = ProjectMetadata::discover(system_path, &system, None)?;
|
||||
let metadata = ProjectMetadata::discover(system_path, &system)?;
|
||||
// TODO(micha): Handle the case where the program settings are incorrect more gracefully.
|
||||
workspaces.insert(path, ProjectDatabase::new(metadata, system)?);
|
||||
}
|
||||
|
|
|
@ -11,7 +11,7 @@ use ruff_db::{Db as SourceDb, Upcast};
|
|||
#[salsa::db]
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct Db {
|
||||
workspace_root: SystemPathBuf,
|
||||
project_root: SystemPathBuf,
|
||||
storage: salsa::Storage<Self>,
|
||||
files: Files,
|
||||
system: TestSystem,
|
||||
|
@ -20,11 +20,11 @@ pub(crate) struct Db {
|
|||
}
|
||||
|
||||
impl Db {
|
||||
pub(crate) fn setup(workspace_root: SystemPathBuf) -> Self {
|
||||
pub(crate) fn setup(project_root: SystemPathBuf) -> Self {
|
||||
let rule_selection = RuleSelection::from_registry(default_lint_registry());
|
||||
|
||||
let db = Self {
|
||||
workspace_root,
|
||||
project_root,
|
||||
storage: salsa::Storage::default(),
|
||||
system: TestSystem::default(),
|
||||
vendored: red_knot_vendored::file_system().clone(),
|
||||
|
@ -33,7 +33,7 @@ impl Db {
|
|||
};
|
||||
|
||||
db.memory_file_system()
|
||||
.create_directory_all(&db.workspace_root)
|
||||
.create_directory_all(&db.project_root)
|
||||
.unwrap();
|
||||
|
||||
Program::from_settings(
|
||||
|
@ -41,7 +41,7 @@ impl Db {
|
|||
ProgramSettings {
|
||||
python_version: PythonVersion::default(),
|
||||
python_platform: PythonPlatform::default(),
|
||||
search_paths: SearchPathSettings::new(db.workspace_root.clone()),
|
||||
search_paths: SearchPathSettings::new(vec![db.project_root.clone()]),
|
||||
},
|
||||
)
|
||||
.expect("Invalid search path settings");
|
||||
|
@ -49,8 +49,8 @@ impl Db {
|
|||
db
|
||||
}
|
||||
|
||||
pub(crate) fn workspace_root(&self) -> &SystemPath {
|
||||
&self.workspace_root
|
||||
pub(crate) fn project_root(&self) -> &SystemPath {
|
||||
&self.project_root
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -97,7 +97,7 @@ pub fn run(path: &Utf8Path, long_title: &str, short_title: &str, test_name: &str
|
|||
}
|
||||
|
||||
fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures> {
|
||||
let workspace_root = db.workspace_root().to_path_buf();
|
||||
let project_root = db.project_root().to_path_buf();
|
||||
|
||||
let test_files: Vec<_> = test
|
||||
.files()
|
||||
|
@ -110,7 +110,7 @@ fn run_test(db: &mut db::Db, test: &parser::MarkdownTest) -> Result<(), Failures
|
|||
matches!(embedded.lang, "py" | "pyi"),
|
||||
"Non-Python files not supported yet."
|
||||
);
|
||||
let full_path = workspace_root.join(embedded.path);
|
||||
let full_path = project_root.join(embedded.path);
|
||||
db.write_file(&full_path, embedded.code).unwrap();
|
||||
let file = system_path_to_file(db, full_path).unwrap();
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@ use js_sys::Error;
|
|||
use wasm_bindgen::prelude::*;
|
||||
|
||||
use red_knot_workspace::db::{Db, ProjectDatabase};
|
||||
use red_knot_workspace::project::settings::Configuration;
|
||||
use red_knot_workspace::project::options::{EnvironmentOptions, Options};
|
||||
use red_knot_workspace::project::ProjectMetadata;
|
||||
use ruff_db::diagnostic::Diagnostic;
|
||||
use ruff_db::files::{system_path_to_file, File};
|
||||
|
@ -42,15 +42,17 @@ impl Workspace {
|
|||
#[wasm_bindgen(constructor)]
|
||||
pub fn new(root: &str, settings: &Settings) -> Result<Workspace, Error> {
|
||||
let system = WasmSystem::new(SystemPath::new(root));
|
||||
let workspace = ProjectMetadata::discover(
|
||||
SystemPath::new(root),
|
||||
&system,
|
||||
Some(&Configuration {
|
||||
|
||||
let mut workspace =
|
||||
ProjectMetadata::discover(SystemPath::new(root), &system).map_err(into_error)?;
|
||||
|
||||
workspace.apply_cli_options(Options {
|
||||
environment: Some(EnvironmentOptions {
|
||||
python_version: Some(settings.python_version.into()),
|
||||
..Configuration::default()
|
||||
..EnvironmentOptions::default()
|
||||
}),
|
||||
)
|
||||
.map_err(into_error)?;
|
||||
..Options::default()
|
||||
});
|
||||
|
||||
let db = ProjectDatabase::new(workspace, system.clone()).map_err(into_error)?;
|
||||
|
||||
|
|
|
@ -12,12 +12,12 @@ license.workspace = true
|
|||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
red_knot_python_semantic = { workspace = true }
|
||||
|
||||
ruff_cache = { workspace = true }
|
||||
ruff_db = { workspace = true, features = ["os", "cache", "serde"] }
|
||||
ruff_macros = { workspace = true }
|
||||
ruff_python_ast = { workspace = true, features = ["serde"] }
|
||||
ruff_text_size = { workspace = true }
|
||||
red_knot_python_semantic = { workspace = true, features = ["serde"] }
|
||||
red_knot_vendored = { workspace = true }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
|
@ -34,7 +34,6 @@ toml = { workspace = true }
|
|||
tracing = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
red_knot_python_semantic = { workspace = true, features = ["serde"] }
|
||||
ruff_db = { workspace = true, features = ["testing"] }
|
||||
glob = { workspace = true }
|
||||
insta = { workspace = true, features = ["redactions", "ron"] }
|
||||
|
|
|
@ -45,8 +45,12 @@ impl ProjectDatabase {
|
|||
rule_selection: Arc::new(rule_selection),
|
||||
};
|
||||
|
||||
// TODO: Use the `program_settings` to compute the key for the database's persistent
|
||||
// cache and load the cache if it exists.
|
||||
// we may want to have a dedicated method for this?
|
||||
|
||||
// Initialize the `Program` singleton
|
||||
let program_settings = project_metadata.to_program_settings();
|
||||
let program_settings = project_metadata.to_program_settings(db.system());
|
||||
Program::from_settings(&db, program_settings)?;
|
||||
|
||||
db.project = Some(Project::from_metadata(&db, project_metadata));
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
use crate::db::{Db, ProjectDatabase};
|
||||
use crate::project::settings::Configuration;
|
||||
use crate::project::options::Options;
|
||||
use crate::project::{Project, ProjectMetadata};
|
||||
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
|
||||
|
||||
use red_knot_python_semantic::Program;
|
||||
use ruff_db::files::{system_path_to_file, File, Files};
|
||||
use ruff_db::system::walk_directory::WalkState;
|
||||
|
@ -10,12 +11,8 @@ use ruff_db::Db as _;
|
|||
use rustc_hash::FxHashSet;
|
||||
|
||||
impl ProjectDatabase {
|
||||
#[tracing::instrument(level = "debug", skip(self, changes, base_configuration))]
|
||||
pub fn apply_changes(
|
||||
&mut self,
|
||||
changes: Vec<ChangeEvent>,
|
||||
base_configuration: Option<&Configuration>,
|
||||
) {
|
||||
#[tracing::instrument(level = "debug", skip(self, changes, cli_options))]
|
||||
pub fn apply_changes(&mut self, changes: Vec<ChangeEvent>, cli_options: Option<&Options>) {
|
||||
let mut project = self.project();
|
||||
let project_path = project.root(self).to_path_buf();
|
||||
let program = Program::get(self);
|
||||
|
@ -141,9 +138,13 @@ impl ProjectDatabase {
|
|||
}
|
||||
|
||||
if project_changed {
|
||||
match ProjectMetadata::discover(&project_path, self.system(), base_configuration) {
|
||||
Ok(metadata) => {
|
||||
let program_settings = metadata.to_program_settings();
|
||||
match ProjectMetadata::discover(&project_path, self.system()) {
|
||||
Ok(mut metadata) => {
|
||||
if let Some(cli_options) = cli_options {
|
||||
metadata.apply_cli_options(cli_options.clone());
|
||||
}
|
||||
|
||||
let program_settings = metadata.to_program_settings(self.system());
|
||||
|
||||
let program = Program::get(self);
|
||||
if let Err(error) = program.update_from_settings(self, program_settings) {
|
||||
|
@ -168,7 +169,10 @@ impl ProjectDatabase {
|
|||
|
||||
return;
|
||||
} else if custom_stdlib_change {
|
||||
let search_paths = project.metadata(self).to_program_settings().search_paths;
|
||||
let search_paths = project
|
||||
.metadata(self)
|
||||
.to_program_settings(self.system())
|
||||
.search_paths;
|
||||
|
||||
if let Err(error) = program.update_search_paths(self, &search_paths) {
|
||||
tracing::error!("Failed to set the new search paths: {error}");
|
||||
|
|
|
@ -21,10 +21,11 @@ use salsa::{Durability, Setter as _};
|
|||
use std::borrow::Cow;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub mod combine;
|
||||
mod files;
|
||||
mod metadata;
|
||||
mod pyproject;
|
||||
pub mod settings;
|
||||
pub mod options;
|
||||
pub mod pyproject;
|
||||
|
||||
/// The project as a Salsa ingredient.
|
||||
///
|
||||
|
@ -52,7 +53,7 @@ pub struct Project {
|
|||
#[return_ref]
|
||||
file_set: IndexedFiles,
|
||||
|
||||
/// The metadata describing the project, including the unresolved configuration.
|
||||
/// The metadata describing the project, including the unresolved options.
|
||||
#[return_ref]
|
||||
pub metadata: ProjectMetadata,
|
||||
}
|
||||
|
|
190
crates/red_knot_workspace/src/project/combine.rs
Normal file
@@ -0,0 +1,190 @@
use std::{collections::HashMap, hash::BuildHasher};

use red_knot_python_semantic::{PythonPlatform, PythonVersion, SitePackages};
use ruff_db::system::SystemPathBuf;

/// Combine two values, preferring the values in `self`.
///
/// The logic should follow that of Cargo's `config.toml`:
///
/// > If a key is specified in multiple config files, the values will get merged together.
/// > Numbers, strings, and booleans will use the value in the deeper config directory taking
/// > precedence over ancestor directories, where the home directory is the lowest priority.
/// > Arrays will be joined together with higher precedence items being placed later in the
/// > merged array.
///
/// ## uv Compatibility
///
/// The merging behavior differs from uv in that values with higher precedence in arrays
/// are placed later in the merged array. This is because we want to support overriding
/// earlier values and values from other configurations, including unsetting them.
/// For example: patterns coming last in file inclusion and exclusion patterns
/// allow overriding earlier patterns, matching the `gitignore` behavior.
/// Generally speaking, it feels more intuitive if later values override earlier values
/// than the other way around: `knot --exclude png --exclude "!important.png"`.
///
/// The main downside of this approach is that the ordering can be surprising in cases
/// where the option has a "first match" semantic and not a "last match" wins.
/// One such example is `extra-paths` where the semantics is given by Python:
/// the module on the first matching search path wins.
///
/// ```toml
/// [environment]
/// extra-paths = ["b", "c"]
/// ```
///
/// ```bash
/// knot --extra-paths a
/// ```
///
/// That's why a user might expect that this configuration results in `["a", "b", "c"]`,
/// because the CLI has higher precedence. However, the current implementation results in a
/// resolved extra search path of `["b", "c", "a"]`, which means `a` will be tried last.
///
/// There's an argument here that the user should be able to specify the order of the paths,
/// because only then is the user in full control of where to insert the path when specifying `extra-paths`
/// in multiple sources.
///
/// ## Macro
/// You can automatically derive `Combine` for structs with named fields by using `derive(ruff_macros::Combine)`.
pub trait Combine {
#[must_use]
fn combine(mut self, other: Self) -> Self
where
Self: Sized,
{
self.combine_with(other);
self
}

fn combine_with(&mut self, other: Self);
}

impl<T> Combine for Option<T>
where
T: Combine,
{
fn combine(self, other: Self) -> Self
where
Self: Sized,
{
match (self, other) {
(Some(a), Some(b)) => Some(a.combine(b)),
(None, Some(b)) => Some(b),
(a, _) => a,
}
}

fn combine_with(&mut self, other: Self) {
match (self, other) {
(Some(a), Some(b)) => {
a.combine_with(b);
}
(a @ None, Some(b)) => {
*a = Some(b);
}
_ => {}
}
}
}

impl<T> Combine for Vec<T> {
fn combine_with(&mut self, mut other: Self) {
// `self` takes precedence over `other` but values with higher precedence must be placed after.
// Swap the vectors so that `other` is the one that gets extended, so that the values of `self` come after.
std::mem::swap(self, &mut other);
self.extend(other);
}
}

impl<K, V, S> Combine for HashMap<K, V, S>
where
K: Eq + std::hash::Hash,
S: BuildHasher,
{
fn combine_with(&mut self, mut other: Self) {
// `self` takes precedence over `other` but `extend` overrides existing values.
// Swap the hash maps so that `self` is the one that gets extended.
std::mem::swap(self, &mut other);
self.extend(other);
}
}

/// Implements [`Combine`] for a value that always returns `self` when combined with another value.
macro_rules! impl_noop_combine {
($name:ident) => {
impl Combine for $name {
#[inline(always)]
fn combine_with(&mut self, _other: Self) {}

#[inline(always)]
fn combine(self, _other: Self) -> Self {
self
}
}
};
}

impl_noop_combine!(SystemPathBuf);
impl_noop_combine!(PythonPlatform);
impl_noop_combine!(SitePackages);
impl_noop_combine!(PythonVersion);

// std types
impl_noop_combine!(bool);
impl_noop_combine!(usize);
impl_noop_combine!(u8);
impl_noop_combine!(u16);
impl_noop_combine!(u32);
impl_noop_combine!(u64);
impl_noop_combine!(u128);
impl_noop_combine!(isize);
impl_noop_combine!(i8);
impl_noop_combine!(i16);
impl_noop_combine!(i32);
impl_noop_combine!(i64);
impl_noop_combine!(i128);
impl_noop_combine!(String);

#[cfg(test)]
mod tests {
use crate::project::combine::Combine;
use std::collections::HashMap;

#[test]
fn combine_option() {
assert_eq!(Some(1).combine(Some(2)), Some(1));
assert_eq!(None.combine(Some(2)), Some(2));
assert_eq!(Some(1).combine(None), Some(1));
}

#[test]
fn combine_vec() {
assert_eq!(None.combine(Some(vec![1, 2, 3])), Some(vec![1, 2, 3]));
assert_eq!(Some(vec![1, 2, 3]).combine(None), Some(vec![1, 2, 3]));
assert_eq!(
Some(vec![1, 2, 3]).combine(Some(vec![4, 5, 6])),
Some(vec![4, 5, 6, 1, 2, 3])
);
}

#[test]
fn combine_map() {
let a: HashMap<u32, _> = HashMap::from_iter([(1, "a"), (2, "a"), (3, "a")]);
let b: HashMap<u32, _> = HashMap::from_iter([(0, "b"), (2, "b"), (5, "b")]);

assert_eq!(None.combine(Some(b.clone())), Some(b.clone()));
assert_eq!(Some(a.clone()).combine(None), Some(a.clone()));
assert_eq!(
Some(a).combine(Some(b)),
Some(HashMap::from_iter([
(0, "b"),
// The value from `a` takes precedence
(1, "a"),
(2, "a"),
(3, "a"),
(5, "b")
]))
);
}
}
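The array-merge ordering described in the `extra-paths` example of the doc comment above can be shown with a short, hypothetical usage sketch (the element type is arbitrary; the behavior comes from the `Vec` and `Option` implementations in this file):

```rust
use red_knot_workspace::project::combine::Combine;

fn main() {
    // `[environment] extra-paths = ["b", "c"]` from the configuration file...
    let from_config = Some(vec!["b".to_string(), "c".to_string()]);
    // ...combined with `knot --extra-paths a` from the CLI.
    let from_cli = Some(vec!["a".to_string()]);

    // The CLI value (higher precedence) is `self`, yet its entries end up last,
    // so the resolved search order is ["b", "c", "a"], as the doc comment warns.
    assert_eq!(
        from_cli.combine(from_config),
        Some(vec!["b".to_string(), "c".to_string(), "a".to_string()])
    );
}
```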
@ -1,8 +1,9 @@
|
|||
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
||||
use ruff_python_ast::name::Name;
|
||||
|
||||
use crate::project::combine::Combine;
|
||||
use crate::project::options::Options;
|
||||
use crate::project::pyproject::{PyProject, PyProjectError};
|
||||
use crate::project::settings::Configuration;
|
||||
use red_knot_python_semantic::ProgramSettings;
|
||||
use thiserror::Error;
|
||||
|
||||
|
@ -13,42 +14,36 @@ pub struct ProjectMetadata {
|
|||
|
||||
pub(super) root: SystemPathBuf,
|
||||
|
||||
/// The resolved settings for this project.
|
||||
pub(super) configuration: Configuration,
|
||||
/// The raw options
|
||||
pub(super) options: Options,
|
||||
}
|
||||
|
||||
impl ProjectMetadata {
|
||||
/// Creates a project with the given name and root that uses the default configuration options.
|
||||
/// Creates a project with the given name and root that uses the default options.
|
||||
pub fn new(name: Name, root: SystemPathBuf) -> Self {
|
||||
Self {
|
||||
name,
|
||||
root,
|
||||
configuration: Configuration::default(),
|
||||
options: Options::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Loads a project from a `pyproject.toml` file.
|
||||
pub(crate) fn from_pyproject(
|
||||
pyproject: PyProject,
|
||||
root: SystemPathBuf,
|
||||
base_configuration: Option<&Configuration>,
|
||||
) -> Self {
|
||||
pub(crate) fn from_pyproject(pyproject: PyProject, root: SystemPathBuf) -> Self {
|
||||
let name = pyproject.project.and_then(|project| project.name);
|
||||
let name = name
|
||||
.map(|name| Name::new(&*name))
|
||||
.unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));
|
||||
|
||||
// TODO: load configuration from pyrpoject.toml
|
||||
let mut configuration = Configuration::default();
|
||||
|
||||
if let Some(base_configuration) = base_configuration {
|
||||
configuration.extend(base_configuration.clone());
|
||||
}
|
||||
let options = pyproject
|
||||
.tool
|
||||
.and_then(|tool| tool.knot)
|
||||
.unwrap_or_default();
|
||||
|
||||
Self {
|
||||
name,
|
||||
root,
|
||||
configuration,
|
||||
options,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -63,7 +58,6 @@ impl ProjectMetadata {
|
|||
pub fn discover(
|
||||
path: &SystemPath,
|
||||
system: &dyn System,
|
||||
base_configuration: Option<&Configuration>,
|
||||
) -> Result<ProjectMetadata, ProjectDiscoveryError> {
|
||||
tracing::debug!("Searching for a project in '{path}'");
|
||||
|
||||
|
@ -84,11 +78,7 @@ impl ProjectMetadata {
|
|||
})?;
|
||||
|
||||
let has_knot_section = pyproject.knot().is_some();
|
||||
let metadata = ProjectMetadata::from_pyproject(
|
||||
pyproject,
|
||||
ancestor.to_path_buf(),
|
||||
base_configuration,
|
||||
);
|
||||
let metadata = ProjectMetadata::from_pyproject(pyproject, ancestor.to_path_buf());
|
||||
|
||||
if has_knot_section {
|
||||
let project_root = ancestor;
|
||||
|
@ -115,13 +105,11 @@ impl ProjectMetadata {
|
|||
} else {
|
||||
tracing::debug!("The ancestor directories contain no `pyproject.toml`. Falling back to a virtual project.");
|
||||
|
||||
// Create a package with a default configuration
|
||||
Self {
|
||||
name: path.file_name().unwrap_or("root").into(),
|
||||
root: path.to_path_buf(),
|
||||
// TODO create the configuration from the pyproject toml
|
||||
configuration: base_configuration.cloned().unwrap_or_default(),
|
||||
}
|
||||
// Create a project with a default configuration
|
||||
Self::new(
|
||||
path.file_name().unwrap_or("root").into(),
|
||||
path.to_path_buf(),
|
||||
)
|
||||
};
|
||||
|
||||
Ok(metadata)
|
||||
|
@ -135,12 +123,22 @@ impl ProjectMetadata {
|
|||
&self.name
|
||||
}
|
||||
|
||||
pub fn configuration(&self) -> &Configuration {
|
||||
&self.configuration
|
||||
pub fn options(&self) -> &Options {
|
||||
&self.options
|
||||
}
|
||||
|
||||
pub fn to_program_settings(&self) -> ProgramSettings {
|
||||
self.configuration.to_program_settings(self.root())
|
||||
pub fn to_program_settings(&self, system: &dyn System) -> ProgramSettings {
|
||||
self.options.to_program_settings(self.root(), system)
|
||||
}
|
||||
|
||||
/// Combine the project options with the CLI options where the CLI options take precedence.
|
||||
pub fn apply_cli_options(&mut self, options: Options) {
|
||||
self.options = options.combine(std::mem::take(&mut self.options));
|
||||
}
|
||||
|
||||
/// Combine the project options with the user options where project options take precedence.
|
||||
pub fn apply_user_options(&mut self, options: Options) {
|
||||
self.options.combine_with(options);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -177,8 +175,8 @@ mod tests {
|
|||
.write_files([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let project = ProjectMetadata::discover(&root, &system, None)
|
||||
.context("Failed to discover project")?;
|
||||
let project =
|
||||
ProjectMetadata::discover(&root, &system).context("Failed to discover project")?;
|
||||
|
||||
assert_eq!(project.root(), &*root);
|
||||
|
||||
|
@ -207,14 +205,14 @@ mod tests {
|
|||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let project = ProjectMetadata::discover(&root, &system, None)
|
||||
.context("Failed to discover project")?;
|
||||
let project =
|
||||
ProjectMetadata::discover(&root, &system).context("Failed to discover project")?;
|
||||
|
||||
assert_eq!(project.root(), &*root);
|
||||
snapshot_project!(project);
|
||||
|
||||
// Discovering the same package from a subdirectory should give the same result
|
||||
let from_src = ProjectMetadata::discover(&root.join("db"), &system, None)
|
||||
let from_src = ProjectMetadata::discover(&root.join("db"), &system)
|
||||
.context("Failed to discover project from src sub-directory")?;
|
||||
|
||||
assert_eq!(from_src, project);
|
||||
|
@ -243,7 +241,7 @@ mod tests {
|
|||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let Err(error) = ProjectMetadata::discover(&root, &system, None) else {
|
||||
let Err(error) = ProjectMetadata::discover(&root, &system) else {
|
||||
return Err(anyhow!("Expected project discovery to fail because of invalid syntax in the pyproject.toml"));
|
||||
};
|
||||
|
||||
|
@ -275,7 +273,8 @@ expected `.`, `]`
|
|||
[project]
|
||||
name = "project-root"
|
||||
|
||||
[tool.knot]
|
||||
[tool.knot.src]
|
||||
root = "src"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
|
@ -284,13 +283,14 @@ expected `.`, `]`
|
|||
[project]
|
||||
name = "nested-project"
|
||||
|
||||
[tool.knot]
|
||||
[tool.knot.src]
|
||||
root = "src"
|
||||
"#,
|
||||
),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system, None)?;
|
||||
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
||||
|
||||
snapshot_project!(sub_project);
|
||||
|
||||
|
@ -311,7 +311,8 @@ expected `.`, `]`
|
|||
[project]
|
||||
name = "project-root"
|
||||
|
||||
[tool.knot]
|
||||
[tool.knot.src]
|
||||
root = "src"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
|
@ -320,13 +321,14 @@ expected `.`, `]`
|
|||
[project]
|
||||
name = "nested-project"
|
||||
|
||||
[tool.knot]
|
||||
[tool.knot.src]
|
||||
root = "src"
|
||||
"#,
|
||||
),
|
||||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let root = ProjectMetadata::discover(&root, &system, None)?;
|
||||
let root = ProjectMetadata::discover(&root, &system)?;
|
||||
|
||||
snapshot_project!(root);
|
||||
|
||||
|
@ -358,7 +360,7 @@ expected `.`, `]`
|
|||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system, None)?;
|
||||
let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
||||
|
||||
snapshot_project!(sub_project);
|
||||
|
||||
|
@ -379,7 +381,8 @@ expected `.`, `]`
|
|||
[project]
|
||||
name = "project-root"
|
||||
|
||||
[tool.knot]
|
||||
[tool.knot.environment]
|
||||
python-version = "3.10"
|
||||
"#,
|
||||
),
|
||||
(
|
||||
|
@ -392,7 +395,7 @@ expected `.`, `]`
|
|||
])
|
||||
.context("Failed to write files")?;
|
||||
|
||||
let root = ProjectMetadata::discover(&root.join("packages/a"), &system, None)?;
|
||||
let root = ProjectMetadata::discover(&root.join("packages/a"), &system)?;
|
||||
|
||||
snapshot_project!(root);
|
||||
|
||||
|
|
103
crates/red_knot_workspace/src/project/options.rs
Normal file
@@ -0,0 +1,103 @@
use red_knot_python_semantic::{
ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
};
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_macros::Combine;
use serde::{Deserialize, Serialize};

/// The options for the project.
#[derive(Debug, Default, Clone, PartialEq, Eq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct Options {
pub environment: Option<EnvironmentOptions>,

pub src: Option<SrcOptions>,
}

impl Options {
pub(super) fn to_program_settings(
&self,
project_root: &SystemPath,
system: &dyn System,
) -> ProgramSettings {
let (python_version, python_platform) = self
.environment
.as_ref()
.map(|env| (env.python_version, env.python_platform.as_ref()))
.unwrap_or_default();

ProgramSettings {
python_version: python_version.unwrap_or_default(),
python_platform: python_platform.cloned().unwrap_or_default(),
search_paths: self.to_search_path_settings(project_root, system),
}
}

fn to_search_path_settings(
&self,
project_root: &SystemPath,
system: &dyn System,
) -> SearchPathSettings {
let src_roots =
if let Some(src_root) = self.src.as_ref().and_then(|src| src.root.as_deref()) {
vec![src_root.to_path_buf()]
} else {
let src = project_root.join("src");

// Default to `src` and the project root if `src` exists and the root hasn't been specified.
if system.is_directory(&src) {
vec![project_root.to_path_buf(), src]
} else {
vec![project_root.to_path_buf()]
}
};

let (extra_paths, python, typeshed) = self
.environment
.as_ref()
.map(|env| {
(
env.extra_paths.clone(),
env.venv_path.clone(),
env.typeshed.clone(),
)
})
.unwrap_or_default();

SearchPathSettings {
extra_paths: extra_paths.unwrap_or_default(),
src_roots,
typeshed,
site_packages: python.unwrap_or(SitePackages::Known(vec![])),
}
}
}

#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct EnvironmentOptions {
pub python_version: Option<PythonVersion>,

pub python_platform: Option<PythonPlatform>,

/// List of user-provided paths that should take first priority in the module resolution.
/// Examples in other type checkers are mypy's MYPYPATH environment variable,
/// or pyright's stubPath configuration setting.
pub extra_paths: Option<Vec<SystemPathBuf>>,

/// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
/// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
/// bundled as a zip file in the binary
pub typeshed: Option<SystemPathBuf>,

// TODO: Rename to python, see https://github.com/astral-sh/ruff/issues/15530
/// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
pub venv_path: Option<SitePackages>,
}

#[derive(Debug, Default, Clone, Eq, PartialEq, Combine, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct SrcOptions {
/// The root of the project, used for finding first-party modules.
pub root: Option<SystemPathBuf>,
}
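To tie this struct back to the `[tool.knot]` table exercised by the new CLI test, here is a minimal, hypothetical sketch of deserializing the table's contents into `Options`; it assumes the serde derives above and the workspace's existing `toml` dependency:

```rust
use red_knot_workspace::project::options::Options;

fn main() -> anyhow::Result<()> {
    // The body of a `[tool.knot]` table, i.e. what `[tool.knot.environment]`
    // and `[tool.knot.src]` look like once the `tool.knot` prefix is stripped.
    let options: Options = toml::from_str(
        r#"
[environment]
python-version = "3.11"

[src]
root = "src"
"#,
    )?;

    assert!(options.environment.is_some());
    assert!(options.src.is_some());
    Ok(())
}
```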
@ -1,15 +1,16 @@
mod package_name;

use pep440_rs::{Version, VersionSpecifiers};
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use thiserror::Error;

use crate::project::options::Options;
pub(crate) use package_name::PackageName;

/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Debug, Default, Clone)]
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct PyProject {
pub struct PyProject {
    /// PEP 621-compliant project metadata.
    pub project: Option<Project>,
    /// Tool-specific metadata.

@ -17,7 +18,7 @@ pub(crate) struct PyProject {
}

impl PyProject {
    pub(crate) fn knot(&self) -> Option<&Knot> {
    pub(crate) fn knot(&self) -> Option<&Options> {
        self.tool.as_ref().and_then(|tool| tool.knot.as_ref())
    }
}

@ -37,10 +38,9 @@ impl PyProject {
/// PEP 621 project metadata (`project`).
///
/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml>.
#[derive(Deserialize, Debug, Clone, PartialEq)]
#[cfg_attr(test, derive(serde::Serialize))]
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub(crate) struct Project {
pub struct Project {
    /// The name of the project
    ///
    /// Note: Intentionally optional to be more permissive during deserialization.

@ -52,14 +52,8 @@ pub(crate) struct Project {
    pub requires_python: Option<VersionSpecifiers>,
}

#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
pub(crate) struct Tool {
    pub knot: Option<Knot>,
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct Tool {
    pub knot: Option<Options>,
}

// TODO(micha): Remove allow once we add knot settings.
// We can't use a unit struct here or deserializing `[tool.knot]` fails.
#[allow(clippy::empty_structs_with_brackets)]
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub(crate) struct Knot {}
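To make the nesting concrete, here is a hedged, self-contained sketch of how a `[tool.knot]` table sits inside `pyproject.toml` and how an accessor like `PyProject::knot` walks `tool` → `knot`. The `Demo*` types and the use of `toml::Value` are placeholders, not the real structs above:

```rust
// Sketch only: illustrative stand-ins for `PyProject`, `Tool`, and the knot
// options, showing the `[tool.knot]` nesting and the accessor pattern.
use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct DemoKnotOptions {
    environment: Option<toml::Value>,
    src: Option<toml::Value>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct DemoTool {
    knot: Option<DemoKnotOptions>,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
struct DemoPyProject {
    tool: Option<DemoTool>,
}

impl DemoPyProject {
    // Same shape as `PyProject::knot`: walk `tool`, then `knot`.
    fn knot(&self) -> Option<&DemoKnotOptions> {
        self.tool.as_ref().and_then(|tool| tool.knot.as_ref())
    }
}

fn main() -> Result<(), toml::de::Error> {
    let pyproject: DemoPyProject = toml::from_str(
        r#"
        [tool.knot.src]
        root = "src"
        "#,
    )?;
    assert!(pyproject.knot().and_then(|knot| knot.src.as_ref()).is_some());
    Ok(())
}
```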
@ -9,7 +9,7 @@ use thiserror::Error;
///
/// See: <https://packaging.python.org/en/latest/specifications/name-normalization/>
#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)]
pub(crate) struct PackageName(String);
pub struct PackageName(String);

impl PackageName {
    /// Create a validated, normalized package name.
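The normalization this type performs is defined by the packaging spec linked above. A rough standalone sketch of that rule (lowercase the name and collapse runs of `-`, `_`, and `.` into a single dash), not the crate's actual implementation or its validation logic:

```rust
// Sketch of PEP 503-style name normalization per the spec linked above:
// names compare case-insensitively with runs of `-`, `_`, and `.` collapsed
// to a single dash.
fn normalize_package_name(name: &str) -> String {
    let mut normalized = String::with_capacity(name.len());
    let mut previous_was_separator = false;

    for c in name.chars() {
        if matches!(c, '-' | '_' | '.') {
            // Collapse consecutive separators into one dash.
            if !previous_was_separator {
                normalized.push('-');
            }
            previous_was_separator = true;
        } else {
            normalized.push(c.to_ascii_lowercase());
            previous_was_separator = false;
        }
    }

    normalized
}

fn main() {
    assert_eq!(normalize_package_name("Friendly.Bard"), "friendly-bard");
    assert_eq!(normalize_package_name("friendly__bard"), "friendly-bard");
}
```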
@ -1,97 +0,0 @@
use red_knot_python_semantic::{
    ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
};
use ruff_db::system::{SystemPath, SystemPathBuf};

/// The resolved configurations.
///
/// The main difference to [`Configuration`] is that default values are filled in.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct ProjectSettings {
    pub(super) program: ProgramSettings,
}

impl ProjectSettings {
    pub fn program(&self) -> &ProgramSettings {
        &self.program
    }
}

/// The configuration for the project or a package.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct Configuration {
    pub python_version: Option<PythonVersion>,
    pub search_paths: SearchPathConfiguration,
}

impl Configuration {
    /// Extends this configuration by using the values from `with` for all values that are absent in `self`.
    pub fn extend(&mut self, with: Configuration) {
        self.python_version = self.python_version.or(with.python_version);
        self.search_paths.extend(with.search_paths);
    }

    pub(super) fn to_program_settings(&self, first_party_root: &SystemPath) -> ProgramSettings {
        ProgramSettings {
            python_version: self.python_version.unwrap_or_default(),
            python_platform: PythonPlatform::default(),
            search_paths: self.search_paths.to_settings(first_party_root),
        }
    }
}

#[derive(Debug, Default, Clone, Eq, PartialEq)]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct SearchPathConfiguration {
    /// List of user-provided paths that should take first priority in the module resolution.
    /// Examples in other type checkers are mypy's MYPYPATH environment variable,
    /// or pyright's stubPath configuration setting.
    pub extra_paths: Option<Vec<SystemPathBuf>>,

    /// The root of the project, used for finding first-party modules.
    pub src_root: Option<SystemPathBuf>,

    /// Optional path to a "typeshed" directory on disk for us to use for standard-library types.
    /// If this is not provided, we will fallback to our vendored typeshed stubs for the stdlib,
    /// bundled as a zip file in the binary
    pub typeshed: Option<SystemPathBuf>,

    /// The path to the user's `site-packages` directory, where third-party packages from ``PyPI`` are installed.
    pub site_packages: Option<SitePackages>,
}

impl SearchPathConfiguration {
    pub fn to_settings(&self, workspace_root: &SystemPath) -> SearchPathSettings {
        let site_packages = self
            .site_packages
            .clone()
            .unwrap_or(SitePackages::Known(vec![]));

        SearchPathSettings {
            extra_paths: self.extra_paths.clone().unwrap_or_default(),
            src_root: self
                .clone()
                .src_root
                .unwrap_or_else(|| workspace_root.to_path_buf()),
            typeshed: self.typeshed.clone(),
            site_packages,
        }
    }

    pub fn extend(&mut self, with: SearchPathConfiguration) {
        if let Some(extra_paths) = with.extra_paths {
            self.extra_paths.get_or_insert(extra_paths);
        }
        if let Some(src_root) = with.src_root {
            self.src_root.get_or_insert(src_root);
        }
        if let Some(typeshed) = with.typeshed {
            self.typeshed.get_or_insert(typeshed);
        }
        if let Some(site_packages) = with.site_packages {
            self.site_packages.get_or_insert(site_packages);
        }
    }
}
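The removed `extend` methods implement a first-wins merge via `Option::get_or_insert`: a value that is already set keeps priority, and only absent values are filled from the lower-precedence source. That is the same precedence rule (CLI over `pyproject.toml`) described in this PR. A minimal sketch of that rule in isolation:

```rust
// First-wins merge, as in the removed `extend` methods above: keep the value
// that is already set, fill in from the lower-precedence source otherwise.
fn merge_option<T>(higher: &mut Option<T>, lower: Option<T>) {
    if let Some(value) = lower {
        higher.get_or_insert(value);
    }
}

fn main() {
    // Both the CLI and the configuration file specified a Python version.
    let mut python_version = Some("3.12");
    merge_option(&mut python_version, Some("3.9"));
    assert_eq!(python_version, Some("3.12")); // the higher-precedence (CLI) value wins

    // The CLI said nothing about typeshed; fall back to the configuration file.
    let mut typeshed: Option<&str> = None;
    merge_option(&mut typeshed, Some("/stubs/typeshed"));
    assert_eq!(typeshed, Some("/stubs/typeshed"));
}
```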
@ -5,13 +5,10 @@ expression: root
ProjectMetadata(
    name: Name("project-root"),
    root: "/app",
    configuration: Configuration(
        python_version: None,
        search_paths: SearchPathConfiguration(
            extra_paths: None,
            src_root: None,
            typeshed: None,
            site_packages: None,
        ),
    options: Options(
        environment: None,
        src: Some(SrcOptions(
            root: Some("src"),
        )),
    ),
)

@ -5,13 +5,10 @@ expression: sub_project
ProjectMetadata(
    name: Name("nested-project"),
    root: "/app/packages/a",
    configuration: Configuration(
        python_version: None,
        search_paths: SearchPathConfiguration(
            extra_paths: None,
            src_root: None,
            typeshed: None,
            site_packages: None,
        ),
    options: Options(
        environment: None,
        src: Some(SrcOptions(
            root: Some("src"),
        )),
    ),
)

@ -5,13 +5,14 @@ expression: root
ProjectMetadata(
    name: Name("project-root"),
    root: "/app",
    configuration: Configuration(
        python_version: None,
        search_paths: SearchPathConfiguration(
            extra_paths: None,
            src_root: None,
    options: Options(
        environment: Some(EnvironmentOptions(
            r#python-version: Some("3.10"),
            r#python-platform: None,
            r#extra-paths: None,
            typeshed: None,
            site_packages: None,
        ),
            r#venv-path: None,
        )),
        src: None,
    ),
)

@ -5,13 +5,8 @@ expression: sub_project
ProjectMetadata(
    name: Name("nested-project"),
    root: "/app/packages/a",
    configuration: Configuration(
        python_version: None,
        search_paths: SearchPathConfiguration(
            extra_paths: None,
            src_root: None,
            typeshed: None,
            site_packages: None,
        ),
    options: Options(
        environment: None,
        src: None,
    ),
)

@ -5,13 +5,8 @@ expression: project
ProjectMetadata(
    name: Name("backend"),
    root: "/app",
    configuration: Configuration(
        python_version: None,
        search_paths: SearchPathConfiguration(
            extra_paths: None,
            src_root: None,
            typeshed: None,
            site_packages: None,
        ),
    options: Options(
        environment: None,
        src: None,
    ),
)

@ -5,13 +5,8 @@ expression: project
ProjectMetadata(
    name: Name("app"),
    root: "/app",
    configuration: Configuration(
        python_version: None,
        search_paths: SearchPathConfiguration(
            extra_paths: None,
            src_root: None,
            typeshed: None,
            site_packages: None,
        ),
    options: Options(
        environment: None,
        src: None,
    ),
)
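These snapshot files have the shape of insta RON snapshots of `ProjectMetadata`. A hedged sketch of how such an assertion is typically written, assuming insta's `ron` feature is enabled in the crate that owns these tests; the struct and test name below are placeholders, not the real test code:

```rust
// Sketch only: a stand-in struct and test showing the general shape of an
// insta RON snapshot assertion. The real tests snapshot `ProjectMetadata`
// discovered from an in-memory file system.
use serde::Serialize;

#[derive(Debug, Serialize)]
struct DemoMetadata {
    name: String,
    root: String,
}

#[test]
fn snapshot_demo() {
    let root = DemoMetadata {
        name: "project-root".to_string(),
        root: "/app".to_string(),
    };

    // Renders the value as RON and compares it against the stored `.snap` file,
    // producing output in the same style as the snapshots above.
    insta::assert_ron_snapshot!(root);
}
```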
@ -9,9 +9,9 @@ use ruff_python_ast::visitor::source_order;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::{self as ast, Alias, Expr, Parameter, ParameterWithDefault, Stmt};

fn setup_db(workspace_root: &SystemPath, system: TestSystem) -> anyhow::Result<ProjectDatabase> {
    let workspace = ProjectMetadata::discover(workspace_root, &system, None)?;
    ProjectDatabase::new(workspace, system)
fn setup_db(project_root: &SystemPath, system: TestSystem) -> anyhow::Result<ProjectDatabase> {
    let project = ProjectMetadata::discover(project_root, &system)?;
    ProjectDatabase::new(project, system)
}

fn get_cargo_workspace_root() -> anyhow::Result<SystemPathBuf> {
@ -59,13 +59,16 @@ pub(crate) fn analyze_graph(
        .collect::<FxHashMap<_, _>>();

    // Create a database from the source roots.
    let src_roots = package_roots
        .values()
        .filter_map(|package| package.as_deref())
        .filter_map(|package| package.parent())
        .map(Path::to_path_buf)
        .filter_map(|path| SystemPathBuf::from_path_buf(path).ok())
        .collect();

    let db = ModuleDb::from_src_roots(
        package_roots
            .values()
            .filter_map(|package| package.as_deref())
            .filter_map(|package| package.parent())
            .map(Path::to_path_buf)
            .filter_map(|path| SystemPathBuf::from_path_buf(path).ok()),
        src_roots,
        pyproject_config
            .settings
            .analyze
@ -3,7 +3,7 @@
use rayon::ThreadPoolBuilder;
use red_knot_python_semantic::PythonVersion;
use red_knot_workspace::db::{Db, ProjectDatabase};
use red_knot_workspace::project::settings::Configuration;
use red_knot_workspace::project::options::{EnvironmentOptions, Options};
use red_knot_workspace::project::ProjectMetadata;
use red_knot_workspace::watch::{ChangeEvent, ChangedKind};
use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion};

@ -74,15 +74,14 @@ fn setup_case() -> Case {
        .unwrap();

    let src_root = SystemPath::new("/src");
    let metadata = ProjectMetadata::discover(
        src_root,
        &system,
        Some(&Configuration {
    let mut metadata = ProjectMetadata::discover(src_root, &system).unwrap();
    metadata.apply_cli_options(Options {
        environment: Some(EnvironmentOptions {
            python_version: Some(PythonVersion::PY312),
            ..Configuration::default()
            ..EnvironmentOptions::default()
        }),
    )
    .unwrap();
        ..Options::default()
    });

    let mut db = ProjectDatabase::new(metadata, system).unwrap();

@ -30,22 +30,10 @@ pub struct ModuleDb {
impl ModuleDb {
    /// Initialize a [`ModuleDb`] from the given source root.
    pub fn from_src_roots(
        mut src_roots: impl Iterator<Item = SystemPathBuf>,
        src_roots: Vec<SystemPathBuf>,
        python_version: PythonVersion,
    ) -> Result<Self> {
        let search_paths = {
            // Use the first source root.
            let src_root = src_roots
                .next()
                .ok_or_else(|| anyhow::anyhow!("No source roots provided"))?;

            let mut search_paths = SearchPathSettings::new(src_root);

            // Add the remaining source roots as extra paths.
            search_paths.extra_paths.extend(src_roots);

            search_paths
        };
        let search_paths = SearchPathSettings::new(src_roots);

        let db = Self::default();
        Program::from_settings(
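This simplification relies on `SearchPathSettings::new` now accepting every first-party root up front, rather than a single root plus `extra_paths`. A small hedged sketch of the new calling convention; the import paths and the string-to-path conversion are assumptions beyond what the hunk itself shows:

```rust
// Sketch of the call-site change, assuming `SearchPathSettings::new` takes a
// `Vec<SystemPathBuf>` of first-party roots as shown in this diff.
use red_knot_python_semantic::SearchPathSettings;
use ruff_db::system::SystemPathBuf;

fn demo_search_paths() -> SearchPathSettings {
    let src_roots = vec![
        SystemPathBuf::from("/app"),
        SystemPathBuf::from("/app/src"),
    ];

    // Old shape (removed above): `new(first_root)` followed by pushing the
    // remaining roots into `extra_paths`. New shape: pass all roots at once.
    SearchPathSettings::new(src_roots)
}
```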
43
crates/ruff_macros/src/combine.rs
Normal file

@ -0,0 +1,43 @@
use quote::{quote, quote_spanned};
use syn::{Data, DataStruct, DeriveInput, Fields};

pub(crate) fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
    let DeriveInput { ident, data, .. } = input;

    match data {
        Data::Struct(DataStruct {
            fields: Fields::Named(fields),
            ..
        }) => {
            let output: Vec<_> = fields
                .named
                .iter()
                .map(|field| {
                    let ident = field
                        .ident
                        .as_ref()
                        .expect("Expected to handle named fields");

                    quote_spanned!(
                        ident.span() => crate::project::combine::Combine::combine_with(&mut self.#ident, other.#ident)
                    )
                })
                .collect();

            Ok(quote! {
                #[automatically_derived]
                impl crate::project::combine::Combine for #ident {
                    fn combine_with(&mut self, other: Self) {
                        #(
                            #output
                        );*
                    }
                }
            })
        }
        _ => Err(syn::Error::new(
            ident.span(),
            "Can only derive Combine from structs with named fields.",
        )),
    }
}
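For a sense of what this derive produces, here is a hedged, hand-written equivalent. The `Combine` trait below is a guess at its definition in `red_knot_workspace::project::combine`; only the `combine_with(&mut self, other: Self)` shape is taken from the generated code above, and the `Option` impl is an assumption consistent with the CLI-over-file precedence described in this PR:

```rust
// Sketch only: an assumed `Combine` trait plus the impl that
// `#[derive(Combine)]` would generate for a two-field options struct.
trait Combine {
    fn combine_with(&mut self, other: Self);
}

// Assumed impl for optional fields: keep the existing value, fill in from
// `other` only when absent (first-wins).
impl<T> Combine for Option<T> {
    fn combine_with(&mut self, other: Self) {
        if self.is_none() {
            *self = other;
        }
    }
}

#[derive(Debug, Default)]
struct DemoEnvironmentOptions {
    python_version: Option<String>,
    typeshed: Option<String>,
}

// Hand-written equivalent of `#[derive(Combine)]` on the struct above: one
// `combine_with` call per named field, as emitted by `derive_impl`.
impl Combine for DemoEnvironmentOptions {
    fn combine_with(&mut self, other: Self) {
        self.python_version.combine_with(other.python_version);
        self.typeshed.combine_with(other.typeshed);
    }
}

fn main() {
    let mut cli = DemoEnvironmentOptions {
        python_version: Some("3.12".to_string()),
        typeshed: None,
    };
    let file = DemoEnvironmentOptions {
        python_version: Some("3.9".to_string()),
        typeshed: Some("/stubs/typeshed".to_string()),
    };

    cli.combine_with(file);

    assert_eq!(cli.python_version.as_deref(), Some("3.12"));
    assert_eq!(cli.typeshed.as_deref(), Some("/stubs/typeshed"));
}
```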
@ -7,6 +7,7 @@ use proc_macro::TokenStream;
use syn::{parse_macro_input, DeriveInput, Error, ItemFn, ItemStruct};

mod cache_key;
mod combine;
mod combine_options;
mod config;
mod derive_message_formats;

@ -35,6 +36,19 @@ pub fn derive_combine_options(input: TokenStream) -> TokenStream {
        .into()
}

/// Automatically derives a `red_knot_workspace::project::Combine` implementation for the attributed type
/// that calls `red_knot_workspace::project::Combine::combine` for each field.
///
/// The derive macro can only be used on structs with named fields.
#[proc_macro_derive(Combine)]
pub fn derive_combine(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);

    combine::derive_impl(input)
        .unwrap_or_else(syn::Error::into_compile_error)
        .into()
}

/// Converts a screaming snake case identifier to a kebab case string.
#[proc_macro]
pub fn kebab_case(input: TokenStream) -> TokenStream {
@ -118,7 +118,7 @@ fn setup_db() -> TestDb {
        ProgramSettings {
            python_version: PythonVersion::default(),
            python_platform: PythonPlatform::default(),
            search_paths: SearchPathSettings::new(src_root),
            search_paths: SearchPathSettings::new(vec![src_root]),
        },
    )
    .expect("Valid search path settings");