Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-30 05:44:56 +00:00
[ty] Add caching for submodule completion suggestions (#19408)
This change makes it so we aren't doing a directory traversal every time we ask for completions from a module. Specifically, submodules that aren't attributes of their parent module can only be discovered by looking at the directory tree, but we want to avoid doing a directory scan unless we think there are changes.

To make this work, this change does a little bit of surgery to `FileRoot`. Previously, a `FileRoot` was only used for library search paths. Its revision was bumped whenever a file in that tree was added, deleted, or even modified (to support the discovery of `pth` files and changes to their contents). This generally seems fine, since these are presumably dependency paths that shouldn't change frequently.

In this change, we add a `FileRoot` for the project. But having the `FileRoot`'s revision bumped for every change in the project makes caching based on that `FileRoot` rather ineffective: cache invalidation would occur so aggressively that there would be little point in adding caching in the first place. To mitigate this, a `FileRoot`'s revision is only bumped on a change to a child file's contents when the `FileRoot` is a `LibrarySearchPath`. Otherwise, we only bump the revision when a file is created or deleted.

The effect is that, at least in VS Code, when a new module is added or removed, this change is picked up and the cache is properly invalidated. Other LSP clients with worse support for file watching (which seems to be the case for the CoC vim plugin that I use) don't work as well: the cache is less likely to be invalidated, which might cause completions to return stale results. Unless there's an obvious way to fix or improve this, I propose punting on improvements here for now.
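To make the invalidation rule concrete, here is a minimal, self-contained sketch of the idea in plain Rust. It deliberately avoids Salsa and the real `ruff_db` types; `ChangeKind`, the `revision` counter, and `apply_change` are simplified stand-ins for illustration only, while the `FileRootKind::Project` / `FileRootKind::LibrarySearchPath` distinction mirrors the one in the diff below.

// Sketch only: simplified stand-ins, not the actual ruff_db/Salsa API.
#[derive(Clone, Copy, PartialEq, Eq)]
enum FileRootKind {
    Project,
    LibrarySearchPath,
}

#[derive(Clone, Copy)]
enum ChangeKind {
    Created,
    Deleted,
    Modified,
}

struct FileRoot {
    kind: FileRootKind,
    // Stand-in for the Salsa revision that submodule completions key on.
    revision: u64,
}

impl FileRoot {
    fn apply_change(&mut self, change: ChangeKind) {
        let bump = match change {
            // A created or deleted file can add or remove a submodule,
            // so the cached directory listing must be invalidated.
            ChangeKind::Created | ChangeKind::Deleted => true,
            // Content edits only bump the revision for library search
            // paths (e.g., to pick up `pth` file changes); for the
            // project root they would invalidate the cache constantly.
            ChangeKind::Modified => self.kind == FileRootKind::LibrarySearchPath,
        };
        if bump {
            self.revision += 1;
        }
    }
}

fn main() {
    let mut project = FileRoot { kind: FileRootKind::Project, revision: 0 };
    project.apply_change(ChangeKind::Modified);
    assert_eq!(project.revision, 0); // edits alone leave the submodule cache valid
    project.apply_change(ChangeKind::Created);
    assert_eq!(project.revision, 1); // a new file invalidates it
}

The diff below implements this split for real: `File::sync_path_only` refreshes file metadata without touching the associated root, while creations and deletions go through `File::sync_path`, which does touch it.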
parent 99d0ac60b4
commit 64f9481fd0
7 changed files with 275 additions and 44 deletions
@@ -232,7 +232,7 @@ impl Files {
         let roots = inner.roots.read().unwrap();

         for root in roots.all() {
-            if root.path(db).starts_with(&path) {
+            if path.starts_with(root.path(db)) {
                 root.set_revision(db).to(FileRevision::now());
             }
         }
@@ -375,12 +375,25 @@ impl File {
     }

     /// Refreshes the file metadata by querying the file system if needed.
+    ///
+    /// This also "touches" the file root associated with the given path.
+    /// This means that any Salsa queries that depend on the corresponding
+    /// root's revision will become invalidated.
     pub fn sync_path(db: &mut dyn Db, path: &SystemPath) {
         let absolute = SystemPath::absolute(path, db.system().current_directory());
         Files::touch_root(db, &absolute);
         Self::sync_system_path(db, &absolute, None);
     }

+    /// Refreshes *only* the file metadata by querying the file system if needed.
+    ///
+    /// This specifically does not touch any file root associated with the
+    /// given file path.
+    pub fn sync_path_only(db: &mut dyn Db, path: &SystemPath) {
+        let absolute = SystemPath::absolute(path, db.system().current_directory());
+        Self::sync_system_path(db, &absolute, None);
+    }
+
     /// Increments the revision for the virtual file at `path`.
     pub fn sync_virtual_path(db: &mut dyn Db, path: &SystemVirtualPath) {
         if let Some(virtual_file) = db.files().try_virtual_file(path) {
@@ -23,7 +23,7 @@ pub struct FileRoot {
     pub path: SystemPathBuf,

     /// The kind of the root at the time of its creation.
-    kind_at_time_of_creation: FileRootKind,
+    pub kind_at_time_of_creation: FileRootKind,

     /// A revision that changes when the contents of the source root change.
     ///
@@ -15,7 +15,7 @@ use ty_project::metadata::pyproject::{PyProject, Tool};
 use ty_project::metadata::value::{RangedValue, RelativePathBuf};
 use ty_project::watch::{ChangeEvent, ProjectWatcher, directory_watcher};
 use ty_project::{Db, ProjectDatabase, ProjectMetadata};
-use ty_python_semantic::{ModuleName, PythonPlatform, resolve_module};
+use ty_python_semantic::{Module, ModuleName, PythonPlatform, resolve_module};

 struct TestCase {
     db: ProjectDatabase,
@@ -40,6 +40,14 @@ impl TestCase {
         &self.db
     }

+    /// Stops file-watching and returns the collected change events.
+    ///
+    /// The caller must pass a `MatchEvent` filter that is applied to
+    /// the change events returned. To get all change events, use `|_:
+    /// &ChangeEvent| true`. If possible, callers should pass a filter for a
+    /// specific file name, e.g., `event_for_file("foo.py")`. When done this
+    /// way, the watcher will specifically try to wait for a change event
+    /// matching the filter. This can help avoid flakes.
     #[track_caller]
     fn stop_watch<M>(&mut self, matcher: M) -> Vec<ChangeEvent>
     where
@@ -1877,3 +1885,156 @@ fn rename_files_casing_only() -> anyhow::Result<()> {

     Ok(())
 }
+
+/// This tests that retrieving submodules from a module has its cache
+/// appropriately invalidated after a file is created.
+#[test]
+fn submodule_cache_invalidation_created() -> anyhow::Result<()> {
+    let mut case = setup([("lib.py", ""), ("bar/__init__.py", ""), ("bar/foo.py", "")])?;
+    let module = resolve_module(case.db(), &ModuleName::new("bar").unwrap()).expect("`bar` module");
+    let get_submodules = |db: &dyn Db, module: &Module| {
+        let mut names = module
+            .all_submodules(db)
+            .iter()
+            .map(|name| name.as_str().to_string())
+            .collect::<Vec<String>>();
+        names.sort();
+        names.join("\n")
+    };
+
+    insta::assert_snapshot!(
+        get_submodules(case.db(), &module),
+        @"foo",
+    );
+
+    std::fs::write(case.project_path("bar/wazoo.py").as_std_path(), "")?;
+    let changes = case.stop_watch(event_for_file("wazoo.py"));
+    case.apply_changes(changes, None);
+
+    insta::assert_snapshot!(
+        get_submodules(case.db(), &module),
+        @r"
+    foo
+    wazoo
+    ",
+    );
+
+    Ok(())
+}
+
+/// This tests that retrieving submodules from a module has its cache
+/// appropriately invalidated after a file is deleted.
+#[test]
+fn submodule_cache_invalidation_deleted() -> anyhow::Result<()> {
+    let mut case = setup([
+        ("lib.py", ""),
+        ("bar/__init__.py", ""),
+        ("bar/foo.py", ""),
+        ("bar/wazoo.py", ""),
+    ])?;
+    let module = resolve_module(case.db(), &ModuleName::new("bar").unwrap()).expect("`bar` module");
+    let get_submodules = |db: &dyn Db, module: &Module| {
+        let mut names = module
+            .all_submodules(db)
+            .iter()
+            .map(|name| name.as_str().to_string())
+            .collect::<Vec<String>>();
+        names.sort();
+        names.join("\n")
+    };
+
+    insta::assert_snapshot!(
+        get_submodules(case.db(), &module),
+        @r"
+    foo
+    wazoo
+    ",
+    );
+
+    std::fs::remove_file(case.project_path("bar/wazoo.py").as_std_path())?;
+    let changes = case.stop_watch(event_for_file("wazoo.py"));
+    case.apply_changes(changes, None);
+
+    insta::assert_snapshot!(
+        get_submodules(case.db(), &module),
+        @"foo",
+    );
+
+    Ok(())
+}
+
+/// This tests that retrieving submodules from a module has its cache
+/// appropriately invalidated after a file is created and then deleted.
+#[test]
+fn submodule_cache_invalidation_created_then_deleted() -> anyhow::Result<()> {
+    let mut case = setup([("lib.py", ""), ("bar/__init__.py", ""), ("bar/foo.py", "")])?;
+    let module = resolve_module(case.db(), &ModuleName::new("bar").unwrap()).expect("`bar` module");
+    let get_submodules = |db: &dyn Db, module: &Module| {
+        let mut names = module
+            .all_submodules(db)
+            .iter()
+            .map(|name| name.as_str().to_string())
+            .collect::<Vec<String>>();
+        names.sort();
+        names.join("\n")
+    };
+
+    insta::assert_snapshot!(
+        get_submodules(case.db(), &module),
+        @"foo",
+    );
+
+    std::fs::write(case.project_path("bar/wazoo.py").as_std_path(), "")?;
+    let changes = case.take_watch_changes(event_for_file("wazoo.py"));
+    case.apply_changes(changes, None);
+
+    std::fs::remove_file(case.project_path("bar/wazoo.py").as_std_path())?;
+    let changes = case.stop_watch(event_for_file("wazoo.py"));
+    case.apply_changes(changes, None);
+
+    insta::assert_snapshot!(
+        get_submodules(case.db(), &module),
+        @"foo",
+    );
+
+    Ok(())
+}
+
+/// This tests that retrieving submodules from a module has its cache
+/// appropriately invalidated after a file is created *after* a project
+/// configuration change.
+#[test]
+fn submodule_cache_invalidation_after_pyproject_created() -> anyhow::Result<()> {
+    let mut case = setup([("lib.py", ""), ("bar/__init__.py", ""), ("bar/foo.py", "")])?;
+    let module = resolve_module(case.db(), &ModuleName::new("bar").unwrap()).expect("`bar` module");
+    let get_submodules = |db: &dyn Db, module: &Module| {
+        let mut names = module
+            .all_submodules(db)
+            .iter()
+            .map(|name| name.as_str().to_string())
+            .collect::<Vec<String>>();
+        names.sort();
+        names.join("\n")
+    };
+
+    insta::assert_snapshot!(
+        get_submodules(case.db(), &module),
+        @"foo",
+    );
+
+    case.update_options(Options::default())?;
+
+    std::fs::write(case.project_path("bar/wazoo.py").as_std_path(), "")?;
+    let changes = case.take_watch_changes(event_for_file("wazoo.py"));
+    case.apply_changes(changes, None);
+
+    insta::assert_snapshot!(
+        get_submodules(case.db(), &module),
+        @r"
+    foo
+    wazoo
+    ",
+    );
+
+    Ok(())
+}
@@ -6,7 +6,8 @@ use std::collections::BTreeSet;

 use crate::walk::ProjectFilesWalker;
 use ruff_db::Db as _;
-use ruff_db::files::{File, Files};
+use ruff_db::file_revision::FileRevision;
+use ruff_db::files::{File, FileRootKind, Files};
 use ruff_db::system::SystemPath;
 use rustc_hash::FxHashSet;
 use salsa::Setter;
@@ -57,12 +58,6 @@ impl ProjectDatabase {
         let mut synced_files = FxHashSet::default();
         let mut sync_recursively = BTreeSet::default();

-        let mut sync_path = |db: &mut ProjectDatabase, path: &SystemPath| {
-            if synced_files.insert(path.to_path_buf()) {
-                File::sync_path(db, path);
-            }
-        };
-
         for change in changes {
             tracing::trace!("Handle change: {:?}", change);

@@ -92,12 +87,49 @@ impl ProjectDatabase {

             match change {
                 ChangeEvent::Changed { path, kind: _ } | ChangeEvent::Opened(path) => {
-                    sync_path(self, &path);
+                    if synced_files.insert(path.to_path_buf()) {
+                        let absolute =
+                            SystemPath::absolute(&path, self.system().current_directory());
+                        File::sync_path_only(self, &absolute);
+                        if let Some(root) = self.files().root(self, &absolute) {
+                            match root.kind_at_time_of_creation(self) {
+                                // When a file inside the root of
+                                // the project is changed, we don't
+                                // want to mark the entire root as
+                                // having changed too. In theory it
+                                // might make sense to, but at time
+                                // of writing, the file root revision
+                                // on a project is used to invalidate
+                                // the submodule files found within a
+                                // directory. If we bumped the revision
+                                // on every change within a project,
+                                // then this caching technique would be
+                                // effectively useless.
+                                //
+                                // It's plausible we should explore
+                                // a more robust cache invalidation
+                                // strategy that models more directly
+                                // what we care about. For example, by
+                                // keeping track of directories and
+                                // their direct children explicitly,
+                                // and then keying the submodule cache
+                                // off of that instead. ---AG
+                                FileRootKind::Project => {}
+                                FileRootKind::LibrarySearchPath => {
+                                    root.set_revision(self).to(FileRevision::now());
+                                }
+                            }
+                        }
+                    }
                 }

                 ChangeEvent::Created { kind, path } => {
                     match kind {
-                        CreatedKind::File => sync_path(self, &path),
+                        CreatedKind::File => {
+                            if synced_files.insert(path.to_path_buf()) {
+                                File::sync_path(self, &path);
+                            }
+                        }
                         CreatedKind::Directory | CreatedKind::Any => {
                             sync_recursively.insert(path.clone());
                         }
@@ -138,7 +170,9 @@ impl ProjectDatabase {
                     };

                     if is_file {
-                        sync_path(self, &path);
+                        if synced_files.insert(path.to_path_buf()) {
+                            File::sync_path(self, &path);
+                        }

                         if let Some(file) = self.files().try_system(self, &path) {
                             project.remove_file(self, file);
@@ -6,7 +6,7 @@ use files::{Index, Indexed, IndexedFiles};
 use metadata::settings::Settings;
 pub use metadata::{ProjectMetadata, ProjectMetadataError};
 use ruff_db::diagnostic::{Annotation, Diagnostic, DiagnosticId, Severity, Span, SubDiagnostic};
-use ruff_db::files::File;
+use ruff_db::files::{File, FileRootKind};
 use ruff_db::parsed::parsed_module;
 use ruff_db::source::{SourceTextError, source_text};
 use ruff_db::system::{SystemPath, SystemPathBuf};
@@ -141,6 +141,13 @@ impl Project {
     pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Result<Self, ToSettingsError> {
         let (settings, diagnostics) = metadata.options().to_settings(db, metadata.root())?;

+        // This adds a file root for the project itself. This enables
+        // tracking of when changes are made to the files in a project
+        // at the directory level. At time of writing (2025-07-17),
+        // this is used for caching completions for submodules.
+        db.files()
+            .try_add_root(db, metadata.root(), FileRootKind::Project);
+
         let project = Project::builder(Box::new(metadata), Box::new(settings), diagnostics)
             .durability(Durability::MEDIUM)
             .open_fileset_durability(Durability::LOW)
@@ -17,6 +17,7 @@ pub struct Module {
     inner: Arc<ModuleInner>,
 }

+#[salsa::tracked]
 impl Module {
     pub(crate) fn file_module(
         name: ModuleName,
@@ -97,11 +98,16 @@ impl Module {
     ///
     /// The names returned correspond to the "base" name of the module.
     /// That is, `{self.name}.{basename}` should give the full module name.
-    pub fn all_submodules(&self, db: &dyn Db) -> Vec<Name> {
-        self.all_submodules_inner(db).unwrap_or_default()
+    pub fn all_submodules<'db>(&self, db: &'db dyn Db) -> &'db [Name] {
+        self.clone()
+            .all_submodules_inner(db, ())
+            .as_deref()
+            .unwrap_or_default()
     }

-    fn all_submodules_inner(&self, db: &dyn Db) -> Option<Vec<Name>> {
+    #[allow(clippy::ref_option, clippy::used_underscore_binding)]
+    #[salsa::tracked(returns(ref))]
+    fn all_submodules_inner(self, db: &dyn Db, _dummy: ()) -> Option<Vec<Name>> {
         fn is_submodule(
             is_dir: bool,
             is_file: bool,
@@ -136,8 +142,17 @@ impl Module {
         );

         Some(match path.parent()? {
-            SystemOrVendoredPathRef::System(parent_directory) => db
-                .system()
+            SystemOrVendoredPathRef::System(parent_directory) => {
+                // Read the revision on the corresponding file root to
+                // register an explicit dependency on this directory
+                // tree. When the revision gets bumped, the cache
+                // that Salsa creates does for this routine will be
+                // invalidated.
+                if let Some(root) = db.files().root(db, parent_directory) {
+                    let _ = root.revision(db);
+                }
+
+                db.system()
                 .read_directory(parent_directory)
                 .inspect_err(|err| {
                     tracing::debug!(
@@ -161,7 +176,8 @@ impl Module {
                     let stem = entry.path().file_stem()?;
                     is_identifier(stem).then(|| Name::from(stem))
                 })
-                .collect(),
+                .collect()
+            }
             SystemOrVendoredPathRef::Vendored(parent_directory) => db
                 .vendored()
                 .read_directory(parent_directory)
@@ -86,7 +86,7 @@ impl<'db> SemanticModel<'db> {
             };
             let ty = Type::module_literal(self.db, self.file, &submodule);
             completions.push(Completion {
-                name: submodule_basename,
+                name: submodule_basename.clone(),
                 ty,
                 builtin,
             });