feat(lsp): ability to configure document pre-load limit (#19097)
Adds a `deno.preloadLimit` option (e.g. `"deno.preloadLimit": 2000`) that specifies how many file entries to traverse on the file system when the LSP loads or its configuration changes.

Closes #18955
parent c926bc0deb
commit 28a72d5488

12 changed files with 239 additions and 168 deletions
--- a/cli/lsp/documents.rs
+++ b/cli/lsp/documents.rs
@@ -1,7 +1,6 @@
 // Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
 
 use super::cache::calculate_fs_version;
-use super::client::LspClientKind;
 use super::text::LineIndex;
 use super::tsc;
 use super::tsc::AssetDocument;
@@ -793,6 +792,16 @@ fn get_document_path(
   }
 }
 
+pub struct UpdateDocumentConfigOptions<'a> {
+  pub enabled_urls: Vec<Url>,
+  pub document_preload_limit: usize,
+  pub maybe_import_map: Option<Arc<import_map::ImportMap>>,
+  pub maybe_config_file: Option<&'a ConfigFile>,
+  pub maybe_package_json: Option<&'a PackageJson>,
+  pub npm_registry_api: Arc<CliNpmRegistryApi>,
+  pub npm_resolution: Arc<NpmResolution>,
+}
+
 /// Specify the documents to include on a `documents.documents(...)` call.
 #[derive(Debug, Clone, Copy)]
 pub enum DocumentsFilter {
@@ -818,8 +827,6 @@ pub struct Documents {
   open_docs: HashMap<ModuleSpecifier, Document>,
   /// Documents stored on the file system.
   file_system_docs: Arc<Mutex<FileSystemDocuments>>,
-  /// Kind of the client that is using the documents.
-  lsp_client_kind: LspClientKind,
   /// Hash of the config used for resolution. When the hash changes we update
   /// dependencies.
   resolver_config_hash: u64,
@@ -839,14 +846,13 @@ pub struct Documents {
 }
 
 impl Documents {
-  pub fn new(location: &Path, lsp_client_kind: LspClientKind) -> Self {
+  pub fn new(location: &Path) -> Self {
     Self {
       cache: HttpCache::new(location),
       dirty: true,
       dependents_map: Default::default(),
       open_docs: HashMap::default(),
       file_system_docs: Default::default(),
-      lsp_client_kind,
       resolver_config_hash: 0,
       imports: Default::default(),
       resolver: Default::default(),
@@ -1161,15 +1167,7 @@ impl Documents {
     Ok(())
   }
 
-  pub fn update_config(
-    &mut self,
-    enabled_urls: Vec<Url>,
-    maybe_import_map: Option<Arc<import_map::ImportMap>>,
-    maybe_config_file: Option<&ConfigFile>,
-    maybe_package_json: Option<&PackageJson>,
-    npm_registry_api: Arc<CliNpmRegistryApi>,
-    npm_resolution: Arc<NpmResolution>,
-  ) {
+  pub fn update_config(&mut self, options: UpdateDocumentConfigOptions) {
     fn calculate_resolver_config_hash(
       enabled_urls: &[Url],
       maybe_import_map: Option<&import_map::ImportMap>,
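The hunks above replace `update_config`'s six positional parameters with a single `UpdateDocumentConfigOptions` struct, which is what lets the new `document_preload_limit` knob ride along without reordering every caller's arguments. A minimal, self-contained sketch of the same pattern, using simplified stand-in types rather than Deno's own:

```rust
// Illustrative only: simplified stand-ins for the real Deno types.
struct UpdateOptions<'a> {
    enabled_urls: Vec<String>,
    document_preload_limit: usize,
    maybe_config_path: Option<&'a str>,
}

struct Documents {
    dirty: bool,
}

impl Documents {
    // A single struct parameter replaces a long positional list, so each
    // argument is named at the call site and adding a field (like the new
    // preload limit) does not disturb existing callers.
    fn update_config(&mut self, options: UpdateOptions) {
        println!(
            "limit={}, urls={}, config={:?}",
            options.document_preload_limit,
            options.enabled_urls.len(),
            options.maybe_config_path,
        );
        self.dirty = true;
    }
}

fn main() {
    let mut docs = Documents { dirty: false };
    docs.update_config(UpdateOptions {
        enabled_urls: vec!["file:///project/".to_string()],
        document_preload_limit: 2_000,
        maybe_config_path: None,
    });
}
```

Named fields also make call sites self-documenting, as the updated tests further down show.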
@@ -1208,14 +1206,16 @@ impl Documents {
       hasher.finish()
     }
 
-    let maybe_package_json_deps = maybe_package_json.map(|package_json| {
-      package_json::get_local_package_json_version_reqs(package_json)
-    });
-    let maybe_jsx_config =
-      maybe_config_file.and_then(|cf| cf.to_maybe_jsx_import_source_config());
+    let maybe_package_json_deps =
+      options.maybe_package_json.map(|package_json| {
+        package_json::get_local_package_json_version_reqs(package_json)
+      });
+    let maybe_jsx_config = options
+      .maybe_config_file
+      .and_then(|cf| cf.to_maybe_jsx_import_source_config());
     let new_resolver_config_hash = calculate_resolver_config_hash(
-      &enabled_urls,
-      maybe_import_map.as_deref(),
+      &options.enabled_urls,
+      options.maybe_import_map.as_deref(),
       maybe_jsx_config.as_ref(),
       maybe_package_json_deps.as_ref(),
     );
@@ -1223,21 +1223,21 @@ impl Documents {
       Arc::new(PackageJsonDepsProvider::new(maybe_package_json_deps));
     let deps_installer = Arc::new(PackageJsonDepsInstaller::new(
       deps_provider.clone(),
-      npm_registry_api.clone(),
-      npm_resolution.clone(),
+      options.npm_registry_api.clone(),
+      options.npm_resolution.clone(),
     ));
     self.resolver = Arc::new(CliGraphResolver::new(
       maybe_jsx_config,
-      maybe_import_map,
+      options.maybe_import_map,
       false,
-      npm_registry_api,
-      npm_resolution,
+      options.npm_registry_api,
+      options.npm_resolution,
       deps_provider,
       deps_installer,
     ));
     self.imports = Arc::new(
       if let Some(Ok(imports)) =
-        maybe_config_file.map(|cf| cf.to_maybe_imports())
+        options.maybe_config_file.map(|cf| cf.to_maybe_imports())
       {
         imports
           .into_iter()
@@ -1257,14 +1257,21 @@ impl Documents {
 
     // only refresh the dependencies if the underlying configuration has changed
     if self.resolver_config_hash != new_resolver_config_hash {
-      self.refresh_dependencies(enabled_urls);
+      self.refresh_dependencies(
+        options.enabled_urls,
+        options.document_preload_limit,
+      );
       self.resolver_config_hash = new_resolver_config_hash;
     }
 
     self.dirty = true;
   }
 
-  fn refresh_dependencies(&mut self, enabled_urls: Vec<Url>) {
+  fn refresh_dependencies(
+    &mut self,
+    enabled_urls: Vec<Url>,
+    document_preload_limit: usize,
+  ) {
     let resolver = self.resolver.as_graph_resolver();
     for doc in self.open_docs.values_mut() {
       if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
@@ -1274,51 +1281,73 @@ impl Documents {
 
     // update the file system documents
     let mut fs_docs = self.file_system_docs.lock();
-    match self.lsp_client_kind {
-      LspClientKind::CodeEditor => {
-        let mut not_found_docs =
-          fs_docs.docs.keys().cloned().collect::<HashSet<_>>();
-        let open_docs = &mut self.open_docs;
-
-        log::debug!("Preloading documents from enabled urls...");
-        for specifier in PreloadDocumentFinder::from_enabled_urls(&enabled_urls)
-        {
-          // mark this document as having been found
-          not_found_docs.remove(&specifier);
-
-          if !open_docs.contains_key(&specifier)
-            && !fs_docs.docs.contains_key(&specifier)
-          {
-            fs_docs.refresh_document(&self.cache, resolver, &specifier);
-          } else {
-            // update the existing entry to have the new resolver
-            if let Some(doc) = fs_docs.docs.get_mut(&specifier) {
-              if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
-                *doc = new_doc;
-              }
-            }
-          }
-        }
-
-        // clean up and remove any documents that weren't found
-        for uri in not_found_docs {
-          fs_docs.docs.remove(&uri);
-        }
-      }
-      LspClientKind::Repl => {
-        // This log statement is used in the tests to ensure preloading doesn't
-        // happen, which is not useful in the repl and could be very expensive
-        // if the repl is launched from a directory with a lot of descendants.
-        log::debug!("Skipping document preload for repl.");
-
-        // for the repl, just update to use the new resolver
-        for doc in fs_docs.docs.values_mut() {
-          if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
-            *doc = new_doc;
-          }
-        }
-      }
+    if document_preload_limit > 0 {
+      let mut not_found_docs =
+        fs_docs.docs.keys().cloned().collect::<HashSet<_>>();
+      let open_docs = &mut self.open_docs;
+
+      log::debug!("Preloading documents from enabled urls...");
+      let mut finder = PreloadDocumentFinder::from_enabled_urls_with_limit(
+        &enabled_urls,
+        document_preload_limit,
+      );
+      for specifier in finder.by_ref() {
+        // mark this document as having been found
+        not_found_docs.remove(&specifier);
+
+        if !open_docs.contains_key(&specifier)
+          && !fs_docs.docs.contains_key(&specifier)
+        {
+          fs_docs.refresh_document(&self.cache, resolver, &specifier);
+        } else {
+          // update the existing entry to have the new resolver
+          if let Some(doc) = fs_docs.docs.get_mut(&specifier) {
+            if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
+              *doc = new_doc;
+            }
+          }
+        }
+      }
+
+      if finder.hit_limit() {
+        lsp_warn!(
+          concat!(
+            "Hit the language server document preload limit of {} file system entries. ",
+            "You may want to use the \"deno.enablePaths\" configuration setting to only have Deno ",
+            "partially enable a workspace or increase the limit via \"deno.documentPreloadLimit\". ",
+            "In cases where Deno ends up using too much memory, you may want to lower the limit."
+          ),
+          document_preload_limit,
+        );
+
+        // since we hit the limit, just update everything to use the new
+        // resolver
+        for uri in not_found_docs {
+          if let Some(doc) = fs_docs.docs.get_mut(&uri) {
+            if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
+              *doc = new_doc;
+            }
+          }
+        }
+      } else {
+        // clean up and remove any documents that weren't found
+        for uri in not_found_docs {
+          fs_docs.docs.remove(&uri);
+        }
+      }
+    } else {
+      // This log statement is used in the tests to ensure preloading doesn't
+      // happen, which is not useful in the repl and could be very expensive
+      // if the repl is launched from a directory with a lot of descendants.
+      log::debug!("Skipping document preload.");
+
+      // just update to use the new resolver
+      for doc in fs_docs.docs.values_mut() {
+        if let Some(new_doc) = doc.maybe_with_new_resolver(resolver) {
+          *doc = new_doc;
+        }
+      }
     }
 
     fs_docs.dirty = true;
   }
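The new branch distinguishes three behaviors: a completed preload can safely evict cached documents that were not found on disk, a truncated preload cannot trust `not_found_docs` and only swaps in the new resolver, and a limit of zero skips preloading entirely (the repl case previously expressed through `LspClientKind`). A hypothetical summary of that decision table, with illustrative types that are not Deno's:

```rust
/// Illustrative only: the three outcomes encoded by the branch above.
#[derive(Debug, PartialEq)]
enum PreloadOutcome {
    /// Preloading finished under the limit, so any cached document that
    /// was not found on disk can safely be evicted.
    CompletedEvictMissing,
    /// The walk stopped early, so "not found" is unreliable; keep every
    /// cached document and only swap in the new resolver.
    HitLimitKeepAll,
    /// A limit of zero disables preloading entirely (e.g. for the repl).
    Skipped,
}

fn classify(document_preload_limit: usize, hit_limit: bool) -> PreloadOutcome {
    if document_preload_limit == 0 {
        PreloadOutcome::Skipped
    } else if hit_limit {
        PreloadOutcome::HitLimitKeepAll
    } else {
        PreloadOutcome::CompletedEvictMissing
    }
}

fn main() {
    assert_eq!(classify(0, false), PreloadOutcome::Skipped);
    assert_eq!(classify(1_000, true), PreloadOutcome::HitLimitKeepAll);
    assert_eq!(classify(1_000, false), PreloadOutcome::CompletedEvictMissing);
}
```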
@@ -1558,19 +1587,15 @@ enum PendingEntry {
 /// Iterator that finds documents that can be preloaded into
 /// the LSP on startup.
 struct PreloadDocumentFinder {
-  limit: u16,
-  entry_count: u16,
+  limit: usize,
+  entry_count: usize,
   pending_entries: VecDeque<PendingEntry>,
 }
 
 impl PreloadDocumentFinder {
-  pub fn from_enabled_urls(enabled_urls: &Vec<Url>) -> Self {
-    Self::from_enabled_urls_with_limit(enabled_urls, 1_000)
-  }
-
   pub fn from_enabled_urls_with_limit(
     enabled_urls: &Vec<Url>,
-    limit: u16,
+    limit: usize,
   ) -> Self {
     fn is_allowed_root_dir(dir_path: &Path) -> bool {
       if dir_path.parent().is_none() {
@@ -1605,6 +1630,10 @@ impl PreloadDocumentFinder {
     finder
   }
 
+  pub fn hit_limit(&self) -> bool {
+    self.entry_count >= self.limit
+  }
+
   fn get_valid_specifier(path: &Path) -> Option<ModuleSpecifier> {
     fn is_allowed_media_type(media_type: MediaType) -> bool {
       match media_type {
@@ -1699,15 +1728,7 @@ impl Iterator for PreloadDocumentFinder {
       while let Some(entry) = entries.next() {
         self.entry_count += 1;
 
-        if self.entry_count >= self.limit {
-          lsp_warn!(
-            concat!(
-              "Hit the language server document preload limit of {} file system entries. ",
-              "You may want to use the \"deno.enablePaths\" configuration setting to only have Deno ",
-              "partially enable a workspace."
-            ),
-            self.limit,
-          );
+        if self.hit_limit() {
           self.pending_entries.clear(); // stop searching
           return None;
         }
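Factoring the check into `hit_limit()` lets the iterator stop quietly while `update_config` decides afterwards how to warn, and widening `limit` from `u16` to `usize` removes the old 65,535-entry ceiling that a user-configured value could otherwise exceed. Below is a minimal, self-contained analogue of this bounded breadth-first walk; it is a sketch of the idea, not the actual `PreloadDocumentFinder`:

```rust
use std::collections::VecDeque;
use std::path::PathBuf;

/// Sketch of a bounded walker: traverses a directory tree breadth-first,
/// counting every file system entry against a limit.
struct BoundedWalker {
    limit: usize,
    entry_count: usize,
    pending_dirs: VecDeque<PathBuf>,
    pending_files: VecDeque<PathBuf>,
}

impl BoundedWalker {
    fn new(root: PathBuf, limit: usize) -> Self {
        Self {
            limit,
            entry_count: 0,
            pending_dirs: VecDeque::from([root]),
            pending_files: VecDeque::new(),
        }
    }

    /// True once the walk has been truncated; callers use this to decide
    /// whether "not seen" really means "deleted".
    fn hit_limit(&self) -> bool {
        self.entry_count >= self.limit
    }
}

impl Iterator for BoundedWalker {
    type Item = PathBuf;

    fn next(&mut self) -> Option<PathBuf> {
        loop {
            // Drain files discovered in the last directory first.
            if let Some(file) = self.pending_files.pop_front() {
                return Some(file);
            }
            let dir = self.pending_dirs.pop_front()?;
            let Ok(entries) = std::fs::read_dir(&dir) else {
                continue; // unreadable directory: skip it
            };
            for entry in entries.flatten() {
                self.entry_count += 1;
                if self.hit_limit() {
                    // Stop searching entirely, mirroring the diff above.
                    self.pending_dirs.clear();
                    self.pending_files.clear();
                    return None;
                }
                let path = entry.path();
                if path.is_dir() {
                    self.pending_dirs.push_back(path); // breadth-first
                } else {
                    self.pending_files.push_back(path);
                }
            }
        }
    }
}

fn main() {
    let mut walker = BoundedWalker::new(PathBuf::from("."), 1_000);
    let files: Vec<PathBuf> = walker.by_ref().collect();
    println!("found {} files (hit limit: {})", files.len(), walker.hit_limit());
}
```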
@@ -1769,7 +1790,7 @@ mod tests {
 
   fn setup(temp_dir: &TempDir) -> (Documents, PathBuf) {
     let location = temp_dir.path().join("deps");
-    let documents = Documents::new(&location, Default::default());
+    let documents = Documents::new(&location);
     (documents, location)
   }
 
@@ -1899,14 +1920,15 @@ console.log(b, "hello deno");
       .append("test".to_string(), "./file2.ts".to_string())
       .unwrap();
 
-    documents.update_config(
-      vec![],
-      Some(Arc::new(import_map)),
-      None,
-      None,
-      npm_registry_api.clone(),
-      npm_resolution.clone(),
-    );
+    documents.update_config(UpdateDocumentConfigOptions {
+      enabled_urls: vec![],
+      document_preload_limit: 1_000,
+      maybe_import_map: Some(Arc::new(import_map)),
+      maybe_config_file: None,
+      maybe_package_json: None,
+      npm_registry_api: npm_registry_api.clone(),
+      npm_resolution: npm_resolution.clone(),
+    });
 
     // open the document
     let document = documents.open(
@@ -1939,14 +1961,15 @@ console.log(b, "hello deno");
       .append("test".to_string(), "./file3.ts".to_string())
       .unwrap();
 
-    documents.update_config(
-      vec![],
-      Some(Arc::new(import_map)),
-      None,
-      None,
+    documents.update_config(UpdateDocumentConfigOptions {
+      enabled_urls: vec![],
+      document_preload_limit: 1_000,
+      maybe_import_map: Some(Arc::new(import_map)),
+      maybe_config_file: None,
+      maybe_package_json: None,
       npm_registry_api,
       npm_resolution,
-    );
+    });
 
     // check the document's dependencies
     let document = documents.get(&file1_specifier).unwrap();
@@ -2001,12 +2024,15 @@ console.log(b, "hello deno");
     temp_dir.create_dir_all("root3/");
     temp_dir.write("root3/mod.ts", ""); // no, not provided
 
-    let mut urls = PreloadDocumentFinder::from_enabled_urls(&vec![
-      temp_dir.uri().join("root1/").unwrap(),
-      temp_dir.uri().join("root2/file1.ts").unwrap(),
-      temp_dir.uri().join("root2/main.min.ts").unwrap(),
-      temp_dir.uri().join("root2/folder/").unwrap(),
-    ])
+    let mut urls = PreloadDocumentFinder::from_enabled_urls_with_limit(
+      &vec![
+        temp_dir.uri().join("root1/").unwrap(),
+        temp_dir.uri().join("root2/file1.ts").unwrap(),
+        temp_dir.uri().join("root2/main.min.ts").unwrap(),
+        temp_dir.uri().join("root2/folder/").unwrap(),
+      ],
+      1_000,
+    )
     .collect::<Vec<_>>();
 
     // Ideally we would test for order here, which should be BFS, but
@@ -2048,18 +2074,18 @@ console.log(b, "hello deno");
   #[test]
   pub fn test_pre_load_document_finder_disallowed_dirs() {
     if cfg!(windows) {
-      let paths = PreloadDocumentFinder::from_enabled_urls(&vec![Url::parse(
-        "file:///c:/",
-      )
-      .unwrap()])
+      let paths = PreloadDocumentFinder::from_enabled_urls_with_limit(
+        &vec![Url::parse("file:///c:/").unwrap()],
+        1_000,
+      )
       .collect::<Vec<_>>();
       assert_eq!(paths, vec![]);
     } else {
-      let paths =
-        PreloadDocumentFinder::from_enabled_urls(&vec![
-          Url::parse("file:///").unwrap()
-        ])
-        .collect::<Vec<_>>();
+      let paths = PreloadDocumentFinder::from_enabled_urls_with_limit(
+        &vec![Url::parse("file:///").unwrap()],
+        1_000,
+      )
+      .collect::<Vec<_>>();
       assert_eq!(paths, vec![]);
     }
   }