From d91658b45e175cf4c934120f76bf707ceeb13aad Mon Sep 17 00:00:00 2001 From: Nayeem Rahman Date: Mon, 31 Mar 2025 22:25:27 +0100 Subject: [PATCH] refactor(lsp): separate document and module storage (#28469) --- Cargo.lock | 7 + Cargo.toml | 1 + cli/Cargo.toml | 1 + cli/bench/lsp.rs | 22 +- cli/lsp/analysis.rs | 182 +- cli/lsp/cache.rs | 68 +- cli/lsp/code_lens.rs | 262 +-- cli/lsp/completions.rs | 182 +- cli/lsp/config.rs | 170 +- cli/lsp/diagnostics.rs | 542 +++--- cli/lsp/documents.rs | 3078 ++++++++++++++++---------------- cli/lsp/language_server.rs | 2381 +++++++++++++----------- cli/lsp/lsp_custom.rs | 1 + cli/lsp/refactor.rs | 4 +- cli/lsp/resolver.rs | 6 +- cli/lsp/testing/server.rs | 61 +- cli/lsp/tsc.rs | 1692 ++++++++---------- cli/lsp/urls.rs | 436 +---- cli/tsc/98_lsp.js | 16 +- tests/integration/lsp_tests.rs | 346 ++-- 20 files changed, 4619 insertions(+), 4839 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 30425adc4e..0e97b1f653 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1591,6 +1591,7 @@ dependencies = [ "unicode-width 0.1.13", "uuid", "walkdir", + "weak-table", "winapi", "winres", "zip", @@ -9402,6 +9403,12 @@ dependencies = [ "thiserror 2.0.12", ] +[[package]] +name = "weak-table" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "323f4da9523e9a669e1eaf9c6e763892769b1d38c623913647bfdc1532fe4549" + [[package]] name = "web-sys" version = "0.3.77" diff --git a/Cargo.toml b/Cargo.toml index b4df38d608..a852646ea7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -327,6 +327,7 @@ url = { version = "2.5", features = ["serde", "expose_internals"] } urlpattern = "0.3.0" uuid = { version = "1.3.0", features = ["v4"] } walkdir = "=2.5.0" +weak-table = "0.3.2" web-transport-proto = "0.2.3" webpki-root-certs = "0.26.5" webpki-roots = "0.26" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 31c73777b6..0153aea90d 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -177,6 +177,7 @@ typed-arena.workspace = true unicode-width.workspace = true uuid = { workspace = true, features = ["serde"] } walkdir.workspace = true +weak-table.workspace = true zip = { workspace = true, features = ["deflate-flate2"] } zstd.workspace = true diff --git a/cli/bench/lsp.rs b/cli/bench/lsp.rs index 9896d340f6..c95eff0ed6 100644 --- a/cli/bench/lsp.rs +++ b/cli/bench/lsp.rs @@ -130,11 +130,27 @@ fn bench_deco_apps_edits(deno_exe: &Path) -> Duration { } }), ); - let re = lazy_regex::regex!(r"Documents in memory: (\d+)"); + let open_re = lazy_regex::regex!(r"Open: (\d+)"); + let server_re = lazy_regex::regex!(r"Server: (\d+)"); let res = res.as_str().unwrap().to_string(); assert!(res.starts_with("# Deno Language Server Status")); - let captures = re.captures(&res).unwrap(); - let count = captures.get(1).unwrap().as_str().parse::().unwrap(); + let open_count = open_re + .captures(&res) + .unwrap() + .get(1) + .unwrap() + .as_str() + .parse::() + .unwrap(); + let server_count = server_re + .captures(&res) + .unwrap() + .get(1) + .unwrap() + .as_str() + .parse::() + .unwrap(); + let count = open_count + server_count; assert!(count > 1000, "count: {}", count); client.shutdown(); diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 582b14c5ca..8f622d35d1 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -5,12 +5,14 @@ use std::cmp::Ordering; use std::collections::HashMap; use std::collections::HashSet; use std::path::Path; +use std::sync::Arc; use deno_ast::SourceRange; use deno_ast::SourceRangedForSpanned; use deno_ast::SourceTextInfo; use 
deno_core::anyhow::anyhow; use deno_core::error::AnyError; +use deno_core::resolve_url; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; use deno_core::serde_json; @@ -33,6 +35,7 @@ use deno_semver::SmallStackString; use deno_semver::StackString; use deno_semver::Version; use import_map::ImportMap; +use lsp_types::Uri; use node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; @@ -45,13 +48,12 @@ use tower_lsp::lsp_types::Range; use super::diagnostics::DenoDiagnostic; use super::diagnostics::DiagnosticSource; -use super::documents::Documents; +use super::documents::DocumentModule; +use super::documents::DocumentModules; use super::language_server; use super::resolver::LspResolver; use super::tsc; -use super::urls::url_to_uri; use crate::args::jsr_url; -use crate::lsp::logging::lsp_warn; use crate::tools::lint::CliLinter; use crate::util::path::relative_specifier; @@ -233,27 +235,27 @@ fn code_as_string(code: &Option) -> String { /// Rewrites imports in quick fixes and code changes to be Deno specific. pub struct TsResponseImportMapper<'a> { - documents: &'a Documents, + document_modules: &'a DocumentModules, + scope: Option>, maybe_import_map: Option<&'a ImportMap>, resolver: &'a LspResolver, tsc_specifier_map: &'a tsc::TscSpecifierMap, - file_referrer: ModuleSpecifier, } impl<'a> TsResponseImportMapper<'a> { pub fn new( - documents: &'a Documents, + document_modules: &'a DocumentModules, + scope: Option>, maybe_import_map: Option<&'a ImportMap>, resolver: &'a LspResolver, tsc_specifier_map: &'a tsc::TscSpecifierMap, - file_referrer: &ModuleSpecifier, ) -> Self { Self { - documents, + document_modules, + scope, maybe_import_map, resolver, tsc_specifier_map, - file_referrer: file_referrer.clone(), } } @@ -299,7 +301,7 @@ impl<'a> TsResponseImportMapper<'a> { let export = self.resolver.jsr_lookup_export_for_path( &nv, &path, - Some(&self.file_referrer), + self.scope.as_deref(), )?; let sub_path = (export != ".") .then_some(export) @@ -327,7 +329,7 @@ impl<'a> TsResponseImportMapper<'a> { req = req.or_else(|| { self .resolver - .jsr_lookup_req_for_nv(&nv, Some(&self.file_referrer)) + .jsr_lookup_req_for_nv(&nv, self.scope.as_deref()) }); let spec_str = if let Some(req) = req { let req_ref = PackageReqReference { req, sub_path }; @@ -357,11 +359,11 @@ impl<'a> TsResponseImportMapper<'a> { if let Some(npm_resolver) = self .resolver - .maybe_managed_npm_resolver(Some(&self.file_referrer)) + .maybe_managed_npm_resolver(self.scope.as_deref()) { let in_npm_pkg = self .resolver - .in_npm_pkg_checker(Some(&self.file_referrer)) + .in_npm_pkg_checker(self.scope.as_deref()) .in_npm_package(specifier); if in_npm_pkg { if let Ok(Some(pkg_id)) = @@ -428,7 +430,7 @@ impl<'a> TsResponseImportMapper<'a> { } } else if let Some(dep_name) = self .resolver - .file_url_to_package_json_dep(specifier, Some(&self.file_referrer)) + .file_url_to_package_json_dep(specifier, self.scope.as_deref()) { return Some(dep_name); } @@ -515,7 +517,7 @@ impl<'a> TsResponseImportMapper<'a> { for specifier in specifiers { if let Some(specifier) = self .resolver - .as_cli_resolver(Some(&self.file_referrer)) + .as_cli_resolver(self.scope.as_deref()) .resolve( &specifier, referrer, @@ -525,7 +527,11 @@ impl<'a> TsResponseImportMapper<'a> { ) .ok() .and_then(|s| self.tsc_specifier_map.normalize(s.as_str()).ok()) - .filter(|s| self.documents.exists(s, Some(&self.file_referrer))) + .filter(|s| { + self + .document_modules + .specifier_exists(s, 
self.scope.as_deref()) + }) { if let Some(specifier) = self .check_specifier(&specifier, referrer) @@ -547,7 +553,7 @@ impl<'a> TsResponseImportMapper<'a> { ) -> bool { self .resolver - .as_cli_resolver(Some(&self.file_referrer)) + .as_cli_resolver(self.scope.as_deref()) .resolve( specifier_text, referrer, @@ -645,6 +651,7 @@ fn try_reverse_map_package_json_exports( /// like an import and rewrite the import specifier to include the extension pub fn fix_ts_import_changes( changes: &[tsc::FileTextChanges], + module: &DocumentModule, language_server: &language_server::Inner, token: &CancellationToken, ) -> Result, AnyError> { @@ -653,16 +660,29 @@ pub fn fix_ts_import_changes( if token.is_cancelled() { return Err(anyhow!("request cancelled")); } - let Ok(referrer) = ModuleSpecifier::parse(&change.file_name) else { + let is_new_file = change.is_new_file.unwrap_or(false); + let Ok(target_specifier) = resolve_url(&change.file_name) else { continue; }; - let referrer_doc = language_server.get_asset_or_document(&referrer).ok(); - let resolution_mode = referrer_doc + let target_module = if is_new_file { + None + } else { + let Some(target_module) = language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + ) + else { + continue; + }; + Some(target_module) + }; + let resolution_mode = target_module .as_ref() - .map(|d| d.resolution_mode()) + .map(|m| m.resolution_mode) .unwrap_or(ResolutionMode::Import); - let import_mapper = - language_server.get_ts_response_import_mapper(&referrer); + let import_mapper = language_server.get_ts_response_import_mapper(module); let mut text_changes = Vec::new(); for text_change in &change.text_changes { let lines = text_change.new_text.split('\n'); @@ -673,7 +693,11 @@ pub fn fix_ts_import_changes( let specifier = captures.iter().skip(1).find_map(|s| s).unwrap().as_str(); if let Some(new_specifier) = import_mapper - .check_unresolved_specifier(specifier, &referrer, resolution_mode) + .check_unresolved_specifier( + specifier, + &target_specifier, + resolution_mode, + ) { line.replace(specifier, &new_specifier) } else { @@ -702,9 +726,8 @@ pub fn fix_ts_import_changes( /// Fix tsc import code actions so that the module specifier is correct for /// resolution by Deno (includes the extension). fn fix_ts_import_action<'a>( - referrer: &ModuleSpecifier, - resolution_mode: ResolutionMode, action: &'a tsc::CodeFixAction, + module: &DocumentModule, language_server: &language_server::Inner, ) -> Option> { if !matches!( @@ -721,11 +744,11 @@ fn fix_ts_import_action<'a>( let Some(specifier) = specifier else { return Some(Cow::Borrowed(action)); }; - let import_mapper = language_server.get_ts_response_import_mapper(referrer); + let import_mapper = language_server.get_ts_response_import_mapper(module); if let Some(new_specifier) = import_mapper.check_unresolved_specifier( specifier, - referrer, - resolution_mode, + &module.specifier, + module.resolution_mode, ) { let description = action.description.replace(specifier, &new_specifier); let changes = action @@ -756,8 +779,11 @@ fn fix_ts_import_action<'a>( fix_id: None, fix_all_description: None, })) - } else if !import_mapper.is_valid_import(specifier, referrer, resolution_mode) - { + } else if !import_mapper.is_valid_import( + specifier, + &module.specifier, + module.resolution_mode, + ) { None } else { Some(Cow::Borrowed(action)) @@ -818,16 +844,14 @@ fn is_preferred( /// for an LSP CodeAction. 
pub fn ts_changes_to_edit( changes: &[tsc::FileTextChanges], + module: &DocumentModule, language_server: &language_server::Inner, ) -> Result, AnyError> { let mut text_document_edits = Vec::new(); for change in changes { - let edit = match change.to_text_document_edit(language_server) { - Ok(e) => e, - Err(err) => { - lsp_warn!("Couldn't covert text document edit: {:#}", err); - continue; - } + let Some(edit) = change.to_text_document_edit(module, language_server) + else { + continue; }; text_document_edits.push(edit); } @@ -841,7 +865,7 @@ pub fn ts_changes_to_edit( #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeActionData { - pub specifier: ModuleSpecifier, + pub uri: Uri, pub fix_id: String, } @@ -866,27 +890,27 @@ pub struct CodeActionCollection { impl CodeActionCollection { pub fn add_deno_fix_action( &mut self, + uri: &Uri, specifier: &ModuleSpecifier, diagnostic: &lsp::Diagnostic, ) -> Result<(), AnyError> { - let code_action = DenoDiagnostic::get_code_action(specifier, diagnostic)?; + let code_action = + DenoDiagnostic::get_code_action(uri, specifier, diagnostic)?; self.actions.push(CodeActionKind::Deno(code_action)); Ok(()) } pub fn add_deno_lint_actions( &mut self, - specifier: &ModuleSpecifier, + uri: &Uri, + module: &DocumentModule, diagnostic: &lsp::Diagnostic, - maybe_text_info: Option<&SourceTextInfo>, - maybe_parsed_source: Option<&deno_ast::ParsedSource>, ) -> Result<(), AnyError> { if let Some(data_quick_fixes) = diagnostic .data .as_ref() .and_then(|d| serde_json::from_value::>(d.clone()).ok()) { - let uri = url_to_uri(specifier)?; for quick_fix in data_quick_fixes { let mut changes = HashMap::new(); changes.insert( @@ -917,22 +941,15 @@ impl CodeActionCollection { self.actions.push(CodeActionKind::DenoLint(code_action)); } } - self.add_deno_lint_ignore_action( - specifier, - diagnostic, - maybe_text_info, - maybe_parsed_source, - ) + self.add_deno_lint_ignore_action(uri, module, diagnostic) } fn add_deno_lint_ignore_action( &mut self, - specifier: &ModuleSpecifier, + uri: &Uri, + module: &DocumentModule, diagnostic: &lsp::Diagnostic, - maybe_text_info: Option<&SourceTextInfo>, - maybe_parsed_source: Option<&deno_ast::ParsedSource>, ) -> Result<(), AnyError> { - let uri = url_to_uri(specifier)?; let code = diagnostic .code .as_ref() @@ -941,11 +958,11 @@ impl CodeActionCollection { _ => "".to_string(), }) .unwrap(); + let text_info = module.text_info(); - let line_content = maybe_text_info.map(|ti| { - ti.line_text(diagnostic.range.start.line as usize) - .to_string() - }); + let line_content = text_info + .line_text(diagnostic.range.start.line as usize) + .to_string(); let mut changes = HashMap::new(); changes.insert( @@ -953,7 +970,7 @@ impl CodeActionCollection { vec![lsp::TextEdit { new_text: prepend_whitespace( format!("// deno-lint-ignore {code}\n"), - line_content, + Some(line_content), ), range: lsp::Range { start: lsp::Position { @@ -986,20 +1003,25 @@ impl CodeActionCollection { .push(CodeActionKind::DenoLint(ignore_error_action)); // Disable a lint error for the entire file. - let maybe_ignore_comment = maybe_parsed_source.and_then(|ps| { - // Note: we can use ps.get_leading_comments() but it doesn't - // work when shebang is present at the top of the file. 
- ps.comments().get_vec().iter().find_map(|c| { - let comment_text = c.text.trim(); - comment_text.split_whitespace().next().and_then(|prefix| { - if prefix == "deno-lint-ignore-file" { - Some(c.clone()) - } else { - None - } + let maybe_ignore_comment = module + .open_data + .as_ref() + .and_then(|d| d.parsed_source.as_ref()) + .and_then(|ps| { + let ps = ps.as_ref().ok()?; + // Note: we can use ps.get_leading_comments() but it doesn't + // work when shebang is present at the top of the file. + ps.comments().get_vec().iter().find_map(|c| { + let comment_text = c.text.trim(); + comment_text.split_whitespace().next().and_then(|prefix| { + if prefix == "deno-lint-ignore-file" { + Some(c.clone()) + } else { + None + } + }) }) - }) - }); + }); let mut new_text = format!("// deno-lint-ignore-file {code}\n"); let mut range = lsp::Range { @@ -1017,9 +1039,7 @@ impl CodeActionCollection { if let Some(ignore_comment) = maybe_ignore_comment { new_text = format!(" {code}"); // Get the end position of the comment. - let line = maybe_text_info - .unwrap() - .line_and_column_index(ignore_comment.end()); + let line = text_info.line_and_column_index(ignore_comment.end()); let position = lsp::Position { line: line.line_index as u32, character: line.column_index as u32, @@ -1051,7 +1071,7 @@ impl CodeActionCollection { let mut changes = HashMap::new(); changes.insert( - uri, + uri.clone(), vec![lsp::TextEdit { new_text: "// deno-lint-ignore-file\n".to_string(), range: lsp::Range { @@ -1090,10 +1110,9 @@ impl CodeActionCollection { /// Add a TypeScript code fix action to the code actions collection. pub fn add_ts_fix_action( &mut self, - specifier: &ModuleSpecifier, - resolution_mode: ResolutionMode, action: &tsc::CodeFixAction, diagnostic: &lsp::Diagnostic, + module: &DocumentModule, language_server: &language_server::Inner, ) -> Result<(), AnyError> { if action.commands.is_some() { @@ -1112,12 +1131,11 @@ impl CodeActionCollection { .into(), ); } - let Some(action) = - fix_ts_import_action(specifier, resolution_mode, action, language_server) + let Some(action) = fix_ts_import_action(action, module, language_server) else { return Ok(()); }; - let edit = ts_changes_to_edit(&action.changes, language_server)?; + let edit = ts_changes_to_edit(&action.changes, module, language_server)?; let code_action = lsp::CodeAction { title: action.description.clone(), kind: Some(lsp::CodeActionKind::QUICKFIX), @@ -1160,12 +1178,12 @@ impl CodeActionCollection { pub fn add_ts_fix_all_action( &mut self, action: &tsc::CodeFixAction, - specifier: &ModuleSpecifier, + module: &DocumentModule, diagnostic: &lsp::Diagnostic, ) { let data = action.fix_id.as_ref().map(|fix_id| { json!(CodeActionData { - specifier: specifier.clone(), + uri: module.uri.as_ref().clone(), fix_id: fix_id.clone(), }) }); diff --git a/cli/lsp/cache.rs b/cli/lsp/cache.rs index 9ac2a80d46..4d8a30eb14 100644 --- a/cli/lsp/cache.rs +++ b/cli/lsp/cache.rs @@ -19,22 +19,8 @@ use crate::lsp::logging::lsp_log; use crate::lsp::logging::lsp_warn; use crate::sys::CliSys; -pub fn calculate_fs_version( - cache: &LspCache, - specifier: &ModuleSpecifier, - file_referrer: Option<&ModuleSpecifier>, -) -> Option { - match specifier.scheme() { - "npm" | "node" | "data" | "blob" => None, - "file" => url_to_file_path(specifier) - .ok() - .and_then(|path| calculate_fs_version_at_path(&path)), - _ => calculate_fs_version_in_cache(cache, specifier, file_referrer), - } -} - /// Calculate a version for for a given path. 
-pub fn calculate_fs_version_at_path(path: &Path) -> Option { +pub fn calculate_fs_version_at_path(path: impl AsRef) -> Option { let metadata = fs::metadata(path).ok()?; if let Ok(modified) = metadata.modified() { if let Ok(n) = modified.duration_since(SystemTime::UNIX_EPOCH) { @@ -47,32 +33,11 @@ pub fn calculate_fs_version_at_path(path: &Path) -> Option { } } -fn calculate_fs_version_in_cache( - cache: &LspCache, - specifier: &ModuleSpecifier, - file_referrer: Option<&ModuleSpecifier>, -) -> Option { - let http_cache = cache.for_specifier(file_referrer); - let Ok(cache_key) = http_cache.cache_item_key(specifier) else { - return Some("1".to_string()); - }; - match http_cache.read_modified_time(&cache_key) { - Ok(Some(modified)) => { - match modified.duration_since(SystemTime::UNIX_EPOCH) { - Ok(n) => Some(n.as_millis().to_string()), - Err(_) => Some("1".to_string()), - } - } - Ok(None) => None, - Err(_) => Some("1".to_string()), - } -} - #[derive(Debug, Clone)] pub struct LspCache { deno_dir: DenoDir, global: Arc, - vendors_by_scope: BTreeMap>>, + vendors_by_scope: BTreeMap, Option>>, } impl Default for LspCache { @@ -178,11 +143,30 @@ impl LspCache { vendor.get_remote_url(&path) } - pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool { - if let Ok(path) = url_to_file_path(specifier) { - if !path.starts_with(&self.deno_dir().root) { - return true; - } + pub fn in_cache_directory(&self, specifier: &Url) -> bool { + let Ok(path) = url_to_file_path(specifier) else { + return false; + }; + if path.starts_with(&self.deno_dir().root) { + return true; + } + let Some(vendor) = self + .vendors_by_scope + .iter() + .rfind(|(s, _)| specifier.as_str().starts_with(s.as_str())) + .and_then(|(_, c)| c.as_ref()) + else { + return false; + }; + vendor.get_remote_url(&path).is_some() + } + + pub fn in_global_cache_directory(&self, specifier: &Url) -> bool { + let Ok(path) = url_to_file_path(specifier) else { + return false; + }; + if path.starts_with(&self.deno_dir().root) { + return true; } false } diff --git a/cli/lsp/code_lens.rs b/cli/lsp/code_lens.rs index 777b03918c..4154cd191e 100644 --- a/cli/lsp/code_lens.rs +++ b/cli/lsp/code_lens.rs @@ -11,18 +11,19 @@ use deno_ast::swc::ecma_visit::VisitWith; use deno_ast::ParsedSource; use deno_ast::SourceRange; use deno_ast::SourceRangedForSpanned; -use deno_core::anyhow::anyhow; use deno_core::error::AnyError; -use deno_core::resolve_url; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::ModuleSpecifier; use lazy_regex::lazy_regex; +use lsp_types::Uri; use once_cell::sync::Lazy; use regex::Regex; use tokio_util::sync::CancellationToken; +use tower_lsp::jsonrpc::Error as LspError; +use tower_lsp::jsonrpc::Result as LspResult; use tower_lsp::lsp_types as lsp; use super::analysis::source_range_to_lsp_range; @@ -36,7 +37,7 @@ static ABSTRACT_MODIFIER: Lazy = lazy_regex!(r"\babstract\b"); static EXPORT_MODIFIER: Lazy = lazy_regex!(r"\bexport\b"); -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, Deserialize, Serialize)] pub enum CodeLensSource { #[serde(rename = "implementations")] Implementations, @@ -44,11 +45,11 @@ pub enum CodeLensSource { References, } -#[derive(Debug, Deserialize)] +#[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CodeLensData { pub source: CodeLensSource, - pub specifier: ModuleSpecifier, + pub uri: Uri, } struct DenoTestCollector { @@ -254,83 +255,61 
@@ async fn resolve_implementation_code_lens( data: CodeLensData, language_server: &language_server::Inner, token: &CancellationToken, -) -> Result { - let asset_or_doc = language_server.get_asset_or_document(&data.specifier)?; - let line_index = asset_or_doc.line_index(); - let maybe_implementations = language_server - .ts_server - .get_implementations( - language_server.snapshot(), - data.specifier.clone(), - line_index.offset_tsc(code_lens.range.start)?, +) -> LspResult { + let locations = language_server + .goto_implementation( + lsp::request::GotoImplementationParams { + text_document_position_params: lsp::TextDocumentPositionParams { + text_document: lsp::TextDocumentIdentifier { + uri: data.uri.clone(), + }, + position: code_lens.range.start, + }, + work_done_progress_params: Default::default(), + partial_result_params: Default::default(), + }, token, ) - .await - .map_err(|err| { - if token.is_cancelled() { - anyhow!("request cancelled") - } else { - anyhow!( - "Unable to get implementation locations from TypeScript: {:#}", - err - ) - } - })?; - if let Some(implementations) = maybe_implementations { - let mut locations = Vec::new(); - for implementation in implementations { - if token.is_cancelled() { - break; - } - let implementation_specifier = - resolve_url(&implementation.document_span.file_name)?; - let implementation_location = - implementation.to_location(line_index.clone(), language_server); - if !(implementation_specifier == data.specifier - && implementation_location.range.start == code_lens.range.start) - { - locations.push(implementation_location); - } - } - let command = if !locations.is_empty() { - let title = if locations.len() > 1 { - format!("{} implementations", locations.len()) - } else { - "1 implementation".to_string() - }; - lsp::Command { - title, - command: "deno.client.showReferences".to_string(), - arguments: Some(vec![ - json!(data.specifier), - json!(code_lens.range.start), - json!(locations), - ]), - } - } else { - lsp::Command { - title: "0 implementations".to_string(), - command: "".to_string(), - arguments: None, - } - }; - Ok(lsp::CodeLens { - range: code_lens.range, - command: Some(command), - data: None, + .await? 
+ .map(|r| match r { + lsp::GotoDefinitionResponse::Scalar(location) => vec![location], + lsp::GotoDefinitionResponse::Array(locations) => locations, + lsp::GotoDefinitionResponse::Link(links) => links + .into_iter() + .map(|l| lsp::Location { + uri: l.target_uri, + range: l.target_selection_range, + }) + .collect(), }) + .unwrap_or(Vec::new()); + let title = if locations.len() == 1 { + "1 implementation".to_string() } else { - let command = Some(lsp::Command { - title: "0 implementations".to_string(), - command: "".to_string(), + format!("{} implementations", locations.len()) + }; + let command = if locations.is_empty() { + lsp::Command { + title, + command: String::new(), arguments: None, - }); - Ok(lsp::CodeLens { - range: code_lens.range, - command, - data: None, - }) - } + } + } else { + lsp::Command { + title, + command: "deno.client.showReferences".to_string(), + arguments: Some(vec![ + json!(data.uri), + json!(code_lens.range.start), + json!(locations), + ]), + } + }; + Ok(lsp::CodeLens { + range: code_lens.range, + command: Some(command), + data: None, + }) } async fn resolve_references_code_lens( @@ -338,59 +317,26 @@ async fn resolve_references_code_lens( data: CodeLensData, language_server: &language_server::Inner, token: &CancellationToken, -) -> Result { - fn get_locations( - maybe_referenced_symbols: Option>, - language_server: &language_server::Inner, - token: &CancellationToken, - ) -> Result, AnyError> { - let symbols = match maybe_referenced_symbols { - Some(symbols) => symbols, - None => return Ok(Vec::new()), - }; - let mut locations = Vec::new(); - for reference in symbols.iter().flat_map(|s| &s.references) { - if token.is_cancelled() { - break; - } - if reference.is_definition { - continue; - } - let reference_specifier = - resolve_url(&reference.entry.document_span.file_name)?; - let asset_or_doc = - language_server.get_asset_or_document(&reference_specifier)?; - locations.push( - reference - .entry - .to_location(asset_or_doc.line_index(), language_server), - ); - } - Ok(locations) - } - - let asset_or_document = - language_server.get_asset_or_document(&data.specifier)?; - let line_index = asset_or_document.line_index(); - - let maybe_referenced_symbols = language_server - .ts_server - .find_references( - language_server.snapshot(), - data.specifier.clone(), - line_index.offset_tsc(code_lens.range.start)?, +) -> LspResult { + let locations = language_server + .references( + lsp::ReferenceParams { + text_document_position: lsp::TextDocumentPositionParams { + text_document: lsp::TextDocumentIdentifier { + uri: data.uri.clone(), + }, + position: code_lens.range.start, + }, + work_done_progress_params: Default::default(), + partial_result_params: Default::default(), + context: lsp::ReferenceContext { + include_declaration: false, + }, + }, token, ) - .await - .map_err(|err| { - if token.is_cancelled() { - anyhow!("request cancelled") - } else { - anyhow!("Unable to get references from TypeScript: {:#}", err) - } - })?; - let locations = - get_locations(maybe_referenced_symbols, language_server, token)?; + .await? 
+ .unwrap_or_default(); let title = if locations.len() == 1 { "1 reference".to_string() } else { @@ -407,7 +353,7 @@ async fn resolve_references_code_lens( title, command: "deno.client.showReferences".to_string(), arguments: Some(vec![ - json!(data.specifier), + json!(data.uri), json!(code_lens.range.start), json!(locations), ]), @@ -424,9 +370,14 @@ pub async fn resolve_code_lens( code_lens: lsp::CodeLens, language_server: &language_server::Inner, token: &CancellationToken, -) -> Result { +) -> LspResult { let data: CodeLensData = - serde_json::from_value(code_lens.data.clone().unwrap())?; + serde_json::from_value(code_lens.data.clone().unwrap()).map_err(|err| { + LspError::invalid_params(format!( + "Unable to parse code lens data: {:#}", + err + )) + })?; match data.source { CodeLensSource::Implementations => { resolve_implementation_code_lens(code_lens, data, language_server, token) @@ -453,7 +404,7 @@ pub fn collect_test( /// Return tsc navigation tree code lenses. pub fn collect_tsc( - specifier: &ModuleSpecifier, + uri: &Uri, code_lens_settings: &CodeLensSettings, line_index: Arc, navigation_tree: &NavigationTree, @@ -468,11 +419,7 @@ pub fn collect_tsc( let source = CodeLensSource::Implementations; match i.kind { tsc::ScriptElementKind::InterfaceElement => { - code_lenses.push(i.to_code_lens( - line_index.clone(), - specifier, - &source, - )); + code_lenses.push(i.to_code_lens(line_index.clone(), uri, source)); } tsc::ScriptElementKind::ClassElement | tsc::ScriptElementKind::MemberFunctionElement @@ -480,11 +427,7 @@ pub fn collect_tsc( | tsc::ScriptElementKind::MemberGetAccessorElement | tsc::ScriptElementKind::MemberSetAccessorElement => { if ABSTRACT_MODIFIER.is_match(&i.kind_modifiers) { - code_lenses.push(i.to_code_lens( - line_index.clone(), - specifier, - &source, - )); + code_lenses.push(i.to_code_lens(line_index.clone(), uri, source)); } } _ => (), @@ -496,51 +439,31 @@ pub fn collect_tsc( let source = CodeLensSource::References; if let Some(parent) = &mp { if parent.kind == tsc::ScriptElementKind::EnumElement { - code_lenses.push(i.to_code_lens( - line_index.clone(), - specifier, - &source, - )); + code_lenses.push(i.to_code_lens(line_index.clone(), uri, source)); } } match i.kind { tsc::ScriptElementKind::FunctionElement => { if code_lens_settings.references_all_functions { - code_lenses.push(i.to_code_lens( - line_index.clone(), - specifier, - &source, - )); + code_lenses.push(i.to_code_lens(line_index.clone(), uri, source)); } } tsc::ScriptElementKind::ConstElement | tsc::ScriptElementKind::LetElement | tsc::ScriptElementKind::VariableElement => { if EXPORT_MODIFIER.is_match(&i.kind_modifiers) { - code_lenses.push(i.to_code_lens( - line_index.clone(), - specifier, - &source, - )); + code_lenses.push(i.to_code_lens(line_index.clone(), uri, source)); } } tsc::ScriptElementKind::ClassElement => { if i.text != "" { - code_lenses.push(i.to_code_lens( - line_index.clone(), - specifier, - &source, - )); + code_lenses.push(i.to_code_lens(line_index.clone(), uri, source)); } } tsc::ScriptElementKind::InterfaceElement | tsc::ScriptElementKind::TypeElement | tsc::ScriptElementKind::EnumElement => { - code_lenses.push(i.to_code_lens( - line_index.clone(), - specifier, - &source, - )); + code_lenses.push(i.to_code_lens(line_index.clone(), uri, source)); } tsc::ScriptElementKind::LocalFunctionElement | tsc::ScriptElementKind::MemberFunctionElement @@ -556,8 +479,8 @@ pub fn collect_tsc( | tsc::ScriptElementKind::TypeElement => { code_lenses.push(i.to_code_lens( line_index.clone(), - 
specifier, - &source, + uri, + source, )); } _ => (), @@ -575,6 +498,7 @@ pub fn collect_tsc( #[cfg(test)] mod tests { use deno_ast::MediaType; + use deno_core::resolve_url; use super::*; diff --git a/cli/lsp/completions.rs b/cli/lsp/completions.rs index 2da90cd56e..418c58713b 100644 --- a/cli/lsp/completions.rs +++ b/cli/lsp/completions.rs @@ -25,8 +25,9 @@ use tower_lsp::lsp_types as lsp; use super::client::Client; use super::config::Config; use super::config::WorkspaceSettings; -use super::documents::Documents; -use super::documents::DocumentsFilter; +use super::documents::DocumentModule; +use super::documents::DocumentModules; +use super::documents::ServerDocumentKind; use super::jsr::CliJsrSearchApi; use super::lsp_custom; use super::npm::CliNpmSearchApi; @@ -152,38 +153,36 @@ fn to_narrow_lsp_range( #[allow(clippy::too_many_arguments)] #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] pub async fn get_import_completions( - specifier: &ModuleSpecifier, + module: &DocumentModule, position: &lsp::Position, config: &Config, client: &Client, module_registries: &ModuleRegistry, jsr_search_api: &CliJsrSearchApi, npm_search_api: &CliNpmSearchApi, - documents: &Documents, + document_modules: &DocumentModules, resolver: &LspResolver, maybe_import_map: Option<&ImportMap>, ) -> Option { - let document = documents.get(specifier)?; - let file_referrer = document.file_referrer(); - let (text, _, graph_range) = document.get_maybe_dependency(position)?; + let (text, _, graph_range) = module.dependency_at_position(position)?; let resolution_mode = graph_range .resolution_mode .map(to_node_resolution_mode) - .unwrap_or_else(|| document.resolution_mode()); - let range = to_narrow_lsp_range(document.text_info(), graph_range.range); + .unwrap_or_else(|| module.resolution_mode); + let range = to_narrow_lsp_range(module.text_info(), graph_range.range); let resolved = resolver - .as_cli_resolver(file_referrer) + .as_cli_resolver(module.scope.as_deref()) .resolve( - &text, - specifier, + text, + &module.specifier, deno_graph::Position::zeroed(), resolution_mode, NodeResolutionKind::Execution, ) .ok(); if let Some(completion_list) = get_jsr_completions( - specifier, - &text, + &module.specifier, + text, &range, resolved.as_ref(), jsr_search_api, @@ -193,39 +192,46 @@ pub async fn get_import_completions( { Some(lsp::CompletionResponse::List(completion_list)) } else if let Some(completion_list) = - get_npm_completions(specifier, &text, &range, npm_search_api).await + get_npm_completions(&module.specifier, text, &range, npm_search_api).await { Some(lsp::CompletionResponse::List(completion_list)) - } else if let Some(completion_list) = get_node_completions(&text, &range) { + } else if let Some(completion_list) = get_node_completions(text, &range) { Some(lsp::CompletionResponse::List(completion_list)) - } else if let Some(completion_list) = - get_import_map_completions(specifier, &text, &range, maybe_import_map) - { + } else if let Some(completion_list) = get_import_map_completions( + &module.specifier, + text, + &range, + maybe_import_map, + ) { // completions for import map specifiers Some(lsp::CompletionResponse::List(completion_list)) - } else if let Some(completion_list) = - get_local_completions(specifier, resolution_mode, &text, &range, resolver) - { + } else if let Some(completion_list) = get_local_completions( + &module.specifier, + resolution_mode, + text, + &range, + resolver, + ) { // completions for local relative modules Some(lsp::CompletionResponse::List(completion_list)) } else if 
!text.is_empty() { // completion of modules from a module registry or cache check_auto_config_registry( - &text, - config.workspace_settings_for_specifier(specifier), + text, + config.workspace_settings_for_specifier(&module.specifier), client, module_registries, ) .await; let maybe_list = module_registries - .get_completions(&text, &range, resolved.as_ref(), |s| { - documents.exists(s, file_referrer) + .get_completions(text, &range, resolved.as_ref(), |s| { + document_modules.specifier_exists(s, module.scope.as_deref()) }) .await; let maybe_list = maybe_list - .or_else(|| module_registries.get_origin_completions(&text, &range)); + .or_else(|| module_registries.get_origin_completions(text, &range)); let list = maybe_list.unwrap_or_else(|| CompletionList { - items: get_workspace_completions(specifier, &text, &range, documents), + items: get_remote_completions(module, text, &range, document_modules), is_incomplete: false, }); Some(lsp::CompletionResponse::List(list)) @@ -248,10 +254,13 @@ pub async fn get_import_completions( .collect(); let mut is_incomplete = false; if let Some(import_map) = maybe_import_map { - items.extend(get_base_import_map_completions(import_map, specifier)); + items.extend(get_base_import_map_completions( + import_map, + &module.specifier, + )); } if let Some(origin_items) = - module_registries.get_origin_completions(&text, &range) + module_registries.get_origin_completions(text, &range) { is_incomplete = origin_items.is_incomplete; items.extend(origin_items.items); @@ -440,22 +449,6 @@ fn get_local_completions( } } -fn get_relative_specifiers( - base: &ModuleSpecifier, - specifiers: Vec, -) -> Vec { - specifiers - .iter() - .filter_map(|s| { - if s != base { - Some(relative_specifier(base, s).unwrap_or_else(|| s.to_string())) - } else { - None - } - }) - .collect() -} - /// Find the index of the '@' delimiting the package name and version, if any. fn parse_bare_specifier_version_index(bare_specifier: &str) -> Option { if bare_specifier.starts_with('@') { @@ -770,34 +763,33 @@ fn get_node_completions( }) } -/// Get workspace completions that include modules in the Deno cache which match +/// Get remote completions that include modules in the Deno cache which match /// the current specifier string. -fn get_workspace_completions( - specifier: &ModuleSpecifier, +fn get_remote_completions( + module: &DocumentModule, current: &str, range: &lsp::Range, - documents: &Documents, + document_modules: &DocumentModules, ) -> Vec { - let workspace_specifiers = documents - .documents(DocumentsFilter::AllDiagnosable) - .into_iter() - .map(|d| d.specifier().clone()) - .collect(); - let specifier_strings = - get_relative_specifiers(specifier, workspace_specifiers); - specifier_strings + let specifiers = document_modules + .documents + .server_docs() .into_iter() + .filter_map(|d| { + if let ServerDocumentKind::RemoteUrl { url, .. 
} = &d.kind { + if *url == module.specifier { + return None; + } + return Some( + relative_specifier(&module.specifier, url) + .unwrap_or_else(|| url.to_string()), + ); + } + None + }); + specifiers .filter_map(|label| { if label.starts_with(current) { - let detail = Some( - if label.starts_with("http:") || label.starts_with("https:") { - "(remote)".to_string() - } else if label.starts_with("data:") { - "(data)".to_string() - } else { - "(local)".to_string() - }, - ); let text_edit = Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit { range: *range, new_text: label.clone(), @@ -805,7 +797,7 @@ fn get_workspace_completions( Some(lsp::CompletionItem { label, kind: Some(lsp::CompletionItemKind::FILE), - detail, + detail: Some("(remote)".to_string()), sort_text: Some("1".to_string()), text_edit, commit_characters: Some( @@ -831,18 +823,18 @@ mod tests { use super::*; use crate::cache::HttpCache; use crate::lsp::cache::LspCache; - use crate::lsp::documents::Documents; use crate::lsp::documents::LanguageId; use crate::lsp::search::tests::TestPackageSearchApi; + use crate::lsp::urls::url_to_uri; fn setup( open_sources: &[(&str, &str, i32, LanguageId)], fs_sources: &[(&str, &str)], - ) -> Documents { + ) -> DocumentModules { let temp_dir = TempDir::new(); let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap())); - let mut documents = Documents::default(); - documents.update_config( + let mut document_modules = DocumentModules::default(); + document_modules.update_config( &Default::default(), &Default::default(), &cache, @@ -851,7 +843,13 @@ mod tests { for (specifier, source, version, language_id) in open_sources { let specifier = resolve_url(specifier).expect("failed to create specifier"); - documents.open(specifier, *version, *language_id, (*source).into(), None); + let uri = url_to_uri(&specifier).unwrap(); + document_modules.open_document( + uri, + *version, + *language_id, + (*source).into(), + ); } for (specifier, source) in fs_sources { let specifier = @@ -860,32 +858,10 @@ mod tests { .global() .set(&specifier, HashMap::default(), source.as_bytes()) .expect("could not cache file"); - let document = documents - .get_or_load(&specifier, Some(&temp_dir.url().join("$").unwrap())); - assert!(document.is_some(), "source could not be setup"); + let module = document_modules.module_for_specifier(&specifier, None); + assert!(module.is_some(), "source could not be setup"); } - documents - } - - #[test] - fn test_get_relative_specifiers() { - let base = resolve_url("file:///a/b/c.ts").unwrap(); - let specifiers = vec![ - resolve_url("file:///a/b/c.ts").unwrap(), - resolve_url("file:///a/b/d.ts").unwrap(), - resolve_url("file:///a/c/c.ts").unwrap(), - resolve_url("file:///a/b/d/d.ts").unwrap(), - resolve_url("https://deno.land/x/a/b/c.ts").unwrap(), - ]; - assert_eq!( - get_relative_specifiers(&base, specifiers), - vec![ - "./d.ts".to_string(), - "../c/c.ts".to_string(), - "./d/d.ts".to_string(), - "https://deno.land/x/a/b/c.ts".to_string(), - ] - ); + document_modules } #[test] @@ -940,7 +916,7 @@ mod tests { } #[tokio::test] - async fn test_get_workspace_completions() { + async fn test_get_remote_completions() { let specifier = resolve_url("file:///a/b/c.ts").unwrap(); let range = lsp::Range { start: lsp::Position { @@ -952,7 +928,7 @@ mod tests { character: 21, }, }; - let documents = setup( + let document_modules = setup( &[ ( "file:///a/b/c.ts", @@ -964,7 +940,11 @@ mod tests { ], &[("https://deno.land/x/a/b/c.ts", "console.log(1);\n")], ); - let actual = 
get_workspace_completions(&specifier, "h", &range, &documents); + let module = document_modules + .module_for_specifier(&specifier, None) + .unwrap(); + let actual = + get_remote_completions(&module, "h", &range, &document_modules); assert_eq!( actual, vec![lsp::CompletionItem { diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index 4fcd82dcb0..11dbdf2f7c 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -54,16 +54,19 @@ use deno_resolver::workspace::WorkspaceResolver; use deno_runtime::deno_node::PackageJson; use indexmap::IndexSet; use lsp_types::ClientCapabilities; +use lsp_types::Uri; use tower_lsp::lsp_types as lsp; use super::logging::lsp_log; use super::lsp_custom; +use super::urls::uri_to_url; use super::urls::url_to_uri; use crate::args::CliLockfile; use crate::args::CliLockfileReadFromPathOptions; use crate::args::ConfigFile; use crate::args::LintFlags; use crate::args::LintOptions; +use crate::cache::DenoDir; use crate::file_fetcher::CliFileFetcher; use crate::lsp::logging::lsp_warn; use crate::sys::CliSys; @@ -821,20 +824,13 @@ impl WorkspaceSettings { #[derive(Debug, Default, Clone)] pub struct Settings { pub unscoped: Arc, - pub by_workspace_folder: - BTreeMap>>, - pub first_folder: Option, + pub by_workspace_folder: BTreeMap, Option>>, + pub first_folder: Option>, } impl Settings { - /// Returns `None` if the value should be deferred to the presence of a - /// `deno.json` file. - pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option { - let Ok(path) = url_to_file_path(specifier) else { - // Non-file URLs are not disabled by these settings. - return Some(true); - }; - let (settings, mut folder_uri) = self.get_for_specifier(specifier); + pub fn path_enabled(&self, path: &Path) -> Option { + let (settings, mut folder_uri) = self.get_for_path(path); folder_uri = folder_uri.or(self.first_folder.as_ref()); let mut disable_paths = vec![]; let mut enable_paths = None; @@ -859,7 +855,7 @@ impl Settings { } else if let Some(enable_paths) = &enable_paths { for enable_path in enable_paths { // Also enable if the checked path is a dir containing an enabled path. - if path.starts_with(enable_path) || enable_path.starts_with(&path) { + if path.starts_with(enable_path) || enable_path.starts_with(path) { return Some(true); } } @@ -869,17 +865,24 @@ impl Settings { } } + /// Returns `None` if the value should be deferred to the presence of a + /// `deno.json` file. + pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> Option { + let Ok(path) = url_to_file_path(specifier) else { + // Non-file URLs are not disabled by these settings. 
+ return Some(true); + }; + self.path_enabled(&path) + } + pub fn get_unscoped(&self) -> &WorkspaceSettings { &self.unscoped } - pub fn get_for_specifier( + pub fn get_for_path( &self, - specifier: &ModuleSpecifier, - ) -> (&WorkspaceSettings, Option<&ModuleSpecifier>) { - let Ok(path) = url_to_file_path(specifier) else { - return (&self.unscoped, self.first_folder.as_ref()); - }; + path: &Path, + ) -> (&WorkspaceSettings, Option<&Arc>) { for (folder_uri, settings) in self.by_workspace_folder.iter().rev() { if let Some(settings) = settings { let Ok(folder_path) = url_to_file_path(folder_uri) else { @@ -893,6 +896,23 @@ impl Settings { (&self.unscoped, self.first_folder.as_ref()) } + pub fn get_for_uri( + &self, + uri: &Uri, + ) -> (&WorkspaceSettings, Option<&Arc>) { + self.get_for_specifier(&uri_to_url(uri)) + } + + pub fn get_for_specifier( + &self, + specifier: &ModuleSpecifier, + ) -> (&WorkspaceSettings, Option<&Arc>) { + let Ok(path) = url_to_file_path(specifier) else { + return (&self.unscoped, self.first_folder.as_ref()); + }; + self.get_for_path(&path) + } + pub fn enable_settings_hash(&self) -> u64 { let mut hasher = FastInsecureHasher::new_without_deno_version(); let unscoped = self.get_unscoped(); @@ -917,7 +937,7 @@ impl Settings { pub struct Config { pub client_capabilities: Arc, pub settings: Arc, - pub workspace_folders: Arc>, + pub workspace_folders: Arc, lsp::WorkspaceFolder)>>, pub tree: ConfigTree, } @@ -933,7 +953,7 @@ impl Config { let name = root_url.path_segments().and_then(|s| s.last()); let name = name.unwrap_or_default().to_string(); folders.push(( - root_url, + Arc::new(root_url), lsp::WorkspaceFolder { uri: root_uri, name, @@ -946,7 +966,7 @@ impl Config { pub fn set_workspace_folders( &mut self, - folders: Vec<(ModuleSpecifier, lsp::WorkspaceFolder)>, + folders: Vec<(Arc, lsp::WorkspaceFolder)>, ) { self.settings = Arc::new(Settings { unscoped: self.settings.unscoped.clone(), @@ -962,7 +982,7 @@ impl Config { pub fn set_workspace_settings( &mut self, unscoped: WorkspaceSettings, - folder_settings: Vec<(ModuleSpecifier, WorkspaceSettings)>, + folder_settings: Vec<(Arc, WorkspaceSettings)>, ) { let mut by_folder = folder_settings.into_iter().collect::>(); self.settings = Arc::new(Settings { @@ -981,6 +1001,10 @@ impl Config { self.settings.get_unscoped() } + pub fn workspace_settings_for_uri(&self, uri: &Uri) -> &WorkspaceSettings { + self.settings.get_for_uri(uri).0 + } + pub fn workspace_settings_for_specifier( &self, specifier: &ModuleSpecifier, @@ -1034,15 +1058,32 @@ impl Config { || settings.inlay_hints.enum_member_values.enabled } - pub fn root_uri(&self) -> Option<&Url> { + pub fn root_url(&self) -> Option<&Arc> { self.workspace_folders.first().map(|p| &p.0) } + pub fn uri_enabled(&self, uri: &Uri) -> bool { + if uri.scheme().is_some_and(|s| s.eq_lowercase("deno")) { + return true; + } + self.specifier_enabled(&uri_to_url(uri)) + } + pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> bool { + if self.tree.in_global_npm_cache(specifier) { + return true; + } let data = self.tree.data_for_specifier(specifier); if let Some(data) = &data { if let Ok(path) = specifier.to_file_path() { - if data.exclude_files.matches_path(&path) { + // deno_config's exclusion checks exclude vendor dirs invariably. We + // don't want that behavior here. 
+ if data.exclude_files.matches_path(&path) + && !data + .vendor_dir + .as_ref() + .is_some_and(|p| path.starts_with(p)) + { return false; } } @@ -1221,7 +1262,7 @@ impl ConfigData { #[allow(clippy::too_many_arguments)] async fn load( specified_config: Option<&Path>, - scope: &ModuleSpecifier, + scope: &Arc, settings: &Settings, file_fetcher: &Arc, // sync requirement is because the lsp requires sync @@ -1229,7 +1270,7 @@ impl ConfigData { pkg_json_cache: &(dyn PackageJsonCache + Sync), workspace_cache: &(dyn WorkspaceCache + Sync), ) -> Self { - let scope = Arc::new(scope.clone()); + let scope = scope.clone(); let discover_result = match scope.to_file_path() { Ok(scope_dir_path) => { let paths = [scope_dir_path]; @@ -1723,14 +1764,12 @@ impl ConfigData { #[derive(Clone, Debug, Default)] pub struct ConfigTree { - scopes: Arc>>, + scopes: Arc, Arc>>, + global_npm_cache_url: Option>, } impl ConfigTree { - pub fn scope_for_specifier( - &self, - specifier: &ModuleSpecifier, - ) -> Option<&ModuleSpecifier> { + pub fn scope_for_specifier(&self, specifier: &Url) -> Option<&Arc> { self .scopes .iter() @@ -1747,15 +1786,13 @@ impl ConfigTree { .and_then(|s| self.scopes.get(s)) } - pub fn data_by_scope( - &self, - ) -> &Arc>> { + pub fn data_by_scope(&self) -> &Arc, Arc>> { &self.scopes } pub fn workspace_dir_for_specifier( &self, - specifier: &ModuleSpecifier, + specifier: &Url, ) -> Option<&WorkspaceDirectory> { self .data_for_specifier(specifier) @@ -1778,10 +1815,7 @@ impl ConfigTree { .collect() } - pub fn fmt_config_for_specifier( - &self, - specifier: &ModuleSpecifier, - ) -> Arc { + pub fn fmt_config_for_specifier(&self, specifier: &Url) -> Arc { self .data_for_specifier(specifier) .map(|d| d.fmt_config.clone()) @@ -1791,8 +1825,8 @@ impl ConfigTree { /// Returns (scope_url, type). 
pub fn watched_file_type( &self, - specifier: &ModuleSpecifier, - ) -> Option<(&ModuleSpecifier, ConfigWatchedFileType)> { + specifier: &Url, + ) -> Option<(&Arc, ConfigWatchedFileType)> { for (scope_url, data) in self.scopes.iter() { if let Some(typ) = data.watched_files.get(specifier) { return Some((scope_url, *typ)); @@ -1801,7 +1835,7 @@ impl ConfigTree { None } - pub fn is_watched_file(&self, specifier: &ModuleSpecifier) -> bool { + pub fn is_watched_file(&self, specifier: &Url) -> bool { let path = specifier.path(); if path.ends_with("/deno.json") || path.ends_with("/deno.jsonc") @@ -1856,14 +1890,29 @@ impl ConfigTree { }) }) .collect(); - lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams { data } + let deno_dir_npm_folder_uri = self + .global_npm_cache_url + .as_ref() + .and_then(|s| url_to_uri(s).ok()); + lsp_custom::DidRefreshDenoConfigurationTreeNotificationParams { + data, + deno_dir_npm_folder_uri, + } + } + + pub fn in_global_npm_cache(&self, url: &Url) -> bool { + self + .global_npm_cache_url + .as_ref() + .is_some_and(|s| url.as_str().starts_with(s.as_str())) } pub async fn refresh( &mut self, settings: &Settings, - workspace_files: &IndexSet, + workspace_files: &IndexSet, file_fetcher: &Arc, + deno_dir: &DenoDir, ) { lsp_log!("Refreshing configuration tree..."); // since we're resolving a workspace multiple times in different @@ -1873,24 +1922,24 @@ impl ConfigTree { let pkg_json_cache = PackageJsonMemCache::default(); let workspace_cache = WorkspaceMemCache::default(); let mut scopes = BTreeMap::new(); - for (folder_uri, ws_settings) in &settings.by_workspace_folder { + for (folder_url, ws_settings) in &settings.by_workspace_folder { let mut ws_settings = ws_settings.as_ref(); - if Some(folder_uri) == settings.first_folder.as_ref() { + if Some(folder_url) == settings.first_folder.as_ref() { ws_settings = ws_settings.or(Some(&settings.unscoped)); } if let Some(ws_settings) = ws_settings { let config_file_path = (|| { let config_setting = ws_settings.config.as_ref()?; - let config_uri = folder_uri.join(config_setting).ok()?; + let config_uri = folder_url.join(config_setting).ok()?; url_to_file_path(&config_uri).ok() })(); if config_file_path.is_some() || ws_settings.import_map.is_some() { scopes.insert( - folder_uri.clone(), + folder_url.clone(), Arc::new( ConfigData::load( config_file_path.as_deref(), - folder_uri, + folder_url, settings, file_fetcher, &deno_json_cache, @@ -1904,14 +1953,17 @@ impl ConfigTree { } } - for specifier in workspace_files { - if !(specifier.path().ends_with("/deno.json") - || specifier.path().ends_with("/deno.jsonc") - || specifier.path().ends_with("/package.json")) + for path in workspace_files { + let Ok(file_url) = Url::from_file_path(path) else { + continue; + }; + if !(file_url.path().ends_with("/deno.json") + || file_url.path().ends_with("/deno.jsonc") + || file_url.path().ends_with("/package.json")) { continue; } - let Ok(scope) = specifier.join(".") else { + let Ok(scope) = file_url.join(".").map(Arc::new) else { continue; }; if scopes.contains_key(&scope) { @@ -1944,11 +1996,15 @@ impl ConfigTree { &workspace_cache, ) .await; - scopes.insert(member_scope.as_ref().clone(), Arc::new(member_data)); + scopes.insert(member_scope.clone(), Arc::new(member_data)); } } self.scopes = Arc::new(scopes); + self.global_npm_cache_url = + Url::from_directory_path(deno_dir.npm_folder_path()) + .ok() + .map(Arc::new); } #[cfg(test)] @@ -1956,7 +2012,7 @@ impl ConfigTree { use sys_traits::FsCreateDirAll; use sys_traits::FsWrite; - let scope = 
config_file.specifier.join(".").unwrap(); + let scope = Arc::new(config_file.specifier.join(".").unwrap()); let json_text = serde_json::to_string(&config_file.json).unwrap(); let memory_sys = sys_traits::impls::InMemorySys::default(); let config_path = url_to_file_path(&config_file.specifier).unwrap(); @@ -1979,7 +2035,7 @@ impl ConfigTree { let data = Arc::new( ConfigData::load_inner( workspace_dir, - Arc::new(scope.clone()), + scope.clone(), &Default::default(), None, ) diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 44317a84da..d05200ca04 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -1,5 +1,6 @@ // Copyright 2018-2025 the Deno authors. MIT license. +use std::collections::BTreeMap; use std::collections::HashMap; use std::collections::HashSet; use std::path::PathBuf; @@ -13,7 +14,6 @@ use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::parking_lot::RwLock; -use deno_core::resolve_url; use deno_core::serde::Deserialize; use deno_core::serde_json; use deno_core::serde_json::json; @@ -36,6 +36,7 @@ use deno_semver::package::PackageReq; use import_map::ImportMap; use import_map::ImportMapErrorKind; use log::error; +use lsp_types::Uri; use tokio::sync::mpsc; use tokio::sync::Mutex as AsyncMutex; use tokio::time::Duration; @@ -45,19 +46,15 @@ use tower_lsp::lsp_types as lsp; use super::analysis; use super::client::Client; use super::config::Config; -use super::documents; use super::documents::Document; -use super::documents::Documents; -use super::documents::DocumentsFilter; +use super::documents::DocumentModule; use super::language_server; use super::language_server::StateSnapshot; use super::performance::Performance; use super::tsc; -use super::tsc::ScopedAmbientModules; +use super::tsc::MaybeAmbientModules; use super::tsc::TsServer; use super::urls::uri_parse_unencoded; -use super::urls::url_to_uri; -use super::urls::LspUrlMap; use crate::graph_util; use crate::graph_util::enhanced_resolution_error_message; use crate::lsp::logging::lsp_warn; @@ -69,21 +66,22 @@ use crate::tools::lint::LintRuleProvider; use crate::tsc::DiagnosticCategory; use crate::util::path::to_percent_decoded_str; +pub type ScopedAmbientModules = HashMap>, MaybeAmbientModules>; + #[derive(Debug)] pub struct DiagnosticServerUpdateMessage { pub snapshot: Arc, - pub url_map: LspUrlMap, } #[derive(Debug)] struct DiagnosticRecord { - pub specifier: ModuleSpecifier, + pub uri: Arc, pub versioned: VersionedDiagnostics, } #[derive(Clone, Default, Debug)] struct VersionedDiagnostics { - pub version: Option, + pub version: i32, pub diagnostics: Vec, } @@ -114,8 +112,7 @@ type DiagnosticsBySource = HashMap; struct DiagnosticsPublisher { client: Client, state: Arc, - diagnostics_by_specifier: - AsyncMutex>, + diagnostics_by_uri: AsyncMutex, DiagnosticsBySource>>, } impl DiagnosticsPublisher { @@ -123,7 +120,7 @@ impl DiagnosticsPublisher { Self { client, state, - diagnostics_by_specifier: Default::default(), + diagnostics_by_uri: Default::default(), } } @@ -131,12 +128,9 @@ impl DiagnosticsPublisher { &self, source: DiagnosticSource, diagnostics: DiagnosticVec, - url_map: &LspUrlMap, - documents: &Documents, token: &CancellationToken, ) -> usize { - let mut diagnostics_by_specifier = - self.diagnostics_by_specifier.lock().await; + let mut diagnostics_by_uri = self.diagnostics_by_uri.lock().await; let mut seen_specifiers = HashSet::with_capacity(diagnostics.len()); let mut messages_sent = 0; @@ -145,11 +139,10 @@ impl 
DiagnosticsPublisher { return messages_sent; } - seen_specifiers.insert(record.specifier.clone()); + seen_specifiers.insert(record.uri.clone()); - let diagnostics_by_source = diagnostics_by_specifier - .entry(record.specifier.clone()) - .or_default(); + let diagnostics_by_source = + diagnostics_by_uri.entry(record.uri.clone()).or_default(); let version = record.versioned.version; let source_diagnostics = diagnostics_by_source.entry(source).or_default(); *source_diagnostics = record.versioned; @@ -165,26 +158,22 @@ impl DiagnosticsPublisher { self .state - .update(&record.specifier, version, &all_specifier_diagnostics); - let file_referrer = documents.get_file_referrer(&record.specifier); - let Ok(uri) = - url_map.specifier_to_uri(&record.specifier, file_referrer.as_deref()) - else { - continue; - }; + .update(&record.uri, version, &all_specifier_diagnostics); self .client - .publish_diagnostics(uri, all_specifier_diagnostics, version) + .publish_diagnostics( + record.uri.as_ref().clone(), + all_specifier_diagnostics, + Some(version), + ) .await; messages_sent += 1; } // now check all the specifiers to clean up any ones with old diagnostics - let mut specifiers_to_remove = Vec::new(); - for (specifier, diagnostics_by_source) in - diagnostics_by_specifier.iter_mut() - { - if seen_specifiers.contains(specifier) { + let mut uris_to_remove = Vec::new(); + for (uri, diagnostics_by_source) in diagnostics_by_uri.iter_mut() { + if seen_specifiers.contains(uri) { continue; } if token.is_cancelled() { @@ -192,19 +181,17 @@ impl DiagnosticsPublisher { } let maybe_removed_value = diagnostics_by_source.remove(&source); if diagnostics_by_source.is_empty() { - specifiers_to_remove.push(specifier.clone()); + uris_to_remove.push(uri.clone()); if let Some(removed_value) = maybe_removed_value { // clear out any diagnostics for this specifier - self.state.update(specifier, removed_value.version, &[]); - let file_referrer = documents.get_file_referrer(specifier); - let Ok(uri) = - url_map.specifier_to_uri(specifier, file_referrer.as_deref()) - else { - continue; - }; + self.state.update(uri, removed_value.version, &[]); self .client - .publish_diagnostics(uri, Vec::new(), removed_value.version) + .publish_diagnostics( + uri.as_ref().clone(), + Vec::new(), + Some(removed_value.version), + ) .await; messages_sent += 1; } @@ -212,20 +199,20 @@ impl DiagnosticsPublisher { } // clean up specifiers with no diagnostics - for specifier in specifiers_to_remove { - diagnostics_by_specifier.remove(&specifier); + for specifier in uris_to_remove { + diagnostics_by_uri.remove(&specifier); } messages_sent } pub async fn clear(&self) { - let mut all_diagnostics = self.diagnostics_by_specifier.lock().await; + let mut all_diagnostics = self.diagnostics_by_uri.lock().await; all_diagnostics.clear(); } } -type DiagnosticMap = HashMap; +type DiagnosticMap = HashMap, VersionedDiagnostics>; #[derive(Clone, Default, Debug)] struct TsDiagnosticsStore(Arc>); @@ -233,23 +220,23 @@ struct TsDiagnosticsStore(Arc>); impl TsDiagnosticsStore { pub fn get( &self, - specifier: &ModuleSpecifier, + uri: &Uri, document_version: Option, ) -> Vec { let ts_diagnostics = self.0.lock(); - if let Some(versioned) = ts_diagnostics.get(specifier) { + if let Some(versioned) = ts_diagnostics.get(uri) { // only get the diagnostics if they're up to date - if document_version == versioned.version { + if document_version == Some(versioned.version) { return versioned.diagnostics.clone(); } } Vec::new() } - pub fn invalidate(&self, specifiers: &[ModuleSpecifier]) { 
+ pub fn invalidate(&self, uris: &[&Uri]) { let mut ts_diagnostics = self.0.lock(); - for specifier in specifiers { - ts_diagnostics.remove(specifier); + for uri in uris { + ts_diagnostics.remove(*uri); } } @@ -261,7 +248,7 @@ impl TsDiagnosticsStore { let mut stored_ts_diagnostics = self.0.lock(); *stored_ts_diagnostics = diagnostics .iter() - .map(|record| (record.specifier.clone(), record.versioned.clone())) + .map(|record| (record.uri.clone(), record.versioned.clone())) .collect(); } } @@ -314,28 +301,24 @@ struct ChannelUpdateMessage { } #[derive(Debug)] -struct SpecifierState { - version: Option, +struct DocumentDiagnosticsState { + version: i32, no_cache_diagnostics: Vec, } #[derive(Debug, Default)] pub struct DiagnosticsState { - specifiers: RwLock>, + documents: RwLock>, } impl DiagnosticsState { - fn update( - &self, - specifier: &ModuleSpecifier, - version: Option, - diagnostics: &[lsp::Diagnostic], - ) { - let mut specifiers = self.specifiers.write(); - let current_version = specifiers.get(specifier).and_then(|s| s.version); - match (version, current_version) { - (Some(arg), Some(existing)) if arg < existing => return, - _ => {} + fn update(&self, uri: &Uri, version: i32, diagnostics: &[lsp::Diagnostic]) { + let mut specifiers = self.documents.write(); + let current_version = specifiers.get(uri).map(|s| s.version); + if let Some(current_version) = current_version { + if version < current_version { + return; + } } let mut no_cache_diagnostics = vec![]; for diagnostic in diagnostics { @@ -350,35 +333,32 @@ impl DiagnosticsState { } } specifiers.insert( - specifier.clone(), - SpecifierState { + uri.clone(), + DocumentDiagnosticsState { version, no_cache_diagnostics, }, ); } - pub fn clear(&self, specifier: &ModuleSpecifier) { - self.specifiers.write().remove(specifier); + pub fn clear(&self, uri: &Uri) { + self.documents.write().remove(uri); } - pub fn has_no_cache_diagnostics(&self, specifier: &ModuleSpecifier) -> bool { + pub fn has_no_cache_diagnostics(&self, uri: &Uri) -> bool { self - .specifiers + .documents .read() - .get(specifier) + .get(uri) .map(|s| !s.no_cache_diagnostics.is_empty()) .unwrap_or(false) } - pub fn no_cache_diagnostics( - &self, - specifier: &ModuleSpecifier, - ) -> Vec { + pub fn no_cache_diagnostics(&self, uri: &Uri) -> Vec { self - .specifiers + .documents .read() - .get(specifier) + .get(uri) .map(|s| s.no_cache_diagnostics.clone()) .unwrap_or_default() } @@ -393,18 +373,19 @@ struct AmbientModules { #[derive(Debug, Default)] struct DeferredDiagnostics { diagnostics: Option>, - ambient_modules_by_scope: HashMap, AmbientModules>, + ambient_modules_by_scope: HashMap>, AmbientModules>, } impl DeferredDiagnostics { - fn invalidate(&mut self, specifiers: &[ModuleSpecifier]) { + fn invalidate(&mut self, uris: &[&Uri]) { if let Some(diagnostics) = &mut self.diagnostics { - diagnostics.retain(|d| !specifiers.contains(&d.document_specifier)); + diagnostics.retain(|d| !uris.contains(&d.uri.as_ref())); } for ambient in self.ambient_modules_by_scope.values_mut() { ambient.dirty = true; } } + fn invalidate_all(&mut self) { self.diagnostics = None; for ambient in self.ambient_modules_by_scope.values_mut() { @@ -450,7 +431,7 @@ impl DeferredDiagnostics { diagnostic.diagnostics.into_iter().map(|d| d.1).collect() }; DiagnosticRecord { - specifier: diagnostic.document_specifier, + uri: diagnostic.uri, versioned: VersionedDiagnostics { version: diagnostic.version, diagnostics: filtered, @@ -544,15 +525,15 @@ impl DiagnosticsServer { pub fn get_ts_diagnostics( &self, - 
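// Illustrative sketch (not the actual implementation) of the monotonic version
// rule in DiagnosticsState::update above: a publish carrying an older document
// version than the one already recorded is ignored. The real code additionally
// extracts "no-cache" diagnostics from the published list.
use std::collections::HashMap;

#[derive(Default)]
struct DiagnosticsState {
  versions: HashMap<String, i32>,
}

impl DiagnosticsState {
  /// Returns whether the update was accepted.
  fn update(&mut self, uri: &str, version: i32) -> bool {
    if let Some(&current) = self.versions.get(uri) {
      if version < current {
        return false; // stale update, keep the newer state
      }
    }
    self.versions.insert(uri.to_string(), version);
    true
  }
}

fn main() {
  let mut state = DiagnosticsState::default();
  assert!(state.update("file:///a.ts", 3));
  assert!(!state.update("file:///a.ts", 2)); // an older version is dropped
  assert!(state.update("file:///a.ts", 3)); // the same version is accepted
}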
specifier: &ModuleSpecifier, + uri: &Uri, document_version: Option, ) -> Vec { - self.ts_diagnostics.get(specifier, document_version) + self.ts_diagnostics.get(uri, document_version) } - pub fn invalidate(&self, specifiers: &[ModuleSpecifier]) { - self.ts_diagnostics.invalidate(specifiers); - self.deferred_diagnostics.lock().invalidate(specifiers); + pub fn invalidate(&self, uris: &[&Uri]) { + self.ts_diagnostics.invalidate(uris); + self.deferred_diagnostics.lock().invalidate(uris); } pub fn invalidate_all(&self) { @@ -600,10 +581,9 @@ impl DiagnosticsServer { } }; let ChannelUpdateMessage { - message: DiagnosticServerUpdateMessage { snapshot, url_map }, + message: DiagnosticServerUpdateMessage { snapshot }, batch_index, } = message; - let url_map = Arc::new(url_map); // cancel the previous run token.cancel(); @@ -618,7 +598,6 @@ impl DiagnosticsServer { let ts_diagnostics_store = ts_diagnostics_store.clone(); let snapshot = snapshot.clone(); let config = snapshot.config.clone(); - let url_map = url_map.clone(); let deferred_diagnostics_state = deferred_diagnostics_state.clone(); async move { @@ -675,21 +654,13 @@ impl DiagnosticsServer { .publish( DiagnosticSource::DeferredDeno, deferred, - &url_map, - snapshot.documents.as_ref(), &token, ) .await; } } messages_len += diagnostics_publisher - .publish( - DiagnosticSource::Ts, - diagnostics, - &url_map, - snapshot.documents.as_ref(), - &token, - ) + .publish(DiagnosticSource::Ts, diagnostics, &token) .await; if !token.is_cancelled() { @@ -717,7 +688,6 @@ impl DiagnosticsServer { let token = token.clone(); let snapshot = snapshot.clone(); let config = snapshot.config.clone(); - let url_map = url_map.clone(); let deferred_diagnostics_state = deferred_diagnostics_state.clone(); async move { @@ -747,8 +717,6 @@ impl DiagnosticsServer { .publish( DiagnosticSource::DeferredDeno, deferred, - &url_map, - snapshot.documents.as_ref(), &token, ) .await; @@ -756,13 +724,7 @@ impl DiagnosticsServer { } messages_len += diagnostics_publisher - .publish( - DiagnosticSource::Deno, - diagnostics, - &url_map, - snapshot.documents.as_ref(), - &token, - ) + .publish(DiagnosticSource::Deno, diagnostics, &token) .await; if !token.is_cancelled() { @@ -790,7 +752,6 @@ impl DiagnosticsServer { let token = token.clone(); let snapshot = snapshot.clone(); let config = snapshot.config.clone(); - let url_map = url_map.clone(); async move { if let Some(previous_handle) = previous_lint_handle { previous_handle.await; @@ -807,13 +768,7 @@ impl DiagnosticsServer { let mut messages_len = 0; if !token.is_cancelled() { messages_len = diagnostics_publisher - .publish( - DiagnosticSource::Lint, - diagnostics, - &url_map, - snapshot.documents.as_ref(), - &token, - ) + .publish(DiagnosticSource::Lint, diagnostics, &token) .await; if !token.is_cancelled() { @@ -974,20 +929,22 @@ fn generate_lint_diagnostics( config: &Config, token: CancellationToken, ) -> DiagnosticVec { - let documents = snapshot - .documents - .documents(DocumentsFilter::OpenDiagnosable); let config_data_by_scope = config.tree.data_by_scope(); - let mut diagnostics_vec = Vec::new(); - for document in documents { - let specifier = document.specifier(); - if specifier.scheme() != "file" { + let mut records = Vec::new(); + for document in snapshot.document_modules.documents.open_docs() { + let Some(module) = snapshot + .document_modules + .primary_module(&Document::Open(document.clone())) + else { + continue; + }; + if module.specifier.scheme() != "file" { continue; } - if !config.specifier_enabled(specifier) { + if 
!config.specifier_enabled(&module.specifier) { continue; } - let settings = config.workspace_settings_for_specifier(specifier); + let settings = config.workspace_settings_for_specifier(&module.specifier); if !settings.lint { continue; } @@ -996,13 +953,12 @@ fn generate_lint_diagnostics( break; } // ignore any npm package files - if snapshot.resolver.in_node_modules(specifier) { + if snapshot.resolver.in_node_modules(&module.specifier) { continue; } - let version = document.maybe_lsp_version(); - let (lint_config, linter) = config - .tree - .scope_for_specifier(specifier) + let (lint_config, linter) = module + .scope + .as_ref() .and_then(|s| config_data_by_scope.get(s)) .map(|d| (d.lint_config.clone(), d.linter.clone())) .unwrap_or_else(|| { @@ -1023,12 +979,12 @@ fn generate_lint_diagnostics( })), ) }); - diagnostics_vec.push(DiagnosticRecord { - specifier: specifier.clone(), + records.push(DiagnosticRecord { + uri: document.uri.clone(), versioned: VersionedDiagnostics { - version, + version: document.version, diagnostics: generate_document_lint_diagnostics( - &document, + &module, &lint_config, &linter, token.clone(), @@ -1036,19 +992,25 @@ fn generate_lint_diagnostics( }, }); } - diagnostics_vec + records } fn generate_document_lint_diagnostics( - document: &Document, + module: &DocumentModule, lint_config: &LintConfig, linter: &CliLinter, token: CancellationToken, ) -> Vec { - if !lint_config.files.matches_specifier(document.specifier()) { + if !module.is_diagnosable() + || !lint_config.files.matches_specifier(&module.specifier) + { return Vec::new(); } - match document.maybe_parsed_source() { + match &module + .open_data + .as_ref() + .and_then(|d| d.parsed_source.as_ref()) + { Some(Ok(parsed_source)) => { if let Ok(references) = analysis::get_lint_references(parsed_source, linter, token) @@ -1063,7 +1025,7 @@ fn generate_document_lint_diagnostics( } Some(Err(_)) => Vec::new(), None => { - error!("Missing file contents for: {}", document.specifier()); + error!("Missing file contents for: {}", &module.specifier); Vec::new() } } @@ -1075,32 +1037,58 @@ async fn generate_ts_diagnostics( ts_server: &tsc::TsServer, token: CancellationToken, ) -> Result<(DiagnosticVec, ScopedAmbientModules), AnyError> { - let mut diagnostics_vec = Vec::new(); - let specifiers = snapshot - .documents - .documents(DocumentsFilter::OpenDiagnosable) - .into_iter() - .map(|d| d.specifier().clone()); - let (enabled_specifiers, disabled_specifiers) = specifiers - .into_iter() - .partition::, _>(|s| config.specifier_enabled(s)); - let (ts_diagnostics_map, ambient_modules_by_scope) = - if !enabled_specifiers.is_empty() { - ts_server - .get_diagnostics(snapshot.clone(), enabled_specifiers, &token) - .await? 
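// Illustrative sketch of the filtering done in generate_lint_diagnostics above:
// walk the open documents, look up each one's primary module, and skip anything
// that is not a local, lint-enabled file. All names here are simplified
// stand-ins for the real Document/DocumentModule types.
struct Module {
  specifier: String, // e.g. "file:///src/main.ts"
  lint_enabled: bool,
}

struct OpenDoc {
  uri: String,
  version: i32,
}

fn primary_module(doc: &OpenDoc) -> Option<Module> {
  // Stand-in for DocumentModules::primary_module().
  Some(Module { specifier: doc.uri.clone(), lint_enabled: true })
}

fn lintable_modules(open_docs: &[OpenDoc]) -> Vec<(i32, Module)> {
  let mut out = Vec::new();
  for doc in open_docs {
    let Some(module) = primary_module(doc) else { continue };
    if !module.specifier.starts_with("file://") {
      continue; // only lint local files
    }
    if !module.lint_enabled {
      continue; // respect per-specifier settings
    }
    out.push((doc.version, module));
  }
  out
}

fn main() {
  let docs = vec![
    OpenDoc { uri: "file:///src/a.ts".into(), version: 1 },
    OpenDoc { uri: "https://deno.land/x/b.ts".into(), version: 1 },
  ];
  assert_eq!(lintable_modules(&docs).len(), 1); // the remote doc is skipped
}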
- } else { - Default::default() - }; - for (specifier_str, mut ts_json_diagnostics) in ts_diagnostics_map { - let specifier = resolve_url(&specifier_str)?; + let mut records = Vec::new(); + let mut ambient_modules_by_scope = HashMap::new(); + let mut enabled_modules_by_scope = BTreeMap::<_, Vec<_>>::new(); + let mut disabled_documents = Vec::new(); + for document in snapshot.document_modules.documents.open_docs() { + if let Some(module) = snapshot + .document_modules + .primary_module(&Document::Open(document.clone())) + { + if config.specifier_enabled(&module.specifier) { + enabled_modules_by_scope + .entry(module.scope.clone()) + .or_default() + .push(module); + continue; + } + } + disabled_documents.push(document.clone()); + } + // add an empty diagnostic publish for disabled documents in order + // to clear those diagnostics if they exist + for document in disabled_documents { + records.push(DiagnosticRecord { + uri: document.uri.clone(), + versioned: VersionedDiagnostics { + version: document.version, + diagnostics: Vec::new(), + }, + }); + } + let mut enabled_modules_with_diagnostics = Vec::new(); + for (scope, enabled_modules) in enabled_modules_by_scope { + let (diagnostics_list, ambient_modules) = ts_server + .get_diagnostics( + snapshot.clone(), + enabled_modules.iter().map(|m| m.specifier.as_ref()), + scope.as_ref(), + &token, + ) + .await?; + enabled_modules_with_diagnostics + .extend(enabled_modules.into_iter().zip(diagnostics_list)); + ambient_modules_by_scope.insert(scope.clone(), ambient_modules); + } + for (module, mut diagnostics) in enabled_modules_with_diagnostics { let suggestion_actions_settings = snapshot .config - .language_settings_for_specifier(&specifier) + .language_settings_for_specifier(&module.specifier) .map(|s| s.suggestion_actions.clone()) .unwrap_or_default(); if !suggestion_actions_settings.enabled { - ts_json_diagnostics.retain(|d| { + diagnostics.retain(|d| { d.category != DiagnosticCategory::Suggestion // Still show deprecated and unused diagnostics. 
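// Illustrative sketch of the per-scope batching in generate_ts_diagnostics
// above: enabled modules are grouped by scope, the TS server is queried once per
// scope, and results are zipped back with the modules that produced them.
// The TS server call is mocked as a closure here.
use std::collections::BTreeMap;

type Scope = Option<String>;

fn diagnostics_per_scope(
  modules: Vec<(Scope, String /* specifier */)>,
  query: impl Fn(&Scope, &[String]) -> Vec<Vec<String>>,
) -> Vec<(String, Vec<String>)> {
  let mut by_scope: BTreeMap<Scope, Vec<String>> = BTreeMap::new();
  for (scope, specifier) in modules {
    by_scope.entry(scope).or_default().push(specifier);
  }
  let mut out = Vec::new();
  for (scope, specifiers) in by_scope {
    let results = query(&scope, &specifiers); // one round trip per scope
    out.extend(specifiers.into_iter().zip(results));
  }
  out
}

fn main() {
  let modules = vec![
    (Some("file:///a/".to_string()), "file:///a/mod.ts".to_string()),
    (None, "file:///b.ts".to_string()),
  ];
  let results = diagnostics_per_scope(modules, |_scope, specifiers| {
    specifiers.iter().map(|_| vec!["TS0000".to_string()]).collect()
  });
  assert_eq!(results.len(), 2);
}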
// https://github.com/microsoft/vscode/blob/ce50bd4876af457f64d83cfd956bc916535285f4/extensions/typescript-language-features/src/languageFeatures/diagnostics.ts#L113-L114 @@ -1108,41 +1096,16 @@ async fn generate_ts_diagnostics( || d.reports_unnecessary == Some(true) }); } - let version = snapshot - .documents - .get(&specifier) - .and_then(|d| d.maybe_lsp_version()); - // check if the specifier is enabled again just in case TS returns us - // diagnostics for a disabled specifier - let ts_diagnostics = if config.specifier_enabled(&specifier) { - ts_json_to_diagnostics(ts_json_diagnostics) - } else { - Vec::new() - }; - diagnostics_vec.push(DiagnosticRecord { - specifier, + let diagnostics = ts_json_to_diagnostics(diagnostics); + records.push(DiagnosticRecord { + uri: module.uri.clone(), versioned: VersionedDiagnostics { - version, - diagnostics: ts_diagnostics, + version: module.open_data.as_ref().unwrap().version, + diagnostics, }, }); } - // add an empty diagnostic publish for disabled specifiers in order - // to clear those diagnostics if they exist - for specifier in disabled_specifiers { - let version = snapshot - .documents - .get(&specifier) - .and_then(|d| d.maybe_lsp_version()); - diagnostics_vec.push(DiagnosticRecord { - specifier, - versioned: VersionedDiagnostics { - version, - diagnostics: Vec::new(), - }, - }); - } - Ok((diagnostics_vec, ambient_modules_by_scope)) + Ok((records, ambient_modules_by_scope)) } #[derive(Debug, Deserialize)] @@ -1253,6 +1216,7 @@ impl DenoDiagnostic { /// A "static" method which for a diagnostic that originated from the /// structure returns a code action which can resolve the diagnostic. pub fn get_code_action( + uri: &Uri, specifier: &ModuleSpecifier, diagnostic: &lsp::Diagnostic, ) -> Result { @@ -1271,7 +1235,7 @@ impl DenoDiagnostic { diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp::WorkspaceEdit { changes: Some(HashMap::from([( - url_to_uri(specifier)?, + uri.clone(), vec![lsp::TextEdit { new_text: format!("\"{to}\""), range: diagnostic.range, @@ -1288,7 +1252,7 @@ impl DenoDiagnostic { diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp::WorkspaceEdit { changes: Some(HashMap::from([( - url_to_uri(specifier)?, + uri.clone(), vec![lsp::TextEdit { new_text: " with { type: \"json\" }".to_string(), range: lsp::Range { @@ -1339,7 +1303,7 @@ impl DenoDiagnostic { diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp::WorkspaceEdit { changes: Some(HashMap::from([( - url_to_uri(specifier)?, + uri.clone(), vec![lsp::TextEdit { new_text: format!( "\"{}\"", @@ -1365,7 +1329,7 @@ impl DenoDiagnostic { diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp::WorkspaceEdit { changes: Some(HashMap::from([( - url_to_uri(specifier)?, + uri.clone(), vec![lsp::TextEdit { new_text: format!( "\"{}\"", @@ -1391,7 +1355,7 @@ impl DenoDiagnostic { diagnostics: Some(vec![diagnostic.clone()]), edit: Some(lsp::WorkspaceEdit { changes: Some(HashMap::from([( - url_to_uri(specifier)?, + uri.clone(), vec![lsp::TextEdit { new_text: format!("\"node:{}\"", data.specifier), range: diagnostic.range, @@ -1591,25 +1555,24 @@ fn diagnose_resolution( resolution: &Resolution, is_dynamic: bool, maybe_assert_type: Option<&str>, - referrer_doc: &Document, + referrer_module: &DocumentModule, import_map: Option<&ImportMap>, ) -> (Vec, Vec) { fn check_redirect_diagnostic( specifier: &ModuleSpecifier, - doc: &Document, + module: &DocumentModule, ) -> Option { - let doc_specifier = doc.specifier(); // If the module was redirected, we want to issue an 
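// Illustrative sketch of the suggestion filter applied above when suggestion
// actions are disabled: plain suggestions are dropped, but deprecation and
// "unnecessary code" hints are kept so the editor can still render them
// (strikethrough / faded) without offering an action. Types are simplified.
#[derive(PartialEq)]
enum Category {
  Error,
  Suggestion,
}

struct Diagnostic {
  category: Category,
  reports_deprecated: Option<bool>,
  reports_unnecessary: Option<bool>,
}

fn filter_suggestions(mut diagnostics: Vec<Diagnostic>) -> Vec<Diagnostic> {
  diagnostics.retain(|d| {
    d.category != Category::Suggestion
      || d.reports_deprecated == Some(true)
      || d.reports_unnecessary == Some(true)
  });
  diagnostics
}

fn main() {
  let kept = filter_suggestions(vec![
    Diagnostic { category: Category::Suggestion, reports_deprecated: None, reports_unnecessary: None },
    Diagnostic { category: Category::Suggestion, reports_deprecated: Some(true), reports_unnecessary: None },
    Diagnostic { category: Category::Error, reports_deprecated: None, reports_unnecessary: None },
  ]);
  assert_eq!(kept.len(), 2); // only the plain suggestion was removed
}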
informational // diagnostic that indicates this. This then allows us to issue a code // action to replace the specifier with the final redirected one. - if specifier.scheme() == "jsr" || doc_specifier == specifier { + if specifier.scheme() == "jsr" || specifier == module.specifier.as_ref() { return None; } // don't bother warning about sloppy import redirects from .js to .d.ts // because explaining how to fix this via a diagnostic involves using // @ts-types and that's a bit complicated to explain - let is_sloppy_import_dts_redirect = doc_specifier.scheme() == "file" - && doc.media_type().is_declaration() + let is_sloppy_import_dts_redirect = module.specifier.scheme() == "file" + && module.media_type.is_declaration() && !MediaType::from_specifier(specifier).is_declaration(); if is_sloppy_import_dts_redirect { return None; @@ -1617,7 +1580,7 @@ fn diagnose_resolution( Some(DenoDiagnostic::Redirect { from: specifier.clone(), - to: doc_specifier.clone(), + to: module.specifier.as_ref().clone(), }) } @@ -1625,30 +1588,32 @@ fn diagnose_resolution( let mut deferred_diagnostics = vec![]; match resolution { Resolution::Ok(resolved) => { - let file_referrer = referrer_doc.file_referrer(); let specifier = &resolved.specifier; - let managed_npm_resolver = - snapshot.resolver.maybe_managed_npm_resolver(file_referrer); + let managed_npm_resolver = snapshot + .resolver + .maybe_managed_npm_resolver(referrer_module.scope.as_deref()); for (_, headers) in snapshot .resolver - .redirect_chain_headers(specifier, file_referrer) + .redirect_chain_headers(specifier, referrer_module.scope.as_deref()) { if let Some(message) = headers.get("x-deno-warning") { diagnostics.push(DenoDiagnostic::DenoWarn(message.clone())); } } - if let Some(doc) = - snapshot.documents.get_or_load(specifier, file_referrer) + if let Some(module) = snapshot + .document_modules + .module_for_specifier(specifier, referrer_module.scope.as_deref()) { - if let Some(headers) = doc.maybe_headers() { + if let Some(headers) = &module.headers { if let Some(message) = headers.get("x-deno-warning") { diagnostics.push(DenoDiagnostic::DenoWarn(message.clone())); } } - if let Some(diagnostic) = check_redirect_diagnostic(specifier, &doc) { + if let Some(diagnostic) = check_redirect_diagnostic(specifier, &module) + { diagnostics.push(diagnostic); } - if doc.media_type() == MediaType::Json { + if module.media_type == MediaType::Json { match maybe_assert_type { // The module has the correct assertion type, no diagnostic Some("json") => (), @@ -1745,7 +1710,7 @@ fn diagnose_dependency( diagnostics: &mut Vec, deferred_diagnostics: &mut Vec<(String, lsp::Diagnostic)>, snapshot: &language_server::StateSnapshot, - referrer_doc: &Document, + referrer_module: &DocumentModule, dependency_key: &str, dependency: &deno_graph::Dependency, ) { @@ -1782,32 +1747,36 @@ fn diagnose_dependency( None } - let referrer = referrer_doc.specifier(); - if snapshot.resolver.in_node_modules(referrer) { + if snapshot + .resolver + .in_node_modules(&referrer_module.specifier) + { return; // ignore, surface typescript errors instead } - let import_map = snapshot - .config - .tree - .data_for_specifier(referrer_doc.file_referrer().unwrap_or(referrer)) - .and_then(|d| d.resolver.maybe_import_map()); + let config_data = referrer_module + .scope + .as_ref() + .and_then(|s| snapshot.config.tree.data_for_specifier(s)); + let import_map = config_data.and_then(|d| d.resolver.maybe_import_map()); if let Some(import_map) = import_map { let resolved = dependency .maybe_code .ok() .or_else(|| 
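// Illustrative sketch of check_redirect_diagnostic above: a redirect diagnostic
// is only produced when the resolved module landed on a different specifier,
// the import is not a jsr: specifier, and it is not a sloppy-imports .js -> .d.ts
// redirect. The real check uses MediaType; suffix checks stand in here.
fn should_report_redirect(requested: &str, resolved: &str) -> bool {
  if requested.starts_with("jsr:") || requested == resolved {
    return false;
  }
  // Don't warn when a local .js import was redirected to its .d.ts file;
  // explaining the @ts-types fix via a diagnostic is more confusing than helpful.
  let sloppy_dts = resolved.starts_with("file://")
    && resolved.ends_with(".d.ts")
    && !requested.ends_with(".d.ts");
  !sloppy_dts
}

fn main() {
  assert!(should_report_redirect(
    "https://example.com/mod.ts",
    "https://example.com/v1/mod.ts"
  ));
  assert!(!should_report_redirect(
    "jsr:@std/assert",
    "https://jsr.io/@std/assert/1.0.0/mod.ts"
  ));
  assert!(!should_report_redirect("file:///a/mod.js", "file:///a/mod.d.ts"));
}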
dependency.maybe_type.ok()); if let Some(resolved) = resolved { - if let Some(to) = - import_map_lookup(import_map, &resolved.specifier, referrer) - { + if let Some(to) = import_map_lookup( + import_map, + &resolved.specifier, + &referrer_module.specifier, + ) { if dependency_key != to { diagnostics.push( DenoDiagnostic::ImportMapRemap { from: dependency_key.to_string(), to, } - .to_lsp_diagnostic(&documents::to_lsp_range(&resolved.range)), + .to_lsp_diagnostic(&language_server::to_lsp_range(&resolved.range)), ); } } @@ -1817,7 +1786,7 @@ fn diagnose_dependency( let import_ranges: Vec<_> = dependency .imports .iter() - .map(|i| documents::to_lsp_range(&i.specifier_range)) + .map(|i| language_server::to_lsp_range(&i.specifier_range)) .collect(); // TODO(nayeemrmn): This is a crude way of detecting `@ts-types` which has // a different specifier and therefore needs a separate call to @@ -1848,7 +1817,7 @@ fn diagnose_dependency( }, dependency.is_dynamic, dependency.maybe_attribute_type.as_deref(), - referrer_doc, + referrer_module, import_map, ); diagnostics.extend(resolution_diagnostics.iter().flat_map(|diag| { @@ -1871,8 +1840,10 @@ fn diagnose_dependency( if is_types_deno_types { let range = match &dependency.maybe_type { - Resolution::Ok(resolved) => documents::to_lsp_range(&resolved.range), - Resolution::Err(error) => documents::to_lsp_range(error.range()), + Resolution::Ok(resolved) => { + language_server::to_lsp_range(&resolved.range) + } + Resolution::Err(error) => language_server::to_lsp_range(error.range()), Resolution::None => unreachable!(), }; let (resolution_diagnostics, deferred) = diagnose_resolution( @@ -1881,7 +1852,7 @@ fn diagnose_dependency( &dependency.maybe_type, dependency.is_dynamic, dependency.maybe_attribute_type.as_deref(), - referrer_doc, + referrer_module, import_map, ); diagnostics.extend( @@ -1898,9 +1869,9 @@ fn diagnose_dependency( #[derive(Debug)] struct DeferredDiagnosticRecord { - document_specifier: Url, - version: Option, - scope: Option, + uri: Arc, + version: i32, + scope: Option>, diagnostics: Vec<(String, lsp::Diagnostic)>, } @@ -1914,46 +1885,44 @@ fn generate_deno_diagnostics( ) -> (DiagnosticVec, Vec) { let mut diagnostics_vec = Vec::new(); let mut deferred_diagnostics = Vec::new(); - for document in snapshot - .documents - .documents(DocumentsFilter::OpenDiagnosable) - { + for document in snapshot.document_modules.documents.open_docs() { if token.is_cancelled() { break; } + if !document.is_diagnosable() { + continue; + } + let Some(module) = snapshot + .document_modules + .primary_module(&Document::Open(document.clone())) + else { + continue; + }; let mut diagnostics = Vec::new(); let mut deferred = Vec::new(); - let specifier = document.specifier(); - if config.specifier_enabled(specifier) { - for (dependency_key, dependency) in document.dependencies() { + if config.specifier_enabled(&module.specifier) { + for (dependency_key, dependency) in module.dependencies.iter() { diagnose_dependency( &mut diagnostics, &mut deferred, snapshot, - &document, + &module, dependency_key, dependency, ); } } diagnostics_vec.push(DiagnosticRecord { - specifier: specifier.clone(), + uri: document.uri.clone(), versioned: VersionedDiagnostics { - version: document.maybe_lsp_version(), + version: document.version, diagnostics, }, }); deferred_diagnostics.push(DeferredDiagnosticRecord { - document_specifier: specifier.clone(), - scope: if snapshot.documents.is_valid_file_referrer(specifier) { - snapshot.config.tree.scope_for_specifier(specifier).cloned() - } else { - 
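// Illustrative sketch of the import-map remap check above: when the import map
// could express the resolved specifier under a mapped key that differs from the
// specifier written in the source, an ImportMapRemap diagnostic is suggested.
// A plain prefix match stands in for the real import_map crate lookup.
use std::collections::BTreeMap;

fn import_map_lookup(
  import_map: &BTreeMap<String, String>,
  resolved: &str,
) -> Option<String> {
  import_map.iter().find_map(|(key, value)| {
    resolved
      .strip_prefix(value.as_str())
      .map(|rest| format!("{key}{rest}"))
  })
}

fn main() {
  let import_map = BTreeMap::from([(
    "$std/".to_string(),
    "https://deno.land/std/".to_string(),
  )]);
  let written = "https://deno.land/std/assert/mod.ts";
  let suggested = import_map_lookup(&import_map, written).unwrap();
  assert_eq!(suggested, "$std/assert/mod.ts");
  // A remap diagnostic is only emitted when the suggestion differs from what
  // was written in the source.
  assert_ne!(suggested, written);
}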
snapshot - .documents - .get(specifier) - .and_then(|d| d.scope().cloned()) - }, - version: document.maybe_lsp_version(), + uri: document.uri.clone(), + scope: module.scope.clone(), + version: document.version, diagnostics: deferred, }); } @@ -1963,9 +1932,11 @@ fn generate_deno_diagnostics( #[cfg(test)] mod tests { + use std::str::FromStr; use std::sync::Arc; use deno_config::deno_json::ConfigFile; + use deno_core::resolve_url; use pretty_assertions::assert_eq; use test_util::TempDir; @@ -1974,13 +1945,15 @@ mod tests { use crate::lsp::config::Config; use crate::lsp::config::Settings; use crate::lsp::config::WorkspaceSettings; - use crate::lsp::documents::Documents; + use crate::lsp::documents::DocumentModules; use crate::lsp::documents::LanguageId; use crate::lsp::language_server::StateSnapshot; use crate::lsp::resolver::LspResolver; + use crate::lsp::urls::uri_to_url; + use crate::lsp::urls::url_to_uri; fn mock_config() -> Config { - let root_url = resolve_url("file:///").unwrap(); + let root_url = Arc::new(resolve_url("file:///").unwrap()); let root_uri = url_to_uri(&root_url).unwrap(); Config { settings: Arc::new(Settings { @@ -2007,33 +1980,37 @@ mod tests { maybe_import_map: Option<(&str, &str)>, ) -> (TempDir, StateSnapshot) { let temp_dir = TempDir::new(); - let root_uri = temp_dir.url(); - let cache = LspCache::new(Some(root_uri.join(".deno_dir").unwrap())); - let mut config = Config::new_with_roots([root_uri.clone()]); + let root_url = temp_dir.url(); + let cache = LspCache::new(Some(root_url.join(".deno_dir").unwrap())); + let mut config = Config::new_with_roots([root_url.clone()]); if let Some((relative_path, json_string)) = maybe_import_map { - let base_url = root_uri.join(relative_path).unwrap(); + let base_url = root_url.join(relative_path).unwrap(); let config_file = ConfigFile::new(json_string, base_url).unwrap(); config.tree.inject_config_file(config_file).await; } let resolver = Arc::new(LspResolver::from_config(&config, &cache, None).await); - let mut documents = Documents::default(); - documents.update_config(&config, &resolver, &cache, &Default::default()); + let mut document_modules = DocumentModules::default(); + document_modules.update_config( + &config, + &resolver, + &cache, + &Default::default(), + ); for (relative_path, source, version, language_id) in sources { - let specifier = root_uri.join(relative_path).unwrap(); - documents.open( - specifier.clone(), + let specifier = root_url.join(relative_path).unwrap(); + document_modules.open_document( + url_to_uri(&specifier).unwrap(), *version, *language_id, (*source).into(), - None, ); } ( temp_dir, StateSnapshot { project_version: 0, - documents: Arc::new(documents), + document_modules, config: Arc::new(config), resolver, }, @@ -2146,17 +2123,18 @@ let c: number = "a"; let mut all_diagnostics = diagnostics .into_iter() - .map(|d| (d.specifier.clone(), d)) + .map(|d| (d.uri.clone(), d)) .collect::>(); for diag in deferred { - let existing = all_diagnostics - .entry(diag.document_specifier.clone()) - .or_insert_with(|| DiagnosticRecord { - specifier: diag.document_specifier.clone(), - versioned: VersionedDiagnostics { - diagnostics: vec![], - version: diag.version, - }, + let existing = + all_diagnostics.entry(diag.uri.clone()).or_insert_with(|| { + DiagnosticRecord { + uri: diag.uri.clone(), + versioned: VersionedDiagnostics { + diagnostics: vec![], + version: diag.version, + }, + } }); existing .versioned @@ -2207,8 +2185,9 @@ let c: number = "a"; let actual = generate_all_deno_diagnostics(&snapshot, &config, 
token); assert_eq!(actual.len(), 3); for record in actual { + let specifier = uri_to_url(&record.uri); let relative_specifier = - temp_dir.url().make_relative(&record.specifier).unwrap(); + temp_dir.url().make_relative(&specifier).unwrap(); match relative_specifier.as_str() { "std/assert/mod.ts" => { assert_eq!(json!(record.versioned.diagnostics), json!([])) @@ -2241,15 +2220,16 @@ let c: number = "a"; "a/file2.ts" => { assert_eq!(json!(record.versioned.diagnostics), json!([])) } - _ => unreachable!("unexpected specifier {}", record.specifier), + _ => unreachable!("unexpected specifier {}", &specifier), } } } #[test] fn test_get_code_action_import_map_remap() { + let uri = Uri::from_str("file:///a/file.ts").unwrap(); let specifier = ModuleSpecifier::parse("file:///a/file.ts").unwrap(); - let result = DenoDiagnostic::get_code_action(&specifier, &lsp::Diagnostic { + let result = DenoDiagnostic::get_code_action(&uri, &specifier, &lsp::Diagnostic { range: lsp::Range { start: lsp::Position { line: 0, character: 23 }, end: lsp::Position { line: 0, character: 50 }, diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 4d326fec85..7c2761ec25 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -2,16 +2,18 @@ use std::borrow::Cow; use std::collections::BTreeMap; +use std::collections::BTreeSet; use std::collections::HashMap; use std::collections::HashSet; use std::fs; use std::future::Future; use std::ops::Range; +use std::path::PathBuf; use std::pin::Pin; use std::str::FromStr; -use std::sync::atomic::AtomicBool; -use std::sync::atomic::Ordering; use std::sync::Arc; +use std::sync::Weak; +use std::time::SystemTime; use dashmap::DashMap; use deno_ast::swc::ecma_visit::VisitWith; @@ -22,11 +24,13 @@ use deno_core::error::AnyError; use deno_core::futures::future; use deno_core::futures::future::Shared; use deno_core::futures::FutureExt; -use deno_core::parking_lot::Mutex; +use deno_core::parking_lot::RwLock; use deno_core::resolve_url; +use deno_core::url::Position; +use deno_core::url::Url; use deno_core::ModuleSpecifier; use deno_error::JsErrorBox; -use deno_graph::Resolution; +use deno_graph::TypesDependency; use deno_path_util::url_to_file_path; use deno_runtime::deno_node; use deno_semver::jsr::JsrPackageReqReference; @@ -34,26 +38,1483 @@ use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; use indexmap::IndexMap; use indexmap::IndexSet; +use lsp_types::Uri; use node_resolver::cache::NodeResolutionThreadLocalCache; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; use once_cell::sync::Lazy; +use serde::Serialize; use tower_lsp::lsp_types as lsp; +use weak_table::PtrWeakKeyHashMap; +use weak_table::WeakValueHashMap; -use super::cache::calculate_fs_version; +use super::cache::calculate_fs_version_at_path; use super::cache::LspCache; use super::config::Config; +use super::logging::lsp_warn; use super::resolver::LspResolver; use super::resolver::ScopeDepInfo; use super::resolver::SingleReferrerGraphResolver; use super::testing::TestCollector; use super::testing::TestModule; use super::text::LineIndex; -use super::tsc; +use super::tsc::NavigationTree; +use super::urls::uri_is_file_like; +use super::urls::uri_to_file_path; +use super::urls::uri_to_url; +use super::urls::url_to_uri; +use super::urls::COMPONENT; use crate::graph_util::CliJsrUrlProvider; -pub const DOCUMENT_SCHEMES: [&str; 5] = - ["data", "blob", "file", "http", "https"]; +#[derive(Debug)] +pub struct OpenDocument { + pub uri: Arc, + pub text: Arc, + pub 
line_index: Arc, + pub version: i32, + pub language_id: LanguageId, + pub fs_version_on_open: Option, +} + +impl OpenDocument { + fn new( + uri: Uri, + version: i32, + language_id: LanguageId, + text: Arc, + ) -> Self { + let line_index = Arc::new(LineIndex::new(&text)); + let fs_version_on_open = uri_to_file_path(&uri) + .ok() + .and_then(calculate_fs_version_at_path); + OpenDocument { + uri: Arc::new(uri), + text, + line_index, + version, + language_id, + fs_version_on_open, + } + } + + fn with_change( + &self, + version: i32, + changes: Vec, + ) -> Result { + let mut text = self.text.to_string(); + let mut line_index = self.line_index.clone(); + let mut index_valid = IndexValid::All; + for change in changes { + if let Some(range) = change.range { + if !index_valid.covers(range.start.line) { + line_index = Arc::new(LineIndex::new(&text)); + } + index_valid = IndexValid::UpTo(range.start.line); + let range = line_index.get_text_range(range)?; + text.replace_range(Range::::from(range), &change.text); + } else { + text = change.text; + index_valid = IndexValid::UpTo(0); + } + } + let text: Arc = text.into(); + let line_index = if index_valid == IndexValid::All { + line_index + } else { + Arc::new(LineIndex::new(&text)) + }; + Ok(OpenDocument { + uri: self.uri.clone(), + text, + line_index, + version, + language_id: self.language_id, + fs_version_on_open: self.fs_version_on_open.clone(), + }) + } + + pub fn is_diagnosable(&self) -> bool { + self.language_id.is_diagnosable() + } + + pub fn is_file_like(&self) -> bool { + uri_is_file_like(&self.uri) + } + + pub fn script_version(&self) -> String { + let fs_version = self.fs_version_on_open.as_deref().unwrap_or("1"); + format!("{fs_version}+{}", self.version) + } +} + +fn remote_url_to_uri(url: &Url) -> Option { + if !matches!(url.scheme(), "http" | "https") { + return None; + } + let mut string = String::with_capacity(url.as_str().len() + 6); + string.push_str("deno:/"); + string.push_str(url.scheme()); + for p in url[Position::BeforeHost..].split('/') { + string.push('/'); + string.push_str( + &percent_encoding::utf8_percent_encode(p, COMPONENT).to_string(), + ); + } + Uri::from_str(&string) + .inspect_err(|err| { + lsp_warn!("Couldn't convert remote URL \"{url}\" to URI: {err}") + }) + .ok() +} + +fn asset_url_to_uri(url: &Url) -> Option { + if url.scheme() != "asset" { + return None; + } + Uri::from_str(&format!("deno:/asset{}", url.path())) + .inspect_err(|err| { + lsp_warn!("Couldn't convert asset URL \"{url}\" to URI: {err}") + }) + .ok() +} + +fn data_url_to_uri(url: &Url) -> Option { + let data_url = deno_media_type::data_url::RawDataUrl::parse(url).ok()?; + let media_type = data_url.media_type(); + let extension = if media_type == MediaType::Unknown { + "" + } else { + media_type.as_ts_extension() + }; + let mut file_name_str = url.path().to_string(); + if let Some(query) = url.query() { + file_name_str.push('?'); + file_name_str.push_str(query); + } + let hash = deno_lib::util::checksum::gen(&[file_name_str.as_bytes()]); + Uri::from_str(&format!("deno:/data_url/{hash}{extension}",)) + .inspect_err(|err| { + lsp_warn!("Couldn't convert data url \"{url}\" to URI: {err}") + }) + .ok() +} + +#[derive(Debug, Clone)] +pub enum DocumentText { + Static(&'static str), + Arc(Arc), +} + +impl DocumentText { + /// Will clone the string if static. 
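// Illustrative sketch of the incremental update in OpenDocument::with_change
// above: each LSP change either replaces a (line, character) range or the whole
// text, and the line index is rebuilt lazily only when an edit invalidates it.
// This standalone version recomputes offsets eagerly and uses byte offsets,
// whereas real LSP positions are UTF-16 code units.
struct Change {
  // None means "replace the whole document".
  range: Option<((usize, usize), (usize, usize))>, // (line, character) pairs
  text: String,
}

fn offset(text: &str, (line, character): (usize, usize)) -> usize {
  let line_start: usize = text
    .split_inclusive('\n')
    .take(line)
    .map(|l| l.len())
    .sum();
  line_start + character
}

fn apply_changes(mut text: String, changes: Vec<Change>) -> String {
  for change in changes {
    match change.range {
      Some((start, end)) => {
        let (s, e) = (offset(&text, start), offset(&text, end));
        text.replace_range(s..e, &change.text);
      }
      None => text = change.text,
    }
  }
  text
}

fn main() {
  let text = "const a = 1;\nconst b = 2;\n".to_string();
  let updated = apply_changes(
    text,
    vec![Change { range: Some(((1, 6), (1, 7))), text: "c".into() }],
  );
  assert_eq!(updated, "const a = 1;\nconst c = 2;\n");
}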
+ pub fn to_arc(&self) -> Arc { + match self { + Self::Static(s) => (*s).into(), + Self::Arc(s) => s.clone(), + } + } +} + +impl std::ops::Deref for DocumentText { + type Target = str; + + fn deref(&self) -> &Self::Target { + match self { + Self::Static(s) => s, + Self::Arc(s) => s, + } + } +} + +impl Serialize for DocumentText { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + (self as &str).serialize(serializer) + } +} + +#[derive(Debug, Clone)] +pub enum ServerDocumentKind { + Fs { + fs_version: String, + text: Arc, + }, + RemoteUrl { + url: Arc, + fs_cache_version: String, + text: Arc, + }, + DataUrl { + url: Arc, + text: Arc, + }, + Asset { + url: Arc, + text: &'static str, + }, +} + +#[derive(Debug)] +pub struct ServerDocument { + pub uri: Arc, + pub media_type: MediaType, + pub line_index: Arc, + pub kind: ServerDocumentKind, +} + +impl ServerDocument { + fn load(uri: &Uri) -> Option { + let scheme = uri.scheme()?; + if scheme.eq_lowercase("file") { + let url = uri_to_url(uri); + let path = url_to_file_path(&url).ok()?; + let bytes = fs::read(&path).ok()?; + let media_type = MediaType::from_specifier(&url); + let text: Arc = + bytes_to_content(&url, media_type, bytes, None).ok()?.into(); + let fs_version = calculate_fs_version_at_path(&path)?; + let line_index = Arc::new(LineIndex::new(&text)); + return Some(Self { + uri: Arc::new(uri.clone()), + media_type, + line_index, + kind: ServerDocumentKind::Fs { fs_version, text }, + }); + } + None + } + + fn remote_url( + uri: &Uri, + url: Arc, + scope: Option<&Url>, + cache: &LspCache, + ) -> Option { + let media_type = MediaType::from_specifier(&url); + let http_cache = cache.for_specifier(scope); + let cache_key = http_cache.cache_item_key(&url).ok()?; + let cache_entry = http_cache.get(&cache_key, None).ok()??; + let (_, maybe_charset) = + deno_graph::source::resolve_media_type_and_charset_from_headers( + &url, + Some(&cache_entry.metadata.headers), + ); + let fs_cache_version = (|| { + let modified = http_cache.read_modified_time(&cache_key).ok()??; + let duration = modified.duration_since(SystemTime::UNIX_EPOCH).ok()?; + Some(duration.as_millis().to_string()) + })() + .unwrap_or_else(|| "1".to_string()); + let text: Arc = bytes_to_content( + &url, + media_type, + cache_entry.content.into_owned(), + maybe_charset, + ) + .ok()? + .into(); + let line_index = Arc::new(LineIndex::new(&text)); + Some(Self { + uri: Arc::new(uri.clone()), + media_type, + line_index, + kind: ServerDocumentKind::RemoteUrl { + url, + fs_cache_version, + text, + }, + }) + } + + fn asset(name: &str, text: &'static str) -> Self { + let url = Arc::new(Url::parse(&format!("asset:///{name}")).unwrap()); + let uri = asset_url_to_uri(&url).unwrap(); + let media_type = MediaType::from_specifier(&url); + let line_index = Arc::new(LineIndex::new(text)); + Self { + uri: Arc::new(uri), + media_type, + line_index, + kind: ServerDocumentKind::Asset { url, text }, + } + } + + fn data_url(uri: &Uri, url: Arc) -> Option { + let raw_data_url = + deno_media_type::data_url::RawDataUrl::parse(&url).ok()?; + let media_type = raw_data_url.media_type(); + let text: Arc = raw_data_url.decode().ok()?.into(); + let line_index = Arc::new(LineIndex::new(&text)); + Some(Self { + uri: Arc::new(uri.clone()), + media_type, + line_index, + kind: ServerDocumentKind::DataUrl { url, text }, + }) + } + + pub fn text(&self) -> DocumentText { + match &self.kind { + ServerDocumentKind::Fs { text, .. 
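// Illustrative sketch of the idea behind data_url_to_uri above: data: URLs have
// no natural file name, so a stable client-facing URI is derived from a hash of
// the URL's path (and query) plus an extension inferred from the media type.
// A simple FNV-1a hash stands in for the checksum helper used by the real code.
fn fnv1a(bytes: &[u8]) -> u64 {
  let mut hash: u64 = 0xcbf29ce484222325;
  for b in bytes {
    hash ^= *b as u64;
    hash = hash.wrapping_mul(0x100000001b3);
  }
  hash
}

fn data_url_to_uri(path_and_query: &str, extension: &str) -> String {
  format!(
    "deno:/data_url/{:016x}{extension}",
    fnv1a(path_and_query.as_bytes())
  )
}

fn main() {
  let a = data_url_to_uri(
    "application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAxOw==",
    ".ts",
  );
  let b = data_url_to_uri(
    "application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAxOw==",
    ".ts",
  );
  assert_eq!(a, b); // deterministic: the same data: URL always maps to the same URI
}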
} => DocumentText::Arc(text.clone()), + ServerDocumentKind::RemoteUrl { text, .. } => { + DocumentText::Arc(text.clone()) + } + ServerDocumentKind::DataUrl { text, .. } => { + DocumentText::Arc(text.clone()) + } + ServerDocumentKind::Asset { text, .. } => DocumentText::Static(text), + } + } + + pub fn is_diagnosable(&self) -> bool { + media_type_is_diagnosable(self.media_type) + } + + pub fn is_file_like(&self) -> bool { + uri_is_file_like(&self.uri) + } + + pub fn script_version(&self) -> String { + match &self.kind { + ServerDocumentKind::Fs { fs_version, .. } => fs_version.clone(), + ServerDocumentKind::RemoteUrl { + fs_cache_version, .. + } => fs_cache_version.clone(), + ServerDocumentKind::DataUrl { .. } => "1".to_string(), + ServerDocumentKind::Asset { .. } => "1".to_string(), + } + } +} + +#[derive(Debug)] +pub struct AssetDocuments { + inner: HashMap, Arc>, +} + +impl AssetDocuments { + pub fn get(&self, k: &Uri) -> Option<&Arc> { + self.inner.get(k) + } +} + +pub static ASSET_DOCUMENTS: Lazy = + Lazy::new(|| AssetDocuments { + inner: crate::tsc::LAZILY_LOADED_STATIC_ASSETS + .iter() + .map(|(k, v)| { + let doc = Arc::new(ServerDocument::asset(k, v.as_str())); + let uri = doc.uri.clone(); + (uri, doc) + }) + .collect(), + }); + +#[derive(Debug, Clone)] +pub enum Document { + Open(Arc), + Server(Arc), +} + +impl Document { + pub fn open(&self) -> Option<&Arc> { + match self { + Self::Open(d) => Some(d), + Self::Server(_) => None, + } + } + + pub fn server(&self) -> Option<&Arc> { + match self { + Self::Open(_) => None, + Self::Server(d) => Some(d), + } + } + + pub fn uri(&self) -> &Arc { + match self { + Self::Open(d) => &d.uri, + Self::Server(d) => &d.uri, + } + } + + pub fn text(&self) -> DocumentText { + match self { + Self::Open(d) => DocumentText::Arc(d.text.clone()), + Self::Server(d) => d.text(), + } + } + + pub fn line_index(&self) -> &Arc { + match self { + Self::Open(d) => &d.line_index, + Self::Server(d) => &d.line_index, + } + } + + pub fn script_version(&self) -> String { + match self { + Self::Open(d) => d.script_version(), + Self::Server(d) => d.script_version(), + } + } + + pub fn is_diagnosable(&self) -> bool { + match self { + Self::Open(d) => d.is_diagnosable(), + Self::Server(d) => d.is_diagnosable(), + } + } + + pub fn is_file_like(&self) -> bool { + match self { + Self::Open(d) => d.is_file_like(), + Self::Server(d) => d.is_file_like(), + } + } +} + +#[derive(Debug, Default, Clone)] +pub struct Documents { + open: IndexMap>, + server: Arc>>, + file_like_uris_by_url: Arc>>, + /// These URLs can not be recovered from the URIs we assign them without these + /// maps. We want to be able to discard old documents from here but keep these + /// mappings. 
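// Illustrative sketch of the Document enum above: an editor-open document and a
// server-loaded document share one read-oriented interface, and callers only
// match on the variant when they need open-document data such as the LSP
// version. Fields are trimmed to the essentials.
use std::sync::Arc;

struct OpenDocument {
  uri: Arc<String>,
  version: i32,
}

struct ServerDocument {
  uri: Arc<String>,
  fs_version: String,
}

enum Document {
  Open(Arc<OpenDocument>),
  Server(Arc<ServerDocument>),
}

impl Document {
  fn uri(&self) -> &Arc<String> {
    match self {
      Document::Open(d) => &d.uri,
      Document::Server(d) => &d.uri,
    }
  }

  fn script_version(&self) -> String {
    match self {
      // Open documents combine a filesystem snapshot with the LSP edit version.
      Document::Open(d) => format!("1+{}", d.version),
      Document::Server(d) => d.fs_version.clone(),
    }
  }
}

fn main() {
  let open = Document::Open(Arc::new(OpenDocument {
    uri: Arc::new("file:///a.ts".into()),
    version: 3,
  }));
  assert_eq!(open.uri().as_str(), "file:///a.ts");
  assert_eq!(open.script_version(), "1+3");

  let server = Document::Server(Arc::new(ServerDocument {
    uri: Arc::new("deno:/https/deno.land/std/assert/mod.ts".into()),
    fs_version: "1700000000000".into(),
  }));
  assert_eq!(server.script_version(), "1700000000000");
}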
+ data_urls_by_uri: Arc>>, + remote_urls_by_uri: Arc>>, +} + +impl Documents { + pub fn open( + &mut self, + uri: Uri, + version: i32, + language_id: LanguageId, + text: Arc, + ) -> Arc { + self.server.remove(&uri); + let doc = + Arc::new(OpenDocument::new(uri.clone(), version, language_id, text)); + self.open.insert(uri, doc.clone()); + if !doc.uri.scheme().is_some_and(|s| s.eq_lowercase("file")) { + let url = uri_to_url(&doc.uri); + if url.scheme() == "file" { + self.file_like_uris_by_url.insert(url, doc.uri.clone()); + } + } + doc + } + + pub fn change( + &mut self, + uri: &Uri, + version: i32, + changes: Vec, + ) -> Result, AnyError> { + let Some((uri, doc)) = self.open.shift_remove_entry(uri) else { + return Err( + JsErrorBox::new( + "NotFound", + format!( + "The URI \"{}\" does not refer to an open document.", + uri.as_str() + ), + ) + .into(), + ); + }; + let doc = Arc::new(doc.with_change(version, changes)?); + self.open.insert(uri, doc.clone()); + Ok(doc) + } + + pub fn close(&mut self, uri: &Uri) -> Result, AnyError> { + self.file_like_uris_by_url.retain(|_, u| u.as_ref() != uri); + self.open.shift_remove(uri).ok_or_else(|| { + JsErrorBox::new( + "NotFound", + format!( + "The URI \"{}\" does not refer to an open document.", + uri.as_str() + ), + ) + .into() + }) + } + + pub fn get(&self, uri: &Uri) -> Option { + if let Some(doc) = self.open.get(uri) { + return Some(Document::Open(doc.clone())); + } + if let Some(doc) = ASSET_DOCUMENTS.get(uri) { + return Some(Document::Server(doc.clone())); + } + if let Some(doc) = self.server.get(uri) { + return Some(Document::Server(doc.clone())); + } + let doc = if let Some(doc) = ServerDocument::load(uri) { + doc + } else if let Some(data_url) = self.data_urls_by_uri.get(uri) { + ServerDocument::data_url(uri, data_url.value().clone())? + } else { + return None; + }; + let doc = Arc::new(doc); + self.server.insert(uri.clone(), doc.clone()); + Some(Document::Server(doc)) + } + + /// This will not create any server entries, only retrieve existing entries. 
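// Illustrative sketch of the open/close bookkeeping in Documents above: opening
// a URI evicts any server-loaded copy so the open buffer wins, changes must
// target an already-open URI, and closing removes the open entry. DashMap and
// IndexMap are replaced with plain HashMaps, and text replaces the document types.
use std::collections::HashMap;
use std::sync::Arc;

#[derive(Default)]
struct Documents {
  open: HashMap<String, Arc<(i32, String)>>, // uri -> (version, text)
  server: HashMap<String, Arc<String>>,      // uri -> text loaded from disk/cache
}

impl Documents {
  fn open_doc(&mut self, uri: &str, version: i32, text: &str) {
    self.server.remove(uri); // the open buffer supersedes the server copy
    self
      .open
      .insert(uri.to_string(), Arc::new((version, text.to_string())));
  }

  fn change(&mut self, uri: &str, version: i32, text: &str) -> Result<(), String> {
    if !self.open.contains_key(uri) {
      return Err(format!("\"{uri}\" does not refer to an open document"));
    }
    self
      .open
      .insert(uri.to_string(), Arc::new((version, text.to_string())));
    Ok(())
  }

  fn close(&mut self, uri: &str) -> Result<(), String> {
    self
      .open
      .remove(uri)
      .map(|_| ())
      .ok_or_else(|| format!("\"{uri}\" does not refer to an open document"))
  }
}

fn main() {
  let mut docs = Documents::default();
  docs.server.insert("file:///a.ts".into(), Arc::new("old".into()));
  docs.open_doc("file:///a.ts", 1, "export const a = 1;");
  assert!(docs.server.is_empty());
  assert!(docs.change("file:///a.ts", 2, "export const a = 2;").is_ok());
  assert!(docs.close("file:///a.ts").is_ok());
  assert!(docs.change("file:///a.ts", 3, "x").is_err());
}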
+ pub fn inspect(&self, uri: &Uri) -> Option { + if let Some(doc) = self.open.get(uri) { + return Some(Document::Open(doc.clone())); + } + if let Some(doc) = self.server.get(uri) { + return Some(Document::Server(doc.clone())); + } + None + } + + pub fn get_for_specifier( + &self, + specifier: &Url, + scope: Option<&Url>, + cache: &LspCache, + ) -> Option { + let scheme = specifier.scheme(); + if scheme == "file" { + let uri = self + .file_like_uris_by_url + .get(specifier) + .map(|e| e.value().clone()) + .or_else(|| url_to_uri(specifier).ok().map(Arc::new))?; + self.get(&uri) + } else if scheme == "asset" { + let uri = asset_url_to_uri(specifier)?; + self.get(&uri) + } else if scheme == "http" || scheme == "https" { + if let Some(vendored_specifier) = + cache.vendored_specifier(specifier, scope) + { + let uri = url_to_uri(&vendored_specifier).ok()?; + self.get(&uri) + } else { + let uri = remote_url_to_uri(specifier)?; + if let Some(doc) = self.server.get(&uri) { + return Some(Document::Server(doc.clone())); + } + let url = Arc::new(specifier.clone()); + self.remote_urls_by_uri.insert(uri.clone(), url.clone()); + let doc = + Arc::new(ServerDocument::remote_url(&uri, url, scope, cache)?); + self.server.insert(uri, doc.clone()); + Some(Document::Server(doc)) + } + } else if scheme == "data" { + let uri = data_url_to_uri(specifier)?; + if let Some(doc) = self.server.get(&uri) { + return Some(Document::Server(doc.clone())); + } + let url = Arc::new(specifier.clone()); + self.data_urls_by_uri.insert(uri.clone(), url.clone()); + let doc = Arc::new(ServerDocument::data_url(&uri, url)?); + self.server.insert(uri, doc.clone()); + Some(Document::Server(doc)) + } else { + None + } + } + + pub fn open_docs(&self) -> impl Iterator> { + self.open.values() + } + + pub fn server_docs(&self) -> Vec> { + self.server.iter().map(|e| e.value().clone()).collect() + } + + pub fn docs(&self) -> Vec { + self + .open + .values() + .map(|d| Document::Open(d.clone())) + .chain( + self + .server + .iter() + .map(|e| Document::Server(e.value().clone())), + ) + .collect() + } + + pub fn filtered_docs( + &self, + predicate: impl FnMut(&Document) -> bool, + ) -> Vec { + self + .open + .values() + .map(|d| Document::Open(d.clone())) + .chain( + self + .server + .iter() + .map(|e| Document::Server(e.value().clone())), + ) + .filter(predicate) + .collect() + } + + pub fn remove_server_doc(&self, uri: &Uri) { + self.server.remove(uri); + } +} + +#[derive(Debug)] +pub struct DocumentModuleOpenData { + pub version: i32, + pub parsed_source: Option, +} + +#[derive(Debug)] +pub struct DocumentModule { + pub uri: Arc, + pub open_data: Option, + pub script_version: String, + pub specifier: Arc, + pub scope: Option>, + pub media_type: MediaType, + pub headers: Option>, + pub text: DocumentText, + pub line_index: Arc, + pub resolution_mode: ResolutionMode, + pub dependencies: Arc>, + pub types_dependency: Option>, + pub navigation_tree: tokio::sync::OnceCell>, + pub semantic_tokens_full: tokio::sync::OnceCell, + text_info_cell: once_cell::sync::OnceCell, + test_module_fut: Option, +} + +impl DocumentModule { + pub fn new( + document: &Document, + specifier: Arc, + scope: Option>, + resolver: &LspResolver, + config: &Config, + cache: &LspCache, + ) -> Self { + let text = document.text(); + let headers = matches!(specifier.scheme(), "http" | "https") + .then(|| { + let http_cache = cache.for_specifier(scope.as_deref()); + let cache_key = http_cache.cache_item_key(&specifier).ok()?; + let cache_entry = http_cache.get(&cache_key, 
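// Illustrative sketch of the scheme dispatch in get_for_specifier above: a
// module specifier is mapped to the URI the client is expected to use for it.
// Remote and data: URLs get a synthetic "deno:/..." URI; other schemes are
// rejected. Percent-encoding, vendoring, and hashing from the real code are
// omitted here.
fn specifier_to_client_uri(specifier: &str) -> Option<String> {
  let scheme = specifier.split(':').next()?;
  match scheme {
    // file: specifiers map to themselves (possibly via a cached file-like alias).
    "file" => Some(specifier.to_string()),
    // assets are served under a reserved deno:/asset path
    "asset" => Some(format!(
      "deno:/asset{}",
      specifier.trim_start_matches("asset://")
    )),
    // remote modules are mirrored under deno:/<scheme>/<host>/<path>
    "http" | "https" => {
      let rest = specifier.split("://").nth(1)?;
      Some(format!("deno:/{scheme}/{rest}"))
    }
    // data: URLs get a stable synthetic name (the real code hashes the URL;
    // the length stands in here purely for illustration)
    "data" => Some(format!("deno:/data_url/{:x}", specifier.len())),
    _ => None,
  }
}

fn main() {
  assert_eq!(
    specifier_to_client_uri("https://deno.land/std/assert/mod.ts").as_deref(),
    Some("deno:/https/deno.land/std/assert/mod.ts")
  );
  assert!(specifier_to_client_uri("npm:chalk").is_none());
}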
None).ok()??; + Some(cache_entry.metadata.headers) + }) + .flatten(); + let media_type = resolve_media_type( + &specifier, + headers.as_ref(), + document.open().map(|d| d.language_id), + ); + let (parsed_source, maybe_module, resolution_mode) = + if media_type_is_diagnosable(media_type) { + parse_and_analyze_module( + specifier.as_ref().clone(), + text.to_arc(), + headers.as_ref(), + media_type, + scope.as_deref(), + resolver, + ) + } else { + (None, None, ResolutionMode::Import) + }; + let maybe_module = maybe_module.and_then(Result::ok); + let dependencies = maybe_module + .as_ref() + .map(|m| Arc::new(m.dependencies.clone())) + .unwrap_or_default(); + let types_dependency = maybe_module + .as_ref() + .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?))); + let test_module_fut = + get_maybe_test_module_fut(parsed_source.as_ref(), config); + DocumentModule { + uri: document.uri().clone(), + open_data: document.open().map(|d| DocumentModuleOpenData { + version: d.version, + parsed_source, + }), + script_version: document.script_version(), + specifier, + scope, + media_type, + headers, + text, + line_index: document.line_index().clone(), + resolution_mode, + dependencies, + types_dependency, + navigation_tree: Default::default(), + semantic_tokens_full: Default::default(), + text_info_cell: Default::default(), + test_module_fut, + } + } + + pub fn is_diagnosable(&self) -> bool { + media_type_is_diagnosable(self.media_type) + } + + pub fn dependency_at_position( + &self, + position: &lsp::Position, + ) -> Option<(&str, &deno_graph::Dependency, &deno_graph::Range)> { + let position = deno_graph::Position { + line: position.line as usize, + character: position.character as usize, + }; + self + .dependencies + .iter() + .find_map(|(s, dep)| dep.includes(position).map(|r| (s.as_str(), dep, r))) + } + + pub fn text_info(&self) -> &SourceTextInfo { + // try to get the text info from the parsed source and if + // not then create one in the cell + self + .open_data + .as_ref() + .and_then(|d| d.parsed_source.as_ref()) + .and_then(|p| p.as_ref().ok()) + .map(|p| p.text_info_lazy()) + .unwrap_or_else(|| { + self + .text_info_cell + .get_or_init(|| SourceTextInfo::new(self.text.to_arc())) + }) + } + + pub async fn test_module(&self) -> Option> { + self.test_module_fut.clone()?.await + } +} + +type DepInfoByScope = BTreeMap>, Arc>; + +#[derive(Debug, Default)] +struct WeakDocumentModuleMap { + open: RwLock, Arc>>, + server: RwLock, Arc>>, + by_specifier: RwLock, Weak>>, +} + +impl WeakDocumentModuleMap { + fn get(&self, document: &Document) -> Option> { + match document { + Document::Open(d) => self.open.read().get(d).cloned(), + Document::Server(d) => self.server.read().get(d).cloned(), + } + } + + fn get_for_specifier(&self, specifier: &Url) -> Option> { + self.by_specifier.read().get(specifier) + } + + fn contains_specifier(&self, specifier: &Url) -> bool { + self.by_specifier.read().contains_key(specifier) + } + + fn inspect_values(&self) -> Vec> { + self + .open + .read() + .values() + .cloned() + .chain(self.server.read().values().cloned()) + .collect() + } + + fn insert( + &self, + document: &Document, + module: Arc, + ) -> Option> { + match document { + Document::Open(d) => { + self.open.write().insert(d.clone(), module.clone()); + } + Document::Server(d) => { + self.server.write().insert(d.clone(), module.clone()); + } + } + self + .by_specifier + .write() + .insert(module.specifier.clone(), module.clone()); + Some(module) + } + + fn remove_expired(&self) { + // IMPORTANT: Maintain 
this order based on weak ref relations. + self.open.write().remove_expired(); + self.server.write().remove_expired(); + self.by_specifier.write().remove_expired(); + } +} + +#[derive(Debug, Default, Clone)] +pub struct DocumentModules { + pub documents: Documents, + config: Arc, + resolver: Arc, + cache: Arc, + workspace_files: Arc>, + dep_info_by_scope: once_cell::sync::OnceCell>, + modules_unscoped: Arc, + modules_by_scope: Arc, Arc>>, +} + +impl DocumentModules { + pub fn update_config( + &mut self, + config: &Config, + resolver: &Arc, + cache: &LspCache, + workspace_files: &Arc>, + ) { + self.config = Arc::new(config.clone()); + self.cache = Arc::new(cache.clone()); + self.resolver = resolver.clone(); + self.workspace_files = workspace_files.clone(); + self.modules_unscoped = Default::default(); + self.modules_by_scope = Arc::new( + self + .config + .tree + .data_by_scope() + .keys() + .map(|s| (s.clone(), Default::default())) + .collect(), + ); + self.dep_info_by_scope = Default::default(); + + node_resolver::PackageJsonThreadLocalCache::clear(); + NodeResolutionThreadLocalCache::clear(); + + // Clean up non-existent documents. + self.documents.server.retain(|_, d| { + let Some(module) = + self.inspect_primary_module(&Document::Server(d.clone())) + else { + return false; + }; + let Ok(path) = url_to_file_path(&module.specifier) else { + // Remove non-file schemed docs (deps). They may not be dependencies + // anymore after updating resolvers. + return false; + }; + if !config.specifier_enabled(&module.specifier) { + return false; + } + path.is_file() + }); + } + + pub fn open_document( + &mut self, + uri: Uri, + version: i32, + language_id: LanguageId, + text: Arc, + ) -> Arc { + self.dep_info_by_scope = Default::default(); + self.documents.open(uri, version, language_id, text) + } + + pub fn change_document( + &mut self, + uri: &Uri, + version: i32, + changes: Vec, + ) -> Result, AnyError> { + self.dep_info_by_scope = Default::default(); + let document = self.documents.change(uri, version, changes)?; + Ok(document) + } + + /// Returns if the document is diagnosable. + pub fn close_document( + &mut self, + uri: &Uri, + ) -> Result, AnyError> { + self.dep_info_by_scope = Default::default(); + let document = self.documents.close(uri)?; + // If applicable, try to load the closed document as a server document so + // it's still included as a ts root etc.. + if uri.scheme().is_some_and(|s| s.eq_lowercase("file")) + && self.config.uri_enabled(uri) + { + self.documents.get(uri); + } + Ok(document) + } + + pub fn release(&self, specifier: &Url, scope: Option<&Url>) { + let Some(module) = self.module_for_specifier(specifier, scope) else { + return; + }; + self.documents.remove_server_doc(&module.uri); + } + + fn module_inner( + &self, + document: &Document, + specifier: Option<&Arc>, + scope: Option<&Url>, + ) -> Option> { + let modules = self.modules_for_scope(scope)?; + if let Some(module) = modules.get(document) { + return Some(module); + } + let specifier = specifier + .cloned() + .or_else(|| { + if let Some(document) = document.server() { + match &document.kind { + ServerDocumentKind::Fs { .. } => {} + ServerDocumentKind::RemoteUrl { url, .. } => { + return Some(url.clone()) + } + ServerDocumentKind::DataUrl { url, .. } => { + return Some(url.clone()) + } + ServerDocumentKind::Asset { url, .. 
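// Illustrative sketch of the weak caching used by WeakDocumentModuleMap above,
// written with std::sync::Weak instead of the weak-table crate: modules are held
// behind weak references keyed by specifier, so once the owning document (and
// its Arc'd module) is dropped, the cache entry expires and can be swept.
use std::collections::HashMap;
use std::sync::{Arc, Weak};

#[derive(Default)]
struct ModuleCache {
  by_specifier: HashMap<String, Weak<String>>, // specifier -> weakly held module
}

impl ModuleCache {
  fn insert(&mut self, specifier: &str, module: &Arc<String>) {
    self
      .by_specifier
      .insert(specifier.to_string(), Arc::downgrade(module));
  }

  fn get(&self, specifier: &str) -> Option<Arc<String>> {
    self.by_specifier.get(specifier)?.upgrade()
  }

  fn remove_expired(&mut self) {
    self.by_specifier.retain(|_, weak| weak.strong_count() > 0);
  }
}

fn main() {
  let mut cache = ModuleCache::default();
  let module = Arc::new("file:///a.ts".to_string());
  cache.insert("file:///a.ts", &module);
  assert!(cache.get("file:///a.ts").is_some());
  drop(module); // the last strong reference goes away with the document
  assert!(cache.get("file:///a.ts").is_none());
  cache.remove_expired();
  assert!(cache.by_specifier.is_empty());
}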
} => return Some(url.clone()), + } + } + None + }) + .or_else(|| { + let uri = document.uri(); + let url = uri_to_url(uri); + if url.scheme() != "file" { + return None; + } + if uri.scheme().is_some_and(|s| s.eq_lowercase("file")) { + if let Some(remote_specifier) = self.cache.unvendored_specifier(&url) + { + return Some(Arc::new(remote_specifier)); + } + } + Some(Arc::new(url)) + })?; + let module = Arc::new(DocumentModule::new( + document, + specifier, + scope.cloned().map(Arc::new), + &self.resolver, + &self.config, + &self.cache, + )); + modules.insert(document, module.clone()); + Some(module) + } + + pub fn module( + &self, + document: &Document, + scope: Option<&Url>, + ) -> Option> { + self.module_inner(document, None, scope) + } + + pub fn module_for_specifier( + &self, + specifier: &Url, + scope: Option<&Url>, + ) -> Option> { + let specifier = if let Ok(jsr_req_ref) = + JsrPackageReqReference::from_specifier(specifier) + { + Cow::Owned(self.resolver.jsr_to_resource_url(&jsr_req_ref, scope)?) + } else { + Cow::Borrowed(specifier) + }; + let specifier = self.resolver.resolve_redirects(&specifier, scope)?; + let document = + self + .documents + .get_for_specifier(&specifier, scope, &self.cache)?; + self.module_inner(&document, Some(&Arc::new(specifier)), scope) + } + + pub fn primary_module( + &self, + document: &Document, + ) -> Option> { + if let Some(scope) = self.primary_scope(document.uri()) { + return self.module(document, scope.map(|s| s.as_ref())); + } + for modules in self.modules_by_scope.values() { + if let Some(module) = modules.get(document) { + return Some(module); + } + } + self.modules_unscoped.get(document) + } + + pub fn workspace_file_modules_by_scope( + &self, + ) -> BTreeMap>, Vec>> { + let mut modules_with_scopes = BTreeMap::new(); + for path in self + .workspace_files + .iter() + .take(self.config.settings.unscoped.document_preload_limit) + { + let Ok(url) = Url::from_file_path(path) else { + continue; + }; + let scope = self.config.tree.scope_for_specifier(&url).cloned(); + let Some(document) = + self + .documents + .get_for_specifier(&url, scope.as_deref(), &self.cache) + else { + continue; + }; + if document.open().is_none() + && (!self.config.specifier_enabled(&url) + || self.resolver.in_node_modules(&url) + || self.cache.in_cache_directory(&url)) + { + continue; + } + let Some(module) = self.module(&document, scope.as_deref()) else { + continue; + }; + modules_with_scopes.insert(document.uri().clone(), (module, scope)); + } + // Include files that aren't in `self.workspace_files` for whatever reason. + for document in self.documents.docs() { + let uri = document.uri(); + if modules_with_scopes.contains_key(uri) { + continue; + } + let url = uri_to_url(uri); + if document.open().is_none() + && (url.scheme() != "file" + || !self.config.specifier_enabled(&url) + || self.resolver.in_node_modules(&url) + || self.cache.in_cache_directory(&url)) + { + continue; + } + let scope = self.config.tree.scope_for_specifier(&url).cloned(); + let Some(module) = self.module(&document, scope.as_deref()) else { + continue; + }; + modules_with_scopes.insert(document.uri().clone(), (module, scope)); + } + let mut result = BTreeMap::new(); + for (module, scope) in modules_with_scopes.into_values() { + (result.entry(scope).or_default() as &mut Vec<_>).push(module); + } + result + } + + /// This will not create any module entries, only retrieve existing entries. 
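// Illustrative sketch of the lookup order in primary_module above: if the
// document's URL determines a config scope, use that scope's module cache;
// otherwise fall back to scanning every scoped cache before the unscoped one.
// Modules are plain strings here.
use std::collections::BTreeMap;

type Scope = String;

struct Modules {
  by_scope: BTreeMap<Scope, BTreeMap<String, String>>, // scope -> uri -> module
  unscoped: BTreeMap<String, String>,
}

impl Modules {
  fn primary_module(&self, uri: &str, primary_scope: Option<&str>) -> Option<&String> {
    if let Some(scope) = primary_scope {
      // A file inside a workspace folder always resolves against that folder.
      return self.by_scope.get(scope)?.get(uri);
    }
    for modules in self.by_scope.values() {
      if let Some(module) = modules.get(uri) {
        return Some(module);
      }
    }
    self.unscoped.get(uri)
  }
}

fn main() {
  let mut by_scope = BTreeMap::new();
  by_scope.insert(
    "file:///workspace/".to_string(),
    BTreeMap::from([(
      "file:///workspace/a.ts".to_string(),
      "module-a".to_string(),
    )]),
  );
  let modules = Modules { by_scope, unscoped: BTreeMap::new() };
  assert_eq!(
    modules
      .primary_module("file:///workspace/a.ts", Some("file:///workspace/"))
      .map(String::as_str),
    Some("module-a")
  );
}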
+ pub fn inspect_module_for_specifier( + &self, + specifier: &Url, + scope: Option<&Url>, + ) -> Option> { + let specifier = if let Ok(jsr_req_ref) = + JsrPackageReqReference::from_specifier(specifier) + { + Cow::Owned(self.resolver.jsr_to_resource_url(&jsr_req_ref, scope)?) + } else { + Cow::Borrowed(specifier) + }; + let specifier = self.resolver.resolve_redirects(&specifier, scope)?; + let modules = self.modules_for_scope(scope)?; + modules.get_for_specifier(&specifier) + } + + /// This will not create any module entries, only retrieve existing entries. + pub fn inspect_primary_module( + &self, + document: &Document, + ) -> Option> { + if let Some(scope) = self.primary_scope(document.uri()) { + return self + .modules_for_scope(scope.map(|s| s.as_ref()))? + .get(document); + } + for modules in self.modules_by_scope.values() { + if let Some(module) = modules.get(document) { + return Some(module); + } + } + self.modules_unscoped.get(document) + } + + /// This will not create any module entries, only retrieve existing entries. + pub fn inspect_modules_by_scope( + &self, + document: &Document, + ) -> BTreeMap>, Arc> { + let mut result = BTreeMap::new(); + for (scope, modules) in self.modules_by_scope.iter() { + if let Some(module) = modules.get(document) { + result.insert(Some(scope.clone()), module); + } + } + if let Some(module) = self.modules_unscoped.get(document) { + result.insert(None, module); + } + result + } + + /// This will not store any module entries, only retrieve existing entries or + /// create temporary entries for scopes where one doesn't exist. + pub fn inspect_or_temp_modules_by_scope( + &self, + document: &Document, + ) -> BTreeMap>, Arc> { + let mut result = BTreeMap::new(); + for (scope, modules) in self.modules_by_scope.iter() { + let module = modules.get(document).unwrap_or_else(|| { + Arc::new(DocumentModule::new( + document, + Arc::new(uri_to_url(document.uri())), + Some(scope.clone()), + &self.resolver, + &self.config, + &self.cache, + )) + }); + result.insert(Some(scope.clone()), module); + } + let module = self.modules_unscoped.get(document).unwrap_or_else(|| { + Arc::new(DocumentModule::new( + document, + Arc::new(uri_to_url(document.uri())), + None, + &self.resolver, + &self.config, + &self.cache, + )) + }); + result.insert(None, module); + result + } + + fn modules_for_scope( + &self, + scope: Option<&Url>, + ) -> Option<&Arc> { + match scope { + Some(s) => Some(self.modules_by_scope.get(s)?), + None => Some(&self.modules_unscoped), + } + } + + fn primary_scope(&self, uri: &Uri) -> Option>> { + let url = uri_to_url(uri); + if url.scheme() == "file" && !self.cache.in_global_cache_directory(&url) { + let scope = self.config.tree.scope_for_specifier(&url); + return Some(scope); + } + None + } + + pub fn remove_expired_modules(&self) { + self.modules_unscoped.remove_expired(); + for modules in self.modules_by_scope.values() { + modules.remove_expired(); + } + } + + pub fn scopes(&self) -> BTreeSet>> { + self + .modules_by_scope + .keys() + .cloned() + .map(Some) + .chain([None]) + .collect() + } + + pub fn specifier_exists(&self, specifier: &Url, scope: Option<&Url>) -> bool { + if let Some(modules) = self.modules_for_scope(scope) { + if modules.contains_specifier(specifier) { + return true; + } + } + if specifier.scheme() == "file" { + return url_to_file_path(specifier) + .map(|p| p.is_file()) + .unwrap_or(false); + } + if specifier.scheme() == "data" { + return true; + } + if self.cache.for_specifier(scope).contains(specifier) { + return true; + } + false + } + 
+ pub fn dep_info_by_scope( + &self, + ) -> Arc>, Arc>> { + type ScopeEntry<'a> = + (Option<&'a Arc>, &'a Arc); + let dep_info_from_scope_entry = |(scope, modules): ScopeEntry<'_>| { + let mut dep_info = ScopeDepInfo::default(); + let mut visit_module = |module: &DocumentModule| { + for dependency in module.dependencies.values() { + let code_specifier = dependency.get_code(); + let type_specifier = dependency.get_type(); + if let Some(dep) = code_specifier { + if dep.scheme() == "node" { + dep_info.has_node_specifier = true; + } + if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { + dep_info.npm_reqs.insert(reference.into_inner().req); + } + } + if let Some(dep) = type_specifier { + if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { + dep_info.npm_reqs.insert(reference.into_inner().req); + } + } + if dependency.maybe_deno_types_specifier.is_some() { + if let (Some(code_specifier), Some(type_specifier)) = + (code_specifier, type_specifier) + { + if MediaType::from_specifier(type_specifier).is_declaration() { + dep_info + .deno_types_to_code_resolutions + .insert(type_specifier.clone(), code_specifier.clone()); + } + } + } + } + if let Some(dep) = module + .types_dependency + .as_ref() + .and_then(|d| d.dependency.maybe_specifier()) + { + if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { + dep_info.npm_reqs.insert(reference.into_inner().req); + } + } + }; + for module in modules.inspect_values() { + visit_module(&module); + } + let config_data = + scope.and_then(|s| self.config.tree.data_by_scope().get(s)); + if let Some(config_data) = config_data { + (|| { + let member_dir = &config_data.member_dir; + let jsx_config = + member_dir.to_maybe_jsx_import_source_config().ok()??; + let import_source_types = jsx_config.import_source_types.as_ref()?; + let import_source = jsx_config.import_source.as_ref()?; + let cli_resolver = + self.resolver.as_cli_resolver(scope.map(|s| s.as_ref())); + let type_specifier = cli_resolver + .resolve( + &import_source_types.specifier, + &import_source_types.base, + deno_graph::Position::zeroed(), + // todo(dsherret): this is wrong because it doesn't consider CJS referrers + ResolutionMode::Import, + NodeResolutionKind::Types, + ) + .ok()?; + let code_specifier = cli_resolver + .resolve( + &import_source.specifier, + &import_source.base, + deno_graph::Position::zeroed(), + // todo(dsherret): this is wrong because it doesn't consider CJS referrers + ResolutionMode::Import, + NodeResolutionKind::Execution, + ) + .ok()?; + dep_info + .deno_types_to_code_resolutions + .insert(type_specifier, code_specifier); + Some(()) + })(); + // fill the reqs from the lockfile + if let Some(lockfile) = config_data.lockfile.as_ref() { + let lockfile = lockfile.lock(); + for dep_req in lockfile.content.packages.specifiers.keys() { + if dep_req.kind == deno_semver::package::PackageKind::Npm { + dep_info.npm_reqs.insert(dep_req.req.clone()); + } + } + } + } + if dep_info.has_node_specifier + && !dep_info.npm_reqs.iter().any(|r| r.name == "@types/node") + { + dep_info + .npm_reqs + .insert(PackageReq::from_str("@types/node").unwrap()); + } + (scope.cloned(), Arc::new(dep_info)) + }; + self + .dep_info_by_scope + .get_or_init(|| { + NodeResolutionThreadLocalCache::clear(); + // Ensure at least module entries for workspace files are initialized. 
+ self.workspace_file_modules_by_scope(); + Arc::new( + self + .modules_by_scope + .iter() + .map(|(s, m)| (Some(s), m)) + .chain([(None, &self.modules_unscoped)]) + .map(dep_info_from_scope_entry) + .collect(), + ) + }) + .clone() + } + + pub fn scopes_with_node_specifier(&self) -> HashSet>> { + self + .dep_info_by_scope() + .iter() + .filter(|(_, i)| i.has_node_specifier) + .map(|(s, _)| s.clone()) + .collect::>() + } + + #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] + pub fn resolve( + &self, + // (is_cjs: bool, raw_specifier: String) + raw_specifiers: &[(bool, String)], + referrer: &Url, + scope: Option<&Url>, + ) -> Vec> { + let referrer_module = self.module_for_specifier(referrer, scope); + let dependencies = referrer_module.as_ref().map(|d| &d.dependencies); + let mut results = Vec::new(); + for (is_cjs, raw_specifier) in raw_specifiers { + let resolution_mode = match is_cjs { + true => ResolutionMode::Require, + false => ResolutionMode::Import, + }; + if raw_specifier.starts_with("asset:") { + if let Ok(specifier) = resolve_url(raw_specifier) { + let media_type = MediaType::from_specifier(&specifier); + results.push(Some((specifier, media_type))); + } else { + results.push(None); + } + } else if let Some(dep) = + dependencies.as_ref().and_then(|d| d.get(raw_specifier)) + { + if let Some(specifier) = dep.maybe_type.maybe_specifier() { + results.push(self.resolve_dependency( + specifier, + referrer, + resolution_mode, + scope, + )); + } else if let Some(specifier) = dep.maybe_code.maybe_specifier() { + results.push(self.resolve_dependency( + specifier, + referrer, + resolution_mode, + scope, + )); + } else { + results.push(None); + } + } else if let Ok(specifier) = + self.resolver.as_cli_resolver(scope).resolve( + raw_specifier, + referrer, + deno_graph::Position::zeroed(), + resolution_mode, + NodeResolutionKind::Types, + ) + { + results.push(self.resolve_dependency( + &specifier, + referrer, + resolution_mode, + scope, + )); + } else { + results.push(None); + } + } + results + } + + #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] + pub fn resolve_dependency( + &self, + specifier: &Url, + referrer: &Url, + resolution_mode: ResolutionMode, + scope: Option<&Url>, + ) -> Option<(Url, MediaType)> { + if let Some(module_name) = specifier.as_str().strip_prefix("node:") { + if deno_node::is_builtin_node_module(module_name) { + // return itself for node: specifiers because during type checking + // we resolve to the ambient modules in the @types/node package + // rather than deno_std/node + return Some((specifier.clone(), MediaType::Dts)); + } + } + let mut specifier = specifier.clone(); + let mut media_type = None; + if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) { + let (s, mt) = self.resolver.npm_to_file_url( + &npm_ref, + referrer, + resolution_mode, + scope, + )?; + specifier = s; + media_type = Some(mt); + } + let Some(module) = self.module_for_specifier(&specifier, scope) else { + let media_type = + media_type.unwrap_or_else(|| MediaType::from_specifier(&specifier)); + return Some((specifier, media_type)); + }; + if let Some(types) = module + .types_dependency + .as_ref() + .and_then(|d| d.dependency.maybe_specifier()) + { + self.resolve_dependency(types, &specifier, module.resolution_mode, scope) + } else { + Some((module.specifier.as_ref().clone(), module.media_type)) + } + } +} #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum LanguageId { @@ -181,189 +1642,6 @@ impl IndexValid { } } -/// An lsp representation 
of an asset in memory, that has either been retrieved -/// from static assets built into Rust, or static assets built into tsc. -#[derive(Debug)] -pub struct AssetDocument { - specifier: ModuleSpecifier, - text: &'static str, - line_index: Arc, - maybe_navigation_tree: Mutex>>, -} - -impl AssetDocument { - pub fn new(specifier: ModuleSpecifier, text: &'static str) -> Self { - let line_index = Arc::new(LineIndex::new(text)); - Self { - specifier, - text, - line_index, - maybe_navigation_tree: Default::default(), - } - } - - pub fn specifier(&self) -> &ModuleSpecifier { - &self.specifier - } - - pub fn cache_navigation_tree( - &self, - navigation_tree: Arc, - ) { - *self.maybe_navigation_tree.lock() = Some(navigation_tree); - } - - pub fn text(&self) -> &'static str { - self.text - } - - pub fn line_index(&self) -> Arc { - self.line_index.clone() - } - - pub fn maybe_navigation_tree(&self) -> Option> { - self.maybe_navigation_tree.lock().clone() - } -} - -#[derive(Debug)] -pub struct AssetDocuments { - inner: HashMap>, -} - -impl AssetDocuments { - pub fn contains_key(&self, k: &ModuleSpecifier) -> bool { - self.inner.contains_key(k) - } - - pub fn get(&self, k: &ModuleSpecifier) -> Option> { - self.inner.get(k).cloned() - } -} - -pub static ASSET_DOCUMENTS: Lazy = - Lazy::new(|| AssetDocuments { - inner: crate::tsc::LAZILY_LOADED_STATIC_ASSETS - .iter() - .map(|(k, v)| { - let url_str = format!("asset:///{k}"); - let specifier = resolve_url(&url_str).unwrap(); - let asset = Arc::new(AssetDocument::new(specifier.clone(), v.as_str())); - (specifier, asset) - }) - .collect(), - }); - -#[derive(Debug, Clone)] -pub enum AssetOrDocument { - Document(Arc), - Asset(Arc), -} - -impl AssetOrDocument { - pub fn document(&self) -> Option<&Arc> { - match self { - AssetOrDocument::Asset(_) => None, - AssetOrDocument::Document(doc) => Some(doc), - } - } - - pub fn file_referrer(&self) -> Option<&ModuleSpecifier> { - match self { - AssetOrDocument::Asset(_) => None, - AssetOrDocument::Document(doc) => doc.file_referrer(), - } - } - - pub fn scope(&self) -> Option<&ModuleSpecifier> { - match self { - AssetOrDocument::Asset(asset_doc) => Some(asset_doc.specifier()), - AssetOrDocument::Document(doc) => doc.scope(), - } - } - - pub fn maybe_semantic_tokens(&self) -> Option { - match self { - AssetOrDocument::Asset(_) => None, - AssetOrDocument::Document(d) => d - .open_data - .as_ref() - .and_then(|d| d.maybe_semantic_tokens.lock().clone()), - } - } - - pub fn text_str(&self) -> &str { - match self { - AssetOrDocument::Asset(a) => a.text(), - AssetOrDocument::Document(d) => d.text.as_ref(), - } - } - - pub fn text_fast_string(&self) -> deno_core::FastString { - match self { - AssetOrDocument::Asset(a) => deno_core::FastString::from_static(a.text()), - AssetOrDocument::Document(d) => d.text.clone().into(), - } - } - - pub fn line_index(&self) -> Arc { - match self { - AssetOrDocument::Asset(a) => a.line_index(), - AssetOrDocument::Document(d) => d.line_index(), - } - } - - pub fn maybe_navigation_tree(&self) -> Option> { - match self { - AssetOrDocument::Asset(a) => a.maybe_navigation_tree(), - AssetOrDocument::Document(d) => d.maybe_navigation_tree(), - } - } - - pub fn media_type(&self) -> MediaType { - match self { - AssetOrDocument::Asset(_) => MediaType::TypeScript, // assets are always TypeScript - AssetOrDocument::Document(d) => d.media_type(), - } - } - - pub fn get_maybe_dependency( - &self, - position: &lsp::Position, - ) -> Option<(String, deno_graph::Dependency, deno_graph::Range)> { - self - 
.document() - .and_then(|d| d.get_maybe_dependency(position)) - } - - pub fn maybe_parsed_source( - &self, - ) -> Option<&Result> { - self.document().and_then(|d| d.maybe_parsed_source()) - } - - pub fn document_lsp_version(&self) -> Option { - self.document().and_then(|d| d.maybe_lsp_version()) - } - - pub fn resolution_mode(&self) -> ResolutionMode { - match self { - AssetOrDocument::Asset(_) => ResolutionMode::Import, - AssetOrDocument::Document(d) => d.resolution_mode(), - } - } - - pub fn cache_navigation_tree( - &self, - navigation_tree: Arc, - ) { - match self { - AssetOrDocument::Asset(a) => a.cache_navigation_tree(navigation_tree), - AssetOrDocument::Document(d) => d.cache_navigation_tree(navigation_tree), - } - } -} - type ModuleResult = Result; type ParsedSourceResult = Result; type TestModuleFut = @@ -395,7 +1673,9 @@ fn get_maybe_test_module_fut( } let parsed_source = maybe_parsed_source?.as_ref().ok()?.clone(); let specifier = parsed_source.specifier(); - if specifier.scheme() != "file" { + if specifier.scheme() != "file" + || specifier.as_str().contains("/node_modules/") + { return None; } if !media_type_is_diagnosable(parsed_source.media_type()) { @@ -418,513 +1698,6 @@ fn get_maybe_test_module_fut( Some(handle) } -#[derive(Clone, Debug, Default)] -pub struct DocumentOpenData { - lsp_version: i32, - maybe_parsed_source: Option, - maybe_semantic_tokens: Arc>>, -} - -#[derive(Debug)] -pub struct Document { - /// Contains the last-known-good set of dependencies from parsing the module. - config: Arc, - dependencies: Arc>, - /// If this is maybe a CJS script and maybe not an ES module. - is_script: Option, - // TODO(nayeemrmn): This is unused, use it for scope attribution for remote - // modules. - file_referrer: Option, - maybe_types_dependency: Option>, - maybe_fs_version: Option, - line_index: Arc, - maybe_headers: Option>, - maybe_language_id: Option, - /// This is cached in a mutex so `workspace/symbol` and - /// `textDocument/codeLens` requests don't require a write lock. - maybe_navigation_tree: Mutex>>, - maybe_test_module_fut: Option, - media_type: MediaType, - /// Present if and only if this is an open document. - open_data: Option, - resolution_mode: ResolutionMode, - resolver: Arc, - specifier: ModuleSpecifier, - text: Arc, - text_info_cell: once_cell::sync::OnceCell, -} - -impl Document { - /// Open documents should have `maybe_lsp_version.is_some()`. 
- #[allow(clippy::too_many_arguments)] - fn new( - specifier: ModuleSpecifier, - text: Arc, - maybe_lsp_version: Option, - maybe_language_id: Option, - maybe_headers: Option>, - resolver: Arc, - config: Arc, - cache: &Arc, - file_referrer: Option, - ) -> Arc { - let file_referrer = Some(&specifier) - .filter(|s| cache.is_valid_file_referrer(s)) - .cloned() - .or(file_referrer); - let media_type = - resolve_media_type(&specifier, maybe_headers.as_ref(), maybe_language_id); - let (maybe_parsed_source, maybe_module, resolution_mode) = - if media_type_is_diagnosable(media_type) { - parse_and_analyze_module( - specifier.clone(), - text.clone(), - maybe_headers.as_ref(), - media_type, - file_referrer.as_ref(), - &resolver, - ) - } else { - (None, None, ResolutionMode::Import) - }; - let maybe_module = maybe_module.and_then(Result::ok); - let dependencies = maybe_module - .as_ref() - .map(|m| Arc::new(m.dependencies.clone())) - .unwrap_or_default(); - let maybe_types_dependency = maybe_module - .as_ref() - .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?))); - let line_index = Arc::new(LineIndex::new(text.as_ref())); - let maybe_test_module_fut = - get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config); - Arc::new(Self { - config, - dependencies, - maybe_fs_version: calculate_fs_version( - cache, - &specifier, - file_referrer.as_ref(), - ), - file_referrer, - is_script: maybe_module.as_ref().map(|m| m.is_script), - maybe_types_dependency, - line_index, - maybe_language_id, - maybe_headers, - maybe_navigation_tree: Mutex::new(None), - maybe_test_module_fut, - media_type, - open_data: maybe_lsp_version.map(|v| DocumentOpenData { - lsp_version: v, - maybe_parsed_source, - maybe_semantic_tokens: Default::default(), - }), - resolution_mode, - resolver, - specifier, - text, - text_info_cell: once_cell::sync::OnceCell::new(), - }) - } - - fn with_new_config( - &self, - resolver: Arc, - config: Arc, - ) -> Arc { - let media_type = resolve_media_type( - &self.specifier, - self.maybe_headers.as_ref(), - self.maybe_language_id, - ); - let dependencies; - let maybe_types_dependency; - let maybe_parsed_source; - let found_resolution_mode; - let is_script; - let maybe_test_module_fut; - if media_type != self.media_type { - let parsed_source_result = - parse_source(self.specifier.clone(), self.text.clone(), media_type); - let (maybe_module_result, resolution_mode) = analyze_module( - self.specifier.clone(), - &parsed_source_result, - self.maybe_headers.as_ref(), - self.file_referrer.as_ref(), - &resolver, - ); - let maybe_module = maybe_module_result.ok(); - dependencies = maybe_module - .as_ref() - .map(|m| Arc::new(m.dependencies.clone())) - .unwrap_or_default(); - is_script = maybe_module.as_ref().map(|m| m.is_script); - maybe_types_dependency = maybe_module - .as_ref() - .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?))); - maybe_parsed_source = Some(parsed_source_result); - maybe_test_module_fut = - get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &config); - found_resolution_mode = resolution_mode; - } else { - let cli_resolver = resolver.as_cli_resolver(self.file_referrer.as_ref()); - let is_cjs_resolver = - resolver.as_is_cjs_resolver(self.file_referrer.as_ref()); - let npm_resolver = - resolver.as_graph_npm_resolver(self.file_referrer.as_ref()); - let config_data = resolver.as_config_data(self.file_referrer.as_ref()); - let jsx_import_source_config = - config_data.and_then(|d| d.maybe_jsx_import_source_config()); - found_resolution_mode = is_cjs_resolver - 
.get_lsp_resolution_mode(&self.specifier, self.is_script); - let resolver = SingleReferrerGraphResolver { - valid_referrer: &self.specifier, - module_resolution_mode: found_resolution_mode, - cli_resolver, - jsx_import_source_config: jsx_import_source_config.as_ref(), - }; - dependencies = Arc::new( - self - .dependencies - .iter() - .map(|(s, d)| { - ( - s.clone(), - d.with_new_resolver( - s, - &CliJsrUrlProvider, - Some(&resolver), - Some(npm_resolver.as_ref()), - ), - ) - }) - .collect(), - ); - maybe_types_dependency = self.maybe_types_dependency.as_ref().map(|d| { - Arc::new(d.with_new_resolver( - &CliJsrUrlProvider, - Some(&resolver), - Some(npm_resolver.as_ref()), - )) - }); - is_script = self.is_script; - maybe_parsed_source = self.maybe_parsed_source().cloned(); - maybe_test_module_fut = self - .maybe_test_module_fut - .clone() - .filter(|_| config.specifier_enabled_for_test(&self.specifier)); - } - Arc::new(Self { - config, - // updated properties - dependencies, - file_referrer: self.file_referrer.clone(), - is_script, - maybe_types_dependency, - maybe_navigation_tree: Mutex::new(None), - // maintain - this should all be copies/clones - maybe_fs_version: self.maybe_fs_version.clone(), - line_index: self.line_index.clone(), - maybe_headers: self.maybe_headers.clone(), - maybe_language_id: self.maybe_language_id, - maybe_test_module_fut, - media_type, - resolution_mode: found_resolution_mode, - open_data: self.open_data.as_ref().map(|d| DocumentOpenData { - lsp_version: d.lsp_version, - maybe_parsed_source, - // reset semantic tokens - maybe_semantic_tokens: Default::default(), - }), - resolver, - specifier: self.specifier.clone(), - text: self.text.clone(), - text_info_cell: once_cell::sync::OnceCell::new(), - }) - } - - fn with_change( - &self, - version: i32, - changes: Vec, - ) -> Result, AnyError> { - let mut content = self.text.to_string(); - let mut line_index = self.line_index.clone(); - let mut index_valid = IndexValid::All; - for change in changes { - if let Some(range) = change.range { - if !index_valid.covers(range.start.line) { - line_index = Arc::new(LineIndex::new(&content)); - } - index_valid = IndexValid::UpTo(range.start.line); - let range = line_index.get_text_range(range)?; - content.replace_range(Range::::from(range), &change.text); - } else { - content = change.text; - index_valid = IndexValid::UpTo(0); - } - } - let text: Arc = content.into(); - let media_type = self.media_type; - let (maybe_parsed_source, maybe_module, resolution_mode) = if self - .maybe_language_id - .as_ref() - .map(|li| li.is_diagnosable()) - .unwrap_or(false) - { - parse_and_analyze_module( - self.specifier.clone(), - text.clone(), - self.maybe_headers.as_ref(), - media_type, - self.file_referrer.as_ref(), - self.resolver.as_ref(), - ) - } else { - (None, None, ResolutionMode::Import) - }; - let maybe_module = maybe_module.and_then(Result::ok); - let dependencies = maybe_module - .as_ref() - .map(|m| Arc::new(m.dependencies.clone())) - .unwrap_or_else(|| self.dependencies.clone()); - let maybe_types_dependency = maybe_module - .as_ref() - .and_then(|m| Some(Arc::new(m.maybe_types_dependency.clone()?))) - .or_else(|| self.maybe_types_dependency.clone()); - let line_index = if index_valid == IndexValid::All { - line_index - } else { - Arc::new(LineIndex::new(text.as_ref())) - }; - let maybe_test_module_fut = - get_maybe_test_module_fut(maybe_parsed_source.as_ref(), &self.config); - Ok(Arc::new(Self { - config: self.config.clone(), - is_script: maybe_module.as_ref().map(|m| m.is_script), - 
specifier: self.specifier.clone(), - file_referrer: self.file_referrer.clone(), - maybe_fs_version: self.maybe_fs_version.clone(), - maybe_language_id: self.maybe_language_id, - dependencies, - maybe_types_dependency, - text, - text_info_cell: once_cell::sync::OnceCell::new(), - line_index, - maybe_headers: self.maybe_headers.clone(), - maybe_navigation_tree: Mutex::new(None), - maybe_test_module_fut, - media_type, - resolution_mode, - open_data: self.open_data.is_some().then_some(DocumentOpenData { - lsp_version: version, - maybe_parsed_source, - maybe_semantic_tokens: Default::default(), - }), - resolver: self.resolver.clone(), - })) - } - - pub fn closed(&self, cache: &Arc) -> Arc { - Arc::new(Self { - config: self.config.clone(), - specifier: self.specifier.clone(), - file_referrer: self.file_referrer.clone(), - maybe_fs_version: calculate_fs_version( - cache, - &self.specifier, - self.file_referrer.as_ref(), - ), - maybe_language_id: self.maybe_language_id, - dependencies: self.dependencies.clone(), - is_script: self.is_script, - maybe_types_dependency: self.maybe_types_dependency.clone(), - text: self.text.clone(), - text_info_cell: once_cell::sync::OnceCell::new(), - line_index: self.line_index.clone(), - maybe_headers: self.maybe_headers.clone(), - maybe_navigation_tree: Mutex::new( - self.maybe_navigation_tree.lock().clone(), - ), - maybe_test_module_fut: self.maybe_test_module_fut.clone(), - media_type: self.media_type, - open_data: None, - resolution_mode: self.resolution_mode, - resolver: self.resolver.clone(), - }) - } - - pub fn saved(&self, cache: &Arc) -> Arc { - Arc::new(Self { - config: self.config.clone(), - specifier: self.specifier.clone(), - file_referrer: self.file_referrer.clone(), - maybe_fs_version: calculate_fs_version( - cache, - &self.specifier, - self.file_referrer.as_ref(), - ), - maybe_language_id: self.maybe_language_id, - dependencies: self.dependencies.clone(), - is_script: self.is_script, - maybe_types_dependency: self.maybe_types_dependency.clone(), - text: self.text.clone(), - text_info_cell: once_cell::sync::OnceCell::new(), - line_index: self.line_index.clone(), - maybe_headers: self.maybe_headers.clone(), - maybe_navigation_tree: Mutex::new( - self.maybe_navigation_tree.lock().clone(), - ), - maybe_test_module_fut: self.maybe_test_module_fut.clone(), - media_type: self.media_type, - open_data: self.open_data.clone(), - resolution_mode: self.resolution_mode, - resolver: self.resolver.clone(), - }) - } - - pub fn specifier(&self) -> &ModuleSpecifier { - &self.specifier - } - - pub fn file_referrer(&self) -> Option<&ModuleSpecifier> { - self.file_referrer.as_ref() - } - - pub fn scope(&self) -> Option<&ModuleSpecifier> { - self - .file_referrer - .as_ref() - .and_then(|r| self.config.tree.scope_for_specifier(r)) - } - - pub fn content(&self) -> &Arc { - &self.text - } - - pub fn resolution_mode(&self) -> ResolutionMode { - self.resolution_mode - } - - pub fn text_info(&self) -> &SourceTextInfo { - // try to get the text info from the parsed source and if - // not then create one in the cell - self - .maybe_parsed_source() - .and_then(|p| p.as_ref().ok()) - .map(|p| p.text_info_lazy()) - .unwrap_or_else(|| { - self - .text_info_cell - .get_or_init(|| SourceTextInfo::new(self.text.clone())) - }) - } - pub fn line_index(&self) -> Arc { - self.line_index.clone() - } - - pub fn maybe_headers(&self) -> Option<&HashMap> { - self.maybe_headers.as_ref() - } - - fn maybe_fs_version(&self) -> Option<&str> { - self.maybe_fs_version.as_deref() - } - - pub fn 
script_version(&self) -> String { - match (self.maybe_fs_version(), self.maybe_lsp_version()) { - (None, None) => "1".to_string(), - (None, Some(lsp_version)) => format!("1+{lsp_version}"), - (Some(fs_version), None) => fs_version.to_string(), - (Some(fs_version), Some(lsp_version)) => { - format!("{fs_version}+{lsp_version}") - } - } - } - - pub fn is_diagnosable(&self) -> bool { - media_type_is_diagnosable(self.media_type()) - } - - pub fn is_open(&self) -> bool { - self.open_data.is_some() - } - - pub fn maybe_types_dependency(&self) -> &Resolution { - if let Some(types_dep) = self.maybe_types_dependency.as_deref() { - &types_dep.dependency - } else { - &Resolution::None - } - } - - pub fn media_type(&self) -> MediaType { - self.media_type - } - - pub fn maybe_language_id(&self) -> Option { - self.maybe_language_id - } - - /// Returns the current language server client version if any. - pub fn maybe_lsp_version(&self) -> Option { - self.open_data.as_ref().map(|d| d.lsp_version) - } - - pub fn maybe_parsed_source( - &self, - ) -> Option<&Result> { - self.open_data.as_ref()?.maybe_parsed_source.as_ref() - } - - pub async fn maybe_test_module(&self) -> Option> { - self.maybe_test_module_fut.clone()?.await - } - - pub fn maybe_navigation_tree(&self) -> Option> { - self.maybe_navigation_tree.lock().clone() - } - - pub fn dependencies(&self) -> &IndexMap { - self.dependencies.as_ref() - } - - /// If the supplied position is within a dependency range, return the resolved - /// string specifier for the dependency, the resolved dependency and the range - /// in the source document of the specifier. - pub fn get_maybe_dependency( - &self, - position: &lsp::Position, - ) -> Option<(String, deno_graph::Dependency, deno_graph::Range)> { - let position = deno_graph::Position { - line: position.line as usize, - character: position.character as usize, - }; - self.dependencies().iter().find_map(|(s, dep)| { - dep - .includes(position) - .map(|r| (s.clone(), dep.clone(), r.clone())) - }) - } - - pub fn cache_navigation_tree( - &self, - navigation_tree: Arc, - ) { - *self.maybe_navigation_tree.lock() = Some(navigation_tree); - } - - pub fn cache_semantic_tokens_full( - &self, - semantic_tokens: lsp::SemanticTokens, - ) { - if let Some(open_data) = self.open_data.as_ref() { - *open_data.maybe_semantic_tokens.lock() = Some(semantic_tokens); - } - } -} - fn resolve_media_type( specifier: &ModuleSpecifier, maybe_headers: Option<&HashMap>, @@ -944,759 +1717,10 @@ fn resolve_media_type( MediaType::from_specifier(specifier) } -pub fn to_lsp_range(referrer: &deno_graph::Range) -> lsp::Range { - lsp::Range { - start: lsp::Position { - line: referrer.range.start.line as u32, - character: referrer.range.start.character as u32, - }, - end: lsp::Position { - line: referrer.range.end.line as u32, - character: referrer.range.end.character as u32, - }, - } -} - -#[derive(Debug, Default)] -struct FileSystemDocuments { - docs: DashMap>, - dirty: AtomicBool, -} - -impl FileSystemDocuments { - pub fn get( - &self, - specifier: &ModuleSpecifier, - resolver: &Arc, - config: &Arc, - cache: &Arc, - file_referrer: Option<&ModuleSpecifier>, - ) -> Option> { - let file_referrer = Some(specifier) - .filter(|s| cache.is_valid_file_referrer(s)) - .or(file_referrer); - let new_fs_version = calculate_fs_version(cache, specifier, file_referrer); - let old_doc = self.docs.get(specifier).map(|v| v.value().clone()); - let dirty = match &old_doc { - None => true, - Some(old_doc) => { - match (old_doc.maybe_fs_version(), 
new_fs_version.as_deref()) { - (None, None) => { - matches!(specifier.scheme(), "file" | "http" | "https") - } - (old, new) => old != new, - } - } - }; - if dirty { - NodeResolutionThreadLocalCache::clear(); - // attempt to update the file on the file system - self.refresh_document(specifier, resolver, config, cache, file_referrer) - } else { - old_doc - } - } - - /// Adds or updates a document by reading the document from the file system - /// returning the document. - fn refresh_document( - &self, - specifier: &ModuleSpecifier, - resolver: &Arc, - config: &Arc, - cache: &Arc, - file_referrer: Option<&ModuleSpecifier>, - ) -> Option> { - let doc = if specifier.scheme() == "file" { - let path = url_to_file_path(specifier).ok()?; - let bytes = fs::read(path).ok()?; - let content = bytes_to_content( - specifier, - MediaType::from_specifier(specifier), - bytes, - None, - ) - .ok()?; - Document::new( - specifier.clone(), - content.into(), - None, - None, - None, - resolver.clone(), - config.clone(), - cache, - file_referrer.cloned(), - ) - } else if specifier.scheme() == "data" { - let source = deno_media_type::data_url::RawDataUrl::parse(specifier) - .ok()? - .decode() - .ok()?; - Document::new( - specifier.clone(), - source.into(), - None, - None, - None, - resolver.clone(), - config.clone(), - cache, - file_referrer.cloned(), - ) - } else { - let http_cache = cache.for_specifier(file_referrer); - let cache_key = http_cache.cache_item_key(specifier).ok()?; - let cached_file = http_cache.get(&cache_key, None).ok()??; - let (_, maybe_charset) = - deno_graph::source::resolve_media_type_and_charset_from_headers( - specifier, - Some(&cached_file.metadata.headers), - ); - let media_type = resolve_media_type( - specifier, - Some(&cached_file.metadata.headers), - None, - ); - let content = bytes_to_content( - specifier, - media_type, - cached_file.content.into_owned(), - maybe_charset, - ) - .ok()?; - Document::new( - specifier.clone(), - content.into(), - None, - None, - Some(cached_file.metadata.headers), - resolver.clone(), - config.clone(), - cache, - file_referrer.cloned(), - ) - }; - self.docs.insert(specifier.clone(), doc.clone()); - self.set_dirty(true); - Some(doc) - } - - pub fn remove_document( - &self, - specifier: &ModuleSpecifier, - ) -> Option> { - Some(self.docs.remove(specifier)?.1) - } - - /// Sets the dirty flag to the provided value and returns the previous value. - pub fn set_dirty(&self, dirty: bool) -> bool { - self.dirty.swap(dirty, Ordering::Relaxed) - } -} - -/// Specify the documents to include on a `documents.documents(...)` call. -#[derive(Debug, Clone, Copy)] -pub enum DocumentsFilter { - /// Includes all the documents (diagnosable & non-diagnosable, open & file system). - All, - /// Includes all the diagnosable documents (open & file system). - AllDiagnosable, - /// Includes only the diagnosable documents that are open. - OpenDiagnosable, -} - -#[derive(Debug, Default, Clone)] -pub struct Documents { - /// The DENO_DIR that the documents looks for non-file based modules. - cache: Arc, - config: Arc, - /// A resolver that takes into account currently loaded import map and JSX - /// settings. - resolver: Arc, - /// A flag that indicates that stated data is potentially invalid and needs to - /// be recalculated before being considered valid. - dirty: bool, - /// A map of documents that are "open" in the language server. - open_docs: HashMap>, - /// Documents stored on the file system. 
- file_system_docs: Arc, - dep_info_by_scope: Arc, Arc>>, -} - -impl Documents { - /// "Open" a document from the perspective of the editor, meaning that - /// requests for information from the document will come from the in-memory - /// representation received from the language server client, versus reading - /// information from the disk. - pub fn open( - &mut self, - specifier: ModuleSpecifier, - version: i32, - language_id: LanguageId, - content: Arc, - file_referrer: Option, - ) -> Arc { - let document = Document::new( - specifier.clone(), - content, - Some(version), - Some(language_id), - // todo(dsherret): don't we want to pass in the headers from - // the cache for remote modules here in order to get the - // x-typescript-types? - None, - self.resolver.clone(), - self.config.clone(), - &self.cache, - file_referrer, - ); - - self.file_system_docs.remove_document(&specifier); - self.file_system_docs.set_dirty(true); - - self.open_docs.insert(specifier, document.clone()); - self.dirty = true; - document - } - - /// Apply language server content changes to an open document. - pub fn change( - &mut self, - specifier: &ModuleSpecifier, - version: i32, - changes: Vec, - ) -> Result, AnyError> { - let doc = self - .open_docs - .get(specifier) - .cloned() - .or_else(|| self.file_system_docs.remove_document(specifier)) - .map(Ok) - .unwrap_or_else(|| { - Err(JsErrorBox::new( - "NotFound", - format!("The specifier \"{specifier}\" was not found."), - )) - })?; - self.dirty = true; - let doc = doc.with_change(version, changes)?; - self.open_docs.insert(doc.specifier().clone(), doc.clone()); - Ok(doc) - } - - pub fn save(&mut self, specifier: &ModuleSpecifier) { - let doc = self - .open_docs - .get(specifier) - .cloned() - .or_else(|| self.file_system_docs.remove_document(specifier)); - let Some(doc) = doc else { - return; - }; - self.dirty = true; - let doc = doc.saved(&self.cache); - self.open_docs.insert(doc.specifier().clone(), doc.clone()); - } - - /// Close an open document, this essentially clears any editor state that is - /// being held, and the document store will revert to the file system if - /// information about the document is required. 
- pub fn close(&mut self, specifier: &ModuleSpecifier) { - if let Some(document) = self.open_docs.remove(specifier) { - let document = document.closed(&self.cache); - self - .file_system_docs - .docs - .insert(specifier.clone(), document); - - self.dirty = true; - } - } - - pub fn release(&self, specifier: &ModuleSpecifier) { - self.file_system_docs.remove_document(specifier); - self.file_system_docs.set_dirty(true); - } - - pub fn get_file_referrer<'a>( - &self, - specifier: &'a ModuleSpecifier, - ) -> Option> { - if self.is_valid_file_referrer(specifier) { - return Some(Cow::Borrowed(specifier)); - } - self - .get(specifier) - .and_then(|d| d.file_referrer().cloned().map(Cow::Owned)) - } - - pub fn is_valid_file_referrer(&self, specifier: &ModuleSpecifier) -> bool { - self.cache.is_valid_file_referrer(specifier) - } - - pub fn resolve_document_specifier( - &self, - specifier: &ModuleSpecifier, - file_referrer: Option<&ModuleSpecifier>, - ) -> Option { - let specifier = if let Ok(jsr_req_ref) = - JsrPackageReqReference::from_specifier(specifier) - { - Cow::Owned( - self - .resolver - .jsr_to_resource_url(&jsr_req_ref, file_referrer)?, - ) - } else { - Cow::Borrowed(specifier) - }; - if !DOCUMENT_SCHEMES.contains(&specifier.scheme()) { - return None; - } - self.resolver.resolve_redirects(&specifier, file_referrer) - } - - /// Return `true` if the specifier can be resolved to a document. - pub fn exists( - &self, - specifier: &ModuleSpecifier, - file_referrer: Option<&ModuleSpecifier>, - ) -> bool { - let specifier = self.resolve_document_specifier(specifier, file_referrer); - if let Some(specifier) = specifier { - if self.open_docs.contains_key(&specifier) { - return true; - } - if specifier.scheme() == "data" { - return true; - } - if specifier.scheme() == "file" { - return url_to_file_path(&specifier) - .map(|p| p.is_file()) - .unwrap_or(false); - } - if self.cache.for_specifier(file_referrer).contains(&specifier) { - return true; - } - } - false - } - - pub fn dep_info_by_scope( - &mut self, - ) -> Arc, Arc>> { - self.calculate_dep_info_if_dirty(); - self.dep_info_by_scope.clone() - } - - pub fn scopes_with_node_specifier(&self) -> HashSet> { - self - .dep_info_by_scope - .iter() - .filter(|(_, i)| i.has_node_specifier) - .map(|(s, _)| s.clone()) - .collect::>() - } - - /// Return a document for the specifier. - pub fn get(&self, specifier: &ModuleSpecifier) -> Option> { - if let Some(document) = self.open_docs.get(specifier) { - Some(document.clone()) - } else { - let old_doc = self - .file_system_docs - .docs - .get(specifier) - .map(|d| d.value().clone()); - if let Some(old_doc) = old_doc { - self.file_system_docs.get( - specifier, - &self.resolver, - &self.config, - &self.cache, - old_doc.file_referrer(), - ) - } else { - None - } - } - } - - /// Return a document for the specifier. - pub fn get_or_load( - &self, - specifier: &ModuleSpecifier, - file_referrer: Option<&ModuleSpecifier>, - ) -> Option> { - let specifier = - self.resolve_document_specifier(specifier, file_referrer)?; - if let Some(document) = self.open_docs.get(&specifier) { - Some(document.clone()) - } else { - self.file_system_docs.get( - &specifier, - &self.resolver, - &self.config, - &self.cache, - file_referrer, - ) - } - } - - /// Return a collection of documents that are contained in the document store - /// based on the provided filter. 
- pub fn documents(&self, filter: DocumentsFilter) -> Vec> { - match filter { - DocumentsFilter::OpenDiagnosable => self - .open_docs - .values() - .filter_map(|doc| { - if doc.is_diagnosable() { - Some(doc.clone()) - } else { - None - } - }) - .collect(), - DocumentsFilter::AllDiagnosable | DocumentsFilter::All => { - let diagnosable_only = - matches!(filter, DocumentsFilter::AllDiagnosable); - // it is technically possible for a Document to end up in both the open - // and closed documents so we need to ensure we don't return duplicates - let mut seen_documents = HashSet::new(); - self - .open_docs - .values() - .cloned() - .chain(self.file_system_docs.docs.iter().map(|v| v.value().clone())) - .filter_map(|doc| { - // this prefers the open documents - if seen_documents.insert(doc.specifier().clone()) - && (!diagnosable_only || doc.is_diagnosable()) - { - Some(doc) - } else { - None - } - }) - .collect() - } - } - } - - /// For a given set of string specifiers, resolve each one from the graph, - /// for a given referrer. This is used to provide resolution information to - /// tsc when type checking. - #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] - pub fn resolve( - &self, - // (is_cjs: bool, raw_specifier: String) - raw_specifiers: &[(bool, String)], - referrer: &ModuleSpecifier, - file_referrer: Option<&ModuleSpecifier>, - ) -> Vec> { - let referrer_doc = self.get(referrer); - let file_referrer = referrer_doc - .as_ref() - .and_then(|d| d.file_referrer()) - .or(file_referrer); - let dependencies = referrer_doc.as_ref().map(|d| d.dependencies()); - let mut results = Vec::new(); - for (is_cjs, raw_specifier) in raw_specifiers { - let resolution_mode = match is_cjs { - true => ResolutionMode::Require, - false => ResolutionMode::Import, - }; - if raw_specifier.starts_with("asset:") { - if let Ok(specifier) = ModuleSpecifier::parse(raw_specifier) { - let media_type = MediaType::from_specifier(&specifier); - results.push(Some((specifier, media_type))); - } else { - results.push(None); - } - } else if let Some(dep) = - dependencies.as_ref().and_then(|d| d.get(raw_specifier)) - { - if let Some(specifier) = dep.maybe_type.maybe_specifier() { - results.push(self.resolve_dependency( - specifier, - referrer, - resolution_mode, - file_referrer, - )); - } else if let Some(specifier) = dep.maybe_code.maybe_specifier() { - results.push(self.resolve_dependency( - specifier, - referrer, - resolution_mode, - file_referrer, - )); - } else { - results.push(None); - } - } else if let Ok(specifier) = - self.resolver.as_cli_resolver(file_referrer).resolve( - raw_specifier, - referrer, - deno_graph::Position::zeroed(), - resolution_mode, - NodeResolutionKind::Types, - ) - { - results.push(self.resolve_dependency( - &specifier, - referrer, - resolution_mode, - file_referrer, - )); - } else { - results.push(None); - } - } - results - } - - pub fn update_config( - &mut self, - config: &Config, - resolver: &Arc, - cache: &LspCache, - workspace_files: &IndexSet, - ) { - self.config = Arc::new(config.clone()); - self.cache = Arc::new(cache.clone()); - self.resolver = resolver.clone(); - - node_resolver::PackageJsonThreadLocalCache::clear(); - NodeResolutionThreadLocalCache::clear(); - - { - let fs_docs = &self.file_system_docs; - // Clean up non-existent documents. - fs_docs.docs.retain(|specifier, _| { - let Ok(path) = url_to_file_path(specifier) else { - // Remove non-file schemed docs (deps). They may not be dependencies - // anymore after updating resolvers. 
- return false; - }; - if !config.specifier_enabled(specifier) { - return false; - } - path.is_file() - }); - let mut open_docs = std::mem::take(&mut self.open_docs); - for doc in open_docs.values_mut() { - if !config.specifier_enabled(doc.specifier()) { - continue; - } - *doc = doc.with_new_config(self.resolver.clone(), self.config.clone()); - } - for mut doc in self.file_system_docs.docs.iter_mut() { - if !config.specifier_enabled(doc.specifier()) { - continue; - } - *doc.value_mut() = - doc.with_new_config(self.resolver.clone(), self.config.clone()); - } - self.open_docs = open_docs; - let mut preload_count = 0; - for specifier in workspace_files { - if !config.specifier_enabled(specifier) { - continue; - } - if preload_count >= config.settings.unscoped.document_preload_limit { - break; - } - preload_count += 1; - if !self.open_docs.contains_key(specifier) - && !fs_docs.docs.contains_key(specifier) - { - fs_docs.refresh_document( - specifier, - &self.resolver, - &self.config, - &self.cache, - None, - ); - } - } - fs_docs.set_dirty(true); - } - self.dirty = true; - } - - /// Iterate through the documents, building a map where the key is a unique - /// document and the value is a set of specifiers that depend on that - /// document. - fn calculate_dep_info_if_dirty(&mut self) { - let mut dep_info_by_scope: BTreeMap<_, ScopeDepInfo> = Default::default(); - let is_fs_docs_dirty = self.file_system_docs.set_dirty(false); - if !is_fs_docs_dirty && !self.dirty { - return; - } - NodeResolutionThreadLocalCache::clear(); - let mut visit_doc = |doc: &Arc| { - let scope = doc.scope(); - let dep_info = dep_info_by_scope.entry(scope.cloned()).or_default(); - for dependency in doc.dependencies().values() { - let code_specifier = dependency.get_code(); - let type_specifier = dependency.get_type(); - if let Some(dep) = code_specifier { - if dep.scheme() == "node" { - dep_info.has_node_specifier = true; - } - if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { - dep_info.npm_reqs.insert(reference.into_inner().req); - } - } - if let Some(dep) = type_specifier { - if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { - dep_info.npm_reqs.insert(reference.into_inner().req); - } - } - if dependency.maybe_deno_types_specifier.is_some() { - if let (Some(code_specifier), Some(type_specifier)) = - (code_specifier, type_specifier) - { - if MediaType::from_specifier(type_specifier).is_declaration() { - dep_info - .deno_types_to_code_resolutions - .insert(type_specifier.clone(), code_specifier.clone()); - } - } - } - } - if let Some(dep) = doc.maybe_types_dependency().maybe_specifier() { - if let Ok(reference) = NpmPackageReqReference::from_specifier(dep) { - dep_info.npm_reqs.insert(reference.into_inner().req); - } - } - }; - for entry in self.file_system_docs.docs.iter() { - visit_doc(entry.value()) - } - for doc in self.open_docs.values() { - visit_doc(doc); - } - - for (scope, config_data) in self.config.tree.data_by_scope().as_ref() { - let dep_info = dep_info_by_scope.entry(Some(scope.clone())).or_default(); - (|| { - let member_dir = &config_data.member_dir; - let jsx_config = - member_dir.to_maybe_jsx_import_source_config().ok()??; - let import_source_types = jsx_config.import_source_types.as_ref()?; - let import_source = jsx_config.import_source.as_ref()?; - let cli_resolver = self.resolver.as_cli_resolver(Some(scope)); - let type_specifier = cli_resolver - .resolve( - &import_source_types.specifier, - &import_source_types.base, - deno_graph::Position::zeroed(), - // 
todo(dsherret): this is wrong because it doesn't consider CJS referrers - ResolutionMode::Import, - NodeResolutionKind::Types, - ) - .ok()?; - let code_specifier = cli_resolver - .resolve( - &import_source.specifier, - &import_source.base, - deno_graph::Position::zeroed(), - // todo(dsherret): this is wrong because it doesn't consider CJS referrers - ResolutionMode::Import, - NodeResolutionKind::Execution, - ) - .ok()?; - dep_info - .deno_types_to_code_resolutions - .insert(type_specifier, code_specifier); - Some(()) - })(); - // fill the reqs from the lockfile - if let Some(lockfile) = config_data.lockfile.as_ref() { - let lockfile = lockfile.lock(); - for dep_req in lockfile.content.packages.specifiers.keys() { - if dep_req.kind == deno_semver::package::PackageKind::Npm { - dep_info.npm_reqs.insert(dep_req.req.clone()); - } - } - } - } - - // Ensure a @types/node package exists when any module uses a node: specifier. - // Unlike on the command line, here we just add @types/node to the npm package - // requirements since this won't end up in the lockfile. - for dep_info in dep_info_by_scope.values_mut() { - if dep_info.has_node_specifier - && !dep_info.npm_reqs.iter().any(|r| r.name == "@types/node") - { - dep_info - .npm_reqs - .insert(PackageReq::from_str("@types/node").unwrap()); - } - } - - self.dep_info_by_scope = Arc::new( - dep_info_by_scope - .into_iter() - .map(|(s, i)| (s, Arc::new(i))) - .collect(), - ); - self.dirty = false; - } - - #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] - pub fn resolve_dependency( - &self, - specifier: &ModuleSpecifier, - referrer: &ModuleSpecifier, - resolution_mode: ResolutionMode, - file_referrer: Option<&ModuleSpecifier>, - ) -> Option<(ModuleSpecifier, MediaType)> { - if let Some(module_name) = specifier.as_str().strip_prefix("node:") { - if deno_node::is_builtin_node_module(module_name) { - // return itself for node: specifiers because during type checking - // we resolve to the ambient modules in the @types/node package - // rather than deno_std/node - return Some((specifier.clone(), MediaType::Dts)); - } - } - let mut specifier = specifier.clone(); - let mut media_type = None; - if let Ok(npm_ref) = NpmPackageReqReference::from_specifier(&specifier) { - let (s, mt) = self.resolver.npm_to_file_url( - &npm_ref, - referrer, - resolution_mode, - file_referrer, - )?; - specifier = s; - media_type = Some(mt); - } - let Some(doc) = self.get_or_load(&specifier, file_referrer) else { - let media_type = - media_type.unwrap_or_else(|| MediaType::from_specifier(&specifier)); - return Some((specifier, media_type)); - }; - if let Some(types) = doc.maybe_types_dependency().maybe_specifier() { - self.resolve_dependency( - types, - &specifier, - doc.resolution_mode(), - file_referrer, - ) - } else { - Some((doc.specifier().clone(), doc.media_type())) - } - } -} - /// Loader that will look at the open documents. 
pub struct OpenDocumentsGraphLoader<'a> { pub inner_loader: &'a mut dyn deno_graph::source::Loader, - pub open_docs: &'a HashMap>, + pub open_modules: &'a HashMap, Arc>, } impl OpenDocumentsGraphLoader<'_> { @@ -1705,11 +1729,11 @@ impl OpenDocumentsGraphLoader<'_> { specifier: &ModuleSpecifier, ) -> Option { if specifier.scheme() == "file" { - if let Some(doc) = self.open_docs.get(specifier) { + if let Some(doc) = self.open_modules.get(specifier) { return Some( future::ready(Ok(Some(deno_graph::source::LoadResponse::Module { - content: Arc::from(doc.content().clone()), - specifier: doc.specifier().clone(), + content: Arc::from(doc.text.as_bytes().to_owned()), + specifier: doc.specifier.as_ref().clone(), maybe_headers: None, }))) .boxed_local(), @@ -1873,61 +1897,69 @@ mod tests { use super::*; use crate::lsp::cache::LspCache; - async fn setup() -> (Documents, LspCache, TempDir) { + async fn setup() -> (DocumentModules, LspCache, TempDir) { let temp_dir = TempDir::new(); temp_dir.create_dir_all(".deno_dir"); let cache = LspCache::new(Some(temp_dir.url().join(".deno_dir").unwrap())); let config = Config::default(); let resolver = Arc::new(LspResolver::from_config(&config, &cache, None).await); - let mut documents = Documents::default(); - documents.update_config(&config, &resolver, &cache, &Default::default()); - (documents, cache, temp_dir) + let mut document_modules = DocumentModules::default(); + document_modules.update_config( + &config, + &resolver, + &cache, + &Default::default(), + ); + (document_modules, cache, temp_dir) } #[tokio::test] async fn test_documents_open_close() { - let (mut documents, _, _) = setup().await; - let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap(); + let (mut document_modules, _, _) = setup().await; + let uri = Uri::from_str("file:///a.ts").unwrap(); let content = r#"import * as b from "./b.ts"; console.log(b); "#; - let document = documents.open( - specifier.clone(), + document_modules.open_document( + uri.clone(), 1, "javascript".parse().unwrap(), content.into(), - None, ); + let document = document_modules + .documents + .get(&uri) + .unwrap() + .open() + .cloned() + .unwrap(); + assert_eq!(document.uri.as_ref(), &uri); + assert_eq!(document.text.as_ref(), content); + assert_eq!(document.version, 1); + assert_eq!(document.language_id, LanguageId::JavaScript); assert!(document.is_diagnosable()); - assert!(document.is_open()); - assert!(document.maybe_parsed_source().is_some()); - assert!(document.maybe_lsp_version().is_some()); - documents.close(&specifier); - // We can't use `Documents::get()` here, it will look through the real FS. 
- let document = documents.file_system_docs.docs.get(&specifier).unwrap(); - assert!(!document.is_open()); - assert!(document.maybe_parsed_source().is_none()); - assert!(document.maybe_lsp_version().is_none()); + assert!(document.is_file_like()); + document_modules.close_document(&uri).unwrap(); + assert!(document_modules.documents.get(&uri).is_none()); } #[tokio::test] async fn test_documents_change() { - let (mut documents, _, _) = setup().await; - let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap(); + let (mut document_modules, _, _) = setup().await; + let uri = Uri::from_str("file:///a.ts").unwrap(); let content = r#"import * as b from "./b.ts"; console.log(b); "#; - documents.open( - specifier.clone(), + document_modules.open_document( + uri.clone(), 1, "javascript".parse().unwrap(), content.into(), - None, ); - documents - .change( - &specifier, + document_modules + .change_document( + &uri, 2, vec![lsp::TextDocumentContentChangeEvent { range: Some(lsp::Range { @@ -1946,45 +1978,23 @@ console.log(b); ) .unwrap(); assert_eq!( - documents.get(&specifier).unwrap().content().as_ref(), + document_modules + .documents + .get(&uri) + .unwrap() + .text() + .as_ref() as &str, r#"import * as b from "./b.ts"; console.log(b, "hello deno"); "# ); } - #[tokio::test] - async fn test_documents_ensure_no_duplicates() { - // it should never happen that a user of this API causes this to happen, - // but we'll guard against it anyway - let (mut documents, _, temp_dir) = setup().await; - let file_path = temp_dir.path().join("file.ts"); - let file_specifier = temp_dir.url().join("file.ts").unwrap(); - file_path.write(""); - - // open the document - documents.open( - file_specifier.clone(), - 1, - LanguageId::TypeScript, - "".into(), - None, - ); - - // make a clone of the document store and close the document in that one - let mut documents2 = documents.clone(); - documents2.close(&file_specifier); - - // At this point the document will be in both documents and the shared file system documents. 
- // Now make sure that the original documents doesn't return both copies - assert_eq!(documents.documents(DocumentsFilter::All).len(), 1); - } - #[tokio::test] async fn test_documents_refresh_dependencies_config_change() { // it should never happen that a user of this API causes this to happen, // but we'll guard against it anyway - let (mut documents, cache, temp_dir) = setup().await; + let (mut document_modules, cache, temp_dir) = setup().await; let file1_path = temp_dir.path().join("file1.ts"); let file1_specifier = temp_dir.url().join("file1.ts").unwrap(); @@ -2002,11 +2012,19 @@ console.log(b, "hello deno"); let workspace_settings = serde_json::from_str(r#"{ "enable": true }"#).unwrap(); config.set_workspace_settings(workspace_settings, vec![]); - let workspace_files = + let workspace_files = Arc::new( [&file1_specifier, &file2_specifier, &file3_specifier] .into_iter() - .cloned() - .collect::>(); + .map(|s| s.to_file_path().unwrap()) + .collect::>(), + ); + + let document = document_modules.open_document( + url_to_uri(&file1_specifier).unwrap(), + 1, + LanguageId::TypeScript, + "import {} from 'test';".into(), + ); // set the initial import map and point to file 2 { @@ -2020,7 +2038,7 @@ console.log(b, "hello deno"); }, }) .to_string(), - config.root_uri().unwrap().join("deno.json").unwrap(), + config.root_url().unwrap().join("deno.json").unwrap(), ) .unwrap(), ) @@ -2028,20 +2046,19 @@ console.log(b, "hello deno"); let resolver = Arc::new(LspResolver::from_config(&config, &cache, None).await); - documents.update_config(&config, &resolver, &cache, &workspace_files); - - // open the document - let document = documents.open( - file1_specifier.clone(), - 1, - LanguageId::TypeScript, - "import {} from 'test';".into(), - None, + document_modules.update_config( + &config, + &resolver, + &cache, + &workspace_files, ); + let module = document_modules + .primary_module(&Document::Open(document.clone())) + .unwrap(); assert_eq!( - document - .dependencies() + module + .dependencies .get("test") .unwrap() .maybe_code @@ -2063,7 +2080,7 @@ console.log(b, "hello deno"); }, }) .to_string(), - config.root_uri().unwrap().join("deno.json").unwrap(), + config.root_url().unwrap().join("deno.json").unwrap(), ) .unwrap(), ) @@ -2071,13 +2088,20 @@ console.log(b, "hello deno"); let resolver = Arc::new(LspResolver::from_config(&config, &cache, None).await); - documents.update_config(&config, &resolver, &cache, &workspace_files); + document_modules.update_config( + &config, + &resolver, + &cache, + &workspace_files, + ); // check the document's dependencies - let document = documents.get(&file1_specifier).unwrap(); + let module = document_modules + .primary_module(&Document::Open(document.clone())) + .unwrap(); assert_eq!( - document - .dependencies() + module + .dependencies .get("test") .unwrap() .maybe_code diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 3c679003b9..28f9ce828f 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -16,7 +16,6 @@ use deno_ast::MediaType; use deno_cache_dir::file_fetcher::CacheSetting; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; -use deno_core::resolve_url; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; @@ -35,6 +34,7 @@ use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::RootCertStoreProvider; use deno_semver::jsr::JsrPackageReqReference; use indexmap::Equivalent; +use indexmap::IndexMap; use indexmap::IndexSet; use log::error; use 
node_resolver::NodeResolutionKind; @@ -70,13 +70,10 @@ use super::diagnostics::DiagnosticDataSpecifier; use super::diagnostics::DiagnosticServerUpdateMessage; use super::diagnostics::DiagnosticsServer; use super::diagnostics::DiagnosticsState; -use super::documents::to_lsp_range; -use super::documents::AssetOrDocument; use super::documents::Document; -use super::documents::Documents; -use super::documents::DocumentsFilter; +use super::documents::DocumentModule; +use super::documents::DocumentModules; use super::documents::LanguageId; -use super::documents::ASSET_DOCUMENTS; use super::jsr::CliJsrSearchApi; use super::logging::lsp_log; use super::logging::lsp_warn; @@ -92,9 +89,8 @@ use super::testing; use super::text; use super::tsc; use super::tsc::ChangeKind; -use super::tsc::GetCompletionDetailsArgs; use super::tsc::TsServer; -use super::urls; +use super::urls::uri_to_file_path; use super::urls::uri_to_url; use super::urls::url_to_uri; use crate::args::Flags; @@ -107,14 +103,12 @@ use crate::http_util::HttpClientProvider; use crate::lsp::config::ConfigWatchedFileType; use crate::lsp::logging::init_log_file; use crate::lsp::tsc::file_text_changes_to_workspace_edit; -use crate::lsp::urls::LspUrlKind; use crate::sys::CliSys; use crate::tools::fmt::format_file; use crate::tools::fmt::format_parsed_source; use crate::tools::upgrade::check_for_upgrades_for_lsp; use crate::tools::upgrade::upgrade_check_enabled; use crate::util::fs::remove_dir_all_if_exists; -use crate::util::path::is_importable_ext; use crate::util::path::to_percent_decoded_str; use crate::util::sync::AsyncFlag; @@ -126,6 +120,46 @@ impl RootCertStoreProvider for LspRootCertStoreProvider { } } +/// Used as a filtering option for `Inner::get_document()`. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Enabled { + /// Return Ok(None) if not enabled. + Filter, + /// Allow even if not enabled. + Ignore, +} + +/// Used as a filtering option for `Inner::get_document()`. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Exists { + /// Return Err(LspError::InvalidParams(...)) if non-existent. + Enforce, + /// Return Ok(None) if non-existent. + Filter, +} + +/// Used as a filtering option for `Inner::get_document()`. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Diagnosable { + /// Return Ok(None) if not diagnosable. + Filter, + /// Allow even if not diagnosable. + Ignore, +} + +pub fn to_lsp_range(referrer: &deno_graph::Range) -> lsp_types::Range { + lsp_types::Range { + start: lsp_types::Position { + line: referrer.range.start.line as u32, + character: referrer.range.start.character as u32, + }, + end: lsp_types::Position { + line: referrer.range.end.line as u32, + character: referrer.range.end.character as u32, + }, + } +} + #[derive(Debug, Clone)] pub struct LanguageServer { client: Client, @@ -145,7 +179,7 @@ pub struct LanguageServer { pub struct StateSnapshot { pub project_version: usize, pub config: Arc, - pub documents: Arc, + pub document_modules: DocumentModules, pub resolver: Arc, } @@ -198,7 +232,7 @@ pub struct Inner { diagnostics_server: diagnostics::DiagnosticsServer, /// The collection of documents that the server is currently handling, either /// on disk or "open" within the client. 
- pub documents: Documents, + pub document_modules: DocumentModules, http_client_provider: Arc, initial_cwd: PathBuf, jsr_search_api: CliJsrSearchApi, @@ -215,9 +249,7 @@ pub struct Inner { task_queue: LanguageServerTaskQueue, ts_fixable_diagnostics: tokio::sync::OnceCell>, pub ts_server: Arc, - /// A map of specifiers and URLs used to translate over the LSP. - pub url_map: urls::LspUrlMap, - workspace_files: IndexSet, + workspace_files: Arc>, /// Set to `self.config.settings.enable_settings_hash()` after /// refreshing `self.workspace_files`. workspace_files_hash: u64, @@ -251,18 +283,18 @@ impl LanguageServer { async fn create_graph_for_caching( factory: CliFactory, roots: Vec, - open_docs: Vec>, + open_modules: Vec>, ) -> Result<(), AnyError> { - let open_docs = open_docs + let open_modules = open_modules .into_iter() - .map(|d| (d.specifier().clone(), d)) + .map(|m| (m.specifier.clone(), m)) .collect::>(); let module_graph_builder = factory.module_graph_builder().await?; let module_graph_creator = factory.module_graph_creator().await?; let mut inner_loader = module_graph_builder.create_graph_loader(); let mut loader = crate::lsp::documents::OpenDocumentsGraphLoader { inner_loader: &mut inner_loader, - open_docs: &open_docs, + open_modules: &open_modules, }; let graph = module_graph_creator .create_graph_with_loader( @@ -316,9 +348,9 @@ impl LanguageServer { // cache outside the lock let cli_factory = result.cli_factory; let roots = result.roots; - let open_docs = result.open_docs; + let open_modules = result.open_modules; let handle = spawn(async move { - create_graph_for_caching(cli_factory, roots, open_docs).await + create_graph_for_caching(cli_factory, roots, open_modules).await }); if let Err(err) = handle.await.unwrap() { @@ -482,7 +514,6 @@ impl Inner { CliJsrSearchApi::new(module_registry.file_fetcher.clone()); let npm_search_api = CliNpmSearchApi::new(module_registry.file_fetcher.clone()); - let documents = Documents::default(); let config = Config::default(); let ts_server = Arc::new(TsServer::new(performance.clone())); let diagnostics_state = Arc::new(DiagnosticsState::default()); @@ -502,7 +533,7 @@ impl Inner { config, diagnostics_state, diagnostics_server, - documents, + document_modules: Default::default(), http_client_provider, initial_cwd: initial_cwd.clone(), jsr_search_api, @@ -516,97 +547,116 @@ impl Inner { resolver: Default::default(), ts_fixable_diagnostics: Default::default(), ts_server, - url_map: Default::default(), workspace_files: Default::default(), workspace_files_hash: 0, _tracing: Default::default(), } } - /// Searches assets and documents for the provided - /// specifier erroring if it doesn't exist. 
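The replacement accessor introduced just below composes the three filter enums added earlier (`Enabled`, `Exists`, `Diagnosable`). As a rough, self-contained model of that composition, with stand-in types (a plain `HashMap` store, and an enabled flag kept on the document itself instead of consulting the config tree):

use std::collections::HashMap;
use std::sync::Arc;

#[derive(Clone, Copy, PartialEq, Eq)]
enum Enabled { Filter, Ignore }
#[derive(Clone, Copy, PartialEq, Eq)]
enum Exists { Enforce, Filter }
#[derive(Clone, Copy, PartialEq, Eq)]
enum Diagnosable { Filter, Ignore }

/// Stand-in for an open or server document.
struct Doc {
  enabled: bool,
  diagnosable: bool,
}

struct Store {
  docs: HashMap<String, Arc<Doc>>,
}

impl Store {
  /// Mirrors the shape of `Inner::get_document()`: each option either
  /// filters the result down to `Ok(None)` or turns a miss into an error.
  fn get_document(
    &self,
    uri: &str,
    enabled: Enabled,
    exists: Exists,
    diagnosable: Diagnosable,
  ) -> Result<Option<Arc<Doc>>, String> {
    let Some(doc) = self.docs.get(uri) else {
      return match exists {
        Exists::Enforce => Err(format!("Unable to find document for: {uri}")),
        Exists::Filter => Ok(None),
      };
    };
    if enabled == Enabled::Filter && !doc.enabled {
      return Ok(None);
    }
    if diagnosable == Diagnosable::Filter && !doc.diagnosable {
      return Ok(None);
    }
    Ok(Some(doc.clone()))
  }
}

fn main() {
  let mut docs = HashMap::new();
  docs.insert(
    "file:///main.ts".to_string(),
    Arc::new(Doc { enabled: true, diagnosable: true }),
  );
  let store = Store { docs };
  let found = store
    .get_document(
      "file:///main.ts",
      Enabled::Filter,
      Exists::Enforce,
      Diagnosable::Filter,
    )
    .unwrap();
  assert!(found.is_some());
}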
#[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] - pub fn get_asset_or_document( + pub fn get_document( &self, - specifier: &ModuleSpecifier, - ) -> LspResult { - self - .get_maybe_asset_or_document(specifier) - .map(Ok) - .unwrap_or_else(|| { - Err(LspError::invalid_params(format!( - "Unable to find asset or document for: {specifier}" - ))) - }) + uri: &Uri, + enabled: Enabled, + exists: Exists, + diagnosable: Diagnosable, + ) -> LspResult> { + match enabled { + Enabled::Filter => { + if !self.config.uri_enabled(uri) { + return Ok(None); + } + } + Enabled::Ignore => {} + } + let Some(document) = self.document_modules.documents.get(uri) else { + match exists { + Exists::Enforce + if !uri.scheme().is_some_and(|s| s.eq_lowercase("deno")) => + { + return Err(LspError::invalid_params(format!( + "Unable to find document for: {}", + uri.as_str() + ))); + } + Exists::Filter => { + return Ok(None); + } + _ => return Ok(None), + } + }; + match diagnosable { + Diagnosable::Filter => { + if !document.is_diagnosable() { + return Ok(None); + } + } + Diagnosable::Ignore => {} + } + Ok(Some(document)) } - /// Searches assets and documents for the provided specifier. - pub fn get_maybe_asset_or_document( + #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] + pub fn get_primary_module( &self, - specifier: &ModuleSpecifier, - ) -> Option { - if specifier.scheme() == "asset" { - ASSET_DOCUMENTS.get(specifier).map(AssetOrDocument::Asset) - } else { - self.documents.get(specifier).map(AssetOrDocument::Document) - } + document: &Document, + ) -> LspResult>> { + let Some(module) = self.document_modules.primary_module(document) else { + if document + .uri() + .scheme() + .is_some_and(|s| s.eq_lowercase("deno")) + { + return Ok(None); + } + lsp_warn!( + "Unable to get module for document: {}", + document.uri().as_str(), + ); + return Err(LspError::internal_error()); + }; + Ok(Some(module)) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] pub async fn get_navigation_tree( &self, - specifier: &ModuleSpecifier, + module: &DocumentModule, token: &CancellationToken, - ) -> Result, AnyError> { + ) -> LspResult> { let mark = self.performance.mark_with_args( "lsp.get_navigation_tree", - json!({ "specifier": specifier }), + json!({ "uri": &module.specifier }), ); - let asset_or_doc = self.get_asset_or_document(specifier)?; - let navigation_tree = - if let Some(navigation_tree) = asset_or_doc.maybe_navigation_tree() { - navigation_tree - } else { - let navigation_tree: tsc::NavigationTree = self + let result = module + .navigation_tree + .get_or_try_init(|| async { + self .ts_server .get_navigation_tree( self.snapshot(), - specifier.clone(), - asset_or_doc.scope().cloned(), + &module.specifier, + module.scope.as_ref(), token, ) - .await?; - let navigation_tree = Arc::new(navigation_tree); - asset_or_doc.cache_navigation_tree(navigation_tree.clone()); - navigation_tree - }; + .await + .map(Arc::new) + .map_err(|err| { + if token.is_cancelled() { + LspError::request_cancelled() + } else { + error!( + "Error getting navigation tree for \"{}\": {:#}", + &module.specifier, err, + ); + LspError::internal_error() + } + }) + }) + .await + .map(Clone::clone); self.performance.measure(mark); - Ok(navigation_tree) - } - - fn is_diagnosable(&self, specifier: &ModuleSpecifier) -> bool { - if specifier.scheme() == "asset" { - matches!( - MediaType::from_specifier(specifier), - MediaType::JavaScript - | MediaType::Jsx - | MediaType::Mjs - | MediaType::Cjs - | MediaType::TypeScript - 
| MediaType::Tsx - | MediaType::Mts - | MediaType::Cts - | MediaType::Dts - | MediaType::Dmts - | MediaType::Dcts - ) - } else { - self - .documents - .get(specifier) - .map(|d| d.is_diagnosable()) - .unwrap_or(false) - } + result } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] @@ -614,7 +664,7 @@ impl Inner { Arc::new(StateSnapshot { project_version: self.project_version, config: Arc::new(self.config.clone()), - documents: Arc::new(self.documents.clone()), + document_modules: self.document_modules.clone(), resolver: self.resolver.snapshot(), }) } @@ -672,7 +722,7 @@ impl Inner { let global_cache_url = maybe_cache.and_then(|cache_str| { if let Ok(url) = Url::from_file_path(cache_str) { Some(url) - } else if let Some(root_uri) = self.config.root_uri() { + } else if let Some(root_uri) = self.config.root_url() { root_uri.join(cache_str).inspect_err(|err| lsp_warn!("Failed to resolve custom cache path: {err}")).ok() } else { lsp_warn!( @@ -686,7 +736,7 @@ impl Inner { let workspace_settings = self.config.workspace_settings(); let maybe_root_path = self .config - .root_uri() + .root_url() .and_then(|uri| url_to_file_path(uri).ok()); let root_cert_store = get_root_cert_store( maybe_root_path, @@ -727,7 +777,6 @@ impl Inner { pub fn update_cache(&mut self) { let mark = self.performance.mark("lsp.update_cache"); self.cache.update_config(&self.config); - self.url_map.set_cache(&self.cache); self.performance.measure(mark); } @@ -752,20 +801,10 @@ impl Inner { return; } let exists_enabled_document = self + .document_modules .documents - .documents(DocumentsFilter::OpenDiagnosable) - .into_iter() - .any(|doc| { - doc.maybe_language_id().is_some_and(|l| { - matches!( - l, - LanguageId::JavaScript - | LanguageId::Jsx - | LanguageId::TypeScript - | LanguageId::Tsx - ) - }) && self.config.specifier_enabled(doc.specifier()) - }); + .open_docs() + .any(|doc| doc.is_diagnosable() && self.config.uri_enabled(&doc.uri)); if !exists_enabled_document { return; } @@ -832,12 +871,13 @@ impl Inner { workspace_folders = folders .into_iter() .map(|folder| { - ( - self - .url_map - .uri_to_specifier(&folder.uri, LspUrlKind::Folder), - folder, - ) + let mut url = uri_to_url(&folder.uri); + if !url.path().ends_with('/') { + if let Ok(mut path_segments) = url.path_segments_mut() { + path_segments.push(""); + } + } + (Arc::new(url), folder) }) .collect(); } @@ -846,14 +886,21 @@ impl Inner { #[allow(deprecated)] if let Some(root_uri) = params.root_uri { if !workspace_folders.iter().any(|(_, f)| f.uri == root_uri) { - let root_url = - self.url_map.uri_to_specifier(&root_uri, LspUrlKind::Folder); - let name = root_url.path_segments().and_then(|s| s.last()); - let name = name.unwrap_or_default().to_string(); + let mut root_url = uri_to_url(&root_uri); + let name = root_url + .path_segments() + .and_then(|s| s.last()) + .unwrap_or_default() + .to_string(); + if !root_url.path().ends_with('/') { + if let Ok(mut path_segments) = root_url.path_segments_mut() { + path_segments.push(""); + } + } workspace_folders.insert( 0, ( - root_url, + Arc::new(root_url), WorkspaceFolder { uri: root_uri, name, @@ -893,7 +940,7 @@ impl Inner { } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] - fn walk_workspace(config: &Config) -> (IndexSet, bool) { + fn walk_workspace(config: &Config) -> (IndexSet, bool) { if !config.workspace_capable() { log::debug!("Skipped workspace walk due to client incapability."); return (Default::default(), false); @@ -905,7 +952,7 @@ impl Inner { let mut roots = config 
.workspace_folders .iter() - .filter_map(|p| url_to_file_path(&p.0).ok()) + .filter_map(|(_, f)| uri_to_file_path(&f.uri).ok()) .collect::>(); roots.sort(); let roots = roots @@ -929,9 +976,7 @@ impl Inner { for deno_json in ["deno.json", "deno.jsonc"] { let path = root_ancestor.join(deno_json); if path.exists() { - if let Ok(specifier) = ModuleSpecifier::from_file_path(path) { - workspace_files.insert(specifier); - } + workspace_files.insert(path); } } } @@ -948,16 +993,13 @@ impl Inner { } entry_count += 1; let path = parent_path.join(entry.path()); - let Ok(specifier) = ModuleSpecifier::from_file_path(&path) else { - continue; - }; let Ok(file_type) = entry.file_type() else { continue; }; let Some(file_name) = path.file_name() else { continue; }; - if config.settings.specifier_enabled(&specifier) == Some(false) { + if config.settings.path_enabled(&path) == Some(false) { continue; } if file_type.is_dir() { @@ -981,7 +1023,7 @@ impl Inner { continue; } if let Ok(read_dir) = std::fs::read_dir(&path) { - dir_subdirs.insert(specifier, (path, read_dir)); + dir_subdirs.insert(path, read_dir); } } else if file_type.is_file() || file_type.is_symlink() @@ -993,7 +1035,7 @@ impl Inner { if file_name.to_string_lossy().contains(".min.") { continue; } - let media_type = MediaType::from_specifier(&specifier); + let media_type = MediaType::from_path(&path); match media_type { MediaType::JavaScript | MediaType::Jsx @@ -1018,11 +1060,11 @@ impl Inner { } } } - dir_files.insert(specifier); + dir_files.insert(path); } } workspace_files.extend(dir_files); - pending.extend(dir_subdirs.into_values()); + pending.extend(dir_subdirs.into_iter()); } (workspace_files, false) } @@ -1050,7 +1092,7 @@ impl Inner { ); } } - self.workspace_files = workspace_files; + self.workspace_files = Arc::new(workspace_files); self.workspace_files_hash = enable_settings_hash; } @@ -1070,7 +1112,12 @@ impl Inner { self .config .tree - .refresh(&self.config.settings, &self.workspace_files, &file_fetcher) + .refresh( + &self.config.settings, + &self.workspace_files, + &file_fetcher, + self.cache.deno_dir(), + ) .await; self .client @@ -1124,7 +1171,7 @@ impl Inner { #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] async fn refresh_documents_config(&mut self) { - self.documents.update_config( + self.document_modules.update_config( &self.config, &self.resolver, &self.cache, @@ -1144,10 +1191,9 @@ impl Inner { let Some(scheme) = params.text_document.uri.scheme() else { return; }; - if scheme.as_str() == "deno" { - // we can ignore virtual text documents opening, as they don't need to - // be tracked in memory, as they are static assets that won't change - // already managed by the language service + // `deno:` documents are read-only and should only be handled as server + // documents. 
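The reworked `walk_workspace` above now collects plain `PathBuf`s and classifies files with `MediaType::from_path` instead of building specifiers first. A rough standalone model of that path-based filtering, using only std; the extension and directory lists here are an illustrative subset, not the real ones:

use std::fs;
use std::path::{Path, PathBuf};

// Illustrative subset of directories the walk never descends into.
const SKIP_DIRS: &[&str] = &["node_modules", ".git", "dist", "coverage"];

fn is_script_path(path: &Path) -> bool {
  let Some(name) = path.file_name().and_then(|n| n.to_str()) else {
    return false;
  };
  if name.contains(".min.") {
    return false; // skip minified artifacts, as the server does
  }
  matches!(
    path.extension().and_then(|e| e.to_str()),
    Some("ts" | "tsx" | "js" | "jsx" | "mjs" | "cjs" | "mts" | "cts" | "json")
  )
}

/// Collect candidate workspace files as paths (the server now stores
/// `PathBuf`s instead of module specifiers).
fn walk(root: &Path, out: &mut Vec<PathBuf>) {
  let Ok(read_dir) = fs::read_dir(root) else {
    return;
  };
  for entry in read_dir.flatten() {
    let path = entry.path();
    let Ok(file_type) = entry.file_type() else {
      continue;
    };
    if file_type.is_dir() {
      let name = entry.file_name();
      let name = name.to_string_lossy();
      if !SKIP_DIRS.contains(&name.as_ref()) && !name.starts_with('.') {
        walk(&path, out);
      }
    } else if is_script_path(&path) {
      out.push(path);
    }
  }
}

fn main() {
  let mut files = Vec::new();
  walk(Path::new("."), &mut files);
  println!("{} candidate files", files.len());
}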
+ if scheme.eq_lowercase("deno") { return; } let language_id = @@ -1166,23 +1212,22 @@ impl Inner { params.text_document.uri.as_str() ); } - let file_referrer = Some(uri_to_url(¶ms.text_document.uri)) - .filter(|s| self.documents.is_valid_file_referrer(s)); - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - let document = self.documents.open( - specifier.clone(), + let document = self.document_modules.open_document( + params.text_document.uri.clone(), params.text_document.version, params.text_document.language_id.parse().unwrap(), params.text_document.text.into(), - file_referrer, ); if document.is_diagnosable() { self.check_semantic_tokens_capabilities(); - self.project_changed([(document.specifier(), ChangeKind::Opened)], false); self.refresh_dep_info().await; - self.diagnostics_server.invalidate(&[specifier]); + self.project_changed( + [(¶ms.text_document.uri, ChangeKind::Opened)], + false, + ); + self + .diagnostics_server + .invalidate(&[¶ms.text_document.uri]); self.send_diagnostics_update(); self.send_testing_update(); } @@ -1192,37 +1237,44 @@ impl Inner { #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] async fn did_change(&mut self, params: DidChangeTextDocumentParams) { let mark = self.performance.mark_with_args("lsp.did_change", ¶ms); - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - match self.documents.change( - &specifier, + let Some(scheme) = params.text_document.uri.scheme() else { + return; + }; + // `deno:` documents are read-only and should only be handled as server + // documents. + if scheme.eq_lowercase("deno") { + return; + } + let document = match self.document_modules.change_document( + ¶ms.text_document.uri, params.text_document.version, params.content_changes, ) { - Ok(document) => { - if document.is_diagnosable() { - let old_scopes_with_node_specifier = - self.documents.scopes_with_node_specifier(); - self.refresh_dep_info().await; - let mut config_changed = false; - if !self - .documents - .scopes_with_node_specifier() - .equivalent(&old_scopes_with_node_specifier) - { - config_changed = true; - } - self.project_changed( - [(document.specifier(), ChangeKind::Modified)], - config_changed, - ); - self.diagnostics_server.invalidate(&[specifier]); - self.send_diagnostics_update(); - self.send_testing_update(); - } + Ok(doc) => doc, + Err(err) => { + error!("{:#}", err); + return; } - Err(err) => error!("{:#}", err), + }; + if document.is_diagnosable() { + let old_scopes_with_node_specifier = + self.document_modules.scopes_with_node_specifier(); + self.refresh_dep_info().await; + let mut config_changed = false; + if !self + .document_modules + .scopes_with_node_specifier() + .equivalent(&old_scopes_with_node_specifier) + { + config_changed = true; + } + self.project_changed( + [(document.uri.as_ref(), ChangeKind::Modified)], + config_changed, + ); + self.diagnostics_server.invalidate(&[&document.uri]); + self.send_diagnostics_update(); + self.send_testing_update(); } self.performance.measure(mark); } @@ -1230,27 +1282,53 @@ impl Inner { #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] fn did_save(&mut self, params: DidSaveTextDocumentParams) { let _mark = self.performance.measure_scope("lsp.did_save"); - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - self.documents.save(&specifier); + let Ok(Some(document)) = self + .get_document( + ¶ms.text_document.uri, + Enabled::Filter, + 
Exists::Enforce, + Diagnosable::Filter, + ) + .inspect_err(|err| { + lsp_warn!("{:#}", err); + }) + else { + return; + }; if !self .config - .workspace_settings_for_specifier(&specifier) + .workspace_settings_for_uri(document.uri()) .cache_on_save - || !self.config.specifier_enabled(&specifier) - || !self.diagnostics_state.has_no_cache_diagnostics(&specifier) + || !self.config.uri_enabled(document.uri()) + || !self + .diagnostics_state + .has_no_cache_diagnostics(document.uri()) { return; } - match url_to_file_path(&specifier) { - Ok(path) if is_importable_ext(&path) => {} - _ => return, + let Ok(Some(module)) = + self.get_primary_module(&document).inspect_err(|err| { + lsp_warn!("{:#}", err); + }) + else { + return; + }; + if module.specifier.scheme() != "file" + || module.specifier.as_str().contains("/node_modules/") + { + return; } self.task_queue.queue_task(Box::new(|ls: LanguageServer| { spawn(async move { - if let Err(err) = ls.cache(vec![], specifier.clone(), false).await { - lsp_warn!("Failed to cache \"{}\" on save: {:#}", &specifier, err); + if let Err(err) = ls + .cache(vec![], module.specifier.as_ref().clone(), false) + .await + { + lsp_warn!( + "Failed to cache \"{}\" on save: {:#}", + &module.specifier, + err + ); } }); })); @@ -1258,7 +1336,7 @@ impl Inner { #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] async fn refresh_dep_info(&mut self) { - let dep_info_by_scope = self.documents.dep_info_by_scope(); + let dep_info_by_scope = self.document_modules.dep_info_by_scope(); self .resolver .set_dep_info_by_scope(&dep_info_by_scope) @@ -1271,24 +1349,35 @@ impl Inner { let Some(scheme) = params.text_document.uri.scheme() else { return; }; - if scheme.as_str() == "deno" { - // we can ignore virtual text documents closing, as they don't need to - // be tracked in memory, as they are static assets that won't change - // already managed by the language service + // `deno:` documents are read-only and should only be handled as server + // documents. 
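A hedged sketch of the gating in the `did_save` handler above: the old path/extension check is replaced by looking at the resolved primary module's specifier, so remote modules and anything under `node_modules` never queue a cache task. The settings struct is a stand-in; `Url` is from the `url` crate.

use url::Url;

/// Stand-in for the relevant per-document settings and diagnostics state.
struct Settings {
  cache_on_save: bool,
  enabled: bool,
  has_uncached_dependencies: bool,
}

/// Mirrors the checks in `did_save`: only local, non-vendored modules with
/// outstanding "no cache" diagnostics are worth re-caching.
fn should_cache_on_save(settings: &Settings, module_specifier: &Url) -> bool {
  if !settings.cache_on_save || !settings.enabled {
    return false;
  }
  if !settings.has_uncached_dependencies {
    return false;
  }
  if module_specifier.scheme() != "file" {
    return false;
  }
  if module_specifier.as_str().contains("/node_modules/") {
    return false;
  }
  true
}

fn main() {
  let settings = Settings {
    cache_on_save: true,
    enabled: true,
    has_uncached_dependencies: true,
  };
  let specifier = Url::parse("file:///project/main.ts").unwrap();
  assert!(should_cache_on_save(&settings, &specifier));
}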
+ if scheme.eq_lowercase("deno") { return; } - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - self.diagnostics_state.clear(&specifier); - if self.is_diagnosable(&specifier) { + self.diagnostics_state.clear(¶ms.text_document.uri); + let document = match self + .document_modules + .close_document(¶ms.text_document.uri) + { + Ok(doc) => doc, + Err(err) => { + error!("{:#}", err); + return; + } + }; + if document.is_diagnosable() { self.refresh_dep_info().await; - self.diagnostics_server.invalidate(&[specifier.clone()]); + drop(document); + self.project_changed( + [(¶ms.text_document.uri, ChangeKind::Closed)], + false, + ); + self + .diagnostics_server + .invalidate(&[¶ms.text_document.uri]); self.send_diagnostics_update(); self.send_testing_update(); } - self.documents.close(&specifier); - self.project_changed([(&specifier, ChangeKind::Closed)], false); self.performance.measure(mark); } @@ -1338,7 +1427,7 @@ impl Inner { let changes = params .changes .into_iter() - .map(|e| (self.url_map.uri_to_specifier(&e.uri, LspUrlKind::File), e)) + .map(|e| (uri_to_url(&e.uri), e)) .collect::>(); if changes .iter() @@ -1373,7 +1462,7 @@ impl Inner { self.refresh_resolver().await; self.refresh_documents_config().await; self.project_changed( - changes.iter().map(|(s, _)| (s, ChangeKind::Modified)), + changes.iter().map(|(_, e)| (&e.uri, ChangeKind::Modified)), false, ); self.ts_server.cleanup_semantic_cache(self.snapshot()).await; @@ -1418,44 +1507,32 @@ impl Inner { params: DocumentSymbolParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.document_symbol", ¶ms); - let asset_or_document = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_document.line_index(); - - let navigation_tree = self - .get_navigation_tree(&specifier, token) - .await - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - error!( - "Error getting navigation tree for \"{}\": {:#}", - specifier, err - ); - LspError::internal_error() - } - })?; - + let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
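The watched-files handler above now maps each event URI straight to a URL before deciding whether the config tree needs refreshing. A simplified sketch of that detection; the config file list here is illustrative, and the comparison against previously discovered workspace files is omitted:

use url::Url;

/// Stand-in for an LSP `FileEvent`; only the URI matters here.
struct FileEvent {
  uri: String,
}

// Illustrative list of file names that should trigger a config refresh.
const CONFIG_FILE_NAMES: &[&str] =
  &["deno.json", "deno.jsonc", "package.json", "deno.lock"];

/// Pair each event with a parsed URL (mirroring `uri_to_url(&e.uri)`) and
/// report whether any change touches a config file.
fn config_changed(events: &[FileEvent]) -> bool {
  events
    .iter()
    .filter_map(|e| Url::parse(&e.uri).ok())
    .any(|url| {
      url
        .path_segments()
        .and_then(|mut s| s.next_back())
        .is_some_and(|name| CONFIG_FILE_NAMES.contains(&name))
    })
}

fn main() {
  let events = vec![FileEvent {
    uri: "file:///project/deno.jsonc".to_string(),
  }];
  assert!(config_changed(&events));
}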
else { + return Ok(None); + }; + let navigation_tree = self.get_navigation_tree(&module, token).await?; let response = if let Some(child_items) = &navigation_tree.child_items { let mut document_symbols = Vec::::new(); for item in child_items { if token.is_cancelled() { return Err(LspError::request_cancelled()); } - item - .collect_document_symbols(line_index.clone(), &mut document_symbols); + item.collect_document_symbols( + module.line_index.clone(), + &mut document_symbols, + ); } Some(DocumentSymbolResponse::Nested(document_symbols)) } else { @@ -1471,50 +1548,46 @@ impl Inner { params: DocumentFormattingParams, _token: &CancellationToken, ) -> LspResult>> { - let file_referrer = Some(uri_to_url(¶ms.text_document.uri)) - .filter(|s| self.documents.is_valid_file_referrer(s)); - let mut specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - // skip formatting any files ignored by the config file - if !self - .config - .tree - .fmt_config_for_specifier(&specifier) - .files - .matches_specifier(&specifier) - { - return Ok(None); - } - let document = self - .documents - .get_or_load(&specifier, file_referrer.as_ref()); - let Some(document) = document else { + let mark = self.performance.mark_with_args("lsp.formatting", ¶ms); + // Untitled files are exempt from enabled-checks because they tend not to + // have meaningful paths, and they won't be auto-formatted on save anyway. + let is_untitled = params + .text_document + .uri + .scheme() + .is_some_and(|s| s.eq_lowercase("untitled")); + let Some(document) = self.get_document( + ¶ms.text_document.uri, + if is_untitled { + Enabled::Ignore + } else { + Enabled::Filter + }, + Exists::Enforce, + Diagnosable::Ignore, + )? + else { return Ok(None); }; - // Detect vendored paths. Vendor file URLs will normalize to their remote - // counterparts, but for formatting we want to favour the file URL. - // TODO(nayeemrmn): Implement `Document::file_resource_path()` or similar. - if specifier.scheme() != "file" - && params.text_document.uri.scheme().map(|s| s.as_str()) == Some("file") - { - specifier = uri_to_url(¶ms.text_document.uri); + let Some(module) = self.get_primary_module(&document)? 
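The navigation tree consumed by `textDocument/documentSymbol` above is computed lazily and memoized per module. A minimal sketch of that get-or-init pattern, assuming `tokio::sync::OnceCell` for the cell type (this hunk does not show which cell the real `DocumentModule` uses):

use std::sync::Arc;
use tokio::sync::OnceCell;

/// Stand-in for an expensive, per-module artifact like the navigation tree.
struct NavigationTree(String);

struct Module {
  specifier: String,
  navigation_tree: OnceCell<Arc<NavigationTree>>,
}

impl Module {
  /// Compute the tree at most once per module instance; concurrent callers
  /// await the same initialization, mirroring `get_navigation_tree`.
  async fn navigation_tree(&self) -> Result<Arc<NavigationTree>, String> {
    self
      .navigation_tree
      .get_or_try_init(|| async {
        // Placeholder for the `ts_server.get_navigation_tree(..)` request.
        Ok::<_, String>(Arc::new(NavigationTree(format!(
          "tree for {}",
          self.specifier
        ))))
      })
      .await
      .map(|tree| tree.clone())
  }
}

#[tokio::main]
async fn main() {
  let module = Module {
    specifier: "file:///main.ts".to_string(),
    navigation_tree: OnceCell::new(),
  };
  let a = module.navigation_tree().await.unwrap();
  let b = module.navigation_tree().await.unwrap();
  // The second call reuses the cached value instead of recomputing it.
  assert!(Arc::ptr_eq(&a, &b));
}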
else { + return Ok(None); + }; + let fmt_config = + self.config.tree.fmt_config_for_specifier(&module.specifier); + if !is_untitled && !fmt_config.files.matches_specifier(&module.specifier) { + return Ok(None); } - let file_path = url_to_file_path(&specifier).map_err(|err| { - error!("{:#}", err); - LspError::invalid_request() - })?; - let mark = self.performance.mark_with_args("lsp.formatting", ¶ms); + let file_path = url_to_file_path(&module.specifier) + .or_else(|_| uri_to_file_path(document.uri())) + .map_err(|err| { + lsp_warn!("{:#}", err); + LspError::invalid_request() + })?; // spawn a blocking task to allow doing other work while this is occurring let text_edits = deno_core::unsync::spawn_blocking({ - let mut fmt_options = self - .config - .tree - .fmt_config_for_specifier(&specifier) - .options - .clone(); - let config_data = self.config.tree.data_for_specifier(&specifier); + let mut fmt_options = fmt_config.options.clone(); + let config_data = self.config.tree.data_for_specifier(&module.specifier); #[allow(clippy::nonminimal_bool)] // clippy's suggestion is more confusing if !config_data.is_some_and(|d| d.maybe_deno_json().is_some()) { fmt_options.use_tabs = Some(!params.options.insert_spaces); @@ -1528,24 +1601,23 @@ impl Inner { .map(|d| d.unstable.contains("fmt-sql")) .unwrap_or(false), }; - let document = document.clone(); move || { - let format_result = match document.maybe_parsed_source() { + let format_result = match &module + .open_data + .as_ref() + .and_then(|d| d.parsed_source.as_ref()) + { Some(Ok(parsed_source)) => { format_parsed_source(parsed_source, &fmt_options) } Some(Err(err)) => Err(anyhow!("{:#}", err)), None => { - // the file path is only used to determine what formatter should - // be used to format the file, so give the filepath an extension - // that matches what the user selected as the language - let ext = document - .maybe_language_id() - .and_then(|id| id.as_extension().map(|s| s.to_string())); - // it's not a js/ts file, so attempt to format its contents + let ext = document.open().and_then(|d| { + d.language_id.as_extension().map(|s| s.to_string()) + }); format_file( &file_path, - document.content(), + &document.text(), &fmt_options, &unstable_options, ext, @@ -1554,9 +1626,9 @@ impl Inner { }; match format_result { Ok(Some(new_text)) => Some(text::get_edits( - document.content(), + &document.text(), &new_text, - document.line_index().as_ref(), + &module.line_index, )), Ok(None) => Some(Vec::new()), Err(err) => { @@ -1587,60 +1659,66 @@ impl Inner { params: HoverParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position_params.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self.performance.mark_with_args("lsp.hover", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let file_referrer = asset_or_doc.file_referrer(); - let hover = if let Some((_, dep, range)) = asset_or_doc - .get_maybe_dependency(¶ms.text_document_position_params.position) + let Some(document) = self.get_document( + ¶ms.text_document_position_params.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
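A deliberately naive sketch of turning a formatter result into LSP edits: the real code computes minimal edits with `text::get_edits` against the module's line index, while this stand-in just replaces the whole document when anything changed (`lsp_types` structs assumed):

use lsp_types::{Position, Range, TextEdit};

/// Count lines and the length of the last line in UTF-16 code units, which
/// is what LSP positions are measured in by default.
fn end_position(text: &str) -> Position {
  let mut line = 0u32;
  let mut character = 0u32;
  for c in text.chars() {
    if c == '\n' {
      line += 1;
      character = 0;
    } else {
      character += c.len_utf16() as u32;
    }
  }
  Position { line, character }
}

/// Whole-document replacement edit; the real implementation emits minimal
/// edits so the client preserves cursors and markers better.
fn format_to_edits(old_text: &str, new_text: &str) -> Vec<TextEdit> {
  if old_text == new_text {
    return Vec::new();
  }
  vec![TextEdit {
    range: Range {
      start: Position { line: 0, character: 0 },
      end: end_position(old_text),
    },
    new_text: new_text.to_string(),
  }]
}

fn main() {
  let edits = format_to_edits("const x=1\n", "const x = 1;\n");
  assert_eq!(edits.len(), 1);
}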
else { + return Ok(None); + }; + let hover = if let Some((_, dep, range)) = module + .dependency_at_position(¶ms.text_document_position_params.position) { - let dep_doc = dep - .get_code() - .and_then(|s| self.documents.get_or_load(s, file_referrer)); - let dep_maybe_types_dependency = - dep_doc.as_ref().map(|d| d.maybe_types_dependency()); - let value = match (dep.maybe_code.is_none(), dep.maybe_type.is_none(), &dep_maybe_types_dependency) { + let dep_module = dep.get_code().and_then(|s| { + self + .document_modules + .inspect_module_for_specifier(s, module.scope.as_deref()) + }); + let dep_types_dependency = dep_module.as_ref().map(|m| { + m.types_dependency + .as_ref() + .map(|d| &d.dependency) + .unwrap_or(&Resolution::None) + }); + let value = match (dep.maybe_code.is_none(), dep.maybe_type.is_none(), &dep_types_dependency) { (false, false, None) => format!( "**Resolved Dependency**\n\n**Code**: {}\n\n**Types**: {}\n", - self.resolution_to_hover_text(&dep.maybe_code, file_referrer), - self.resolution_to_hover_text(&dep.maybe_type, file_referrer), + self.resolution_to_hover_text(&dep.maybe_code, module.scope.as_deref()), + self.resolution_to_hover_text(&dep.maybe_type, module.scope.as_deref()), ), (false, false, Some(types_dep)) if !types_dep.is_none() => format!( "**Resolved Dependency**\n\n**Code**: {}\n**Types**: {}\n**Import Types**: {}\n", - self.resolution_to_hover_text(&dep.maybe_code, file_referrer), - self.resolution_to_hover_text(&dep.maybe_type, file_referrer), - self.resolution_to_hover_text(types_dep, file_referrer), + self.resolution_to_hover_text(&dep.maybe_code, module.scope.as_deref()), + self.resolution_to_hover_text(&dep.maybe_type, module.scope.as_deref()), + self.resolution_to_hover_text(types_dep, module.scope.as_deref()), ), (false, false, Some(_)) => format!( "**Resolved Dependency**\n\n**Code**: {}\n\n**Types**: {}\n", - self.resolution_to_hover_text(&dep.maybe_code, file_referrer), - self.resolution_to_hover_text(&dep.maybe_type, file_referrer), + self.resolution_to_hover_text(&dep.maybe_code, module.scope.as_deref()), + self.resolution_to_hover_text(&dep.maybe_type, module.scope.as_deref()), ), (false, true, Some(types_dep)) if !types_dep.is_none() => format!( "**Resolved Dependency**\n\n**Code**: {}\n\n**Types**: {}\n", - self.resolution_to_hover_text(&dep.maybe_code, file_referrer), - self.resolution_to_hover_text(types_dep, file_referrer), + self.resolution_to_hover_text(&dep.maybe_code, module.scope.as_deref()), + self.resolution_to_hover_text(types_dep, module.scope.as_deref()), ), (false, true, _) => format!( "**Resolved Dependency**\n\n**Code**: {}\n", - self.resolution_to_hover_text(&dep.maybe_code, file_referrer), + self.resolution_to_hover_text(&dep.maybe_code, module.scope.as_deref()), ), (true, false, _) => format!( "**Resolved Dependency**\n\n**Types**: {}\n", - self.resolution_to_hover_text(&dep.maybe_type, file_referrer), + self.resolution_to_hover_text(&dep.maybe_type, module.scope.as_deref()), ), (true, true, _) => unreachable!("{}", json!(params)), }; - let value = if let Some(docs) = self.module_registry.get_hover(&dep).await + let value = if let Some(docs) = self.module_registry.get_hover(dep).await { format!("{value}\n\n---\n\n{docs}") } else { @@ -1651,19 +1729,19 @@ impl Inner { kind: MarkupKind::Markdown, value, }), - range: Some(to_lsp_range(&range)), + range: Some(to_lsp_range(range)), }) } else { - let line_index = asset_or_doc.line_index(); - let position = - line_index.offset_tsc(params.text_document_position_params.position)?; + 
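A condensed model of the hover body assembled above: given whichever parts of a dependency resolved, build the markdown value (resolution rendering, the import-types case, and the registry doc lookup are all elided):

/// Inputs stand in for `resolution_to_hover_text()` output: a dependency may
/// have resolved a code specifier, a types specifier, or both.
fn dependency_hover_text(code: Option<&str>, types: Option<&str>) -> Option<String> {
  let mut value = String::from("**Resolved Dependency**\n");
  match (code, types) {
    (Some(code), Some(types)) => {
      value.push_str(&format!("\n**Code**: {code}\n\n**Types**: {types}\n"));
    }
    (Some(code), None) => {
      value.push_str(&format!("\n**Code**: {code}\n"));
    }
    (None, Some(types)) => {
      value.push_str(&format!("\n**Types**: {types}\n"));
    }
    // A dependency with neither resolution should not have produced a hover
    // in the first place.
    (None, None) => return None,
  }
  Some(value)
}

fn main() {
  let text =
    dependency_hover_text(Some("https://deno.land/x/mod.ts"), None).unwrap();
  assert!(text.contains("**Code**"));
}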
let position = module + .line_index + .offset_tsc(params.text_document_position_params.position)?; let maybe_quick_info = self .ts_server .get_quick_info( self.snapshot(), - specifier.clone(), + &module.specifier, position, - asset_or_doc.scope().cloned(), + module.scope.as_ref(), token, ) .await @@ -1675,7 +1753,7 @@ impl Inner { LspError::internal_error() } })?; - maybe_quick_info.map(|qi| qi.to_hover(line_index, self)) + maybe_quick_info.map(|qi| qi.to_hover(&module, self)) }; self.performance.measure(mark); Ok(hover) @@ -1686,7 +1764,7 @@ impl Inner { fn resolution_to_hover_text( &self, resolution: &Resolution, - file_referrer: Option<&ModuleSpecifier>, + scope: Option<&Url>, ) -> String { match resolution { Resolution::Ok(resolved) => { @@ -1709,9 +1787,8 @@ impl Inner { if let Ok(jsr_req_ref) = JsrPackageReqReference::from_specifier(specifier) { - if let Some(url) = self - .resolver - .jsr_to_resource_url(&jsr_req_ref, file_referrer) + if let Some(url) = + self.resolver.jsr_to_resource_url(&jsr_req_ref, scope) { result = format!("{result} (<{url}>)"); } @@ -1731,19 +1808,21 @@ impl Inner { params: CodeActionParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self.performance.mark_with_args("lsp.code_action", ¶ms); + let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(None); + }; + let mut all_actions = CodeActionResponse::new(); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); // QuickFix let ts_fixable_diagnosics = self.ts_fixable_diagnostics().await; @@ -1773,11 +1852,7 @@ impl Inner { if !fixable_diagnostics.is_empty() { let file_diagnostics = self .diagnostics_server - .get_ts_diagnostics(&specifier, asset_or_doc.document_lsp_version()); - let specifier_kind = asset_or_doc - .document() - .map(|d| d.resolution_mode()) - .unwrap_or(ResolutionMode::Import); + .get_ts_diagnostics(document.uri(), document.open().map(|d| d.version)); let mut includes_no_cache = false; for diagnostic in &fixable_diagnostics { match diagnostic.source.as_deref() { @@ -1797,21 +1872,21 @@ impl Inner { .ts_server .get_code_fixes( self.snapshot(), - specifier.clone(), - line_index.offset_tsc(diagnostic.range.start)? - ..line_index.offset_tsc(diagnostic.range.end)?, + &module.specifier, + module.line_index.offset_tsc(diagnostic.range.start)? 
+ ..module.line_index.offset_tsc(diagnostic.range.end)?, codes, (&self .config .tree - .fmt_config_for_specifier(&specifier) + .fmt_config_for_specifier(&module.specifier) .options) .into(), tsc::UserPreferences::from_config_for_specifier( &self.config, - &specifier, + &module.specifier, ), - asset_or_doc.scope().cloned(), + module.scope.as_ref(), token, ) .await @@ -1833,13 +1908,7 @@ impl Inner { return Err(LspError::request_cancelled()); } code_actions - .add_ts_fix_action( - &specifier, - specifier_kind, - &action, - diagnostic, - self, - ) + .add_ts_fix_action(&action, diagnostic, &module, self) .map_err(|err| { error!("Unable to convert fix: {:#}", err); LspError::internal_error() @@ -1850,7 +1919,7 @@ impl Inner { &file_diagnostics, ) { code_actions - .add_ts_fix_all_action(&action, &specifier, diagnostic); + .add_ts_fix_all_action(&action, &module, diagnostic); } } } @@ -1865,21 +1934,18 @@ impl Inner { includes_no_cache = true; } code_actions - .add_deno_fix_action(&specifier, diagnostic) + .add_deno_fix_action( + document.uri(), + &module.specifier, + diagnostic, + ) .map_err(|err| { error!("{:#}", err); LspError::internal_error() })? } Some("deno-lint") => code_actions - .add_deno_lint_actions( - &specifier, - diagnostic, - asset_or_doc.document().map(|d| d.text_info()), - asset_or_doc - .maybe_parsed_source() - .and_then(|r| r.as_ref().ok()), - ) + .add_deno_lint_actions(document.uri(), &module, diagnostic) .map_err(|err| { error!("Unable to fix lint error: {:#}", err); LspError::internal_error() @@ -1889,7 +1955,7 @@ impl Inner { } if includes_no_cache { let no_cache_diagnostics = - self.diagnostics_state.no_cache_diagnostics(&specifier); + self.diagnostics_state.no_cache_diagnostics(document.uri()); let uncached_deps = no_cache_diagnostics .iter() .filter_map(|d| { @@ -1901,8 +1967,10 @@ impl Inner { }) .collect::>(); if uncached_deps.len() > 1 { - code_actions - .add_cache_all_action(&specifier, no_cache_diagnostics.to_owned()); + code_actions.add_cache_all_action( + &module.specifier, + no_cache_diagnostics.to_owned(), + ); } } } @@ -1921,16 +1989,16 @@ impl Inner { .ts_server .get_applicable_refactors( self.snapshot(), - specifier.clone(), - line_index.offset_tsc(params.range.start)? - ..line_index.offset_tsc(params.range.end)?, + &module.specifier, + module.line_index.offset_tsc(params.range.start)? 
+ ..module.line_index.offset_tsc(params.range.end)?, Some(tsc::UserPreferences::from_config_for_specifier( &self.config, - &specifier, + &module.specifier, )), params.context.trigger_kind, only, - asset_or_doc.scope().cloned(), + module.scope.as_ref(), token, ) .await @@ -1946,7 +2014,7 @@ impl Inner { .into_iter() .map(|refactor_info| { refactor_info - .to_code_actions(&specifier, ¶ms.range, token) + .to_code_actions(document.uri(), ¶ms.range, token) .map_err(|err| { if token.is_cancelled() { LspError::request_cancelled() @@ -1988,16 +2056,13 @@ impl Inner { params: CodeAction, token: &CancellationToken, ) -> LspResult { - if params.kind.is_none() || params.data.is_none() { - return Ok(params); - } - let mark = self .performance .mark_with_args("lsp.code_action_resolve", ¶ms); - let kind = params.kind.clone().unwrap(); - let data = params.data.clone().unwrap(); - + let (Some(kind), Some(data)) = (params.kind.clone(), params.data.clone()) + else { + return Ok(params); + }; let result = if kind.as_str().starts_with(CodeActionKind::QUICKFIX.as_str()) { let code_action_data: CodeActionData = @@ -2005,25 +2070,35 @@ impl Inner { error!("Unable to decode code action data: {:#}", err); LspError::invalid_params("The CodeAction's data is invalid.") })?; - let maybe_asset_or_doc = - self.get_asset_or_document(&code_action_data.specifier).ok(); - let scope = maybe_asset_or_doc.as_ref().and_then(|d| d.scope().cloned()); + let Some(document) = self.get_document( + &code_action_data.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(params); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(params); + }; let combined_code_actions = self .ts_server .get_combined_code_fix( self.snapshot(), - &code_action_data, + &module.specifier, + &code_action_data.fix_id, (&self .config .tree - .fmt_config_for_specifier(&code_action_data.specifier) + .fmt_config_for_specifier(&module.specifier) .options) .into(), tsc::UserPreferences::from_config_for_specifier( &self.config, - &code_action_data.specifier, + &module.specifier, ), - scope, + module.scope.as_ref(), token, ) .await @@ -2041,23 +2116,29 @@ impl Inner { } let changes = if code_action_data.fix_id == "fixMissingImport" { - fix_ts_import_changes(&combined_code_actions.changes, self, token) - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - error!("Unable to fix import changes: {:#}", err); - LspError::internal_error() - } - })? + fix_ts_import_changes( + &combined_code_actions.changes, + &module, + self, + token, + ) + .map_err(|err| { + if token.is_cancelled() { + LspError::request_cancelled() + } else { + error!("Unable to fix import changes: {:#}", err); + LspError::internal_error() + } + })? 
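A small sketch of the round trip that `code_action_resolve` relies on: when an unresolved action is created, its identifying data rides along in `CodeAction::data` as JSON and is decoded again on resolve. The field names are illustrative; notably, the refactored `CodeActionData` is keyed by the document `Uri` rather than a specifier.

use serde::{Deserialize, Serialize};
use serde_json::json;

/// Illustrative shape; the real struct lives in the LSP module.
#[derive(Debug, Serialize, Deserialize)]
struct CodeActionData {
  uri: String,
  fix_id: String,
}

fn main() {
  // When building the unresolved action, the data rides along as JSON.
  let data = json!({
    "uri": "file:///project/main.ts",
    "fix_id": "fixMissingImport",
  });

  // On `codeAction/resolve`, the handler decodes it back before asking the
  // TS server for the combined fix.
  let decoded: CodeActionData =
    serde_json::from_value(data).expect("The CodeAction's data is invalid.");
  assert_eq!(decoded.fix_id, "fixMissingImport");
}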
} else { combined_code_actions.changes }; let mut code_action = params; - code_action.edit = ts_changes_to_edit(&changes, self).map_err(|err| { - error!("Unable to convert changes to edits: {:#}", err); - LspError::internal_error() - })?; + code_action.edit = + ts_changes_to_edit(&changes, &module, self).map_err(|err| { + error!("Unable to convert changes to edits: {:#}", err); + LspError::internal_error() + })?; code_action } else if let Some(kind_suffix) = kind .as_str() @@ -2069,28 +2150,38 @@ impl Inner { error!("Unable to decode code action data: {:#}", err); LspError::invalid_params("The CodeAction's data is invalid.") })?; - let asset_or_doc = self.get_asset_or_document(&action_data.specifier)?; - let line_index = asset_or_doc.line_index(); + let Some(document) = self.get_document( + &action_data.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(code_action); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(code_action); + }; let refactor_edit_info = self .ts_server .get_edits_for_refactor( self.snapshot(), - action_data.specifier.clone(), + &module.specifier, (&self .config .tree - .fmt_config_for_specifier(&action_data.specifier) + .fmt_config_for_specifier(&module.specifier) .options) .into(), - line_index.offset_tsc(action_data.range.start)? - ..line_index.offset_tsc(action_data.range.end)?, + module.line_index.offset_tsc(action_data.range.start)? + ..module.line_index.offset_tsc(action_data.range.end)?, action_data.refactor_name.clone(), action_data.action_name.clone(), Some(tsc::UserPreferences::from_config_for_specifier( &self.config, - &action_data.specifier, + &module.specifier, )), - asset_or_doc.scope().cloned(), + module.scope.as_ref(), token, ) .await @@ -2110,19 +2201,23 @@ impl Inner { if kind_suffix == ".rewrite.function.returnType" || kind_suffix == ".move.newFile" { - refactor_edit_info.edits = - fix_ts_import_changes(&refactor_edit_info.edits, self, token) - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - error!("Unable to fix import changes: {:#}", err); - LspError::internal_error() - } - })? + refactor_edit_info.edits = fix_ts_import_changes( + &refactor_edit_info.edits, + &module, + self, + token, + ) + .map_err(|err| { + if token.is_cancelled() { + LspError::request_cancelled() + } else { + error!("Unable to fix import changes: {:#}", err); + LspError::internal_error() + } + })? 
} code_action.edit = - refactor_edit_info.to_workspace_edit(self, token)?; + refactor_edit_info.to_workspace_edit(&module, self, token)?; } Err(err) => { if token.is_cancelled() { @@ -2144,20 +2239,20 @@ impl Inner { pub fn get_ts_response_import_mapper( &self, - file_referrer: &ModuleSpecifier, + module: &DocumentModule, ) -> TsResponseImportMapper { TsResponseImportMapper::new( - &self.documents, + &self.document_modules, + module.scope.clone(), self .config .tree - .data_for_specifier(file_referrer) + .data_for_specifier(&module.specifier) // todo(dsherret): this should probably just take the resolver itself // as the import map is an implementation detail .and_then(|d| d.resolver.maybe_import_map()), &self.resolver, &self.ts_server.specifier_map, - file_referrer, ) } @@ -2167,61 +2262,54 @@ impl Inner { params: CodeLensParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self.performance.mark_with_args("lsp.code_lens", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let settings = self.config.workspace_settings_for_specifier(&specifier); + let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(None); + }; + let settings = self + .config + .workspace_settings_for_specifier(&module.specifier); let mut code_lenses = Vec::new(); if settings.code_lens.test - && self.config.specifier_enabled_for_test(&specifier) + && self.config.specifier_enabled_for_test(&module.specifier) { - if let Some(Ok(parsed_source)) = asset_or_doc.maybe_parsed_source() { + if let Some(Ok(parsed_source)) = &module + .open_data + .as_ref() + .and_then(|d| d.parsed_source.as_ref()) + { code_lenses.extend( - code_lens::collect_test(&specifier, parsed_source, token).map_err( - |err| { + code_lens::collect_test(&module.specifier, parsed_source, token) + .map_err(|err| { if token.is_cancelled() { LspError::request_cancelled() } else { error!( "Error getting test code lenses for \"{}\": {:#}", - &specifier, err + &module.specifier, err ); LspError::internal_error() } - }, - )?, + })?, ); } } if settings.code_lens.implementations || settings.code_lens.references { - let navigation_tree = self - .get_navigation_tree(&specifier, token) - .await - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - error!( - "Error getting navigation tree for \"{}\": {:#}", - specifier, err - ); - LspError::internal_error() - } - })?; - let line_index = asset_or_doc.line_index(); + let navigation_tree = self.get_navigation_tree(&module, token).await?; code_lenses.extend( code_lens::collect_tsc( - &specifier, + document.uri(), &settings.code_lens, - line_index, + module.line_index.clone(), &navigation_tree, token, ) @@ -2231,7 +2319,7 @@ impl Inner { } else { error!( "Error getting ts code lenses for \"{:#}\": {:#}", - &specifier, err + &module.specifier, err ); LspError::internal_error() } @@ -2281,30 +2369,31 @@ impl Inner { params: DocumentHighlightParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position_params.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || 
!self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.document_highlight", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - let files_to_search = vec![specifier.clone()]; + let Some(document) = self.get_document( + ¶ms.text_document_position_params.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(None); + }; let maybe_document_highlights = self .ts_server .get_document_highlights( self.snapshot(), - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - files_to_search, - asset_or_doc.scope().cloned(), + &module.specifier, + module + .line_index + .offset_tsc(params.text_document_position_params.position)?, + vec![module.specifier.as_ref().clone()], + module.scope.as_ref(), token, ) .await @@ -2325,14 +2414,15 @@ impl Inner { document_highlights .into_iter() .map(|dh| { - dh.to_highlight(line_index.clone(), token).map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - error!("Unable to convert document highlights: {:#}", err); - LspError::internal_error() - } - }) + dh.to_highlight(module.line_index.clone(), token) + .map_err(|err| { + if token.is_cancelled() { + LspError::request_cancelled() + } else { + error!("Unable to convert document highlights: {:#}", err); + LspError::internal_error() + } + }) }) .collect::, _>>() .map(|s| s.into_iter().flatten().collect()) @@ -2343,69 +2433,72 @@ impl Inner { } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] - async fn references( + pub async fn references( &self, params: ReferenceParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self.performance.mark_with_args("lsp.references", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - let maybe_referenced_symbols = self - .ts_server - .find_references( - self.snapshot(), - specifier.clone(), - line_index.offset_tsc(params.text_document_position.position)?, - token, - ) - .await - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - error!("Unable to get references from TypeScript: {:#}", err); - LspError::internal_error() - } - })?; - - if let Some(symbols) = maybe_referenced_symbols { - let mut results = Vec::new(); - for reference in symbols.iter().flat_map(|s| &s.references) { + let Some(document) = self.get_document( + ¶ms.text_document_position.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? 
+ else { + return Ok(None); + }; + let mut locations = IndexSet::new(); + for (scope, module) in self + .document_modules + .inspect_or_temp_modules_by_scope(&document) + { + if token.is_cancelled() { + return Err(LspError::request_cancelled()); + } + let symbols = self + .ts_server + .find_references( + self.snapshot(), + &module.specifier, + module + .line_index + .offset_tsc(params.text_document_position.position)?, + scope.as_ref(), + token, + ) + .await + .inspect_err(|err| { + if !err.to_string().contains("Could not find source file") { + lsp_warn!( + "Unable to get references from TypeScript: {:#}\nScope: {}", + err, + module.scope.as_ref().map(|s| s.as_str()).unwrap_or("null"), + ); + } + }) + .unwrap_or_default(); + for reference in symbols.iter().flatten().flat_map(|s| &s.references) { if token.is_cancelled() { return Err(LspError::request_cancelled()); } if !params.context.include_declaration && reference.is_definition { continue; } - let reference_specifier = - resolve_url(&reference.entry.document_span.file_name).unwrap(); - let reference_line_index = if reference_specifier == specifier { - line_index.clone() - } else { - let asset_or_doc = - self.get_asset_or_document(&reference_specifier)?; - asset_or_doc.line_index() + let Some(location) = reference.entry.to_location(&module, self) else { + continue; }; - results.push(reference.entry.to_location(reference_line_index, self)); + locations.insert(location); } - - self.performance.measure(mark); - Ok(Some(results)) - } else { - self.performance.measure(mark); - Ok(None) } + let locations = if locations.is_empty() { + None + } else { + Some(locations.into_iter().collect()) + }; + + self.performance.measure(mark); + Ok(locations) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] @@ -2414,28 +2507,30 @@ impl Inner { params: GotoDefinitionParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position_params.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.goto_definition", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); + let Some(document) = self.get_document( + ¶ms.text_document_position_params.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
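The references handler above now queries TypeScript once per scope and merges the answers. A stripped-down sketch of that merge, using `indexmap::IndexSet` so duplicate locations reported by multiple scopes collapse while preserving first-seen order, and returning `None` rather than an empty list:

use indexmap::IndexSet;

/// Stand-in for an LSP location.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Location {
  uri: String,
  line: u32,
}

/// Each inner vec models the references found for one scope.
fn merge_scope_results(per_scope: Vec<Vec<Location>>) -> Option<Vec<Location>> {
  let mut locations = IndexSet::new();
  for scope_locations in per_scope {
    for location in scope_locations {
      locations.insert(location);
    }
  }
  if locations.is_empty() {
    None // the LSP response is `null` rather than an empty array
  } else {
    Some(locations.into_iter().collect())
  }
}

fn main() {
  let a = Location { uri: "file:///a.ts".into(), line: 3 };
  let results = merge_scope_results(vec![vec![a.clone()], vec![a]]);
  assert_eq!(results.unwrap().len(), 1);
}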
else { + return Ok(None); + }; let maybe_definition = self .ts_server .get_definition( self.snapshot(), - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - asset_or_doc.scope().cloned(), + &module.specifier, + module + .line_index + .offset_tsc(params.text_document_position_params.position)?, + module.scope.as_ref(), token, ) .await @@ -2451,7 +2546,7 @@ impl Inner { if let Some(definition) = maybe_definition { let results = definition - .to_definition(line_index, self, token) + .to_definition(&module, self, token) .map_err(|err| { if token.is_cancelled() { LspError::request_cancelled() @@ -2474,28 +2569,30 @@ impl Inner { params: GotoTypeDefinitionParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position_params.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.goto_definition", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); + let Some(document) = self.get_document( + ¶ms.text_document_position_params.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(None); + }; let maybe_definition_info = self .ts_server .get_type_definition( self.snapshot(), - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - asset_or_doc.scope().cloned(), + &module.specifier, + module + .line_index + .offset_tsc(params.text_document_position_params.position)?, + module.scope.as_ref(), token, ) .await @@ -2517,8 +2614,7 @@ impl Inner { if token.is_cancelled() { return Err(LspError::request_cancelled()); } - if let Some(link) = info.document_span.to_link(line_index.clone(), self) - { + if let Some(link) = info.document_span.to_link(&module, self) { location_links.push(link); } } @@ -2537,21 +2633,26 @@ impl Inner { params: CompletionParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self.url_map.uri_to_specifier( + let mark = self.performance.mark_with_args("lsp.completion", ¶ms); + let Some(document) = self.get_document( ¶ms.text_document_position.text_document.uri, - LspUrlKind::File, - ); - let language_settings = - self.config.language_settings_for_specifier(&specifier); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - || !language_settings.map(|s| s.suggest.enabled).unwrap_or(true) - { + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
else { + return Ok(None); + }; + let language_settings = self + .config + .language_settings_for_specifier(&module.specifier); + if !language_settings.map(|s| s.suggest.enabled).unwrap_or(true) { return Ok(None); } - let mark = self.performance.mark_with_args("lsp.completion", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; // Import specifiers are something wholly internal to Deno, so for // completions, we will use internal logic and if there are completions // for imports, we will return those and not send a message into tsc, where @@ -2561,21 +2662,20 @@ impl Inner { .map(|s| s.suggest.include_completions_for_import_statements) .unwrap_or(true) { - let file_referrer = asset_or_doc.file_referrer().unwrap_or(&specifier); response = completions::get_import_completions( - &specifier, + &module, ¶ms.text_document_position.position, &self.config, &self.client, &self.module_registry, &self.jsr_search_api, &self.npm_search_api, - &self.documents, + &self.document_modules, self.resolver.as_ref(), self .config .tree - .data_for_specifier(file_referrer) + .data_for_specifier(&module.specifier) // todo(dsherret): this should probably just take the resolver itself // as the import map is an implementation detail .and_then(|d| d.resolver.maybe_import_map()), @@ -2583,7 +2683,6 @@ impl Inner { .await; } if response.is_none() { - let line_index = asset_or_doc.line_index(); let (trigger_character, trigger_kind) = if let Some(context) = ¶ms.context { ( @@ -2593,19 +2692,19 @@ impl Inner { } else { (None, None) }; - let position = - line_index.offset_tsc(params.text_document_position.position)?; - let scope = asset_or_doc.scope(); + let position = module + .line_index + .offset_tsc(params.text_document_position.position)?; let maybe_completion_info = self .ts_server .get_completions( self.snapshot(), - specifier.clone(), + &module.specifier, position, tsc::GetCompletionsAtPositionOptions { user_preferences: tsc::UserPreferences::from_config_for_specifier( &self.config, - &specifier, + &module.specifier, ), trigger_character, trigger_kind, @@ -2613,10 +2712,10 @@ impl Inner { (&self .config .tree - .fmt_config_for_specifier(&specifier) + .fmt_config_for_specifier(&module.specifier) .options) .into(), - scope.cloned(), + module.scope.as_ref(), token, ) .await @@ -2631,14 +2730,14 @@ impl Inner { response = Some( completions .as_completion_response( - line_index, + module.line_index.clone(), &self .config - .language_settings_for_specifier(&specifier) + .language_settings_for_specifier(&module.specifier) .cloned() .unwrap_or_default() .suggest, - &specifier, + &module, position, self, token, @@ -2676,29 +2775,40 @@ impl Inner { ) })?; if let Some(data) = &data.tsc { - let specifier = &data.specifier; - let scope = self - .get_asset_or_document(specifier) - .ok() - .and_then(|d| d.scope().cloned()); + let Some(document) = self.get_document( + &data.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(params); + }; + let Some(module) = self.get_primary_module(&document)? 
else { + return Ok(params); + }; let result = self .ts_server .get_completion_details( self.snapshot(), - GetCompletionDetailsArgs { - format_code_settings: Some( - (&self.config.tree.fmt_config_for_specifier(specifier).options) - .into(), - ), - preferences: Some( - tsc::UserPreferences::from_config_for_specifier( - &self.config, - specifier, - ), - ), - ..data.into() - }, - scope, + &module.specifier, + data.position, + data.name.clone(), + Some( + (&self + .config + .tree + .fmt_config_for_specifier(&module.specifier) + .options) + .into(), + ), + data.source.clone(), + Some(tsc::UserPreferences::from_config_for_specifier( + &self.config, + &module.specifier, + )), + data.data.clone(), + module.scope.as_ref(), token, ) .await; @@ -2706,7 +2816,7 @@ impl Inner { Ok(maybe_completion_info) => { if let Some(completion_info) = maybe_completion_info { completion_info - .as_completion_item(¶ms, data, specifier, self) + .as_completion_item(¶ms, data, &module, self) .map_err(|err| { error!( "Failed to serialize virtual_text_document response: {:#}", @@ -2748,61 +2858,73 @@ impl Inner { } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] - async fn goto_implementation( + pub async fn goto_implementation( &self, params: GotoImplementationParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position_params.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.goto_implementation", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - - let maybe_implementations = self - .ts_server - .get_implementations( - self.snapshot(), - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - token, - ) - .await - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - lsp_warn!( - "Unable to get implementation locations from TypeScript: {:#}", - err - ); - LspError::internal_error() - } - })?; - - let result = if let Some(implementations) = maybe_implementations { - let mut links = Vec::new(); - for implementation in implementations { - if token.is_cancelled() { - return Err(LspError::request_cancelled()); - } - if let Some(link) = implementation.to_link(line_index.clone(), self) { - links.push(link) - } + let Some(document) = self.get_document( + ¶ms.text_document_position_params.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? 
+ else { + return Ok(None); + }; + let mut implementations_with_modules = IndexMap::new(); + for (scope, module) in self + .document_modules + .inspect_or_temp_modules_by_scope(&document) + { + if token.is_cancelled() { + return Err(LspError::request_cancelled()); } - Some(GotoDefinitionResponse::Link(links)) - } else { + let maybe_implementations = self + .ts_server + .get_implementations( + self.snapshot(), + &module.specifier, + module + .line_index + .offset_tsc(params.text_document_position_params.position)?, + scope.as_ref(), + token, + ) + .await + .inspect_err(|err| { + if !err.to_string().contains("Could not find source file") { + lsp_warn!( + "Unable to get implementation locations from TypeScript: {:#}\nScope: {}", + err, + module.scope.as_ref().map(|s| s.as_str()).unwrap_or("null"), + ); + } + }) + .unwrap_or_default(); + implementations_with_modules.extend( + maybe_implementations + .into_iter() + .flatten() + .map(|i| (i, module.clone())), + ); + } + let links = implementations_with_modules + .iter() + .flat_map(|(i, module)| { + if token.is_cancelled() { + return Some(Err(LspError::request_cancelled())); + } + Some(Ok(i.to_link(module, self)?)) + }) + .collect::, _>>()?; + let result = if links.is_empty() { None + } else { + Some(GotoDefinitionResponse::Link(links.into_iter().collect())) }; self.performance.measure(mark); @@ -2815,26 +2937,27 @@ impl Inner { params: FoldingRangeParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.folding_range", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - + let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
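The goto-implementation hunk above replaces the old single-request path with a fan-out over every scope that can see the document: each scope's module is queried separately, per-scope "Could not find source file" failures are tolerated, and the hits are collected into an `IndexMap` so duplicates reported by several scopes collapse while the originating module is kept for link conversion. A minimal sketch of that shape, with placeholder `Module` and query types rather than the server's real ones:

```rust
use indexmap::IndexMap;

#[derive(Clone)]
struct Module {
    specifier: String,
}

/// Run one query per (scope, module) pair, skip scopes that cannot see the
/// file, and deduplicate hits by key while remembering which module produced
/// them. `query` stands in for the scoped ts_server call.
fn query_all_scopes<R: std::hash::Hash + Eq>(
    modules_by_scope: &[(Option<String>, Module)],
    mut query: impl FnMut(Option<&str>, &Module) -> Result<Vec<R>, String>,
) -> IndexMap<R, Module> {
    let mut results = IndexMap::new();
    for (scope, module) in modules_by_scope {
        match query(scope.as_deref(), module) {
            // The IndexMap key collapses identical hits coming from different
            // scopes; the value keeps the module whose line index and scope
            // will later be used to build the response.
            Ok(hits) => {
                results.extend(hits.into_iter().map(|h| (h, module.clone())))
            }
            // Mirrors the hunk above: a scope that has no source file for
            // this document is expected and silently skipped.
            Err(e) if e.contains("Could not find source file") => continue,
            Err(e) => eprintln!("scoped lookup failed: {e}"),
        }
    }
    results
}
```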
else { + return Ok(None); + }; let outlining_spans = self .ts_server .get_outlining_spans( self.snapshot(), - specifier, - asset_or_doc.scope().cloned(), + &module.specifier, + module.scope.as_ref(), token, ) .await @@ -2846,7 +2969,6 @@ impl Inner { LspError::invalid_request() } })?; - let response = if !outlining_spans.is_empty() { Some( outlining_spans @@ -2856,8 +2978,8 @@ impl Inner { return Err(LspError::request_cancelled()); } Ok(span.to_folding_range( - asset_or_doc.line_index(), - asset_or_doc.text_str().as_bytes(), + module.line_index.clone(), + module.text.as_bytes(), self.config.line_folding_only_capable(), )) }) @@ -2876,55 +2998,68 @@ impl Inner { params: CallHierarchyIncomingCallsParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.item.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.incoming_calls", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - - let incoming_calls: Vec = self - .ts_server - .provide_call_hierarchy_incoming_calls( - self.snapshot(), - specifier, - line_index.offset_tsc(params.item.selection_range.start)?, - token, - ) - .await - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - lsp_warn!("Unable to get incoming calls from TypeScript: {:#}", err); - LspError::internal_error() - } - })?; - - let maybe_root_path_owned = self - .config - .root_uri() - .and_then(|uri| url_to_file_path(uri).ok()); - let mut resolved_items = Vec::::new(); - for item in incoming_calls.iter() { + let Some(document) = self.get_document( + ¶ms.item.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? 
+ else { + return Ok(None); + }; + let mut incoming_calls_with_modules = IndexMap::new(); + for (scope, module) in self + .document_modules + .inspect_or_temp_modules_by_scope(&document) + { if token.is_cancelled() { return Err(LspError::request_cancelled()); } - if let Some(resolved) = item.try_resolve_call_hierarchy_incoming_call( - self, - maybe_root_path_owned.as_deref(), - ) { - resolved_items.push(resolved); - } + let calls = self + .ts_server + .provide_call_hierarchy_incoming_calls( + self.snapshot(), + &module.specifier, + module + .line_index + .offset_tsc(params.item.selection_range.start)?, + scope.as_ref(), + token, + ) + .await + .inspect_err(|err| { + if !err.to_string().contains("Could not find source file") { + lsp_warn!( + "Unable to get incoming calls from TypeScript: {:#}\nScope: {}", + err, + module.scope.as_ref().map(|s| s.as_str()).unwrap_or("null"), + ); + } + }) + .unwrap_or_default(); + incoming_calls_with_modules + .extend(calls.into_iter().map(|c| (c, module.clone()))); } + let root_path = self + .config + .root_url() + .and_then(|s| url_to_file_path(s).ok()); + let resolved_items = incoming_calls_with_modules + .iter() + .flat_map(|(c, module)| { + if token.is_cancelled() { + return Some(Err(LspError::request_cancelled())); + } + Some(Ok(c.try_resolve_call_hierarchy_incoming_call( + module, + self, + root_path.as_deref(), + )?)) + }) + .collect::, _>>()?; self.performance.measure(mark); Ok(Some(resolved_items)) } @@ -2935,28 +3070,30 @@ impl Inner { params: CallHierarchyOutgoingCallsParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.item.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.outgoing_calls", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - + let Some(document) = self.get_document( + ¶ms.item.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
else { + return Ok(None); + }; let outgoing_calls: Vec = self .ts_server .provide_call_hierarchy_outgoing_calls( self.snapshot(), - specifier, - line_index.offset_tsc(params.item.selection_range.start)?, - asset_or_doc.scope().cloned(), + &module.specifier, + module + .line_index + .offset_tsc(params.item.selection_range.start)?, + module.scope.as_ref(), token, ) .await @@ -2968,24 +3105,23 @@ impl Inner { LspError::invalid_request() } })?; - - let maybe_root_path_owned = self + let root_path = self .config - .root_uri() - .and_then(|uri| url_to_file_path(uri).ok()); - let mut resolved_items = Vec::::new(); - for item in outgoing_calls.iter() { - if token.is_cancelled() { - return Err(LspError::request_cancelled()); - } - if let Some(resolved) = item.try_resolve_call_hierarchy_outgoing_call( - line_index.clone(), - self, - maybe_root_path_owned.as_deref(), - ) { - resolved_items.push(resolved); - } - } + .root_url() + .and_then(|s| url_to_file_path(s).ok()); + let resolved_items = outgoing_calls + .iter() + .flat_map(|c| { + if token.is_cancelled() { + return Some(Err(LspError::request_cancelled())); + } + Some(Ok(c.try_resolve_call_hierarchy_outgoing_call( + &module, + self, + root_path.as_deref(), + )?)) + }) + .collect::>()?; self.performance.measure(mark); Ok(Some(resolved_items)) } @@ -2996,29 +3132,30 @@ impl Inner { params: CallHierarchyPrepareParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position_params.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.prepare_call_hierarchy", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - + let Some(document) = self.get_document( + ¶ms.text_document_position_params.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
else { + return Ok(None); + }; let maybe_one_or_many = self .ts_server .prepare_call_hierarchy( self.snapshot(), - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, - asset_or_doc.scope().cloned(), + &module.specifier, + module + .line_index + .offset_tsc(params.text_document_position_params.position)?, + module.scope.as_ref(), token, ) .await @@ -3032,16 +3169,17 @@ impl Inner { })?; let response = if let Some(one_or_many) = maybe_one_or_many { - let maybe_root_path_owned = self + let root_path = self .config - .root_uri() - .and_then(|uri| url_to_file_path(uri).ok()); + .root_url() + .and_then(|s| url_to_file_path(s).ok()); let mut resolved_items = Vec::::new(); match one_or_many { tsc::OneOrMany::One(item) => { if let Some(resolved) = item.try_resolve_call_hierarchy_item( + &module, self, - maybe_root_path_owned.as_deref(), + root_path.as_deref(), ) { resolved_items.push(resolved) } @@ -3052,8 +3190,9 @@ impl Inner { return Err(LspError::request_cancelled()); } if let Some(resolved) = item.try_resolve_call_hierarchy_item( + &module, self, - maybe_root_path_owned.as_deref(), + root_path.as_deref(), ) { resolved_items.push(resolved); } @@ -3074,62 +3213,78 @@ impl Inner { params: RenameParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self.performance.mark_with_args("lsp.rename", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - - let user_preferences = - tsc::UserPreferences::from_config_for_specifier(&self.config, &specifier); - let maybe_locations = self - .ts_server - .find_rename_locations( - self.snapshot(), - specifier, - line_index.offset_tsc(params.text_document_position.position)?, - user_preferences, + let Some(document) = self.get_document( + ¶ms.text_document_position.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? 
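Each handler above now starts from the `Uri` itself: the document is looked up with the `Enabled`/`Exists`/`Diagnosable` filters, and the primary module derived from it supplies the specifier, line index and scope for the tsc call. A stripped-down sketch of that prologue, with the filter enums collapsed into lookup closures and placeholder types standing in for the real ones:

```rust
struct Document;

struct Module {
    specifier: String,
    scope: Option<String>,
}

/// Filtered-out or unknown documents short-circuit to `None`, matching the
/// `let ... else { return Ok(None) }` chains in the hunks above; everything
/// after the prologue is keyed on the module rather than on a raw specifier.
fn handler_prologue(
    get_document: impl Fn(&str) -> Option<Document>,
    get_primary_module: impl Fn(&Document) -> Option<Module>,
    uri: &str,
) -> Option<String> {
    let document = get_document(uri)?;
    let module = get_primary_module(&document)?;
    Some(format!("{} in scope {:?}", module.specifier, module.scope))
}
```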
+ else { + return Ok(None); + }; + let mut locations_with_modules = IndexMap::new(); + for (scope, module) in self + .document_modules + .inspect_or_temp_modules_by_scope(&document) + { + if token.is_cancelled() { + return Err(LspError::request_cancelled()); + } + let maybe_locations = self + .ts_server + .find_rename_locations( + self.snapshot(), + &module.specifier, + module + .line_index + .offset_tsc(params.text_document_position.position)?, + tsc::UserPreferences::from_config_for_specifier( + &self.config, + &module.specifier, + ), + scope.as_ref(), + token, + ) + .await + .inspect_err(|err| { + if !err.to_string().contains("Could not find source file") { + lsp_warn!( + "Unable to get rename locations from TypeScript: {:#}\nScope: {}", + err, + scope.as_ref().map(|s| s.as_str()).unwrap_or("null"), + ); + } + }) + .unwrap_or_default(); + locations_with_modules.extend( + maybe_locations + .into_iter() + .flatten() + .map(|l| (l, module.clone())), + ); + } + let result = if locations_with_modules.is_empty() { + None + } else { + let workspace_edits = tsc::RenameLocation::collect_into_workspace_edit( + locations_with_modules, + ¶ms.new_name, + self, token, ) - .await .map_err(|err| { if token.is_cancelled() { LspError::request_cancelled() } else { - lsp_warn!( - "Unable to get rename locations from TypeScript: {:#}", - err - ); + lsp_warn!("Unable to covert rename locations: {:#}", err); LspError::internal_error() } })?; - - if let Some(locations) = maybe_locations { - let rename_locations = tsc::RenameLocations { locations }; - let workspace_edits = rename_locations - .into_workspace_edit(¶ms.new_name, self, token) - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - lsp_warn!("Unable to covert rename locations: {:#}", err); - LspError::internal_error() - } - })?; - self.performance.measure(mark); - Ok(Some(workspace_edits)) - } else { - self.performance.measure(mark); - Ok(None) - } + Some(workspace_edits) + }; + self.performance.measure(mark); + Ok(result) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] @@ -3138,33 +3293,33 @@ impl Inner { params: SelectionRangeParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.selection_range", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - + let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
else { + return Ok(None); + }; let mut selection_ranges = Vec::::new(); - for position in params.positions { + for &position in ¶ms.positions { if token.is_cancelled() { return Err(LspError::request_cancelled()); } - let selection_range: tsc::SelectionRange = self + let selection_range = self .ts_server .get_smart_selection_range( self.snapshot(), - specifier.clone(), - line_index.offset_tsc(position)?, - asset_or_doc.scope().cloned(), + &module.specifier, + module.line_index.offset_tsc(position)?, + module.scope.as_ref(), token, ) .await @@ -3181,7 +3336,7 @@ impl Inner { })?; selection_ranges - .push(selection_range.to_selection_range(line_index.clone())); + .push(selection_range.to_selection_range(module.line_index.clone())); } self.performance.measure(mark); Ok(Some(selection_ranges)) @@ -3193,58 +3348,50 @@ impl Inner { params: SemanticTokensParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.semantic_tokens_full", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - if let Some(tokens) = asset_or_doc.maybe_semantic_tokens() { - let response = if !tokens.data.is_empty() { - Some(SemanticTokensResult::Tokens(tokens.clone())) - } else { - None - }; - self.performance.measure(mark); - return Ok(response); - } - - let line_index = asset_or_doc.line_index(); - - let semantic_classification = self - .ts_server - .get_encoded_semantic_classifications( - self.snapshot(), - specifier, - 0..line_index.text_content_length_utf16().into(), - asset_or_doc.scope().cloned(), - token, - ) - .await - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - lsp_warn!( - "Unable to get semantic classifications from TypeScript: {:#}", - err - ); - LspError::invalid_request() - } - })?; - - let semantic_tokens = - semantic_classification.to_semantic_tokens(line_index, token)?; - - if let Some(doc) = asset_or_doc.document() { - doc.cache_semantic_tokens_full(semantic_tokens.clone()); - } - + let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Ignore, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(None); + }; + let semantic_tokens = module + .semantic_tokens_full + .get_or_try_init(|| async { + let semantic_classification = self + .ts_server + .get_encoded_semantic_classifications( + self.snapshot(), + &module.specifier, + 0..module.line_index.text_content_length_utf16().into(), + module.scope.as_ref(), + token, + ) + .await + .map_err(|err| { + if token.is_cancelled() { + LspError::request_cancelled() + } else { + lsp_warn!( + "Unable to get semantic classifications from TypeScript: {:#}", + err + ); + LspError::invalid_request() + } + })?; + semantic_classification + .to_semantic_tokens(module.line_index.clone(), token) + }) + .await? 
+ .clone(); let response = if !semantic_tokens.data.is_empty() { Some(SemanticTokensResult::Tokens(semantic_tokens)) } else { @@ -3260,20 +3407,24 @@ impl Inner { params: SemanticTokensRangeParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.semantic_tokens_range", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - if let Some(tokens) = asset_or_doc.maybe_semantic_tokens() { + let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Ignore, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(None); + }; + if let Some(tokens) = module.semantic_tokens_full.get() { let tokens = - super::semantic_tokens::tokens_within_range(&tokens, params.range); + super::semantic_tokens::tokens_within_range(tokens, params.range); let response = if !tokens.data.is_empty() { Some(SemanticTokensRangeResult::Tokens(tokens)) } else { @@ -3282,17 +3433,14 @@ impl Inner { self.performance.measure(mark); return Ok(response); } - - let line_index = asset_or_doc.line_index(); - let semantic_classification = self .ts_server .get_encoded_semantic_classifications( self.snapshot(), - specifier, - line_index.offset_tsc(params.range.start)? - ..line_index.offset_tsc(params.range.end)?, - asset_or_doc.scope().cloned(), + &module.specifier, + module.line_index.offset_tsc(params.range.start)? + ..module.line_index.offset_tsc(params.range.end)?, + module.scope.as_ref(), token, ) .await @@ -3307,9 +3455,8 @@ impl Inner { LspError::invalid_request() } })?; - - let semantic_tokens = - semantic_classification.to_semantic_tokens(line_index, token)?; + let semantic_tokens = semantic_classification + .to_semantic_tokens(module.line_index.clone(), token)?; let response = if !semantic_tokens.data.is_empty() { Some(SemanticTokensRangeResult::Tokens(semantic_tokens)) } else { @@ -3325,21 +3472,21 @@ impl Inner { params: SignatureHelpParams, token: &CancellationToken, ) -> LspResult> { - let specifier = self.url_map.uri_to_specifier( - ¶ms.text_document_position_params.text_document.uri, - LspUrlKind::File, - ); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - { - return Ok(None); - } - let mark = self .performance .mark_with_args("lsp.signature_help", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); + let Some(document) = self.get_document( + ¶ms.text_document_position_params.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? 
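The semantic-token hunks above cache the full-document tokens on the module itself: the full request initializes the cell through `get_or_try_init`, and the range request reuses the cached value via `get()` when it is already present. A small sketch of that caching, assuming an async cell such as `tokio::sync::OnceCell` (the concrete field type is not shown in this hunk):

```rust
use tokio::sync::OnceCell;

struct ModuleTokens {
    semantic_tokens_full: OnceCell<Vec<u32>>,
}

impl ModuleTokens {
    /// Full-document tokens are computed at most once per module; concurrent
    /// requests await the same initialization instead of hitting tsc twice.
    async fn full(&self) -> Result<Vec<u32>, String> {
        let tokens = self
            .semantic_tokens_full
            .get_or_try_init(|| async {
                // Stand-in for the get_encoded_semantic_classifications call.
                Ok::<_, String>(vec![0, 1, 2, 3, 4])
            })
            .await?;
        Ok(tokens.clone())
    }

    /// The range request can serve from the cached full result when it is
    /// already there, mirroring `semantic_tokens_full.get()` above.
    fn cached(&self) -> Option<&[u32]> {
        self.semantic_tokens_full.get().map(|v| v.as_slice())
    }
}
```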
else { + return Ok(None); + }; let options = if let Some(context) = params.context { tsc::SignatureHelpItemsOptions { trigger_reason: Some(tsc::SignatureHelpTriggerReason { @@ -3356,10 +3503,12 @@ impl Inner { .ts_server .get_signature_help_items( self.snapshot(), - specifier, - line_index.offset_tsc(params.text_document_position_params.position)?, + &module.specifier, + module + .line_index + .offset_tsc(params.text_document_position_params.position)?, options, - asset_or_doc.scope().cloned(), + module.scope.as_ref(), token, ) .await @@ -3374,10 +3523,10 @@ impl Inner { LspError::invalid_request() } })?; - - if let Some(signature_help_items) = maybe_signature_help_items { + let result = if let Some(signature_help_items) = maybe_signature_help_items + { let signature_help = signature_help_items - .into_signature_help(self, token) + .into_signature_help(&module, self, token) .map_err(|err| { if token.is_cancelled() { LspError::request_cancelled() @@ -3386,12 +3535,12 @@ impl Inner { LspError::internal_error() } })?; - self.performance.measure(mark); - Ok(Some(signature_help)) + Some(signature_help) } else { - self.performance.measure(mark); - Ok(None) - } + None + }; + self.performance.measure(mark); + Ok(result) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] @@ -3403,60 +3552,72 @@ impl Inner { if !self.ts_server.is_started() { return Ok(None); } - let mut changes = vec![]; + let mut changes_with_modules = IndexMap::new(); for rename in params.files { - let old_specifier = self.url_map.uri_to_specifier( + let Some(document) = self.get_document( &Uri::from_str(&rename.old_uri).unwrap(), - LspUrlKind::File, - ); - let options = self - .config - .language_settings_for_specifier(&old_specifier) - .map(|s| s.update_imports_on_file_move.clone()) - .unwrap_or_default(); - // Note that `Always` and `Prompt` are treated the same in the server, the - // client will worry about that after receiving the edits. - if options.enabled == UpdateImportsOnFileMoveEnabled::Never { + Enabled::Ignore, + Exists::Filter, + Diagnosable::Ignore, + )? + else { continue; + }; + for (scope, module) in self + .document_modules + .inspect_or_temp_modules_by_scope(&document) + { + if token.is_cancelled() { + return Err(LspError::request_cancelled()); + } + let options = self + .config + .language_settings_for_specifier(&module.specifier) + .map(|s| s.update_imports_on_file_move.clone()) + .unwrap_or_default(); + // Note that `Always` and `Prompt` are treated the same in the server, the + // client will worry about that after receiving the edits. 
+ if options.enabled == UpdateImportsOnFileMoveEnabled::Never { + continue; + } + let format_code_settings = (&self + .config + .tree + .fmt_config_for_specifier(&module.specifier) + .options) + .into(); + let changes = self + .ts_server + .get_edits_for_file_rename( + self.snapshot(), + &module.specifier, + &uri_to_url(&Uri::from_str(&rename.new_uri).unwrap()), + format_code_settings, + tsc::UserPreferences { + allow_text_changes_in_new_files: Some(true), + ..Default::default() + }, + scope.as_ref(), + token, + ) + .await + .map_err(|err| { + if token.is_cancelled() { + LspError::request_cancelled() + } else { + lsp_warn!( + "Unable to get edits for file rename from TypeScript: {:#}\nScope: {}", + err, + scope.as_ref().map(|s| s.as_str()).unwrap_or("null"), + ); + LspError::internal_error() + } + })?; + changes_with_modules + .extend(changes.into_iter().map(|c| (c, module.clone()))); } - let format_code_settings = (&self - .config - .tree - .fmt_config_for_specifier(&old_specifier) - .options) - .into(); - changes.extend( - self - .ts_server - .get_edits_for_file_rename( - self.snapshot(), - old_specifier, - self.url_map.uri_to_specifier( - &Uri::from_str(&rename.new_uri).unwrap(), - LspUrlKind::File, - ), - format_code_settings, - tsc::UserPreferences { - allow_text_changes_in_new_files: Some(true), - ..Default::default() - }, - token, - ) - .await - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { - lsp_warn!( - "Unable to get edits for file rename from TypeScript: {:#}", - err - ); - LspError::internal_error() - } - })?, - ); } - file_text_changes_to_workspace_edit(&changes, self, token) + file_text_changes_to_workspace_edit(&changes_with_modules, self, token) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] @@ -3470,62 +3631,75 @@ impl Inner { } let mark = self.performance.mark_with_args("lsp.symbol", ¶ms); - - let navigate_to_items = self - .ts_server - .get_navigate_to_items( - self.snapshot(), - tsc::GetNavigateToItemsArgs { - search: params.query, + let mut items_with_scopes = IndexMap::new(); + for scope in self.document_modules.scopes() { + if token.is_cancelled() { + return Err(LspError::request_cancelled()); + } + let items = self + .ts_server + .get_navigate_to_items( + self.snapshot(), + params.query.clone(), // this matches vscode's hard coded result count - max_result_count: Some(256), - file: None, - }, - token, - ) - .await - .map_err(|err| { - if token.is_cancelled() { - LspError::request_cancelled() - } else { + Some(256), + None, + scope.as_ref(), + token, + ) + .await + .inspect_err(|err| { lsp_warn!( - "Unable to get signature help items from TypeScript: {:#}", - err + "Unable to get signature help items from TypeScript: {:#}\nScope: {}", + err, + scope.as_ref().map(|s| s.as_str()).unwrap_or("null"), ); - LspError::invalid_request() + }) + .unwrap_or_default(); + items_with_scopes.extend(items.into_iter().map(|i| (i, scope.clone()))); + } + let symbol_information = items_with_scopes + .into_iter() + .flat_map(|(item, scope)| { + if token.is_cancelled() { + return Some(Err(LspError::request_cancelled())); } - })?; - - let maybe_symbol_information = if navigate_to_items.is_empty() { + Some(Ok(item.to_symbol_information(scope.as_deref(), self)?)) + }) + .collect::, _>>()?; + let symbol_information = if symbol_information.is_empty() { None } else { - let mut symbol_information = Vec::new(); - for item in navigate_to_items { - if token.is_cancelled() { - return Err(LspError::request_cancelled()); - } - if let 
Some(info) = item.to_symbol_information(self) { - symbol_information.push(info); - } - } Some(symbol_information) }; - self.performance.measure(mark); - Ok(maybe_symbol_information) + Ok(symbol_information) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] - fn project_changed<'a>( &mut self, - modified_scripts: impl IntoIterator, + changed_docs: impl IntoIterator, config_changed: bool, ) { self.project_version += 1; // increment before getting the snapshot + let modified_scripts = changed_docs + .into_iter() + .filter_map(|(u, k)| { + Some((self.document_modules.documents.inspect(u)?, k)) + }) + .flat_map(|(d, k)| { + self + .document_modules + .inspect_modules_by_scope(&d) + .values() + .map(|m| (m.specifier.clone(), k)) + .collect::>() + }) + .collect::>(); self.ts_server.project_changed( self.snapshot(), - modified_scripts, + modified_scripts.iter().map(|(s, k)| (s.as_ref(), *k)), config_changed.then(|| { self .config @@ -3536,13 +3710,13 @@ impl Inner { .collect() }), ); + self.document_modules.remove_expired_modules(); } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] fn send_diagnostics_update(&self) { let snapshot = DiagnosticServerUpdateMessage { snapshot: self.snapshot(), - url_map: self.url_map.clone(), }; if let Err(err) = self.diagnostics_server.update(snapshot) { error!("Cannot update diagnostics: {:#}", err); @@ -4093,7 +4267,7 @@ impl tower_lsp::LanguageServer for LanguageServer { struct PrepareCacheResult { cli_factory: CliFactory, roots: Vec, - open_docs: Vec>, + open_modules: Vec>, } // These are implementations of custom commands supported by the LSP @@ -4149,7 +4323,7 @@ impl Inner { let test_server = testing::TestServer::new( self.client.clone(), self.performance.clone(), - self.config.root_uri().cloned(), + self.config.root_url().cloned(), ); self.maybe_testing_server = Some(test_server); } @@ -4199,6 +4373,7 @@ impl Inner { force_global_cache: bool, ) -> Result { let config_data = self.config.tree.data_for_specifier(&referrer); + let scope = config_data.map(|d| d.scope.clone()); let byonm = config_data.map(|d| d.byonm).unwrap_or(false); let mut roots = if !specifiers.is_empty() { specifiers @@ -4209,9 +4384,10 @@ impl Inner { if byonm { roots.retain(|s| s.scheme() != "npm"); } else if let Some(dep_info) = self - .documents + .document_modules .dep_info_by_scope() - .get(&config_data.map(|d| d.scope.as_ref().clone())) + .get(&scope) + .cloned() { // always include the npm packages since resolution of one npm package // might affect the resolution of other npm packages @@ -4262,10 +4438,20 @@ impl Inner { cli_factory.set_workspace_dir(d.member_dir.clone()); }; - let open_docs = self.documents.documents(DocumentsFilter::OpenDiagnosable); + let open_modules = self + .document_modules + .documents + .open_docs() + .filter(|d| d.is_diagnosable()) + .flat_map(|d| { + self + .document_modules + .module(&Document::Open(d.clone()), scope.as_deref()) + }) + .collect(); Ok(PrepareCacheResult { cli_factory, - open_docs, + open_modules, roots, }) } @@ -4292,14 +4478,15 @@ impl Inner { .added .into_iter() .map(|folder| { - ( - self - .url_map - .uri_to_specifier(&folder.uri, LspUrlKind::Folder), - folder, - ) + let mut url = uri_to_url(&folder.uri); + if !url.path().ends_with('/') { + if let Ok(mut path_segments) = url.path_segments_mut() { + path_segments.push(""); + } + } + (Arc::new(url), folder) }) - .collect::>(); + .collect::>(); for (specifier, folder) in self.config.workspace_folders.as_ref() { if !params.event.removed.is_empty() && 
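`project_changed` above now receives changed document URIs, expands each one into the modules it produces across scopes, forwards those specifiers to the TS server, and finally sweeps stale entries via `remove_expired_modules`. A std-only sketch of that bookkeeping; the weak-handle storage and the exact expiry mechanism are assumptions, not the patch's actual data structures:

```rust
use std::collections::HashMap;
use std::sync::{Arc, Weak};

struct Module {
    specifier: String,
}

struct ModuleStore {
    // (scope, document uri) -> weakly held module, so a closed document's
    // modules can expire instead of staying resident forever.
    by_scope_and_uri: HashMap<(Option<String>, String), Weak<Module>>,
}

impl ModuleStore {
    /// Every live module produced by this document, across all scopes.
    fn modules_for(&self, uri: &str) -> Vec<Arc<Module>> {
        let mut modules = Vec::new();
        for ((_scope, doc_uri), weak) in &self.by_scope_and_uri {
            if doc_uri == uri {
                if let Some(module) = weak.upgrade() {
                    modules.push(module);
                }
            }
        }
        modules
    }

    /// Analogous to the `remove_expired_modules` call above: drop entries
    /// whose module is no longer held anywhere.
    fn remove_expired_modules(&mut self) {
        self.by_scope_and_uri.retain(|_, weak| weak.strong_count() > 0);
    }
}

/// Expand changed document URIs into the specifier list handed to tsc.
fn changed_specifiers(store: &ModuleStore, changed: &[String]) -> Vec<String> {
    changed
        .iter()
        .flat_map(|uri| store.modules_for(uri))
        .map(|m| m.specifier.clone())
        .collect()
}
```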
params.event.removed.iter().any(|f| f.uri == folder.uri) @@ -4397,37 +4584,42 @@ impl Inner { params: InlayHintParams, token: &CancellationToken, ) -> LspResult>> { - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - if !self.is_diagnosable(&specifier) - || !self.config.specifier_enabled(&specifier) - || !self.config.enabled_inlay_hints_for_specifier(&specifier) + let mark = self.performance.mark_with_args("lsp.inlay_hint", ¶ms); + let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Filter, + Exists::Enforce, + Diagnosable::Filter, + )? + else { + return Ok(None); + }; + let Some(module) = self.get_primary_module(&document)? else { + return Ok(None); + }; + if !self + .config + .enabled_inlay_hints_for_specifier(&module.specifier) { return Ok(None); } - - let mark = self.performance.mark_with_args("lsp.inlay_hint", ¶ms); - let asset_or_doc = self.get_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); let text_span = - tsc::TextSpan::from_range(¶ms.range, line_index.clone()).map_err( - |err| { + tsc::TextSpan::from_range(¶ms.range, module.line_index.clone()) + .map_err(|err| { error!("Failed to convert range to text_span: {:#}", err); LspError::internal_error() - }, - )?; + })?; let maybe_inlay_hints = self .ts_server .provide_inlay_hints( self.snapshot(), - specifier.clone(), + &module.specifier, text_span, tsc::UserPreferences::from_config_for_specifier( &self.config, - &specifier, + &module.specifier, ), - asset_or_doc.scope().cloned(), + module.scope.as_ref(), token, ) .await @@ -4447,7 +4639,7 @@ impl Inner { if token.is_cancelled() { return Err(LspError::request_cancelled()); } - Ok(hint.to_lsp(line_index.clone(), self)) + Ok(hint.to_lsp(&module, self)) }) .collect() }) @@ -4476,19 +4668,23 @@ impl Inner { let mark = self .performance .mark_with_args("lsp.virtual_text_document", ¶ms); - let specifier = self - .url_map - .uri_to_specifier(¶ms.text_document.uri, LspUrlKind::File); - let contents = if specifier.scheme() == "deno" - && specifier.path() == "/status.md" + let contents = if params + .text_document + .uri + .scheme() + .is_some_and(|s| s.eq_lowercase("deno")) + && params.text_document.uri.path().as_str() == "/status.md" { let mut contents = String::new(); - let documents = self.documents.documents(DocumentsFilter::All); - let mut documents_specifiers = - documents.iter().map(|d| d.specifier()).collect::>(); - documents_specifiers.sort(); + let mut open_docs = self + .document_modules + .documents + .open_docs() + .collect::>(); + open_docs.sort_by_cached_key(|d| d.uri.to_string()); + let mut server_docs = self.document_modules.documents.server_docs(); + server_docs.sort_by_cached_key(|d| d.uri.to_string()); let measures = self.performance.to_vec(); - let workspace_settings = self.config.workspace_settings(); write!( contents, @@ -4496,41 +4692,47 @@ impl Inner { ## Workspace Settings -```json {} -``` -## Workspace Details +## Documents - -
Documents in memory: {}</summary>
+<details><summary>Open: {}</summary>
 
-    - {}
+{}
 
-  </details>
+</details>
 
-  - <details><summary>Performance measures: {}</summary>
+<details><summary>Server: {}</summary>
 
-    - {}
+{}
 
-  </details>
+</details>
+
+## Performance Measures
+
+<details><summary>Measures: {}</summary>
+
+{}
+
+</details>
"#, - serde_json::to_string_pretty(&workspace_settings) - .inspect_err(|e| { - lsp_warn!("{e}"); - }) - .unwrap(), - documents_specifiers.len(), - documents_specifiers - .into_iter() - .map(|s| s.as_str()) - .collect::>() - .join("\n - "), + self.config.settings.by_workspace_folder + .iter() + .filter_map(|(folder_url, settings)| Some((format!("\"{folder_url}\""), settings.as_ref()?))) + .chain(std::iter::once(("Unscoped".to_string(), &self.config.settings.unscoped))) + .map(|(title, settings)| format!("
<details><summary>{title}</summary>\n\n```json\n{}\n```\n\n</details>
", serde_json::to_string_pretty(settings).unwrap())) + .collect::>() + .join("\n\n"), + open_docs.len(), + open_docs.iter().map(|d| format!("- {}", d.uri.as_str())).collect::>().join("\n"), + server_docs.len(), + server_docs.iter().map(|d| format!("- {}", d.uri.as_str())).collect::>().join("\n"), measures.len(), measures .iter() - .map(|m| m.to_string()) - .collect::>() - .join("\n - ") + .map(|m| format!("- {m}")) + .collect::>() + .join("\n"), ) .unwrap(); @@ -4558,14 +4760,19 @@ impl Inner { } Some(contents) + } else if let Some(document) = self.get_document( + ¶ms.text_document.uri, + Enabled::Ignore, + Exists::Filter, + Diagnosable::Ignore, + )? { + Some(document.text().to_string()) } else { - let asset_or_doc = self.get_maybe_asset_or_document(&specifier); - if let Some(asset_or_doc) = asset_or_doc { - Some(asset_or_doc.text_str().to_string()) - } else { - error!("The source was not found: {}", specifier); - None - } + lsp_warn!( + "The document was not found: {}", + params.text_document.uri.as_str() + ); + None }; self.performance.measure(mark); Ok(contents) @@ -4650,14 +4857,14 @@ mod tests { Default::default(), vec![ ( - temp_dir.url().join("root1/").unwrap(), + Arc::new(temp_dir.url().join("root1/").unwrap()), WorkspaceSettings { enable: Some(true), ..Default::default() }, ), ( - temp_dir.url().join("root2/").unwrap(), + Arc::new(temp_dir.url().join("root2/").unwrap()), WorkspaceSettings { enable: Some(true), enable_paths: Some(vec![ @@ -4670,21 +4877,21 @@ mod tests { }, ), ( - temp_dir.url().join("root2/root2.1/").unwrap(), + Arc::new(temp_dir.url().join("root2/root2.1/").unwrap()), WorkspaceSettings { enable: Some(true), ..Default::default() }, ), ( - temp_dir.url().join("root3/").unwrap(), + Arc::new(temp_dir.url().join("root3/").unwrap()), WorkspaceSettings { enable: Some(false), ..Default::default() }, ), ( - temp_dir.url().join("root4_parent/root4/").unwrap(), + Arc::new(temp_dir.url().join("root4_parent/root4/").unwrap()), WorkspaceSettings { enable: Some(true), ..Default::default() @@ -4694,6 +4901,10 @@ mod tests { ); let (workspace_files, hit_limit) = Inner::walk_workspace(&config); + let workspace_files = workspace_files + .into_iter() + .map(|p| Url::from_file_path(p).unwrap()) + .collect::>(); assert!(!hit_limit); assert_eq!( json!(workspace_files), diff --git a/cli/lsp/lsp_custom.rs b/cli/lsp/lsp_custom.rs index 050fcf3184..5800072792 100644 --- a/cli/lsp/lsp_custom.rs +++ b/cli/lsp/lsp_custom.rs @@ -57,6 +57,7 @@ pub struct DenoConfigurationData { #[serde(rename_all = "camelCase")] pub struct DidRefreshDenoConfigurationTreeNotificationParams { pub data: Vec, + pub deno_dir_npm_folder_uri: Option, } pub enum DidRefreshDenoConfigurationTreeNotification {} diff --git a/cli/lsp/refactor.rs b/cli/lsp/refactor.rs index e1abd963a2..c661ad52f5 100644 --- a/cli/lsp/refactor.rs +++ b/cli/lsp/refactor.rs @@ -5,7 +5,7 @@ use deno_core::serde::Deserialize; use deno_core::serde::Serialize; -use deno_core::ModuleSpecifier; +use lsp_types::Uri; use once_cell::sync::Lazy; use tower_lsp::lsp_types as lsp; @@ -150,7 +150,7 @@ pub static ALL_KNOWN_REFACTOR_ACTION_KINDS: Lazy< #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct RefactorCodeActionData { - pub specifier: ModuleSpecifier, + pub uri: Uri, pub range: lsp::Range, pub refactor_name: String, pub action_name: String, diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index 7a0d5e7eef..03c88940bd 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -299,7 +299,7 @@ impl 
LspScopeResolver { #[derive(Debug, Default, Clone)] pub struct LspResolver { unscoped: Arc, - by_scope: BTreeMap>, + by_scope: BTreeMap, Arc>, } impl LspResolver { @@ -357,9 +357,7 @@ impl LspResolver { pub async fn set_dep_info_by_scope( &self, - dep_info_by_scope: &Arc< - BTreeMap, Arc>, - >, + dep_info_by_scope: &Arc>, Arc>>, ) { for (scope, resolver) in [(None, &self.unscoped)] .into_iter() diff --git a/cli/lsp/testing/server.rs b/cli/lsp/testing/server.rs index e0570fcada..159788b0f2 100644 --- a/cli/lsp/testing/server.rs +++ b/cli/lsp/testing/server.rs @@ -9,6 +9,7 @@ use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::serde_json::json; use deno_core::serde_json::Value; +use deno_core::url::Url; use deno_core::ModuleSpecifier; use deno_runtime::tokio_util::create_basic_runtime; use tokio::sync::mpsc; @@ -22,7 +23,6 @@ use super::lsp_custom; use crate::lsp::client::Client; use crate::lsp::client::TestingNotification; use crate::lsp::config; -use crate::lsp::documents::DocumentsFilter; use crate::lsp::language_server::StateSnapshot; use crate::lsp::performance::Performance; use crate::lsp::urls::url_to_uri; @@ -63,7 +63,7 @@ impl TestServer { pub fn new( client: Client, performance: Arc, - maybe_root_uri: Option, + maybe_root_url: Option>, ) -> Self { let tests = Default::default(); @@ -83,7 +83,7 @@ impl TestServer { let tests = server.tests.clone(); let client = server.client.clone(); let performance = server.performance.clone(); - let mru = maybe_root_uri.clone(); + let mru = maybe_root_url.clone(); let _update_join_handle = thread::spawn(move || { let runtime = create_basic_runtime(); @@ -99,47 +99,62 @@ impl TestServer { let mut keys: HashSet = tests.keys().cloned().collect(); for document in snapshot + .document_modules .documents - .documents(DocumentsFilter::AllDiagnosable) + .filtered_docs(|d| d.is_file_like() && d.is_diagnosable()) { - let specifier = document.specifier(); - if specifier.scheme() != "file" { + let Some(module) = + snapshot.document_modules.primary_module(&document) + else { + continue; + }; + if module.specifier.scheme() != "file" { continue; } - if !snapshot.config.specifier_enabled_for_test(specifier) { + if !snapshot + .config + .specifier_enabled_for_test(&module.specifier) + { continue; } - keys.remove(specifier); + keys.remove(&module.specifier); let script_version = document.script_version(); - let valid = - if let Some((_, old_script_version)) = tests.get(specifier) { - old_script_version == &script_version - } else { - false - }; + let valid = if let Some((_, old_script_version)) = + tests.get(&module.specifier) + { + old_script_version == &script_version + } else { + false + }; if !valid { let was_empty = tests - .remove(specifier) + .remove(&module.specifier) .map(|(tm, _)| tm.is_empty()) .unwrap_or(true); - let test_module = document - .maybe_test_module() + let test_module = module + .test_module() .await .map(|tm| tm.as_ref().clone()) - .unwrap_or_else(|| TestModule::new(specifier.clone())); + .unwrap_or_else(|| { + TestModule::new(module.specifier.as_ref().clone()) + }); if !test_module.is_empty() { if let Ok(params) = - test_module.as_replace_notification(mru.as_ref()) + test_module.as_replace_notification(mru.as_deref()) { client.send_test_notification(params); } } else if !was_empty { - if let Ok(params) = as_delete_notification(specifier) { + if let Ok(params) = + as_delete_notification(&module.specifier) + { client.send_test_notification(params); } } - tests - .insert(specifier.clone(), (test_module, 
script_version)); + tests.insert( + module.specifier.as_ref().clone(), + (test_module, script_version), + ); } } for key in &keys { @@ -169,7 +184,7 @@ impl TestServer { runs.get(&id).cloned() }; if let Some(run) = maybe_run { - match run.exec(&client, maybe_root_uri.as_ref()).await { + match run.exec(&client, maybe_root_url.as_deref()).await { Ok(_) => (), Err(err) => { client.show_message(lsp::MessageType::ERROR, err); diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index c2c1664b7d..3294673177 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -22,9 +22,7 @@ use deno_core::anyhow::anyhow; use deno_core::convert::Smi; use deno_core::convert::ToV8; use deno_core::error::AnyError; -use deno_core::futures::stream::FuturesOrdered; use deno_core::futures::FutureExt; -use deno_core::futures::StreamExt; use deno_core::op2; use deno_core::parking_lot::Mutex; use deno_core::resolve_url; @@ -35,6 +33,7 @@ use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::serde_v8; +use deno_core::url::Url; use deno_core::v8; use deno_core::JsRuntime; use deno_core::ModuleSpecifier; @@ -51,6 +50,7 @@ use indexmap::IndexMap; use indexmap::IndexSet; use lazy_regex::lazy_regex; use log::error; +use lsp_types::Uri; use node_resolver::cache::NodeResolutionThreadLocalCache; use node_resolver::ResolutionMode; use once_cell::sync::Lazy; @@ -68,14 +68,12 @@ use tower_lsp::jsonrpc::Error as LspError; use tower_lsp::jsonrpc::Result as LspResult; use tower_lsp::lsp_types as lsp; -use super::analysis::CodeActionData; use super::code_lens; +use super::code_lens::CodeLensData; use super::config; use super::config::LspTsConfig; -use super::documents::AssetOrDocument; -use super::documents::Document; -use super::documents::DocumentsFilter; -use super::documents::ASSET_DOCUMENTS; +use super::documents::DocumentModule; +use super::documents::DocumentText; use super::language_server; use super::language_server::StateSnapshot; use super::logging::lsp_log; @@ -91,8 +89,6 @@ use super::semantic_tokens::SemanticTokensBuilder; use super::text::LineIndex; use super::urls::uri_to_url; use super::urls::url_to_uri; -use super::urls::INVALID_SPECIFIER; -use super::urls::INVALID_URI; use crate::args::jsr_url; use crate::args::FmtOptionsConfig; use crate::lsp::logging::lsp_warn; @@ -121,7 +117,7 @@ const FILE_EXTENSION_KIND_MODIFIERS: &[&str] = type Request = ( TscRequest, - Option, + Option>, Arc, oneshot::Sender>, CancellationToken, @@ -297,7 +293,7 @@ impl Serialize for ChangeKind { pub struct PendingChange { pub modified_scripts: Vec<(String, ChangeKind)>, pub project_version: usize, - pub new_configs_by_scope: Option>>, + pub new_configs_by_scope: Option, Arc>>, } impl<'a> ToV8<'a> for PendingChange { @@ -349,7 +345,7 @@ impl PendingChange { &mut self, new_version: usize, modified_scripts: Vec<(String, ChangeKind)>, - new_configs_by_scope: Option>>, + new_configs_by_scope: Option, Arc>>, ) { use ChangeKind::*; self.project_version = self.project_version.max(new_version); @@ -394,9 +390,6 @@ impl PendingChange { } } -pub type DiagnosticsMap = IndexMap>; -pub type ScopedAmbientModules = - HashMap, MaybeAmbientModules>; pub type MaybeAmbientModules = Option>; impl TsServer { @@ -473,8 +466,8 @@ impl TsServer { pub fn project_changed<'a>( &self, snapshot: Arc, - modified_scripts: impl IntoIterator, - new_configs_by_scope: Option>>, + modified_scripts: impl IntoIterator, + new_configs_by_scope: Option, Arc>>, ) { let modified_scripts = modified_scripts .into_iter() @@ -503,66 +496,31 @@ 
impl TsServer { pub async fn get_diagnostics( &self, snapshot: Arc, - specifiers: Vec, + specifiers: impl IntoIterator, + scope: Option<&Arc>, token: &CancellationToken, - ) -> Result<(DiagnosticsMap, ScopedAmbientModules), AnyError> { - let mut diagnostics_map = IndexMap::with_capacity(specifiers.len()); - let mut specifiers_by_scope = BTreeMap::new(); - for specifier in specifiers { - let scope = if snapshot.documents.is_valid_file_referrer(&specifier) { - snapshot - .config - .tree - .scope_for_specifier(&specifier) - .cloned() - } else { - snapshot - .documents - .get(&specifier) - .and_then(|d| d.scope().cloned()) - }; - let specifiers = specifiers_by_scope.entry(scope).or_insert(vec![]); - specifiers.push(self.specifier_map.denormalize(&specifier)); - } - let mut results = FuturesOrdered::new(); - for (scope, specifiers) in specifiers_by_scope { - let req = - TscRequest::GetDiagnostics((specifiers, snapshot.project_version)); - results.push_back( - self - .request::<(DiagnosticsMap, MaybeAmbientModules)>( - snapshot.clone(), - req, - scope.clone(), - token, - ) - .map(|res| (scope, res)), - ); - } - let mut ambient_modules_by_scope = HashMap::with_capacity(2); - while let Some((scope, raw_diagnostics)) = results.next().await { - if let Some(err) = raw_diagnostics.as_ref().err() { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); - } else { - lsp_warn!("Error generating TypeScript diagnostics: {err}"); - } - } - let (raw_diagnostics, ambient_modules) = - raw_diagnostics.unwrap_or_default(); - for (mut specifier, mut diagnostics) in raw_diagnostics { - specifier = self.specifier_map.normalize(&specifier)?.to_string(); - for diagnostic in &mut diagnostics { + ) -> Result<(Vec>, MaybeAmbientModules), AnyError> + { + let specifiers = specifiers + .into_iter() + .map(|s| self.specifier_map.denormalize(s)) + .collect(); + let req = + TscRequest::GetDiagnostics((specifiers, snapshot.project_version)); + self + .request::<(Vec>, MaybeAmbientModules)>( + snapshot, req, scope, token, + ) + .await + .and_then(|(mut diagnostics, ambient_modules)| { + for diagnostic in diagnostics.iter_mut().flatten() { if token.is_cancelled() { return Err(anyhow!("request cancelled")); } normalize_diagnostic(diagnostic, &self.specifier_map)?; } - diagnostics_map.insert(specifier, diagnostics); - } - ambient_modules_by_scope.insert(scope, ambient_modules); - } - Ok((diagnostics_map, ambient_modules_by_scope)) + Ok((diagnostics, ambient_modules)) + }) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] @@ -580,12 +538,7 @@ impl TsServer { { let req = TscRequest::CleanupSemanticCache; self - .request::<()>( - snapshot.clone(), - req, - scope.cloned(), - &Default::default(), - ) + .request::<()>(snapshot.clone(), req, scope, &Default::default()) .await .map_err(|err| { log::error!("Failed to request to tsserver {}", err); @@ -599,68 +552,40 @@ impl TsServer { pub async fn find_references( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result>, AnyError> { let req = TscRequest::FindReferences(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); - let mut results = FuturesOrdered::new(); - for scope in snapshot - .config - .tree - .data_by_scope() - .keys() - .map(Some) - .chain(std::iter::once(None)) - { - results.push_back(self.request::>>( - snapshot.clone(), - req.clone(), - scope.cloned(), - token, - )); - } - let mut all_symbols 
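With the reworked `get_diagnostics` above, the per-scope batching moves to the caller: it groups specifiers by scope, issues one request per scope, and pairs each returned diagnostics vector back with its specifier. A caller-side sketch under the assumption that the result vectors come back in input order, which the flattened normalization loop above suggests; `fetch` stands in for the real scoped TsServer call:

```rust
use std::collections::BTreeMap;

type Scope = Option<String>;

/// Batch specifiers per scope, make one request per scope, and zip the
/// per-file results back onto the specifiers that produced them.
fn diagnostics_by_specifier(
    specifiers_by_scope: BTreeMap<Scope, Vec<String>>,
    mut fetch: impl FnMut(&Scope, &[String]) -> Vec<Vec<String>>,
) -> Vec<(String, Vec<String>)> {
    let mut paired = Vec::new();
    for (scope, specifiers) in specifiers_by_scope {
        let per_file = fetch(&scope, &specifiers);
        paired.extend(specifiers.into_iter().zip(per_file));
    }
    paired
}
```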
= IndexSet::new(); - while let Some(symbols) = results.next().await { - let symbols = symbols - .inspect_err(|err| { - let err = err.to_string(); - if !err.contains("Could not find source file") { - lsp_warn!("Unable to get references from TypeScript: {err}"); + self + .request::>>(snapshot, req, scope, token) + .await + .and_then(|mut symbols| { + for symbol in symbols.iter_mut().flatten() { + if token.is_cancelled() { + return Err(anyhow!("request cancelled")); } - }) - .unwrap_or_default(); - let Some(mut symbols) = symbols else { - continue; - }; - for symbol in &mut symbols { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); + symbol.normalize(&self.specifier_map)?; } - symbol.normalize(&self.specifier_map)?; - } - all_symbols.extend(symbols); - } - if all_symbols.is_empty() { - return Ok(None); - } - Ok(Some(all_symbols.into_iter().collect())) + Ok(symbols) + }) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] pub async fn get_navigation_tree( &self, snapshot: Arc, - specifier: ModuleSpecifier, - scope: Option, + specifier: &Url, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result { let req = TscRequest::GetNavigationTree((self .specifier_map - .denormalize(&specifier),)); + .denormalize(specifier),)); self.request(snapshot, req, scope, token).await } @@ -683,13 +608,13 @@ impl TsServer { pub async fn get_quick_info( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::GetQuickInfoAtPosition(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); self.request(snapshot, req, scope, token).await @@ -700,16 +625,16 @@ impl TsServer { pub async fn get_code_fixes( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, range: Range, codes: Vec, format_code_settings: FormatCodeSettings, preferences: UserPreferences, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::GetCodeFixesAtPosition(Box::new(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), range.start, range.end, codes, @@ -732,12 +657,12 @@ impl TsServer { pub async fn get_applicable_refactors( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, range: Range, preferences: Option, trigger_kind: Option, only: String, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, LspError> { let trigger_kind = trigger_kind.map(|reason| match reason { @@ -746,7 +671,7 @@ impl TsServer { _ => unreachable!(), }); let req = TscRequest::GetApplicableRefactors(Box::new(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), range.into(), preferences.unwrap_or_default(), trigger_kind, @@ -762,21 +687,23 @@ impl TsServer { } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] + #[allow(clippy::too_many_arguments)] pub async fn get_combined_code_fix( &self, snapshot: Arc, - code_action_data: &CodeActionData, + specifier: &Url, + fix_id: &str, format_code_settings: FormatCodeSettings, preferences: UserPreferences, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result { let req = TscRequest::GetCombinedCodeFix(Box::new(( CombinedCodeFixScope { r#type: "file", - file_name: self.specifier_map.denormalize(&code_action_data.specifier), + file_name: 
self.specifier_map.denormalize(specifier), }, - code_action_data.fix_id.clone(), + fix_id.to_string(), format_code_settings, preferences, ))); @@ -794,17 +721,17 @@ impl TsServer { pub async fn get_edits_for_refactor( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, format_code_settings: FormatCodeSettings, range: Range, refactor_name: String, action_name: String, preferences: Option, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result { let req = TscRequest::GetEditsForRefactor(Box::new(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), format_code_settings, range.into(), refactor_name, @@ -821,76 +748,53 @@ impl TsServer { } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] + #[allow(clippy::too_many_arguments)] pub async fn get_edits_for_file_rename( &self, snapshot: Arc, - old_specifier: ModuleSpecifier, - new_specifier: ModuleSpecifier, + old_specifier: &Url, + new_specifier: &Url, format_code_settings: FormatCodeSettings, user_preferences: UserPreferences, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::GetEditsForFileRename(Box::new(( - self.specifier_map.denormalize(&old_specifier), - self.specifier_map.denormalize(&new_specifier), + self.specifier_map.denormalize(old_specifier), + self.specifier_map.denormalize(new_specifier), format_code_settings, user_preferences, ))); - let mut results = FuturesOrdered::new(); - for scope in snapshot - .config - .tree - .data_by_scope() - .keys() - .map(Some) - .chain(std::iter::once(None)) - { - results.push_back(self.request::>( - snapshot.clone(), - req.clone(), - scope.cloned(), - token, - )); - } - let mut all_changes = IndexSet::new(); - while let Some(changes) = results.next().await { - if let Some(err) = changes.as_ref().err() { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); - } else { - lsp_warn!( - "Unable to get edits for file rename from TypeScript: {err}" - ); - } - } - let mut changes = changes.unwrap_or_default(); - for changes in &mut changes { - changes.normalize(&self.specifier_map)?; - for text_changes in &mut changes.text_changes { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); + self + .request::>(snapshot, req, scope, token) + .await + .and_then(|mut changes| { + for changes in &mut changes { + changes.normalize(&self.specifier_map)?; + for text_changes in &mut changes.text_changes { + if token.is_cancelled() { + return Err(anyhow!("request cancelled")); + } + text_changes.new_text = + to_percent_decoded_str(&text_changes.new_text); } - text_changes.new_text = - to_percent_decoded_str(&text_changes.new_text); } - } - all_changes.extend(changes); - } - Ok(all_changes.into_iter().collect()) + Ok(changes) + }) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] pub async fn get_document_highlights( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, files_to_search: Vec, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result>, AnyError> { let req = TscRequest::GetDocumentHighlights(Box::new(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, files_to_search .into_iter() @@ -904,13 +808,13 @@ impl TsServer { pub async fn get_definition( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> 
Result, AnyError> { let req = TscRequest::GetDefinitionAndBoundSpan(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); self @@ -930,13 +834,13 @@ impl TsServer { pub async fn get_type_definition( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result>, AnyError> { let req = TscRequest::GetTypeDefinitionAtPosition(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); self @@ -958,15 +862,15 @@ impl TsServer { pub async fn get_completions( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, options: GetCompletionsAtPositionOptions, format_code_settings: FormatCodeSettings, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::GetCompletionsAtPosition(Box::new(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, options, format_code_settings, @@ -983,21 +887,28 @@ impl TsServer { } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] + #[allow(clippy::too_many_arguments)] pub async fn get_completion_details( &self, snapshot: Arc, - args: GetCompletionDetailsArgs, - scope: Option, + specifier: &Url, + position: u32, + name: String, + format_code_settings: Option, + source: Option, + preferences: Option, + data: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::GetCompletionEntryDetails(Box::new(( - self.specifier_map.denormalize(&args.specifier), - args.position, - args.name, - args.format_code_settings.unwrap_or_default(), - args.source, - args.preferences, - args.data, + self.specifier_map.denormalize(specifier), + position, + name, + format_code_settings.unwrap_or_default(), + source, + preferences, + data, ))); self .request::>(snapshot, req, scope, token) @@ -1014,68 +925,42 @@ impl TsServer { pub async fn get_implementations( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result>, AnyError> { let req = TscRequest::GetImplementationAtPosition(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); - let mut results = FuturesOrdered::new(); - for scope in snapshot - .config - .tree - .data_by_scope() - .keys() - .map(Some) - .chain(std::iter::once(None)) - { - results.push_back(self.request::>>( - snapshot.clone(), - req.clone(), - scope.cloned(), - token, - )); - } - let mut all_locations = IndexSet::new(); - while let Some(locations) = results.next().await { - let locations = locations - .inspect_err(|err| { - let err = err.to_string(); - if !err.contains("Could not find source file") { - lsp_warn!("Unable to get implementations from TypeScript: {err}"); + self + .request::>>( + snapshot, req, scope, token, + ) + .await + .and_then(|mut locations| { + for location in locations.iter_mut().flatten() { + if token.is_cancelled() { + return Err(anyhow!("request cancelled")); } - }) - .unwrap_or_default(); - let Some(mut locations) = locations else { - continue; - }; - for location in &mut locations { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); + location.normalize(&self.specifier_map)?; } - location.normalize(&self.specifier_map)?; - } - all_locations.extend(locations); - } - if 
all_locations.is_empty() { - return Ok(None); - } - Ok(Some(all_locations.into_iter().collect())) + Ok(locations) + }) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] pub async fn get_outlining_spans( &self, snapshot: Arc, - specifier: ModuleSpecifier, - scope: Option, + specifier: &Url, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::GetOutliningSpans((self .specifier_map - .denormalize(&specifier),)); + .denormalize(specifier),)); self.request(snapshot, req, scope, token).await } @@ -1083,62 +968,37 @@ impl TsServer { pub async fn provide_call_hierarchy_incoming_calls( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::ProvideCallHierarchyIncomingCalls(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); - let mut results = FuturesOrdered::new(); - for scope in snapshot - .config - .tree - .data_by_scope() - .keys() - .map(Some) - .chain(std::iter::once(None)) - { - results.push_back(self.request::>( - snapshot.clone(), - req.clone(), - scope.cloned(), - token, - )); - } - let mut all_calls = IndexSet::new(); - while let Some(calls) = results.next().await { - if let Some(err) = calls.as_ref().err() { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); - } else { - lsp_warn!("Unable to get incoming calls from TypeScript: {err}"); + self + .request::>(snapshot, req, scope, token) + .await + .and_then(|mut calls| { + for call in &mut calls { + call.normalize(&self.specifier_map)?; } - } - let mut calls = calls.unwrap_or_default(); - for call in &mut calls { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); - } - call.normalize(&self.specifier_map)?; - } - all_calls.extend(calls) - } - Ok(all_calls.into_iter().collect()) + Ok(calls) + }) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] pub async fn provide_call_hierarchy_outgoing_calls( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::ProvideCallHierarchyOutgoingCalls(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); self @@ -1159,13 +1019,13 @@ impl TsServer { pub async fn prepare_call_hierarchy( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result>, AnyError> { let req = TscRequest::PrepareCallHierarchy(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); self @@ -1193,75 +1053,44 @@ impl TsServer { pub async fn find_rename_locations( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, user_preferences: UserPreferences, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result>, AnyError> { let req = TscRequest::FindRenameLocations(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, false, false, user_preferences, )); - let mut results = FuturesOrdered::new(); - for scope in snapshot - .config - .tree - .data_by_scope() - .keys() - .map(Some) - .chain(std::iter::once(None)) - { - results.push_back(self.request::>>( - snapshot.clone(), - req.clone(), - scope.cloned(), - 
token, - )); - } - let mut all_locations = IndexSet::new(); - while let Some(locations) = results.next().await { - if let Some(err) = locations.as_ref().err() { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); - } else { - let err = err.to_string(); - if !err.contains("Could not find source file") { - lsp_warn!("Unable to get rename locations from TypeScript: {err}"); + self + .request::>>(snapshot, req, scope, token) + .await + .and_then(|mut locations| { + for location in locations.iter_mut().flatten() { + if token.is_cancelled() { + return Err(anyhow!("request cancelled")); } + location.normalize(&self.specifier_map)?; } - } - let locations = locations.unwrap_or_default(); - let Some(mut locations) = locations else { - continue; - }; - for symbol in &mut locations { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); - } - symbol.normalize(&self.specifier_map)?; - } - all_locations.extend(locations); - } - if all_locations.is_empty() { - return Ok(None); - } - Ok(Some(all_locations.into_iter().collect())) + Ok(locations) + }) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] pub async fn get_smart_selection_range( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result { let req = TscRequest::GetSmartSelectionRange(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, )); self.request(snapshot, req, scope, token).await @@ -1271,13 +1100,13 @@ impl TsServer { pub async fn get_encoded_semantic_classifications( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, range: Range, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result { let req = TscRequest::GetEncodedSemanticClassifications(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), TextSpan { start: range.start, length: range.end - range.start, @@ -1291,14 +1120,14 @@ impl TsServer { pub async fn get_signature_help_items( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, position: u32, options: SignatureHelpItemsOptions, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::GetSignatureHelpItems(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), position, options, )); @@ -1309,66 +1138,46 @@ impl TsServer { pub async fn get_navigate_to_items( &self, snapshot: Arc, - args: GetNavigateToItemsArgs, + search: String, + max_result_count: Option, + file: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result, AnyError> { let req = TscRequest::GetNavigateToItems(( - args.search, - args.max_result_count, - args.file.map(|f| match resolve_url(&f) { + search, + max_result_count, + file.map(|f| match resolve_url(&f) { Ok(s) => self.specifier_map.denormalize(&s), Err(_) => f, }), )); - let mut results = FuturesOrdered::new(); - for scope in snapshot - .config - .tree - .data_by_scope() - .keys() - .map(Some) - .chain(std::iter::once(None)) - { - results.push_back(self.request::>( - snapshot.clone(), - req.clone(), - scope.cloned(), - token, - )); - } - let mut all_items = IndexSet::new(); - while let Some(items) = results.next().await { - if let Some(err) = items.as_ref().err() { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); - } else { - lsp_warn!("Unable to get 'navigate to' items 
from TypeScript: {err}"); + self + .request::>(snapshot, req, scope, token) + .await + .and_then(|mut items| { + for item in &mut items { + if token.is_cancelled() { + return Err(anyhow!("request cancelled")); + } + item.normalize(&self.specifier_map)?; } - } - let mut items = items.unwrap_or_default(); - for item in &mut items { - if token.is_cancelled() { - return Err(anyhow!("request cancelled")); - } - item.normalize(&self.specifier_map)?; - } - all_items.extend(items) - } - Ok(all_items.into_iter().collect()) + Ok(items) + }) } #[cfg_attr(feature = "lsp-tracing", tracing::instrument(skip_all))] pub async fn provide_inlay_hints( &self, snapshot: Arc, - specifier: ModuleSpecifier, + specifier: &Url, text_span: TextSpan, user_preferences: UserPreferences, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result>, AnyError> { let req = TscRequest::ProvideInlayHints(( - self.specifier_map.denormalize(&specifier), + self.specifier_map.denormalize(specifier), text_span, user_preferences, )); @@ -1379,7 +1188,7 @@ impl TsServer { &self, snapshot: Arc, req: TscRequest, - scope: Option, + scope: Option<&Arc>, token: &CancellationToken, ) -> Result where @@ -1398,7 +1207,7 @@ impl TsServer { .sender .send(( req, - scope, + scope.cloned(), snapshot, tx, token.clone(), @@ -1426,12 +1235,13 @@ impl TsServer { fn get_tag_body_text( tag: &JsDocTagInfo, + module: &DocumentModule, language_server: &language_server::Inner, ) -> Option { tag.text.as_ref().map(|display_parts| { // TODO(@kitsonk) check logic in vscode about handling this API change in // tsserver - let text = display_parts_to_string(display_parts, language_server); + let text = display_parts_to_string(display_parts, module, language_server); match tag.name.as_str() { "example" => { if CAPTION_RE.is_match(&text) { @@ -1455,6 +1265,7 @@ fn get_tag_body_text( fn get_tag_documentation( tag: &JsDocTagInfo, + module: &DocumentModule, language_server: &language_server::Inner, ) -> String { match tag.name.as_str() { @@ -1462,7 +1273,8 @@ fn get_tag_documentation( if let Some(display_parts) = &tag.text { // TODO(@kitsonk) check logic in vscode about handling this API change // in tsserver - let text = display_parts_to_string(display_parts, language_server); + let text = + display_parts_to_string(display_parts, module, language_server); let body: Vec<&str> = PART_RE.split(&text).collect(); if body.len() == 3 { let param = body[1]; @@ -1482,7 +1294,7 @@ fn get_tag_documentation( _ => (), } let label = format!("*@{}*", tag.name); - let maybe_text = get_tag_body_text(tag, language_server); + let maybe_text = get_tag_body_text(tag, module, language_server); if let Some(text) = maybe_text { if text.contains('\n') { format!("{label} \n{text}") @@ -1802,6 +1614,7 @@ struct Link { /// to the their source location. 
fn display_parts_to_string( parts: &[SymbolDisplayPart], + module: &DocumentModule, language_server: &language_server::Inner, ) -> String { let mut out = Vec::::new(); @@ -1812,7 +1625,7 @@ fn display_parts_to_string( "link" => { if let Some(link) = current_link.as_mut() { if let Some(target) = &link.target { - if let Some(specifier) = target.to_target(language_server) { + if let Some(specifier) = target.to_target(module, language_server) { let link_text = link.text.clone().unwrap_or_else(|| { link .name @@ -1911,14 +1724,14 @@ fn display_parts_to_string( impl QuickInfo { pub fn to_hover( &self, - line_index: Arc, + module: &DocumentModule, language_server: &language_server::Inner, ) -> lsp::Hover { let mut parts = Vec::::new(); if let Some(display_string) = self .display_parts .clone() - .map(|p| display_parts_to_string(&p, language_server)) + .map(|p| display_parts_to_string(&p, module, language_server)) { parts.push(lsp::MarkedString::from_language_code( "typescript".to_string(), @@ -1928,14 +1741,16 @@ impl QuickInfo { if let Some(documentation) = self .documentation .clone() - .map(|p| display_parts_to_string(&p, language_server)) + .map(|p| display_parts_to_string(&p, module, language_server)) { parts.push(lsp::MarkedString::from_markdown(documentation)); } if let Some(tags) = &self.tags { let tags_preview = tags .iter() - .map(|tag_info| get_tag_documentation(tag_info, language_server)) + .map(|tag_info| { + get_tag_documentation(tag_info, module, language_server) + }) .collect::>() .join(" \n\n"); if !tags_preview.is_empty() { @@ -1946,7 +1761,7 @@ impl QuickInfo { } lsp::Hover { contents: lsp::HoverContents::Array(parts), - range: Some(self.text_span.to_range(line_index)), + range: Some(self.text_span.to_range(module.line_index.clone())), } } } @@ -1975,42 +1790,39 @@ impl DocumentSpan { impl DocumentSpan { pub fn to_link( &self, - line_index: Arc, + module: &DocumentModule, language_server: &language_server::Inner, ) -> Option { let target_specifier = resolve_url(&self.file_name).ok()?; - let target_asset_or_doc = - language_server.get_maybe_asset_or_document(&target_specifier)?; - let target_line_index = target_asset_or_doc.line_index(); - let file_referrer = target_asset_or_doc.file_referrer(); - let target_uri = language_server - .url_map - .specifier_to_uri(&target_specifier, file_referrer) - .ok()?; + let target_module = language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + )?; let (target_range, target_selection_range) = if let Some(context_span) = &self.context_span { ( - context_span.to_range(target_line_index.clone()), - self.text_span.to_range(target_line_index), + context_span.to_range(target_module.line_index.clone()), + self.text_span.to_range(target_module.line_index.clone()), ) } else { ( - self.text_span.to_range(target_line_index.clone()), - self.text_span.to_range(target_line_index), + self.text_span.to_range(target_module.line_index.clone()), + self.text_span.to_range(target_module.line_index.clone()), ) }; let origin_selection_range = if let Some(original_context_span) = &self.original_context_span { - Some(original_context_span.to_range(line_index)) + Some(original_context_span.to_range(module.line_index.clone())) } else { - self - .original_text_span - .as_ref() - .map(|original_text_span| original_text_span.to_range(line_index)) + self.original_text_span.as_ref().map(|original_text_span| { + original_text_span.to_range(module.line_index.clone()) + }) }; let link = lsp::LocationLink { 
origin_selection_range, - target_uri, + target_uri: target_module.uri.as_ref().clone(), target_range, target_selection_range, }; @@ -2022,19 +1834,18 @@ impl DocumentSpan { /// links to markdown links. fn to_target( &self, + module: &DocumentModule, language_server: &language_server::Inner, ) -> Option { - let specifier = resolve_url(&self.file_name).ok()?; - let asset_or_doc = - language_server.get_maybe_asset_or_document(&specifier)?; - let line_index = asset_or_doc.line_index(); - let range = self.text_span.to_range(line_index); - let file_referrer = asset_or_doc.file_referrer(); - let target_uri = language_server - .url_map - .specifier_to_uri(&specifier, file_referrer) - .ok()?; - let mut target = uri_to_url(&target_uri); + let target_specifier = resolve_url(&self.file_name).ok()?; + let target_module = language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + )?; + let range = self.text_span.to_range(target_module.line_index.clone()); + let mut target = uri_to_url(&target_module.uri); target.set_fragment(Some(&format!( "L{},{}", range.start.line + 1, @@ -2084,19 +1895,18 @@ impl NavigateToItem { impl NavigateToItem { pub fn to_symbol_information( &self, + scope: Option<&Url>, language_server: &language_server::Inner, ) -> Option { - let specifier = resolve_url(&self.file_name).ok()?; - let asset_or_doc = - language_server.get_asset_or_document(&specifier).ok()?; - let line_index = asset_or_doc.line_index(); - let file_referrer = asset_or_doc.file_referrer(); - let uri = language_server - .url_map - .specifier_to_uri(&specifier, file_referrer) - .ok()?; - let range = self.text_span.to_range(line_index); - let location = lsp::Location { uri, range }; + let target_specifier = resolve_url(&self.file_name).ok()?; + let target_module = language_server + .document_modules + .inspect_module_for_specifier(&target_specifier, scope)?; + let range = self.text_span.to_range(target_module.line_index.clone()); + let location = lsp::Location { + uri: target_module.uri.as_ref().clone(), + range, + }; let mut tags: Option> = None; let kind_modifiers = parse_kind_modifier(&self.kind_modifiers); @@ -2130,29 +1940,28 @@ pub struct InlayHintDisplayPart { impl InlayHintDisplayPart { pub fn to_lsp( &self, + module: &DocumentModule, language_server: &language_server::Inner, ) -> lsp::InlayHintLabelPart { - let location = self.file.as_ref().map(|f| { - let specifier = - resolve_url(f).unwrap_or_else(|_| INVALID_SPECIFIER.clone()); - let file_referrer = - language_server.documents.get_file_referrer(&specifier); - let uri = language_server - .url_map - .specifier_to_uri(&specifier, file_referrer.as_deref()) - .unwrap_or_else(|_| INVALID_URI.clone()); + let location = self.file.as_ref().and_then(|f| { + let target_specifier = resolve_url(f).ok()?; + let target_module = language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + )?; let range = self .span .as_ref() - .and_then(|s| { - let asset_or_doc = - language_server.get_asset_or_document(&specifier).ok()?; - Some(s.to_range(asset_or_doc.line_index())) - }) + .map(|s| s.to_range(target_module.line_index.clone())) .unwrap_or_else(|| { lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)) }); - lsp::Location { uri, range } + Some(lsp::Location { + uri: target_module.uri.as_ref().clone(), + range, + }) }); lsp::InlayHintLabelPart { value: self.text.clone(), @@ -2194,16 +2003,16 @@ pub struct InlayHint { impl InlayHint { pub fn 
to_lsp( &self, - line_index: Arc, + module: &DocumentModule, language_server: &language_server::Inner, ) -> lsp::InlayHint { lsp::InlayHint { - position: line_index.position_utf16(self.position.into()), + position: module.line_index.position_utf16(self.position.into()), label: if let Some(display_parts) = &self.display_parts { lsp::InlayHintLabel::LabelParts( display_parts .iter() - .map(|p| p.to_lsp(language_server)) + .map(|p| p.to_lsp(module, language_server)) .collect(), ) } else { @@ -2234,8 +2043,8 @@ impl NavigationTree { pub fn to_code_lens( &self, line_index: Arc, - specifier: &ModuleSpecifier, - source: &code_lens::CodeLensSource, + uri: &Uri, + source: code_lens::CodeLensSource, ) -> lsp::CodeLens { let range = if let Some(name_span) = &self.name_span { name_span.to_range(line_index) @@ -2248,9 +2057,9 @@ impl NavigationTree { lsp::CodeLens { range, command: None, - data: Some(json!({ - "specifier": specifier, - "source": source + data: Some(json!(CodeLensData { + source, + uri: uri.clone(), })), } } @@ -2414,30 +2223,12 @@ impl ImplementationLocation { Ok(()) } - pub fn to_location( - &self, - line_index: Arc, - language_server: &language_server::Inner, - ) -> lsp::Location { - let specifier = resolve_url(&self.document_span.file_name) - .unwrap_or_else(|_| ModuleSpecifier::parse("deno://invalid").unwrap()); - let file_referrer = language_server.documents.get_file_referrer(&specifier); - let uri = language_server - .url_map - .specifier_to_uri(&specifier, file_referrer.as_deref()) - .unwrap_or_else(|_| INVALID_URI.clone()); - lsp::Location { - uri, - range: self.document_span.text_span.to_range(line_index), - } - } - pub fn to_link( &self, - line_index: Arc, + module: &DocumentModule, language_server: &language_server::Inner, ) -> Option { - self.document_span.to_link(line_index, language_server) + self.document_span.to_link(module, language_server) } } @@ -2460,52 +2251,45 @@ impl RenameLocation { } } -pub struct RenameLocations { - pub locations: Vec, -} - -impl RenameLocations { - pub fn into_workspace_edit( - self, +impl RenameLocation { + pub fn collect_into_workspace_edit( + locations_with_modules: impl IntoIterator< + Item = (RenameLocation, Arc), + >, new_name: &str, language_server: &language_server::Inner, token: &CancellationToken, ) -> Result { let mut text_document_edit_map = IndexMap::new(); let mut includes_non_files = false; - for location in self.locations.iter() { + for (location, module) in locations_with_modules { if token.is_cancelled() { return Err(anyhow!("request cancelled")); } - let specifier = resolve_url(&location.document_span.file_name)?; - if specifier.scheme() != "file" { + let target_specifier = resolve_url(&location.document_span.file_name)?; + if target_specifier.scheme() != "file" { includes_non_files = true; continue; } - let file_referrer = - language_server.documents.get_file_referrer(&specifier); - let uri = language_server - .url_map - .specifier_to_uri(&specifier, file_referrer.as_deref())?; - let asset_or_doc = language_server.get_asset_or_document(&specifier)?; - - // ensure TextDocumentEdit for `location.file_name`. 
- if !text_document_edit_map.contains_key(&uri) { - text_document_edit_map.insert( - uri.clone(), - lsp::TextDocumentEdit { - text_document: lsp::OptionalVersionedTextDocumentIdentifier { - uri: uri.clone(), - version: asset_or_doc.document_lsp_version(), - }, - edits: - Vec::>::new(), + let Some(target_module) = language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + ) + else { + continue; + }; + let document_edit = text_document_edit_map + .entry(target_module.uri.clone()) + .or_insert_with(|| lsp::TextDocumentEdit { + text_document: lsp::OptionalVersionedTextDocumentIdentifier { + uri: target_module.uri.as_ref().clone(), + version: target_module.open_data.as_ref().map(|d| d.version), }, - ); - } - - // push TextEdit for ensured `TextDocumentEdit.edits`. - let document_edit = text_document_edit_map.get_mut(&uri).unwrap(); + edits: Vec::>::new( + ), + }); let new_text = [ location.prefix_text.as_deref(), Some(new_name), @@ -2519,7 +2303,7 @@ impl RenameLocations { range: location .document_span .text_span - .to_range(asset_or_doc.line_index()), + .to_range(target_module.line_index.clone()), new_text, })); } @@ -2601,7 +2385,7 @@ impl DefinitionInfoAndBoundSpan { pub fn to_definition( &self, - line_index: Arc, + module: &DocumentModule, language_server: &language_server::Inner, token: &CancellationToken, ) -> Result, AnyError> { @@ -2611,10 +2395,7 @@ impl DefinitionInfoAndBoundSpan { if token.is_cancelled() { return Err(anyhow!("request cancelled")); } - if let Some(link) = di - .document_span - .to_link(line_index.clone(), language_server) - { + if let Some(link) = di.document_span.to_link(module, language_server) { location_links.push(link); } } @@ -2703,19 +2484,43 @@ impl FileTextChanges { pub fn to_text_document_edit( &self, + module: &DocumentModule, language_server: &language_server::Inner, - ) -> Result { - let specifier = resolve_url(&self.file_name)?; - let asset_or_doc = language_server.get_asset_or_document(&specifier)?; + ) -> Option { + let is_new_file = self.is_new_file.unwrap_or(false); + let target_specifier = resolve_url(&self.file_name).ok()?; + let target_module = if is_new_file { + None + } else { + Some( + language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + )?, + ) + }; + let target_uri = target_module + .as_ref() + .map(|m| m.uri.clone()) + .or_else(|| url_to_uri(&target_specifier).ok().map(Arc::new))?; + let line_index = target_module + .as_ref() + .map(|m| m.line_index.clone()) + .unwrap_or_else(|| Arc::new(LineIndex::new(""))); let edits = self .text_changes .iter() - .map(|tc| tc.as_text_or_annotated_text_edit(asset_or_doc.line_index())) + .map(|tc| tc.as_text_or_annotated_text_edit(line_index.clone())) .collect(); - Ok(lsp::TextDocumentEdit { + Some(lsp::TextDocumentEdit { text_document: lsp::OptionalVersionedTextDocumentIdentifier { - uri: url_to_uri(&specifier)?, - version: asset_or_doc.document_lsp_version(), + uri: target_uri.as_ref().clone(), + version: target_module + .as_ref() + .and_then(|m| m.open_data.as_ref()) + .map(|d| d.version), }, edits, }) @@ -2723,25 +2528,37 @@ impl FileTextChanges { pub fn to_text_document_change_ops( &self, + module: &DocumentModule, language_server: &language_server::Inner, - ) -> Result, AnyError> { + ) -> Option> { + let is_new_file = self.is_new_file.unwrap_or(false); let mut ops = Vec::::new(); - let specifier = resolve_url(&self.file_name)?; - let maybe_asset_or_document = if 
!self.is_new_file.unwrap_or(false) { - let asset_or_doc = language_server.get_asset_or_document(&specifier)?; - Some(asset_or_doc) - } else { + let target_specifier = resolve_url(&self.file_name).ok()?; + let target_module = if is_new_file { None + } else { + Some( + language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + )?, + ) }; - let line_index = maybe_asset_or_document + let target_uri = target_module .as_ref() - .map(|d| d.line_index()) + .map(|m| m.uri.clone()) + .or_else(|| url_to_uri(&target_specifier).ok().map(Arc::new))?; + let line_index = target_module + .as_ref() + .map(|m| m.line_index.clone()) .unwrap_or_else(|| Arc::new(LineIndex::new(""))); - if self.is_new_file.unwrap_or(false) { + if is_new_file { ops.push(lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create( lsp::CreateFile { - uri: url_to_uri(&specifier)?, + uri: target_uri.as_ref().clone(), options: Some(lsp::CreateFileOptions { ignore_if_exists: Some(true), overwrite: None, @@ -2758,13 +2575,16 @@ impl FileTextChanges { .collect(); ops.push(lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit { text_document: lsp::OptionalVersionedTextDocumentIdentifier { - uri: url_to_uri(&specifier)?, - version: maybe_asset_or_document.and_then(|d| d.document_lsp_version()), + uri: target_uri.as_ref().clone(), + version: target_module + .as_ref() + .and_then(|m| m.open_data.as_ref()) + .map(|d| d.version), }, edits, })); - Ok(ops) + Some(ops) } } @@ -2908,7 +2728,7 @@ pub struct ApplicableRefactorInfo { impl ApplicableRefactorInfo { pub fn to_code_actions( &self, - specifier: &ModuleSpecifier, + uri: &Uri, range: &lsp::Range, token: &CancellationToken, ) -> Result, AnyError> { @@ -2919,7 +2739,7 @@ impl ApplicableRefactorInfo { return Err(anyhow!("request cancelled")); } code_actions - .push(self.as_inline_code_action(action, specifier, range, &self.name)); + .push(self.as_inline_code_action(action, uri, range, &self.name)); } Ok(code_actions) } @@ -2927,7 +2747,7 @@ impl ApplicableRefactorInfo { fn as_inline_code_action( &self, action: &RefactorActionInfo, - specifier: &ModuleSpecifier, + uri: &Uri, range: &lsp::Range, refactor_name: &str, ) -> lsp::CodeAction { @@ -2944,7 +2764,7 @@ impl ApplicableRefactorInfo { disabled, data: Some( serde_json::to_value(RefactorCodeActionData { - specifier: specifier.clone(), + uri: uri.clone(), range: *range, refactor_name: refactor_name.to_owned(), action_name: action.name.clone(), @@ -2956,22 +2776,21 @@ impl ApplicableRefactorInfo { } } -pub fn file_text_changes_to_workspace_edit( - changes: &[FileTextChanges], +pub fn file_text_changes_to_workspace_edit<'a>( + changes_with_modules: impl IntoIterator< + Item = (&'a FileTextChanges, &'a Arc), + >, language_server: &language_server::Inner, token: &CancellationToken, ) -> LspResult> { let mut all_ops = Vec::::new(); - for change in changes { + for (change, module) in changes_with_modules { if token.is_cancelled() { return Err(LspError::request_cancelled()); } - let ops = match change.to_text_document_change_ops(language_server) { - Ok(op) => op, - Err(err) => { - error!("Unable to convert changes to edits: {}", err); - return Err(LspError::internal_error()); - } + let Some(ops) = change.to_text_document_change_ops(module, language_server) + else { + continue; }; all_ops.extend(ops); } @@ -3003,10 +2822,15 @@ impl RefactorEditInfo { pub fn to_workspace_edit( &self, + module: &Arc, language_server: &language_server::Inner, token: &CancellationToken, ) -> LspResult> { - 
file_text_changes_to_workspace_edit(&self.edits, language_server, token) + file_text_changes_to_workspace_edit( + self.edits.iter().map(|c| (c, module)), + language_server, + token, + ) } } @@ -3165,20 +2989,27 @@ impl ReferenceEntry { impl ReferenceEntry { pub fn to_location( &self, - line_index: Arc, + module: &Arc, language_server: &language_server::Inner, - ) -> lsp::Location { - let specifier = resolve_url(&self.document_span.file_name) - .unwrap_or_else(|_| INVALID_SPECIFIER.clone()); - let file_referrer = language_server.documents.get_file_referrer(&specifier); - let uri = language_server - .url_map - .specifier_to_uri(&specifier, file_referrer.as_deref()) - .unwrap_or_else(|_| INVALID_URI.clone()); - lsp::Location { - uri, - range: self.document_span.text_span.to_range(line_index), - } + ) -> Option { + let target_specifier = resolve_url(&self.document_span.file_name).ok()?; + let target_module = if target_specifier == *module.specifier { + module.clone() + } else { + language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + )? + }; + Some(lsp::Location { + uri: target_module.uri.as_ref().clone(), + range: self + .document_span + .text_span + .to_range(target_module.line_index.clone()), + }) } } @@ -3207,54 +3038,42 @@ impl CallHierarchyItem { pub fn try_resolve_call_hierarchy_item( &self, + module: &DocumentModule, language_server: &language_server::Inner, maybe_root_path: Option<&Path>, ) -> Option { - let target_specifier = resolve_url(&self.file).ok()?; - let target_asset_or_doc = - language_server.get_maybe_asset_or_document(&target_specifier)?; - - Some(self.to_call_hierarchy_item( - target_asset_or_doc.line_index(), - language_server, - maybe_root_path, - )) + let (item, _) = + self.to_call_hierarchy_item(module, language_server, maybe_root_path)?; + Some(item) } - pub fn to_call_hierarchy_item( + fn to_call_hierarchy_item( &self, - line_index: Arc, + module: &DocumentModule, language_server: &language_server::Inner, maybe_root_path: Option<&Path>, - ) -> lsp::CallHierarchyItem { - let target_specifier = - resolve_url(&self.file).unwrap_or_else(|_| INVALID_SPECIFIER.clone()); - let file_referrer = language_server - .documents - .get_file_referrer(&target_specifier); - let uri = language_server - .url_map - .specifier_to_uri(&target_specifier, file_referrer.as_deref()) - .unwrap_or_else(|_| INVALID_URI.clone()); + ) -> Option<(lsp::CallHierarchyItem, Arc)> { + let target_specifier = resolve_url(&self.file).ok()?; + let target_module = language_server + .document_modules + .inspect_module_for_specifier( + &target_specifier, + module.scope.as_deref(), + )?; let use_file_name = self.is_source_file_item(); - let maybe_file_path = if uri.scheme().is_some_and(|s| s.as_str() == "file") - { - url_to_file_path(&uri_to_url(&uri)).ok() - } else { - None - }; + let maybe_file_path = url_to_file_path(&target_module.specifier).ok(); let name = if use_file_name { - if let Some(file_path) = maybe_file_path.as_ref() { + if let Some(file_path) = &maybe_file_path { file_path.file_name().unwrap().to_string_lossy().to_string() } else { - uri.as_str().to_string() + target_module.uri.to_string() } } else { self.name.clone() }; let detail = if use_file_name { - if let Some(file_path) = maybe_file_path.as_ref() { + if let Some(file_path) = &maybe_file_path { // TODO: update this to work with multi root workspaces let parent_dir = file_path.parent().unwrap(); if let Some(root_path) = maybe_root_path { @@ -3281,16 +3100,21 @@ impl 
CallHierarchyItem { } } - lsp::CallHierarchyItem { - name, - tags, - uri, - detail: Some(detail), - kind: self.kind.clone().into(), - range: self.span.to_range(line_index.clone()), - selection_range: self.selection_span.to_range(line_index), - data: None, - } + Some(( + lsp::CallHierarchyItem { + name, + tags, + uri: target_module.uri.as_ref().clone(), + detail: Some(detail), + kind: self.kind.clone().into(), + range: self.span.to_range(target_module.line_index.clone()), + selection_range: self + .selection_span + .to_range(target_module.line_index.clone()), + data: None, + }, + target_module, + )) } fn is_source_file_item(&self) -> bool { @@ -3318,23 +3142,21 @@ impl CallHierarchyIncomingCall { pub fn try_resolve_call_hierarchy_incoming_call( &self, + module: &DocumentModule, language_server: &language_server::Inner, maybe_root_path: Option<&Path>, ) -> Option { - let target_specifier = resolve_url(&self.from.file).ok()?; - let target_asset_or_doc = - language_server.get_maybe_asset_or_document(&target_specifier)?; - + let (from, target_module) = self.from.to_call_hierarchy_item( + module, + language_server, + maybe_root_path, + )?; Some(lsp::CallHierarchyIncomingCall { - from: self.from.to_call_hierarchy_item( - target_asset_or_doc.line_index(), - language_server, - maybe_root_path, - ), + from, from_ranges: self .from_spans .iter() - .map(|span| span.to_range(target_asset_or_doc.line_index())) + .map(|span| span.to_range(target_module.line_index.clone())) .collect(), }) } @@ -3358,24 +3180,21 @@ impl CallHierarchyOutgoingCall { pub fn try_resolve_call_hierarchy_outgoing_call( &self, - line_index: Arc, + module: &DocumentModule, language_server: &language_server::Inner, maybe_root_path: Option<&Path>, ) -> Option { - let target_specifier = resolve_url(&self.to.file).ok()?; - let target_asset_or_doc = - language_server.get_maybe_asset_or_document(&target_specifier)?; - + let (to, _) = self.to.to_call_hierarchy_item( + module, + language_server, + maybe_root_path, + )?; Some(lsp::CallHierarchyOutgoingCall { - to: self.to.to_call_hierarchy_item( - target_asset_or_doc.line_index(), - language_server, - maybe_root_path, - ), + to, from_ranges: self .from_spans .iter() - .map(|span| span.to_range(line_index.clone())) + .map(|span| span.to_range(module.line_index.clone())) .collect(), }) } @@ -3386,8 +3205,7 @@ impl CallHierarchyOutgoingCall { fn parse_code_actions( maybe_code_actions: Option<&Vec>, data: &CompletionItemData, - specifier: &ModuleSpecifier, - language_server: &language_server::Inner, + module: &DocumentModule, ) -> Result<(Option, Option>), AnyError> { if let Some(code_actions) = maybe_code_actions { let mut additional_text_edits: Vec = Vec::new(); @@ -3397,12 +3215,10 @@ fn parse_code_actions( has_remaining_commands_or_edits = true; } - let asset_or_doc = - language_server.get_asset_or_document(&data.specifier)?; for change in &ts_action.changes { - if data.specifier.as_str() == change.file_name { + if module.specifier.as_str() == change.file_name { additional_text_edits.extend(change.text_changes.iter().map(|tc| { - let mut text_edit = tc.as_text_edit(asset_or_doc.line_index()); + let mut text_edit = tc.as_text_edit(module.line_index.clone()); if let Some(specifier_rewrite) = &data.specifier_rewrite { let specifier_index = text_edit .new_text @@ -3442,7 +3258,7 @@ fn parse_code_actions( .changes .clone() .into_iter() - .filter(|ch| ch.file_name == data.specifier.as_str()) + .filter(|ch| ch.file_name == module.specifier.as_str()) .collect(); json!({ "commands": ca.commands, @@ 
-3454,7 +3270,10 @@ fn parse_code_actions( command = Some(lsp::Command { title: "".to_string(), command: "_typescript.applyCompletionCodeAction".to_string(), - arguments: Some(vec![json!(specifier.to_string()), json!(actions)]), + arguments: Some(vec![ + json!(module.specifier.to_string()), + json!(actions), + ]), }); } @@ -3546,7 +3365,7 @@ impl CompletionEntryDetails { &self, original_item: &lsp::CompletionItem, data: &CompletionItemData, - specifier: &ModuleSpecifier, + module: &DocumentModule, language_server: &language_server::Inner, ) -> Result { let detail = if original_item.detail.is_some() { @@ -3554,6 +3373,7 @@ impl CompletionEntryDetails { } else if !self.display_parts.is_empty() { Some(replace_links(display_parts_to_string( &self.display_parts, + module, language_server, ))) } else { @@ -3561,11 +3381,13 @@ impl CompletionEntryDetails { }; let documentation = if let Some(parts) = &self.documentation { // NOTE: similar as `QuickInfo::to_hover()` - let mut value = display_parts_to_string(parts, language_server); + let mut value = display_parts_to_string(parts, module, language_server); if let Some(tags) = &self.tags { let tags_preview = tags .iter() - .map(|tag_info| get_tag_documentation(tag_info, language_server)) + .map(|tag_info| { + get_tag_documentation(tag_info, module, language_server) + }) .collect::>() .join(" \n\n"); if !tags_preview.is_empty() { @@ -3636,12 +3458,8 @@ impl CompletionEntryDetails { .join("\n\n"), ) .filter(|s| !s.is_empty()); - let (command, additional_text_edits) = parse_code_actions( - self.code_actions.as_ref(), - data, - specifier, - language_server, - )?; + let (command, additional_text_edits) = + parse_code_actions(self.code_actions.as_ref(), data, module)?; let mut insert_text_format = original_item.insert_text_format; let insert_text = if data.use_code_snippet { insert_text_format = Some(lsp::InsertTextFormat::SNIPPET); @@ -3709,7 +3527,7 @@ impl CompletionInfo { &self, line_index: Arc, settings: &config::CompletionSettings, - specifier: &ModuleSpecifier, + module: &DocumentModule, position: u32, language_server: &language_server::Inner, token: &CancellationToken, @@ -3727,7 +3545,7 @@ impl CompletionInfo { line_index.clone(), self, settings, - specifier, + module, position, language_server, &mut cache, @@ -3764,7 +3582,7 @@ pub struct CompletionSpecifierRewrite { #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct CompletionItemData { - pub specifier: ModuleSpecifier, + pub uri: Uri, pub position: u32, pub name: String, #[serde(skip_serializing_if = "Option::is_none")] @@ -3824,6 +3642,8 @@ pub struct CompletionEntry { is_import_statement_completion: Option, #[serde(skip_serializing_if = "Option::is_none")] data: Option, + #[serde(flatten)] + other: serde_json::Map, /// This is not from tsc, we add it for convenience during normalization. /// Represents `self.data.file_name`, but normalized. 
#[serde(skip)] @@ -3907,11 +3727,37 @@ impl CompletionEntry { } } - fn get_filter_text(&self) -> Option { + // https://github.com/microsoft/vscode/blob/52eae268f764fd41d69705eb629010f4c0e28ae9/extensions/typescript-language-features/src/languageFeatures/completions.ts#L391-L425 + fn get_filter_text( + &self, + context: Option<(&DocumentModule, u32)>, + ) -> Option { if self.name.starts_with('#') { if let Some(insert_text) = &self.insert_text { if insert_text.starts_with("this.#") { - return Some(insert_text.replace("this.#", "")); + let prefix_starts_with_hash = context + .map(|(module, position)| { + for (_, c) in module + .text + .char_indices() + .rev() + .skip_while(|(i, _)| *i as u32 >= position) + { + if c == '#' { + return true; + } + if !c.is_ascii_alphanumeric() && c != '_' && c != '$' { + break; + } + } + false + }) + .unwrap_or(false); + if prefix_starts_with_hash { + return Some(insert_text.clone()); + } else { + return Some(insert_text.replace("this.#", "")); + } } else { return Some(insert_text.clone()); } @@ -3942,22 +3788,20 @@ impl CompletionEntry { line_index: Arc, info: &CompletionInfo, settings: &config::CompletionSettings, - specifier: &ModuleSpecifier, + module: &DocumentModule, position: u32, language_server: &language_server::Inner, - resolution_cache: &mut HashMap<(ModuleSpecifier, ModuleSpecifier), String>, + resolution_cache: &mut HashMap< + (ModuleSpecifier, Arc), + String, + >, ) -> Option { let mut label = self.name.clone(); let mut label_details: Option = None; let mut kind: Option = Some(self.kind.clone().into()); let mut specifier_rewrite = None; - - let mut sort_text = if self.source.is_some() { - format!("\u{ffff}{}", self.sort_text) - } else { - self.sort_text.clone() - }; + let mut sort_text = self.sort_text.clone(); let preselect = self.is_recommended; let use_code_snippet = settings.complete_function_calls @@ -3970,7 +3814,7 @@ impl CompletionEntry { _ => None, }; let range = self.replacement_span.clone(); - let mut filter_text = self.get_filter_text(); + let mut filter_text = self.get_filter_text(Some((module, position))); let mut tags = None; let mut detail = None; @@ -4005,18 +3849,22 @@ impl CompletionEntry { } } if let Some(source) = &self.source { - let mut display_source = source.clone(); if let Some(import_data) = &self.auto_import_data { + sort_text = format!("\u{ffff}{}", self.sort_text); + let mut display_source = source.clone(); let import_mapper = - language_server.get_ts_response_import_mapper(specifier); + language_server.get_ts_response_import_mapper(module); let maybe_cached = resolution_cache - .get(&(import_data.normalized.clone(), specifier.clone())) + .get(&(import_data.normalized.clone(), module.specifier.clone())) .cloned(); if let Some(mut new_specifier) = maybe_cached .or_else(|| { - import_mapper.check_specifier(&import_data.normalized, specifier) + import_mapper + .check_specifier(&import_data.normalized, &module.specifier) + }) + .or_else(|| { + relative_specifier(&module.specifier, &import_data.normalized) }) - .or_else(|| relative_specifier(specifier, &import_data.normalized)) .or_else(|| { ModuleSpecifier::parse(&import_data.raw.module_specifier) .is_ok() @@ -4024,7 +3872,7 @@ impl CompletionEntry { }) { resolution_cache.insert( - (import_data.normalized.clone(), specifier.clone()), + (import_data.normalized.clone(), module.specifier.clone()), new_specifier.clone(), ); if new_specifier.contains("/node_modules/") { @@ -4035,12 +3883,12 @@ impl CompletionEntry { .resolver .deno_types_to_code_resolution( 
&import_data.normalized, - Some(specifier), + module.scope.as_deref(), ) .and_then(|s| { import_mapper - .check_specifier(&s, specifier) - .or_else(|| relative_specifier(specifier, &s)) + .check_specifier(&s, &module.specifier) + .or_else(|| relative_specifier(&module.specifier, &s)) }) { new_deno_types_specifier = @@ -4059,17 +3907,17 @@ impl CompletionEntry { } else if source.starts_with(jsr_url().as_str()) { return None; } + // We want relative or bare (import-mapped or otherwise) specifiers to + // appear at the top. + if resolve_url(&display_source).is_err() { + sort_text += "_0"; + } else { + sort_text += "_1"; + } + label_details + .get_or_insert_with(Default::default) + .description = Some(display_source); } - // We want relative or bare (import-mapped or otherwise) specifiers to - // appear at the top. - if resolve_url(&display_source).is_err() { - sort_text += "_0"; - } else { - sort_text += "_1"; - } - label_details - .get_or_insert_with(Default::default) - .description = Some(display_source); } let text_edit = @@ -4086,7 +3934,7 @@ impl CompletionEntry { }; let tsc = CompletionItemData { - specifier: specifier.clone(), + uri: module.uri.as_ref().clone(), position, name: self.name.clone(), source: self.source.clone(), @@ -4222,6 +4070,7 @@ pub struct SignatureHelpItems { impl SignatureHelpItems { pub fn into_signature_help( self, + module: &DocumentModule, language_server: &language_server::Inner, token: &CancellationToken, ) -> Result { @@ -4232,7 +4081,7 @@ impl SignatureHelpItems { if token.is_cancelled() { return Err(anyhow!("request cancelled")); } - Ok(item.into_signature_information(language_server)) + Ok(item.into_signature_information(module, language_server)) }) .collect::>()?; Ok(lsp::SignatureHelp { @@ -4258,22 +4107,29 @@ pub struct SignatureHelpItem { impl SignatureHelpItem { pub fn into_signature_information( self, + module: &DocumentModule, language_server: &language_server::Inner, ) -> lsp::SignatureInformation { - let prefix_text = - display_parts_to_string(&self.prefix_display_parts, language_server); + let prefix_text = display_parts_to_string( + &self.prefix_display_parts, + module, + language_server, + ); let params_text = self .parameters .iter() .map(|param| { - display_parts_to_string(¶m.display_parts, language_server) + display_parts_to_string(¶m.display_parts, module, language_server) }) .collect::>() .join(", "); - let suffix_text = - display_parts_to_string(&self.suffix_display_parts, language_server); + let suffix_text = display_parts_to_string( + &self.suffix_display_parts, + module, + language_server, + ); let documentation = - display_parts_to_string(&self.documentation, language_server); + display_parts_to_string(&self.documentation, module, language_server); lsp::SignatureInformation { label: format!("{prefix_text}{params_text}{suffix_text}"), documentation: Some(lsp::Documentation::MarkupContent( @@ -4286,7 +4142,9 @@ impl SignatureHelpItem { self .parameters .into_iter() - .map(|param| param.into_parameter_information(language_server)) + .map(|param| { + param.into_parameter_information(module, language_server) + }) .collect(), ), active_parameter: None, @@ -4306,13 +4164,15 @@ pub struct SignatureHelpParameter { impl SignatureHelpParameter { pub fn into_parameter_information( self, + module: &DocumentModule, language_server: &language_server::Inner, ) -> lsp::ParameterInformation { let documentation = - display_parts_to_string(&self.documentation, language_server); + display_parts_to_string(&self.documentation, module, language_server); 
lsp::ParameterInformation { label: lsp::ParameterLabel::Simple(display_parts_to_string( &self.display_parts, + module, language_server, )), documentation: Some(lsp::Documentation::MarkupContent( @@ -4418,7 +4278,7 @@ struct State { response_tx: Option>>, state_snapshot: Arc, specifier_map: Arc, - last_scope: Option, + last_scope: Option>, token: CancellationToken, pending_requests: Option>, mark: Option, @@ -4455,36 +4315,18 @@ impl State { .load(std::sync::atomic::Ordering::Relaxed) } - fn get_document(&self, specifier: &ModuleSpecifier) -> Option> { - self - .state_snapshot - .documents - .get_or_load(specifier, self.last_scope.as_ref()) - } - - fn get_asset_or_document( + fn get_module( &self, specifier: &ModuleSpecifier, - ) -> Option { - if specifier.scheme() == "asset" { - ASSET_DOCUMENTS.get(specifier).map(AssetOrDocument::Asset) - } else { - let document = self.get_document(specifier); - document.map(AssetOrDocument::Document) - } + ) -> Option> { + self + .state_snapshot + .document_modules + .module_for_specifier(specifier, self.last_scope.as_deref()) } fn script_version(&self, specifier: &ModuleSpecifier) -> Option { - if specifier.scheme() == "asset" { - if ASSET_DOCUMENTS.contains_key(specifier) { - Some("1".to_string()) - } else { - None - } - } else { - let document = self.get_document(specifier); - document.map(|d| d.script_version()) - } + self.get_module(specifier).map(|m| m.script_version.clone()) } } @@ -4534,7 +4376,7 @@ enum LoadError { #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] struct LoadResponse { - data: deno_core::FastString, + data: DocumentText, script_kind: i32, version: Option, is_cjs: bool, @@ -4552,23 +4394,17 @@ fn op_load<'s>( .performance .mark_with_args("tsc.op.op_load", specifier); let specifier = state.specifier_map.normalize(specifier)?; - let asset_or_document = if specifier.as_str() == MISSING_DEPENDENCY_SPECIFIER - { + let module = if specifier.as_str() == MISSING_DEPENDENCY_SPECIFIER { None } else { - state.get_asset_or_document(&specifier) + state.get_module(&specifier) }; - let maybe_load_response = - asset_or_document.as_ref().map(|doc| LoadResponse { - data: doc.text_fast_string(), - script_kind: crate::tsc::as_ts_script_kind(doc.media_type()), - version: state.script_version(&specifier), - is_cjs: doc - .document() - .map(|d| d.resolution_mode()) - .unwrap_or(ResolutionMode::Import) - == ResolutionMode::Require, - }); + let maybe_load_response = module.as_ref().map(|m| LoadResponse { + data: m.text.clone(), + script_kind: crate::tsc::as_ts_script_kind(m.media_type), + version: state.script_version(&specifier), + is_cjs: m.resolution_mode == ResolutionMode::Require, + }); let serialized = serde_v8::to_v8(scope, maybe_load_response)?; state.performance.measure(mark); Ok(serialized) @@ -4585,7 +4421,10 @@ fn op_release( .performance .mark_with_args("tsc.op.op_release", specifier); let specifier = state.specifier_map.normalize(specifier)?; - state.state_snapshot.documents.release(&specifier); + state + .state_snapshot + .document_modules + .release(&specifier, state.last_scope.as_deref()); state.performance.measure(mark); Ok(()) } @@ -4604,7 +4443,7 @@ fn op_resolve( struct TscRequestArray { request: TscRequest, - scope: Option, + scope: Option>, id: Smi, change: convert::OptionNull, } @@ -4676,7 +4515,7 @@ async fn op_poll_requests( Some(TscRequestArray { request, - scope: scope.map(|s| s.into()), + scope, id: Smi(id), change: change.into(), }) @@ -4694,8 +4533,8 @@ fn op_resolve_inner( let referrer = 
state.specifier_map.normalize(&args.base)?; let specifiers = state .state_snapshot - .documents - .resolve(&args.specifiers, &referrer, state.last_scope.as_ref()) + .document_modules + .resolve(&args.specifiers, &referrer, state.last_scope.as_deref()) .into_iter() .map(|o| { o.map(|(s, mt)| { @@ -4806,7 +4645,7 @@ fn op_exit_span(op_state: &mut OpState, span: *const c_void, root: bool) { #[serde(rename_all = "camelCase")] struct ScriptNames { unscoped: IndexSet, - by_scope: BTreeMap>, + by_scope: BTreeMap, IndexSet>, } #[op2] @@ -4821,16 +4660,17 @@ fn op_script_names(state: &mut OpState) -> ScriptNames { by_scope: BTreeMap::from_iter( state .state_snapshot - .config - .tree - .data_by_scope() - .keys() - .map(|s| (s.clone(), IndexSet::new())), + .document_modules + .scopes() + .into_iter() + .filter_map(|s| Some((s?, IndexSet::new()))), ), }; - let scopes_with_node_specifier = - state.state_snapshot.documents.scopes_with_node_specifier(); + let scopes_with_node_specifier = state + .state_snapshot + .document_modules + .scopes_with_node_specifier(); if scopes_with_node_specifier.contains(&None) { result .unscoped @@ -4873,32 +4713,35 @@ fn op_script_names(state: &mut OpState) -> ScriptNames { } // finally include the documents - let docs = state + for (scope, modules) in state .state_snapshot - .documents - .documents(DocumentsFilter::AllDiagnosable); - for doc in &docs { - let specifier = doc.specifier(); - let is_open = doc.is_open(); - if is_open - || (specifier.scheme() == "file" - && !state.state_snapshot.resolver.in_node_modules(specifier)) - { - let script_names = doc - .scope() - .and_then(|s| result.by_scope.get_mut(s)) - .unwrap_or(&mut result.unscoped); + .document_modules + .workspace_file_modules_by_scope() + { + let script_names = scope + .as_deref() + .and_then(|s| result.by_scope.get_mut(s)) + .unwrap_or(&mut result.unscoped); + for module in modules { + let is_open = module.open_data.is_some(); let types_specifier = (|| { - let documents = &state.state_snapshot.documents; - let types = doc.maybe_types_dependency().maybe_specifier()?; - let (types, _) = documents.resolve_dependency( - types, - specifier, - doc.resolution_mode(), - doc.file_referrer(), - )?; - let types_doc = documents.get_or_load(&types, doc.file_referrer())?; - Some(types_doc.specifier().clone()) + let types_specifier = module + .types_dependency + .as_ref()? + .dependency + .maybe_specifier()?; + Some( + state + .state_snapshot + .document_modules + .resolve_dependency( + types_specifier, + &module.specifier, + module.resolution_mode, + module.scope.as_deref(), + )? + .0, + ) })(); // If there is a types dep, use that as the root instead. But if the doc // is open, include both as roots. 
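
Editor's note: the hunks above repeatedly replace the old per-scope fan-out in TsServer (a FuturesOrdered of cloned requests over every entry in config.tree.data_by_scope(), merged through an IndexSet with lsp_warn! on failures) with a single request that takes the caller's scope as Option<&Arc<Url>> and normalizes specifiers in that one response. The following is a minimal, self-contained sketch of that shape only; TsServerSketch, request_scoped and the stand-in Url, Location and SpecifierMap types are invented for illustration and are not the real Deno LSP types or APIs.

// Sketch (not the actual Deno types): one scoped request, normalized in place,
// instead of fanning a cloned request out over every configured scope.
use std::collections::BTreeMap;
use std::sync::Arc;

type Url = String; // stand-in for deno_core::url::Url

#[derive(Debug, Clone)]
struct Location {
    file_name: String,
}

struct SpecifierMap;

impl SpecifierMap {
    // Stand-in for mapping tsc-internal names back to real specifiers.
    fn normalize(&self, raw: &str) -> String {
        raw.trim_start_matches("deno:///").to_string()
    }
}

struct TsServerSketch {
    // Pretend responses, keyed by scope (None = unscoped).
    responses: BTreeMap<Option<Url>, Vec<Location>>,
    specifier_map: SpecifierMap,
}

impl TsServerSketch {
    // New-style call: one scope in, one response out, normalized before return.
    fn request_scoped(&self, scope: Option<&Arc<Url>>) -> Vec<Location> {
        let key: Option<Url> = scope.map(|s| (**s).clone());
        let mut locations = self.responses.get(&key).cloned().unwrap_or_default();
        for location in &mut locations {
            location.file_name = self.specifier_map.normalize(&location.file_name);
        }
        locations
    }
}

fn main() {
    let scope = Arc::new("file:///project/".to_string());
    let server = TsServerSketch {
        responses: BTreeMap::from([(
            Some((*scope).clone()),
            vec![Location { file_name: "deno:///file:///project/a.ts".into() }],
        )]),
        specifier_map: SpecifierMap,
    };
    let locations = server.request_scoped(Some(&scope));
    assert_eq!(locations[0].file_name, "file:///project/a.ts");
    println!("{locations:?}");
}

The design consequence visible throughout the patch is that choosing the scope becomes the caller's job (typically derived from the document module being serviced), so the server no longer has to query every scope and deduplicate or warn about unrelated failures.
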
@@ -4906,7 +4749,7 @@ fn op_script_names(state: &mut OpState) -> ScriptNames { script_names.insert(types_specifier.to_string()); } if types_specifier.is_none() || is_open { - script_names.insert(specifier.to_string()); + script_names.insert(module.specifier.to_string()); } } } @@ -5425,43 +5268,6 @@ pub struct SignatureHelpTriggerReason { pub trigger_character: Option, } -#[derive(Debug, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct GetCompletionDetailsArgs { - pub specifier: ModuleSpecifier, - pub position: u32, - pub name: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub format_code_settings: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub source: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub preferences: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub data: Option, -} - -impl From<&CompletionItemData> for GetCompletionDetailsArgs { - fn from(item_data: &CompletionItemData) -> Self { - Self { - specifier: item_data.specifier.clone(), - position: item_data.position, - name: item_data.name.clone(), - source: item_data.source.clone(), - preferences: None, - format_code_settings: None, - data: item_data.data.clone(), - } - } -} - -#[derive(Debug)] -pub struct GetNavigateToItemsArgs { - pub search: String, - pub max_result_count: Option, - pub file: Option, -} - #[derive(Debug, Serialize, Clone, Copy)] pub struct TscTextRange { pos: u32, @@ -5753,7 +5559,7 @@ mod tests { use crate::lsp::cache::LspCache; use crate::lsp::config::Config; use crate::lsp::config::WorkspaceSettings; - use crate::lsp::documents::Documents; + use crate::lsp::documents::DocumentModules; use crate::lsp::documents::LanguageId; use crate::lsp::resolver::LspResolver; use crate::lsp::text::LineIndex; @@ -5780,15 +5586,25 @@ mod tests { .await; let resolver = Arc::new(LspResolver::from_config(&config, &cache, None).await); - let mut documents = Documents::default(); - documents.update_config(&config, &resolver, &cache, &Default::default()); + let mut document_modules = DocumentModules::default(); + document_modules.update_config( + &config, + &resolver, + &cache, + &Default::default(), + ); for (relative_specifier, source, version, language_id) in sources { let specifier = temp_dir.url().join(relative_specifier).unwrap(); - documents.open(specifier, *version, *language_id, (*source).into(), None); + document_modules.documents.open( + url_to_uri(&specifier).unwrap(), + *version, + *language_id, + (*source).into(), + ); } let snapshot = Arc::new(StateSnapshot { project_version: 0, - documents: Arc::new(documents), + document_modules, config: Arc::new(config), resolver, }); @@ -5857,30 +5673,33 @@ mod tests { .await; let specifier = temp_dir.url().join("a.ts").unwrap(); let (diagnostics, _) = ts_server - .get_diagnostics(snapshot, vec![specifier.clone()], &Default::default()) + .get_diagnostics( + snapshot.clone(), + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), + &Default::default(), + ) .await .unwrap(); assert_eq!( json!(diagnostics), - json!({ - specifier.clone(): [ - { - "start": { - "line": 0, - "character": 0, - }, - "end": { - "line": 0, - "character": 7 - }, - "fileName": specifier, - "messageText": "Cannot find name 'console'. Do you need to change your target library? 
Try changing the \'lib\' compiler option to include 'dom'.", - "sourceLine": "console.log(\"hello deno\");", - "category": 1, - "code": 2584 - } - ] - }) + json!([[ + { + "start": { + "line": 0, + "character": 0, + }, + "end": { + "line": 0, + "character": 7 + }, + "fileName": specifier, + "messageText": "Cannot find name 'console'. Do you need to change your target library? Try changing the \'lib\' compiler option to include 'dom'.", + "sourceLine": "console.log(\"hello deno\");", + "category": 1, + "code": 2584 + } + ]]), ); } @@ -5903,10 +5722,15 @@ mod tests { .await; let specifier = temp_dir.url().join("a.ts").unwrap(); let (diagnostics, _) = ts_server - .get_diagnostics(snapshot, vec![specifier.clone()], &Default::default()) + .get_diagnostics( + snapshot.clone(), + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), + &Default::default(), + ) .await .unwrap(); - assert_eq!(json!(diagnostics), json!({ specifier: [] })); + assert_eq!(json!(diagnostics), json!([[]])); } #[tokio::test] @@ -5933,10 +5757,15 @@ mod tests { .await; let specifier = temp_dir.url().join("a.ts").unwrap(); let (diagnostics, _ambient) = ts_server - .get_diagnostics(snapshot, vec![specifier.clone()], &Default::default()) + .get_diagnostics( + snapshot.clone(), + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), + &Default::default(), + ) .await .unwrap(); - assert_eq!(json!(diagnostics), json!({ specifier: [] })); + assert_eq!(json!(diagnostics), json!([[]])); } #[tokio::test] @@ -5959,13 +5788,18 @@ mod tests { .await; let specifier = temp_dir.url().join("a.ts").unwrap(); let (diagnostics, _ambient) = ts_server - .get_diagnostics(snapshot, vec![specifier.clone()], &Default::default()) + .get_diagnostics( + snapshot.clone(), + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), + &Default::default(), + ) .await .unwrap(); assert_eq!( json!(diagnostics), - json!({ - specifier.clone(): [{ + json!([[ + { "start": { "line": 1, "character": 8 @@ -5980,8 +5814,8 @@ mod tests { "category": 2, "code": 6133, "reportsUnnecessary": true, - }] - }) + } + ]]), ); } @@ -6009,10 +5843,15 @@ mod tests { .await; let specifier = temp_dir.url().join("a.ts").unwrap(); let (diagnostics, _ambient) = ts_server - .get_diagnostics(snapshot, vec![specifier.clone()], &Default::default()) + .get_diagnostics( + snapshot.clone(), + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), + &Default::default(), + ) .await .unwrap(); - assert_eq!(json!(diagnostics), json!({ specifier: [] })); + assert_eq!(json!(diagnostics), json!([[]])); } #[tokio::test] @@ -6042,13 +5881,18 @@ mod tests { .await; let specifier = temp_dir.url().join("a.ts").unwrap(); let (diagnostics, _ambient) = ts_server - .get_diagnostics(snapshot, vec![specifier.clone()], &Default::default()) + .get_diagnostics( + snapshot.clone(), + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), + &Default::default(), + ) .await .unwrap(); assert_eq!( json!(diagnostics), - json!({ - specifier.clone(): [{ + json!([[ + { "start": { "line": 1, "character": 8 @@ -6063,7 +5907,8 @@ mod tests { "category": 2, "code": 6192, "reportsUnnecessary": true, - }, { + }, + { "start": { "line": 8, "character": 29 @@ -6077,8 +5922,8 @@ mod tests { "sourceLine": " import * as test from", "category": 1, "code": 1109 - }] - }) + } + ]]), ); } @@ -6100,30 +5945,33 @@ mod tests { .await; let specifier = temp_dir.url().join("a.ts").unwrap(); let (diagnostics, _ambient) = ts_server - .get_diagnostics(snapshot, 
vec![specifier.clone()], &Default::default()) + .get_diagnostics( + snapshot.clone(), + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), + &Default::default(), + ) .await .unwrap(); assert_eq!( json!(diagnostics), - json!({ - specifier.clone(): [ - { - "start": { - "line": 0, - "character": 35, - }, - "end": { - "line": 0, - "character": 35 - }, - "fileName": specifier, - "messageText": "Identifier expected.", - "sourceLine": "const url = new URL(\"b.js\", import.", - "category": 1, - "code": 1003, - } - ] - }) + json!([[ + { + "start": { + "line": 0, + "character": 35, + }, + "end": { + "line": 0, + "character": 35 + }, + "fileName": specifier, + "messageText": "Identifier expected.", + "sourceLine": "const url = new URL(\"b.js\", import.", + "category": 1, + "code": 1003, + } + ]]), ); } @@ -6162,32 +6010,31 @@ mod tests { let (diagnostics, _) = ts_server .get_diagnostics( snapshot.clone(), - vec![specifier.clone()], + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), &Default::default(), ) .await .unwrap(); assert_eq!( json!(diagnostics), - json!({ - specifier.clone(): [ - { - "start": { - "line": 2, - "character": 16, - }, - "end": { - "line": 2, - "character": 17 - }, - "fileName": specifier, - "messageText": "Property \'a\' does not exist on type \'typeof import(\"https://deno.land/x/example/a\")\'.", - "sourceLine": " if (a.a === \"b\") {", - "code": 2339, - "category": 1, - } - ] - }) + json!([[ + { + "start": { + "line": 2, + "character": 16, + }, + "end": { + "line": 2, + "character": 17 + }, + "fileName": specifier, + "messageText": "Property \'a\' does not exist on type \'typeof import(\"https://deno.land/x/example/a\")\'.", + "sourceLine": " if (a.a === \"b\") {", + "code": 2339, + "category": 1, + } + ]]), ); cache .global() @@ -6197,6 +6044,14 @@ mod tests { b"export const b = \"b\";\n\nexport const a = \"b\";\n", ) .unwrap(); + snapshot.document_modules.release( + &specifier_dep, + snapshot + .config + .tree + .scope_for_specifier(&specifier) + .map(|s| s.as_ref()), + ); let snapshot = { Arc::new(StateSnapshot { project_version: snapshot.project_version + 1, @@ -6212,17 +6067,13 @@ mod tests { let (diagnostics, _) = ts_server .get_diagnostics( snapshot.clone(), - vec![specifier.clone()], + [&specifier], + snapshot.config.tree.scope_for_specifier(&specifier), &Default::default(), ) .await .unwrap(); - assert_eq!( - json!(diagnostics), - json!({ - specifier: [] - }) - ); + assert_eq!(json!(diagnostics), json!([[]]),); } #[test] @@ -6233,7 +6084,7 @@ mod tests { insert_text: Some("['foo']".to_string()), ..Default::default() }; - let actual = fixture.get_filter_text(); + let actual = fixture.get_filter_text(None); assert_eq!(actual, Some(".foo".to_string())); let fixture = CompletionEntry { @@ -6241,7 +6092,7 @@ mod tests { name: "#abc".to_string(), ..Default::default() }; - let actual = fixture.get_filter_text(); + let actual = fixture.get_filter_text(None); assert_eq!(actual, None); let fixture = CompletionEntry { @@ -6250,7 +6101,7 @@ mod tests { insert_text: Some("this.#abc".to_string()), ..Default::default() }; - let actual = fixture.get_filter_text(); + let actual = fixture.get_filter_text(None); assert_eq!(actual, Some("abc".to_string())); } @@ -6283,7 +6134,7 @@ mod tests { let info = ts_server .get_completions( snapshot.clone(), - specifier.clone(), + &specifier, position, GetCompletionsAtPositionOptions { user_preferences: UserPreferences { @@ -6294,7 +6145,7 @@ mod tests { trigger_kind: None, }, Default::default(), - 
Some(temp_dir.url()), + snapshot.config.tree.scope_for_specifier(&specifier), &Default::default(), ) .await @@ -6304,16 +6155,14 @@ mod tests { let details = ts_server .get_completion_details( snapshot.clone(), - GetCompletionDetailsArgs { - specifier, - position, - name: "log".to_string(), - format_code_settings: None, - source: None, - preferences: None, - data: None, - }, - Some(temp_dir.url()), + &specifier, + position, + "log".to_string(), + None, + None, + None, + None, + snapshot.config.tree.scope_for_specifier(&specifier), &Default::default(), ) .await @@ -6477,7 +6326,7 @@ mod tests { let info = ts_server .get_completions( snapshot.clone(), - specifier.clone(), + &specifier, position, GetCompletionsAtPositionOptions { user_preferences: UserPreferences { @@ -6489,7 +6338,7 @@ mod tests { ..Default::default() }, FormatCodeSettings::from(&fmt_options_config), - Some(temp_dir.url()), + snapshot.config.tree.scope_for_specifier(&specifier), &Default::default(), ) .await @@ -6503,21 +6352,17 @@ mod tests { let details = ts_server .get_completion_details( snapshot.clone(), - GetCompletionDetailsArgs { - specifier, - position, - name: entry.name.clone(), - format_code_settings: Some(FormatCodeSettings::from( - &fmt_options_config, - )), - source: entry.source.clone(), - preferences: Some(UserPreferences { - quote_preference: Some((&fmt_options_config).into()), - ..Default::default() - }), - data: entry.data.clone(), - }, - Some(temp_dir.url()), + &specifier, + position, + entry.name.clone(), + Some(FormatCodeSettings::from(&fmt_options_config)), + entry.source.clone(), + Some(UserPreferences { + quote_preference: Some((&fmt_options_config).into()), + ..Default::default() + }), + entry.data.clone(), + snapshot.config.tree.scope_for_specifier(&specifier), &Default::default(), ) .await @@ -6583,10 +6428,11 @@ mod tests { let changes = ts_server .get_edits_for_file_rename( snapshot, - temp_dir.url().join("b.ts").unwrap(), - temp_dir.url().join("🦕.ts").unwrap(), + &temp_dir.url().join("b.ts").unwrap(), + &temp_dir.url().join("🦕.ts").unwrap(), FormatCodeSettings::default(), UserPreferences::default(), + Some(&Arc::new(temp_dir.url())), &Default::default(), ) .await @@ -6672,7 +6518,7 @@ mod tests { fn change>( project_version: usize, scripts: impl IntoIterator, - new_configs_by_scope: Option>>, + new_configs_by_scope: Option, Arc>>, ) -> PendingChange { PendingChange { project_version, diff --git a/cli/lsp/urls.rs b/cli/lsp/urls.rs index 9c9f50dc34..847eae80f6 100644 --- a/cli/lsp/urls.rs +++ b/cli/lsp/urls.rs @@ -1,36 +1,21 @@ // Copyright 2018-2025 the Deno authors. MIT license. -use std::collections::HashMap; +use std::path::PathBuf; use std::str::FromStr; -use std::sync::Arc; -use deno_ast::MediaType; +use deno_config::UrlToFilePathError; use deno_core::error::AnyError; -use deno_core::parking_lot::Mutex; -use deno_core::url::Position; use deno_core::url::Url; -use deno_core::ModuleSpecifier; +use deno_path_util::url_to_file_path; use lsp_types::Uri; -use once_cell::sync::Lazy; -use super::cache::LspCache; use super::logging::lsp_warn; -/// Used in situations where a default URL needs to be used where otherwise a -/// panic is undesired. -pub static INVALID_SPECIFIER: Lazy = - Lazy::new(|| ModuleSpecifier::parse("deno://invalid").unwrap()); - -/// Used in situations where a default URL needs to be used where otherwise a -/// panic is undesired. 
-pub static INVALID_URI: Lazy = - Lazy::new(|| Uri::from_str("deno://invalid").unwrap()); - /// Matches the `encodeURIComponent()` encoding from JavaScript, which matches /// the component percent encoding set. /// /// See: -const COMPONENT: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS +pub const COMPONENT: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS .add(b' ') .add(b'"') .add(b'#') @@ -56,10 +41,24 @@ const COMPONENT: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS .add(b'+') .add(b','); -/// Characters that may be left unencoded in a `Url` path but not valid in a -/// `Uri` path. +/// Characters that are left unencoded in a `Url` path but will be encoded in a +/// VSCode URI. const URL_TO_URI_PATH: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS + .add(b' ') + .add(b'!') + .add(b'$') + .add(b'&') + .add(b'\'') + .add(b'(') + .add(b')') + .add(b'*') + .add(b'+') + .add(b',') + .add(b':') + .add(b';') + .add(b'=') + .add(b'@') .add(b'[') .add(b']') .add(b'^') @@ -75,73 +74,6 @@ const URL_TO_URI_QUERY: &percent_encoding::AsciiSet = const URL_TO_URI_FRAGMENT: &percent_encoding::AsciiSet = &URL_TO_URI_PATH.add(b'#').add(b'\\').add(b'{').add(b'}'); -fn hash_data_specifier(specifier: &ModuleSpecifier) -> String { - let mut file_name_str = specifier.path().to_string(); - if let Some(query) = specifier.query() { - file_name_str.push('?'); - file_name_str.push_str(query); - } - deno_lib::util::checksum::gen(&[file_name_str.as_bytes()]) -} - -fn to_deno_uri(specifier: &Url) -> String { - let mut string = String::with_capacity(specifier.as_str().len() + 6); - string.push_str("deno:/"); - string.push_str(specifier.scheme()); - for p in specifier[Position::BeforeHost..].split('/') { - string.push('/'); - string.push_str( - &percent_encoding::utf8_percent_encode(p, COMPONENT).to_string(), - ); - } - string -} - -fn from_deno_url(url: &Url) -> Option { - if url.scheme() != "deno" { - return None; - } - let mut segments = url.path_segments()?; - let mut string = String::with_capacity(url.as_str().len()); - string.push_str(segments.next()?); - string.push_str("://"); - string.push_str( - &percent_encoding::percent_decode(segments.next()?.as_bytes()) - .decode_utf8() - .ok()?, - ); - for segment in segments { - string.push('/'); - string.push_str( - &percent_encoding::percent_decode(segment.as_bytes()) - .decode_utf8() - .ok()?, - ); - } - Url::parse(&string).ok() -} - -#[derive(Debug, Default)] -struct LspUrlMapInner { - specifier_to_uri: HashMap, - uri_to_specifier: HashMap, -} - -impl LspUrlMapInner { - fn put(&mut self, specifier: ModuleSpecifier, uri: Uri) { - self.uri_to_specifier.insert(uri.clone(), specifier.clone()); - self.specifier_to_uri.insert(specifier, uri); - } - - fn get_uri(&self, specifier: &ModuleSpecifier) -> Option<&Uri> { - self.specifier_to_uri.get(specifier) - } - - fn get_specifier(&self, uri: &Uri) -> Option<&ModuleSpecifier> { - self.uri_to_specifier.get(uri) - } -} - pub fn uri_parse_unencoded(s: &str) -> Result { url_to_uri(&Url::parse(s)?) 
} @@ -150,10 +82,31 @@ pub fn url_to_uri(url: &Url) -> Result { let components = deno_core::url::quirks::internal_components(url); let mut input = String::with_capacity(url.as_str().len()); input.push_str(&url.as_str()[..components.path_start as usize]); - input.push_str( - &percent_encoding::utf8_percent_encode(url.path(), URL_TO_URI_PATH) - .to_string(), - ); + if cfg!(windows) && url.scheme() == "file" { + let path = url.path(); + let mut chars = path.chars(); + let has_drive_letter = chars.next().is_some_and(|c| c == '/') + && chars.next().is_some_and(|c| c.is_ascii_alphabetic()) + && chars.next().is_some_and(|c| c == ':') + && chars.next().is_none_or(|c| c == '/'); + if has_drive_letter { + input.push_str(&path[..3]); + input.push_str( + &percent_encoding::utf8_percent_encode(&path[3..], URL_TO_URI_PATH) + .to_string(), + ); + } else { + input.push_str( + &percent_encoding::utf8_percent_encode(path, URL_TO_URI_PATH) + .to_string(), + ); + } + } else { + input.push_str( + &percent_encoding::utf8_percent_encode(url.path(), URL_TO_URI_PATH) + .to_string(), + ); + } if let Some(query) = url.query() { input.push('?'); input.push_str( @@ -174,283 +127,34 @@ pub fn url_to_uri(url: &Url) -> Result { } pub fn uri_to_url(uri: &Uri) -> Url { - Url::parse(uri.as_str()).unwrap() -} - -#[derive(Debug, Clone, Copy)] -pub enum LspUrlKind { - File, - Folder, -} - -/// A bi-directional map of URLs sent to the LSP client and internal module -/// specifiers. We need to map internal specifiers into `deno:` schema URLs -/// to allow the Deno language server to manage these as virtual documents. -#[derive(Debug, Default, Clone)] -pub struct LspUrlMap { - cache: LspCache, - inner: Arc>, -} - -impl LspUrlMap { - pub fn set_cache(&mut self, cache: &LspCache) { - self.cache = cache.clone(); - } - - /// Normalize a specifier that is used internally within Deno (or tsc) to a - /// URL that can be handled as a "virtual" document by an LSP client. - pub fn specifier_to_uri( - &self, - specifier: &ModuleSpecifier, - file_referrer: Option<&ModuleSpecifier>, - ) -> Result { - if let Some(file_url) = - self.cache.vendored_specifier(specifier, file_referrer) + (|| { + let scheme = uri.scheme()?; + if !scheme.eq_lowercase("untitled") + && !scheme.eq_lowercase("vscode-notebook-cell") + && !scheme.eq_lowercase("deno-notebook-cell") { - return url_to_uri(&file_url); + return None; } - let mut inner = self.inner.lock(); - if let Some(uri) = inner.get_uri(specifier).cloned() { - Ok(uri) - } else { - let uri = if specifier.scheme() == "file" { - url_to_uri(specifier)? - } else { - let uri_str = if specifier.scheme() == "asset" { - format!("deno:/asset{}", specifier.path()) - } else if specifier.scheme() == "data" { - let data_url = - deno_media_type::data_url::RawDataUrl::parse(specifier)?; - let media_type = data_url.media_type(); - let extension = if media_type == MediaType::Unknown { - "" - } else { - media_type.as_ts_extension() - }; - format!( - "deno:/{}/data_url{}", - hash_data_specifier(specifier), - extension - ) - } else { - to_deno_uri(specifier) - }; - let uri = uri_parse_unencoded(&uri_str)?; - inner.put(specifier.clone(), uri.clone()); - uri - }; - Ok(uri) - } - } - - /// Normalize URLs from the client, where "virtual" `deno:///` URLs are - /// converted into proper module specifiers, as well as handle situations - /// where the client encodes a file URL differently than Rust does by default - /// causing issues with string matching of URLs. 
- /// - /// Note: Sometimes the url provided by the client may not have a trailing slash, - /// so we need to force it to in the mapping and nee to explicitly state whether - /// this is a file or directory url. - pub fn uri_to_specifier( - &self, - uri: &Uri, - kind: LspUrlKind, - ) -> ModuleSpecifier { - let url = uri_to_url(uri); - if let Some(remote_url) = self.cache.unvendored_specifier(&url) { - return remote_url; - } - let mut inner = self.inner.lock(); - if let Some(specifier) = inner.get_specifier(uri).cloned() { - return specifier; - } - let mut specifier = None; - if url.scheme() == "file" { - if let Ok(path) = url.to_file_path() { - specifier = Some(match kind { - LspUrlKind::Folder => Url::from_directory_path(path).unwrap(), - LspUrlKind::File => Url::from_file_path(path).unwrap(), - }); - } - } else if let Some(s) = file_like_to_file_specifier(&url) { - specifier = Some(s); - } else if let Some(s) = from_deno_url(&url) { - specifier = Some(s); - } - let specifier = specifier.unwrap_or_else(|| url.clone()); - inner.put(specifier.clone(), uri.clone()); - specifier - } -} - -/// Convert a e.g. `vscode-notebook-cell:` specifier to a `file:` specifier. -/// ```rust -/// assert_eq!( -/// file_like_to_file_specifier( -/// &Url::parse("vscode-notebook-cell:/path/to/file.ipynb#abc").unwrap(), -/// ), -/// Some(Url::parse("file:///path/to/file.ipynb?scheme=untitled#abc").unwrap()), -/// ); -fn file_like_to_file_specifier(specifier: &Url) -> Option { - if matches!( - specifier.scheme(), - "untitled" | "vscode-notebook-cell" | "deno-notebook-cell" - ) { - if let Ok(mut s) = ModuleSpecifier::parse(&format!( + Url::parse(&format!( "file:///{}", - &specifier.as_str()[deno_core::url::quirks::internal_components(specifier) - .host_end as usize..].trim_start_matches('/'), - )) { - s.query_pairs_mut() - .append_pair("scheme", specifier.scheme()); - return Some(s); - } - } - None + &uri.as_str()[uri.path_bounds.0 as usize..uri.path_bounds.1 as usize] + .trim_start_matches('/'), + )) + .ok() + })() + .unwrap_or_else(|| Url::parse(uri.as_str()).unwrap()) } -#[cfg(test)] -mod tests { - use deno_core::resolve_url; - - use super::*; - - #[test] - fn test_hash_data_specifier() { - let fixture = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap(); - let actual = hash_data_specifier(&fixture); - assert_eq!( - actual, - "c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37" - ); - } - - #[test] - fn test_lsp_url_map() { - let map = LspUrlMap::default(); - let fixture = resolve_url("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap(); - let actual_uri = map - .specifier_to_uri(&fixture, None) - .expect("could not handle specifier"); - assert_eq!( - actual_uri.as_str(), - "deno:/https/deno.land/x/pkg%401.0.0/mod.ts" - ); - let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File); - assert_eq!(actual_specifier, fixture); - } - - #[test] - fn test_lsp_url_reverse() { - let map = LspUrlMap::default(); - let fixture = - Uri::from_str("deno:/https/deno.land/x/pkg%401.0.0/mod.ts").unwrap(); - let actual_specifier = map.uri_to_specifier(&fixture, LspUrlKind::File); - let expected_specifier = - Url::parse("https://deno.land/x/pkg@1.0.0/mod.ts").unwrap(); - assert_eq!(&actual_specifier, &expected_specifier); - - let actual_uri = map.specifier_to_uri(&actual_specifier, None).unwrap(); - assert_eq!(actual_uri, fixture); - } - - #[test] - fn test_lsp_url_map_complex_encoding() { - // Test fix for #9741 - not 
properly encoding certain URLs - let map = LspUrlMap::default(); - let fixture = resolve_url("https://cdn.skypack.dev/-/postcss@v8.2.9-E4SktPp9c0AtxrJHp8iV/dist=es2020,mode=types/lib/postcss.d.ts").unwrap(); - let actual_uri = map - .specifier_to_uri(&fixture, None) - .expect("could not handle specifier"); - assert_eq!(actual_uri.as_str(), "deno:/https/cdn.skypack.dev/-/postcss%40v8.2.9-E4SktPp9c0AtxrJHp8iV/dist%3Des2020%2Cmode%3Dtypes/lib/postcss.d.ts"); - let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File); - assert_eq!(actual_specifier, fixture); - } - - #[test] - fn test_lsp_url_map_data() { - let map = LspUrlMap::default(); - let fixture = resolve_url("data:application/typescript;base64,ZXhwb3J0IGNvbnN0IGEgPSAiYSI7CgpleHBvcnQgZW51bSBBIHsKICBBLAogIEIsCiAgQywKfQo=").unwrap(); - let actual_uri = map - .specifier_to_uri(&fixture, None) - .expect("could not handle specifier"); - let expected_url = Url::parse("deno:/c21c7fc382b2b0553dc0864aa81a3acacfb7b3d1285ab5ae76da6abec213fb37/data_url.ts").unwrap(); - assert_eq!(&uri_to_url(&actual_uri), &expected_url); - - let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File); - assert_eq!(actual_specifier, fixture); - } - - #[test] - fn test_lsp_url_map_host_with_port() { - let map = LspUrlMap::default(); - let fixture = resolve_url("http://localhost:8000/mod.ts").unwrap(); - let actual_uri = map - .specifier_to_uri(&fixture, None) - .expect("could not handle specifier"); - assert_eq!(actual_uri.as_str(), "deno:/http/localhost%3A8000/mod.ts"); - let actual_specifier = map.uri_to_specifier(&actual_uri, LspUrlKind::File); - assert_eq!(actual_specifier, fixture); - } - - #[cfg(windows)] - #[test] - fn test_normalize_windows_path() { - let map = LspUrlMap::default(); - let fixture = Uri::from_str( - "file:///c%3A/Users/deno/Desktop/file%20with%20spaces%20in%20name.txt", - ) - .unwrap(); - let actual = map.uri_to_specifier(&fixture, LspUrlKind::File); - let expected = - Url::parse("file:///C:/Users/deno/Desktop/file with spaces in name.txt") - .unwrap(); - assert_eq!(actual, expected); - } - - #[cfg(not(windows))] - #[test] - fn test_normalize_percent_encoded_path() { - let map = LspUrlMap::default(); - let fixture = Uri::from_str( - "file:///Users/deno/Desktop/file%20with%20spaces%20in%20name.txt", - ) - .unwrap(); - let actual = map.uri_to_specifier(&fixture, LspUrlKind::File); - let expected = - Url::parse("file:///Users/deno/Desktop/file with spaces in name.txt") - .unwrap(); - assert_eq!(actual, expected); - } - - #[test] - fn test_normalize_deno_status() { - let map = LspUrlMap::default(); - let fixture = Uri::from_str("deno:/status.md").unwrap(); - let actual = map.uri_to_specifier(&fixture, LspUrlKind::File); - assert_eq!(actual.as_str(), fixture.as_str()); - } - - #[test] - fn test_file_like_to_file_specifier() { - assert_eq!( - file_like_to_file_specifier( - &Url::parse("vscode-notebook-cell:/path/to/file.ipynb#abc").unwrap(), - ), - Some( - Url::parse( - "file:///path/to/file.ipynb?scheme=vscode-notebook-cell#abc" - ) - .unwrap() - ), - ); - assert_eq!( - file_like_to_file_specifier( - &Url::parse("untitled:/path/to/file.ipynb#123").unwrap(), - ), - Some( - Url::parse("file:///path/to/file.ipynb?scheme=untitled#123").unwrap() - ), - ); - } +pub fn uri_to_file_path(uri: &Uri) -> Result { + url_to_file_path(&uri_to_url(uri)) +} + +pub fn uri_is_file_like(uri: &Uri) -> bool { + let Some(scheme) = uri.scheme() else { + return false; + }; + scheme.eq_lowercase("file") + || scheme.eq_lowercase("untitled") + 
|| scheme.eq_lowercase("vscode-notebook-cell") + || scheme.eq_lowercase("deno-notebook-cell") } diff --git a/cli/tsc/98_lsp.js b/cli/tsc/98_lsp.js index 6ca8dfce52..f6ef817173 100644 --- a/cli/tsc/98_lsp.js +++ b/cli/tsc/98_lsp.js @@ -478,17 +478,17 @@ function serverRequestInner(id, method, args, scope, maybeChange) { // (it's about to be invalidated anyway). const cachedProjectVersion = PROJECT_VERSION_CACHE.get(); if (cachedProjectVersion && projectVersion !== cachedProjectVersion) { - return respond(id, [{}, null]); + return respond(id, [[], null]); } try { - /** @type {Record} */ - const diagnosticMap = {}; + /** @type {any[][]} */ + const diagnosticsList = []; for (const specifier of args[0]) { - diagnosticMap[specifier] = fromTypeScriptDiagnostics([ + diagnosticsList.push(fromTypeScriptDiagnostics([ ...ls.getSemanticDiagnostics(specifier), ...ls.getSuggestionDiagnostics(specifier), ...ls.getSyntacticDiagnostics(specifier), - ].filter(filterMapDiagnostic)); + ].filter(filterMapDiagnostic))); } let ambient = ls.getProgram()?.getTypeChecker().getAmbientModules().map((symbol) => @@ -502,18 +502,18 @@ function serverRequestInner(id, method, args, scope, maybeChange) { } else { ambientModulesCacheByScope.set(scope, ambient); } - return respond(id, [diagnosticMap, ambient]); + return respond(id, [diagnosticsList, ambient]); } catch (e) { if ( !isCancellationError(e) ) { return respond( id, - [{}, null], + [[], null], formatErrorWithArgs(e, [id, method, args, scope, maybeChange]), ); } - return respond(id, [{}, null]); + return respond(id, [[], null]); } } default: diff --git a/tests/integration/lsp_tests.rs b/tests/integration/lsp_tests.rs index 6a0a599efd..b52e9db4a1 100644 --- a/tests/integration/lsp_tests.rs +++ b/tests/integration/lsp_tests.rs @@ -895,96 +895,6 @@ fn lsp_import_map_node_specifiers() { client.shutdown(); } -#[test] -#[timeout(300_000)] -fn lsp_format_vendor_path() { - let context = TestContextBuilder::new() - .use_http_server() - .use_temp_cwd() - .build(); - - // put this dependency in the global cache - context - .new_command() - .args("cache --allow-import http://localhost:4545/run/002_hello.ts") - .run() - .skip_output_check(); - - let temp_dir = context.temp_dir(); - temp_dir.write("deno.json", json!({ "vendor": true }).to_string()); - let mut client = context.new_lsp_command().build(); - client.initialize_default(); - let diagnostics = client.did_open(json!({ - "textDocument": { - "uri": temp_dir.url().join("file.ts").unwrap(), - "languageId": "typescript", - "version": 1, - "text": r#"import "http://localhost:4545/run/002_hello.ts";"#, - }, - })); - // copying from the global cache to the local cache requires explicitly - // running the cache command so that the checksums can be verified - assert_eq!( - diagnostics - .all() - .iter() - .map(|d| d.message.as_str()) - .collect::>(), - vec![ - "Uncached or missing remote URL: http://localhost:4545/run/002_hello.ts" - ] - ); - client.write_request( - "workspace/executeCommand", - json!({ - "command": "deno.cache", - "arguments": [[], temp_dir.url().join("file.ts").unwrap()], - }), - ); - client.read_diagnostics(); - assert!(temp_dir - .path() - .join("vendor/http_localhost_4545/run/002_hello.ts") - .exists()); - client.did_open(json!({ - "textDocument": { - "uri": temp_dir.url().join("vendor/http_localhost_4545/run/002_hello.ts").unwrap(), - "languageId": "typescript", - "version": 1, - "text": r#"console.log("Hello World");"#, - }, - })); - let res = client.write_request( - "textDocument/formatting", - json!({ - 
"textDocument": { - "uri": temp_dir.url().join("vendor/http_localhost_4545/run/002_hello.ts").unwrap(), - }, - "options": { - "tabSize": 2, - "insertSpaces": true, - } - }), - ); - assert_eq!( - res, - json!([{ - "range": { - "start": { - "line": 0, - "character": 27, - }, - "end": { - "line": 0, - "character": 27, - }, - }, - "newText": "\n", - }]), - ); - client.shutdown(); -} - // Regression test for https://github.com/denoland/deno/issues/19802. // Disable the `workspace/configuration` capability. Ensure the LSP falls back // to using `enablePaths` from the `InitializationOptions`. @@ -1088,11 +998,12 @@ fn lsp_did_refresh_deno_configuration_tree_notification() { temp_dir.write("non_workspace1/deno.json", json!({}).to_string()); let mut client = context.new_lsp_command().build(); client.initialize_default(); - let res = client + let mut res = client .read_notification_with_method::( "deno/didRefreshDenoConfigurationTree", ) .unwrap(); + res.as_object_mut().unwrap().remove("denoDirNpmFolderUri"); assert_eq!( res, json!({ @@ -1142,11 +1053,12 @@ fn lsp_did_refresh_deno_configuration_tree_notification() { }], })); client.read_diagnostics(); - let res = client + let mut res = client .read_notification_with_method::( "deno/didRefreshDenoConfigurationTree", ) .unwrap(); + res.as_object_mut().unwrap().remove("denoDirNpmFolderUri"); assert_eq!( res, json!({ @@ -1201,11 +1113,12 @@ fn lsp_did_refresh_deno_configuration_tree_notification() { "disablePaths": ["non_workspace1"], }, })); - let res = client + let mut res = client .read_notification_with_method::( "deno/didRefreshDenoConfigurationTree", ) .unwrap(); + res.as_object_mut().unwrap().remove("denoDirNpmFolderUri"); assert_eq!( res, json!({ @@ -4138,7 +4051,7 @@ fn lsp_code_lens_references() { "end": { "line": 0, "character": 7 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4147,7 +4060,7 @@ fn lsp_code_lens_references() { "end": { "line": 1, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4156,7 +4069,7 @@ fn lsp_code_lens_references() { "end": { "line": 3, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4165,7 +4078,7 @@ fn lsp_code_lens_references() { "end": { "line": 7, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }]) @@ -4178,7 +4091,7 @@ fn lsp_code_lens_references() { "end": { "line": 0, "character": 7 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }), @@ -4217,7 +4130,7 @@ fn lsp_code_lens_references() { "end": { "line": 14, "character": 7 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }), @@ -4245,7 +4158,7 @@ fn lsp_code_lens_references() { "end": { "line": 15, "character": 7 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }), @@ -4325,7 +4238,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 0, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "implementations" } }, { @@ -4334,7 +4247,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 0, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4343,7 +4256,7 
@@ fn lsp_code_lens_implementations() { "end": { "line": 1, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4352,7 +4265,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 4, "character": 7 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4361,7 +4274,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 5, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4370,7 +4283,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 10, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "implementations" } }, { @@ -4379,7 +4292,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 10, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4388,7 +4301,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 11, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }]) @@ -4401,7 +4314,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 0, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "implementations" } }), @@ -4438,7 +4351,7 @@ fn lsp_code_lens_implementations() { "end": { "line": 10, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "implementations" } }), @@ -4816,7 +4729,7 @@ fn lsp_code_lens_non_doc_nav_tree() { "end": { "line": 416, "character": 19 } }, "data": { - "specifier": "asset:///lib.deno.shared_globals.d.ts", + "uri": "deno:/asset/lib.deno.shared_globals.d.ts", "source": "references" } }), @@ -4865,7 +4778,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 0, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "implementations" } }, { @@ -4874,7 +4787,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 0, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4883,7 +4796,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 1, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4892,7 +4805,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 4, "character": 7 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4901,7 +4814,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 5, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4910,7 +4823,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 10, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "implementations" } }, { @@ -4919,7 +4832,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 10, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4928,7 +4841,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 11, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }]) @@ -4967,7 +4880,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 0, 
"character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "implementations" } }, { @@ -4976,7 +4889,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 0, "character": 11 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4985,7 +4898,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 1, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -4994,7 +4907,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 4, "character": 7 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }, { @@ -5003,7 +4916,7 @@ fn lsp_nav_tree_updates() { "end": { "line": 5, "character": 3 } }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "source": "references" } }]) @@ -5338,7 +5251,7 @@ fn lsp_code_actions() { "relatedInformation": [] }], "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "fixId": "fixAwaitInSyncFunction" } }]) @@ -5361,7 +5274,7 @@ fn lsp_code_actions() { "relatedInformation": [] }], "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "fixId": "fixAwaitInSyncFunction" } }), @@ -5424,7 +5337,7 @@ fn lsp_code_actions() { }] }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "fixId": "fixAwaitInSyncFunction" } }) @@ -6254,7 +6167,7 @@ fn lsp_jsr_code_action_move_to_new_file() { }, "isPreferred": false, "data": { - "specifier": file.url(), + "uri": file.url(), "range": { "start": { "line": 2, "character": 19 }, "end": { "line": 2, "character": 28 }, @@ -6594,7 +6507,7 @@ fn lsp_asset_document_dom_code_action() { let res = client.write_request( "textDocument/codeAction", json!({ - "textDocument": { "uri": "asset:///lib.dom.d.ts" }, + "textDocument": { "uri": "deno:/asset/lib.dom.d.ts" }, "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 0, "character": 0 }, @@ -6816,7 +6729,7 @@ export class DuckConfig { "message": "Cannot find name 'DuckConfigOptions'." }], "data": { - "specifier": "file:///a/file00.ts", + "uri": "file:///a/file00.ts", "fixId": "fixMissingImport" } }, { @@ -6874,7 +6787,7 @@ export class DuckConfig { "message": "Cannot find name 'DuckConfig'." 
}], "data": { - "specifier": "file:///a/file00.ts", + "uri": "file:///a/file00.ts", "fixId": "fixMissingImport" } }), @@ -6919,7 +6832,7 @@ export class DuckConfig { }] }, "data": { - "specifier": "file:///a/file00.ts", + "uri": "file:///a/file00.ts", "fixId": "fixMissingImport" } }) @@ -7150,7 +7063,7 @@ fn lsp_code_actions_refactor() { "kind": "refactor.move.newFile", "isPreferred": false, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 1, "character": 0 } @@ -7163,7 +7076,7 @@ fn lsp_code_actions_refactor() { "kind": "refactor.extract.function", "isPreferred": false, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 1, "character": 0 } @@ -7176,7 +7089,7 @@ fn lsp_code_actions_refactor() { "kind": "refactor.extract.constant", "isPreferred": false, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 1, "character": 0 } @@ -7192,7 +7105,7 @@ fn lsp_code_actions_refactor() { "reason": "This file already has a default export" }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 1, "character": 0 } @@ -7208,7 +7121,7 @@ fn lsp_code_actions_refactor() { "reason": "This file already has a default export" }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 1, "character": 0 } @@ -7224,7 +7137,7 @@ fn lsp_code_actions_refactor() { "reason": "Selection is not an import declaration." }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 1, "character": 0 } @@ -7240,7 +7153,7 @@ fn lsp_code_actions_refactor() { "reason": "Selection is not an import declaration." }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 1, "character": 0 } @@ -7256,7 +7169,7 @@ fn lsp_code_actions_refactor() { "reason": "Selection is not an import declaration." }, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 1, "character": 0 } @@ -7273,7 +7186,7 @@ fn lsp_code_actions_refactor() { "kind": "refactor.extract.interface", "isPreferred": true, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 7 }, "end": { "line": 0, "character": 33 } @@ -7311,7 +7224,7 @@ fn lsp_code_actions_refactor() { }, "isPreferred": true, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 7 }, "end": { "line": 0, "character": 33 } @@ -7448,7 +7361,7 @@ fn lsp_code_actions_imports_respects_fmt_config() { "message": "Cannot find name 'DuckConfigOptions'." 
}], "data": { - "specifier": temp_dir.url().join("file00.ts").unwrap(), + "uri": temp_dir.url().join("file00.ts").unwrap(), "fixId": "fixMissingImport" } }), @@ -7484,7 +7397,7 @@ fn lsp_code_actions_imports_respects_fmt_config() { }] }, "data": { - "specifier": temp_dir.url().join("file00.ts").unwrap(), + "uri": temp_dir.url().join("file00.ts").unwrap(), "fixId": "fixMissingImport" } }) @@ -7679,7 +7592,7 @@ fn lsp_code_actions_refactor_no_disabled_support() { "kind": "refactor.move.newFile", "isPreferred": false, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 14, "character": 0 } @@ -7692,7 +7605,7 @@ fn lsp_code_actions_refactor_no_disabled_support() { "kind": "refactor.extract.function", "isPreferred": false, "data": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "range": { "start": { "line": 0, "character": 0 }, "end": { "line": 14, "character": 0 } @@ -7863,7 +7776,7 @@ fn lsp_completions() { "insertTextFormat": 1, "data": { "tsc": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "position": 5, "name": "build", "useCodeSnippet": false @@ -7952,7 +7865,7 @@ fn lsp_completions_optional() { "commitCharacters": [".", ",", ";", "("], "data": { "tsc": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "position": 79, "name": "b", "useCodeSnippet": false @@ -7972,7 +7885,7 @@ fn lsp_completions_optional() { "insertText": "b", "data": { "tsc": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "position": 79, "name": "b", "useCodeSnippet": false @@ -8813,7 +8726,7 @@ fn lsp_infer_return_type() { "kind": "refactor.rewrite.function.returnType", "isPreferred": false, "data": { - "specifier": file.url(), + "uri": file.url(), "range": { "start": { "line": 1, "character": 15 }, "end": { "line": 1, "character": 18 }, @@ -8833,7 +8746,7 @@ fn lsp_infer_return_type() { "kind": "refactor.rewrite.function.returnType", "isPreferred": false, "data": { - "specifier": file.url(), + "uri": file.url(), "range": { "start": { "line": 1, "character": 15 }, "end": { "line": 1, "character": 18 }, @@ -9617,7 +9530,7 @@ fn lsp_completions_snippet() { ], "data": { "tsc": { - "specifier": "file:///a/a.tsx", + "uri": "file:///a/a.tsx", "position": 87, "name": "type", "useCodeSnippet": false @@ -9645,7 +9558,7 @@ fn lsp_completions_snippet() { ], "data": { "tsc": { - "specifier": "file:///a/a.tsx", + "uri": "file:///a/a.tsx", "position": 87, "name": "type", "useCodeSnippet": false @@ -9716,7 +9629,7 @@ fn lsp_completions_no_snippet() { ], "data": { "tsc": { - "specifier": "file:///a/a.tsx", + "uri": "file:///a/a.tsx", "position": 87, "name": "type", "useCodeSnippet": false @@ -9819,7 +9732,7 @@ fn lsp_completions_npm() { "insertTextFormat": 1, "data": { "tsc": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "position": 69, "name": "MyClass", "useCodeSnippet": false @@ -9836,7 +9749,7 @@ fn lsp_completions_npm() { "insertTextFormat": 1, "data": { "tsc": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "position": 69, "name": "MyClass", "useCodeSnippet": false @@ -12731,7 +12644,7 @@ fn lsp_completions_complete_function_calls() { "insertTextFormat": 1, "data": { "tsc": { - "specifier": "file:///a/file.ts", + "uri": "file:///a/file.ts", "position": 3, "name": "map", "useCodeSnippet": true @@ -12757,6 +12670,107 @@ fn lsp_completions_complete_function_calls() { client.shutdown(); } +// Regression test for 
https://github.com/denoland/vscode_deno/issues/1276. +#[test] +#[timeout(300_000)] +fn lsp_completions_private_class_fields() { + let context = TestContextBuilder::new() + .use_http_server() + .use_temp_cwd() + .build(); + let mut client = context.new_lsp_command().build(); + client.initialize_default(); + client.did_open(json!({ + "textDocument": { + "uri": "file:///a/file.ts", + "languageId": "typescript", + "version": 1, + "text": r#" + export class SomeClass { + #prop = 1; + foo() { + pro; + #pro; + } + } + "#, + }, + })); + + let list = client.get_completion_list( + "file:///a/file.ts", + (4, 15), + json!({ + "triggerKind": 2, + "triggerCharacter": ".", + }), + ); + assert!(!list.is_incomplete); + let item = list.items.iter().find(|i| i.label == "#prop").unwrap(); + assert_eq!( + json!(item), + json!({ + "label": "#prop", + "kind": 5, + "sortText": "14", + "filterText": "prop", + "insertText": "this.#prop", + "commitCharacters": [ + ".", + ",", + ";", + "(" + ], + "data": { + "tsc": { + "uri": "file:///a/file.ts", + "position": 88, + "name": "#prop", + "source": "ThisProperty/", + "useCodeSnippet": false, + }, + }, + }), + ); + + let list = client.get_completion_list( + "file:///a/file.ts", + (5, 16), + json!({ + "triggerKind": 2, + "triggerCharacter": ".", + }), + ); + assert!(!list.is_incomplete); + let item = list.items.iter().find(|i| i.label == "#prop").unwrap(); + assert_eq!( + json!(item), + json!({ + "label": "#prop", + "kind": 5, + "sortText": "14", + "filterText": "this.#prop", + "insertText": "this.#prop", + "commitCharacters": [ + ".", + ",", + ";", + "(" + ], + "data": { + "tsc": { + "uri": "file:///a/file.ts", + "position": 106, + "name": "#prop", + "source": "ThisProperty/", + "useCodeSnippet": false, + }, + }, + }), + ); + client.shutdown(); +} + #[test] #[timeout(300_000)] fn lsp_workspace_symbol() { @@ -14014,7 +14028,7 @@ fn lsp_closed_file_find_references_low_document_pre_load() { ); // won't have results because the document won't be pre-loaded - assert_eq!(res, json!([])); + assert_eq!(res, json!(null)); client.shutdown(); } @@ -14069,7 +14083,7 @@ fn lsp_closed_file_find_references_excluded_path() { ); // won't have results because the documents won't be pre-loaded - assert_eq!(res, json!([])); + assert_eq!(res, json!(null)); client.shutdown(); } @@ -14128,7 +14142,7 @@ fn lsp_data_urls_with_jsx_compiler_option() { "end": { "line": 1, "character": 1 } } }, { - "uri": "deno:/ed0224c51f7e2a845dfc0941ed6959675e5e3e3d2a39b127f0ff569c1ffda8d8/data_url.ts", + "uri": "deno:/data_url/ed0224c51f7e2a845dfc0941ed6959675e5e3e3d2a39b127f0ff569c1ffda8d8.ts", "range": { "start": { "line": 0, "character": 7 }, "end": {"line": 0, "character": 14 }, @@ -14881,7 +14895,7 @@ fn lsp_deno_json_scopes_node_modules_dir() { assert_eq!( res, json!([{ - "targetUri": canon_temp_dir.join("project1/node_modules/.deno/@denotest+add@1.0.0/node_modules/@denotest/add/index.d.ts").unwrap(), + "targetUri": canon_temp_dir.join("project1/node_modules/.deno/%40denotest%2Badd%401.0.0/node_modules/%40denotest/add/index.d.ts").unwrap(), "targetRange": { "start": { "line": 0, @@ -14932,7 +14946,7 @@ fn lsp_deno_json_scopes_node_modules_dir() { assert_eq!( res, json!([{ - "targetUri": canon_temp_dir.join("project2/node_modules/.deno/@denotest+add@1.0.0/node_modules/@denotest/add/index.d.ts").unwrap(), + "targetUri": canon_temp_dir.join("project2/node_modules/.deno/%40denotest%2Badd%401.0.0/node_modules/%40denotest/add/index.d.ts").unwrap(), "targetRange": { "start": { "line": 0, @@ -14983,7 +14997,7 @@ fn 
lsp_deno_json_scopes_node_modules_dir() { assert_eq!( res, json!([{ - "targetUri": canon_temp_dir.join("project2/project3/node_modules/.deno/@denotest+add@1.0.0/node_modules/@denotest/add/index.d.ts").unwrap(), + "targetUri": canon_temp_dir.join("project2/project3/node_modules/.deno/%40denotest%2Badd%401.0.0/node_modules/%40denotest/add/index.d.ts").unwrap(), "targetRange": { "start": { "line": 0, @@ -16054,7 +16068,7 @@ fn lsp_deno_json_workspace_node_modules_dir() { assert_eq!( res, json!([{ - "targetUri": canon_temp_dir.join("project1/node_modules/.deno/@denotest+add@1.0.0/node_modules/@denotest/add/index.d.ts").unwrap(), + "targetUri": canon_temp_dir.join("project1/node_modules/.deno/%40denotest%2Badd%401.0.0/node_modules/%40denotest/add/index.d.ts").unwrap(), "targetRange": { "start": { "line": 0, @@ -16832,7 +16846,7 @@ fn sloppy_imports_not_enabled() { temp_dir.join("a").url_file(), ), "data": { - "specifier": temp_dir.join("a").url_file(), + "uri": temp_dir.join("a").url_file(), "to": temp_dir.join("a.ts").url_file(), "message": "Add a '.ts' extension.", }, @@ -16859,7 +16873,7 @@ fn sloppy_imports_not_enabled() { temp_dir.join("a").url_file(), ), "data": { - "specifier": temp_dir.join("a").url_file(), + "uri": temp_dir.join("a").url_file(), "to": temp_dir.join("a.ts").url_file(), "message": "Add a '.ts' extension.", },
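Note (illustrative, not part of the patch): the test updates above reflect the new `get_diagnostics` response shape — a positional list of per-file diagnostic arrays (`json!([[ ... ]])`) rather than a map keyed by specifier (`json!({ specifier: [ ... ] })`), matching the `diagnosticsList` change in `cli/tsc/98_lsp.js`. Below is a minimal standalone sketch of consuming that shape, assuming only the `serde_json` crate; the helper name `pair_diagnostics` is hypothetical and not from this change.

use serde_json::{json, Value};

// Pair each requested specifier with its diagnostics by position:
// the i-th entry in the response corresponds to the i-th requested specifier.
fn pair_diagnostics<'a>(
  specifiers: &'a [&'a str],
  diagnostics_list: &'a [Value],
) -> impl Iterator<Item = (&'a str, &'a Value)> + 'a {
  specifiers.iter().copied().zip(diagnostics_list.iter())
}

fn main() {
  let specifiers = ["file:///a.ts", "file:///b.ts"];
  // New response shape: a list of per-file diagnostic arrays, in request order.
  let response = json!([[{ "code": 2584 }], []]);
  let list = response.as_array().unwrap();
  for (specifier, diags) in pair_diagnostics(&specifiers, list) {
    println!("{specifier}: {} diagnostic(s)", diags.as_array().unwrap().len());
  }
}

Because results are aligned positionally rather than keyed, callers must preserve the order of the requested specifiers when pairing them with the returned arrays, which is what the rewritten test assertions in this section rely on.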