Mirror of https://github.com/denoland/deno.git, synced 2025-09-26 12:19:12 +00:00

refactor(lsp): remove Documents mutex and require Documents to be mutated to change it (#12747)

parent 14f83da221
commit c82ce74133

5 changed files with 483 additions and 477 deletions
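The commit message above describes the shape of the change: `Documents` no longer hides an internal mutex behind `&self` methods; anything that changes the store now takes `&mut Documents`, while the shared, disk-backed pieces keep their own interior mutability. Below is a minimal, std-only sketch of that ownership pattern using simplified stand-in types — it is not the real Deno LSP code (which uses `ModuleSpecifier`, `Document`, and a non-poisoning mutex), just an illustration of the pattern.

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Stand-in for the lazily refreshed, disk-backed document cache.
#[derive(Default)]
struct FileSystemDocs {
  docs: HashMap<String, String>, // specifier -> contents read from disk
}

// Stand-in for the LSP document store: mutation requires `&mut self`,
// while the shared cache sits behind its own mutex.
#[derive(Default, Clone)]
struct Documents {
  open_docs: HashMap<String, String>,  // documents owned by the editor
  fs_docs: Arc<Mutex<FileSystemDocs>>, // shared cache, refreshed on demand
}

impl Documents {
  // Opening a document mutates the store, so it now takes `&mut self`.
  fn open(&mut self, specifier: String, content: String) {
    self.fs_docs.lock().unwrap().docs.remove(&specifier);
    self.open_docs.insert(specifier, content);
  }

  // Reads can stay `&self`; the disk-backed cache is consulted behind its mutex.
  fn get(&self, specifier: &str) -> Option<String> {
    self
      .open_docs
      .get(specifier)
      .cloned()
      .or_else(|| self.fs_docs.lock().unwrap().docs.get(specifier).cloned())
  }
}

fn main() {
  let mut documents = Documents::default();
  documents.open("file:///a.ts".to_string(), "console.log(1);".to_string());
  assert!(documents.get("file:///a.ts").is_some());
}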
@@ -436,7 +436,7 @@ mod tests {
 source_fixtures: &[(&str, &str)],
 location: &Path,
 ) -> language_server::StateSnapshot {
-let documents = Documents::new(location);
+let mut documents = Documents::new(location);
 for (specifier, source, version, language_id) in fixtures {
 let specifier =
 resolve_url(specifier).expect("failed to create specifier");
@@ -175,7 +175,7 @@ impl DiagnosticsServer {
 update_diagnostics(
 &client,
 collection.clone(),
-&snapshot,
+snapshot,
 &ts_server
 ).await;
 }
@@ -352,7 +352,7 @@ async fn generate_lint_diagnostics(
 }

 async fn generate_ts_diagnostics(
-snapshot: &language_server::StateSnapshot,
+snapshot: Arc<language_server::StateSnapshot>,
 collection: Arc<Mutex<DiagnosticCollection>>,
 ts_server: &tsc::TsServer,
 ) -> Result<DiagnosticVec, AnyError> {
@@ -474,16 +474,14 @@ fn diagnose_dependency(
 /// Generate diagnostics for dependencies of a module, attempting to resolve
 /// dependencies on the local file system or in the DENO_DIR cache.
 async fn generate_deps_diagnostics(
-snapshot: &language_server::StateSnapshot,
+snapshot: Arc<language_server::StateSnapshot>,
 collection: Arc<Mutex<DiagnosticCollection>>,
 ) -> Result<DiagnosticVec, AnyError> {
-let config = snapshot.config.clone();
-let documents = snapshot.documents.clone();
 tokio::task::spawn(async move {
 let mut diagnostics_vec = Vec::new();

-for document in documents.documents(true, true) {
-if !config.specifier_enabled(document.specifier()) {
+for document in snapshot.documents.documents(true, true) {
+if !snapshot.config.specifier_enabled(document.specifier()) {
 continue;
 }
 let version = document.maybe_lsp_version();
@@ -496,12 +494,12 @@ async fn generate_deps_diagnostics(
 for (_, dependency) in document.dependencies() {
 diagnose_dependency(
 &mut diagnostics,
-&documents,
+&snapshot.documents,
 &dependency.maybe_code,
 );
 diagnose_dependency(
 &mut diagnostics,
-&documents,
+&snapshot.documents,
 &dependency.maybe_type,
 );
 }
@@ -563,7 +561,7 @@ async fn publish_diagnostics(
 async fn update_diagnostics(
 client: &lspower::Client,
 collection: Arc<Mutex<DiagnosticCollection>>,
-snapshot: &language_server::StateSnapshot,
+snapshot: Arc<language_server::StateSnapshot>,
 ts_server: &tsc::TsServer,
 ) {
 let mark = snapshot.performance.mark("update_diagnostics", None::<()>);
@@ -573,7 +571,7 @@ async fn update_diagnostics(
 .performance
 .mark("update_diagnostics_lint", None::<()>);
 let collection = collection.clone();
-let diagnostics = generate_lint_diagnostics(snapshot, collection.clone())
+let diagnostics = generate_lint_diagnostics(&snapshot, collection.clone())
 .await
 .map_err(|err| {
 error!("Error generating lint diagnostics: {}", err);
@@ -585,7 +583,7 @@ async fn update_diagnostics(
 collection.set(DiagnosticSource::DenoLint, diagnostic_record);
 }
 }
-publish_diagnostics(client, collection, snapshot).await;
+publish_diagnostics(client, collection, &snapshot).await;
 snapshot.performance.measure(mark);
 };
@@ -595,7 +593,7 @@ async fn update_diagnostics(
 .mark("update_diagnostics_ts", None::<()>);
 let collection = collection.clone();
 let diagnostics =
-generate_ts_diagnostics(snapshot, collection.clone(), ts_server)
+generate_ts_diagnostics(snapshot.clone(), collection.clone(), ts_server)
 .await
 .map_err(|err| {
 error!("Error generating TypeScript diagnostics: {}", err);
@@ -607,7 +605,7 @@ async fn update_diagnostics(
 collection.set(DiagnosticSource::TypeScript, diagnostic_record);
 }
 }
-publish_diagnostics(client, collection, snapshot).await;
+publish_diagnostics(client, collection, &snapshot).await;
 snapshot.performance.measure(mark);
 };
@@ -616,19 +614,20 @@ async fn update_diagnostics(
 .performance
 .mark("update_diagnostics_deps", None::<()>);
 let collection = collection.clone();
-let diagnostics = generate_deps_diagnostics(snapshot, collection.clone())
-.await
-.map_err(|err| {
-error!("Error generating Deno diagnostics: {}", err);
-})
-.unwrap_or_default();
+let diagnostics =
+generate_deps_diagnostics(snapshot.clone(), collection.clone())
+.await
+.map_err(|err| {
+error!("Error generating Deno diagnostics: {}", err);
+})
+.unwrap_or_default();
 {
 let mut collection = collection.lock().await;
 for diagnostic_record in diagnostics {
 collection.set(DiagnosticSource::Deno, diagnostic_record);
 }
 }
-publish_diagnostics(client, collection, snapshot).await;
+publish_diagnostics(client, collection, &snapshot).await;
 snapshot.performance.measure(mark);
 };
@@ -652,7 +651,7 @@ mod tests {
 fixtures: &[(&str, &str, i32, LanguageId)],
 location: &Path,
 ) -> StateSnapshot {
-let documents = Documents::new(location);
+let mut documents = Documents::new(location);
 for (specifier, source, version, language_id) in fixtures {
 let specifier =
 resolve_url(specifier).expect("failed to create specifier");
@@ -186,7 +186,7 @@ impl AssetOrDocument {
 self.document().map(|d| d.maybe_parsed_source()).flatten()
 }

-pub fn document_version(&self) -> Option<i32> {
+pub fn document_lsp_version(&self) -> Option<i32> {
 self.document().map(|d| d.maybe_lsp_version()).flatten()
 }
 }
@@ -415,14 +415,6 @@ impl Document {
 })))
 }

-fn with_closed(&self) -> Document {
-Document(Arc::new(DocumentInner {
-maybe_lsp_version: None,
-maybe_language_id: None,
-..(*self.0).clone()
-}))
-}
-
 fn with_navigation_tree(
 &self,
 navigation_tree: Arc<tsc::NavigationTree>,
@@ -624,46 +616,81 @@ fn recurse_dependents(
 }

 #[derive(Debug, Default)]
-struct DocumentsInner {
-/// The DENO_DIR that the documents looks for non-file based modules.
+struct SpecifierResolver {
 cache: HttpCache,
-/// A flag that indicates that stated data is potentially invalid and needs to
-/// be recalculated before being considered valid.
-dirty: bool,
-/// A map where the key is a specifier and the value is a set of specifiers
-/// that depend on the key.
-dependents_map: HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>,
-/// A map of documents that can either be "open" in the language server, or
-/// just present on disk.
-docs: HashMap<ModuleSpecifier, Document>,
-/// Any imports to the context supplied by configuration files. This is like
-/// the imports into the a module graph in CLI.
-imports: HashMap<ModuleSpecifier, SyntheticModule>,
-/// The optional import map that should be used when resolving dependencies.
-maybe_import_map: Option<ImportMapResolver>,
-/// The optional JSX resolver, which is used when JSX imports are configured.
-maybe_jsx_resolver: Option<JsxResolver>,
-redirects: HashMap<ModuleSpecifier, ModuleSpecifier>,
+redirects: Mutex<HashMap<ModuleSpecifier, ModuleSpecifier>>,
 }

-impl DocumentsInner {
-fn new(location: &Path) -> Self {
+impl SpecifierResolver {
+pub fn new(cache_path: &Path) -> Self {
 Self {
-cache: HttpCache::new(location),
-dirty: true,
-dependents_map: HashMap::default(),
-docs: HashMap::default(),
-imports: HashMap::default(),
-maybe_import_map: None,
-maybe_jsx_resolver: None,
-redirects: HashMap::default(),
+cache: HttpCache::new(cache_path),
+redirects: Mutex::new(HashMap::new()),
 }
 }

-/// Adds a document by reading the document from the file system.
-fn add(&mut self, specifier: ModuleSpecifier) -> Option<Document> {
-let fs_version = self.calculate_fs_version(&specifier)?;
-let path = self.get_path(&specifier)?;
+pub fn resolve(
+&self,
+specifier: &ModuleSpecifier,
+) -> Option<ModuleSpecifier> {
+let scheme = specifier.scheme();
+if !SUPPORTED_SCHEMES.contains(&scheme) {
+return None;
+}
+
+if scheme == "data" || scheme == "blob" || scheme == "file" {
+Some(specifier.clone())
+} else {
+let mut redirects = self.redirects.lock();
+if let Some(specifier) = redirects.get(specifier) {
+Some(specifier.clone())
+} else {
+let redirect = self.resolve_remote(specifier, 10)?;
+redirects.insert(specifier.clone(), redirect.clone());
+Some(redirect)
+}
+}
+}
+
+fn resolve_remote(
+&self,
+specifier: &ModuleSpecifier,
+redirect_limit: usize,
+) -> Option<ModuleSpecifier> {
+let cache_filename = self.cache.get_cache_filename(specifier)?;
+if redirect_limit > 0 && cache_filename.is_file() {
+let headers = http_cache::Metadata::read(&cache_filename)
+.ok()
+.map(|m| m.headers)?;
+if let Some(location) = headers.get("location") {
+let redirect =
+deno_core::resolve_import(location, specifier.as_str()).ok()?;
+self.resolve_remote(&redirect, redirect_limit - 1)
+} else {
+Some(specifier.clone())
+}
+} else {
+None
+}
+}
+}
+
+#[derive(Debug, Default)]
+struct FileSystemDocuments {
+docs: HashMap<ModuleSpecifier, Document>,
+dirty: bool,
+}
+
+impl FileSystemDocuments {
+/// Adds or updates a document by reading the document from the file system.
+fn refresh_document(
+&mut self,
+cache: &HttpCache,
+maybe_resolver: Option<&dyn deno_graph::source::Resolver>,
+specifier: ModuleSpecifier,
+) -> Option<Document> {
+let path = get_document_path(cache, &specifier)?;
+let fs_version = calculate_fs_version(&path)?;
 let bytes = fs::read(path).ok()?;
 let doc = if specifier.scheme() == "file" {
 let maybe_charset =
@@ -674,10 +701,10 @@ impl DocumentsInner {
 fs_version,
 None,
 content,
-self.get_maybe_resolver(),
+maybe_resolver,
 )
 } else {
-let cache_filename = self.cache.get_cache_filename(&specifier)?;
+let cache_filename = cache.get_cache_filename(&specifier)?;
 let metadata = http_cache::Metadata::read(&cache_filename).ok()?;
 let maybe_content_type = metadata.headers.get("content-type").cloned();
 let maybe_headers = Some(&metadata.headers);
@@ -688,101 +715,165 @@ impl DocumentsInner {
 fs_version,
 maybe_headers,
 content,
-self.get_maybe_resolver(),
+maybe_resolver,
 )
 };
 self.dirty = true;
 self.docs.insert(specifier, doc)
 }
+}

-/// Iterate through the documents, building a map where the key is a unique
-/// document and the value is a set of specifiers that depend on that
-/// document.
-fn calculate_dependents(&mut self) {
-let mut dependents_map: HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>> =
-HashMap::new();
-for (specifier, doc) in &self.docs {
-if let Some(Ok(module)) = doc.maybe_module() {
-for dependency in module.dependencies.values() {
-if let Some(dep) = dependency.get_code() {
-dependents_map
-.entry(dep.clone())
-.or_default()
-.insert(specifier.clone());
-}
-if let Some(dep) = dependency.get_type() {
-dependents_map
-.entry(dep.clone())
-.or_default()
-.insert(specifier.clone());
-}
-}
-if let Some((_, Some(Ok((dep, _))))) = &module.maybe_types_dependency {
-dependents_map
-.entry(dep.clone())
-.or_default()
-.insert(specifier.clone());
-}
-}
-}
-self.dependents_map = dependents_map;
-}
-
-fn calculate_fs_version(
-&self,
-specifier: &ModuleSpecifier,
-) -> Option<String> {
-let path = self.get_path(specifier)?;
-let metadata = fs::metadata(path).ok()?;
-if let Ok(modified) = metadata.modified() {
-if let Ok(n) = modified.duration_since(SystemTime::UNIX_EPOCH) {
-Some(n.as_millis().to_string())
+fn calculate_fs_version(path: &Path) -> Option<String> {
+let metadata = fs::metadata(path).ok()?;
+if let Ok(modified) = metadata.modified() {
+if let Ok(n) = modified.duration_since(SystemTime::UNIX_EPOCH) {
+Some(n.as_millis().to_string())
 } else {
 Some("1".to_string())
 }
+} else {
+Some("1".to_string())
+}
+}
+
+fn get_document_path(
+cache: &HttpCache,
+specifier: &ModuleSpecifier,
+) -> Option<PathBuf> {
+if specifier.scheme() == "file" {
+specifier.to_file_path().ok()
+} else {
+let path = cache.get_cache_filename(specifier)?;
+if path.is_file() {
+Some(path)
+} else {
+None
+}
+}
+}
+
+#[derive(Debug, Clone, Default)]
+pub(crate) struct Documents {
+/// The DENO_DIR that the documents looks for non-file based modules.
+cache: HttpCache,
+/// A flag that indicates that stated data is potentially invalid and needs to
+/// be recalculated before being considered valid.
+dirty: bool,
+/// A map where the key is a specifier and the value is a set of specifiers
+/// that depend on the key.
+dependents_map: Arc<HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>>>,
+/// A map of documents that are "open" in the language server.
+open_docs: HashMap<ModuleSpecifier, Document>,
+/// Documents stored on the file system.
+file_system_docs: Arc<Mutex<FileSystemDocuments>>,
+/// Any imports to the context supplied by configuration files. This is like
+/// the imports into the a module graph in CLI.
+imports: Arc<HashMap<ModuleSpecifier, SyntheticModule>>,
+/// The optional import map that should be used when resolving dependencies.
+maybe_import_map: Option<ImportMapResolver>,
+/// The optional JSX resolver, which is used when JSX imports are configured.
+maybe_jsx_resolver: Option<JsxResolver>,
+/// Resolves a specifier to its final redirected to specifier.
+specifier_resolver: Arc<SpecifierResolver>,
+}
+
+impl Documents {
+pub fn new(location: &Path) -> Self {
+Self {
+cache: HttpCache::new(location),
+dirty: true,
+dependents_map: Default::default(),
+open_docs: HashMap::default(),
+file_system_docs: Default::default(),
+imports: Default::default(),
+maybe_import_map: None,
+maybe_jsx_resolver: None,
+specifier_resolver: Arc::new(SpecifierResolver::new(location)),
+}
 }
 }

-fn change(
+/// "Open" a document from the perspective of the editor, meaning that
+/// requests for information from the document will come from the in-memory
+/// representation received from the language server client, versus reading
+/// information from the disk.
+pub fn open(
+&mut self,
+specifier: ModuleSpecifier,
+version: i32,
+language_id: LanguageId,
+content: Arc<String>,
+) -> Document {
+let maybe_resolver = self.get_maybe_resolver();
+let document = Document::open(
+specifier.clone(),
+version,
+language_id,
+content,
+maybe_resolver,
+);
+let mut file_system_docs = self.file_system_docs.lock();
+file_system_docs.docs.remove(&specifier);
+file_system_docs.dirty = true;
+self.open_docs.insert(specifier, document.clone());
+self.dirty = true;
+document
+}
+
+/// Apply language server content changes to an open document.
+pub fn change(
 &mut self,
 specifier: &ModuleSpecifier,
 version: i32,
 changes: Vec<lsp::TextDocumentContentChangeEvent>,
 ) -> Result<Document, AnyError> {
-let doc = self.docs.get(specifier).map_or_else(
+let doc = self
+.open_docs
+.get(specifier)
+.cloned()
+.or_else(|| {
+let mut file_system_docs = self.file_system_docs.lock();
+file_system_docs.docs.remove(specifier)
+})
+.map_or_else(
 || {
 Err(custom_error(
 "NotFound",
 format!("The specifier \"{}\" was not found.", specifier),
 ))
 },
 Ok,
 )?;
 self.dirty = true;
 let doc = doc.with_change(version, changes, self.get_maybe_resolver())?;
-self.docs.insert(doc.specifier().clone(), doc.clone());
+self.open_docs.insert(doc.specifier().clone(), doc.clone());
 Ok(doc)
 }

-fn close(&mut self, specifier: &ModuleSpecifier) -> Result<(), AnyError> {
-let doc = self.docs.get_mut(specifier).map_or_else(
-|| {
-Err(custom_error(
+/// Close an open document, this essentially clears any editor state that is
+/// being held, and the document store will revert to the file system if
+/// information about the document is required.
+pub fn close(&mut self, specifier: &ModuleSpecifier) -> Result<(), AnyError> {
+if self.open_docs.remove(specifier).is_some() {
+self.dirty = true;
+} else {
+let mut file_system_docs = self.file_system_docs.lock();
+if file_system_docs.docs.remove(specifier).is_some() {
+file_system_docs.dirty = true;
+} else {
+return Err(custom_error(
 "NotFound",
 format!("The specifier \"{}\" was not found.", specifier),
-))
-},
-Ok,
-)?;
-*doc = doc.with_closed();
-self.dirty = true;
+));
+}
+}
 Ok(())
 }

-fn contains_import(
-&mut self,
+/// Return `true` if the provided specifier can be resolved to a document,
+/// otherwise `false`.
+pub fn contains_import(
+&self,
 specifier: &str,
 referrer: &ModuleSpecifier,
 ) -> bool {
@@ -799,26 +890,21 @@ impl DocumentsInner {
 }
 }

-fn contains_specifier(&mut self, specifier: &ModuleSpecifier) -> bool {
-let specifier = self
-.resolve_specifier(specifier)
-.unwrap_or_else(|| specifier.clone());
-if !self.is_valid(&specifier) {
-self.add(specifier.clone());
-}
-self.docs.contains_key(&specifier)
+/// Return `true` if the specifier can be resolved to a document.
+pub fn contains_specifier(&self, specifier: &ModuleSpecifier) -> bool {
+self.get(specifier).is_some()
 }

-fn dependents(
+/// Return an array of specifiers, if any, that are dependent upon the
+/// supplied specifier. This is used to determine invalidation of diagnostics
+/// when a module has been changed.
+pub fn dependents(
 &mut self,
 specifier: &ModuleSpecifier,
 ) -> Vec<ModuleSpecifier> {
-if self.dirty {
-self.calculate_dependents();
-self.dirty = false;
-}
+self.calculate_dependents_if_dirty();
 let mut dependents = HashSet::new();
-if let Some(specifier) = self.resolve_specifier(specifier) {
+if let Some(specifier) = self.specifier_resolver.resolve(specifier) {
 recurse_dependents(&specifier, &self.dependents_map, &mut dependents);
 dependents.into_iter().collect()
 } else {
@@ -826,106 +912,82 @@ impl DocumentsInner {
 }
 }

-fn get(&mut self, specifier: &ModuleSpecifier) -> Option<&Document> {
-let specifier = self.resolve_specifier(specifier)?;
-if !self.is_valid(&specifier) {
-self.add(specifier.clone());
-}
-self.docs.get(&specifier)
-}
-
-fn get_cached(&mut self, specifier: &ModuleSpecifier) -> Option<&Document> {
-let specifier = self
-.resolve_specifier(specifier)
-.unwrap_or_else(|| specifier.clone());
-// this does not use `self.get` since that lazily adds documents, and we
-// only care about documents already in the cache.
-self.docs.get(&specifier)
-}
-
-fn get_maybe_resolver(&self) -> Option<&dyn deno_graph::source::Resolver> {
-if self.maybe_jsx_resolver.is_some() {
-self.maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver())
+/// Return a document for the specifier.
+pub fn get(&self, specifier: &ModuleSpecifier) -> Option<Document> {
+let specifier = self.specifier_resolver.resolve(specifier)?;
+if let Some(document) = self.open_docs.get(&specifier) {
+Some(document.clone())
 } else {
-self.maybe_import_map.as_ref().map(|im| im.as_resolver())
-}
-}
-
-fn get_path(&self, specifier: &ModuleSpecifier) -> Option<PathBuf> {
-if specifier.scheme() == "file" {
-specifier.to_file_path().ok()
-} else {
-let path = self.cache.get_cache_filename(specifier)?;
-if path.is_file() {
-Some(path)
-} else {
-None
-}
-}
-}
-
-fn is_valid(&mut self, specifier: &ModuleSpecifier) -> bool {
-if self
-.get_cached(specifier)
-.map(|d| d.is_open())
-.unwrap_or(false)
-{
-true
-} else if let Some(specifier) = self.resolve_specifier(specifier) {
-self
+let mut file_system_docs = self.file_system_docs.lock();
+let fs_version = get_document_path(&self.cache, &specifier)
+.map(|path| calculate_fs_version(&path))
+.flatten();
+if file_system_docs
 .docs
 .get(&specifier)
 .map(|d| d.fs_version().to_string())
-== self.calculate_fs_version(&specifier)
-} else {
-// even though it isn't valid, it just can't exist, so we will say it is
-// valid
-true
+!= fs_version
+{
+// attempt to update the file on the file system
+file_system_docs.refresh_document(
+&self.cache,
+self.get_maybe_resolver(),
+specifier.clone(),
+);
+}
+file_system_docs.docs.get(&specifier).cloned()
 }
 }

-fn open(
-&mut self,
-specifier: ModuleSpecifier,
-version: i32,
-language_id: LanguageId,
-content: Arc<String>,
-) -> Document {
-let maybe_resolver = self.get_maybe_resolver();
-let document = Document::open(
-specifier.clone(),
-version,
-language_id,
-content,
-maybe_resolver,
-);
-self.docs.insert(specifier, document.clone());
-self.dirty = true;
-document
-}
-
-fn documents(
+/// Return a vector of documents that are contained in the document store,
+/// where `open_only` flag would provide only those documents currently open
+/// in the editor and `diagnosable_only` would provide only those documents
+/// that the language server can provide diagnostics for.
+pub fn documents(
 &self,
 open_only: bool,
 diagnosable_only: bool,
 ) -> Vec<Document> {
-self
-.docs
-.values()
-.filter_map(|doc| {
-let open = open_only && doc.is_open();
-let diagnosable = diagnosable_only && doc.is_diagnosable();
-if (!open_only || open) && (!diagnosable_only || diagnosable) {
-Some(doc.clone())
-} else {
-None
-}
-})
-.collect()
+if open_only {
+self
+.open_docs
+.values()
+.filter_map(|doc| {
+if !diagnosable_only || doc.is_diagnosable() {
+Some(doc.clone())
+} else {
+None
+}
+})
+.collect()
+} else {
+// it is technically possible for a Document to end up in both the open
+// and closed documents so we need to ensure we don't return duplicates
+let mut seen_documents = HashSet::new();
+let file_system_docs = self.file_system_docs.lock();
+self
+.open_docs
+.values()
+.chain(file_system_docs.docs.values())
+.filter_map(|doc| {
+// this prefers the open documents
+if seen_documents.insert(doc.specifier().clone())
+&& (!diagnosable_only || doc.is_diagnosable())
+{
+Some(doc.clone())
+} else {
+None
+}
+})
+.collect()
+}
 }

-fn resolve(
-&mut self,
+/// For a given set of string specifiers, resolve each one from the graph,
+/// for a given referrer. This is used to provide resolution information to
+/// tsc when type checking.
+pub fn resolve(
+&self,
 specifiers: Vec<String>,
 referrer: &ModuleSpecifier,
 ) -> Option<Vec<Option<(ModuleSpecifier, MediaType)>>> {
@@ -960,9 +1022,134 @@ impl DocumentsInner {
 Some(results)
 }

-fn resolve_dependency(
+/// Update the location of the on disk cache for the document store.
+pub fn set_location(&mut self, location: PathBuf) {
+// TODO update resolved dependencies?
+self.cache = HttpCache::new(&location);
+self.specifier_resolver = Arc::new(SpecifierResolver::new(&location));
+self.dirty = true;
+}
+
+/// Tries to cache a navigation tree that is associated with the provided specifier
+/// if the document stored has the same script version.
+pub fn try_cache_navigation_tree(
 &mut self,
 specifier: &ModuleSpecifier,
+script_version: &str,
+navigation_tree: Arc<tsc::NavigationTree>,
+) -> Result<(), AnyError> {
+if let Some(doc) = self.open_docs.get_mut(specifier) {
+if doc.script_version() == script_version {
+*doc = doc.with_navigation_tree(navigation_tree);
+}
+} else {
+let mut file_system_docs = self.file_system_docs.lock();
+if let Some(doc) = file_system_docs.docs.get_mut(specifier) {
+// ensure we are updating the same document
+// that the navigation tree was created for
+if doc.script_version() == script_version {
+*doc = doc.with_navigation_tree(navigation_tree);
+}
+} else {
+return Err(custom_error(
+"NotFound",
+format!("Specifier not found {}", specifier),
+));
+}
+}
+Ok(())
+}
+
+pub fn update_config(
+&mut self,
+maybe_import_map: Option<Arc<import_map::ImportMap>>,
+maybe_config_file: Option<&ConfigFile>,
+) {
+// TODO(@kitsonk) update resolved dependencies?
+self.maybe_import_map = maybe_import_map.map(ImportMapResolver::new);
+self.maybe_jsx_resolver = maybe_config_file
+.map(|cf| {
+cf.to_maybe_jsx_import_source_module()
+.map(|im| JsxResolver::new(im, self.maybe_import_map.clone()))
+})
+.flatten();
+self.imports = Arc::new(
+if let Some(Ok(Some(imports))) =
+maybe_config_file.map(|cf| cf.to_maybe_imports())
+{
+imports
+.into_iter()
+.map(|(referrer, dependencies)| {
+let dependencies =
+dependencies.into_iter().map(|s| (s, None)).collect();
+let module = SyntheticModule::new(
+referrer.clone(),
+dependencies,
+self.get_maybe_resolver(),
+);
+(referrer, module)
+})
+.collect()
+} else {
+HashMap::new()
+},
+);
+self.dirty = true;
+}
+
+/// Iterate through the documents, building a map where the key is a unique
+/// document and the value is a set of specifiers that depend on that
+/// document.
+fn calculate_dependents_if_dirty(&mut self) {
+let mut file_system_docs = self.file_system_docs.lock();
+if !file_system_docs.dirty && !self.dirty {
+return;
+}
+
+let mut dependents_map: HashMap<ModuleSpecifier, HashSet<ModuleSpecifier>> =
+HashMap::new();
+// favour documents that are open in case a document exists in both collections
+let documents = file_system_docs.docs.iter().chain(self.open_docs.iter());
+for (specifier, doc) in documents {
+if let Some(Ok(module)) = doc.maybe_module() {
+for dependency in module.dependencies.values() {
+if let Some(dep) = dependency.get_code() {
+dependents_map
+.entry(dep.clone())
+.or_default()
+.insert(specifier.clone());
+}
+if let Some(dep) = dependency.get_type() {
+dependents_map
+.entry(dep.clone())
+.or_default()
+.insert(specifier.clone());
+}
+}
+if let Some((_, Some(Ok((dep, _))))) = &module.maybe_types_dependency {
+dependents_map
+.entry(dep.clone())
+.or_default()
+.insert(specifier.clone());
+}
+}
+}
+self.dependents_map = Arc::new(dependents_map);
+self.dirty = false;
+file_system_docs.dirty = false;
+}
+
+fn get_maybe_resolver(&self) -> Option<&dyn deno_graph::source::Resolver> {
+if self.maybe_jsx_resolver.is_some() {
+self.maybe_jsx_resolver.as_ref().map(|jr| jr.as_resolver())
+} else {
+self.maybe_import_map.as_ref().map(|im| im.as_resolver())
+}
+}
+
+fn resolve_dependency(
+&self,
+specifier: &ModuleSpecifier,
 ) -> Option<(ModuleSpecifier, MediaType)> {
 let doc = self.get(specifier)?;
 let maybe_module = doc.maybe_module().map(|r| r.as_ref().ok()).flatten();
@@ -998,217 +1185,6 @@ impl DocumentsInner {
 }
 None
 }
-
-fn resolve_remote_specifier(
-&self,
-specifier: &ModuleSpecifier,
-redirect_limit: usize,
-) -> Option<ModuleSpecifier> {
-let cache_filename = self.cache.get_cache_filename(specifier)?;
-if redirect_limit > 0 && cache_filename.is_file() {
-let headers = http_cache::Metadata::read(&cache_filename)
-.ok()
-.map(|m| m.headers)?;
-if let Some(location) = headers.get("location") {
-let redirect =
-deno_core::resolve_import(location, specifier.as_str()).ok()?;
-self.resolve_remote_specifier(&redirect, redirect_limit - 1)
-} else {
-Some(specifier.clone())
-}
-} else {
-None
-}
-}
-
-fn resolve_specifier(
-&mut self,
-specifier: &ModuleSpecifier,
-) -> Option<ModuleSpecifier> {
-let scheme = specifier.scheme();
-if !SUPPORTED_SCHEMES.contains(&scheme) {
-return None;
-}
-
-if scheme == "data" || scheme == "blob" || scheme == "file" {
-Some(specifier.clone())
-} else if let Some(specifier) = self.redirects.get(specifier) {
-Some(specifier.clone())
-} else {
-let redirect = self.resolve_remote_specifier(specifier, 10)?;
-self.redirects.insert(specifier.clone(), redirect.clone());
-Some(redirect)
-}
-}
-
-fn set_location(&mut self, location: PathBuf) {
-// TODO update resolved dependencies?
-self.cache = HttpCache::new(&location);
-self.dirty = true;
-}
-
-fn set_navigation_tree(
-&mut self,
-specifier: &ModuleSpecifier,
-navigation_tree: Arc<tsc::NavigationTree>,
-) -> Result<(), AnyError> {
-let doc = self.docs.get_mut(specifier).ok_or_else(|| {
-custom_error("NotFound", format!("Specifier not found {}", specifier))
-})?;
-*doc = doc.with_navigation_tree(navigation_tree);
-Ok(())
-}
-
-fn update_config(
-&mut self,
-maybe_import_map: Option<Arc<import_map::ImportMap>>,
-maybe_config_file: Option<&ConfigFile>,
-) {
-// TODO(@kitsonk) update resolved dependencies?
-self.maybe_import_map = maybe_import_map.map(ImportMapResolver::new);
-self.maybe_jsx_resolver = maybe_config_file
-.map(|cf| {
-cf.to_maybe_jsx_import_source_module()
-.map(|im| JsxResolver::new(im, self.maybe_import_map.clone()))
-})
-.flatten();
-if let Some(Ok(Some(imports))) =
-maybe_config_file.map(|cf| cf.to_maybe_imports())
-{
-for (referrer, dependencies) in imports {
-let dependencies =
-dependencies.into_iter().map(|s| (s, None)).collect();
-let module = SyntheticModule::new(
-referrer.clone(),
-dependencies,
-self.get_maybe_resolver(),
-);
-self.imports.insert(referrer, module);
-}
-}
-self.dirty = true;
-}
-}
-
-#[derive(Debug, Clone, Default)]
-pub(crate) struct Documents(Arc<Mutex<DocumentsInner>>);
-
-impl Documents {
-pub fn new(location: &Path) -> Self {
-Self(Arc::new(Mutex::new(DocumentsInner::new(location))))
-}
-
-/// "Open" a document from the perspective of the editor, meaning that
-/// requests for information from the document will come from the in-memory
-/// representation received from the language server client, versus reading
-/// information from the disk.
-pub fn open(
-&self,
-specifier: ModuleSpecifier,
-version: i32,
-language_id: LanguageId,
-content: Arc<String>,
-) -> Document {
-self.0.lock().open(specifier, version, language_id, content)
-}
-
-/// Apply language server content changes to an open document.
-pub fn change(
-&self,
-specifier: &ModuleSpecifier,
-version: i32,
-changes: Vec<lsp::TextDocumentContentChangeEvent>,
-) -> Result<Document, AnyError> {
-self.0.lock().change(specifier, version, changes)
-}
-
-/// Close an open document, this essentially clears any editor state that is
-/// being held, and the document store will revert to the file system if
-/// information about the document is required.
-pub fn close(&self, specifier: &ModuleSpecifier) -> Result<(), AnyError> {
-self.0.lock().close(specifier)
-}
-
-/// Return `true` if the provided specifier can be resolved to a document,
-/// otherwise `false`.
-pub fn contains_import(
-&self,
-specifier: &str,
-referrer: &ModuleSpecifier,
-) -> bool {
-self.0.lock().contains_import(specifier, referrer)
-}
-
-/// Return `true` if the specifier can be resolved to a document.
-pub fn contains_specifier(&self, specifier: &ModuleSpecifier) -> bool {
-self.0.lock().contains_specifier(specifier)
-}
-
-/// Return an array of specifiers, if any, that are dependent upon the
-/// supplied specifier. This is used to determine invalidation of diagnostics
-/// when a module has been changed.
-pub fn dependents(
-&self,
-specifier: &ModuleSpecifier,
-) -> Vec<ModuleSpecifier> {
-self.0.lock().dependents(specifier)
-}
-
-/// Return a vector of documents that are contained in the document store,
-/// where `open_only` flag would provide only those documents currently open
-/// in the editor and `diagnosable_only` would provide only those documents
-/// that the language server can provide diagnostics for.
-pub fn documents(
-&self,
-open_only: bool,
-diagnosable_only: bool,
-) -> Vec<Document> {
-self.0.lock().documents(open_only, diagnosable_only)
-}
-
-/// Return a document for the specifier.
-pub fn get(&self, specifier: &ModuleSpecifier) -> Option<Document> {
-self.0.lock().get(specifier).cloned()
-}
-
-/// For a given set of string specifiers, resolve each one from the graph,
-/// for a given referrer. This is used to provide resolution information to
-/// tsc when type checking.
-pub fn resolve(
-&self,
-specifiers: Vec<String>,
-referrer: &ModuleSpecifier,
-) -> Option<Vec<Option<(ModuleSpecifier, MediaType)>>> {
-self.0.lock().resolve(specifiers, referrer)
-}
-
-/// Update the location of the on disk cache for the document store.
-pub fn set_location(&self, location: PathBuf) {
-self.0.lock().set_location(location)
-}
-
-/// Set a navigation tree that is associated with the provided specifier.
-pub fn set_navigation_tree(
-&self,
-specifier: &ModuleSpecifier,
-navigation_tree: Arc<tsc::NavigationTree>,
-) -> Result<(), AnyError> {
-self
-.0
-.lock()
-.set_navigation_tree(specifier, navigation_tree)
-}
-
-pub fn update_config(
-&self,
-maybe_import_map: Option<Arc<import_map::ImportMap>>,
-maybe_config_file: Option<&ConfigFile>,
-) {
-self
-.0
-.lock()
-.update_config(maybe_import_map, maybe_config_file)
-}
 }

 #[cfg(test)]
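One piece introduced earlier in this diff, `SpecifierResolver`, memoizes redirect lookups in a `Mutex<HashMap<..>>`, which is why resolution can stay a `&self` operation even though the surrounding `Documents` store is now mutated through `&mut self`. The following is a rough, std-only sketch of that caching idea with simplified types; the real code follows cached HTTP "location" headers with a redirect limit and uses a non-poisoning mutex, so the `unwrap()` and the trivial "resolution" below are stand-ins.

use std::collections::HashMap;
use std::sync::Mutex;

#[derive(Default)]
struct SpecifierResolver {
  // Memoized specifier -> final redirect target.
  redirects: Mutex<HashMap<String, String>>,
}

impl SpecifierResolver {
  fn resolve(&self, specifier: &str) -> String {
    let mut redirects = self.redirects.lock().unwrap();
    if let Some(found) = redirects.get(specifier) {
      return found.clone(); // cache hit, no re-resolution
    }
    // Stand-in for following cached redirect headers up to a limit.
    let resolved = specifier.trim_end_matches('/').to_string();
    redirects.insert(specifier.to_string(), resolved.clone());
    resolved
  }
}

fn main() {
  let resolver = SpecifierResolver::default();
  let first = resolver.resolve("https://example.com/mod.ts/");
  let second = resolver.resolve("https://example.com/mod.ts/"); // served from the cache
  assert_eq!(first, second);
}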
@@ -1225,7 +1201,7 @@ mod tests {

 #[test]
 fn test_documents_open() {
-let (documents, _) = setup();
+let (mut documents, _) = setup();
 let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap();
 let content = Arc::new(
 r#"import * as b from "./b.ts";
@@ -1241,7 +1217,7 @@ console.log(b);

 #[test]
 fn test_documents_change() {
-let (documents, _) = setup();
+let (mut documents, _) = setup();
 let specifier = ModuleSpecifier::parse("file:///a.ts").unwrap();
 let content = Arc::new(
 r#"import * as b from "./b.ts";
@@ -1282,4 +1258,31 @@ console.log(b, "hello deno");
 "#
 );
 }
+
+#[test]
+fn test_documents_ensure_no_duplicates() {
+// it should never happen that a user of this API causes this to happen,
+// but we'll guard against it anyway
+let (mut documents, documents_path) = setup();
+let file_path = documents_path.join("file.ts");
+let file_specifier = ModuleSpecifier::from_file_path(&file_path).unwrap();
+fs::create_dir_all(&documents_path).unwrap();
+fs::write(&file_path, "").unwrap();
+
+// open the document
+documents.open(
+file_specifier.clone(),
+1,
+LanguageId::TypeScript,
+Default::default(),
+);
+
+// make a clone of the document store and close the document in that one
+let mut documents2 = documents.clone();
+documents2.close(&file_specifier).unwrap();
+
+// At this point the document will be in both documents and the shared file system documents.
+// Now make sure that the original documents doesn't return both copies
+assert_eq!(documents.documents(false, false).len(), 1);
+}
 }
@@ -70,7 +70,7 @@ const CACHE_PATH: &str = "deps";
 #[derive(Debug, Clone)]
 pub struct LanguageServer(Arc<tokio::sync::Mutex<Inner>>);

-#[derive(Debug, Clone, Default)]
+#[derive(Debug, Default)]
 pub(crate) struct StateSnapshot {
 pub assets: Assets,
 pub config: ConfigSnapshot,
@@ -271,14 +271,17 @@ impl Inner {
 )
 .await?;
 let navigation_tree = Arc::new(navigation_tree);
-if specifier.scheme() == "asset" {
-self
+match asset_or_doc {
+AssetOrDocument::Asset(_) => self
 .assets
-.set_navigation_tree(specifier, navigation_tree.clone())?;
-} else {
-self
-.documents
-.set_navigation_tree(specifier, navigation_tree.clone())?;
+.cache_navigation_tree(specifier, navigation_tree.clone())?,
+AssetOrDocument::Document(doc) => {
+self.documents.try_cache_navigation_tree(
+specifier,
+&doc.script_version(),
+navigation_tree.clone(),
+)?
+}
 }
 navigation_tree
 };
@@ -368,8 +371,8 @@ impl Inner {
 Ok(())
 }

-pub(crate) fn snapshot(&self) -> LspResult<StateSnapshot> {
-Ok(StateSnapshot {
+pub(crate) fn snapshot(&self) -> LspResult<Arc<StateSnapshot>> {
+Ok(Arc::new(StateSnapshot {
 assets: self.assets.clone(),
 config: self.config.snapshot().map_err(|err| {
 error!("{}", err);
@@ -382,7 +385,7 @@ impl Inner {
 module_registries: self.module_registries.clone(),
 performance: self.performance.clone(),
 url_map: self.url_map.clone(),
-})
+}))
 }

 pub fn update_cache(&mut self) -> Result<(), AnyError> {
@@ -1638,10 +1641,11 @@ impl Inner {
 // completions, we will use internal logic and if there are completions
 // for imports, we will return those and not send a message into tsc, where
 // other completions come from.
+let snapshot = self.snapshot()?;
 let response = if let Some(response) = completions::get_import_completions(
 &specifier,
 &params.text_document_position.position,
-&self.snapshot()?,
+&snapshot,
 self.client.clone(),
 )
 .await
@@ -71,7 +71,7 @@ const FILE_EXTENSION_KIND_MODIFIERS: &[&str] =

 type Request = (
 RequestMethod,
-StateSnapshot,
+Arc<StateSnapshot>,
 oneshot::Sender<Result<Value, AnyError>>,
 );
@@ -107,7 +107,7 @@ impl TsServer {

 pub(crate) async fn request<R>(
 &self,
-snapshot: StateSnapshot,
+snapshot: Arc<StateSnapshot>,
 req: RequestMethod,
 ) -> Result<R, AnyError>
 where
@@ -211,7 +211,7 @@ impl Assets {
 self.0.insert(k, v)
 }

-pub fn set_navigation_tree(
+pub fn cache_navigation_tree(
 &mut self,
 specifier: &ModuleSpecifier,
 navigation_tree: Arc<NavigationTree>,
@@ -234,7 +234,7 @@ impl Assets {
 pub(crate) async fn get_asset(
 specifier: &ModuleSpecifier,
 ts_server: &TsServer,
-state_snapshot: StateSnapshot,
+state_snapshot: Arc<StateSnapshot>,
 ) -> Result<Option<AssetDocument>, AnyError> {
 let specifier_str = specifier.to_string().replace("asset:///", "");
 if let Some(text) = tsc::get_asset(&specifier_str) {
@@ -892,7 +892,7 @@ impl RenameLocations {
 lsp::TextDocumentEdit {
 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
 uri: uri.clone(),
-version: asset_or_doc.document_version(),
+version: asset_or_doc.document_lsp_version(),
 },
 edits:
 Vec::<lsp::OneOf<lsp::TextEdit, lsp::AnnotatedTextEdit>>::new(),
@@ -1058,7 +1058,7 @@ impl FileTextChanges {
 Ok(lsp::TextDocumentEdit {
 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
 uri: specifier.clone(),
-version: asset_or_doc.document_version(),
+version: asset_or_doc.document_lsp_version(),
 },
 edits,
 })
@@ -1104,7 +1104,7 @@ impl FileTextChanges {
 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
 uri: specifier.clone(),
 version: maybe_asset_or_document
-.map(|d| d.document_version())
+.map(|d| d.document_lsp_version())
 .flatten(),
 },
 edits,
@@ -2091,13 +2091,13 @@ struct Response {
 struct State<'a> {
 last_id: usize,
 response: Option<Response>,
-state_snapshot: StateSnapshot,
+state_snapshot: Arc<StateSnapshot>,
 snapshots: HashMap<(ModuleSpecifier, Cow<'a, str>), String>,
 specifiers: HashMap<String, String>,
 }

 impl<'a> State<'a> {
-fn new(state_snapshot: StateSnapshot) -> Self {
+fn new(state_snapshot: Arc<StateSnapshot>) -> Self {
 Self {
 last_id: 1,
 response: None,
@@ -2454,7 +2454,7 @@ fn load() -> Result<JsRuntime, AnyError> {
 {
 let op_state = runtime.op_state();
 let mut op_state = op_state.borrow_mut();
-op_state.put(State::new(StateSnapshot::default()));
+op_state.put(State::new(Arc::new(StateSnapshot::default())));
 }

 runtime.register_op("op_dispose", op(op_dispose));
@@ -2888,7 +2888,7 @@ impl RequestMethod {
 /// Send a request into a runtime and return the JSON value of the response.
 pub(crate) fn request(
 runtime: &mut JsRuntime,
-state_snapshot: StateSnapshot,
+state_snapshot: Arc<StateSnapshot>,
 method: RequestMethod,
 ) -> Result<Value, AnyError> {
 let performance = state_snapshot.performance.clone();
@@ -2937,7 +2937,7 @@ mod tests {
 fixtures: &[(&str, &str, i32, LanguageId)],
 location: &Path,
 ) -> StateSnapshot {
-let documents = Documents::new(location);
+let mut documents = Documents::new(location);
 for (specifier, source, version, language_id) in fixtures {
 let specifier =
 resolve_url(specifier).expect("failed to create specifier");
@@ -2958,10 +2958,10 @@ mod tests {
 debug: bool,
 config: Value,
 sources: &[(&str, &str, i32, LanguageId)],
-) -> (JsRuntime, StateSnapshot, PathBuf) {
+) -> (JsRuntime, Arc<StateSnapshot>, PathBuf) {
 let temp_dir = TempDir::new().expect("could not create temp dir");
 let location = temp_dir.path().join("deps");
-let state_snapshot = mock_state_snapshot(sources, &location);
+let state_snapshot = Arc::new(mock_state_snapshot(sources, &location));
 let mut runtime = load().expect("could not start server");
 start(&mut runtime, debug, &state_snapshot)
 .expect("could not start server");
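Across the remaining files, `StateSnapshot` stops being `Clone` and is instead handed around as `Arc<StateSnapshot>`, so the diagnostics generators and the TypeScript server requests all share one immutable snapshot rather than deep-copying it. Below is a small illustrative sketch of that sharing with stand-in fields; it uses plain threads purely for demonstration, whereas the language server actually shares the `Arc` across async tasks.

use std::sync::Arc;
use std::thread;

#[derive(Default)]
struct StateSnapshot {
  documents: Vec<String>, // stand-in for the real snapshot fields
}

fn generate_diagnostics(snapshot: Arc<StateSnapshot>) -> usize {
  snapshot.documents.len()
}

fn main() {
  let snapshot = Arc::new(StateSnapshot::default());
  // Each consumer gets a reference-count bump, not a copy of the snapshot.
  let lint = {
    let snapshot = snapshot.clone();
    thread::spawn(move || generate_diagnostics(snapshot))
  };
  let ts = {
    let snapshot = snapshot.clone();
    thread::spawn(move || generate_diagnostics(snapshot))
  };
  assert_eq!(lint.join().unwrap() + ts.join().unwrap(), 0);
}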