lsp: Encapsulate DocumentCache better

The DocumentCache is now a specialized TypeLoader; make it provide the
necessary APIs directly, so that we can be sure nobody does anything
that breaks the data :-)
Tobias Hunger 2024-06-05 19:04:01 +02:00 committed by Tobias Hunger
parent 64d2b6117b
commit 6c034372a1
14 changed files with 444 additions and 368 deletions
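
In short, DocumentCache stops exposing its inner TypeLoader (the old public `documents` field) and becomes a newtype wrapper whose methods speak URLs instead of file paths. A minimal sketch of the new shape, condensed from the diff below (most methods, imports and error handling omitted; `uri_to_file`/`file_to_uri` are the module-local helpers shown in the first hunk):

use i_slint_compiler::diagnostics::BuildDiagnostics;
use i_slint_compiler::object_tree::Document;
use i_slint_compiler::typeloader::TypeLoader;
use i_slint_compiler::typeregister::TypeRegister;
use i_slint_compiler::CompilerConfiguration;
use lsp_types::Url;

pub struct DocumentCache(TypeLoader);

impl DocumentCache {
    pub fn new(config: CompilerConfiguration) -> Self {
        Self(TypeLoader::new(
            TypeRegister::builtin(),
            config,
            &mut BuildDiagnostics::default(),
        ))
    }

    // Callers now pass URLs; the path mapping (including the "builtin:"
    // scheme) stays inside the cache.
    pub fn get_document(&self, url: &Url) -> Option<&Document> {
        let path = uri_to_file(url)?;
        self.0.get_document(&path)
    }

    pub fn all_urls(&self) -> impl Iterator<Item = Url> + '_ {
        self.0.all_files().filter_map(|p| file_to_uri(p))
    }
}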

@ -4,14 +4,17 @@
//! Data structures common between LSP and previewer //! Data structures common between LSP and previewer
use i_slint_compiler::diagnostics::{BuildDiagnostics, SourceFile, SourceFileVersion}; use i_slint_compiler::diagnostics::{BuildDiagnostics, SourceFile, SourceFileVersion};
use i_slint_compiler::object_tree::ElementRc; use i_slint_compiler::object_tree::{Document, ElementRc};
use i_slint_compiler::parser::{syntax_nodes, SyntaxKind, SyntaxNode}; use i_slint_compiler::parser::{syntax_nodes, SyntaxKind, SyntaxNode};
use i_slint_compiler::typeloader::TypeLoader; use i_slint_compiler::typeloader::TypeLoader;
use i_slint_compiler::typeregister::TypeRegister;
use i_slint_compiler::CompilerConfiguration; use i_slint_compiler::CompilerConfiguration;
use lsp_types::{TextEdit, Url, WorkspaceEdit}; use lsp_types::{TextEdit, Url, WorkspaceEdit};
use std::path::Path;
use std::{collections::HashMap, path::PathBuf}; use std::{collections::HashMap, path::PathBuf};
pub mod component_catalog;
pub mod rename_component; pub mod rename_component;
#[cfg(test)] #[cfg(test)]
pub mod test; pub mod test;
@ -25,32 +28,142 @@ pub type UrlVersion = Option<i32>;
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
use crate::wasm_prelude::*; use crate::wasm_prelude::*;
pub fn uri_to_file(uri: &lsp_types::Url) -> Option<PathBuf> { pub fn uri_to_file(uri: &Url) -> Option<PathBuf> {
let path = uri.to_file_path().ok()?; if uri.scheme() == "builtin" {
let cleaned_path = i_slint_compiler::pathutils::clean_path(&path); let path = String::from("builtin:") + uri.path();
Some(cleaned_path) Some(PathBuf::from(path))
} else {
let path = uri.to_file_path().ok()?;
let cleaned_path = i_slint_compiler::pathutils::clean_path(&path);
Some(cleaned_path)
}
}
pub fn file_to_uri(path: &Path) -> Option<Url> {
if path.starts_with("builtin:/") {
let p_str = path.to_string_lossy();
let p_str = if &p_str[9..11] == "///" {
p_str.to_string()
} else {
let mut r = p_str.to_string();
r.insert_str(8, "//");
r
};
Url::parse(&p_str).ok()
} else {
Url::from_file_path(path).ok()
}
} }
/// A cache of loaded documents /// A cache of loaded documents
pub struct DocumentCache { pub struct DocumentCache(TypeLoader);
pub(crate) documents: TypeLoader,
}
impl DocumentCache { impl DocumentCache {
pub fn new(config: CompilerConfiguration) -> Self { pub fn new(config: CompilerConfiguration) -> Self {
let documents = TypeLoader::new( Self(TypeLoader::new(
i_slint_compiler::typeregister::TypeRegister::builtin(), i_slint_compiler::typeregister::TypeRegister::builtin(),
config, config,
&mut BuildDiagnostics::default(), &mut BuildDiagnostics::default(),
); ))
Self { documents }
} }
pub fn document_version(&self, target_uri: &lsp_types::Url) -> SourceFileVersion { pub fn new_from_typeloader(type_loader: TypeLoader) -> Self {
self.documents Self(type_loader)
}
pub fn resolve_import_path(
&self,
import_token: Option<&i_slint_compiler::parser::NodeOrToken>,
maybe_relative_path_or_url: &str,
) -> Option<(PathBuf, Option<&'static [u8]>)> {
self.0.resolve_import_path(import_token, maybe_relative_path_or_url)
}
pub fn document_version(&self, target_uri: &Url) -> SourceFileVersion {
self.0
.get_document(&uri_to_file(target_uri).unwrap_or_default()) .get_document(&uri_to_file(target_uri).unwrap_or_default())
.and_then(|doc| doc.node.as_ref()?.source_file.version()) .and_then(|doc| doc.node.as_ref()?.source_file.version())
} }
pub fn get_document(&self, url: &Url) -> Option<&Document> {
let path = uri_to_file(url)?;
self.0.get_document(&path)
}
pub fn get_document_by_path(&self, path: &Path) -> Option<&Document> {
self.0.get_document(&path)
}
pub fn get_document_for_source_file(&self, source_file: &SourceFile) -> Option<&Document> {
self.0.get_document(source_file.path())
}
pub fn all_url_documents(&self) -> impl Iterator<Item = (Url, &Document)> + '_ {
self.0.all_file_documents().filter_map(|(p, d)| Some((file_to_uri(p)?, d)))
}
pub fn all_urls(&self) -> impl Iterator<Item = Url> + '_ {
self.0.all_files().filter_map(|p| file_to_uri(p))
}
pub fn global_type_registry(&self) -> std::cell::Ref<TypeRegister> {
self.0.global_type_registry.borrow()
}
pub async fn reconfigure(
&mut self,
style: Option<String>,
include_paths: Option<Vec<PathBuf>>,
library_paths: Option<HashMap<String, PathBuf>>,
) -> Result<CompilerConfiguration> {
if style.is_none() && include_paths.is_none() && library_paths.is_none() {
return Ok(self.0.compiler_config.clone());
}
if let Some(s) = style {
if s.is_empty() {
self.0.compiler_config.style = None;
} else {
self.0.compiler_config.style = Some(s);
}
}
if let Some(ip) = include_paths {
self.0.compiler_config.include_paths = ip;
}
if let Some(lp) = library_paths {
self.0.compiler_config.library_paths = lp;
}
self.preload_builtins().await;
Ok(self.0.compiler_config.clone())
}
pub async fn preload_builtins(&mut self) {
// Always load the widgets so we can auto-complete them
let mut diag = BuildDiagnostics::default();
self.0.import_component("std-widgets.slint", "StyleMetrics", &mut diag).await;
assert!(!diag.has_error());
}
pub async fn load_url(
&mut self,
url: &Url,
version: SourceFileVersion,
content: String,
diag: &mut BuildDiagnostics,
) -> Result<()> {
let path =
uri_to_file(url).ok_or::<Error>(String::from("Failed to convert path").into())?;
self.0.load_file(&path, version, &path, content, false, diag).await;
Ok(())
}
pub fn compiler_configuration(&self) -> &CompilerConfiguration {
&self.0.compiler_config
}
} }
pub fn extract_element(node: SyntaxNode) -> Option<syntax_nodes::Element> { pub fn extract_element(node: SyntaxNode) -> Option<syntax_nodes::Element> {
@ -125,7 +238,7 @@ impl ElementRcNode {
Self::new(self.element.clone(), self.debug_index + 1) Self::new(self.element.clone(), self.debug_index + 1)
} }
pub fn find_in(element: ElementRc, path: &std::path::Path, offset: u32) -> Option<Self> { pub fn find_in(element: ElementRc, path: &Path, offset: u32) -> Option<Self> {
let debug_index = element.borrow().debug.iter().position(|d| { let debug_index = element.borrow().debug.iter().position(|d| {
u32::from(d.node.text_range().start()) == offset && d.node.source_file.path() == path u32::from(d.node.text_range().start()) == offset && d.node.source_file.path() == path
})?; })?;
@ -133,11 +246,7 @@ impl ElementRcNode {
Some(Self { element, debug_index }) Some(Self { element, debug_index })
} }
pub fn find_in_or_below( pub fn find_in_or_below(element: ElementRc, path: &Path, offset: u32) -> Option<Self> {
element: ElementRc,
path: &std::path::Path,
offset: u32,
) -> Option<Self> {
let debug_index = element.borrow().debug.iter().position(|d| { let debug_index = element.borrow().debug.iter().position(|d| {
u32::from(d.node.text_range().start()) == offset && d.node.source_file.path() == path u32::from(d.node.text_range().start()) == offset && d.node.source_file.path() == path
}); });
@ -579,3 +688,20 @@ pub mod lsp_to_editor {
let _ = fut.await; let _ = fut.await;
} }
} }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_uri_conversion_of_builtins() {
let builtin_path = PathBuf::from("builtin:/fluent/button.slint");
let url = file_to_uri(&builtin_path).unwrap();
assert_eq!(url.scheme(), "builtin");
let back_conversion = uri_to_file(&url).unwrap();
assert_eq!(back_conversion, builtin_path);
assert!(Url::from_file_path(&builtin_path).is_err());
}
}
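
The remaining files in the commit are the corresponding call-site migration: code that used to reach through the public `documents` field and juggle file paths now asks the cache directly with a URL. A condensed illustration of the pattern seen in the hunks below (identifiers abbreviated):

// before: go through the inner TypeLoader with a PathBuf
let path = common::uri_to_file(&uri)?;
let doc = document_cache.documents.get_document(&path)?;

// after: the cache handles the URL-to-path mapping itself
let doc = document_cache.get_document(&uri)?;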

@ -3,12 +3,9 @@
// cSpell: ignore descr rfind unindented // cSpell: ignore descr rfind unindented
use crate::common::{ComponentInformation, Position, PropertyChange}; use crate::common::{ComponentInformation, DocumentCache, Position, PropertyChange};
use crate::language::DocumentCache;
use i_slint_compiler::langtype::{DefaultSizeBinding, ElementType}; use i_slint_compiler::langtype::{DefaultSizeBinding, ElementType};
use i_slint_compiler::typeloader::TypeLoader;
use lsp_types::Url; use lsp_types::Url;
use std::{path::Path, rc::Rc};
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
use crate::wasm_prelude::UrlWasm; use crate::wasm_prelude::UrlWasm;
@ -117,7 +114,7 @@ fn file_local_component_info(name: &str, position: Position) -> ComponentInforma
} }
pub fn builtin_components(document_cache: &DocumentCache, result: &mut Vec<ComponentInformation>) { pub fn builtin_components(document_cache: &DocumentCache, result: &mut Vec<ComponentInformation>) {
let registry = document_cache.documents.global_type_registry.borrow(); let registry = document_cache.global_type_registry();
result.extend(registry.all_elements().iter().filter_map(|(name, ty)| match ty { result.extend(registry.all_elements().iter().filter_map(|(name, ty)| match ty {
ElementType::Builtin(b) if !b.is_internal => { ElementType::Builtin(b) if !b.is_internal => {
let fills_parent = let fills_parent =
@ -130,15 +127,14 @@ pub fn builtin_components(document_cache: &DocumentCache, result: &mut Vec<Compo
/// Fill the result with all exported components that matches the given filter /// Fill the result with all exported components that matches the given filter
pub fn all_exported_components( pub fn all_exported_components(
documents: &TypeLoader, document_cache: &DocumentCache,
filter: &mut dyn FnMut(&ComponentInformation) -> bool, filter: &mut dyn FnMut(&ComponentInformation) -> bool,
result: &mut Vec<ComponentInformation>, result: &mut Vec<ComponentInformation>,
) { ) {
for file in documents.all_files() { for url in document_cache.all_urls() {
let Some(doc) = documents.get_document(file) else { continue }; let Some(doc) = document_cache.get_document(&url) else { continue };
let is_builtin = file.starts_with("builtin:/"); let is_builtin = url.scheme() == "builtin";
let is_std_widget = is_builtin let is_std_widget = is_builtin && url.path().ends_with("/std-widgets.slint");
&& file.file_name().map(|f| f.to_str() == Some("std-widgets.slint")).unwrap_or(false);
for (exported_name, ty) in &*doc.exports { for (exported_name, ty) in &*doc.exports {
let Some(c) = ty.as_ref().left() else { let Some(c) = ty.as_ref().left() else {
@ -148,15 +144,12 @@ pub fn all_exported_components(
let to_push = if is_std_widget && !exported_name.as_str().ends_with("Impl") { let to_push = if is_std_widget && !exported_name.as_str().ends_with("Impl") {
Some(std_widgets_info(exported_name.as_str(), c.is_global())) Some(std_widgets_info(exported_name.as_str(), c.is_global()))
} else if !is_builtin { } else if !is_builtin {
let Ok(url) = Url::from_file_path(file) else {
continue;
};
let offset = let offset =
c.node.as_ref().map(|n| n.text_range().start().into()).unwrap_or_default(); c.node.as_ref().map(|n| n.text_range().start().into()).unwrap_or_default();
Some(exported_project_component_info( Some(exported_project_component_info(
exported_name.as_str(), exported_name.as_str(),
c.is_global(), c.is_global(),
Position { url, offset }, Position { url: url.clone(), offset },
)) ))
} else { } else {
continue; continue;
@ -180,20 +173,16 @@ pub fn all_exported_components(
pub fn file_local_components( pub fn file_local_components(
document_cache: &DocumentCache, document_cache: &DocumentCache,
file: &Path, url: &Url,
result: &mut Vec<ComponentInformation>, result: &mut Vec<ComponentInformation>,
) { ) {
let Ok(url) = Url::from_file_path(file) else { let Some(doc) = document_cache.get_document(url) else { return };
return;
};
let Some(doc) = document_cache.documents.get_document(file) else { return };
let exported_components = let exported_components =
doc.exports.iter().filter_map(|(_, e)| e.as_ref().left()).cloned().collect::<Vec<_>>(); doc.exports.iter().filter_map(|(_, e)| e.as_ref().left()).cloned().collect::<Vec<_>>();
for component in &*doc.inner_components { for component in &*doc.inner_components {
// component.exported_global_names is always empty since the pass populating it has not // component.exported_global_names is always empty since the pass populating it has not
// run. // run.
if !exported_components.iter().any(|rc| Rc::ptr_eq(rc, component)) { if !exported_components.iter().any(|rc| std::rc::Rc::ptr_eq(rc, component)) {
let offset = let offset =
component.node.as_ref().map(|n| n.text_range().start().into()).unwrap_or_default(); component.node.as_ref().map(|n| n.text_range().start().into()).unwrap_or_default();
result.push(file_local_component_info( result.push(file_local_component_info(
@ -229,7 +218,7 @@ mod tests {
let (dc, _, _) = crate::language::test::loaded_document_cache(r#""#.to_string()); let (dc, _, _) = crate::language::test::loaded_document_cache(r#""#.to_string());
let mut result = Default::default(); let mut result = Default::default();
all_exported_components(&dc.documents, &mut |_| true, &mut result); all_exported_components(&dc, &mut |_| true, &mut result);
assert!(result.iter().all(|ci| ci.is_std_widget)); assert!(result.iter().all(|ci| ci.is_std_widget));
assert!(result.iter().all(|ci| ci.is_exported)); assert!(result.iter().all(|ci| ci.is_exported));
@ -245,7 +234,7 @@ mod tests {
let (dc, _, _) = crate::language::test::loaded_document_cache(r#""#.to_string()); let (dc, _, _) = crate::language::test::loaded_document_cache(r#""#.to_string());
let mut result = Default::default(); let mut result = Default::default();
all_exported_components(&dc.documents, &mut |_| false, &mut result); all_exported_components(&dc, &mut |_| false, &mut result);
assert!(result.is_empty()); assert!(result.is_empty());
} }
@ -256,7 +245,7 @@ mod tests {
let (dc, _, _) = crate::language::test::loaded_document_cache(r#""#.to_string()); let (dc, _, _) = crate::language::test::loaded_document_cache(r#""#.to_string());
let mut result = Default::default(); let mut result = Default::default();
all_exported_components(&dc.documents, &mut |_| true, &mut result); all_exported_components(&dc, &mut |_| true, &mut result);
result.len() result.len()
}; };
@ -265,7 +254,7 @@ mod tests {
); );
let mut result = Default::default(); let mut result = Default::default();
all_exported_components(&dc.documents, &mut |_| true, &mut result); all_exported_components(&dc, &mut |_| true, &mut result);
assert!(result.iter().any(|ci| &ci.name == "Test1")); assert!(result.iter().any(|ci| &ci.name == "Test1"));
assert!(!result.iter().any(|ci| &ci.name == "TouchArea")); assert!(!result.iter().any(|ci| &ci.name == "TouchArea"));
@ -280,7 +269,7 @@ mod tests {
crate::language::test::loaded_document_cache(r#"component Test1 {}"#.to_string()); crate::language::test::loaded_document_cache(r#"component Test1 {}"#.to_string());
let mut result = Default::default(); let mut result = Default::default();
file_local_components(&dc, &url.to_file_path().unwrap(), &mut result); file_local_components(&dc, &url, &mut result);
assert!(result.is_empty()); // Test1 is implicitly exported! assert!(result.is_empty()); // Test1 is implicitly exported!
} }
@ -294,7 +283,7 @@ mod tests {
); );
let mut result = Default::default(); let mut result = Default::default();
file_local_components(&dc, &url.to_file_path().unwrap(), &mut result); file_local_components(&dc, &url, &mut result);
assert_eq!(result.len(), 1); assert_eq!(result.len(), 1);
let test1 = result.iter().find(|ci| &ci.name == "Test1").unwrap(); let test1 = result.iter().find(|ci| &ci.name == "Test1").unwrap();
@ -315,7 +304,7 @@ mod tests {
); );
let mut result = Default::default(); let mut result = Default::default();
file_local_components(&dc, &url.to_file_path().unwrap(), &mut result); file_local_components(&dc, &url, &mut result);
assert_eq!(result.len(), 2); assert_eq!(result.len(), 2);
let test1 = result.iter().find(|ci| &ci.name == "Test1").unwrap(); let test1 = result.iter().find(|ci| &ci.name == "Test1").unwrap();

@ -9,8 +9,11 @@ use i_slint_compiler::{
diagnostics::{SourceFile, Spanned}, diagnostics::{SourceFile, Spanned},
object_tree, object_tree,
parser::{syntax_nodes, SyntaxKind}, parser::{syntax_nodes, SyntaxKind},
typeloader::TypeLoader,
}; };
use lsp_types::Url;
#[cfg(target_arch = "wasm32")]
use crate::wasm_prelude::*;
fn symbol_export_names(document_node: &syntax_nodes::Document, type_name: &str) -> Vec<String> { fn symbol_export_names(document_node: &syntax_nodes::Document, type_name: &str) -> Vec<String> {
let mut result = vec![]; let mut result = vec![];
@ -85,30 +88,26 @@ fn replace_element_types(
} }
fn fix_imports( fn fix_imports(
type_loader: &TypeLoader, document_cache: &common::DocumentCache,
exporter_path: &Path, exporter_path: &Path,
old_type: &str, old_type: &str,
new_type: &str, new_type: &str,
edits: &mut Vec<(SourceFile, lsp_types::TextEdit)>, edits: &mut Vec<(SourceFile, lsp_types::TextEdit)>,
) { ) {
for doc in type_loader.all_documents() { let Ok(exporter_url) = Url::from_file_path(exporter_path) else {
let Some(doc_path) = return;
doc.node.as_ref().and_then(|n| n.source_file()).map(|sf| sf.path().to_owned()) };
else { for (url, doc) in document_cache.all_url_documents() {
continue; if url.scheme() == "builtin" || url.path() == exporter_url.path() {
};
if doc_path.starts_with("builtin:") {
continue; continue;
} }
if doc_path == exporter_path {
continue; fix_import_in_document(document_cache, doc, exporter_path, old_type, new_type, edits);
}
fix_import_in_document(type_loader, doc, exporter_path, old_type, new_type, edits);
} }
} }
fn fix_import_in_document( fn fix_import_in_document(
type_loader: &TypeLoader, document_cache: &common::DocumentCache,
document: &object_tree::Document, document: &object_tree::Document,
exporter_path: &Path, exporter_path: &Path,
old_type: &str, old_type: &str,
@ -182,7 +181,7 @@ fn fix_import_in_document(
} }
// Change exports // Change exports
fix_exports(type_loader, document_node, old_type, new_type, edits); fix_exports(document_cache, document_node, old_type, new_type, edits);
// Change all local usages: // Change all local usages:
change_local_element_type(document_node, old_type, new_type, edits); change_local_element_type(document_node, old_type, new_type, edits);
@ -207,7 +206,7 @@ fn change_local_element_type(
} }
fn fix_exports( fn fix_exports(
type_loader: &TypeLoader, document_cache: &common::DocumentCache,
document_node: &syntax_nodes::Document, document_node: &syntax_nodes::Document,
old_type: &str, old_type: &str,
new_type: &str, new_type: &str,
@ -252,8 +251,8 @@ fn fix_exports(
}; };
if update_imports { if update_imports {
let my_path = document_node.source_file.path().to_owned(); let my_path = document_node.source_file.path();
fix_imports(type_loader, &my_path, old_type, new_type, edits); fix_imports(document_cache, &my_path, old_type, new_type, edits);
} }
} }
} }
@ -262,13 +261,14 @@ fn fix_exports(
/// Rename a component by providing the `DeclaredIdentifier` in the component definition. /// Rename a component by providing the `DeclaredIdentifier` in the component definition.
pub fn rename_component_from_definition( pub fn rename_component_from_definition(
type_loader: &TypeLoader, document_cache: &common::DocumentCache,
identifier: &syntax_nodes::DeclaredIdentifier, identifier: &syntax_nodes::DeclaredIdentifier,
new_name: String, new_name: String,
) -> crate::Result<lsp_types::WorkspaceEdit> { ) -> crate::Result<lsp_types::WorkspaceEdit> {
let source_file = identifier.source_file().expect("Identifier had no source file"); let source_file = identifier.source_file().expect("Identifier had no source file");
let document = let document = document_cache
type_loader.get_document(source_file.path()).expect("Identifier is in unknown document"); .get_document_for_source_file(source_file)
.expect("Identifier is in unknown document");
if document.local_registry.lookup(&new_name) != i_slint_compiler::langtype::Type::Invalid { if document.local_registry.lookup(&new_name) != i_slint_compiler::langtype::Type::Invalid {
return Err(format!("{new_name} is already a registered type").into()); return Err(format!("{new_name} is already a registered type").into());
@ -304,13 +304,13 @@ pub fn rename_component_from_definition(
change_local_element_type(document_node, &component_type, &new_name, &mut edits); change_local_element_type(document_node, &component_type, &new_name, &mut edits);
// Change exports // Change exports
fix_exports(type_loader, document_node, &component_type, &new_name, &mut edits); fix_exports(document_cache, document_node, &component_type, &new_name, &mut edits);
let export_names = symbol_export_names(document_node, &component_type); let export_names = symbol_export_names(document_node, &component_type);
if export_names.contains(&component_type) { if export_names.contains(&component_type) {
let my_path = source_file.path().to_owned(); let my_path = source_file.path();
fix_imports(type_loader, &my_path, &component_type, &new_name, &mut edits); fix_imports(document_cache, &my_path, &component_type, &new_name, &mut edits);
} }
common::create_workspace_edit_from_source_files(edits) common::create_workspace_edit_from_source_files(edits)
@ -319,17 +319,18 @@ pub fn rename_component_from_definition(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use lsp_types::Url;
use super::*; use super::*;
use std::collections::HashMap; use std::collections::HashMap;
use std::path::PathBuf;
use crate::common::test; use crate::common::test;
use crate::common::text_edit; use crate::common::text_edit;
#[track_caller] #[track_caller]
fn compile_test_changes( fn compile_test_changes(
type_loader: &TypeLoader, document_cache: &common::DocumentCache,
edit: &lsp_types::WorkspaceEdit, edit: &lsp_types::WorkspaceEdit,
) -> Vec<text_edit::EditedText> { ) -> Vec<text_edit::EditedText> {
eprintln!("Edit:"); eprintln!("Edit:");
@ -338,7 +339,7 @@ mod tests {
} }
eprintln!("*** All edits reported ***"); eprintln!("*** All edits reported ***");
let changed_text = text_edit::apply_workspace_edit(&type_loader, &edit).unwrap(); let changed_text = text_edit::apply_workspace_edit(&document_cache, &edit).unwrap();
assert!(!changed_text.is_empty()); // there was a change! assert!(!changed_text.is_empty()); // there was a change!
eprintln!("After changes were applied:"); eprintln!("After changes were applied:");
@ -352,14 +353,14 @@ mod tests {
eprintln!("*** All changes reported ***"); eprintln!("*** All changes reported ***");
let code = { let code = {
let mut map: HashMap<PathBuf, String> = type_loader let mut map: HashMap<Url, String> = document_cache
.all_documents() .all_url_documents()
.filter_map(|doc| doc.node.as_ref()) .filter_map(|(url, doc)| Some((url, doc.node.as_ref()?)))
.map(|dn| dn.source_file.as_ref()) .map(|(url, dn)| (url, dn.source_file.as_ref()))
.map(|sf| (sf.path().to_owned(), sf.source().unwrap().to_string())) .map(|(url, sf)| (url, sf.source().unwrap().to_string()))
.collect(); .collect();
for ct in &changed_text { for ct in &changed_text {
map.insert(ct.url.to_file_path().unwrap(), ct.contents.clone()); map.insert(ct.url.clone(), ct.contents.clone());
} }
map map
}; };
@ -393,10 +394,10 @@ mod tests {
#[test] #[test]
fn test_rename_component_from_definition_ok() { fn test_rename_component_from_definition_ok() {
let type_loader = test::compile_test_with_sources( let document_cache = test::compile_test_with_sources(
"fluent", "fluent",
HashMap::from([( HashMap::from([(
test::main_test_file_name(), Url::from_file_path(test::main_test_file_name()).unwrap(),
r#" r#"
component Foo { } component Foo { }
@ -422,18 +423,18 @@ export component Bar {
)]), )]),
); );
let doc = type_loader.get_document(&test::main_test_file_name()).unwrap(); let doc = document_cache.get_document_by_path(&test::main_test_file_name()).unwrap();
let foo_identifier = let foo_identifier =
find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap(); find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap();
let edit = rename_component_from_definition( let edit = rename_component_from_definition(
&type_loader, &document_cache,
&foo_identifier, &foo_identifier,
"XxxYyyZzz".to_string(), "XxxYyyZzz".to_string(),
) )
.unwrap(); .unwrap();
let edited_text = compile_test_changes(&type_loader, &edit); let edited_text = compile_test_changes(&document_cache, &edit);
assert_eq!(edited_text.len(), 1); assert_eq!(edited_text.len(), 1);
assert!(edited_text[0].contents.contains("XxxYyyZzz")); assert!(edited_text[0].contents.contains("XxxYyyZzz"));
@ -442,11 +443,11 @@ export component Bar {
#[test] #[test]
fn test_rename_component_from_definition_with_renaming_export_ok() { fn test_rename_component_from_definition_with_renaming_export_ok() {
let type_loader = test::compile_test_with_sources( let document_cache = test::compile_test_with_sources(
"fluent", "fluent",
HashMap::from([ HashMap::from([
( (
test::main_test_file_name(), Url::from_file_path(test::main_test_file_name()).unwrap(),
r#" r#"
import { FExport} from "source.slint"; import { FExport} from "source.slint";
@ -457,7 +458,7 @@ export component Foo {
.to_string(), .to_string(),
), ),
( (
test::test_file_name("source.slint"), Url::from_file_path(test::test_file_name("source.slint")).unwrap(),
r#" r#"
component Foo { } component Foo { }
@ -468,18 +469,19 @@ export { Foo as FExport }
]), ]),
); );
let doc = type_loader.get_document(&test::test_file_name("source.slint")).unwrap(); let doc =
document_cache.get_document_by_path(&test::test_file_name("source.slint")).unwrap();
let foo_identifier = let foo_identifier =
find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap(); find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap();
let edit = rename_component_from_definition( let edit = rename_component_from_definition(
&type_loader, &document_cache,
&foo_identifier, &foo_identifier,
"XxxYyyZzz".to_string(), "XxxYyyZzz".to_string(),
) )
.unwrap(); .unwrap();
let edited_text = compile_test_changes(&type_loader, &edit); let edited_text = compile_test_changes(&document_cache, &edit);
assert_eq!(edited_text.len(), 1); assert_eq!(edited_text.len(), 1);
assert_eq!( assert_eq!(
@ -492,11 +494,11 @@ export { Foo as FExport }
#[test] #[test]
fn test_rename_component_from_definition_with_export_ok() { fn test_rename_component_from_definition_with_export_ok() {
let type_loader = test::compile_test_with_sources( let document_cache = test::compile_test_with_sources(
"fluent", "fluent",
HashMap::from([ HashMap::from([
( (
test::main_test_file_name(), Url::from_file_path(test::main_test_file_name()).unwrap(),
r#" r#"
import { Foo } from "source.slint"; import { Foo } from "source.slint";
import { UserComponent } from "user.slint"; import { UserComponent } from "user.slint";
@ -513,14 +515,14 @@ export component Main {
.to_string(), .to_string(),
), ),
( (
test::test_file_name("source.slint"), Url::from_file_path(test::test_file_name("source.slint")).unwrap(),
r#" r#"
export component Foo { } export component Foo { }
"# "#
.to_string(), .to_string(),
), ),
( (
test::test_file_name("user.slint"), Url::from_file_path(test::test_file_name("user.slint")).unwrap(),
r#" r#"
import { Foo as Bar } from "source.slint"; import { Foo as Bar } from "source.slint";
@ -533,7 +535,7 @@ export { Bar }
.to_string(), .to_string(),
), ),
( (
test::test_file_name("user2.slint"), Url::from_file_path(test::test_file_name("user2.slint")).unwrap(),
r#" r#"
import { Foo as XxxYyyZzz } from "source.slint"; import { Foo as XxxYyyZzz } from "source.slint";
@ -544,7 +546,7 @@ export component User2Component {
.to_string(), .to_string(),
), ),
( (
test::test_file_name("user3.slint"), Url::from_file_path(test::test_file_name("user3.slint")).unwrap(),
r#" r#"
import { Foo } from "source.slint"; import { Foo } from "source.slint";
@ -553,7 +555,7 @@ export { Foo }
.to_string(), .to_string(),
), ),
( (
test::test_file_name("user4.slint"), Url::from_file_path(test::test_file_name("user4.slint")).unwrap(),
r#" r#"
import { Foo } from "source.slint"; import { Foo } from "source.slint";
@ -564,18 +566,19 @@ export { Foo as User4Fxx }
]), ]),
); );
let doc = type_loader.get_document(&test::test_file_name("source.slint")).unwrap(); let doc =
document_cache.get_document_by_path(&test::test_file_name("source.slint")).unwrap();
let foo_identifier = let foo_identifier =
find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap(); find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap();
let edit = rename_component_from_definition( let edit = rename_component_from_definition(
&type_loader, &document_cache,
&foo_identifier, &foo_identifier,
"XxxYyyZzz".to_string(), "XxxYyyZzz".to_string(),
) )
.unwrap(); .unwrap();
let edited_text = compile_test_changes(&type_loader, &edit); let edited_text = compile_test_changes(&document_cache, &edit);
for ed in &edited_text { for ed in &edited_text {
let ed_path = ed.url.to_file_path().unwrap(); let ed_path = ed.url.to_file_path().unwrap();
@ -609,11 +612,11 @@ export { Foo as User4Fxx }
#[test] #[test]
fn test_rename_component_from_definition_import_confusion_ok() { fn test_rename_component_from_definition_import_confusion_ok() {
let type_loader = test::compile_test_with_sources( let document_cache = test::compile_test_with_sources(
"fluent", "fluent",
HashMap::from([ HashMap::from([
( (
test::main_test_file_name(), Url::from_file_path(test::main_test_file_name()).unwrap(),
r#" r#"
import { Foo as User1Fxx } from "user1.slint"; import { Foo as User1Fxx } from "user1.slint";
import { Foo as User2Fxx } from "user2.slint"; import { Foo as User2Fxx } from "user2.slint";
@ -626,14 +629,14 @@ export component Main {
.to_string(), .to_string(),
), ),
( (
test::test_file_name("user1.slint"), Url::from_file_path(test::test_file_name("user1.slint")).unwrap(),
r#" r#"
export component Foo { } export component Foo { }
"# "#
.to_string(), .to_string(),
), ),
( (
test::test_file_name("user2.slint"), Url::from_file_path(test::test_file_name("user2.slint")).unwrap(),
r#" r#"
export component Foo { } export component Foo { }
"# "#
@ -642,18 +645,19 @@ export component Foo { }
]), ]),
); );
let doc = type_loader.get_document(&test::test_file_name("user1.slint")).unwrap(); let doc =
document_cache.get_document_by_path(&test::test_file_name("user1.slint")).unwrap();
let foo_identifier = let foo_identifier =
find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap(); find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap();
let edit = rename_component_from_definition( let edit = rename_component_from_definition(
&type_loader, &document_cache,
&foo_identifier, &foo_identifier,
"XxxYyyZzz".to_string(), "XxxYyyZzz".to_string(),
) )
.unwrap(); .unwrap();
let edited_text = compile_test_changes(&type_loader, &edit); let edited_text = compile_test_changes(&document_cache, &edit);
for ed in &edited_text { for ed in &edited_text {
let ed_path = ed.url.to_file_path().unwrap(); let ed_path = ed.url.to_file_path().unwrap();
@ -668,18 +672,19 @@ export component Foo { }
} }
} }
let doc = type_loader.get_document(&test::test_file_name("user2.slint")).unwrap(); let doc =
document_cache.get_document_by_path(&test::test_file_name("user2.slint")).unwrap();
let foo_identifier = let foo_identifier =
find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap(); find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap();
let edit = rename_component_from_definition( let edit = rename_component_from_definition(
&type_loader, &document_cache,
&foo_identifier, &foo_identifier,
"XxxYyyZzz".to_string(), "XxxYyyZzz".to_string(),
) )
.unwrap(); .unwrap();
let edited_text = compile_test_changes(&type_loader, &edit); let edited_text = compile_test_changes(&document_cache, &edit);
for ed in &edited_text { for ed in &edited_text {
let ed_path = ed.url.to_file_path().unwrap(); let ed_path = ed.url.to_file_path().unwrap();
@ -697,10 +702,10 @@ export component Foo { }
#[test] #[test]
fn test_rename_component_from_definition_redefinition_error() { fn test_rename_component_from_definition_redefinition_error() {
let type_loader = test::compile_test_with_sources( let document_cache = test::compile_test_with_sources(
"fluent", "fluent",
HashMap::from([( HashMap::from([(
test::main_test_file_name(), Url::from_file_path(test::main_test_file_name()).unwrap(),
r#" r#"
struct UsedStruct { value: int, } struct UsedStruct { value: int, }
enum UsedEnum { x, y } enum UsedEnum { x, y }
@ -723,29 +728,37 @@ export component Bar {
)]), )]),
); );
let doc = type_loader.get_document(&test::main_test_file_name()).unwrap(); let doc = document_cache.get_document_by_path(&test::main_test_file_name()).unwrap();
let foo_identifier = let foo_identifier =
find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap(); find_component_declared_identifier(doc.node.as_ref().unwrap(), "Foo").unwrap();
assert!(rename_component_from_definition(&type_loader, &foo_identifier, "Foo".to_string())
.is_err());
assert!(rename_component_from_definition( assert!(rename_component_from_definition(
&type_loader, &document_cache,
&foo_identifier,
"Foo".to_string()
)
.is_err());
assert!(rename_component_from_definition(
&document_cache,
&foo_identifier, &foo_identifier,
"UsedStruct".to_string() "UsedStruct".to_string()
) )
.is_err()); .is_err());
assert!(rename_component_from_definition( assert!(rename_component_from_definition(
&type_loader, &document_cache,
&foo_identifier, &foo_identifier,
"UsedEnum".to_string() "UsedEnum".to_string()
) )
.is_err()); .is_err());
assert!(rename_component_from_definition(&type_loader, &foo_identifier, "Baz".to_string())
.is_err());
assert!(rename_component_from_definition( assert!(rename_component_from_definition(
&type_loader, &document_cache,
&foo_identifier,
"Baz".to_string()
)
.is_err());
assert!(rename_component_from_definition(
&document_cache,
&foo_identifier, &foo_identifier,
"HorizontalLayout".to_string() "HorizontalLayout".to_string()
) )
@ -754,10 +767,10 @@ export component Bar {
#[test] #[test]
fn test_exported_type_names() { fn test_exported_type_names() {
let type_loader = test::compile_test_with_sources( let document_cache = test::compile_test_with_sources(
"fluent", "fluent",
HashMap::from([( HashMap::from([(
test::main_test_file_name(), Url::from_file_path(test::main_test_file_name()).unwrap(),
r#" r#"
export component Foo {} export component Foo {}
export component Baz {} export component Baz {}
@ -775,7 +788,7 @@ export enum EnumBar { bar }
)]), )]),
); );
let doc = type_loader.get_document(&test::main_test_file_name()).unwrap(); let doc = document_cache.get_document_by_path(&test::main_test_file_name()).unwrap();
let doc = doc.node.as_ref().unwrap(); let doc = doc.node.as_ref().unwrap();
assert!(symbol_export_names(doc, "Foobar").is_empty()); assert!(symbol_export_names(doc, "Foobar").is_empty());

@ -7,12 +7,14 @@ use std::{
rc::Rc, rc::Rc,
}; };
use i_slint_compiler::{diagnostics::BuildDiagnostics, typeloader::TypeLoader}; use i_slint_compiler::diagnostics::BuildDiagnostics;
use crate::common;
async fn parse_source( async fn parse_source(
include_paths: Vec<PathBuf>, include_paths: Vec<PathBuf>,
library_paths: HashMap<String, PathBuf>, library_paths: HashMap<String, PathBuf>,
path: PathBuf, url: lsp_types::Url,
source_code: String, source_code: String,
style: String, style: String,
file_loader_fallback: impl Fn( file_loader_fallback: impl Fn(
@ -20,7 +22,7 @@ async fn parse_source(
) -> core::pin::Pin< ) -> core::pin::Pin<
Box<dyn core::future::Future<Output = Option<std::io::Result<String>>>>, Box<dyn core::future::Future<Output = Option<std::io::Result<String>>>>,
> + 'static, > + 'static,
) -> (BuildDiagnostics, TypeLoader) { ) -> (BuildDiagnostics, common::DocumentCache) {
let config = { let config = {
let mut tmp = i_slint_compiler::CompilerConfiguration::new( let mut tmp = i_slint_compiler::CompilerConfiguration::new(
i_slint_compiler::generator::OutputFormat::Llr, i_slint_compiler::generator::OutputFormat::Llr,
@ -38,15 +40,13 @@ async fn parse_source(
} }
tmp tmp
}; };
let mut document_cache = common::DocumentCache::new(config);
let mut diag = i_slint_compiler::diagnostics::BuildDiagnostics::default(); let mut diag = i_slint_compiler::diagnostics::BuildDiagnostics::default();
let global_type_registry = i_slint_compiler::typeregister::TypeRegister::builtin(); document_cache.load_url(&url, None, source_code, &mut diag).await.unwrap();
let mut type_loader = TypeLoader::new(global_type_registry, config, &mut diag); (diag, document_cache)
type_loader.load_file(&path, None, &path, source_code, false, &mut diag).await;
(diag, type_loader)
} }
pub fn test_file_prefix() -> PathBuf { pub fn test_file_prefix() -> PathBuf {
@ -64,35 +64,48 @@ pub fn test_file_name(name: &str) -> PathBuf {
} }
#[track_caller] #[track_caller]
pub fn compile_test_with_sources(style: &str, code: HashMap<PathBuf, String>) -> TypeLoader { pub fn compile_test_with_sources(
style: &str,
code: HashMap<lsp_types::Url, String>,
) -> common::DocumentCache {
i_slint_backend_testing::init_no_event_loop(); i_slint_backend_testing::init_no_event_loop();
recompile_test_with_sources(style, code) recompile_test_with_sources(style, code)
} }
#[track_caller] #[track_caller]
pub fn recompile_test_with_sources(style: &str, code: HashMap<PathBuf, String>) -> TypeLoader { pub fn recompile_test_with_sources(
style: &str,
code: HashMap<lsp_types::Url, String>,
) -> common::DocumentCache {
let code = Rc::new(code); let code = Rc::new(code);
let path = main_test_file_name(); let url = lsp_types::Url::from_file_path(main_test_file_name()).unwrap();
let source_code = code.get(&path).unwrap().clone(); let source_code = code.get(&url).unwrap().clone();
let (diagnostics, type_loader) = spin_on::spin_on(parse_source( let (diagnostics, type_loader) = spin_on::spin_on(parse_source(
vec![], vec![],
std::collections::HashMap::new(), std::collections::HashMap::new(),
path, url,
source_code.to_string(), source_code.to_string(),
style.to_string(), style.to_string(),
move |path| { move |path| {
let code = code.clone(); let code = code.clone();
let path = path.to_owned(); let url = lsp_types::Url::from_file_path(path);
Box::pin(async move { Box::pin(async move {
let Some(source) = code.get(&path) else { if let Ok(url) = url {
return Some(Result::Err(std::io::Error::new( let Some(source) = code.get(&url) else {
std::io::ErrorKind::NotFound, return Some(Result::Err(std::io::Error::new(
"path not found", std::io::ErrorKind::NotFound,
))); "path not found",
}; )));
Some(Ok(source.clone())) };
Some(Ok(source.clone()))
} else {
Some(Err(std::io::Error::new(
std::io::ErrorKind::InvalidInput,
"URL conversion failed",
)))
}
}) })
}, },
)); ));

@ -6,8 +6,6 @@ use crate::wasm_prelude::*;
use std::collections::HashMap; use std::collections::HashMap;
use i_slint_compiler::typeloader::TypeLoader;
use crate::common; use crate::common;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -250,39 +248,34 @@ pub struct EditedText {
} }
pub fn apply_workspace_edit( pub fn apply_workspace_edit(
type_loader: &TypeLoader, document_cache: &common::DocumentCache,
workspace_edit: &lsp_types::WorkspaceEdit, workspace_edit: &lsp_types::WorkspaceEdit,
) -> common::Result<Vec<EditedText>> { ) -> common::Result<Vec<EditedText>> {
let mut processing = HashMap::new(); let mut processing = HashMap::new();
for (doc, edit) in EditIterator::new(workspace_edit) { for (doc, edit) in EditIterator::new(workspace_edit) {
let Ok(path) = doc.uri.to_file_path() else {
continue;
};
// This is ugly but necessary since the constructor might error out:-/ // This is ugly but necessary since the constructor might error out:-/
if !processing.contains_key(&path) { if !processing.contains_key(&doc.uri) {
let Some(document) = type_loader.get_document(&path) else { let Some(document) = document_cache.get_document(&doc.uri) else {
continue; continue;
}; };
let Some(document_node) = &document.node else { let Some(document_node) = &document.node else {
continue; continue;
}; };
let editor = TextEditor::new(document_node.source_file.clone())?; let editor = TextEditor::new(document_node.source_file.clone())?;
processing.insert(path.clone(), editor); processing.insert(doc.uri.clone(), editor);
} }
processing processing
.get_mut(&path) .get_mut(&doc.uri)
.expect("just added if missing") .expect("just added if missing")
.apply_versioned(edit, doc.version)?; .apply_versioned(edit, doc.version)?;
} }
Ok(processing Ok(processing
.drain() .drain()
.filter_map(|(k, v)| { .filter_map(|(url, v)| {
let edit_result = v.finalize()?; let edit_result = v.finalize()?;
let url = lsp_types::Url::from_file_path(k).ok()?;
Some(EditedText { Some(EditedText {
url, url,
contents: edit_result.0, contents: edit_result.0,

@ -4,7 +4,6 @@
// cSpell: ignore descr rfind unindented // cSpell: ignore descr rfind unindented
pub mod completion; pub mod completion;
mod component_catalog;
mod formatting; mod formatting;
mod goto; mod goto;
pub mod properties; pub mod properties;
@ -12,14 +11,13 @@ mod semantic_tokens;
#[cfg(test)] #[cfg(test)]
pub mod test; pub mod test;
use crate::common::{self, DocumentCache, Result}; use crate::common::{self, component_catalog, DocumentCache, Result};
use crate::util; use crate::util;
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
use crate::wasm_prelude::*; use crate::wasm_prelude::*;
use i_slint_compiler::object_tree::ElementRc; use i_slint_compiler::object_tree::ElementRc;
use i_slint_compiler::parser::{syntax_nodes, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken}; use i_slint_compiler::parser::{syntax_nodes, NodeOrToken, SyntaxKind, SyntaxNode, SyntaxToken};
use i_slint_compiler::typeloader::TypeLoader;
use i_slint_compiler::{diagnostics::BuildDiagnostics, langtype::Type}; use i_slint_compiler::{diagnostics::BuildDiagnostics, langtype::Type};
use lsp_types::request::{ use lsp_types::request::{
CodeActionRequest, CodeLensRequest, ColorPresentationRequest, Completion, DocumentColor, CodeActionRequest, CodeLensRequest, ColorPresentationRequest, Completion, DocumentColor,
@ -71,17 +69,10 @@ fn create_show_preview_command(
#[cfg(any(feature = "preview-external", feature = "preview-engine"))] #[cfg(any(feature = "preview-external", feature = "preview-engine"))]
pub fn request_state(ctx: &std::rc::Rc<Context>) { pub fn request_state(ctx: &std::rc::Rc<Context>) {
let cache = ctx.document_cache.borrow(); let document_cache = ctx.document_cache.borrow();
let documents = &cache.documents;
for (p, d) in documents.all_file_documents() { for (url, d) in document_cache.all_url_documents() {
if let Some(node) = &d.node { if let Some(node) = &d.node {
if p.starts_with("builtin:/") {
continue; // The preview knows these, too.
}
let Ok(url) = Url::from_file_path(p) else {
continue;
};
ctx.server_notifier.send_message_to_preview(common::LspToPreviewMessage::SetContents { ctx.server_notifier.send_message_to_preview(common::LspToPreviewMessage::SetContents {
url: common::VersionedUrl::new(url, node.source_file.version()), url: common::VersionedUrl::new(url, node.source_file.version()),
contents: node.text().to_string(), contents: node.text().to_string(),
@ -374,7 +365,7 @@ pub fn register_request_handlers(rh: &mut RequestHandler) {
match p.kind() { match p.kind() {
SyntaxKind::DeclaredIdentifier => { SyntaxKind::DeclaredIdentifier => {
common::rename_component::rename_component_from_definition( common::rename_component::rename_component_from_definition(
&document_cache.documents, &document_cache,
&p.into(), &p.into(),
params.new_name, params.new_name,
) )
@ -413,7 +404,7 @@ pub fn register_request_handlers(rh: &mut RequestHandler) {
#[cfg(any(feature = "preview-builtin", feature = "preview-external"))] #[cfg(any(feature = "preview-builtin", feature = "preview-external"))]
pub fn show_preview_command(params: &[serde_json::Value], ctx: &Rc<Context>) -> Result<()> { pub fn show_preview_command(params: &[serde_json::Value], ctx: &Rc<Context>) -> Result<()> {
let document_cache = &mut ctx.document_cache.borrow_mut(); let document_cache = &mut ctx.document_cache.borrow_mut();
let config = &document_cache.documents.compiler_config; let config = document_cache.compiler_configuration();
let e = || "InvalidParameter"; let e = || "InvalidParameter";
@ -462,9 +453,7 @@ pub fn query_properties_command(
.expect("Failed to serialize none-element property query result!")); .expect("Failed to serialize none-element property query result!"));
}; };
if let Some(element) = if let Some(element) = element_at_position(&document_cache, &text_document_uri, &position) {
element_at_position(&document_cache.documents, &text_document_uri, &position)
{
properties::query_properties(&text_document_uri, source_version, &element) properties::query_properties(&text_document_uri, source_version, &element)
.map(|r| serde_json::to_value(r).expect("Failed to serialize property query result!")) .map(|r| serde_json::to_value(r).expect("Failed to serialize property query result!"))
} else { } else {
@ -516,8 +505,8 @@ pub async fn set_binding_command(
} }
} }
let element = element_at_position(&document_cache.documents, &uri, &element_range.start) let element =
.ok_or_else(|| { element_at_position(&document_cache, &uri, &element_range.start).ok_or_else(|| {
format!("No element found at the given start position {:?}", &element_range.start) format!("No element found at the given start position {:?}", &element_range.start)
})?; })?;
@ -601,8 +590,8 @@ pub async fn remove_binding_command(
} }
} }
let element = element_at_position(&document_cache.documents, &uri, &element_range.start) let element =
.ok_or_else(|| { element_at_position(&document_cache, &uri, &element_range.start).ok_or_else(|| {
format!("No element found at the given start position {:?}", &element_range.start) format!("No element found at the given start position {:?}", &element_range.start)
})?; })?;
@ -664,12 +653,12 @@ pub(crate) async fn reload_document_impl(
if let Some(ctx) = ctx { if let Some(ctx) = ctx {
ctx.server_notifier.send_message_to_preview(common::LspToPreviewMessage::SetContents { ctx.server_notifier.send_message_to_preview(common::LspToPreviewMessage::SetContents {
url: common::VersionedUrl::new(url, version), url: common::VersionedUrl::new(url.clone(), version),
contents: content.clone(), contents: content.clone(),
}); });
} }
let mut diag = BuildDiagnostics::default(); let mut diag = BuildDiagnostics::default();
document_cache.documents.load_file(&path, version, &path, content, false, &mut diag).await; let _ = document_cache.load_url(&url, version, content, &mut diag).await; // ignore url conversion errors
// Always provide diagnostics for all files. Empty diagnostics clear any previous ones. // Always provide diagnostics for all files. Empty diagnostics clear any previous ones.
let mut lsp_diags: HashMap<Url, Vec<lsp_types::Diagnostic>> = core::iter::once(&path) let mut lsp_diags: HashMap<Url, Vec<lsp_types::Diagnostic>> = core::iter::once(&path)
@ -696,7 +685,7 @@ fn report_known_components(document_cache: &mut DocumentCache, ctx: &Rc<Context>
let mut components = Vec::new(); let mut components = Vec::new();
component_catalog::builtin_components(document_cache, &mut components); component_catalog::builtin_components(document_cache, &mut components);
component_catalog::all_exported_components( component_catalog::all_exported_components(
&document_cache.documents, &document_cache,
&mut |ci| !ci.is_global, &mut |ci| !ci.is_global,
&mut components, &mut components,
); );
@ -706,9 +695,7 @@ fn report_known_components(document_cache: &mut DocumentCache, ctx: &Rc<Context>
let url = ctx.to_show.borrow().as_ref().map(|pc| { let url = ctx.to_show.borrow().as_ref().map(|pc| {
let url = pc.url.clone(); let url = pc.url.clone();
let version = document_cache.document_version(&url); let version = document_cache.document_version(&url);
if let Ok(file) = url.to_file_path() { component_catalog::file_local_components(document_cache, &url, &mut components);
component_catalog::file_local_components(document_cache, &file, &mut components);
}
common::VersionedUrl::new(url, version) common::VersionedUrl::new(url, version)
}); });
@ -740,12 +727,11 @@ pub async fn reload_document(
} }
fn get_document_and_offset<'a>( fn get_document_and_offset<'a>(
type_loader: &'a TypeLoader, document_cache: &'a DocumentCache,
text_document_uri: &'a Url, text_document_uri: &'_ Url,
pos: &'a Position, pos: &'_ Position,
) -> Option<(&'a i_slint_compiler::object_tree::Document, u32)> { ) -> Option<(&'a i_slint_compiler::object_tree::Document, u32)> {
let path = common::uri_to_file(text_document_uri)?; let doc = document_cache.get_document(&text_document_uri)?;
let doc = type_loader.get_document(&path)?;
let o = doc.node.as_ref()?.source_file.offset(pos.line as usize + 1, pos.character as usize + 1) let o = doc.node.as_ref()?.source_file.offset(pos.line as usize + 1, pos.character as usize + 1)
as u32; as u32;
doc.node.as_ref()?.text_range().contains_inclusive(o.into()).then_some((doc, o)) doc.node.as_ref()?.text_range().contains_inclusive(o.into()).then_some((doc, o))
@ -769,11 +755,11 @@ fn element_node_contains(element: &common::ElementRcNode, offset: u32) -> bool {
} }
pub fn element_at_position( pub fn element_at_position(
type_loader: &TypeLoader, document_cache: &DocumentCache,
text_document_uri: &Url, text_document_uri: &Url,
pos: &Position, pos: &Position,
) -> Option<common::ElementRcNode> { ) -> Option<common::ElementRcNode> {
let (doc, offset) = get_document_and_offset(type_loader, text_document_uri, pos)?; let (doc, offset) = get_document_and_offset(document_cache, text_document_uri, pos)?;
for component in &doc.inner_components { for component in &doc.inner_components {
let root_element = component.root_element.clone(); let root_element = component.root_element.clone();
@ -807,7 +793,7 @@ fn token_descr(
text_document_uri: &Url, text_document_uri: &Url,
pos: &Position, pos: &Position,
) -> Option<(SyntaxToken, u32)> { ) -> Option<(SyntaxToken, u32)> {
let (doc, o) = get_document_and_offset(&document_cache.documents, text_document_uri, pos)?; let (doc, o) = get_document_and_offset(&document_cache, text_document_uri, pos)?;
let node = doc.node.as_ref()?; let node = doc.node.as_ref()?;
let token = token_at_offset(node, o)?; let token = token_at_offset(node, o)?;
@ -900,10 +886,9 @@ fn get_code_actions(
&& node.parent().map(|n| n.kind()) == Some(SyntaxKind::Element) && node.parent().map(|n| n.kind()) == Some(SyntaxKind::Element)
{ {
let is_lookup_error = { let is_lookup_error = {
let global_tr = document_cache.documents.global_type_registry.borrow(); let global_tr = document_cache.global_type_registry();
let tr = document_cache let tr = document_cache
.documents .get_document_for_source_file(&token.source_file)
.get_document(token.source_file.path())
.map(|doc| &doc.local_registry) .map(|doc| &doc.local_registry)
.unwrap_or(&global_tr); .unwrap_or(&global_tr);
util::lookup_current_element_type(node.clone(), tr).is_none() util::lookup_current_element_type(node.clone(), tr).is_none()
@ -913,7 +898,7 @@ fn get_code_actions(
let text = token.text(); let text = token.text();
completion::build_import_statements_edits( completion::build_import_statements_edits(
&token, &token,
&document_cache.documents, &document_cache,
&mut |ci| !ci.is_global && ci.is_exported && ci.name == text, &mut |ci| !ci.is_global && ci.is_exported && ci.name == text,
&mut |_name, file, edit| { &mut |_name, file, edit| {
result.push(CodeActionOrCommand::CodeAction(lsp_types::CodeAction { result.push(CodeActionOrCommand::CodeAction(lsp_types::CodeAction {
@ -931,7 +916,7 @@ fn get_code_actions(
if has_experimental_client_capability(client_capabilities, "snippetTextEdit") { if has_experimental_client_capability(client_capabilities, "snippetTextEdit") {
let r = util::map_range(&token.source_file, node.parent().unwrap().text_range()); let r = util::map_range(&token.source_file, node.parent().unwrap().text_range());
let element = element_at_position(&document_cache.documents, &uri, &r.start); let element = element_at_position(&document_cache, &uri, &r.start);
let element_indent = element.as_ref().and_then(util::find_element_indent); let element_indent = element.as_ref().and_then(util::find_element_indent);
let indented_lines = node let indented_lines = node
.parent() .parent()
@ -1067,8 +1052,7 @@ fn get_document_color(
text_document: &lsp_types::TextDocumentIdentifier, text_document: &lsp_types::TextDocumentIdentifier,
) -> Option<Vec<ColorInformation>> { ) -> Option<Vec<ColorInformation>> {
let mut result = Vec::new(); let mut result = Vec::new();
let uri_path = common::uri_to_file(&text_document.uri)?; let doc = document_cache.get_document(&text_document.uri)?;
let doc = document_cache.documents.get_document(&uri_path)?;
let root_node = doc.node.as_ref()?; let root_node = doc.node.as_ref()?;
let mut token = root_node.first_token()?; let mut token = root_node.first_token()?;
loop { loop {
@ -1101,8 +1085,7 @@ fn get_document_symbols(
document_cache: &mut DocumentCache, document_cache: &mut DocumentCache,
text_document: &lsp_types::TextDocumentIdentifier, text_document: &lsp_types::TextDocumentIdentifier,
) -> Option<DocumentSymbolResponse> { ) -> Option<DocumentSymbolResponse> {
let uri_path = common::uri_to_file(&text_document.uri)?; let doc = document_cache.get_document(&text_document.uri)?;
let doc = document_cache.documents.get_document(&uri_path)?;
// DocumentSymbol doesn't implement default and some field depends on features or are deprecated // DocumentSymbol doesn't implement default and some field depends on features or are deprecated
let ds: DocumentSymbol = serde_json::from_value( let ds: DocumentSymbol = serde_json::from_value(
@ -1196,8 +1179,7 @@ fn get_code_lenses(
text_document: &lsp_types::TextDocumentIdentifier, text_document: &lsp_types::TextDocumentIdentifier,
) -> Option<Vec<CodeLens>> { ) -> Option<Vec<CodeLens>> {
if cfg!(any(feature = "preview-builtin", feature = "preview-external")) { if cfg!(any(feature = "preview-builtin", feature = "preview-external")) {
let filepath = common::uri_to_file(&text_document.uri)?; let doc = document_cache.get_document(&text_document.uri)?;
let doc = document_cache.documents.get_document(&filepath)?;
let inner_components = doc.inner_components.clone(); let inner_components = doc.inner_components.clone();
@ -1308,40 +1290,47 @@ pub async fn load_configuration(ctx: &Context) -> Result<()> {
)? )?
.await?; .await?;
let document_cache = &mut ctx.document_cache.borrow_mut(); let (hide_ui, include_paths, library_paths, style) = {
let mut hide_ui = None; let mut hide_ui = None;
for v in r { let mut include_paths = None;
if let Some(o) = v.as_object() { let mut library_paths = None;
if let Some(ip) = o.get("includePaths").and_then(|v| v.as_array()) { let mut style = None;
if !ip.is_empty() {
document_cache.documents.compiler_config.include_paths = for v in r {
ip.iter().filter_map(|x| x.as_str()).map(PathBuf::from).collect(); if let Some(o) = v.as_object() {
if let Some(ip) = o.get("includePaths").and_then(|v| v.as_array()) {
if !ip.is_empty() {
include_paths =
Some(ip.iter().filter_map(|x| x.as_str()).map(PathBuf::from).collect());
}
} }
} if let Some(lp) = o.get("libraryPaths").and_then(|v| v.as_object()) {
if let Some(lp) = o.get("libraryPaths").and_then(|v| v.as_object()) { if !lp.is_empty() {
if !lp.is_empty() { library_paths = Some(
document_cache.documents.compiler_config.library_paths = lp lp.iter()
.iter() .filter_map(|(k, v)| {
.filter_map(|(k, v)| v.as_str().map(|v| (k.to_string(), PathBuf::from(v)))) v.as_str().map(|v| (k.to_string(), PathBuf::from(v)))
.collect(); })
.collect(),
);
}
} }
} if let Some(s) =
if let Some(style) = o.get("preview").and_then(|v| v.as_object()?.get("style")?.as_str())
o.get("preview").and_then(|v| v.as_object()?.get("style")?.as_str()) {
{ if !s.is_empty() {
if !style.is_empty() { style = Some(s.to_string());
document_cache.documents.compiler_config.style = Some(style.into()); }
} }
hide_ui = o.get("preview").and_then(|v| v.as_object()?.get("hide_ui")?.as_bool());
} }
hide_ui = o.get("preview").and_then(|v| v.as_object()?.get("hide_ui")?.as_bool());
} }
} (hide_ui, include_paths, library_paths, style)
};
// Always load the widgets so we can auto-complete them let document_cache = &mut ctx.document_cache.borrow_mut();
let mut diag = BuildDiagnostics::default(); let cc = document_cache.reconfigure(style, include_paths, library_paths).await?;
document_cache.documents.import_component("std-widgets.slint", "StyleMetrics", &mut diag).await;
let cc = &document_cache.documents.compiler_config;
let config = common::PreviewConfig { let config = common::PreviewConfig {
hide_ui, hide_ui,
style: cc.style.clone().unwrap_or_default(), style: cc.style.clone().unwrap_or_default(),
@ -1434,7 +1423,7 @@ pub mod tests {
line: u32, line: u32,
character: u32, character: u32,
) -> Option<String> { ) -> Option<String> {
let result = element_at_position(&dc.documents, url, &Position { line, character })?; let result = element_at_position(&dc, url, &Position { line, character })?;
let element = result.element.borrow(); let element = result.element.borrow();
Some(element.id.clone()) Some(element.id.clone())
} }
@ -1445,7 +1434,7 @@ pub mod tests {
line: u32, line: u32,
character: u32, character: u32,
) -> Option<String> { ) -> Option<String> {
let result = element_at_position(&dc.documents, url, &Position { line, character })?; let result = element_at_position(&dc, url, &Position { line, character })?;
let element = result.element.borrow(); let element = result.element.borrow();
Some(format!("{}", &element.base_type)) Some(format!("{}", &element.base_type))
} }
@ -1619,7 +1608,7 @@ enum {}
.unwrap(); .unwrap();
let check_start_with = |pos, str: &str| { let check_start_with = |pos, str: &str| {
let (_, offset) = get_document_and_offset(&dc.documents, &uri, &pos).unwrap(); let (_, offset) = get_document_and_offset(&dc, &uri, &pos).unwrap();
assert_eq!(&source[offset as usize..][..str.len()], str); assert_eq!(&source[offset as usize..][..str.len()], str);
}; };

View file

@ -4,7 +4,7 @@
// cSpell: ignore rfind // cSpell: ignore rfind
use super::component_catalog::all_exported_components; use super::component_catalog::all_exported_components;
use crate::common::{ComponentInformation, DocumentCache}; use crate::common::{self, DocumentCache};
use crate::util::{lookup_current_element_type, map_position, with_lookup_ctx}; use crate::util::{lookup_current_element_type, map_position, with_lookup_ctx};
#[cfg(target_arch = "wasm32")] #[cfg(target_arch = "wasm32")]
@ -15,7 +15,6 @@ use i_slint_compiler::langtype::{ElementType, Type};
use i_slint_compiler::lookup::{LookupCtx, LookupObject, LookupResult}; use i_slint_compiler::lookup::{LookupCtx, LookupObject, LookupResult};
use i_slint_compiler::object_tree::ElementRc; use i_slint_compiler::object_tree::ElementRc;
use i_slint_compiler::parser::{syntax_nodes, SyntaxKind, SyntaxToken}; use i_slint_compiler::parser::{syntax_nodes, SyntaxKind, SyntaxToken};
use i_slint_compiler::typeloader::TypeLoader;
use lsp_types::{ use lsp_types::{
CompletionClientCapabilities, CompletionItem, CompletionItemKind, InsertTextFormat, Position, CompletionClientCapabilities, CompletionItem, CompletionItemKind, InsertTextFormat, Position,
Range, TextEdit, Range, TextEdit,
@ -130,12 +129,7 @@ pub(crate) fn completion_at(
} }
if !is_global && snippet_support { if !is_global && snippet_support {
add_components_to_import( add_components_to_import(&token, &document_cache, available_types, &mut r);
&token,
&document_cache.documents,
available_types,
&mut r,
);
} }
r r
@ -143,9 +137,8 @@ pub(crate) fn completion_at(
} else if let Some(n) = syntax_nodes::Binding::new(node.clone()) { } else if let Some(n) = syntax_nodes::Binding::new(node.clone()) {
if let Some(colon) = n.child_token(SyntaxKind::Colon) { if let Some(colon) = n.child_token(SyntaxKind::Colon) {
if offset >= colon.text_range().end().into() { if offset >= colon.text_range().end().into() {
return with_lookup_ctx(&document_cache.documents, node, |ctx| { return with_lookup_ctx(&document_cache, node, |ctx| {
resolve_expression_scope(ctx, &document_cache.documents, snippet_support) resolve_expression_scope(ctx, &document_cache, snippet_support).map(Into::into)
.map(Into::into)
})?; })?;
} }
} }
@ -164,8 +157,8 @@ pub(crate) fn completion_at(
if offset < double_arrow_range.end().into() { if offset < double_arrow_range.end().into() {
return None; return None;
} }
return with_lookup_ctx(&document_cache.documents, node, |ctx| { return with_lookup_ctx(&document_cache, node, |ctx| {
resolve_expression_scope(ctx, &document_cache.documents, snippet_support) resolve_expression_scope(ctx, &document_cache, snippet_support)
})?; })?;
} else if let Some(n) = syntax_nodes::CallbackConnection::new(node.clone()) { } else if let Some(n) = syntax_nodes::CallbackConnection::new(node.clone()) {
if token.kind() != SyntaxKind::Identifier { if token.kind() != SyntaxKind::Identifier {
@ -237,18 +230,17 @@ pub(crate) fn completion_at(
); );
} }
return with_lookup_ctx(&document_cache.documents, node, |ctx| { return with_lookup_ctx(&document_cache, node, |ctx| {
resolve_expression_scope(ctx, &document_cache.documents, snippet_support) resolve_expression_scope(ctx, &document_cache, snippet_support).map(Into::into)
.map(Into::into)
})?; })?;
} else if let Some(q) = syntax_nodes::QualifiedName::new(node.clone()) { } else if let Some(q) = syntax_nodes::QualifiedName::new(node.clone()) {
match q.parent()?.kind() { match q.parent()?.kind() {
SyntaxKind::Element => { SyntaxKind::Element => {
// auto-complete the components // auto-complete the components
let global_tr = document_cache.documents.global_type_registry.borrow(); let global_tr = document_cache.global_type_registry();
let tr = q let tr = q
.source_file() .source_file()
.and_then(|sf| document_cache.documents.get_document(sf.path())) .and_then(|sf| document_cache.get_document_for_source_file(sf))
.map(|doc| &doc.local_registry) .map(|doc| &doc.local_registry)
.unwrap_or(&global_tr); .unwrap_or(&global_tr);
@ -271,12 +263,7 @@ pub(crate) fn completion_at(
if snippet_support { if snippet_support {
let available_types = result.iter().map(|c| c.label.clone()).collect(); let available_types = result.iter().map(|c| c.label.clone()).collect();
add_components_to_import( add_components_to_import(&token, &document_cache, available_types, &mut result);
&token,
&document_cache.documents,
available_types,
&mut result,
);
} }
return Some(result); return Some(result);
@ -285,19 +272,15 @@ pub(crate) fn completion_at(
return resolve_type_scope(token, document_cache).map(Into::into); return resolve_type_scope(token, document_cache).map(Into::into);
} }
SyntaxKind::Expression => { SyntaxKind::Expression => {
return with_lookup_ctx(&document_cache.documents, node, |ctx| { return with_lookup_ctx(&document_cache, node, |ctx| {
let it = q.children_with_tokens().filter_map(|t| t.into_token()); let it = q.children_with_tokens().filter_map(|t| t.into_token());
let mut it = it.skip_while(|t| { let mut it = it.skip_while(|t| {
t.kind() != SyntaxKind::Identifier && t.token != token.token t.kind() != SyntaxKind::Identifier && t.token != token.token
}); });
let first = it.next(); let first = it.next();
if first.as_ref().map_or(true, |f| f.token == token.token) { if first.as_ref().map_or(true, |f| f.token == token.token) {
return resolve_expression_scope( return resolve_expression_scope(ctx, &document_cache, snippet_support)
ctx, .map(Into::into);
&document_cache.documents,
snippet_support,
)
.map(Into::into);
} }
let first = i_slint_compiler::parser::normalize_identifier(first?.text()); let first = i_slint_compiler::parser::normalize_identifier(first?.text());
let global = i_slint_compiler::lookup::global_lookup(); let global = i_slint_compiler::lookup::global_lookup();
@ -331,13 +314,12 @@ pub(crate) fn completion_at(
let import = syntax_nodes::ImportSpecifier::new(node.parent()?)?; let import = syntax_nodes::ImportSpecifier::new(node.parent()?)?;
let path = document_cache let path = document_cache
.documents
.resolve_import_path( .resolve_import_path(
Some(&token.into()), Some(&token.into()),
import.child_text(SyntaxKind::StringLiteral)?.trim_matches('\"'), import.child_text(SyntaxKind::StringLiteral)?.trim_matches('\"'),
)? )?
.0; .0;
let doc = document_cache.documents.get_document(&path)?; let doc = document_cache.get_document_by_path(&path)?;
return Some( return Some(
doc.exports doc.exports
.iter() .iter()
@ -404,7 +386,7 @@ pub(crate) fn completion_at(
.collect(); .collect();
return Some(r); return Some(r);
} else if node.kind() == SyntaxKind::PropertyAnimation { } else if node.kind() == SyntaxKind::PropertyAnimation {
let global_tr = document_cache.documents.global_type_registry.borrow(); let global_tr = document_cache.global_type_registry();
let r = global_tr let r = global_tr
.property_animation_type_for_property(Type::Float32) .property_animation_type_for_property(Type::Float32)
.property_list() .property_list()
@ -440,10 +422,10 @@ fn resolve_element_scope(
element: syntax_nodes::Element, element: syntax_nodes::Element,
document_cache: &DocumentCache, document_cache: &DocumentCache,
) -> Option<Vec<CompletionItem>> { ) -> Option<Vec<CompletionItem>> {
let global_tr = document_cache.documents.global_type_registry.borrow(); let global_tr = document_cache.global_type_registry();
let tr = element let tr = element
.source_file() .source_file()
.and_then(|sf| document_cache.documents.get_document(sf.path())) .and_then(|sf| document_cache.get_document_for_source_file(sf))
.map(|doc| &doc.local_registry) .map(|doc| &doc.local_registry)
.unwrap_or(&global_tr); .unwrap_or(&global_tr);
let element_type = lookup_current_element_type((*element).clone(), tr).unwrap_or_default(); let element_type = lookup_current_element_type((*element).clone(), tr).unwrap_or_default();
@ -536,7 +518,7 @@ fn de_normalize_property_name_with_element<'a>(element: &ElementRc, prop: &'a st
fn resolve_expression_scope( fn resolve_expression_scope(
lookup_context: &LookupCtx, lookup_context: &LookupCtx,
documents: &TypeLoader, document_cache: &common::DocumentCache,
snippet_support: bool, snippet_support: bool,
) -> Option<Vec<CompletionItem>> { ) -> Option<Vec<CompletionItem>> {
let mut r = Vec::new(); let mut r = Vec::new();
@ -553,8 +535,8 @@ fn resolve_expression_scope(
let mut available_types: HashSet<String> = r.iter().map(|c| c.label.clone()).collect(); let mut available_types: HashSet<String> = r.iter().map(|c| c.label.clone()).collect();
build_import_statements_edits( build_import_statements_edits(
&token, &token,
documents, document_cache,
&mut |ci: &ComponentInformation| { &mut |ci: &common::ComponentInformation| {
if !ci.is_global || !ci.is_exported { if !ci.is_global || !ci.is_exported {
false false
} else if available_types.contains(&ci.name) { } else if available_types.contains(&ci.name) {
@ -630,10 +612,10 @@ fn resolve_type_scope(
token: SyntaxToken, token: SyntaxToken,
document_cache: &DocumentCache, document_cache: &DocumentCache,
) -> Option<Vec<CompletionItem>> { ) -> Option<Vec<CompletionItem>> {
let global_tr = document_cache.documents.global_type_registry.borrow(); let global_tr = document_cache.global_type_registry();
let tr = token let tr = token
.source_file() .source_file()
.and_then(|sf| document_cache.documents.get_document(sf.path())) .and_then(|sf| document_cache.get_document_for_source_file(sf))
.map(|doc| &doc.local_registry) .map(|doc| &doc.local_registry)
.unwrap_or(&global_tr); .unwrap_or(&global_tr);
Some( Some(
@ -686,14 +668,14 @@ fn complete_path_in_string(base: &Path, text: &str, offset: u32) -> Option<Vec<C
/// import and should already be in result /// import and should already be in result
fn add_components_to_import( fn add_components_to_import(
token: &SyntaxToken, token: &SyntaxToken,
documents: &TypeLoader, document_cache: &common::DocumentCache,
mut available_types: HashSet<String>, mut available_types: HashSet<String>,
result: &mut Vec<CompletionItem>, result: &mut Vec<CompletionItem>,
) { ) {
build_import_statements_edits( build_import_statements_edits(
token, token,
documents, document_cache,
&mut |ci: &ComponentInformation| { &mut |ci: &common::ComponentInformation| {
if ci.is_global || !ci.is_exported { if ci.is_global || !ci.is_exported {
false false
} else if available_types.contains(&ci.name) { } else if available_types.contains(&ci.name) {
@ -836,19 +818,20 @@ pub fn create_import_edit(
/// Call `add_edit` with the component name and file name and TextEdit for every component for which the `filter` callback returns true /// Call `add_edit` with the component name and file name and TextEdit for every component for which the `filter` callback returns true
pub fn build_import_statements_edits( pub fn build_import_statements_edits(
token: &SyntaxToken, token: &SyntaxToken,
documents: &TypeLoader, document_cache: &common::DocumentCache,
filter: &mut dyn FnMut(&ComponentInformation) -> bool, filter: &mut dyn FnMut(&common::ComponentInformation) -> bool,
add_edit: &mut dyn FnMut(&str, &str, TextEdit), add_edit: &mut dyn FnMut(&str, &str, TextEdit),
) -> Option<()> { ) -> Option<()> {
// Find out types that can be imported // Find out types that can be imported
let current_file = token.source_file.path().to_owned(); let current_file = token.source_file.path().to_owned();
let current_uri = lsp_types::Url::from_file_path(&current_file).ok(); let current_uri = lsp_types::Url::from_file_path(&current_file).ok();
let current_doc = documents.get_document(&current_file)?.node.as_ref()?; let current_doc =
document_cache.get_document_for_source_file(&token.source_file)?.node.as_ref()?;
let (missing_import_location, known_import_locations) = find_import_locations(current_doc); let (missing_import_location, known_import_locations) = find_import_locations(current_doc);
let exports = { let exports = {
let mut tmp = Vec::new(); let mut tmp = Vec::new();
all_exported_components(documents, filter, &mut tmp); all_exported_components(document_cache, filter, &mut tmp);
tmp tmp
}; };
@ -883,7 +866,6 @@ fn is_followed_by_brace(token: &SyntaxToken) -> bool {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::common::uri_to_file;
/// Given a source text containing the unicode emoji `🔺`, the emoji will be removed and then an autocompletion request will be done as if the cursor was there /// Given a source text containing the unicode emoji `🔺`, the emoji will be removed and then an autocompletion request will be done as if the cursor was there
fn get_completions(file: &str) -> Option<Vec<CompletionItem>> { fn get_completions(file: &str) -> Option<Vec<CompletionItem>> {
@ -892,7 +874,7 @@ mod tests {
let source = file.replace(CURSOR_EMOJI, ""); let source = file.replace(CURSOR_EMOJI, "");
let (mut dc, uri, _) = crate::language::test::loaded_document_cache(source); let (mut dc, uri, _) = crate::language::test::loaded_document_cache(source);
let doc = dc.documents.get_document(&uri_to_file(&uri).unwrap()).unwrap(); let doc = dc.get_document(&uri).unwrap();
let token = crate::language::token_at_offset(doc.node.as_ref().unwrap(), offset)?; let token = crate::language::token_at_offset(doc.node.as_ref().unwrap(), offset)?;
let caps = CompletionClientCapabilities { let caps = CompletionClientCapabilities {
completion_item: Some(lsp_types::CompletionItemCapability { completion_item: Some(lsp_types::CompletionItemCapability {
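Note: completion now reaches the type registries and import machinery exclusively through the DocumentCache facade: global_type_registry(), get_document_for_source_file(), get_document_by_path() and resolve_import_path() replace the direct TypeLoader accesses. A sketch of resolving the document behind an import specifier with those calls; the helper name is illustrative and get_document_by_path is assumed to return Option<&object_tree::Document>:

fn imported_document<'a>(
    document_cache: &'a DocumentCache,
    token: SyntaxToken,
    import_string: &str,
) -> Option<&'a i_slint_compiler::object_tree::Document> {
    // Map the string literal of the import statement to a file on disk
    // (the second tuple element carries embedded builtin sources and is ignored here).
    let (path, _builtin_source) =
        document_cache.resolve_import_path(Some(&token.into()), import_string)?;
    // The resolved file is then looked up by path rather than by Url.
    document_cache.get_document_by_path(&path)
}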

View file

@ -1,8 +1,7 @@
// Copyright © SixtyFPS GmbH <info@slint.dev> // Copyright © SixtyFPS GmbH <info@slint.dev>
// SPDX-License-Identifier: GPL-3.0-only OR LicenseRef-Slint-Royalty-free-2.0 OR LicenseRef-Slint-Software-3.0 // SPDX-License-Identifier: GPL-3.0-only OR LicenseRef-Slint-Royalty-free-2.0 OR LicenseRef-Slint-Software-3.0
use super::DocumentCache; use crate::common::DocumentCache;
use crate::common;
use crate::fmt::{fmt, writer}; use crate::fmt::{fmt, writer};
use crate::util::map_range; use crate::util::map_range;
use dissimilar::Chunk; use dissimilar::Chunk;
@ -36,8 +35,7 @@ pub fn format_document(
params: DocumentFormattingParams, params: DocumentFormattingParams,
document_cache: &DocumentCache, document_cache: &DocumentCache,
) -> Option<Vec<TextEdit>> { ) -> Option<Vec<TextEdit>> {
let file_path = common::uri_to_file(&params.text_document.uri)?; let doc = document_cache.get_document(&params.text_document.uri)?;
let doc = document_cache.documents.get_document(&file_path)?;
let doc = doc.node.as_ref()?; let doc = doc.node.as_ref()?;
let mut writer = StringWriter { text: String::new() }; let mut writer = StringWriter { text: String::new() };

View file

@ -25,7 +25,7 @@ pub fn goto_definition(
return match parent.kind() { return match parent.kind() {
SyntaxKind::Type => { SyntaxKind::Type => {
let qual = i_slint_compiler::object_tree::QualifiedTypeName::from_node(n); let qual = i_slint_compiler::object_tree::QualifiedTypeName::from_node(n);
let doc = document_cache.documents.get_document(node.source_file.path())?; let doc = document_cache.get_document_for_source_file(&node.source_file)?;
match doc.local_registry.lookup_qualified(&qual.members) { match doc.local_registry.lookup_qualified(&qual.members) {
Type::Struct { node: Some(node), .. } => goto_node(node.parent().as_ref()?), Type::Struct { node: Some(node), .. } => goto_node(node.parent().as_ref()?),
Type::Enumeration(e) => goto_node(e.node.as_ref()?), Type::Enumeration(e) => goto_node(e.node.as_ref()?),
@ -34,7 +34,7 @@ pub fn goto_definition(
} }
SyntaxKind::Element => { SyntaxKind::Element => {
let qual = i_slint_compiler::object_tree::QualifiedTypeName::from_node(n); let qual = i_slint_compiler::object_tree::QualifiedTypeName::from_node(n);
let doc = document_cache.documents.get_document(node.source_file.path())?; let doc = document_cache.get_document_for_source_file(&node.source_file)?;
match doc.local_registry.lookup_element(&qual.to_string()) { match doc.local_registry.lookup_element(&qual.to_string()) {
Ok(ElementType::Component(c)) => { Ok(ElementType::Component(c)) => {
goto_node(&c.root_element.borrow().debug.first()?.node) goto_node(&c.root_element.borrow().debug.first()?.node)
@ -46,7 +46,7 @@ pub fn goto_definition(
if token.kind() != SyntaxKind::Identifier { if token.kind() != SyntaxKind::Identifier {
return None; return None;
} }
let lr = with_lookup_ctx(&document_cache.documents, node, |ctx| { let lr = with_lookup_ctx(&document_cache, node, |ctx| {
let mut it = n let mut it = n
.children_with_tokens() .children_with_tokens()
.filter_map(|t| t.into_token()) .filter_map(|t| t.into_token())
@ -104,7 +104,7 @@ pub fn goto_definition(
_ => None, _ => None,
}; };
} else if let Some(n) = syntax_nodes::ImportIdentifier::new(node.clone()) { } else if let Some(n) = syntax_nodes::ImportIdentifier::new(node.clone()) {
let doc = document_cache.documents.get_document(node.source_file.path())?; let doc = document_cache.get_document_for_source_file(&node.source_file)?;
let imp_name = i_slint_compiler::typeloader::ImportedName::from_node(n); let imp_name = i_slint_compiler::typeloader::ImportedName::from_node(n);
return match doc.local_registry.lookup_element(&imp_name.internal_name) { return match doc.local_registry.lookup_element(&imp_name.internal_name) {
Ok(ElementType::Component(c)) => { Ok(ElementType::Component(c)) => {
@ -120,7 +120,7 @@ pub fn goto_definition(
.unwrap_or_else(|| Path::new("/")) .unwrap_or_else(|| Path::new("/"))
.join(n.child_text(SyntaxKind::StringLiteral)?.trim_matches('\"')); .join(n.child_text(SyntaxKind::StringLiteral)?.trim_matches('\"'));
let import_file = clean_path(&import_file); let import_file = clean_path(&import_file);
let doc = document_cache.documents.get_document(&import_file)?; let doc = document_cache.get_document_by_path(&import_file)?;
let doc_node = doc.node.clone()?; let doc_node = doc.node.clone()?;
return goto_node(&doc_node); return goto_node(&doc_node);
} else if syntax_nodes::BindingExpression::new(node.clone()).is_some() { } else if syntax_nodes::BindingExpression::new(node.clone()).is_some() {
@ -185,10 +185,10 @@ fn find_property_declaration_in_base(
element: syntax_nodes::Element, element: syntax_nodes::Element,
prop_name: &str, prop_name: &str,
) -> Option<SyntaxNode> { ) -> Option<SyntaxNode> {
let global_tr = document_cache.documents.global_type_registry.borrow(); let global_tr = document_cache.global_type_registry();
let tr = element let tr = element
.source_file() .source_file()
.and_then(|sf| document_cache.documents.get_document(sf.path())) .and_then(|sf| document_cache.get_document_for_source_file(&sf))
.map(|doc| &doc.local_registry) .map(|doc| &doc.local_registry)
.unwrap_or(&global_tr); .unwrap_or(&global_tr);
@ -236,13 +236,7 @@ export component Test {
}"#; }"#;
let (mut dc, uri, _) = crate::language::test::loaded_document_cache(source.into()); let (mut dc, uri, _) = crate::language::test::loaded_document_cache(source.into());
let doc = dc let doc = dc.get_document(&uri).unwrap().node.clone().unwrap();
.documents
.get_document(&crate::common::uri_to_file(&uri).unwrap())
.unwrap()
.node
.clone()
.unwrap();
// Jump to the definition of Abc // Jump to the definition of Abc
let offset = source.find("abc := Abc").unwrap() as u32; let offset = source.find("abc := Abc").unwrap() as u32;
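Note: goto_definition exercises both lookup flavours introduced here: documents that own a syntax node are fetched with get_document_for_source_file(&node.source_file), while files resolved on disk (the import target) go through get_document_by_path. A small sketch of the first flavour; the helper name is illustrative and local_registry is assumed to be a plain TypeRegister, as the hunks above suggest:

fn local_registry_for_node<'a>(
    document_cache: &'a DocumentCache,
    node: &SyntaxNode,
) -> Option<&'a i_slint_compiler::typeregister::TypeRegister> {
    // The document that owns this node's source file also owns its local type registry.
    let doc = document_cache.get_document_for_source_file(&node.source_file)?;
    Some(&doc.local_registry)
}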

View file

@ -631,7 +631,7 @@ pub fn set_binding(
let expr_context_info = element.with_element_node(|node| { let expr_context_info = element.with_element_node(|node| {
util::ExpressionContextInfo::new(node.clone(), property_name.to_string(), false) util::ExpressionContextInfo::new(node.clone(), property_name.to_string(), false)
}); });
util::with_property_lookup_ctx(&document_cache.documents, &expr_context_info, |ctx| { util::with_property_lookup_ctx(&document_cache, &expr_context_info, |ctx| {
let expression = let expression =
i_slint_compiler::expression_tree::Expression::from_binding_expression_node( i_slint_compiler::expression_tree::Expression::from_binding_expression_node(
expression_node, expression_node,
@ -727,14 +727,11 @@ fn element_at_source_code_position(
dc: &mut language::DocumentCache, dc: &mut language::DocumentCache,
position: &common::VersionedPosition, position: &common::VersionedPosition,
) -> Result<common::ElementRcNode> { ) -> Result<common::ElementRcNode> {
let file = lsp_types::Url::to_file_path(position.url())
.map_err(|_| "Failed to convert URL to file path".to_string())?;
if &dc.document_version(position.url()) != position.version() { if &dc.document_version(position.url()) != position.version() {
return Err("Document version mismatch.".into()); return Err("Document version mismatch.".into());
} }
let doc = dc.documents.get_document(&file).ok_or_else(|| "Document not found".to_string())?; let doc = dc.get_document(position.url()).ok_or_else(|| "Document not found".to_string())?;
let source_file = doc let source_file = doc
.node .node
@ -743,9 +740,9 @@ fn element_at_source_code_position(
.ok_or_else(|| "Document had no node".to_string())?; .ok_or_else(|| "Document had no node".to_string())?;
let element_position = util::map_position(&source_file, position.offset().into()); let element_position = util::map_position(&source_file, position.offset().into());
Ok(language::element_at_position(&dc.documents, position.url(), &element_position).ok_or_else( Ok(language::element_at_position(&dc, position.url(), &element_position).ok_or_else(|| {
|| format!("No element found at the given start position {:?}", &element_position), format!("No element found at the given start position {:?}", &element_position)
)?) })?)
} }
#[cfg(any(feature = "preview-external", feature = "preview-engine"))] #[cfg(any(feature = "preview-external", feature = "preview-engine"))]
@ -843,8 +840,6 @@ pub fn remove_binding(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use i_slint_compiler::typeloader::TypeLoader;
use super::*; use super::*;
use crate::language::test::{complex_document_cache, loaded_document_cache}; use crate::language::test::{complex_document_cache, loaded_document_cache};
@ -859,11 +854,14 @@ mod tests {
fn properties_at_position_in_cache( fn properties_at_position_in_cache(
line: u32, line: u32,
character: u32, character: u32,
tl: &TypeLoader, document_cache: &common::DocumentCache,
url: &lsp_types::Url, url: &lsp_types::Url,
) -> Option<(common::ElementRcNode, Vec<PropertyInformation>)> { ) -> Option<(common::ElementRcNode, Vec<PropertyInformation>)> {
let element = let element = language::element_at_position(
language::element_at_position(tl, url, &lsp_types::Position { line, character })?; document_cache,
url,
&lsp_types::Position { line, character },
)?;
Some((element.clone(), get_properties(&element))) Some((element.clone(), get_properties(&element)))
} }
@ -877,8 +875,7 @@ mod tests {
lsp_types::Url, lsp_types::Url,
)> { )> {
let (dc, url, _) = complex_document_cache(); let (dc, url, _) = complex_document_cache();
if let Some((e, p)) = properties_at_position_in_cache(line, character, &dc.documents, &url) if let Some((e, p)) = properties_at_position_in_cache(line, character, &dc, &url) {
{
Some((e, p, dc, url)) Some((e, p, dc, url))
} else { } else {
None None
@ -915,9 +912,9 @@ mod tests {
#[test] #[test]
fn test_element_information() { fn test_element_information() {
let (dc, url, _) = complex_document_cache(); let (document_cache, url, _) = complex_document_cache();
let element = let element =
language::element_at_position(&dc.documents, &url, &lsp_types::Position::new(33, 4)) language::element_at_position(&document_cache, &url, &lsp_types::Position::new(33, 4))
.unwrap(); .unwrap();
let result = get_element_information(&element); let result = get_element_information(&element);
@ -947,8 +944,7 @@ mod tests {
let (dc, url, _) = loaded_document_cache(content); let (dc, url, _) = loaded_document_cache(content);
let (_, result) = let (_, result) = properties_at_position_in_cache(pos_l, pos_c, &dc, &url).unwrap();
properties_at_position_in_cache(pos_l, pos_c, &dc.documents, &url).unwrap();
let p = find_property(&result, "text").unwrap(); let p = find_property(&result, "text").unwrap();
let definition = p.defined_at.as_ref().unwrap(); let definition = p.defined_at.as_ref().unwrap();
@ -1410,11 +1406,10 @@ component MainWindow inherits Window {
"#.to_string()); "#.to_string());
let file_url = url.clone(); let file_url = url.clone();
let doc = dc.documents.get_document(&crate::common::uri_to_file(&url).unwrap()).unwrap(); let doc = dc.get_document(&url).unwrap();
let source = &doc.node.as_ref().unwrap().source_file; let source = &doc.node.as_ref().unwrap().source_file;
let (l, c) = source.line_column(source.source().unwrap().find("base2 :=").unwrap()); let (l, c) = source.line_column(source.source().unwrap().find("base2 :=").unwrap());
let (_, result) = let (_, result) = properties_at_position_in_cache(l as u32, c as u32, &dc, &url).unwrap();
properties_at_position_in_cache(l as u32, c as u32, &dc.documents, &url).unwrap();
let foo_property = find_property(&result, "foo").unwrap(); let foo_property = find_property(&result, "foo").unwrap();
@ -1446,7 +1441,7 @@ component SomeRect inherits Rectangle {
.to_string(), .to_string(),
); );
let (_, result) = properties_at_position_in_cache(1, 25, &dc.documents, &url).unwrap(); let (_, result) = properties_at_position_in_cache(1, 25, &dc, &url).unwrap();
let glob_property = find_property(&result, "glob").unwrap(); let glob_property = find_property(&result, "glob").unwrap();
assert_eq!(glob_property.type_name, "int"); assert_eq!(glob_property.type_name, "int");
@ -1456,7 +1451,7 @@ component SomeRect inherits Rectangle {
assert_eq!(glob_property.group, ""); assert_eq!(glob_property.group, "");
assert_eq!(find_property(&result, "width"), None); assert_eq!(find_property(&result, "width"), None);
let (_, result) = properties_at_position_in_cache(8, 4, &dc.documents, &url).unwrap(); let (_, result) = properties_at_position_in_cache(8, 4, &dc, &url).unwrap();
let abcd_property = find_property(&result, "abcd").unwrap(); let abcd_property = find_property(&result, "abcd").unwrap();
assert_eq!(abcd_property.type_name, "int"); assert_eq!(abcd_property.type_name, "int");
let declaration = abcd_property.declared_at.as_ref().unwrap(); let declaration = abcd_property.declared_at.as_ref().unwrap();
@ -1481,7 +1476,7 @@ component SomeRect inherits Rectangle {
let (dc, url, _) = let (dc, url, _) =
loaded_document_cache(r#"export component Demo { Text { text: } }"#.to_string()); loaded_document_cache(r#"export component Demo { Text { text: } }"#.to_string());
let (_, result) = properties_at_position_in_cache(0, 35, &dc.documents, &url).unwrap(); let (_, result) = properties_at_position_in_cache(0, 35, &dc, &url).unwrap();
let prop = find_property(&result, "text").unwrap(); let prop = find_property(&result, "text").unwrap();
assert_eq!(prop.defined_at, None); // The property has no valid definition at this time assert_eq!(prop.defined_at, None); // The property has no valid definition at this time
@ -1505,7 +1500,7 @@ component Base {
.to_string(), .to_string(),
); );
let (_, result) = properties_at_position_in_cache(3, 0, &dc.documents, &url).unwrap(); let (_, result) = properties_at_position_in_cache(3, 0, &dc, &url).unwrap();
assert_eq!(find_property(&result, "a1").unwrap().type_name, "int"); assert_eq!(find_property(&result, "a1").unwrap().type_name, "int");
assert_eq!( assert_eq!(
find_property(&result, "a1").unwrap().defined_at.as_ref().unwrap().expression_value, find_property(&result, "a1").unwrap().defined_at.as_ref().unwrap().expression_value,
@ -1560,7 +1555,7 @@ component MyComp {
.to_string(), .to_string(),
); );
let (_, result) = properties_at_position_in_cache(11, 1, &dc.documents, &url).unwrap(); let (_, result) = properties_at_position_in_cache(11, 1, &dc, &url).unwrap();
assert_eq!(find_property(&result, "a1").unwrap().type_name, "int"); assert_eq!(find_property(&result, "a1").unwrap().type_name, "int");
assert_eq!( assert_eq!(
find_property(&result, "a1").unwrap().defined_at.as_ref().unwrap().expression_value, find_property(&result, "a1").unwrap().defined_at.as_ref().unwrap().expression_value,
@ -1611,19 +1606,19 @@ component MyComp {
.to_string(), .to_string(),
); );
let (_, result) = properties_at_position_in_cache(3, 0, &dc.documents, &url).unwrap(); let (_, result) = properties_at_position_in_cache(3, 0, &dc, &url).unwrap();
assert_eq!(find_property(&result, "a").unwrap().type_name, "int"); assert_eq!(find_property(&result, "a").unwrap().type_name, "int");
assert_eq!(find_property(&result, "b").unwrap().type_name, "int"); assert_eq!(find_property(&result, "b").unwrap().type_name, "int");
assert_eq!(find_property(&result, "c").unwrap().type_name, "int"); assert_eq!(find_property(&result, "c").unwrap().type_name, "int");
assert_eq!(find_property(&result, "d").unwrap().type_name, "int"); assert_eq!(find_property(&result, "d").unwrap().type_name, "int");
let (_, result) = properties_at_position_in_cache(10, 0, &dc.documents, &url).unwrap(); let (_, result) = properties_at_position_in_cache(10, 0, &dc, &url).unwrap();
assert_eq!(find_property(&result, "a"), None); assert_eq!(find_property(&result, "a"), None);
assert_eq!(find_property(&result, "b").unwrap().type_name, "int"); assert_eq!(find_property(&result, "b").unwrap().type_name, "int");
assert_eq!(find_property(&result, "c"), None); assert_eq!(find_property(&result, "c"), None);
assert_eq!(find_property(&result, "d").unwrap().type_name, "int"); assert_eq!(find_property(&result, "d").unwrap().type_name, "int");
let (_, result) = properties_at_position_in_cache(13, 0, &dc.documents, &url).unwrap(); let (_, result) = properties_at_position_in_cache(13, 0, &dc, &url).unwrap();
assert_eq!(find_property(&result, "enabled").unwrap().type_name, "bool"); assert_eq!(find_property(&result, "enabled").unwrap().type_name, "bool");
assert_eq!(find_property(&result, "pressed"), None); assert_eq!(find_property(&result, "pressed"), None);
} }
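Note: the properties code keeps the version check but drops the Url-to-path conversion: element_at_source_code_position validates the stored document version against the cache and then fetches the document directly. A condensed sketch of that flow; the function name and the plain String error type are illustrative:

fn checked_document<'a>(
    dc: &'a DocumentCache,
    position: &common::VersionedPosition,
) -> Result<&'a i_slint_compiler::object_tree::Document, String> {
    // Reject requests whose document version no longer matches the cache.
    if &dc.document_version(position.url()) != position.version() {
        return Err("Document version mismatch.".into());
    }
    dc.get_document(position.url()).ok_or_else(|| "Document not found".to_string())
}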

View file

@ -6,7 +6,7 @@ use lsp_types::{
SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens, SemanticTokensResult, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens, SemanticTokensResult,
}; };
use crate::common::{self, DocumentCache}; use crate::common::DocumentCache;
/// Give all the used types/modifier a number in an indexed array /// Give all the used types/modifier a number in an indexed array
macro_rules! declare_legend { macro_rules! declare_legend {
@ -30,8 +30,7 @@ pub fn get_semantic_tokens(
document_cache: &mut DocumentCache, document_cache: &mut DocumentCache,
text_document: &lsp_types::TextDocumentIdentifier, text_document: &lsp_types::TextDocumentIdentifier,
) -> Option<SemanticTokensResult> { ) -> Option<SemanticTokensResult> {
let filepath = common::uri_to_file(&text_document.uri)?; let doc = document_cache.get_document(&text_document.uri)?;
let doc = document_cache.documents.get_document(&filepath)?;
let doc_node = doc.node.as_ref()?; let doc_node = doc.node.as_ref()?;
let mut token = doc_node.first_token()?; let mut token = doc_node.first_token()?;
let mut data = vec![]; let mut data = vec![];

View file

@ -25,8 +25,7 @@ pub fn loaded_document_cache(
let mut dc = empty_document_cache(); let mut dc = empty_document_cache();
// Pre-load std-widgets.slint: // Pre-load std-widgets.slint:
let mut diag = i_slint_compiler::diagnostics::BuildDiagnostics::default(); spin_on::spin_on(dc.preload_builtins());
spin_on::spin_on(dc.documents.import_component("std-widgets.slint", "StyleMetrics", &mut diag));
let dummy_absolute_path = let dummy_absolute_path =
if cfg!(target_family = "windows") { "c://foo/bar.slint" } else { "/foo/bar.slint" }; if cfg!(target_family = "windows") { "c://foo/bar.slint" } else { "/foo/bar.slint" };
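Note: the test helper now warms the cache with DocumentCache::preload_builtins() instead of importing StyleMetrics from std-widgets.slint by hand, so it no longer threads a BuildDiagnostics through. A sketch of the assumed shape; preload_builtins is taken to be async and to report nothing on success, since the call site above discards its output:

async fn warm_cache(dc: &mut DocumentCache) {
    // One call replaces the manual std-widgets import that previously seeded completion data.
    dc.preload_builtins().await;
}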

View file

@ -77,7 +77,7 @@ thread_local! {static PREVIEW_STATE: std::cell::RefCell<PreviewState> = Default:
impl PreviewState { impl PreviewState {
fn refresh_document_cache( fn refresh_document_cache(
&mut self, &mut self,
path: &Path, url: &lsp_types::Url,
version: SourceFileVersion, version: SourceFileVersion,
source_code: String, source_code: String,
) { ) {
@ -86,14 +86,7 @@ impl PreviewState {
}; };
let mut diag = BuildDiagnostics::default(); let mut diag = BuildDiagnostics::default();
spin_on::spin_on(dc.documents.load_file( let _ = spin_on::spin_on(dc.load_url(url, version, source_code, &mut diag)); // ignore url conversion errors
path,
version,
path,
source_code,
false,
&mut diag,
));
eprintln!("Updated Document Cache in Live Preview: has_error: {}", diag.has_error()); eprintln!("Updated Document Cache in Live Preview: has_error: {}", diag.has_error());
} }
@ -130,19 +123,16 @@ pub fn set_contents(url: &common::VersionedUrl, content: String) {
} }
} }
let Some(path) = common::uri_to_file(&url.url()) else { return Default::default() }; let fu = url.clone();
let fp = path.clone();
let fv = url.version().clone();
let fc = content.clone(); let fc = content.clone();
let _ = i_slint_core::api::invoke_from_event_loop(move || { let _ = i_slint_core::api::invoke_from_event_loop(move || {
let path = fp; let url = fu;
let version = fv;
let content = fc; let content = fc;
PREVIEW_STATE PREVIEW_STATE.with(move |ps| {
.with(move |ps| ps.borrow_mut().refresh_document_cache(&path, version, content)) ps.borrow_mut().refresh_document_cache(url.url(), url.version().clone(), content)
})
}); });
let ui_is_visible = cache.ui_is_visible; let ui_is_visible = cache.ui_is_visible;
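Note: the preview side refreshes its cache by Url as well. The six-argument TypeLoader::load_file call collapses into DocumentCache::load_url(url, version, source_code, &mut diag), whose error case is only a Url that cannot be mapped to a file, and the event-loop closure forwards the VersionedUrl instead of a pre-converted path. A sketch of the assumed call; the logging is illustrative:

async fn refresh(
    dc: &mut DocumentCache,
    url: &lsp_types::Url,
    version: SourceFileVersion,
    source_code: String,
) {
    let mut diag = BuildDiagnostics::default();
    // An Err here only means the Url could not be turned into a file path.
    if dc.load_url(url, version, source_code, &mut diag).await.is_err() {
        eprintln!("Cannot map {url} to a file");
    }
    eprintln!("Updated Document Cache: has_error: {}", diag.has_error());
}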

View file

@ -7,7 +7,6 @@ use i_slint_compiler::lookup::LookupCtx;
use i_slint_compiler::object_tree; use i_slint_compiler::object_tree;
use i_slint_compiler::parser::{syntax_nodes, SyntaxKind, SyntaxNode, SyntaxToken}; use i_slint_compiler::parser::{syntax_nodes, SyntaxKind, SyntaxNode, SyntaxToken};
use i_slint_compiler::parser::{TextRange, TextSize}; use i_slint_compiler::parser::{TextRange, TextSize};
use i_slint_compiler::typeloader::TypeLoader;
use i_slint_compiler::typeregister::TypeRegister; use i_slint_compiler::typeregister::TypeRegister;
use crate::common; use crate::common;
@ -126,17 +125,17 @@ impl ExpressionContextInfo {
/// Run the function with the LookupCtx associated with the token /// Run the function with the LookupCtx associated with the token
pub fn with_lookup_ctx<R>( pub fn with_lookup_ctx<R>(
type_loader: &TypeLoader, document_cache: &common::DocumentCache,
node: SyntaxNode, node: SyntaxNode,
f: impl FnOnce(&mut LookupCtx) -> R, f: impl FnOnce(&mut LookupCtx) -> R,
) -> Option<R> { ) -> Option<R> {
let expr_context_info = lookup_expression_context(node)?; let expr_context_info = lookup_expression_context(node)?;
with_property_lookup_ctx::<R>(type_loader, &expr_context_info, f) with_property_lookup_ctx::<R>(document_cache, &expr_context_info, f)
} }
/// Run the function with the LookupCtx associated with the token /// Run the function with the LookupCtx associated with the token
pub fn with_property_lookup_ctx<R>( pub fn with_property_lookup_ctx<R>(
type_loader: &TypeLoader, document_cache: &common::DocumentCache,
expr_context_info: &ExpressionContextInfo, expr_context_info: &ExpressionContextInfo,
f: impl FnOnce(&mut LookupCtx) -> R, f: impl FnOnce(&mut LookupCtx) -> R,
) -> Option<R> { ) -> Option<R> {
@ -145,10 +144,10 @@ pub fn with_property_lookup_ctx<R>(
expr_context_info.property_name.as_str(), expr_context_info.property_name.as_str(),
expr_context_info.is_animate, expr_context_info.is_animate,
); );
let global_tr = type_loader.global_type_registry.borrow(); let global_tr = document_cache.global_type_registry();
let tr = element let tr = element
.source_file() .source_file()
.and_then(|sf| type_loader.get_document(sf.path())) .and_then(|sf| document_cache.get_document_for_source_file(&sf))
.map(|doc| &doc.local_registry) .map(|doc| &doc.local_registry)
.unwrap_or(&global_tr); .unwrap_or(&global_tr);
@ -320,16 +319,13 @@ mod tests {
.to_string(), .to_string(),
); );
let window = let window = language::element_at_position(&dc, &url, &lsp_types::Position::new(0, 30));
language::element_at_position(&dc.documents, &url, &lsp_types::Position::new(0, 30));
assert_eq!(find_element_indent(&window.unwrap()), None); assert_eq!(find_element_indent(&window.unwrap()), None);
let vbox = let vbox = language::element_at_position(&dc, &url, &lsp_types::Position::new(1, 4));
language::element_at_position(&dc.documents, &url, &lsp_types::Position::new(1, 4));
assert_eq!(find_element_indent(&vbox.unwrap()), Some(" ".to_string())); assert_eq!(find_element_indent(&vbox.unwrap()), Some(" ".to_string()));
let label = let label = language::element_at_position(&dc, &url, &lsp_types::Position::new(2, 17));
language::element_at_position(&dc.documents, &url, &lsp_types::Position::new(2, 17));
assert_eq!(find_element_indent(&label.unwrap()), Some(" ".to_string())); assert_eq!(find_element_indent(&label.unwrap()), Some(" ".to_string()));
} }
} }
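Note: util's lookup helpers complete the picture: with_lookup_ctx and with_property_lookup_ctx now borrow the common::DocumentCache instead of a TypeLoader, so all the call sites earlier in this commit can simply pass the cache through. A sketch of a caller; the wrapper is illustrative and deliberately ignores the LookupCtx contents:

fn has_lookup_context(document_cache: &common::DocumentCache, node: SyntaxNode) -> bool {
    // The closure still receives the same LookupCtx; only the first parameter's type changed.
    with_lookup_ctx(document_cache, node, |_ctx| ()).is_some()
}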