Merge branch 'main' into ensure-roc-files

This commit is contained in:
Anton-4 2024-02-09 19:15:13 +01:00 committed by GitHub
commit 3eec1f4bb0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
16 changed files with 1495 additions and 320 deletions

118
Cargo.lock generated
View file

@ -771,6 +771,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "dissimilar"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "86e3bdc80eee6e16b2b6b0f87fbc98c04bee3455e35174c0de1a125d0688c632"
[[package]]
name = "distance"
version = "0.4.0"
@ -838,6 +844,19 @@ dependencies = [
"regex",
]
[[package]]
name = "env_logger"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580"
dependencies = [
"humantime",
"is-terminal",
"log",
"regex",
"termcolor",
]
[[package]]
name = "equivalent"
version = "1.0.1"
@ -864,6 +883,16 @@ dependencies = [
"str-buf",
]
[[package]]
name = "expect-test"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30d9eafeadd538e68fb28016364c9732d78e420b9ff8853fa5e4058861e9f8d3"
dependencies = [
"dissimilar",
"once_cell",
]
[[package]]
name = "fd-lock"
version = "3.0.13"
@ -1151,6 +1180,12 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hyper"
version = "0.14.27"
@ -1327,6 +1362,17 @@ version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6"
[[package]]
name = "is-terminal"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bad00257d07be169d870ab665980b06cdb366d792ad690bf2e76876dc503455"
dependencies = [
"hermit-abi 0.3.3",
"rustix",
"windows-sys 0.52.0",
]
[[package]]
name = "itertools"
version = "0.9.0"
@ -1922,7 +1968,7 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6"
dependencies = [
"env_logger",
"env_logger 0.8.4",
"log",
"rand",
]
@ -2650,6 +2696,10 @@ name = "roc_lang_srv"
version = "0.0.1"
dependencies = [
"bumpalo",
"env_logger 0.10.2",
"expect-test",
"indoc",
"log",
"parking_lot",
"roc_can",
"roc_collections",
@ -4547,6 +4597,15 @@ dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.0",
]
[[package]]
name = "windows-targets"
version = "0.42.2"
@ -4577,6 +4636,21 @@ dependencies = [
"windows_x86_64_msvc 0.48.5",
]
[[package]]
name = "windows-targets"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
dependencies = [
"windows_aarch64_gnullvm 0.52.0",
"windows_aarch64_msvc 0.52.0",
"windows_i686_gnu 0.52.0",
"windows_i686_msvc 0.52.0",
"windows_x86_64_gnu 0.52.0",
"windows_x86_64_gnullvm 0.52.0",
"windows_x86_64_msvc 0.52.0",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.2"
@ -4589,6 +4663,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"
@ -4601,6 +4681,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
[[package]]
name = "windows_i686_gnu"
version = "0.42.2"
@ -4613,6 +4699,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
[[package]]
name = "windows_i686_msvc"
version = "0.42.2"
@ -4625,6 +4717,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"
@ -4637,6 +4735,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
@ -4649,6 +4753,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"
@ -4661,6 +4771,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
[[package]]
name = "winreg"
version = "0.50.0"

View file

@ -13,7 +13,7 @@ use crate::{
},
pattern::{DestructType, Pattern, RecordDestruct, TupleDestruct},
};
#[derive(Clone)]
pub enum DeclarationInfo<'a> {
Value {
loc_symbol: Loc<Symbol>,
@ -164,7 +164,7 @@ pub fn walk_decls<V: Visitor>(visitor: &mut V, decls: &Declarations) {
}
}
fn walk_decl<V: Visitor>(visitor: &mut V, decl: DeclarationInfo<'_>) {
pub fn walk_decl<V: Visitor>(visitor: &mut V, decl: DeclarationInfo<'_>) {
use DeclarationInfo::*;
match decl {

View file

@ -1071,17 +1071,19 @@ pub fn module_from_builtins<'ctx>(
// Anything not depended on by a `roc_builtin.` function could already be DCE'd theoretically.
// That said, this workaround is good enough and fixes compilations times.
// Also, must_keep is the functions we depend on that would normally be provide by libc.
// Also, must_keep is the functions we depend on that would normally be provided by libc or compiler-rt.
// They are magically linked to by llvm builtins, so we must specify that they can't be DCE'd.
let must_keep = [
// Windows special required when floats are used
"_fltused",
// From libc
"floorf",
"memcpy",
"memset",
// I have no idea why this function is special.
// Without it, some tests hang on M1 mac outside of nix.
// From compiler-rt
"__divti3",
"__modti3",
"__muloti4",
// fixes `Undefined Symbol in relocation`
"__udivti3",
// Roc special functions
"__roc_force_longjmp",

View file

@ -364,7 +364,7 @@ where
}
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct LineInfo {
line_offsets: Vec<u32>,
}

View file

@ -7,6 +7,10 @@ edition = "2021"
name = "roc_ls"
path = "src/server.rs"
[dev-dependencies]
expect-test = "1.4.1"
[dependencies]
roc_can = { path = "../compiler/can" }
roc_collections = { path = "../compiler/collections" }
@ -27,3 +31,6 @@ parking_lot.workspace = true
tower-lsp = "0.17.0"
tokio = { version = "1.20.1", features = [ "rt", "rt-multi-thread", "macros", "io-std" ] }
log.workspace = true
indoc.workspace=true
env_logger = "0.10.1"

View file

@ -78,3 +78,21 @@ If you're using coc.nvim and want to use the configuration above, be sure to als
If you want to debug the server, use [debug_server.sh](./debug_server.sh)
instead of the direct binary.
If you would like to enable debug logging set the `ROCLS_LOG` environment variable to `debug` or `trace` for even more logs.
eg: `ROCLS_LOG=debug`
## Testing
Tests use expect-test, which is a snapshot/expect testing framework.
If a change is made that requires updating the expect tests, run `cargo test` and confirm that the diff is correct, then run `UPDATE_EXPECT=1 cargo test` to update the contents of the files with the new output.
## Config
You can set the environment variables below to control the operation of the language server.
`ROCLS_DEBOUNCE_MS`: Sets the amount of time to delay starting analysis of the document when a change comes in. This prevents starting pointless analysis while you are typing normally.
Default: `100`
`ROCLS_LATEST_DOC_TIMEOUT_MS`: Sets the timeout for waiting for an analysis of the latest document to be complete. If a request is sent that needs the latest version of the document to be analyzed, then it will wait up to this duration before just giving up.
Default: `5000`

View file

@ -1,4 +1,5 @@
#!/usr/bin/bash
SCRIPT_DIR=$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd)
RUST_LOG=debug
${SCRIPT_DIR}/../../target/debug/roc_ls "$@" 2> /tmp/roc_ls.err

View file

@ -1,116 +1,120 @@
use std::{
collections::HashMap,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use bumpalo::Bump;
use roc_can::{abilities::AbilitiesStore, expr::Declarations};
use roc_collections::MutMap;
use roc_load::{CheckedModule, LoadedModule};
use roc_module::symbol::{Interns, ModuleId, Symbol};
use roc_module::symbol::{Interns, ModuleId};
use roc_packaging::cache::{self, RocCacheDir};
use roc_region::all::LineInfo;
use roc_reporting::report::RocDocAllocator;
use roc_solve_problem::TypeError;
use roc_types::subs::Subs;
use tower_lsp::lsp_types::{
Diagnostic, GotoDefinitionResponse, Hover, HoverContents, Location, MarkedString, Position,
Range, SemanticTokenType, SemanticTokens, SemanticTokensResult, TextEdit, Url,
};
use crate::convert::{
diag::{IntoLspDiagnostic, ProblemFmt},
ToRange, ToRocPosition,
};
use tower_lsp::lsp_types::{Diagnostic, SemanticTokenType, Url};
mod analysed_doc;
mod completion;
mod parse_ast;
mod semantic_tokens;
mod tokens;
mod utils;
use crate::convert::diag::{IntoLspDiagnostic, ProblemFmt};
pub(crate) use self::analysed_doc::{AnalyzedDocument, DocInfo};
use self::{analysed_doc::ModuleIdToUrl, tokens::Token};
use self::{parse_ast::Ast, semantic_tokens::arrange_semantic_tokens, tokens::Token};
pub const HIGHLIGHT_TOKENS_LEGEND: &[SemanticTokenType] = Token::LEGEND;
pub(crate) struct GlobalAnalysis {
pub documents: Vec<AnalyzedDocument>,
#[derive(Debug, Clone)]
pub(super) struct AnalyzedModule {
module_id: ModuleId,
interns: Interns,
subs: Subs,
abilities: AbilitiesStore,
declarations: Declarations,
// We need this because ModuleIds are not stable between compilations, so a ModuleId visible to
// one module may not be truly global to the language server.
module_id_to_url: ModuleIdToUrl,
}
#[derive(Debug, Clone)]
pub struct AnalysisResult {
module: Option<AnalyzedModule>,
diagnostics: Vec<Diagnostic>,
}
impl GlobalAnalysis {
pub fn new(source_url: Url, source: String) -> GlobalAnalysis {
let arena = Bump::new();
pub(crate) fn global_analysis(doc_info: DocInfo) -> Vec<AnalyzedDocument> {
let fi = doc_info.url.to_file_path().unwrap();
let src_dir = find_src_dir(&fi).to_path_buf();
let fi = source_url.to_file_path().unwrap();
let src_dir = find_src_dir(&fi).to_path_buf();
let line_info = LineInfo::new(&source);
let arena = Bump::new();
let loaded = roc_load::load_and_typecheck_str(
&arena,
fi,
&doc_info.source,
src_dir,
roc_target::TargetInfo::default_x86_64(),
roc_load::FunctionKind::LambdaSet,
roc_reporting::report::RenderTarget::Generic,
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
roc_reporting::report::DEFAULT_PALETTE,
);
let loaded = roc_load::load_and_typecheck_str(
&arena,
fi,
&source,
src_dir,
roc_target::TargetInfo::default_x86_64(),
roc_load::FunctionKind::LambdaSet,
roc_reporting::report::RenderTarget::Generic,
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
roc_reporting::report::DEFAULT_PALETTE,
);
let module = match loaded {
Ok(module) => module,
Err(problem) => {
let all_problems = problem
.into_lsp_diagnostic(&())
.into_iter()
.collect::<Vec<_>>();
let module = match loaded {
Ok(module) => module,
Err(problem) => {
let all_problems = problem
.into_lsp_diagnostic(&())
.into_iter()
.collect::<Vec<_>>();
let analyzed_document = AnalyzedDocument {
url: source_url,
line_info,
source,
let analyzed_document = AnalyzedDocument {
doc_info,
analysis_result: AnalysisResult {
module: None,
diagnostics: all_problems,
};
},
};
return GlobalAnalysis {
documents: vec![analyzed_document],
};
}
};
let mut documents = vec![];
let LoadedModule {
interns,
mut can_problems,
mut type_problems,
mut declarations_by_id,
sources,
mut typechecked,
solved,
abilities_store,
..
} = module;
let mut root_module = Some(RootModule {
subs: solved.into_inner(),
abilities_store,
});
let mut builder = AnalyzedDocumentBuilder {
interns: &interns,
module_id_to_url: module_id_to_url_from_sources(&sources),
can_problems: &mut can_problems,
type_problems: &mut type_problems,
declarations_by_id: &mut declarations_by_id,
typechecked: &mut typechecked,
root_module: &mut root_module,
};
for (module_id, (path, source)) in sources {
documents.push(builder.build_document(path, source, module_id));
return vec![analyzed_document];
}
};
GlobalAnalysis { documents }
let mut documents = vec![];
let LoadedModule {
interns,
mut can_problems,
mut type_problems,
mut declarations_by_id,
sources,
mut typechecked,
solved,
abilities_store,
..
} = module;
let mut root_module = Some(RootModule {
subs: solved.into_inner(),
abilities_store,
});
let mut builder = AnalyzedDocumentBuilder {
interns: &interns,
module_id_to_url: module_id_to_url_from_sources(&sources),
can_problems: &mut can_problems,
type_problems: &mut type_problems,
declarations_by_id: &mut declarations_by_id,
typechecked: &mut typechecked,
root_module: &mut root_module,
};
for (module_id, (path, source)) in sources {
documents.push(builder.build_document(path, source, module_id, doc_info.version));
}
documents
}
fn find_src_dir(path: &Path) -> &Path {
@ -169,6 +173,7 @@ impl<'a> AnalyzedDocumentBuilder<'a> {
path: PathBuf,
source: Box<str>,
module_id: ModuleId,
version: i32,
) -> AnalyzedDocument {
let subs;
let abilities;
@ -198,11 +203,16 @@ impl<'a> AnalyzedDocumentBuilder<'a> {
let diagnostics = self.build_diagnostics(&path, &source, &line_info, module_id);
AnalyzedDocument {
url: path_to_url(&path),
line_info,
source: source.into(),
module: Some(analyzed_module),
diagnostics,
doc_info: DocInfo {
url: path_to_url(&path),
line_info,
source: source.into(),
version,
},
analysis_result: AnalysisResult {
module: Some(analyzed_module),
diagnostics,
},
}
}
@ -243,157 +253,3 @@ impl<'a> AnalyzedDocumentBuilder<'a> {
all_problems
}
}
type ModuleIdToUrl = HashMap<ModuleId, Url>;
#[derive(Debug)]
struct AnalyzedModule {
module_id: ModuleId,
interns: Interns,
subs: Subs,
abilities: AbilitiesStore,
declarations: Declarations,
// We need this because ModuleIds are not stable between compilations, so a ModuleId visible to
// one module may not be true global to the language server.
module_id_to_url: ModuleIdToUrl,
}
#[derive(Debug)]
pub(crate) struct AnalyzedDocument {
url: Url,
line_info: LineInfo,
source: String,
module: Option<AnalyzedModule>,
diagnostics: Vec<Diagnostic>,
}
impl AnalyzedDocument {
pub fn url(&self) -> &Url {
&self.url
}
fn line_info(&self) -> &LineInfo {
&self.line_info
}
fn module_mut(&mut self) -> Option<&mut AnalyzedModule> {
self.module.as_mut()
}
fn module(&self) -> Option<&AnalyzedModule> {
self.module.as_ref()
}
fn location(&self, range: Range) -> Location {
Location {
uri: self.url.clone(),
range,
}
}
fn whole_document_range(&self) -> Range {
let line_info = self.line_info();
let start = Position::new(0, 0);
let end = Position::new(line_info.num_lines(), 0);
Range::new(start, end)
}
pub fn diagnostics(&mut self) -> Vec<Diagnostic> {
self.diagnostics.clone()
}
pub fn symbol_at(&self, position: Position) -> Option<Symbol> {
let line_info = self.line_info();
let position = position.to_roc_position(line_info);
let AnalyzedModule {
declarations,
abilities,
..
} = self.module()?;
let found_symbol =
roc_can::traverse::find_closest_symbol_at(position, declarations, abilities)?;
Some(found_symbol.implementation_symbol())
}
pub fn hover(&mut self, position: Position) -> Option<Hover> {
let line_info = self.line_info();
let pos = position.to_roc_position(line_info);
let AnalyzedModule {
subs,
declarations,
module_id,
interns,
..
} = self.module_mut()?;
let (region, var) = roc_can::traverse::find_closest_type_at(pos, declarations)?;
let snapshot = subs.snapshot();
let type_str = roc_types::pretty_print::name_and_print_var(
var,
subs,
*module_id,
interns,
roc_types::pretty_print::DebugPrint::NOTHING,
);
subs.rollback_to(snapshot);
let range = region.to_range(self.line_info());
Some(Hover {
contents: HoverContents::Scalar(MarkedString::String(type_str)),
range: Some(range),
})
}
pub fn definition(&self, symbol: Symbol) -> Option<GotoDefinitionResponse> {
let AnalyzedModule { declarations, .. } = self.module()?;
let found_declaration = roc_can::traverse::find_declaration(symbol, declarations)?;
let range = found_declaration.region().to_range(self.line_info());
Some(GotoDefinitionResponse::Scalar(self.location(range)))
}
pub fn format(&self) -> Option<Vec<TextEdit>> {
let source = &self.source;
let arena = &Bump::new();
let ast = Ast::parse(arena, source).ok()?;
let fmt = ast.fmt();
if source == fmt.as_str() {
None
} else {
let range = self.whole_document_range();
let text_edit = TextEdit::new(range, fmt.to_string().to_string());
Some(vec![text_edit])
}
}
pub fn semantic_tokens(&self) -> Option<SemanticTokensResult> {
let source = &self.source;
let arena = &Bump::new();
let ast = Ast::parse(arena, source).ok()?;
let tokens = ast.semantic_tokens();
let data = arrange_semantic_tokens(tokens, &self.line_info);
Some(SemanticTokensResult::Tokens(SemanticTokens {
result_id: None,
data,
}))
}
pub(crate) fn module_url(&self, module_id: ModuleId) -> Option<Url> {
self.module()?.module_id_to_url.get(&module_id).cloned()
}
}

View file

@ -0,0 +1,251 @@
use log::debug;
use std::collections::HashMap;
use bumpalo::Bump;
use roc_module::symbol::{ModuleId, Symbol};
use roc_region::all::LineInfo;
use tower_lsp::lsp_types::{
CompletionItem, Diagnostic, GotoDefinitionResponse, Hover, HoverContents, LanguageString,
Location, MarkedString, Position, Range, SemanticTokens, SemanticTokensResult, TextEdit, Url,
};
use crate::{
analysis::completion::{field_completion, get_completion_items},
convert::{ToRange, ToRocPosition},
};
use super::{
parse_ast::Ast,
semantic_tokens::arrange_semantic_tokens,
utils::{format_var_type, is_roc_identifier_char},
AnalysisResult, AnalyzedModule,
};
// Maps a ModuleId back to the Url of the document it came from. Needed
// because ModuleIds are not stable between compilations.
pub(super) type ModuleIdToUrl = HashMap<ModuleId, Url>;
// A document together with the result of the last analysis run on it.
#[derive(Debug, Clone)]
pub struct AnalyzedDocument {
// Snapshot of the document (url, source, line table, version) that this
// analysis was produced from.
pub doc_info: DocInfo,
// What analysis produced: the typechecked module (if any) plus diagnostics.
pub analysis_result: AnalysisResult,
}
// Information about a document that is available without running analysis:
// the raw source text plus data derived directly from it.
#[derive(Debug, Clone)]
pub struct DocInfo {
pub url: Url,
// Line-offset table derived from `source`; kept consistent by `new`.
pub line_info: LineInfo,
pub source: String,
// Version counter for this document (presumably the LSP textDocument
// version supplied by the client — confirm against the caller).
pub version: i32,
}
impl DocInfo {
/// Builds a `DocInfo` from raw source, computing the line-offset table.
pub fn new(url: Url, source: String, version: i32) -> Self {
Self {
url,
// Compute line offsets once, before `source` is moved into the struct.
line_info: LineInfo::new(&source),
source,
version,
}
}
/// Debug-only helper: logs the source text surrounding byte `offset`.
/// NOTE(review): the `unwrap` panics if `offset` is within 5 bytes of
/// either end of the source, or if the slice boundaries are not on char
/// boundaries — acceptable for a debug helper, but worth confirming.
#[cfg(debug_assertions)]
#[allow(unused)]
fn debug_log_prefix(&self, offset: usize) {
debug!("Prefix source: {:?}", self.source);
let last_few = self.source.get(offset - 5..offset + 5).unwrap();
let (before, after) = last_few.split_at(5);
debug!(
"Starting to get completion items at offset: {:?} content: '{:?}|{:?}'",
offset, before, after
);
}
/// A range spanning the whole document: (0,0) up to one line past the
/// last line, used to replace the entire document in one edit.
fn whole_document_range(&self) -> Range {
let start = Position::new(0, 0);
let end = Position::new(self.line_info.num_lines(), 0);
Range::new(start, end)
}
/// Returns the identifier fragment ending at `position`, by scanning
/// backwards over bytes that look like Roc identifier characters.
pub fn get_prefix_at_position(&self, position: Position) -> String {
let position = position.to_roc_position(&self.line_info);
let offset = position.offset as usize;
let source = &self.source.as_bytes()[..offset];
// Walk backwards from the cursor, counting identifier bytes.
// NOTE(review): the `*a as char` byte cast assumes identifier
// characters are ASCII — confirm against `is_roc_identifier_char`.
let symbol_len = source
.iter()
.rev()
.take_while(|&a| is_roc_identifier_char(&(*a as char)))
.count();
let symbol = &self.source[offset - symbol_len..offset];
String::from(symbol)
}
/// Formats the document. Returns `None` if the source fails to parse or
/// is already formatted; otherwise a single whole-document `TextEdit`.
pub fn format(&self) -> Option<Vec<TextEdit>> {
let source = &self.source;
let arena = &Bump::new();
let ast = Ast::parse(arena, source).ok()?;
let fmt = ast.fmt();
if source == fmt.as_str() {
// Already formatted — report no edits rather than a no-op edit.
None
} else {
let range = self.whole_document_range();
let text_edit = TextEdit::new(range, fmt.to_string().to_string());
Some(vec![text_edit])
}
}
/// Produces semantic highlighting tokens for the document, or `None` if
/// the source fails to parse.
pub fn semantic_tokens(&self) -> Option<SemanticTokensResult> {
let source = &self.source;
let arena = &Bump::new();
let ast = Ast::parse(arena, source).ok()?;
let tokens = ast.semantic_tokens();
// Convert absolute-region tokens into the LSP delta encoding.
let data = arrange_semantic_tokens(tokens, &self.line_info);
Some(SemanticTokensResult::Tokens(SemanticTokens {
result_id: None,
data,
}))
}
}
impl AnalyzedDocument {
/// The url of the underlying document.
pub fn url(&self) -> &Url {
&self.doc_info.url
}
/// Line-offset table of the source this analysis was run on.
fn line_info(&self) -> &LineInfo {
&self.doc_info.line_info
}
/// The analyzed module, if analysis got far enough to produce one.
fn module(&self) -> Option<&AnalyzedModule> {
self.analysis_result.module.as_ref()
}
/// Wraps `range` into a `Location` pointing at this document.
fn location(&self, range: Range) -> Location {
Location {
uri: self.doc_info.url.clone(),
range,
}
}
/// Whether analysis produced a module for this document.
pub fn type_checked(&self) -> bool {
self.analysis_result.module.is_some()
}
/// Diagnostics produced by the last analysis (cloned for the caller).
pub fn diagnostics(&self) -> Vec<Diagnostic> {
self.analysis_result.diagnostics.clone()
}
/// Finds the symbol closest to `position`, returning its implementation
/// symbol. `None` if there is no analyzed module or no symbol nearby.
pub fn symbol_at(&self, position: Position) -> Option<Symbol> {
let line_info = self.line_info();
let position = position.to_roc_position(line_info);
let AnalyzedModule {
declarations,
abilities,
..
} = self.module()?;
let found_symbol =
roc_can::traverse::find_closest_symbol_at(position, declarations, abilities)?;
Some(found_symbol.implementation_symbol())
}
/// Hover support: pretty-prints the type of the closest typed node at
/// `position` as a fenced `roc` code block.
pub fn hover(&self, position: Position) -> Option<Hover> {
let line_info = self.line_info();
let pos = position.to_roc_position(line_info);
let AnalyzedModule {
subs,
declarations,
module_id,
interns,
..
} = self.module()?;
let (region, var) = roc_can::traverse::find_closest_type_at(pos, declarations)?;
// Clone `subs` so type formatting can mutate a scratch copy instead
// of the stored analysis state.
let type_str = format_var_type(var, &mut subs.clone(), module_id, interns);
let range = region.to_range(self.line_info());
Some(Hover {
contents: HoverContents::Scalar(MarkedString::LanguageString(LanguageString {
language: "roc".to_string(),
value: type_str,
})),
range: Some(range),
})
}
/// Go-to-definition: locates the declaration of `symbol` in this module.
pub fn definition(&self, symbol: Symbol) -> Option<GotoDefinitionResponse> {
let AnalyzedModule { declarations, .. } = self.module()?;
let found_declaration = roc_can::traverse::find_declaration(symbol, declarations)?;
let range = found_declaration.region().to_range(self.line_info());
Some(GotoDefinitionResponse::Scalar(self.location(range)))
}
/// Resolves a ModuleId seen during this analysis back to a document url.
pub(crate) fn module_url(&self, module_id: ModuleId) -> Option<Url> {
self.module()?.module_id_to_url.get(&module_id).cloned()
}
/// Computes completion items at `position`. `latest_doc` is the most
/// recent (possibly unanalyzed) version of the document; the prefix is
/// taken from it while scoping uses this (older) analyzed version.
pub fn completion_items(
&self,
position: Position,
latest_doc: &DocInfo,
) -> Option<Vec<CompletionItem>> {
let symbol_prefix = latest_doc.get_prefix_at_position(position);
debug!(
"Starting to get completion items for prefix: {:?} docVersion:{:?}",
symbol_prefix, latest_doc.version
);
let len_diff = latest_doc.source.len() as i32 - self.doc_info.source.len() as i32;
//We offset the position because we need the position to be in the correct scope in the most recently parsed version of the source. The quick and dirty method is to just remove the difference in length between the source files from the offset. This could cause issues, but is very easy
//TODO: this is kind of a hack and should be removed once we can do some minimal parsing without full type checking
let mut position = position.to_roc_position(&latest_doc.line_info);
position.offset = (position.offset as i32 - len_diff - 1) as u32;
debug!("Completion offset: {:?}", position.offset);
let AnalyzedModule {
module_id,
interns,
subs,
declarations,
..
} = self.module()?;
// A '.' in the prefix means the user is completing a record/tuple
// field rather than a plain identifier.
let is_field_completion = symbol_prefix.contains('.');
if is_field_completion {
field_completion(
position,
symbol_prefix,
declarations,
interns,
// Scratch copy: completion machinery takes `&mut Subs`.
&mut subs.clone(),
module_id,
)
} else {
let completions = get_completion_items(
position,
symbol_prefix,
declarations,
&mut subs.clone(),
module_id,
interns,
);
Some(completions)
}
}
}

View file

@ -0,0 +1,443 @@
use log::{debug, trace, warn};
use roc_can::{
def::Def,
expr::{ClosureData, Declarations, Expr, WhenBranch},
pattern::{ListPatterns, Pattern, RecordDestruct, TupleDestruct},
traverse::{walk_decl, walk_def, walk_expr, DeclarationInfo, Visitor},
};
use roc_module::symbol::{Interns, ModuleId, Symbol};
use roc_region::all::{Loc, Position, Region};
use roc_types::subs::{Subs, Variable};
use tower_lsp::lsp_types::{CompletionItem, CompletionItemKind};
use super::utils::format_var_type;
pub struct CompletionVisitor<'a> {
position: Position,
found_decls: Vec<(Symbol, Variable)>,
pub interns: &'a Interns,
pub prefix: String,
}
impl Visitor for CompletionVisitor<'_> {
fn should_visit(&mut self, region: Region) -> bool {
region.contains_pos(self.position)
}
fn visit_expr(&mut self, expr: &Expr, region: Region, var: Variable) {
if region.contains_pos(self.position) {
let mut res = self.expression_defs(expr);
self.found_decls.append(&mut res);
walk_expr(self, expr, var);
}
}
fn visit_decl(&mut self, decl: DeclarationInfo<'_>) {
match decl {
DeclarationInfo::Value { loc_expr, .. }
| DeclarationInfo::Function {
loc_body: loc_expr, ..
}
| DeclarationInfo::Destructure { loc_expr, .. } => {
let res = self.decl_to_completion_item(&decl);
self.found_decls.extend(res);
if loc_expr.region.contains_pos(self.position) {
walk_decl(self, decl);
};
}
_ => {
walk_decl(self, decl);
}
}
}
fn visit_def(&mut self, def: &Def) {
let res = self.extract_defs(def);
self.found_decls.extend(res);
walk_def(self, def);
}
}
impl CompletionVisitor<'_> {
fn extract_defs(&mut self, def: &Def) -> Vec<(Symbol, Variable)> {
trace!("Completion begin");
def.pattern_vars
.iter()
.map(|(symbol, var)| (*symbol, *var))
.collect()
}
fn expression_defs(&self, expr: &Expr) -> Vec<(Symbol, Variable)> {
match expr {
Expr::When {
expr_var, branches, ..
} => self.when_is_expr(branches, expr_var),
Expr::Closure(ClosureData {
arguments,
loc_body,
..
}) => {
//if we are inside the closure, complete its vars
if loc_body.region.contains_pos(self.position) {
arguments
.iter()
.flat_map(|(var, _, pat)| self.patterns(&pat.value, var))
.collect()
} else {
vec![]
}
}
_ => vec![],
}
}
///Extract any variables made available by the branch of a when_is expression that contains `self.position`
fn when_is_expr(
&self,
branches: &[WhenBranch],
expr_var: &Variable,
) -> Vec<(Symbol, Variable)> {
branches
.iter()
.flat_map(
|WhenBranch {
patterns, value, ..
}| {
if value.region.contains_pos(self.position) {
patterns
.iter()
.flat_map(|pattern| self.patterns(&pattern.pattern.value, expr_var))
.collect()
} else {
vec![]
}
},
)
.collect()
}
fn record_destructure(&self, destructs: &[Loc<RecordDestruct>]) -> Vec<(Symbol, Variable)> {
destructs
.iter()
.flat_map(|a| match &a.value.typ {
roc_can::pattern::DestructType::Required
| roc_can::pattern::DestructType::Optional(_, _) => {
vec![(a.value.symbol, a.value.var)]
}
roc_can::pattern::DestructType::Guard(var, pat) => self.patterns(&pat.value, var),
})
.collect()
}
fn tuple_destructure(&self, destructs: &[Loc<TupleDestruct>]) -> Vec<(Symbol, Variable)> {
destructs
.iter()
.flat_map(|a| {
let (var, pattern) = &a.value.typ;
self.patterns(&pattern.value, var)
})
.collect()
}
fn list_pattern(&self, list_elems: &ListPatterns, var: &Variable) -> Vec<(Symbol, Variable)> {
list_elems
.patterns
.iter()
.flat_map(|a| self.patterns(&a.value, var))
.collect()
}
fn tag_pattern(&self, arguments: &[(Variable, Loc<Pattern>)]) -> Vec<(Symbol, Variable)> {
arguments
.iter()
.flat_map(|(var, pat)| self.patterns(&pat.value, var))
.collect()
}
fn as_pattern(
&self,
as_pat: &Pattern,
as_symbol: Symbol,
var: &Variable,
) -> Vec<(Symbol, Variable)> {
//Get the variables introduced within the pattern
let mut patterns = self.patterns(as_pat, var);
//Add the "as" that wraps the whole pattern
patterns.push((as_symbol, *var));
patterns
}
///Returns a list of symbols defined by this pattern.
///`pattern_var`: Variable type of the entire pattern. This will be returned if the pattern turns out to be an identifier
fn patterns(
&self,
pattern: &roc_can::pattern::Pattern,
pattern_var: &Variable,
) -> Vec<(Symbol, Variable)> {
match pattern {
roc_can::pattern::Pattern::Identifier(symbol) => {
if self.is_match(symbol) {
vec![(*symbol, *pattern_var)]
} else {
vec![]
}
}
Pattern::AppliedTag { arguments, .. } => self.tag_pattern(arguments),
Pattern::UnwrappedOpaque { argument, .. } => {
self.patterns(&argument.1.value, &argument.0)
}
Pattern::List {
elem_var, patterns, ..
} => self.list_pattern(patterns, elem_var),
roc_can::pattern::Pattern::As(pat, symbol) => {
self.as_pattern(&pat.value, *symbol, pattern_var)
}
roc_can::pattern::Pattern::RecordDestructure { destructs, .. } => {
self.record_destructure(destructs)
}
roc_can::pattern::Pattern::TupleDestructure { destructs, .. } => {
self.tuple_destructure(destructs)
}
_ => vec![],
}
}
fn is_match(&self, symbol: &Symbol) -> bool {
symbol.as_str(self.interns).starts_with(&self.prefix)
}
fn decl_to_completion_item(&self, decl: &DeclarationInfo) -> Vec<(Symbol, Variable)> {
match decl {
DeclarationInfo::Value {
expr_var, pattern, ..
} => self.patterns(pattern, expr_var),
DeclarationInfo::Function {
expr_var,
pattern,
function,
loc_body,
..
} => {
let mut out = vec![];
//Append the function declaration itself for recursive calls
out.extend(self.patterns(pattern, expr_var));
if loc_body.region.contains_pos(self.position) {
//also add the arguments if we are inside the function
let args = function
.value
.arguments
.iter()
.flat_map(|(var, _, pat)| self.patterns(&pat.value, var));
//We add in the pattern for the function declaration
out.extend(args);
trace!("Added function args to completion output =:{:#?}", out);
}
out
}
DeclarationInfo::Destructure {
loc_pattern,
expr_var,
..
} => self.patterns(&loc_pattern.value, expr_var),
DeclarationInfo::Expectation { .. } => vec![],
}
}
}
fn get_completions(
position: Position,
decls: &Declarations,
prefix: String,
interns: &Interns,
) -> Vec<(Symbol, Variable)> {
let mut visitor = CompletionVisitor {
position,
found_decls: Vec::new(),
interns,
prefix,
};
visitor.visit_decls(decls);
visitor.found_decls
}
fn make_completion_item(
subs: &mut Subs,
module_id: &ModuleId,
interns: &Interns,
str: String,
var: Variable,
) -> CompletionItem {
let type_str = format_var_type(var, subs, module_id, interns);
let typ = match subs.get(var).content {
roc_types::subs::Content::Structure(var) => match var {
roc_types::subs::FlatType::Apply(_, _) => CompletionItemKind::FUNCTION,
roc_types::subs::FlatType::Func(_, _, _) => CompletionItemKind::FUNCTION,
roc_types::subs::FlatType::EmptyTagUnion
| roc_types::subs::FlatType::TagUnion(_, _) => CompletionItemKind::ENUM,
_ => CompletionItemKind::VARIABLE,
},
a => {
debug!(
"No specific completionKind for variable type: {:?} defaulting to 'Variable'",
a
);
CompletionItemKind::VARIABLE
}
};
CompletionItem {
label: str,
detail: Some(type_str),
kind: Some(typ),
..Default::default()
}
}
/// Walks through declarations that would be accessible from the provided position adding them to a list of completion items until all accessible declarations have been fully explored
pub fn get_completion_items(
position: Position,
prefix: String,
decls: &Declarations,
subs: &mut Subs,
module_id: &ModuleId,
interns: &Interns,
) -> Vec<CompletionItem> {
let completions = get_completions(position, decls, prefix, interns);
make_completion_items(
subs,
module_id,
interns,
completions
.into_iter()
.map(|(symb, var)| (symb.as_str(interns).to_string(), var))
.collect(),
)
}
///Turns each (label, type variable) pair into a `CompletionItem`.
fn make_completion_items(
    subs: &mut Subs,
    module_id: &ModuleId,
    interns: &Interns,
    completions: Vec<(String, Variable)>,
) -> Vec<CompletionItem> {
    let mut items = Vec::with_capacity(completions.len());
    for (label, var) in completions {
        items.push(make_completion_item(subs, module_id, interns, label, var));
    }
    items
}
///Finds the types of and names of all the fields of a record
///`var` should be a `Variable` that you know is a record's type or else it will return an empty list
fn find_record_fields(var: Variable, subs: &mut Subs) -> Vec<(String, Variable)> {
    let content = subs.get(var);
    match content.content {
        roc_types::subs::Content::Structure(typ) => match typ {
            //Record: pair each field name with the variable of the field's type
            roc_types::subs::FlatType::Record(fields, ext) => {
                let field_types = fields.unsorted_iterator(subs, ext);
                match field_types {
                    Ok(field) => field
                        .map(|a| (a.0.clone().into(), a.1.into_inner()))
                        .collect::<Vec<_>>(),
                    Err(err) => {
                        warn!("Error getting record field types for completion: {:?}", err);
                        vec![]
                    }
                }
            }
            //Tuple: the element's index serves as its "field name"
            roc_types::subs::FlatType::Tuple(elems, ext) => {
                let elems = elems.unsorted_iterator(subs, ext);
                match elems {
                    Ok(elem) => elem.map(|(num, var)| (num.to_string(), var)).collect(),
                    Err(err) => {
                        warn!("Error getting tuple elems for completion: {:?}", err);
                        vec![]
                    }
                }
            }
            //Any other structure has no named fields to offer
            _ => {
                warn!(
                    "Trying to get field completion for a type that is not a record: {:?}",
                    typ
                );
                vec![]
            }
        },
        roc_types::subs::Content::Error => {
            //This is caused by typechecking our partially typed variable name, which confuses the typechecker about the type of the parent variable
            //TODO! ideally i could recover using some previous typecheck result that isn't broken
            warn!("Variable type of record was of type 'error', cannot access field",);
            vec![]
        }
        _ => {
            warn!(
                "Variable before field was unsupported type: {:?}",
                subs.dbg(var)
            );
            vec![]
        }
    }
}
struct FieldCompletion {
    //The leading variable name, e.g. "a" in "a.b.c.d"
    var: String,
    //The (possibly partial) final field being completed, e.g. "d"
    field: String,
    //Field accesses between the variable and the final field, e.g. ["b", "c"]
    middle_fields: Vec<String>,
}
///Splits a completion prefix for a field into its components
///E.g. a.b.c.d->{var:"a",middle_fields:["b","c"],field:"d"}
///Returns `None` when the prefix is not a field access (contains no '.').
fn get_field_completion_parts(symbol_prefix: &str) -> Option<FieldCompletion> {
    let mut parts = symbol_prefix.split('.').collect::<Vec<_>>();
    //A field access needs at least "var.field". Without this guard,
    //`parts.remove(0)` below would panic on a prefix with no '.' at all
    //(split yields one element, pop empties the vec, remove(0) panics).
    if parts.len() < 2 {
        return None;
    }
    let field = parts.pop().unwrap_or("").to_string();
    let var = parts.remove(0);
    //Now that we have the head and tail removed this is all the intermediate fields
    let middle_fields = parts.into_iter().map(ToString::to_string).collect();
    Some(FieldCompletion {
        var: var.to_string(),
        field,
        middle_fields,
    })
}
///Provides completions for a record-field access prefix such as "var.field1.fi".
///
///Resolves the completion for the leading variable name, then walks each
///intermediate field's type until reaching the record whose fields should be
///suggested; returns one item per field whose name starts with the partial field.
pub fn field_completion(
    position: Position,
    symbol_prefix: String,
    declarations: &Declarations,
    interns: &Interns,
    subs: &mut Subs,
    module_id: &ModuleId,
) -> Option<Vec<CompletionItem>> {
    let FieldCompletion {
        var,
        field,
        middle_fields,
    } = get_field_completion_parts(&symbol_prefix)?;
    debug!(
        "Getting record field completions: variable: {:?} field: {:?} middle: {:?} ",
        var, field, middle_fields
    );
    //Take the first accessible declaration matching the variable name
    //(`var` is already owned, so no extra `.to_string()` clone is needed)
    let completion = get_completions(position, declarations, var, interns)
        .into_iter()
        .map(|a| (a.0.as_str(interns).to_string(), a.1))
        .next()?;
    //If we have a type that has nested records we could have a completion prefix like: "var.field1.field2.fi"
    //If the document isn't fully typechecked we won't know what the type of field2 is for us to offer completions based on its fields
    //Instead we get the type of "var" and then the type of "field1" within var's type and then "field2" within field1's type etc etc, until we have the type of the record we are actually looking for field completions for.
    let completion_record = middle_fields.iter().fold(completion, |state, chain_field| {
        let fields_vars = find_record_fields(state.1, subs);
        fields_vars
            .into_iter()
            .find(|type_field| chain_field == &type_field.0)
            .unwrap_or(state)
    });
    let field_completions: Vec<_> = find_record_fields(completion_record.1, subs)
        .into_iter()
        //`starts_with` accepts &String directly; the old `&field.to_string()`
        //allocated a throwaway clone per field
        .filter(|(str, _)| str.starts_with(&field))
        .collect();
    let field_completions = make_completion_items(subs, module_id, interns, field_completions);
    Some(field_completions)
}

View file

@ -0,0 +1,24 @@
use roc_module::symbol::{Interns, ModuleId};
use roc_types::subs::{Subs, Variable};
///Pretty-prints the type of `var` as a string.
///Snapshots `subs` first and rolls back afterwards, so any changes made
///while naming/printing the type do not leak out of this function.
pub(super) fn format_var_type(
    var: Variable,
    subs: &mut Subs,
    module_id: &ModuleId,
    interns: &Interns,
) -> String {
    let saved = subs.snapshot();
    let printed = roc_types::pretty_print::name_and_print_var(
        var,
        subs,
        *module_id,
        interns,
        roc_types::pretty_print::DebugPrint::NOTHING,
    );
    subs.rollback_to(saved);
    printed
}
///Returns true for characters accepted in a Roc identifier here:
///ASCII letters, ASCII digits, or '.' (so field-access chains stay together).
pub(super) fn is_roc_identifier_char(char: &char) -> bool {
    char.is_ascii_alphanumeric() || *char == '.'
}

View file

@ -1,75 +1,219 @@
use std::collections::HashMap;
use log::{debug, info, trace};
use tower_lsp::lsp_types::{
Diagnostic, GotoDefinitionResponse, Hover, Position, SemanticTokensResult, TextEdit, Url,
use std::{
collections::HashMap,
sync::{Arc, OnceLock},
time::Duration,
};
use crate::analysis::{AnalyzedDocument, GlobalAnalysis};
use tokio::sync::{Mutex, MutexGuard};
pub(crate) enum DocumentChange {
Modified(Url, String),
Closed(Url),
use tower_lsp::lsp_types::{
CompletionResponse, Diagnostic, GotoDefinitionResponse, Hover, Position, SemanticTokensResult,
TextEdit, Url,
};
use crate::analysis::{AnalyzedDocument, DocInfo};
#[derive(Debug)]
pub(crate) struct DocumentPair {
    //Doc info for the most recently received version of this document
    info: DocInfo,
    //Set once analysis of the latest version completes; requests needing the
    //latest analysis wait on this slot being filled
    latest_document: OnceLock<Arc<AnalyzedDocument>>,
    //Most recent analysis that typechecked successfully, kept as a fallback
    last_good_document: Arc<AnalyzedDocument>,
}
impl DocumentPair {
    ///Creates a pair whose latest-analysis slot is already populated with `latest_doc`.
    pub(crate) fn new(
        latest_doc: Arc<AnalyzedDocument>,
        last_good_document: Arc<AnalyzedDocument>,
    ) -> Self {
        Self {
            info: latest_doc.doc_info.clone(),
            latest_document: OnceLock::from(latest_doc),
            last_good_document,
        }
    }
}
#[derive(Debug)]
pub(crate) struct RegistryConfig {
    //How long a request will wait for the latest document analysis before giving up
    pub(crate) latest_document_timeout: Duration,
}
impl Default for RegistryConfig {
    fn default() -> Self {
        Self {
            latest_document_timeout: Duration::from_millis(5000),
        }
    }
}
#[derive(Debug, Default)]
pub(crate) struct Registry {
documents: HashMap<Url, AnalyzedDocument>,
documents: Mutex<HashMap<Url, DocumentPair>>,
config: RegistryConfig,
}
impl Registry {
pub fn apply_change(&mut self, change: DocumentChange) {
match change {
DocumentChange::Modified(url, source) => {
let GlobalAnalysis { documents } = GlobalAnalysis::new(url, source);
pub(crate) fn new(config: RegistryConfig) -> Self {
Self {
documents: Default::default(),
config,
}
}
// Only replace the set of documents and all dependencies that were re-analyzed.
// Note that this is actually the opposite of what we want - in truth we want to
// re-evaluate all dependents!
for document in documents {
let url = document.url().clone();
self.documents.insert(url.clone(), document);
pub async fn get_latest_version(&self, url: &Url) -> Option<i32> {
self.documents.lock().await.get(url).map(|x| x.info.version)
}
fn update_document(
documents: &mut MutexGuard<'_, HashMap<Url, DocumentPair>>,
document: Arc<AnalyzedDocument>,
updating_url: &Url,
) {
if &document.doc_info.url == updating_url {
//Write the newly analysed document into the oncelock that any request requiring the latest document will be waiting on
if let Some(a) = documents.get_mut(updating_url) {
a.latest_document.set(document.clone()).unwrap()
}
}
let url = document.url().clone();
match documents.get_mut(&url) {
Some(old_doc) => {
//If the latest doc_info has a version higher than what we are setting we shouldn't overwrite the document, but we can update the last_good_document if the parse went well
if old_doc.info.version > document.doc_info.version {
if document.type_checked() {
*old_doc = DocumentPair {
info: old_doc.info.clone(),
latest_document: old_doc.latest_document.clone(),
last_good_document: document,
};
}
} else if document.type_checked() {
*old_doc = DocumentPair::new(document.clone(), document);
} else {
debug!(
"Document typechecking failed at version {:?}, not updating last_good_document",
&document.doc_info.version
);
*old_doc = DocumentPair::new(document, old_doc.last_good_document.clone());
}
}
DocumentChange::Closed(_url) => {
// Do nothing.
None => {
documents.insert(url.clone(), DocumentPair::new(document.clone(), document));
}
}
}
fn document_by_url(&mut self, url: &Url) -> Option<&mut AnalyzedDocument> {
self.documents.get_mut(url)
pub async fn apply_changes<'a>(&self, analysed_docs: Vec<AnalyzedDocument>, updating_url: Url) {
let mut documents = self.documents.lock().await;
debug!(
"Finished doc analysis for doc: {}",
updating_url.to_string()
);
for document in analysed_docs {
let document = Arc::new(document);
Registry::update_document(&mut documents, document, &updating_url);
}
}
pub fn diagnostics(&mut self, url: &Url) -> Vec<Diagnostic> {
let Some(document) = self.document_by_url(url) else {
pub async fn apply_doc_info_changes(&self, url: Url, info: DocInfo) {
let mut documents_lock = self.documents.lock().await;
let doc = documents_lock.get_mut(&url);
match doc {
Some(a) => {
debug!(
"Set the docInfo for {:?} to version:{:?}",
url.as_str(),
info.version
);
*a = DocumentPair {
info,
last_good_document: a.last_good_document.clone(),
latest_document: OnceLock::new(),
};
}
None => debug!("So existing docinfo for {:?} ", url.as_str()),
}
}
    ///Returns a clone of the doc info currently stored for `url`, if any.
    async fn document_info_by_url(&self, url: &Url) -> Option<DocInfo> {
        self.documents.lock().await.get(url).map(|a| a.info.clone())
    }
    ///Tries to get the latest document from analysis.
    ///Gives up and returns none once `config.latest_document_timeout`
    ///(default 5000ms) elapses.
    async fn latest_document_by_url(&self, url: &Url) -> Option<Arc<AnalyzedDocument>> {
        tokio::time::timeout(self.config.latest_document_timeout, async {
            //TODO: This should really be a condvar that is triggered by the latest being ready, this will do for now though
            loop {
                let docs = self.documents.lock().await;
                if let Some(a) = docs.get(url) {
                    if let Some(a) = a.latest_document.get() {
                        return a.clone();
                    }
                }
                //Release the lock before yielding so the writer can fill the slot
                drop(docs);
                tokio::task::yield_now().await;
            }
        })
        .await
        .ok()
    }
pub async fn diagnostics(&self, url: &Url) -> Vec<Diagnostic> {
let Some(document) = self.latest_document_by_url(url).await else {
return vec![];
};
document.diagnostics()
}
pub fn hover(&mut self, url: &Url, position: Position) -> Option<Hover> {
self.document_by_url(url)?.hover(position)
pub async fn hover(&self, url: &Url, position: Position) -> Option<Hover> {
self.latest_document_by_url(url).await?.hover(position)
}
pub fn goto_definition(
&mut self,
pub async fn goto_definition(
&self,
url: &Url,
position: Position,
) -> Option<GotoDefinitionResponse> {
let document = self.document_by_url(url)?;
let document = self.latest_document_by_url(url).await?;
let symbol = document.symbol_at(position)?;
let def_document_url = document.module_url(symbol.module_id())?;
let def_document = self.document_by_url(&def_document_url)?;
let def_document = self.latest_document_by_url(&def_document_url).await?;
def_document.definition(symbol)
}
pub fn formatting(&mut self, url: &Url) -> Option<Vec<TextEdit>> {
let document = self.document_by_url(url)?;
pub async fn formatting(&self, url: &Url) -> Option<Vec<TextEdit>> {
let document = self.document_info_by_url(url).await?;
document.format()
}
pub fn semantic_tokens(&mut self, url: &Url) -> Option<SemanticTokensResult> {
let document = self.document_by_url(url)?;
pub async fn semantic_tokens(&self, url: &Url) -> Option<SemanticTokensResult> {
let document = self.document_info_by_url(url).await?;
document.semantic_tokens()
}
    ///Computes completion items for `url` at `position`.
    ///Completions come from the last document version that typechecked, while
    ///the latest doc info is passed along so the completion logic can account
    ///for the newest source text.
    pub async fn completion_items(
        &self,
        url: &Url,
        position: Position,
    ) -> Option<CompletionResponse> {
        trace!("Starting completion ");
        let lock = self.documents.lock().await;
        let pair = lock.get(url)?;
        let latest_doc_info = &pair.info;
        info!(
            "Using document version:{:?} for completion ",
            latest_doc_info.version
        );
        let completions = pair
            .last_good_document
            .completion_items(position, latest_doc_info)?;
        Some(CompletionResponse::Array(completions))
    }
}

View file

@ -1,32 +1,65 @@
use analysis::HIGHLIGHT_TOKENS_LEGEND;
use parking_lot::{Mutex, MutexGuard};
use registry::{DocumentChange, Registry};
use log::{debug, trace};
use registry::{Registry, RegistryConfig};
use std::future::Future;
use std::time::Duration;
use tower_lsp::jsonrpc::Result;
use tower_lsp::lsp_types::*;
use tower_lsp::{Client, LanguageServer, LspService, Server};
use crate::analysis::{global_analysis, DocInfo};
mod analysis;
mod convert;
mod registry;
#[derive(Debug)]
struct RocLs {
struct RocServer {
pub state: RocServerState,
client: Client,
registry: Mutex<Registry>,
}
impl std::panic::RefUnwindSafe for RocLs {}
struct RocServerConfig {
pub debounce_ms: Duration,
}
impl RocLs {
pub fn new(client: Client) -> Self {
impl Default for RocServerConfig {
fn default() -> Self {
Self {
client,
registry: Mutex::new(Registry::default()),
debounce_ms: Duration::from_millis(100),
}
}
}
fn registry(&self) -> MutexGuard<Registry> {
self.registry.lock()
///This exists so we can test most of RocLs without anything LSP related
struct RocServerState {
registry: Registry,
config: RocServerConfig,
}
impl std::panic::RefUnwindSafe for RocServer {}
fn read_env_num(name: &str) -> Option<u64> {
std::env::var(name)
.ok()
.and_then(|a| str::parse::<u64>(&a).ok())
}
impl RocServer {
pub fn new(client: Client) -> Self {
let registry_config = RegistryConfig {
latest_document_timeout: Duration::from_millis(
read_env_num("ROCLS_LATEST_DOC_TIMEOUT_MS").unwrap_or(5000),
),
};
let config = RocServerConfig {
debounce_ms: Duration::from_millis(read_env_num("ROCLS_DEBOUNCE_MS").unwrap_or(100)),
};
Self {
state: RocServerState::new(config, Registry::new(registry_config)),
client,
}
}
pub fn capabilities() -> ServerCapabilities {
@ -61,39 +94,120 @@ impl RocLs {
range: None,
full: Some(SemanticTokensFullOptions::Bool(true)),
});
let completion_provider = CompletionOptions {
resolve_provider: Some(false),
trigger_characters: Some(vec![".".to_string()]),
all_commit_characters: None,
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
};
ServerCapabilities {
text_document_sync: Some(text_document_sync),
hover_provider: Some(hover_provider),
definition_provider: Some(OneOf::Right(definition_provider)),
document_formatting_provider: Some(OneOf::Right(document_formatting_provider)),
semantic_tokens_provider: Some(semantic_tokens_provider),
completion_provider: Some(completion_provider),
..ServerCapabilities::default()
}
}
/// Records a document content change.
async fn change(&self, fi: Url, text: String, version: i32) {
self.registry()
.apply_change(DocumentChange::Modified(fi.clone(), text));
let updating_result = self.state.change(&fi, text, version).await;
let diagnostics = match std::panic::catch_unwind(|| self.registry().diagnostics(&fi)) {
Ok(ds) => ds,
Err(_) => return,
};
//The analysis task can be cancelled by another change coming in which will update the watched variable
if let Err(e) = updating_result {
debug!("Cancelled change. Reason:{:?}", e);
return;
}
debug!("Applied_changes getting and returning diagnostics");
let diagnostics = self.state.registry.diagnostics(&fi).await;
self.client
.publish_diagnostics(fi, diagnostics, Some(version))
.await;
}
}
async fn close(&self, fi: Url) {
self.registry().apply_change(DocumentChange::Closed(fi));
impl RocServerState {
pub fn new(config: RocServerConfig, registry: Registry) -> RocServerState {
Self { config, registry }
}
async fn registry(&self) -> &Registry {
&self.registry
}
async fn close(&self, _fi: Url) {}
pub async fn change(
&self,
fi: &Url,
text: String,
version: i32,
) -> std::result::Result<(), String> {
debug!("V{:?}:starting change", version);
let doc_info = DocInfo::new(fi.clone(), text, version);
self.registry
.apply_doc_info_changes(fi.clone(), doc_info.clone())
.await;
debug!(
"V{:?}:finished updating docinfo, starting analysis ",
version
);
let inner_ref = self;
let updating_result = async {
//This reduces wasted computation by waiting to allow a new change to come in and update the version before we check, but does delay the final analysis. Ideally this would be replaced with cancelling the analysis when a new one comes in.
tokio::time::sleep(self.config.debounce_ms).await;
let is_latest = inner_ref
.registry
.get_latest_version(fi)
.await
.map(|latest| latest == version)
.unwrap_or(true);
if !is_latest {
return Err("Not latest version skipping analysis".to_string());
}
let results = match tokio::task::spawn_blocking(|| global_analysis(doc_info)).await {
Err(e) => return Err(format!("Document analysis failed. reason:{:?}", e)),
Ok(a) => a,
};
let latest_version = inner_ref.registry.get_latest_version(fi).await;
//if this version is not the latest another change must have come in and this analysis is useless
//if there is no older version we can just proceed with the update
if let Some(latest_version) = latest_version {
if latest_version != version {
return Err(format!(
"Version {0} doesn't match latest: {1} discarding analysis",
version, latest_version
));
}
}
debug!(
"V{:?}:finished document analysis applying changes ",
version
);
inner_ref.registry.apply_changes(results, fi.clone()).await;
Ok(())
}
.await;
debug!("V{:?}:finished document change process", version);
updating_result
}
}
#[tower_lsp::async_trait]
impl LanguageServer for RocLs {
impl LanguageServer for RocServer {
async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
Ok(InitializeResult {
capabilities: Self::capabilities(),
@ -127,7 +241,7 @@ impl LanguageServer for RocLs {
async fn did_close(&self, params: DidCloseTextDocumentParams) {
let TextDocumentIdentifier { uri } = params.text_document;
self.close(uri).await;
self.state.close(uri).await;
}
async fn shutdown(&self) -> Result<()> {
@ -144,7 +258,13 @@ impl LanguageServer for RocLs {
work_done_progress_params: _,
} = params;
panic_wrapper(|| self.registry().hover(&text_document.uri, position))
panic_wrapper_async(|| async {
self.state
.registry
.hover(&text_document.uri, position)
.await
})
.await
}
async fn goto_definition(
@ -161,10 +281,14 @@ impl LanguageServer for RocLs {
partial_result_params: _,
} = params;
panic_wrapper(|| {
self.registry()
panic_wrapper_async(|| async {
self.state
.registry()
.await
.goto_definition(&text_document.uri, position)
.await
})
.await
}
async fn formatting(&self, params: DocumentFormattingParams) -> Result<Option<Vec<TextEdit>>> {
@ -174,7 +298,14 @@ impl LanguageServer for RocLs {
work_done_progress_params: _,
} = params;
panic_wrapper(|| self.registry().formatting(&text_document.uri))
panic_wrapper_async(|| async {
self.state
.registry()
.await
.formatting(&text_document.uri)
.await
})
.await
}
async fn semantic_tokens_full(
@ -187,22 +318,184 @@ impl LanguageServer for RocLs {
partial_result_params: _,
} = params;
panic_wrapper(|| self.registry().semantic_tokens(&text_document.uri))
panic_wrapper_async(|| async {
self.state
.registry()
.await
.semantic_tokens(&text_document.uri)
.await
})
.await
}
async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
let doc = params.text_document_position;
trace!("Got completion request");
panic_wrapper_async(|| async {
self.state
.registry
.completion_items(&doc.text_document.uri, doc.position)
.await
})
.await
}
}
fn panic_wrapper<T>(f: impl FnOnce() -> Option<T> + std::panic::UnwindSafe) -> Result<Option<T>> {
async fn panic_wrapper_async<Fut, T>(
f: impl FnOnce() -> Fut + std::panic::UnwindSafe,
) -> Result<Option<T>>
where
Fut: Future<Output = Option<T>>,
{
match std::panic::catch_unwind(f) {
Ok(r) => Ok(r),
Ok(r) => Ok(r.await),
Err(_) => Err(tower_lsp::jsonrpc::Error::internal_error()),
}
}
#[tokio::main]
async fn main() {
env_logger::Builder::from_env("ROCLS_LOG").init();
let stdin = tokio::io::stdin();
let stdout = tokio::io::stdout();
let (service, socket) = LspService::new(RocLs::new);
let (service, socket) = LspService::new(RocServer::new);
Server::new(stdin, stdout, socket).serve(service).await;
}
#[cfg(test)]
mod tests {
use std::sync::Once;
use expect_test::expect;
use indoc::indoc;
use log::info;
use super::*;
    ///Flattens either variant of a completion response into just its item labels.
    fn completion_resp_to_labels(resp: CompletionResponse) -> Vec<String> {
        match resp {
            CompletionResponse::Array(list) => list.into_iter(),
            CompletionResponse::List(list) => list.items.into_iter(),
        }
        .map(|item| item.label)
        .collect::<Vec<_>>()
    }
///Gets completion and returns only the label for each completion
async fn get_completion_labels(
reg: &Registry,
url: &Url,
position: Position,
) -> Option<Vec<String>> {
reg.completion_items(url, position)
.await
.map(completion_resp_to_labels)
}
const DOC_LIT: &str = indoc! {r#"
interface Test
exposes []
imports []
"#};
static INIT: Once = Once::new();
async fn test_setup(doc: String) -> (RocServerState, Url) {
INIT.call_once(|| {
env_logger::builder()
.is_test(true)
.filter_level(log::LevelFilter::Trace)
.init();
});
info!("Doc is:\n{0}", doc);
let url = Url::parse("file:/Test.roc").unwrap();
let inner = RocServerState::new(RocServerConfig::default(), Registry::default());
//setup the file
inner.change(&url, doc, 0).await.unwrap();
(inner, url)
}
///Test that completion works properly when we apply an "as" pattern to an identifier
#[tokio::test]
async fn test_completion_as_identifier() {
let suffix = DOC_LIT.to_string()
+ indoc! {r#"
main =
when a is
inn as outer ->
"#};
let (inner, url) = test_setup(suffix.clone()).await;
let position = Position::new(6, 7);
let reg = &inner.registry;
let change = suffix.clone() + "o";
inner.change(&url, change, 1).await.unwrap();
let comp1 = get_completion_labels(reg, &url, position).await;
let c = suffix.clone() + "i";
inner.change(&url, c, 2).await.unwrap();
let comp2 = get_completion_labels(reg, &url, position).await;
let actual = [comp1, comp2];
expect![[r#"
[
Some(
[
"outer",
],
),
Some(
[
"inn",
"outer",
],
),
]
"#]]
.assert_debug_eq(&actual)
}
///Test that completion works properly when we apply an "as" pattern to a record
#[tokio::test]
async fn test_completion_as_record() {
let doc = DOC_LIT.to_string()
+ indoc! {r#"
main =
when a is
{one,two} as outer ->
"#};
let (inner, url) = test_setup(doc.clone()).await;
let position = Position::new(6, 7);
let reg = &inner.registry;
let change = doc.clone() + "o";
inner.change(&url, change, 1).await.unwrap();
let comp1 = get_completion_labels(reg, &url, position).await;
let c = doc.clone() + "t";
inner.change(&url, c, 2).await.unwrap();
let comp2 = get_completion_labels(reg, &url, position).await;
let actual = [comp1, comp2];
expect![[r#"
[
Some(
[
"one",
"two",
"outer",
],
),
Some(
[
"one",
"two",
"outer",
],
),
]
"#]]
.assert_debug_eq(&actual);
}
}

View file

@ -52,7 +52,7 @@ pub fn compile_to_mono<'a, 'i, I: Iterator<Item = &'i str>>(
target_info: TargetInfo,
palette: Palette,
) -> (Option<MonomorphizedModule<'a>>, Problems) {
let filename = PathBuf::from("");
let filename = PathBuf::from("replfile.roc");
let src_dir = PathBuf::from("fake/test/path");
let (bytes_before_expr, module_src) = promote_expr_to_module(arena, defs, expr);
let loaded = roc_load::load_and_monomorphize_from_str(

View file

@ -166,7 +166,7 @@ impl<'b> Report<'b> {
if self.title.is_empty() {
self.doc
} else {
let header = if self.filename == PathBuf::from("") {
let header = if self.filename == PathBuf::from("replfile.roc") {
crate::report::pretty_header(&self.title)
} else {
crate::report::pretty_header_with_path(&self.title, &self.filename)

View file

@ -1370,8 +1370,28 @@ pluralize = \singular, plural, count ->
This `expect` will fail if you call `pluralize` passing a count of 0.
Note that inline `expect`s do not halt the program! They are designed to inform, not to affect control flow. In fact, if you do `roc build`, they are not even included in the final binary.
So you'll want to use `roc dev` or `roc test` to get the output for `expect`.
Note that inline `expect`s do not halt the program! They are designed to inform, not to affect control flow. Different `roc` commands will also handle `expect`s differently:
- `roc build` discards all `expect`s for optimal runtime performance.
- `roc dev` only runs inline `expect`s that are encountered during normal execution of the program.
- `roc test` runs top level `expect`s and inline `expect`s that are encountered because of the running of top level `expect`s.
Let's clear up any confusion with an example:
```roc
main =
expect 1 == 2
Stdout.line "Hello, World!"
double = \num ->
expect num > -1
num * 2
expect double 0 == 0
```
- `roc build` will run `main`, ignore `expect 1 == 2` and just print `Hello, World!`.
- `roc dev` will run `main`, tell you `expect 1 == 2` failed but will still print `Hello, World!`.
- `roc test` will run `expect double 0 == 0` followed by `expect num > -1` and will print how many top level expects passed: `0 failed and 1 passed in 100 ms.`.
## [Modules](#modules) {#modules}