Mirror of https://github.com/erg-lang/erg.git (synced 2025-08-04 10:49:54 +00:00)
feat(els): implement file cache

This change increases the efficiency of file I/O and improves the accuracy of completions.

parent: 947c3c6794
commit: 4571ced013

11 changed files with 225 additions and 70 deletions
@@ -32,7 +32,10 @@ impl<Checker: BuildRunnable> Server<Checker> {
             return Ok(vec![]);
         };
         if let Expr::Def(def) = expr {
-            let mut range = util::loc_to_range(def.loc()).unwrap();
+            let Some(mut range) = util::loc_to_range(def.loc()) else {
+                Self::send_log("range not found")?;
+                return Ok(vec![]);
+            };
             let next = lsp_types::Range {
                 start: lsp_types::Position {
                     line: range.end.line,

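The hunk above swaps an `unwrap()` for Rust's `let ... else` early-return pattern, so a missing range is logged and skipped instead of panicking the server. A minimal standalone sketch of the same pattern (the names here are illustrative, not taken from the ELS code):

    fn loc_to_range(found: bool) -> Option<(u32, u32)> {
        // stand-in for util::loc_to_range: may fail to produce a range
        found.then_some((3, 7))
    }

    fn handler(found: bool) -> Result<Vec<u32>, String> {
        // `let ... else` binds on success and diverges otherwise,
        // avoiding the panic that `.unwrap()` causes on `None`
        let Some((start, end)) = loc_to_range(found) else {
            eprintln!("range not found");
            return Ok(vec![]);
        };
        Ok(vec![start, end])
    }

    fn main() {
        println!("{:?}", handler(true));  // Ok([3, 7])
        println!("{:?}", handler(false)); // Ok([])
    }
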
@@ -5,6 +5,7 @@ use serde_json::Value;
 use erg_common::traits::Locational;

 use erg_compiler::artifact::BuildRunnable;
+use erg_compiler::erg_parser::token::TokenKind;
 use erg_compiler::ty::Type;
 use erg_compiler::AccessKind;

@@ -31,7 +32,17 @@ impl<Checker: BuildRunnable> Server<Checker> {
         Self::send_log(format!("AccessKind: {acc_kind:?}"))?;
         let mut result: Vec<CompletionItem> = vec![];
         let contexts = if acc_kind.is_local() {
-            self.get_local_ctx(&uri, pos)
+            let prev_token = self.file_cache.get_token_relatively(&uri, pos, -1)?;
+            if prev_token
+                .as_ref()
+                .map(|t| t.kind == TokenKind::Dot)
+                .unwrap_or(false)
+            {
+                let dot_pos = util::loc_to_pos(prev_token.unwrap().loc()).unwrap();
+                self.get_receiver_ctxs(&uri, dot_pos)?
+            } else {
+                self.get_local_ctx(&uri, pos)
+            }
         } else {
             self.get_receiver_ctxs(&uri, pos)?
         };

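In the completion hunk above, the server now looks one token back from the cursor via the file cache; if that token is a dot, it completes attributes of the receiver's context, otherwise it falls back to local-scope names. A rough, self-contained sketch of that decision (simplified token kinds, not the actual ELS API):

    #[derive(Debug, PartialEq)]
    enum TokenKind { Dot, Symbol }

    // Choose the completion source from the token just before the cursor.
    fn completion_source(prev_token: Option<&TokenKind>) -> &'static str {
        if prev_token.map(|t| *t == TokenKind::Dot).unwrap_or(false) {
            // `receiver.<cursor>` => complete the receiver's attributes/methods
            "receiver contexts"
        } else {
            // bare `<cursor>` => complete names visible in the local scope
            "local contexts"
        }
    }

    fn main() {
        assert_eq!(completion_source(Some(&TokenKind::Dot)), "receiver contexts");
        assert_eq!(completion_source(Some(&TokenKind::Symbol)), "local contexts");
        assert_eq!(completion_source(None), "local contexts");
        println!("ok");
    }
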
@@ -82,4 +93,10 @@ impl<Checker: BuildRunnable> Server<Checker> {
             &json!({ "jsonrpc": "2.0", "id": msg["id"].as_i64().unwrap(), "result": result }),
         )
     }
+
+    pub(crate) fn resolve_completion(&self, msg: &Value) -> ELSResult<()> {
+        Self::send_log(format!("completion resolve requested: {msg}"))?;
+        let item = CompletionItem::deserialize(&msg["params"])?;
+        Self::send(&json!({ "jsonrpc": "2.0", "id": msg["id"].as_i64().unwrap(), "result": item }))
+    }
 }

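The new `resolve_completion` handler simply deserializes the item from the request and sends it back unchanged, which pairs with the `resolve_provider: Some(true)` capability added further down. A small sketch of the same serde round trip, assuming the `lsp_types`, `serde`, and `serde_json` crates are available:

    use lsp_types::CompletionItem;
    use serde::Deserialize;
    use serde_json::json;

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // Minimal completionItem/resolve payload; only `label` is required.
        let msg = json!({ "id": 1, "params": { "label": "print!" } });
        // Same call shape as in the handler above.
        let item = CompletionItem::deserialize(&msg["params"])?;
        // The handler replies with the item as-is.
        let reply = json!({ "jsonrpc": "2.0", "id": msg["id"].as_i64().unwrap(), "result": item });
        println!("{reply}");
        Ok(())
    }
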
@@ -33,7 +33,7 @@ impl<Checker: BuildRunnable> Server<Checker> {
         let params = GotoDefinitionParams::deserialize(&msg["params"])?;
         let uri = util::normalize_url(params.text_document_position_params.text_document.uri);
         let pos = params.text_document_position_params.position;
-        let result = if let Some(token) = util::get_token(uri.clone(), pos)? {
+        let result = if let Some(token) = self.file_cache.get_token(&uri, pos)? {
             if let Some(vi) = self.get_definition(&uri, &token)? {
                 match (vi.def_loc.module, util::loc_to_range(vi.def_loc.loc)) {
                     (Some(path), Some(range)) => {

@@ -1,18 +1,137 @@
-use std::fs::Metadata;
+use std::fs::{metadata, Metadata};

-use lsp_types::Url;
+use lsp_types::{DidChangeTextDocumentParams, Position, Url};

 use erg_common::dict::Dict;
-use erg_compiler::erg_parser::token::TokenStream;
+use erg_common::shared::Shared;
+use erg_common::traits::DequeStream;
+use erg_compiler::erg_parser::lex::Lexer;
+use erg_compiler::erg_parser::token::{Token, TokenKind, TokenStream};
+
+use crate::server::ELSResult;
+use crate::util;

 #[derive(Debug, Clone)]
 pub struct FileCacheEntry {
     pub code: String,
     pub metadata: Metadata,
-    pub token_stream: TokenStream,
+    pub token_stream: Option<TokenStream>,
 }

 #[derive(Debug, Clone)]
 pub struct FileCache {
-    pub files: Dict<Url, FileCacheEntry>,
+    pub files: Shared<Dict<Url, FileCacheEntry>>,
 }
+
+impl FileCache {
+    pub fn new() -> Self {
+        Self {
+            files: Shared::new(Dict::new()),
+        }
+    }
+
+    pub fn get(&self, uri: &Url) -> ELSResult<&FileCacheEntry> {
+        let Some(entry) = unsafe { self.files.as_ref() }.get(uri) else {
+            let code = util::get_code_from_uri(uri)?;
+            self.update(uri, code);
+            let entry = unsafe { self.files.as_ref() }.get(uri).ok_or("not found")?;
+            return Ok(entry);
+        };
+        let last_modified = entry.metadata.modified().unwrap();
+        let current_modified = metadata(uri.to_file_path().unwrap())
+            .unwrap()
+            .modified()
+            .unwrap();
+        if last_modified != current_modified {
+            let code = util::get_code_from_uri(uri)?;
+            self.update(uri, code);
+            unsafe { self.files.as_ref() }
+                .get(uri)
+                .ok_or("not found".into())
+        } else {
+            let entry = unsafe { self.files.as_ref() }.get(uri).ok_or("not found")?;
+            Ok(entry)
+        }
+    }
+
+    pub fn get_token_index(&self, uri: &Url, pos: Position) -> ELSResult<Option<usize>> {
+        let tokens = self.get(uri)?.token_stream.as_ref().ok_or("lex error")?;
+        for (i, tok) in tokens.iter().enumerate() {
+            if util::pos_in_loc(tok, pos) {
+                return Ok(Some(i));
+            }
+        }
+        Ok(None)
+    }
+
+    pub fn get_token(&self, uri: &Url, pos: Position) -> ELSResult<Option<Token>> {
+        let tokens = self.get(uri)?.token_stream.as_ref().ok_or("lex error")?;
+        for tok in tokens.iter() {
+            if util::pos_in_loc(tok, pos) {
+                return Ok(Some(tok.clone()));
+            }
+        }
+        Ok(None)
+    }
+
+    pub fn get_token_relatively(
+        &self,
+        uri: &Url,
+        pos: Position,
+        offset: isize,
+    ) -> ELSResult<Option<Token>> {
+        let Some(index) = self.get_token_index(uri, pos)? else {
+            let tokens = self.get(uri)?.token_stream.as_ref().ok_or("lex error")?;
+            for token in tokens.iter().rev() {
+                if !token.is(TokenKind::EOF) && !token.is(TokenKind::Dedent) && !token.is(TokenKind::Newline) {
+                    return Ok(Some(token.clone()));
+                }
+            }
+            return Ok(None);
+        };
+        let index = (index as isize + offset) as usize;
+        let tokens = self.get(uri)?.token_stream.as_ref().ok_or("lex error")?;
+        if index < tokens.len() {
+            Ok(Some(tokens[index].clone()))
+        } else {
+            Ok(None)
+        }
+    }
+
+    pub(crate) fn update(&self, uri: &Url, code: String) {
+        let metadata = metadata(uri.to_file_path().unwrap()).unwrap();
+        let token_stream = Lexer::from_str(code.clone()).lex().ok();
+        self.files.borrow_mut().insert(
+            uri.clone(),
+            FileCacheEntry {
+                code,
+                metadata,
+                token_stream,
+            },
+        );
+    }
+
+    pub(crate) fn incremental_update(&self, params: DidChangeTextDocumentParams) {
+        let uri = util::normalize_url(params.text_document.uri);
+        let Some(entry) = unsafe { self.files.as_mut() }.get_mut(&uri) else {
+            return;
+        };
+        let metadata = metadata(uri.to_file_path().unwrap()).unwrap();
+        let mut code = entry.code.clone();
+        for change in params.content_changes {
+            let range = change.range.unwrap();
+            let start = util::pos_to_index(&code, range.start);
+            let end = util::pos_to_index(&code, range.end);
+            code.replace_range(start..end, &change.text);
+        }
+        let token_stream = Lexer::from_str(code.clone()).lex().ok();
+        entry.code = code;
+        entry.metadata = metadata;
+        entry.token_stream = token_stream;
+    }
+
+    #[allow(unused)]
+    pub fn remove(&mut self, uri: &Url) {
+        self.files.borrow_mut().remove(uri);
+    }
+}

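`FileCache::get` above rereads a file only when its recorded modification time no longer matches what is on disk; otherwise the cached text (and token stream) are reused. A self-contained sketch of that invalidation scheme using only the standard library (independent of the ELS types):

    use std::collections::HashMap;
    use std::fs;
    use std::path::PathBuf;
    use std::time::SystemTime;

    struct Entry {
        code: String,
        modified: SystemTime,
    }

    struct Cache {
        files: HashMap<PathBuf, Entry>,
    }

    impl Cache {
        // Return cached text, re-reading the file only if it changed on disk.
        fn get(&mut self, path: &PathBuf) -> std::io::Result<&str> {
            let current = fs::metadata(path)?.modified()?;
            let stale = self
                .files
                .get(path)
                .map(|e| e.modified != current)
                .unwrap_or(true);
            if stale {
                let code = fs::read_to_string(path)?;
                self.files.insert(path.clone(), Entry { code, modified: current });
            }
            Ok(self.files[path].code.as_str())
        }
    }

    fn main() -> std::io::Result<()> {
        let path = PathBuf::from("Cargo.toml"); // any existing file works here
        let mut cache = Cache { files: HashMap::new() };
        println!("{} bytes", cache.get(&path)?.len()); // miss: reads from disk
        println!("{} bytes", cache.get(&path)?.len()); // hit: served from memory
        Ok(())
    }
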
@@ -48,11 +48,15 @@ impl<Checker: BuildRunnable> Server<Checker> {
         let uri = util::normalize_url(params.text_document_position_params.text_document.uri);
         let pos = params.text_document_position_params.position;
         let mut contents = vec![];
-        let opt_tok = util::get_token(uri.clone(), pos)?;
+        let opt_tok = self.file_cache.get_token(&uri, pos)?;
         let opt_token = if let Some(token) = opt_tok {
             match token.category() {
-                TokenCategory::StrInterpRight => util::get_token_relatively(uri.clone(), pos, -1)?,
-                TokenCategory::StrInterpLeft => util::get_token_relatively(uri.clone(), pos, 1)?,
+                TokenCategory::StrInterpRight => {
+                    self.file_cache.get_token_relatively(&uri, pos, -1)?
+                }
+                TokenCategory::StrInterpLeft => {
+                    self.file_cache.get_token_relatively(&uri, pos, 1)?
+                }
                 // TODO: StrInterpMid
                 _ => Some(token),
             }

@@ -10,11 +10,11 @@ use crate::server::{ELSResult, Server};
 use crate::util;

 impl<Checker: BuildRunnable> Server<Checker> {
-    pub(crate) fn show_references(&self, msg: &Value) -> ELSResult<()> {
+    pub(crate) fn show_references(&mut self, msg: &Value) -> ELSResult<()> {
         let params = ReferenceParams::deserialize(&msg["params"])?;
         let uri = util::normalize_url(params.text_document_position.text_document.uri);
         let pos = params.text_document_position.position;
-        if let Some(tok) = util::get_token(uri.clone(), pos)? {
+        if let Some(tok) = self.file_cache.get_token(&uri, pos)? {
            // Self::send_log(format!("token: {tok}"))?;
            if let Some(visitor) = self.get_visitor(&uri) {
                if let Some(vi) = visitor.get_info(&tok) {

@@ -21,7 +21,7 @@ impl<Checker: BuildRunnable> Server<Checker> {
         Self::send_log(format!("rename request: {params:?}"))?;
         let uri = util::normalize_url(params.text_document_position.text_document.uri);
         let pos = params.text_document_position.position;
-        if let Some(tok) = util::get_token(uri.clone(), pos)? {
+        if let Some(tok) = self.file_cache.get_token(&uri, pos)? {
            // Self::send_log(format!("token: {tok}"))?;
            if let Some(visitor) = self.get_visitor(&uri) {
                if let Some(vi) = visitor.get_info(&tok) {

@@ -22,12 +22,14 @@ use erg_compiler::module::{SharedCompilerResource, SharedModuleIndex};

 use lsp_types::{
     ClientCapabilities, CodeActionKind, CodeActionOptions, CodeActionProviderCapability,
-    CompletionOptions, ExecuteCommandOptions, HoverProviderCapability, InitializeResult, OneOf,
-    Position, SemanticTokenType, SemanticTokensFullOptions, SemanticTokensLegend,
-    SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
-    TextDocumentSyncCapability, TextDocumentSyncKind, Url, WorkDoneProgressOptions,
+    CompletionOptions, DidChangeTextDocumentParams, ExecuteCommandOptions, HoverProviderCapability,
+    InitializeResult, OneOf, Position, SemanticTokenType, SemanticTokensFullOptions,
+    SemanticTokensLegend, SemanticTokensOptions, SemanticTokensServerCapabilities,
+    ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, Url,
+    WorkDoneProgressOptions,
 };

+use crate::file_cache::FileCache;
 use crate::hir_visitor::HIRVisitor;
 use crate::message::{ErrorMessage, LogMessage};
 use crate::util;

@@ -116,6 +118,7 @@ pub struct Server<Checker: BuildRunnable = HIRBuilder> {
     pub(crate) home: PathBuf,
     pub(crate) erg_path: PathBuf,
     pub(crate) client_capas: ClientCapabilities,
+    pub(crate) file_cache: FileCache,
     pub(crate) modules: Dict<Url, ModuleContext>,
     pub(crate) hirs: Dict<Url, Option<HIR>>,
     _checker: std::marker::PhantomData<Checker>,

@@ -128,6 +131,7 @@ impl<Checker: BuildRunnable> Server<Checker> {
             home: normalize_path(std::env::current_dir().unwrap()),
             erg_path: erg_path(), // already normalized
             client_capas: ClientCapabilities::default(),
+            file_cache: FileCache::new(),
             modules: Dict::new(),
             hirs: Dict::new(),
             _checker: std::marker::PhantomData,

@@ -161,11 +165,13 @@ impl<Checker: BuildRunnable> Server<Checker> {
         }
         let mut result = InitializeResult::default();
         result.capabilities = ServerCapabilities::default();
-        result.capabilities.text_document_sync =
-            Some(TextDocumentSyncCapability::from(TextDocumentSyncKind::FULL));
+        result.capabilities.text_document_sync = Some(TextDocumentSyncCapability::from(
+            TextDocumentSyncKind::INCREMENTAL,
+        ));
         let mut comp_options = CompletionOptions::default();
         comp_options.trigger_characters =
             Some(vec![".".to_string(), ":".to_string(), "(".to_string()]);
+        comp_options.resolve_provider = Some(true);
         result.capabilities.completion_provider = Some(comp_options);
         result.capabilities.rename_provider = Some(OneOf::Left(true));
         result.capabilities.references_provider = Some(OneOf::Left(true));

@@ -332,6 +338,7 @@ impl<Checker: BuildRunnable> Server<Checker> {
             "initialize" => self.init(msg, id),
             "shutdown" => self.shutdown(id),
             "textDocument/completion" => self.show_completion(msg),
+            "completionItem/resolve" => self.resolve_completion(msg),
             "textDocument/definition" => self.show_definition(msg),
             "textDocument/hover" => self.show_hover(msg),
             "textDocument/rename" => self.rename(msg),

@@ -352,7 +359,9 @@ impl<Checker: BuildRunnable> Server<Checker> {
                     msg["params"]["textDocument"]["uri"].as_str().unwrap(),
                 )?;
                 Self::send_log(format!("{method}: {uri}"))?;
-                self.check_file(uri, msg["params"]["textDocument"]["text"].as_str().unwrap())
+                let code = msg["params"]["textDocument"]["text"].as_str().unwrap();
+                self.file_cache.update(&uri, code.to_string());
+                self.check_file(uri, code)
             }
             "textDocument/didSave" => {
                 let uri = util::parse_and_normalize_url(

@@ -363,7 +372,12 @@ impl<Checker: BuildRunnable> Server<Checker> {
                 self.clear_cache(&uri);
                 self.check_file(uri, &code)
             }
-            // "textDocument/didChange"
+            "textDocument/didChange" => {
+                let params = DidChangeTextDocumentParams::deserialize(msg["params"].clone())?;
+                // Self::send_log(format!("{method}: {params:?}"))?;
+                self.file_cache.incremental_update(params);
+                Ok(())
+            }
             _ => Self::send_log(format!("received notification: {method}")),
         }
     }

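With the sync kind switched to INCREMENTAL (see the capability hunk above), didChange notifications carry only the edited ranges, and `incremental_update` splices each change into the cached source via `util::pos_to_index`. A standalone sketch of that splice, with a simplified re-implementation of the position-to-offset mapping (the real helper is in the util hunk below; this version is illustrative only):

    // LSP-style position: zero-based line and character.
    #[derive(Clone, Copy)]
    struct Position { line: u32, character: u32 }
    struct Range { start: Position, end: Position }
    struct ContentChange { range: Range, text: String }

    // Simplified equivalent of util::pos_to_index: walk the text, tracking line/column.
    fn pos_to_index(src: &str, pos: Position) -> usize {
        let (mut index, mut line, mut col) = (0, 0, 0);
        for c in src.chars() {
            if line == pos.line && col == pos.character {
                return index;
            }
            if c == '\n' { line += 1; col = 0; } else { col += 1; }
            index += 1;
        }
        index
    }

    // Apply the edits the way incremental_update does: replace each range in place.
    fn apply(mut code: String, changes: &[ContentChange]) -> String {
        for change in changes {
            let start = pos_to_index(&code, change.range.start);
            let end = pos_to_index(&code, change.range.end);
            code.replace_range(start..end, &change.text);
        }
        code
    }

    fn main() {
        let code = "x = 1\nprint! x\n".to_string();
        // The client edited line 0, columns 4..5: "1" becomes "42".
        let change = ContentChange {
            range: Range {
                start: Position { line: 0, character: 4 },
                end: Position { line: 0, character: 5 },
            },
            text: "42".to_string(),
        };
        assert_eq!(apply(code, &[change]), "x = 42\nprint! x\n");
        println!("ok");
    }
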
@@ -399,7 +413,6 @@ impl<Checker: BuildRunnable> Server<Checker> {
         let mut ctxs = vec![];
         if let Some(visitor) = self.get_visitor(uri) {
             let ns = visitor.get_namespace(pos);
-            Self::send_log(format!("ns: {ns:?}")).unwrap();
             for i in 1..ns.len() {
                 let ns = ns[..=ns.len() - i].join("");
                 if let Some(ctx) = self.modules.get(uri).unwrap().scope.get(&ns[..]) {

@@ -419,7 +432,9 @@ impl<Checker: BuildRunnable> Server<Checker> {
         let Some(module) = self.modules.get(uri) else {
             return Ok(vec![]);
         };
-        let maybe_token = util::get_token_relatively(uri.clone(), attr_marker_pos, -1)?;
+        let maybe_token = self
+            .file_cache
+            .get_token_relatively(uri, attr_marker_pos, -1)?;
         if let Some(token) = maybe_token {
             if token.is(TokenKind::Symbol) {
                 let var_name = token.inspect();

@@ -18,6 +18,11 @@ pub fn loc_to_range(loc: erg_common::error::Location) -> Option<Range> {
     Some(Range::new(start, end))
 }
+
+pub fn loc_to_pos(loc: erg_common::error::Location) -> Option<Position> {
+    let start = Position::new(loc.ln_begin()? - 1, loc.col_begin()? + 1);
+    Some(start)
+}

 pub fn pos_in_loc<L: Locational>(loc: &L, pos: Position) -> bool {
     let ln_begin = loc.ln_begin().unwrap_or(0);
     let ln_end = loc.ln_end().unwrap_or(0);

@@ -30,6 +35,26 @@ pub fn pos_in_loc<L: Locational>(loc: &L, pos: Position) -> bool {
     }
 }
+
+pub fn pos_to_index(src: &str, pos: Position) -> usize {
+    let mut index = 0;
+    let mut line = 0;
+    let mut col = 0;
+    for c in src.chars() {
+        if line == pos.line && col == pos.character {
+            return index;
+        }
+        if c == '\n' {
+            line += 1;
+            col = 0;
+            index += 1;
+        } else {
+            col += 1;
+            index += 1;
+        }
+    }
+    index
+}

 pub fn get_token_stream(uri: Url) -> ELSResult<TokenStream> {
     let mut code = String::new();
     let path = uri.to_file_path().unwrap();

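As a worked example of `pos_to_index` above: for src = "abc\ndef" and Position { line: 1, character: 2 }, the loop consumes `a`, `b`, `c` (indices 0-2) and the newline (index 3), then `d` and `e` (4-5), and stops on reaching `f` at line 1, column 2, returning 6 — the offset that `incremental_update` passes to `replace_range`.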
@@ -46,52 +71,6 @@ pub fn get_token_from_stream(stream: &TokenStream, pos: Position) -> ELSResult<O
     Ok(None)
 }

-pub fn get_token_index(uri: Url, pos: Position) -> ELSResult<Option<usize>> {
-    let mut timeout = 300;
-    let path = uri.to_file_path().unwrap();
-    loop {
-        let mut code = String::new();
-        File::open(path.as_path())?.read_to_string(&mut code)?;
-        if let Ok(tokens) = Lexer::from_str(code).lex() {
-            for (i, tok) in tokens.iter().enumerate() {
-                if pos_in_loc(tok, pos) {
-                    return Ok(Some(i));
-                }
-            }
-        }
-        std::thread::sleep(std::time::Duration::from_millis(10));
-        timeout -= 1;
-        if timeout == 0 {
-            return Ok(None);
-        }
-    }
-}
-
-pub fn get_token(uri: Url, pos: Position) -> ELSResult<Option<Token>> {
-    let index = get_token_index(uri.clone(), pos)?;
-    if let Some(idx) = index {
-        Ok(get_token_stream(uri)?.get(idx).cloned())
-    } else {
-        Ok(None)
-    }
-}
-
-/// plus_minus: 0 => same as get_token
-pub fn get_token_relatively(
-    uri: Url,
-    pos: Position,
-    plus_minus: isize,
-) -> ELSResult<Option<Token>> {
-    let index = get_token_index(uri.clone(), pos)?;
-    if let Some(idx) = index {
-        Ok(get_token_stream(uri)?
-            .get((idx as isize + plus_minus) as usize)
-            .cloned())
-    } else {
-        Ok(None)
-    }
-}
-
 pub fn get_code_from_uri(uri: &Url) -> ELSResult<String> {
     let path = uri.to_file_path().unwrap();
     let mut code = String::new();

@@ -75,6 +75,19 @@ impl<T: ?Sized> Shared<T> {
         RefCell::as_ptr(&self.0)
     }
+
+    /// # Safety
+    /// The caller must ensure that the returned reference is not used after the underlying
+    pub unsafe fn as_ref(&self) -> &T {
+        self.as_ptr().as_ref().unwrap()
+    }
+
+    /// # Safety
+    /// The caller must ensure that the returned reference is not used after the underlying
+    #[allow(clippy::mut_from_ref)]
+    pub unsafe fn as_mut(&self) -> &mut T {
+        self.as_ptr().as_mut().unwrap()
+    }

     #[inline]
     pub fn try_borrow_mut(&self) -> Result<RefMut<'_, T>, std::cell::BorrowMutError> {
         RefCell::try_borrow_mut(&self.0)

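The new unsafe `as_ref`/`as_mut` escape hatches let `FileCache` hand out references into the `Shared` map without keeping a `RefCell` borrow guard alive, which is what `FileCache::get` relies on above. A minimal sketch of the same pattern on a bare `RefCell` (illustrative only; as the doc comments say, the safety obligation falls on the caller):

    use std::cell::RefCell;

    struct Shared<T>(RefCell<T>);

    impl<T> Shared<T> {
        fn new(t: T) -> Self { Shared(RefCell::new(t)) }

        // Raw pointer to the cell's contents, like RefCell::as_ptr.
        fn as_ptr(&self) -> *mut T { self.0.as_ptr() }

        /// # Safety
        /// This bypasses RefCell's dynamic borrow checking; the caller must ensure
        /// no conflicting borrow or mutation is alive while the reference is used.
        unsafe fn as_ref(&self) -> &T {
            self.as_ptr().as_ref().unwrap()
        }
    }

    fn main() {
        let shared = Shared::new(vec![1, 2, 3]);
        // Sound here: nothing else borrows or mutates `shared` while `v` is alive.
        let v: &Vec<i32> = unsafe { shared.as_ref() };
        println!("{}", v.len()); // 3
    }
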
@@ -57,6 +57,11 @@ pub trait DequeStream<T>: Sized {
     fn iter(&self) -> vec_deque::Iter<'_, T> {
         self.ref_payload().iter()
     }
+
+    #[inline]
+    fn len(&self) -> usize {
+        self.ref_payload().len()
+    }
 }

 #[macro_export]