feat(els): support module renaming

This commit is contained in:
Shunsuke Shibayama 2023-02-14 16:23:45 +09:00
parent 38f34edbf0
commit f3fd5e3eeb
13 changed files with 222 additions and 39 deletions

View file

@ -4,7 +4,7 @@ use serde::Deserialize;
use serde_json::json;
use serde_json::Value;
use erg_common::traits::{Locational, Stream};
use erg_common::traits::Locational;
use erg_compiler::artifact::BuildRunnable;
use erg_compiler::erg_parser::token::{Token, TokenKind};
use erg_compiler::hir::Expr;

View file

@ -1,6 +1,12 @@
use std::fs::{metadata, Metadata};
use lsp_types::{DidChangeTextDocumentParams, Position, Url};
use lsp_types::{
DidChangeTextDocumentParams, FileOperationFilter, FileOperationPattern,
FileOperationPatternKind, FileOperationRegistrationOptions, OneOf, Position, Range,
RenameFilesParams, SaveOptions, ServerCapabilities, TextDocumentSyncCapability,
TextDocumentSyncKind, TextDocumentSyncOptions, Url, WorkspaceFileOperationsServerCapabilities,
WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
};
use erg_common::dict::Dict;
use erg_common::shared::Shared;
@ -30,6 +36,47 @@ impl FileCache {
}
}
pub(crate) fn set_capabilities(&mut self, capabilities: &mut ServerCapabilities) {
let workspace_folders = WorkspaceFoldersServerCapabilities {
supported: Some(true),
change_notifications: Some(OneOf::Left(true)),
};
let file_op = FileOperationRegistrationOptions {
filters: vec![
FileOperationFilter {
scheme: Some(String::from("file")),
pattern: FileOperationPattern {
glob: String::from("**/*.er"),
matches: Some(FileOperationPatternKind::File),
options: None,
},
},
FileOperationFilter {
scheme: Some(String::from("file")),
pattern: FileOperationPattern {
glob: String::from("**"),
matches: Some(FileOperationPatternKind::Folder),
options: None,
},
},
],
};
capabilities.workspace = Some(WorkspaceServerCapabilities {
workspace_folders: Some(workspace_folders),
file_operations: Some(WorkspaceFileOperationsServerCapabilities {
will_rename: Some(file_op),
..Default::default()
}),
});
let sync_option = TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),
save: Some(SaveOptions::default().into()),
..Default::default()
};
capabilities.text_document_sync = Some(TextDocumentSyncCapability::Options(sync_option));
}
pub fn get(&self, uri: &Url) -> ELSResult<&FileCacheEntry> {
let Some(entry) = unsafe { self.files.as_ref() }.get(uri) else {
let code = util::get_code_from_uri(uri)?;
@ -120,6 +167,21 @@ impl FileCache {
);
}
/// Splices `new_code` over the span `old` in the cached source for `uri`,
/// then re-lexes the result and refreshes the stored file metadata.
///
/// No-op if the file is not cached. Filesystem or path failures leave the
/// entry untouched instead of panicking (the original `unwrap`s could take
/// the whole server down on a rename race or a non-`file:` URI).
pub(crate) fn ranged_update(&self, uri: &Url, old: Range, new_code: &str) {
    let Some(entry) = unsafe { self.files.as_mut() }.get_mut(uri) else {
        return;
    };
    let Ok(path) = uri.to_file_path() else {
        return;
    };
    let Ok(metadata) = metadata(path) else {
        return;
    };
    // Resolve positions against the pre-edit text, then edit in place
    // (avoids the full clone of the code buffer the original made).
    let start = util::pos_to_index(&entry.code, old.start);
    let end = util::pos_to_index(&entry.code, old.end);
    entry.code.replace_range(start..end, new_code);
    // `ok()` tolerates transiently unlexable states mid-edit.
    entry.token_stream = Lexer::from_str(entry.code.clone()).lex().ok();
    entry.metadata = metadata;
}
pub(crate) fn incremental_update(&self, params: DidChangeTextDocumentParams) {
let uri = util::normalize_url(params.text_document.uri);
let Some(entry) = unsafe { self.files.as_mut() }.get_mut(&uri) else {
@ -143,4 +205,16 @@ impl FileCache {
/// Drops the cache entry for `uri`, if one exists.
pub fn remove(&mut self, uri: &Url) {
    let mut files = self.files.borrow_mut();
    files.remove(uri);
}
/// Re-keys cache entries for files renamed by the client
/// (`workspace/willRenameFiles`).
///
/// Malformed URIs and uncached files are skipped rather than aborting the
/// batch (the original `Url::parse(...).unwrap()` panicked on
/// client-supplied strings).
pub fn rename_files(&mut self, params: &RenameFilesParams) -> ELSResult<()> {
    // One borrow for the whole batch instead of two per file.
    let mut files = self.files.borrow_mut();
    for file in &params.files {
        let (Ok(old), Ok(new)) = (Url::parse(&file.old_uri), Url::parse(&file.new_uri)) else {
            continue;
        };
        let old_uri = util::normalize_url(old);
        let new_uri = util::normalize_url(new);
        if let Some(entry) = files.remove(&old_uri) {
            files.insert(new_uri, entry);
        }
    }
    Ok(())
}
}

View file

@ -1,4 +1,4 @@
use erg_common::traits::{Locational, Stream};
use erg_common::traits::Locational;
use erg_common::Str;
use erg_compiler::erg_parser::token::Token;
use erg_compiler::hir::*;

View file

@ -1,6 +1,8 @@
use std::collections::HashMap;
use std::time::SystemTime;
use erg_common::traits::{Locational, Stream};
use erg_compiler::artifact::IncompleteArtifact;
use serde::Deserialize;
use serde_json::json;
use serde_json::Value;
@ -8,9 +10,10 @@ use serde_json::Value;
use erg_common::dict::Dict;
use erg_compiler::artifact::BuildRunnable;
use erg_compiler::hir::{Expr, Literal};
use erg_compiler::varinfo::{AbsLocation, VarKind};
use lsp_types::{RenameParams, TextEdit, Url, WorkspaceEdit};
use lsp_types::{RenameFilesParams, RenameParams, TextEdit, Url, WorkspaceEdit};
use crate::server::{ELSResult, Server};
use crate::util;
@ -145,3 +148,97 @@ impl<Checker: BuildRunnable> Server<Checker> {
.collect()
}
}
impl<Checker: BuildRunnable> Server<Checker> {
/// Collects the `TextEdit`s needed to retarget every import of the module at
/// `old_uri` to the new module name derived from `new_uri`, keyed by the
/// dependent file's URI.
///
/// Side effect: each dependent's cached source is patched in place via
/// `ranged_update`, so the cache reflects the rename immediately; the
/// returned edits (built from the pre-edit ranges) are what is sent back to
/// the client in the `WorkspaceEdit`.
fn collect_module_changes(
&mut self,
old_uri: &Url,
new_uri: &Url,
) -> HashMap<Url, Vec<TextEdit>> {
// The replacement text is the new file stem quoted as a string literal,
// e.g. renaming foo.er -> bar.er rewrites `import "foo"` to `import "bar"`.
let new_path = util::uri_to_path(new_uri)
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
let new_path = format!("\"{new_path}\"");
let mut changes = HashMap::new();
for dep in self.dependents_of(old_uri) {
let imports = self.search_imports(&dep, old_uri);
for import in imports.iter() {
// NOTE(review): `loc_to_range(...).unwrap()` assumes every import
// literal carries a real source location — confirm this holds for
// synthesized/desugared nodes.
let range = util::loc_to_range(import.loc()).unwrap();
self.file_cache.ranged_update(&dep, range, &new_path);
}
let edits = imports
.iter()
.map(|lit| TextEdit::new(util::loc_to_range(lit.loc()).unwrap(), new_path.clone()));
changes.insert(dep, edits.collect());
}
changes
}
/// TODO: multi-path imports
///
/// Returns the string-literal argument of every import call in `target`
/// whose quoted content matches the file stem of `needle`.
fn search_imports(&self, target: &Url, needle: &Url) -> Vec<&Literal> {
    let needle_module_name = util::uri_to_path(needle)
        .file_stem()
        .unwrap()
        .to_string_lossy()
        .to_string();
    let mut imports = vec![];
    let Some(IncompleteArtifact {
        object: Some(hir), ..
    }) = self.artifacts.get(target)
    else {
        return imports;
    };
    for chunk in hir.module.iter() {
        // Only `x = import "..."` / `x = pyimport "..."` definitions matter.
        let Expr::Def(def) = chunk else { continue };
        if !def.def_kind().is_import() {
            continue;
        }
        let Some(Expr::Call(import_call)) = def.body.block.first() else {
            continue;
        };
        // Was `.unwrap()`: an import call missing its "Path" argument
        // (e.g. code broken mid-edit) should be skipped, not panic.
        let Some(Expr::Lit(lit)) = import_call.args.get_left_or_key("Path") else {
            continue;
        };
        if lit.token.content.trim_start_matches('\"').trim_end_matches('\"')
            == needle_module_name
        {
            imports.push(lit);
        }
    }
    imports
}
/// Handles `workspace/willRenameFiles`: rewrites imports in dependent
/// modules, re-keys per-file server state, and answers with a
/// `WorkspaceEdit` covering every affected file.
pub(crate) fn rename_files(&mut self, msg: &Value) -> ELSResult<()> {
    Self::send_log("workspace/willRenameFiles request")?;
    let params = RenameFilesParams::deserialize(msg["params"].clone())?;
    let mut edits = HashMap::new();
    for file in &params.files {
        // Skip malformed URIs instead of panicking on `unwrap`.
        let (Ok(old), Ok(new)) = (Url::parse(&file.old_uri), Url::parse(&file.new_uri)) else {
            continue;
        };
        let old_uri = util::normalize_url(old);
        let new_uri = util::normalize_url(new);
        edits.extend(self.collect_module_changes(&old_uri, &new_uri));
        // Re-key each map independently: the original
        // `let Some(_) = self.artifacts.remove(..) else { continue }`
        // skipped the `modules` re-keying whenever a file had no artifact.
        if let Some(artifact) = self.artifacts.remove(&old_uri) {
            self.artifacts.insert(new_uri.clone(), artifact);
        }
        if let Some(module) = self.modules.remove(&old_uri) {
            self.modules.insert(new_uri, module);
        }
    }
    // `clear_all` wipes the shared caches wholesale, so once after the
    // batch is equivalent to (and cheaper than) once per renamed file.
    if let Some(shared) = self.get_shared() {
        shared.clear_all();
    }
    self.file_cache.rename_files(&params)?;
    let edit = WorkspaceEdit::new(edits);
    // NOTE(review): `msg["id"].as_i64().unwrap()` still assumes a numeric
    // request id; JSON-RPC also allows string ids — confirm upstream.
    Self::send(&json!({ "jsonrpc": "2.0", "id": msg["id"].as_i64().unwrap(), "result": edit }))
}
}

View file

@ -23,8 +23,7 @@ use lsp_types::{
CompletionOptions, DidChangeTextDocumentParams, ExecuteCommandOptions, HoverProviderCapability,
InitializeResult, OneOf, Position, SemanticTokenType, SemanticTokensFullOptions,
SemanticTokensLegend, SemanticTokensOptions, SemanticTokensServerCapabilities,
ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind, Url,
WorkDoneProgressOptions,
ServerCapabilities, Url, WorkDoneProgressOptions,
};
use crate::file_cache::FileCache;
@ -171,9 +170,7 @@ impl<Checker: BuildRunnable> Server<Checker> {
}
let mut result = InitializeResult::default();
result.capabilities = ServerCapabilities::default();
result.capabilities.text_document_sync = Some(TextDocumentSyncCapability::from(
TextDocumentSyncKind::INCREMENTAL,
));
self.file_cache.set_capabilities(&mut result.capabilities);
let mut comp_options = CompletionOptions::default();
comp_options.trigger_characters =
Some(vec![".".to_string(), ":".to_string(), "(".to_string()]);
@ -354,6 +351,7 @@ impl<Checker: BuildRunnable> Server<Checker> {
"textDocument/semanticTokens/full" => self.get_semantic_tokens_full(msg),
"textDocument/inlayHint" => self.get_inlay_hint(msg),
"textDocument/codeAction" => self.send_code_action(msg),
"workspace/willRenameFiles" => self.rename_files(msg),
other => Self::send_error(Some(id), -32600, format!("{other} is not supported")),
}
}

View file

@ -364,6 +364,15 @@ macro_rules! impl_stream {
self.payload().into_iter()
}
}
// Iteration helpers generated for every Stream type (variant with payload accessors).
impl $Strc {
/// Iterates over the payload by shared reference.
pub fn iter(&self) -> std::slice::Iter<'_, $Inner> {
self.ref_payload().iter()
}
/// Iterates over the payload by mutable reference.
pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, $Inner> {
self.ref_mut_payload().iter_mut()
}
}
};
($Strc: ident, $Inner: ident) => {
impl $Strc {
@ -427,6 +436,15 @@ macro_rules! impl_stream {
&mut self.0
}
}
// Same iteration helpers for the tuple-struct variant of the macro.
impl $Strc {
/// Iterates over the payload by shared reference.
pub fn iter(&self) -> std::slice::Iter<'_, $Inner> {
self.ref_payload().iter()
}
/// Iterates over the payload by mutable reference.
pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, $Inner> {
self.ref_mut_payload().iter_mut()
}
}
};
}

View file

@ -1116,14 +1116,6 @@ impl RecordAttrs {
self.0.is_empty()
}
/// Iterates over the record's attribute definitions.
pub fn iter(&self) -> impl Iterator<Item = &Def> {
self.0.iter()
}
/// Mutable iteration over the record's attribute definitions.
pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut Def> {
self.0.iter_mut()
}
/// Appends an attribute definition to the record.
pub fn push(&mut self, attr: Def) {
self.0.push(attr);
}

View file

@ -6,13 +6,13 @@ class Error:
# T = TypeVar("T")
# @_SpecialForm
def Result(self, parameters):
    """Result type.

    Result[T] is equivalent to Union[T, Error].
    """
    # NOTE(review): the original returned `[arg, Error]` while the
    # `arg = _type_check(parameters, ...)` line was commented out, so `arg`
    # was an unbound name and every call raised NameError. Bind it to the
    # parameter directly until _type_check is restored.
    arg = parameters
    return [arg, Error]
# def Result(self, parameters):
# """Result type.
#
# Result[T] is equivalent to Union[T, Error].
# """
# arg = _type_check(parameters, f"{self} requires a single type.")
# return [arg, Error]
def is_ok(obj) -> bool:
    """Return True unless `obj` represents an Error."""
    if isinstance(obj, Error):
        return False
    return True

View file

@ -1,5 +1,5 @@
from _erg_range import Range, LeftOpenRange, RightOpenRange, OpenRange, ClosedRange, RangeIterator
from _erg_result import Result, Error, is_ok
from _erg_result import Error, is_ok
from _erg_float import Float, FloatMut
from _erg_int import Int, IntMut
from _erg_nat import Nat, NatMut

View file

@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
use erg_common::config::{ErgConfig, Input};
use erg_common::python_util::BUILTIN_PYTHON_MODS;
use erg_common::traits::{Locational, Stream};
use erg_common::traits::Locational;
use erg_common::Str;
use erg_common::{enum_unwrap, log};

View file

@ -139,6 +139,12 @@ impl ModuleCache {
/// Finds a cached module path whose name is similar to `name`, if any.
/// NOTE(review): `to_str().unwrap()` assumes all cached paths are valid
/// UTF-8 — confirm no non-UTF-8 paths can enter the cache.
pub fn get_similar_name(&self, name: &str) -> Option<Str> {
    let candidates = self.cache.iter().map(|(path, _)| path.to_str().unwrap());
    get_similar_name(candidates, name).map(Str::rc)
}
/// Moves the cache entry registered under `old` to the key `new`.
/// No-op when `old` is not cached.
pub fn rename_path(&mut self, old: &PathBuf, new: PathBuf) {
    let Some(entry) = self.cache.remove(old) else {
        return;
    };
    self.cache.insert(new, entry);
}
}
#[derive(Debug, Clone, Default)]
@ -220,4 +226,8 @@ impl SharedModuleCache {
}
self.register(builtin_path, None, Rc::try_unwrap(builtin.module).unwrap());
}
/// Re-keys the shared module cache entry from `path` to `new`.
pub fn rename_path(&self, path: &PathBuf, new: PathBuf) {
    let mut inner = self.0.borrow_mut();
    inner.rename_path(path, new);
}
}

View file

@ -113,7 +113,7 @@ impl AbsLocation {
pub fn code(&self) -> Option<String> {
use std::io::{BufRead, BufReader};
self.module.as_ref().and_then(|module| {
let file = std::fs::File::open(module).unwrap();
let file = std::fs::File::open(module).ok()?;
let reader = BufReader::new(file);
reader
.lines()

View file

@ -831,16 +831,6 @@ impl From<Vec<Def>> for RecordAttrs {
}
}
impl RecordAttrs {
/// Iterates over the record's attribute definitions.
pub fn iter(&self) -> impl Iterator<Item = &Def> {
self.0.iter()
}
/// Mutable iteration over the record's attribute definitions.
pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut Def> {
self.0.iter_mut()
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct NormalRecord {
pub l_brace: Token,
@ -3550,13 +3540,17 @@ impl DefKind {
self.is_class() || self.is_trait()
}
pub fn is_erg_import(&self) -> bool {
pub const fn is_erg_import(&self) -> bool {
matches!(self, Self::ErgImport)
}
pub fn is_py_import(&self) -> bool {
pub const fn is_py_import(&self) -> bool {
matches!(self, Self::PyImport)
}
pub const fn is_import(&self) -> bool {
self.is_erg_import() || self.is_py_import()
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]