rename all things

Aleksey Kladov 2018-09-16 12:54:24 +03:00
parent ba0bfeee12
commit b5021411a8
478 changed files with 219 additions and 204 deletions

View file

@@ -0,0 +1,49 @@
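//! Static LSP server capabilities advertised to the client during the
//! `initialize` handshake (passed to `run_server` in `main.rs`).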
use languageserver_types::{
ServerCapabilities,
TextDocumentSyncCapability,
TextDocumentSyncOptions,
TextDocumentSyncKind,
ExecuteCommandOptions,
CompletionOptions,
DocumentOnTypeFormattingOptions,
};
pub fn server_capabilities() -> ServerCapabilities {
ServerCapabilities {
text_document_sync: Some(TextDocumentSyncCapability::Options(
TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::Full),
will_save: None,
will_save_wait_until: None,
save: None,
}
)),
hover_provider: None,
completion_provider: Some(CompletionOptions {
resolve_provider: None,
trigger_characters: None,
}),
signature_help_provider: None,
definition_provider: Some(true),
type_definition_provider: None,
implementation_provider: None,
references_provider: None,
document_highlight_provider: None,
document_symbol_provider: Some(true),
workspace_symbol_provider: Some(true),
code_action_provider: Some(true),
code_lens_provider: None,
document_formatting_provider: None,
document_range_formatting_provider: None,
document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
first_trigger_character: "=".to_string(),
more_trigger_character: None,
}),
rename_provider: None,
color_provider: None,
execute_command_provider: Some(ExecuteCommandOptions {
commands: vec!["apply_code_action".to_string()],
}),
}
}

View file

@@ -0,0 +1,296 @@
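//! Conversions between LSP (`languageserver_types`) values and the analyzer's
//! own types (`TextUnit`, `TextRange`, `FileId`, ...), expressed through the
//! `Conv`/`ConvWith`/`TryConvWith` traits defined below.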
use languageserver_types::{
Range, SymbolKind, Position, TextEdit, Location, Url,
TextDocumentIdentifier, VersionedTextDocumentIdentifier, TextDocumentItem,
TextDocumentPositionParams, TextDocumentEdit,
};
use ra_editor::{LineIndex, LineCol, Edit, AtomEdit};
use ra_syntax::{SyntaxKind, TextUnit, TextRange};
use ra_analysis::{FileId, SourceChange, SourceFileEdit, FileSystemEdit};
use {
Result,
server_world::ServerWorld,
req,
};
pub trait Conv {
type Output;
fn conv(self) -> Self::Output;
}
pub trait ConvWith {
type Ctx;
type Output;
fn conv_with(self, ctx: &Self::Ctx) -> Self::Output;
}
pub trait TryConvWith {
type Ctx;
type Output;
fn try_conv_with(self, ctx: &Self::Ctx) -> Result<Self::Output>;
}
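// Usage sketch (see the impls below): client positions are line/column based
// while the analyzer works with byte offsets, so a `LineIndex` is threaded
// through as the conversion context, e.g.
//     let offset: TextUnit = position.conv_with(&line_index);
//     let lsp_range: Range = text_range.conv_with(&line_index);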
impl Conv for SyntaxKind {
type Output = SymbolKind;
fn conv(self) -> <Self as Conv>::Output {
match self {
SyntaxKind::FN_DEF => SymbolKind::Function,
SyntaxKind::STRUCT_DEF => SymbolKind::Struct,
SyntaxKind::ENUM_DEF => SymbolKind::Enum,
SyntaxKind::TRAIT_DEF => SymbolKind::Interface,
SyntaxKind::MODULE => SymbolKind::Module,
SyntaxKind::TYPE_DEF => SymbolKind::TypeParameter,
SyntaxKind::STATIC_DEF => SymbolKind::Constant,
SyntaxKind::CONST_DEF => SymbolKind::Constant,
SyntaxKind::IMPL_ITEM => SymbolKind::Object,
_ => SymbolKind::Variable,
}
}
}
impl ConvWith for Position {
type Ctx = LineIndex;
type Output = TextUnit;
fn conv_with(self, line_index: &LineIndex) -> TextUnit {
// TODO: UTF-16
let line_col = LineCol {
line: self.line as u32,
col: (self.character as u32).into(),
};
line_index.offset(line_col)
}
}
impl ConvWith for TextUnit {
type Ctx = LineIndex;
type Output = Position;
fn conv_with(self, line_index: &LineIndex) -> Position {
let line_col = line_index.line_col(self);
// TODO: UTF-16
Position::new(line_col.line as u64, u32::from(line_col.col) as u64)
}
}
impl ConvWith for TextRange {
type Ctx = LineIndex;
type Output = Range;
fn conv_with(self, line_index: &LineIndex) -> Range {
Range::new(
self.start().conv_with(line_index),
self.end().conv_with(line_index),
)
}
}
impl ConvWith for Range {
type Ctx = LineIndex;
type Output = TextRange;
fn conv_with(self, line_index: &LineIndex) -> TextRange {
TextRange::from_to(
self.start.conv_with(line_index),
self.end.conv_with(line_index),
)
}
}
impl ConvWith for Edit {
type Ctx = LineIndex;
type Output = Vec<TextEdit>;
fn conv_with(self, line_index: &LineIndex) -> Vec<TextEdit> {
self.into_atoms()
.into_iter()
.map_conv_with(line_index)
.collect()
}
}
impl ConvWith for AtomEdit {
type Ctx = LineIndex;
type Output = TextEdit;
fn conv_with(self, line_index: &LineIndex) -> TextEdit {
TextEdit {
range: self.delete.conv_with(line_index),
new_text: self.insert,
}
}
}
impl<T: ConvWith> ConvWith for Option<T> {
type Ctx = <T as ConvWith>::Ctx;
type Output = Option<<T as ConvWith>::Output>;
fn conv_with(self, ctx: &Self::Ctx) -> Self::Output {
self.map(|x| ConvWith::conv_with(x, ctx))
}
}
impl<'a> TryConvWith for &'a Url {
type Ctx = ServerWorld;
type Output = FileId;
fn try_conv_with(self, world: &ServerWorld) -> Result<FileId> {
world.uri_to_file_id(self)
}
}
impl TryConvWith for FileId {
type Ctx = ServerWorld;
type Output = Url;
fn try_conv_with(self, world: &ServerWorld) -> Result<Url> {
world.file_id_to_uri(self)
}
}
impl<'a> TryConvWith for &'a TextDocumentItem {
type Ctx = ServerWorld;
type Output = FileId;
fn try_conv_with(self, world: &ServerWorld) -> Result<FileId> {
self.uri.try_conv_with(world)
}
}
impl<'a> TryConvWith for &'a VersionedTextDocumentIdentifier {
type Ctx = ServerWorld;
type Output = FileId;
fn try_conv_with(self, world: &ServerWorld) -> Result<FileId> {
self.uri.try_conv_with(world)
}
}
impl<'a> TryConvWith for &'a TextDocumentIdentifier {
type Ctx = ServerWorld;
type Output = FileId;
fn try_conv_with(self, world: &ServerWorld) -> Result<FileId> {
world.uri_to_file_id(&self.uri)
}
}
impl<T: TryConvWith> TryConvWith for Vec<T> {
type Ctx = <T as TryConvWith>::Ctx;
type Output = Vec<<T as TryConvWith>::Output>;
fn try_conv_with(self, ctx: &Self::Ctx) -> Result<Self::Output> {
let mut res = Vec::with_capacity(self.len());
for item in self {
res.push(item.try_conv_with(ctx)?);
}
Ok(res)
}
}
impl TryConvWith for SourceChange {
type Ctx = ServerWorld;
type Output = req::SourceChange;
fn try_conv_with(self, world: &ServerWorld) -> Result<req::SourceChange> {
let cursor_position = match self.cursor_position {
None => None,
Some(pos) => {
let line_index = world.analysis().file_line_index(pos.file_id);
Some(TextDocumentPositionParams {
text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?),
position: pos.offset.conv_with(&line_index),
})
}
};
let source_file_edits = self.source_file_edits.try_conv_with(world)?;
let file_system_edits = self.file_system_edits.try_conv_with(world)?;
Ok(req::SourceChange {
label: self.label,
source_file_edits,
file_system_edits,
cursor_position,
})
}
}
impl TryConvWith for SourceFileEdit {
type Ctx = ServerWorld;
type Output = TextDocumentEdit;
fn try_conv_with(self, world: &ServerWorld) -> Result<TextDocumentEdit> {
let text_document = VersionedTextDocumentIdentifier {
uri: self.file_id.try_conv_with(world)?,
version: None,
};
let line_index = world.analysis().file_line_index(self.file_id);
let edits = self.edits
.into_iter()
.map_conv_with(&line_index)
.collect();
Ok(TextDocumentEdit { text_document, edits })
}
}
impl TryConvWith for FileSystemEdit {
type Ctx = ServerWorld;
type Output = req::FileSystemEdit;
fn try_conv_with(self, world: &ServerWorld) -> Result<req::FileSystemEdit> {
let res = match self {
FileSystemEdit::CreateFile { anchor, path } => {
let uri = world.file_id_to_uri(anchor)?;
let path = &path.as_str()[3..]; // strip `../` b/c url is weird
let uri = uri.join(path)?;
req::FileSystemEdit::CreateFile { uri }
},
FileSystemEdit::MoveFile { file, path } => {
let src = world.file_id_to_uri(file)?;
let path = &path.as_str()[3..]; // strip `../` b/c url is weird
let dst = src.join(path)?;
req::FileSystemEdit::MoveFile { src, dst }
},
};
Ok(res)
}
}
pub fn to_location(
file_id: FileId,
range: TextRange,
world: &ServerWorld,
line_index: &LineIndex,
) -> Result<Location> {
let url = file_id.try_conv_with(world)?;
let loc = Location::new(
url,
range.conv_with(line_index),
);
Ok(loc)
}
pub trait MapConvWith<'a>: Sized {
type Ctx;
type Output;
fn map_conv_with(self, ctx: &'a Self::Ctx) -> ConvWithIter<'a, Self, Self::Ctx> {
ConvWithIter { iter: self, ctx }
}
}
impl<'a, I> MapConvWith<'a> for I
where I: Iterator,
I::Item: ConvWith
{
type Ctx = <I::Item as ConvWith>::Ctx;
type Output = <I::Item as ConvWith>::Output;
}
pub struct ConvWithIter<'a, I, Ctx: 'a> {
iter: I,
ctx: &'a Ctx,
}
impl<'a, I, Ctx> Iterator for ConvWithIter<'a, I, Ctx>
where
I: Iterator,
I::Item: ConvWith<Ctx=Ctx>,
{
type Item = <I::Item as ConvWith>::Output;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next().map(|item| item.conv_with(self.ctx))
}
}

View file

@@ -0,0 +1,37 @@
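//! Crate root of the `ra_lsp_server` library: wires together the capabilities,
//! the conversion layer, the VFS and workspace loaders, and the main loop.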
#[macro_use]
extern crate failure;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;
extern crate languageserver_types;
#[macro_use]
extern crate crossbeam_channel;
extern crate rayon;
#[macro_use]
extern crate log;
extern crate drop_bomb;
extern crate url_serde;
extern crate walkdir;
extern crate im;
extern crate relative_path;
extern crate cargo_metadata;
extern crate gen_lsp_server;
extern crate ra_editor;
extern crate ra_analysis;
extern crate ra_syntax;
mod caps;
pub mod req;
mod conv;
mod main_loop;
mod vfs;
mod path_map;
mod server_world;
mod project_model;
pub mod thread_watcher;
pub type Result<T> = ::std::result::Result<T, ::failure::Error>;
pub use caps::server_capabilities;
pub use main_loop::main_loop;

View file

@@ -0,0 +1,52 @@
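//! Binary entry point: sets up logging, runs the LSP server over stdio and
//! turns a panic in the main loop into a regular error exit.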
#[macro_use]
extern crate log;
#[macro_use]
extern crate failure;
extern crate flexi_logger;
extern crate gen_lsp_server;
extern crate ra_lsp_server;
use flexi_logger::{Logger, Duplicate};
use gen_lsp_server::{run_server, stdio_transport};
use ra_lsp_server::Result;
fn main() -> Result<()> {
::std::env::set_var("RUST_BACKTRACE", "short");
Logger::with_env_or_str("error")
.duplicate_to_stderr(Duplicate::All)
.log_to_file()
.directory("log")
.start()?;
info!("lifecycle: server started");
match ::std::panic::catch_unwind(|| main_inner()) {
Ok(res) => {
info!("lifecycle: terminating process with {:?}", res);
res
}
Err(_) => {
error!("server panicked");
bail!("server panicked")
}
}
}
fn main_inner() -> Result<()> {
let (receiver, sender, threads) = stdio_transport();
let cwd = ::std::env::current_dir()?;
run_server(
ra_lsp_server::server_capabilities(),
|params, r, s| {
let root = params.root_uri
.and_then(|it| it.to_file_path().ok())
.unwrap_or(cwd);
ra_lsp_server::main_loop(false, root, r, s)
},
receiver,
sender,
)?;
info!("shutting down IO...");
threads.join()?;
info!("... IO is down");
Ok(())
}

View file

@@ -0,0 +1,436 @@
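//! One handler per LSP (or custom `m/...`) request: each converts the incoming
//! params with `TryConvWith`/`ConvWith`, queries `world.analysis()`, and
//! converts the result back into LSP types.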
use std::collections::HashMap;
use languageserver_types::{
Diagnostic, DiagnosticSeverity, DocumentSymbol,
Command, TextDocumentIdentifier,
SymbolInformation, Position, Location, TextEdit,
CompletionItem, InsertTextFormat, CompletionItemKind,
};
use serde_json::to_value;
use ra_analysis::{Query, FileId, RunnableKind, JobToken};
use ra_syntax::{
text_utils::contains_offset_nonstrict,
};
use ::{
req::{self, Decoration}, Result,
conv::{Conv, ConvWith, TryConvWith, MapConvWith, to_location},
server_world::ServerWorld,
project_model::TargetKind,
};
pub fn handle_syntax_tree(
world: ServerWorld,
params: req::SyntaxTreeParams,
_token: JobToken,
) -> Result<String> {
let id = params.text_document.try_conv_with(&world)?;
let res = world.analysis().syntax_tree(id);
Ok(res)
}
pub fn handle_extend_selection(
world: ServerWorld,
params: req::ExtendSelectionParams,
_token: JobToken,
) -> Result<req::ExtendSelectionResult> {
let file_id = params.text_document.try_conv_with(&world)?;
let file = world.analysis().file_syntax(file_id);
let line_index = world.analysis().file_line_index(file_id);
let selections = params.selections.into_iter()
.map_conv_with(&line_index)
.map(|r| world.analysis().extend_selection(&file, r))
.map_conv_with(&line_index)
.collect();
Ok(req::ExtendSelectionResult { selections })
}
pub fn handle_find_matching_brace(
world: ServerWorld,
params: req::FindMatchingBraceParams,
_token: JobToken,
) -> Result<Vec<Position>> {
let file_id = params.text_document.try_conv_with(&world)?;
let file = world.analysis().file_syntax(file_id);
let line_index = world.analysis().file_line_index(file_id);
let res = params.offsets
.into_iter()
.map_conv_with(&line_index)
.map(|offset| {
world.analysis().matching_brace(&file, offset).unwrap_or(offset)
})
.map_conv_with(&line_index)
.collect();
Ok(res)
}
pub fn handle_join_lines(
world: ServerWorld,
params: req::JoinLinesParams,
_token: JobToken,
) -> Result<req::SourceChange> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let range = params.range.conv_with(&line_index);
world.analysis().join_lines(file_id, range)
.try_conv_with(&world)
}
pub fn handle_on_type_formatting(
world: ServerWorld,
params: req::DocumentOnTypeFormattingParams,
_token: JobToken,
) -> Result<Option<Vec<TextEdit>>> {
if params.ch != "=" {
return Ok(None);
}
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index);
let edits = match world.analysis().on_eq_typed(file_id, offset) {
None => return Ok(None),
Some(mut action) => action.source_file_edits.pop().unwrap().edits,
};
let edits = edits.into_iter().map_conv_with(&line_index).collect();
Ok(Some(edits))
}
pub fn handle_document_symbol(
world: ServerWorld,
params: req::DocumentSymbolParams,
_token: JobToken,
) -> Result<Option<req::DocumentSymbolResponse>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
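// Two passes: first collect a flat list of symbols together with the index of
// their parent (as reported by `file_structure`), then fold children into
// their parents back-to-front so the nesting comes out right.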
let mut parents: Vec<(DocumentSymbol, Option<usize>)> = Vec::new();
for symbol in world.analysis().file_structure(file_id) {
let doc_symbol = DocumentSymbol {
name: symbol.label,
detail: Some("".to_string()),
kind: symbol.kind.conv(),
deprecated: None,
range: symbol.node_range.conv_with(&line_index),
selection_range: symbol.navigation_range.conv_with(&line_index),
children: None,
};
parents.push((doc_symbol, symbol.parent));
}
let mut res = Vec::new();
while let Some((node, parent)) = parents.pop() {
match parent {
None => res.push(node),
Some(i) => {
let children = &mut parents[i].0.children;
if children.is_none() {
*children = Some(Vec::new());
}
children.as_mut().unwrap().push(node);
}
}
}
Ok(Some(req::DocumentSymbolResponse::Nested(res)))
}
pub fn handle_workspace_symbol(
world: ServerWorld,
params: req::WorkspaceSymbolParams,
token: JobToken,
) -> Result<Option<Vec<SymbolInformation>>> {
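// Query modifiers: `#` asks for all symbols (not just types), `*` includes
// symbols from library dependencies; both characters are stripped before the
// actual search.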
let all_symbols = params.query.contains("#");
let libs = params.query.contains("*");
let query = {
let query: String = params.query.chars()
.filter(|&c| c != '#' && c != '*')
.collect();
let mut q = Query::new(query);
if !all_symbols {
q.only_types();
}
if libs {
q.libs();
}
q.limit(128);
q
};
let mut res = exec_query(&world, query, &token)?;
if res.is_empty() && !all_symbols {
let mut query = Query::new(params.query);
query.limit(128);
res = exec_query(&world, query, &token)?;
}
return Ok(Some(res));
fn exec_query(world: &ServerWorld, query: Query, token: &JobToken) -> Result<Vec<SymbolInformation>> {
let mut res = Vec::new();
for (file_id, symbol) in world.analysis().symbol_search(query, token) {
let line_index = world.analysis().file_line_index(file_id);
let info = SymbolInformation {
name: symbol.name.to_string(),
kind: symbol.kind.conv(),
location: to_location(
file_id, symbol.node_range,
world, &line_index
)?,
container_name: None,
};
res.push(info);
};
Ok(res)
}
}
pub fn handle_goto_definition(
world: ServerWorld,
params: req::TextDocumentPositionParams,
token: JobToken,
) -> Result<Option<req::GotoDefinitionResponse>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index);
let mut res = Vec::new();
for (file_id, symbol) in world.analysis().approximately_resolve_symbol(file_id, offset, &token) {
let line_index = world.analysis().file_line_index(file_id);
let location = to_location(
file_id, symbol.node_range,
&world, &line_index,
)?;
res.push(location)
}
Ok(Some(req::GotoDefinitionResponse::Array(res)))
}
pub fn handle_parent_module(
world: ServerWorld,
params: TextDocumentIdentifier,
_token: JobToken,
) -> Result<Vec<Location>> {
let file_id = params.try_conv_with(&world)?;
let mut res = Vec::new();
for (file_id, symbol) in world.analysis().parent_module(file_id) {
let line_index = world.analysis().file_line_index(file_id);
let location = to_location(
file_id, symbol.node_range,
&world, &line_index
)?;
res.push(location);
}
Ok(res)
}
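// Runnables (tests and binaries) are returned together with ready-made `cargo`
// command lines; `runnable_args` below maps the target kind onto the matching
// `--bin`/`--test`/`--bench`/`--example`/`--lib` flags.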
pub fn handle_runnables(
world: ServerWorld,
params: req::RunnablesParams,
_token: JobToken,
) -> Result<Vec<req::Runnable>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.map(|it| it.conv_with(&line_index));
let mut res = Vec::new();
for runnable in world.analysis().runnables(file_id) {
if let Some(offset) = offset {
if !contains_offset_nonstrict(runnable.range, offset) {
continue;
}
}
let args = runnable_args(&world, file_id, &runnable.kind);
let r = req::Runnable {
range: runnable.range.conv_with(&line_index),
label: match &runnable.kind {
RunnableKind::Test { name } =>
format!("test {}", name),
RunnableKind::Bin =>
"run binary".to_string(),
},
bin: "cargo".to_string(),
args,
env: {
let mut m = HashMap::new();
m.insert(
"RUST_BACKTRACE".to_string(),
"short".to_string(),
);
m
}
};
res.push(r);
}
return Ok(res);
fn runnable_args(world: &ServerWorld, file_id: FileId, kind: &RunnableKind) -> Vec<String> {
let spec = if let Some(&crate_id) = world.analysis().crate_for(file_id).first() {
let file_id = world.analysis().crate_root(crate_id);
let path = world.path_map.get_path(file_id);
world.workspaces.iter()
.filter_map(|ws| {
let tgt = ws.target_by_root(path)?;
Some((tgt.package(ws).name(ws).clone(), tgt.name(ws).clone(), tgt.kind(ws)))
})
.next()
} else {
None
};
let mut res = Vec::new();
match kind {
RunnableKind::Test { name } => {
res.push("test".to_string());
if let Some((pkg_name, tgt_name, tgt_kind)) = spec {
spec_args(pkg_name, tgt_name, tgt_kind, &mut res);
}
res.push("--".to_string());
res.push(name.to_string());
res.push("--nocapture".to_string());
}
RunnableKind::Bin => {
res.push("run".to_string());
if let Some((pkg_name, tgt_name, tgt_kind)) = spec {
spec_args(pkg_name, tgt_name, tgt_kind, &mut res);
}
}
}
res
}
fn spec_args(pkg_name: &str, tgt_name: &str, tgt_kind: TargetKind, buf: &mut Vec<String>) {
buf.push("--package".to_string());
buf.push(pkg_name.to_string());
match tgt_kind {
TargetKind::Bin => {
buf.push("--bin".to_string());
buf.push(tgt_name.to_string());
}
TargetKind::Test => {
buf.push("--test".to_string());
buf.push(tgt_name.to_string());
}
TargetKind::Bench => {
buf.push("--bench".to_string());
buf.push(tgt_name.to_string());
}
TargetKind::Example => {
buf.push("--example".to_string());
buf.push(tgt_name.to_string());
}
TargetKind::Lib => {
buf.push("--lib".to_string());
}
TargetKind::Other => (),
}
}
}
pub fn handle_decorations(
world: ServerWorld,
params: TextDocumentIdentifier,
_token: JobToken,
) -> Result<Vec<Decoration>> {
let file_id = params.try_conv_with(&world)?;
Ok(highlight(&world, file_id))
}
pub fn handle_completion(
world: ServerWorld,
params: req::CompletionParams,
_token: JobToken,
) -> Result<Option<req::CompletionResponse>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index);
let items = match world.analysis().completions(file_id, offset) {
None => return Ok(None),
Some(items) => items,
};
let items = items.into_iter()
.map(|item| {
let mut res = CompletionItem {
label: item.label,
filter_text: item.lookup,
.. Default::default()
};
if let Some(snip) = item.snippet {
res.insert_text = Some(snip);
res.insert_text_format = Some(InsertTextFormat::Snippet);
res.kind = Some(CompletionItemKind::Keyword);
};
res
})
.collect();
Ok(Some(req::CompletionResponse::Array(items)))
}
pub fn handle_code_action(
world: ServerWorld,
params: req::CodeActionParams,
_token: JobToken,
) -> Result<Option<Vec<Command>>> {
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let range = params.range.conv_with(&line_index);
let assists = world.analysis().assists(file_id, range).into_iter();
let fixes = world.analysis().diagnostics(file_id).into_iter()
.filter_map(|d| Some((d.range, d.fix?)))
.filter(|(range, _fix)| contains_offset_nonstrict(*range, range.start()))
.map(|(_range, fix)| fix);
let mut res = Vec::new();
for source_edit in assists.chain(fixes) {
let title = source_edit.label.clone();
let edit = source_edit.try_conv_with(&world)?;
let cmd = Command {
title,
command: "libsyntax-rust.applySourceChange".to_string(),
arguments: Some(vec![to_value(edit).unwrap()]),
};
res.push(cmd);
}
Ok(Some(res))
}
pub fn publish_diagnostics(
world: ServerWorld,
file_id: FileId,
) -> Result<req::PublishDiagnosticsParams> {
let uri = world.file_id_to_uri(file_id)?;
let line_index = world.analysis().file_line_index(file_id);
let diagnostics = world.analysis().diagnostics(file_id)
.into_iter()
.map(|d| Diagnostic {
range: d.range.conv_with(&line_index),
severity: Some(DiagnosticSeverity::Error),
code: None,
source: Some("libsyntax2".to_string()),
message: d.message,
related_information: None,
}).collect();
Ok(req::PublishDiagnosticsParams { uri, diagnostics })
}
pub fn publish_decorations(
world: ServerWorld,
file_id: FileId,
) -> Result<req::PublishDecorationsParams> {
let uri = world.file_id_to_uri(file_id)?;
Ok(req::PublishDecorationsParams {
uri,
decorations: highlight(&world, file_id),
})
}
fn highlight(world: &ServerWorld, file_id: FileId) -> Vec<Decoration> {
let line_index = world.analysis().file_line_index(file_id);
world.analysis().highlight(file_id)
.into_iter()
.map(|h| Decoration {
range: h.range.conv_with(&line_index),
tag: h.tag,
}).collect()
}

View file

@@ -0,0 +1,419 @@
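//! The actual event loop: multiplexes client messages, background tasks,
//! file-system events, workspace loading and library indexing over
//! crossbeam channels.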
mod handlers;
mod subscriptions;
use std::{
path::PathBuf,
collections::{HashMap},
};
use serde::{Serialize, de::DeserializeOwned};
use crossbeam_channel::{unbounded, Sender, Receiver};
use rayon::{self, ThreadPool};
use languageserver_types::{NumberOrString};
use ra_analysis::{FileId, JobHandle, JobToken, LibraryData};
use gen_lsp_server::{
RawRequest, RawNotification, RawMessage, RawResponse, ErrorCode,
handle_shutdown,
};
use {
req,
Result,
vfs::{self, FileEvent},
server_world::{ServerWorldState, ServerWorld},
main_loop::subscriptions::{Subscriptions},
project_model::{CargoWorkspace, workspace_loader},
thread_watcher::Worker,
};
#[derive(Debug)]
enum Task {
Respond(RawResponse),
Notify(RawNotification),
}
pub fn main_loop(
internal_mode: bool,
root: PathBuf,
msg_receiver: &mut Receiver<RawMessage>,
msg_sender: &mut Sender<RawMessage>,
) -> Result<()> {
let pool = rayon::ThreadPoolBuilder::new()
.num_threads(4)
.panic_handler(|_| error!("thread panicked :("))
.build()
.unwrap();
let (task_sender, task_receiver) = unbounded::<Task>();
let (fs_worker, fs_watcher) = vfs::roots_loader();
let (ws_worker, ws_watcher) = workspace_loader();
info!("server initialized, serving requests");
let mut state = ServerWorldState::new();
let mut pending_requests = HashMap::new();
let mut subs = Subscriptions::new();
let main_res = main_loop_inner(
internal_mode,
root,
&pool,
msg_sender,
msg_receiver,
task_sender,
task_receiver.clone(),
fs_worker,
ws_worker,
&mut state,
&mut pending_requests,
&mut subs,
);
info!("waiting for tasks to finish...");
task_receiver.for_each(|task| on_task(task, msg_sender, &mut pending_requests));
info!("...tasks have finished");
info!("joining threadpool...");
drop(pool);
info!("...threadpool has finished");
let fs_res = fs_watcher.stop();
let ws_res = ws_watcher.stop();
main_res?;
fs_res?;
ws_res?;
Ok(())
}
fn main_loop_inner(
internal_mode: bool,
ws_root: PathBuf,
pool: &ThreadPool,
msg_sender: &mut Sender<RawMessage>,
msg_receiver: &mut Receiver<RawMessage>,
task_sender: Sender<Task>,
task_receiver: Receiver<Task>,
fs_worker: Worker<PathBuf, (PathBuf, Vec<FileEvent>)>,
ws_worker: Worker<PathBuf, Result<CargoWorkspace>>,
state: &mut ServerWorldState,
pending_requests: &mut HashMap<u64, JobHandle>,
subs: &mut Subscriptions,
) -> Result<()> {
let (libdata_sender, libdata_receiver) = unbounded();
ws_worker.send(ws_root.clone());
fs_worker.send(ws_root.clone());
loop {
#[derive(Debug)]
enum Event {
Msg(RawMessage),
Task(Task),
Fs(PathBuf, Vec<FileEvent>),
Ws(Result<CargoWorkspace>),
Lib(LibraryData),
}
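// Block until any of the five event sources above has something for us.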
trace!("selecting");
let event = select! {
recv(msg_receiver, msg) => match msg {
Some(msg) => Event::Msg(msg),
None => bail!("client exited without shutdown"),
},
recv(task_receiver, task) => Event::Task(task.unwrap()),
recv(fs_worker.out, events) => match events {
None => bail!("roots watcher died"),
Some((pb, events)) => Event::Fs(pb, events),
}
recv(ws_worker.out, ws) => match ws {
None => bail!("workspace watcher died"),
Some(ws) => Event::Ws(ws),
}
recv(libdata_receiver, data) => Event::Lib(data.unwrap())
};
let mut state_changed = false;
match event {
Event::Task(task) => on_task(task, msg_sender, pending_requests),
Event::Fs(root, events) => {
info!("fs change, {}, {} events", root.display(), events.len());
if root == ws_root {
state.apply_fs_changes(events);
} else {
let (files, resolver) = state.events_to_files(events);
let sender = libdata_sender.clone();
pool.spawn(move || {
let start = ::std::time::Instant::now();
info!("indexing {} ... ", root.display());
let data = LibraryData::prepare(files, resolver);
info!("indexed {:?} {}", start.elapsed(), root.display());
sender.send(data);
});
}
state_changed = true;
}
Event::Ws(ws) => {
match ws {
Ok(ws) => {
let workspaces = vec![ws];
feedback(internal_mode, "workspace loaded", msg_sender);
for ws in workspaces.iter() {
for pkg in ws.packages().filter(|pkg| !pkg.is_member(ws)) {
debug!("sending root, {}", pkg.root(ws).to_path_buf().display());
fs_worker.send(pkg.root(ws).to_path_buf());
}
}
state.set_workspaces(workspaces);
state_changed = true;
}
Err(e) => warn!("loading workspace failed: {}", e),
}
}
Event::Lib(lib) => {
feedback(internal_mode, "library loaded", msg_sender);
state.add_lib(lib);
}
Event::Msg(msg) => {
match msg {
RawMessage::Request(req) => {
let req = match handle_shutdown(req, msg_sender) {
Some(req) => req,
None => return Ok(()),
};
match on_request(state, pending_requests, pool, &task_sender, req)? {
None => (),
Some(req) => {
error!("unknown request: {:?}", req);
let resp = RawResponse::err(
req.id,
ErrorCode::MethodNotFound as i32,
"unknown request".to_string(),
);
msg_sender.send(RawMessage::Response(resp))
}
}
}
RawMessage::Notification(not) => {
on_notification(msg_sender, state, pending_requests, subs, not)?;
state_changed = true;
}
RawMessage::Response(resp) => {
error!("unexpected response: {:?}", resp)
}
}
}
};
if state_changed {
update_file_notifications_on_threadpool(
pool,
state.snapshot(),
task_sender.clone(),
subs.subscriptions(),
)
}
}
}
fn on_task(
task: Task,
msg_sender: &mut Sender<RawMessage>,
pending_requests: &mut HashMap<u64, JobHandle>,
) {
match task {
Task::Respond(response) => {
if let Some(handle) = pending_requests.remove(&response.id) {
assert!(handle.has_completed());
}
msg_sender.send(RawMessage::Response(response))
}
Task::Notify(n) =>
msg_sender.send(RawMessage::Notification(n)),
}
}
fn on_request(
world: &mut ServerWorldState,
pending_requests: &mut HashMap<u64, JobHandle>,
pool: &ThreadPool,
sender: &Sender<Task>,
req: RawRequest,
) -> Result<Option<RawRequest>> {
let mut pool_dispatcher = PoolDispatcher {
req: Some(req),
res: None,
pool, world, sender
};
let req = pool_dispatcher
.on::<req::SyntaxTree>(handlers::handle_syntax_tree)?
.on::<req::ExtendSelection>(handlers::handle_extend_selection)?
.on::<req::FindMatchingBrace>(handlers::handle_find_matching_brace)?
.on::<req::JoinLines>(handlers::handle_join_lines)?
.on::<req::OnTypeFormatting>(handlers::handle_on_type_formatting)?
.on::<req::DocumentSymbolRequest>(handlers::handle_document_symbol)?
.on::<req::WorkspaceSymbol>(handlers::handle_workspace_symbol)?
.on::<req::GotoDefinition>(handlers::handle_goto_definition)?
.on::<req::ParentModule>(handlers::handle_parent_module)?
.on::<req::Runnables>(handlers::handle_runnables)?
.on::<req::DecorationsRequest>(handlers::handle_decorations)?
.on::<req::Completion>(handlers::handle_completion)?
.on::<req::CodeActionRequest>(handlers::handle_code_action)?
.finish();
match req {
Ok((id, handle)) => {
let inserted = pending_requests.insert(id, handle).is_none();
assert!(inserted, "duplicate request: {}", id);
Ok(None)
},
Err(req) => Ok(Some(req)),
}
}
fn on_notification(
msg_sender: &mut Sender<RawMessage>,
state: &mut ServerWorldState,
pending_requests: &mut HashMap<u64, JobHandle>,
subs: &mut Subscriptions,
not: RawNotification,
) -> Result<()> {
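// Notifications are handled by trying one `cast` after another; a failed cast
// hands the raw notification back so the next block can try it.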
let not = match not.cast::<req::Cancel>() {
Ok(params) => {
let id = match params.id {
NumberOrString::Number(id) => id,
NumberOrString::String(id) => {
panic!("string id's not supported: {:?}", id);
}
};
if let Some(handle) = pending_requests.remove(&id) {
handle.cancel();
}
return Ok(())
}
Err(not) => not,
};
let not = match not.cast::<req::DidOpenTextDocument>() {
Ok(params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
let file_id = state.add_mem_file(path, params.text_document.text);
subs.add_sub(file_id);
return Ok(())
}
Err(not) => not,
};
let not = match not.cast::<req::DidChangeTextDocument>() {
Ok(mut params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
let text = params.content_changes.pop()
.ok_or_else(|| format_err!("empty changes"))?
.text;
state.change_mem_file(path.as_path(), text)?;
return Ok(())
}
Err(not) => not,
};
let not = match not.cast::<req::DidCloseTextDocument>() {
Ok(params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
let file_id = state.remove_mem_file(path.as_path())?;
subs.remove_sub(file_id);
let params = req::PublishDiagnosticsParams { uri, diagnostics: Vec::new() };
let not = RawNotification::new::<req::PublishDiagnostics>(&params);
msg_sender.send(RawMessage::Notification(not));
return Ok(())
}
Err(not) => not,
};
error!("unhandled notification: {:?}", not);
Ok(())
}
struct PoolDispatcher<'a> {
req: Option<RawRequest>,
res: Option<(u64, JobHandle)>,
pool: &'a ThreadPool,
world: &'a ServerWorldState,
sender: &'a Sender<Task>,
}
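// Each `.on::<R>(..)` call tries to downcast the pending request to `R`; on a
// match the handler runs on the thread pool and its response comes back
// through `sender` as a `Task::Respond`, otherwise the request is kept for the
// next `.on` in the chain (see `on_request` above).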
impl<'a> PoolDispatcher<'a> {
fn on<'b, R>(
&'b mut self,
f: fn(ServerWorld, R::Params, JobToken) -> Result<R::Result>
) -> Result<&'b mut Self>
where R: req::Request,
R::Params: DeserializeOwned + Send + 'static,
R::Result: Serialize + 'static,
{
let req = match self.req.take() {
None => return Ok(self),
Some(req) => req,
};
match req.cast::<R>() {
Ok((id, params)) => {
let (handle, token) = JobHandle::new();
let world = self.world.snapshot();
let sender = self.sender.clone();
self.pool.spawn(move || {
let resp = match f(world, params, token) {
Ok(resp) => RawResponse::ok::<R>(id, &resp),
Err(e) => RawResponse::err(id, ErrorCode::InternalError as i32, e.to_string()),
};
let task = Task::Respond(resp);
sender.send(task);
});
self.res = Some((id, handle));
}
Err(req) => {
self.req = Some(req)
}
}
Ok(self)
}
fn finish(&mut self) -> ::std::result::Result<(u64, JobHandle), RawRequest> {
match (self.res.take(), self.req.take()) {
(Some(res), None) => Ok(res),
(None, Some(req)) => Err(req),
_ => unreachable!(),
}
}
}
fn update_file_notifications_on_threadpool(
pool: &ThreadPool,
world: ServerWorld,
sender: Sender<Task>,
subscriptions: Vec<FileId>,
) {
pool.spawn(move || {
for file_id in subscriptions {
match handlers::publish_diagnostics(world.clone(), file_id) {
Err(e) => {
error!("failed to compute diagnostics: {:?}", e)
}
Ok(params) => {
let not = RawNotification::new::<req::PublishDiagnostics>(&params);
sender.send(Task::Notify(not));
}
}
match handlers::publish_decorations(world.clone(), file_id) {
Err(e) => {
error!("failed to compute decorations: {:?}", e)
}
Ok(params) => {
let not = RawNotification::new::<req::PublishDecorations>(&params);
sender.send(Task::Notify(not))
}
}
}
});
}
fn feedback(internal_mode: bool, msg: &str, sender: &Sender<RawMessage>) {
if !internal_mode {
return;
}
let not = RawNotification::new::<req::InternalFeedback>(&msg.to_string());
sender.send(RawMessage::Notification(not));
}

View file

@@ -0,0 +1,21 @@
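//! Tracks which files the client currently has open, so diagnostics and
//! decorations are only published for those.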
use std::collections::HashSet;
use ra_analysis::FileId;
pub struct Subscriptions {
subs: HashSet<FileId>,
}
impl Subscriptions {
pub fn new() -> Subscriptions {
Subscriptions { subs: HashSet::new() }
}
pub fn add_sub(&mut self, file_id: FileId) {
self.subs.insert(file_id);
}
pub fn remove_sub(&mut self, file_id: FileId) {
self.subs.remove(&file_id);
}
pub fn subscriptions(&self) -> Vec<FileId> {
self.subs.iter().cloned().collect()
}
}

View file

@@ -0,0 +1,110 @@
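//! Bidirectional `PathBuf` <-> `FileId` mapping, kept in persistent `im` maps
//! so a cheap clone can be handed to the analysis as a `FileResolver`.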
use std::path::{PathBuf, Path, Component};
use im;
use relative_path::RelativePath;
use ra_analysis::{FileId, FileResolver};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Root {
Workspace, Lib
}
#[derive(Debug, Default, Clone)]
pub struct PathMap {
next_id: u32,
path2id: im::HashMap<PathBuf, FileId>,
id2path: im::HashMap<FileId, PathBuf>,
id2root: im::HashMap<FileId, Root>,
}
impl PathMap {
pub fn new() -> PathMap {
Default::default()
}
pub fn get_or_insert(&mut self, path: PathBuf, root: Root) -> FileId {
self.path2id.get(path.as_path())
.map(|&id| id)
.unwrap_or_else(|| {
let id = self.new_file_id();
self.insert(path, id, root);
id
})
}
pub fn get_id(&self, path: &Path) -> Option<FileId> {
self.path2id.get(path).map(|&id| id)
}
pub fn get_path(&self, file_id: FileId) -> &Path {
self.id2path.get(&file_id)
.unwrap()
.as_path()
}
pub fn get_root(&self, file_id: FileId) -> Root {
self.id2root[&file_id]
}
fn insert(&mut self, path: PathBuf, file_id: FileId, root: Root) {
self.path2id.insert(path.clone(), file_id);
self.id2path.insert(file_id, path.clone());
self.id2root.insert(file_id, root);
}
fn new_file_id(&mut self) -> FileId {
let id = FileId(self.next_id);
self.next_id += 1;
id
}
}
impl FileResolver for PathMap {
fn file_stem(&self, file_id: FileId) -> String {
self.get_path(file_id).file_stem().unwrap().to_str().unwrap().to_string()
}
fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
let path = path.to_path(&self.get_path(file_id));
let path = normalize(&path);
self.get_id(&path)
}
}
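// Purely lexical normalization of `.` and `..` components; no file-system
// access is involved.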
fn normalize(path: &Path) -> PathBuf {
let mut components = path.components().peekable();
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {
components.next();
PathBuf::from(c.as_os_str())
} else {
PathBuf::new()
};
for component in components {
match component {
Component::Prefix(..) => unreachable!(),
Component::RootDir => {
ret.push(component.as_os_str());
}
Component::CurDir => {}
Component::ParentDir => {
ret.pop();
}
Component::Normal(c) => {
ret.push(c);
}
}
}
ret
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_resolve() {
let mut m = PathMap::new();
let id1 = m.get_or_insert(PathBuf::from("/foo"), Root::Workspace);
let id2 = m.get_or_insert(PathBuf::from("/foo/bar.rs"), Root::Workspace);
assert_eq!(
m.resolve(id1, &RelativePath::new("bar.rs")),
Some(id2),
)
}
}

View file

@@ -0,0 +1,175 @@
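//! A `cargo metadata`-based model of the workspace: packages and targets live
//! in flat vectors and are referred to via the index newtypes `Package` and
//! `Target`.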
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
};
use cargo_metadata::{metadata_run, CargoOpt};
use ra_syntax::SmolStr;
use {
Result,
thread_watcher::{Worker, ThreadWatcher},
};
#[derive(Debug, Clone)]
pub struct CargoWorkspace {
packages: Vec<PackageData>,
targets: Vec<TargetData>,
}
#[derive(Clone, Copy, Debug, Serialize)]
pub struct Package(usize);
#[derive(Clone, Copy, Debug, Serialize)]
pub struct Target(usize);
#[derive(Debug, Clone)]
struct PackageData {
name: SmolStr,
manifest: PathBuf,
targets: Vec<Target>,
is_member: bool,
}
#[derive(Debug, Clone)]
struct TargetData {
pkg: Package,
name: SmolStr,
root: PathBuf,
kind: TargetKind,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TargetKind {
Bin, Lib, Example, Test, Bench, Other,
}
impl Package {
pub fn name(self, ws: &CargoWorkspace) -> &str {
ws.pkg(self).name.as_str()
}
pub fn root(self, ws: &CargoWorkspace) -> &Path {
ws.pkg(self).manifest.parent().unwrap()
}
pub fn targets<'a>(self, ws: &'a CargoWorkspace) -> impl Iterator<Item=Target> + 'a {
ws.pkg(self).targets.iter().cloned()
}
pub fn is_member(self, ws: &CargoWorkspace) -> bool {
ws.pkg(self).is_member
}
}
impl Target {
pub fn package(self, ws: &CargoWorkspace) -> Package {
ws.tgt(self).pkg
}
pub fn name(self, ws: &CargoWorkspace) -> &str {
ws.tgt(self).name.as_str()
}
pub fn root(self, ws: &CargoWorkspace) -> &Path {
ws.tgt(self).root.as_path()
}
pub fn kind(self, ws: &CargoWorkspace) -> TargetKind {
ws.tgt(self).kind
}
}
impl CargoWorkspace {
pub fn from_cargo_metadata(path: &Path) -> Result<CargoWorkspace> {
let cargo_toml = find_cargo_toml(path)?;
let meta = metadata_run(
Some(cargo_toml.as_path()),
true,
Some(CargoOpt::AllFeatures)
).map_err(|e| format_err!("cargo metadata failed: {}", e))?;
let mut pkg_by_id = HashMap::new();
let mut packages = Vec::new();
let mut targets = Vec::new();
let ws_members: HashSet<String> = meta.workspace_members
.into_iter()
.map(|it| it.raw)
.collect();
for meta_pkg in meta.packages {
let pkg = Package(packages.len());
let is_member = ws_members.contains(&meta_pkg.id);
pkg_by_id.insert(meta_pkg.id.clone(), pkg);
let mut pkg_data = PackageData {
name: meta_pkg.name.into(),
manifest: PathBuf::from(meta_pkg.manifest_path),
targets: Vec::new(),
is_member,
};
for meta_tgt in meta_pkg.targets {
let tgt = Target(targets.len());
targets.push(TargetData {
pkg,
name: meta_tgt.name.into(),
root: PathBuf::from(meta_tgt.src_path),
kind: TargetKind::new(meta_tgt.kind.as_slice()),
});
pkg_data.targets.push(tgt);
}
packages.push(pkg_data)
}
Ok(CargoWorkspace { packages, targets })
}
pub fn packages<'a>(&'a self) -> impl Iterator<Item=Package> + 'a {
(0..self.packages.len()).map(Package)
}
pub fn target_by_root(&self, root: &Path) -> Option<Target> {
self.packages()
.filter_map(|pkg| pkg.targets(self).find(|it| it.root(self) == root))
.next()
}
fn pkg(&self, pkg: Package) -> &PackageData {
&self.packages[pkg.0]
}
fn tgt(&self, tgt: Target) -> &TargetData {
&self.targets[tgt.0]
}
}
fn find_cargo_toml(path: &Path) -> Result<PathBuf> {
if path.ends_with("Cargo.toml") {
return Ok(path.to_path_buf());
}
let mut curr = Some(path);
while let Some(path) = curr {
let candidate = path.join("Cargo.toml");
if candidate.exists() {
return Ok(candidate);
}
curr = path.parent();
}
bail!("can't find Cargo.toml at {}", path.display())
}
impl TargetKind {
fn new(kinds: &[String]) -> TargetKind {
for kind in kinds {
return match kind.as_str() {
"bin" => TargetKind::Bin,
"test" => TargetKind::Test,
"bench" => TargetKind::Bench,
"example" => TargetKind::Example,
_ if kind.contains("lib") => TargetKind::Lib,
_ => continue,
}
}
TargetKind::Other
}
}
pub fn workspace_loader() -> (Worker<PathBuf, Result<CargoWorkspace>>, ThreadWatcher) {
Worker::<PathBuf, Result<CargoWorkspace>>::spawn(
"workspace loader",
1,
|input_receiver, output_sender| {
input_receiver
.into_iter()
.map(|path| CargoWorkspace::from_cargo_metadata(path.as_path()))
.for_each(|it| output_sender.send(it))
}
)
}

View file

@@ -0,0 +1,176 @@
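//! LSP request and notification definitions, including the server's custom
//! `m/...` extension methods (syntax tree, extend selection, runnables, ...).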
use std::collections::HashMap;
use languageserver_types::{TextDocumentIdentifier, Range, Url, Position, Location};
use url_serde;
pub use languageserver_types::{
request::*, notification::*,
InitializeResult, PublishDiagnosticsParams,
DocumentSymbolParams, DocumentSymbolResponse,
CodeActionParams, ApplyWorkspaceEditParams,
ExecuteCommandParams,
WorkspaceSymbolParams,
TextDocumentPositionParams,
TextEdit,
CompletionParams, CompletionResponse,
DocumentOnTypeFormattingParams,
TextDocumentEdit,
};
pub enum SyntaxTree {}
impl Request for SyntaxTree {
type Params = SyntaxTreeParams;
type Result = String;
const METHOD: &'static str = "m/syntaxTree";
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct SyntaxTreeParams {
pub text_document: TextDocumentIdentifier
}
pub enum ExtendSelection {}
impl Request for ExtendSelection {
type Params = ExtendSelectionParams;
type Result = ExtendSelectionResult;
const METHOD: &'static str = "m/extendSelection";
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ExtendSelectionParams {
pub text_document: TextDocumentIdentifier,
pub selections: Vec<Range>,
}
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ExtendSelectionResult {
pub selections: Vec<Range>,
}
pub enum FindMatchingBrace {}
impl Request for FindMatchingBrace {
type Params = FindMatchingBraceParams;
type Result = Vec<Position>;
const METHOD: &'static str = "m/findMatchingBrace";
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FindMatchingBraceParams {
pub text_document: TextDocumentIdentifier,
pub offsets: Vec<Position>,
}
pub enum DecorationsRequest {}
impl Request for DecorationsRequest {
type Params = TextDocumentIdentifier;
type Result = Vec<Decoration>;
const METHOD: &'static str = "m/decorationsRequest";
}
pub enum PublishDecorations {}
impl Notification for PublishDecorations {
type Params = PublishDecorationsParams;
const METHOD: &'static str = "m/publishDecorations";
}
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct PublishDecorationsParams {
#[serde(with = "url_serde")]
pub uri: Url,
pub decorations: Vec<Decoration>,
}
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Decoration {
pub range: Range,
pub tag: &'static str
}
pub enum ParentModule {}
impl Request for ParentModule {
type Params = TextDocumentIdentifier;
type Result = Vec<Location>;
const METHOD: &'static str = "m/parentModule";
}
pub enum JoinLines {}
impl Request for JoinLines {
type Params = JoinLinesParams;
type Result = SourceChange;
const METHOD: &'static str = "m/joinLines";
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct JoinLinesParams {
pub text_document: TextDocumentIdentifier,
pub range: Range,
}
pub enum Runnables {}
impl Request for Runnables {
type Params = RunnablesParams;
type Result = Vec<Runnable>;
const METHOD: &'static str = "m/runnables";
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct RunnablesParams {
pub text_document: TextDocumentIdentifier,
pub position: Option<Position>,
}
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Runnable {
pub range: Range,
pub label: String,
pub bin: String,
pub args: Vec<String>,
pub env: HashMap<String, String>,
}
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct SourceChange {
pub label: String,
pub source_file_edits: Vec<TextDocumentEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub cursor_position: Option<TextDocumentPositionParams>,
}
#[derive(Serialize, Debug)]
#[serde(tag = "type", rename_all = "camelCase")]
pub enum FileSystemEdit {
CreateFile {
#[serde(with = "url_serde")]
uri: Url
},
MoveFile {
#[serde(with = "url_serde")]
src: Url,
#[serde(with = "url_serde")]
dst: Url,
}
}
pub enum InternalFeedback {}
impl Notification for InternalFeedback {
const METHOD: &'static str = "internalFeedback";
type Params = String;
}

View file

@@ -0,0 +1,167 @@
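//! `ServerWorldState` is the mutable state owned by the main loop;
//! `ServerWorld` is an immutable snapshot of it that is handed to handlers
//! running on the thread pool.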
use std::{
fs,
path::{PathBuf, Path},
collections::HashMap,
sync::Arc,
};
use languageserver_types::Url;
use ra_analysis::{FileId, AnalysisHost, Analysis, CrateGraph, CrateId, LibraryData, FileResolver};
use {
Result,
path_map::{PathMap, Root},
vfs::{FileEvent, FileEventKind},
project_model::CargoWorkspace,
};
#[derive(Debug)]
pub struct ServerWorldState {
pub workspaces: Arc<Vec<CargoWorkspace>>,
pub analysis_host: AnalysisHost,
pub path_map: PathMap,
pub mem_map: HashMap<FileId, Option<String>>,
}
#[derive(Clone)]
pub struct ServerWorld {
pub workspaces: Arc<Vec<CargoWorkspace>>,
pub analysis: Analysis,
pub path_map: PathMap,
}
impl ServerWorldState {
pub fn new() -> ServerWorldState {
ServerWorldState {
workspaces: Arc::new(Vec::new()),
analysis_host: AnalysisHost::new(),
path_map: PathMap::new(),
mem_map: HashMap::new(),
}
}
pub fn apply_fs_changes(&mut self, events: Vec<FileEvent>) {
{
let pm = &mut self.path_map;
let mm = &mut self.mem_map;
let changes = events.into_iter()
.map(|event| {
let text = match event.kind {
FileEventKind::Add(text) => Some(text),
};
(event.path, text)
})
.map(|(path, text)| {
(pm.get_or_insert(path, Root::Workspace), text)
})
.filter_map(|(id, text)| {
if mm.contains_key(&id) {
mm.insert(id, text);
None
} else {
Some((id, text))
}
});
self.analysis_host.change_files(changes);
}
self.analysis_host.set_file_resolver(Arc::new(self.path_map.clone()));
}
pub fn events_to_files(&mut self, events: Vec<FileEvent>) -> (Vec<(FileId, String)>, Arc<FileResolver>) {
let files = {
let pm = &mut self.path_map;
events.into_iter()
.map(|event| {
let text = match event.kind {
FileEventKind::Add(text) => text,
};
(event.path, text)
})
.map(|(path, text)| (pm.get_or_insert(path, Root::Lib), text))
.collect()
};
let resolver = Arc::new(self.path_map.clone());
(files, resolver)
}
pub fn add_lib(&mut self, data: LibraryData) {
self.analysis_host.add_library(data);
}
pub fn add_mem_file(&mut self, path: PathBuf, text: String) -> FileId {
let file_id = self.path_map.get_or_insert(path, Root::Workspace);
self.analysis_host.set_file_resolver(Arc::new(self.path_map.clone()));
self.mem_map.insert(file_id, None);
if self.path_map.get_root(file_id) != Root::Lib {
self.analysis_host.change_file(file_id, Some(text));
}
file_id
}
pub fn change_mem_file(&mut self, path: &Path, text: String) -> Result<()> {
let file_id = self.path_map.get_id(path).ok_or_else(|| {
format_err!("change to unknown file: {}", path.display())
})?;
if self.path_map.get_root(file_id) != Root::Lib {
self.analysis_host.change_file(file_id, Some(text));
}
Ok(())
}
pub fn remove_mem_file(&mut self, path: &Path) -> Result<FileId> {
let file_id = self.path_map.get_id(path).ok_or_else(|| {
format_err!("change to unknown file: {}", path.display())
})?;
match self.mem_map.remove(&file_id) {
Some(_) => (),
None => bail!("unmatched close notification"),
};
// Do this via file watcher ideally.
let text = fs::read_to_string(path).ok();
if self.path_map.get_root(file_id) != Root::Lib {
self.analysis_host.change_file(file_id, text);
}
Ok(file_id)
}
pub fn set_workspaces(&mut self, ws: Vec<CargoWorkspace>) {
let mut crate_roots = HashMap::new();
ws.iter()
.flat_map(|ws| {
ws.packages()
.flat_map(move |pkg| pkg.targets(ws))
.map(move |tgt| tgt.root(ws))
})
.for_each(|root| {
if let Some(file_id) = self.path_map.get_id(root) {
let crate_id = CrateId(crate_roots.len() as u32);
crate_roots.insert(crate_id, file_id);
}
});
let crate_graph = CrateGraph { crate_roots };
self.workspaces = Arc::new(ws);
self.analysis_host.set_crate_graph(crate_graph);
}
pub fn snapshot(&self) -> ServerWorld {
ServerWorld {
workspaces: Arc::clone(&self.workspaces),
analysis: self.analysis_host.analysis(),
path_map: self.path_map.clone()
}
}
}
impl ServerWorld {
pub fn analysis(&self) -> &Analysis {
&self.analysis
}
pub fn uri_to_file_id(&self, uri: &Url) -> Result<FileId> {
let path = uri.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
self.path_map.get_id(&path).ok_or_else(|| format_err!("unknown file: {}", path.display()))
}
pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> {
let path = self.path_map.get_path(id);
let url = Url::from_file_path(path)
.map_err(|()| format_err!("can't convert path to url: {}", path.display()))?;
Ok(url)
}
}

View file

@@ -0,0 +1,70 @@
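//! A small worker abstraction: a channel pair for sending work in and getting
//! results out, plus a `ThreadWatcher` that makes sure the worker thread is
//! joined (and complains via `DropBomb` if dropped without being stopped).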
use std::thread;
use crossbeam_channel::{bounded, unbounded, Sender, Receiver};
use drop_bomb::DropBomb;
use Result;
pub struct Worker<I, O> {
pub inp: Sender<I>,
pub out: Receiver<O>,
}
impl<I, O> Worker<I, O> {
pub fn spawn<F>(name: &'static str, buf: usize, f: F) -> (Self, ThreadWatcher)
where
F: FnOnce(Receiver<I>, Sender<O>) + Send + 'static,
I: Send + 'static,
O: Send + 'static,
{
let ((inp, out), inp_r, out_s) = worker_chan(buf);
let worker = Worker { inp, out };
let watcher = ThreadWatcher::spawn(name, move || f(inp_r, out_s));
(worker, watcher)
}
pub fn stop(self) -> Receiver<O> {
self.out
}
pub fn send(&self, item: I) {
self.inp.send(item)
}
}
pub struct ThreadWatcher {
name: &'static str,
thread: thread::JoinHandle<()>,
bomb: DropBomb,
}
impl ThreadWatcher {
fn spawn(name: &'static str, f: impl FnOnce() + Send + 'static) -> ThreadWatcher {
let thread = thread::spawn(f);
ThreadWatcher {
name,
thread,
bomb: DropBomb::new(format!("ThreadWatcher {} was not stopped", name)),
}
}
pub fn stop(mut self) -> Result<()> {
info!("waiting for {} to finish ...", self.name);
let name = self.name;
self.bomb.defuse();
let res = self.thread.join()
.map_err(|_| format_err!("ThreadWatcher {} died", name));
match &res {
Ok(()) => info!("... {} terminated with ok", name),
Err(_) => error!("... {} terminated with err", name)
}
res
}
}
/// Sets up worker channels in a deadlock-avoiding way.
/// If one sets both input and output buffers to a fixed size,
/// a worker might get stuck.
fn worker_chan<I, O>(buf: usize) -> ((Sender<I>, Receiver<O>), Receiver<I>, Sender<O>) {
let (input_sender, input_receiver) = bounded::<I>(buf);
let (output_sender, output_receiver) = unbounded::<O>();
((input_sender, output_receiver), input_receiver, output_sender)
}

View file

@@ -0,0 +1,71 @@
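//! Loads source roots from disk: walks a directory, reads every `.rs` file and
//! reports each one as a `FileEvent` with `FileEventKind::Add`.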
use std::{
path::{PathBuf, Path},
fs,
};
use walkdir::WalkDir;
use {
thread_watcher::{Worker, ThreadWatcher},
};
#[derive(Debug)]
pub struct FileEvent {
pub path: PathBuf,
pub kind: FileEventKind,
}
#[derive(Debug)]
pub enum FileEventKind {
Add(String),
}
pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, ThreadWatcher) {
Worker::<PathBuf, (PathBuf, Vec<FileEvent>)>::spawn(
"roots loader",
128, |input_receiver, output_sender| {
input_receiver
.into_iter()
.map(|path| {
debug!("loading {} ...", path.as_path().display());
let events = load_root(path.as_path());
debug!("... loaded {}", path.as_path().display());
(path, events)
})
.for_each(|it| output_sender.send(it))
}
)
}
fn load_root(path: &Path) -> Vec<FileEvent> {
let mut res = Vec::new();
for entry in WalkDir::new(path) {
let entry = match entry {
Ok(entry) => entry,
Err(e) => {
warn!("watcher error: {}", e);
continue;
}
};
if !entry.file_type().is_file() {
continue;
}
let path = entry.path();
if path.extension().and_then(|os| os.to_str()) != Some("rs") {
continue;
}
let text = match fs::read_to_string(path) {
Ok(text) => text,
Err(e) => {
warn!("watcher error: {}", e);
continue;
}
};
res.push(FileEvent {
path: path.to_owned(),
kind: FileEventKind::Add(text),
})
}
res
}