Cargo Format

Run `cargo fmt` and ignore generated files
Jeremy A. Kolb 2018-10-15 17:44:23 -04:00
parent 39cb6c6d3f
commit 61f3a438d3
76 changed files with 1936 additions and 1530 deletions
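The commit message says generated files are excluded from formatting, but the diff below does not record how. As a minimal sketch (the module name, file path, and choice of mechanism here are assumptions, not taken from this commit), two common ways to keep `cargo fmt` away from generated code are the `rustfmt::skip` tool attribute on an item and the nightly-only `ignore` key in `rustfmt.toml`:

```rust
// A minimal sketch of excluding generated code from `cargo fmt`.
// The `generated` module and its contents are hypothetical, purely for
// illustration; this is not the layout used by the commit.

// Option 1: opt a single item out of formatting with the `rustfmt::skip`
// tool attribute. Everything outside the attribute is still reformatted.
#[rustfmt::skip]
pub mod generated {
    // Deliberately irregular layout that rustfmt would otherwise rewrite.
    pub const SYNTAX_KINDS: &[&str] = &[
        "IDENT",      "LIFETIME",
        "WHITESPACE", "COMMENT",
    ];
}

// Option 2: list the files in rustfmt.toml instead, e.g.
//
//     ignore = ["src/generated.rs"]
//
// `ignore` is an unstable rustfmt option (it needs a nightly toolchain), so
// whether this commit used it is an assumption, not something the diff shows.

fn main() {
    // The rest of the crate stays under normal formatting.
    println!("{} syntax kinds", generated::SYNTAX_KINDS.len());
}
```

With that in place, `cargo fmt --all` reformats the workspace while leaving the skipped item alone, and `cargo fmt --all -- --check` verifies formatting without rewriting files.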

View file

@ -1,14 +1,8 @@
use languageserver_types::{
ServerCapabilities,
CodeActionProviderCapability,
FoldingRangeProviderCapability,
TextDocumentSyncCapability,
CodeActionProviderCapability, CompletionOptions, DocumentOnTypeFormattingOptions,
ExecuteCommandOptions, FoldingRangeProviderCapability, ServerCapabilities,
SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
TextDocumentSyncOptions,
TextDocumentSyncKind,
ExecuteCommandOptions,
CompletionOptions,
SignatureHelpOptions,
DocumentOnTypeFormattingOptions,
};
pub fn server_capabilities() -> ServerCapabilities {
@ -20,7 +14,7 @@ pub fn server_capabilities() -> ServerCapabilities {
will_save: None,
will_save_wait_until: None,
save: None,
}
},
)),
hover_provider: None,
completion_provider: Some(CompletionOptions {
@ -28,7 +22,7 @@ pub fn server_capabilities() -> ServerCapabilities {
trigger_characters: None,
}),
signature_help_provider: Some(SignatureHelpOptions {
trigger_characters: Some(vec!["(".to_string(), ",".to_string()])
trigger_characters: Some(vec!["(".to_string(), ",".to_string()]),
}),
definition_provider: Some(true),
type_definition_provider: None,

View file

@ -1,17 +1,12 @@
use languageserver_types::{
Range, SymbolKind, Position, TextEdit, Location, Url,
TextDocumentIdentifier, VersionedTextDocumentIdentifier, TextDocumentItem,
TextDocumentPositionParams, TextDocumentEdit,
Location, Position, Range, SymbolKind, TextDocumentEdit, TextDocumentIdentifier,
TextDocumentItem, TextDocumentPositionParams, TextEdit, Url, VersionedTextDocumentIdentifier,
};
use ra_editor::{LineIndex, LineCol, Edit, AtomEdit};
use ra_syntax::{SyntaxKind, TextUnit, TextRange};
use ra_analysis::{FileId, SourceChange, SourceFileEdit, FileSystemEdit};
use ra_analysis::{FileId, FileSystemEdit, SourceChange, SourceFileEdit};
use ra_editor::{AtomEdit, Edit, LineCol, LineIndex};
use ra_syntax::{SyntaxKind, TextRange, TextUnit};
use crate::{
Result,
server_world::ServerWorld,
req,
};
use crate::{req, server_world::ServerWorld, Result};
pub trait Conv {
type Output;
@ -190,8 +185,12 @@ impl TryConvWith for SourceChange {
None => None,
Some(pos) => {
let line_index = world.analysis().file_line_index(pos.file_id);
let edits = self.source_file_edits.iter().find(|it| it.file_id == pos.file_id)
.map(|it| it.edits.as_slice()).unwrap_or(&[]);
let edits = self
.source_file_edits
.iter()
.find(|it| it.file_id == pos.file_id)
.map(|it| it.edits.as_slice())
.unwrap_or(&[]);
let line_col = translate_offset_with_edit(&*line_index, pos.offset, edits);
let position = Position::new(line_col.line as u64, u32::from(line_col.col) as u64);
Some(TextDocumentPositionParams {
@ -224,11 +223,11 @@ fn translate_offset_with_edit(
let fallback = pre_edit_index.line_col(offset);
let edit = match edits.first() {
None => return fallback,
Some(edit) => edit
Some(edit) => edit,
};
let end_offset = edit.delete.start() + TextUnit::of_str(&edit.insert);
if !(edit.delete.start() <= offset && offset <= end_offset) {
return fallback
return fallback;
}
let rel_offset = offset - edit.delete.start();
let in_edit_line_col = LineIndex::new(&edit.insert).line_col(rel_offset);
@ -255,11 +254,11 @@ impl TryConvWith for SourceFileEdit {
version: None,
};
let line_index = world.analysis().file_line_index(self.file_id);
let edits = self.edits
.into_iter()
.map_conv_with(&line_index)
.collect();
Ok(TextDocumentEdit { text_document, edits })
let edits = self.edits.into_iter().map_conv_with(&line_index).collect();
Ok(TextDocumentEdit {
text_document,
edits,
})
}
}
@ -273,13 +272,13 @@ impl TryConvWith for FileSystemEdit {
let path = &path.as_str()[3..]; // strip `../` b/c url is weird
let uri = uri.join(path)?;
req::FileSystemEdit::CreateFile { uri }
},
}
FileSystemEdit::MoveFile { file, path } => {
let src = world.file_id_to_uri(file)?;
let path = &path.as_str()[3..]; // strip `../` b/c url is weird
let dst = src.join(path)?;
req::FileSystemEdit::MoveFile { src, dst }
},
}
};
Ok(res)
}
@ -291,12 +290,9 @@ pub fn to_location(
world: &ServerWorld,
line_index: &LineIndex,
) -> Result<Location> {
let url = file_id.try_conv_with(world)?;
let loc = Location::new(
url,
range.conv_with(line_index),
);
Ok(loc)
let url = file_id.try_conv_with(world)?;
let loc = Location::new(url, range.conv_with(line_index));
Ok(loc)
}
pub trait MapConvWith<'a>: Sized + 'a {
@ -309,8 +305,9 @@ pub trait MapConvWith<'a>: Sized + 'a {
}
impl<'a, I> MapConvWith<'a> for I
where I: Iterator + 'a,
I::Item: ConvWith
where
I: Iterator + 'a,
I::Item: ConvWith,
{
type Ctx = <I::Item as ConvWith>::Ctx;
type Output = <I::Item as ConvWith>::Output;
@ -322,9 +319,9 @@ pub struct ConvWithIter<'a, I, Ctx: 'a> {
}
impl<'a, I, Ctx> Iterator for ConvWithIter<'a, I, Ctx>
where
I: Iterator,
I::Item: ConvWith<Ctx=Ctx>,
where
I: Iterator,
I::Item: ConvWith<Ctx = Ctx>,
{
type Item = <I::Item as ConvWith>::Output;
@ -332,4 +329,3 @@ impl<'a, I, Ctx> Iterator for ConvWithIter<'a, I, Ctx>
self.iter.next().map(|item| item.conv_with(self.ctx))
}
}

View file

@ -2,39 +2,36 @@
extern crate failure;
#[macro_use]
extern crate serde_derive;
extern crate languageserver_types;
extern crate serde;
extern crate serde_json;
extern crate languageserver_types;
#[macro_use]
extern crate crossbeam_channel;
extern crate rayon;
#[macro_use]
extern crate log;
extern crate cargo_metadata;
extern crate drop_bomb;
extern crate url_serde;
extern crate walkdir;
extern crate im;
extern crate relative_path;
extern crate cargo_metadata;
extern crate rustc_hash;
extern crate url_serde;
extern crate walkdir;
extern crate gen_lsp_server;
extern crate ra_editor;
extern crate ra_analysis;
extern crate ra_editor;
extern crate ra_syntax;
mod caps;
pub mod req;
mod conv;
mod main_loop;
mod vfs;
mod path_map;
mod server_world;
mod project_model;
pub mod req;
mod server_world;
pub mod thread_watcher;
mod vfs;
pub type Result<T> = ::std::result::Result<T, ::failure::Error>;
pub use crate::{
main_loop::main_loop,
caps::server_capabilities,
};
pub use crate::{caps::server_capabilities, main_loop::main_loop};

View file

@ -6,7 +6,7 @@ extern crate flexi_logger;
extern crate gen_lsp_server;
extern crate ra_lsp_server;
use flexi_logger::{Logger, Duplicate};
use flexi_logger::{Duplicate, Logger};
use gen_lsp_server::{run_server, stdio_transport};
use ra_lsp_server::Result;
@ -38,7 +38,8 @@ fn main_inner() -> Result<()> {
receiver,
sender,
|params, r, s| {
let root = params.root_uri
let root = params
.root_uri
.and_then(|it| it.to_file_path().ok())
.unwrap_or(cwd);
ra_lsp_server::main_loop(false, root, r, s)
@ -49,4 +50,3 @@ fn main_inner() -> Result<()> {
info!("... IO is down");
Ok(())
}

View file

@ -1,23 +1,20 @@
use rustc_hash::FxHashMap;
use languageserver_types::{
Diagnostic, DiagnosticSeverity, DocumentSymbol,
CodeActionResponse, Command, TextDocumentIdentifier,
SymbolInformation, Position, Location, TextEdit,
CompletionItem, InsertTextFormat, CompletionItemKind,
FoldingRange, FoldingRangeParams, FoldingRangeKind
CodeActionResponse, Command, CompletionItem, CompletionItemKind, Diagnostic,
DiagnosticSeverity, DocumentSymbol, FoldingRange, FoldingRangeKind, FoldingRangeParams,
InsertTextFormat, Location, Position, SymbolInformation, TextDocumentIdentifier, TextEdit,
};
use ra_analysis::{FileId, FoldKind, JobToken, Query, RunnableKind};
use ra_syntax::text_utils::contains_offset_nonstrict;
use serde_json::to_value;
use ra_analysis::{Query, FileId, RunnableKind, JobToken, FoldKind};
use ra_syntax::{
text_utils::contains_offset_nonstrict
};
use crate::{
req::{self, Decoration}, Result,
conv::{Conv, ConvWith, TryConvWith, MapConvWith, to_location},
server_world::ServerWorld,
conv::{to_location, Conv, ConvWith, MapConvWith, TryConvWith},
project_model::TargetKind,
req::{self, Decoration},
server_world::ServerWorld,
Result,
};
pub fn handle_syntax_tree(
@ -38,7 +35,9 @@ pub fn handle_extend_selection(
let file_id = params.text_document.try_conv_with(&world)?;
let file = world.analysis().file_syntax(file_id);
let line_index = world.analysis().file_line_index(file_id);
let selections = params.selections.into_iter()
let selections = params
.selections
.into_iter()
.map_conv_with(&line_index)
.map(|r| world.analysis().extend_selection(&file, r))
.map_conv_with(&line_index)
@ -54,11 +53,15 @@ pub fn handle_find_matching_brace(
let file_id = params.text_document.try_conv_with(&world)?;
let file = world.analysis().file_syntax(file_id);
let line_index = world.analysis().file_line_index(file_id);
let res = params.offsets
let res = params
.offsets
.into_iter()
.map_conv_with(&line_index)
.map(|offset| {
world.analysis().matching_brace(&file, offset).unwrap_or(offset)
world
.analysis()
.matching_brace(&file, offset)
.unwrap_or(offset)
})
.map_conv_with(&line_index)
.collect();
@ -73,7 +76,9 @@ pub fn handle_join_lines(
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let range = params.range.conv_with(&line_index);
world.analysis().join_lines(file_id, range)
world
.analysis()
.join_lines(file_id, range)
.try_conv_with(&world)
}
@ -87,7 +92,7 @@ pub fn handle_on_enter(
let offset = params.position.conv_with(&line_index);
match world.analysis().on_enter(file_id, offset) {
None => Ok(None),
Some(edit) => Ok(Some(edit.try_conv_with(&world)?))
Some(edit) => Ok(Some(edit.try_conv_with(&world)?)),
}
}
@ -158,7 +163,9 @@ pub fn handle_workspace_symbol(
let all_symbols = params.query.contains("#");
let libs = params.query.contains("*");
let query = {
let query: String = params.query.chars()
let query: String = params
.query
.chars()
.filter(|&c| c != '#' && c != '*')
.collect();
let mut q = Query::new(query);
@ -180,22 +187,23 @@ pub fn handle_workspace_symbol(
return Ok(Some(res));
fn exec_query(world: &ServerWorld, query: Query, token: &JobToken) -> Result<Vec<SymbolInformation>> {
fn exec_query(
world: &ServerWorld,
query: Query,
token: &JobToken,
) -> Result<Vec<SymbolInformation>> {
let mut res = Vec::new();
for (file_id, symbol) in world.analysis().symbol_search(query, token) {
let line_index = world.analysis().file_line_index(file_id);
let info = SymbolInformation {
name: symbol.name.to_string(),
kind: symbol.kind.conv(),
location: to_location(
file_id, symbol.node_range,
world, &line_index
)?,
location: to_location(file_id, symbol.node_range, world, &line_index)?,
container_name: None,
deprecated: None,
};
res.push(info);
};
}
Ok(res)
}
}
@ -209,12 +217,12 @@ pub fn handle_goto_definition(
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index);
let mut res = Vec::new();
for (file_id, symbol) in world.analysis().approximately_resolve_symbol(file_id, offset, &token) {
for (file_id, symbol) in world
.analysis()
.approximately_resolve_symbol(file_id, offset, &token)
{
let line_index = world.analysis().file_line_index(file_id);
let location = to_location(
file_id, symbol.node_range,
&world, &line_index,
)?;
let location = to_location(file_id, symbol.node_range, &world, &line_index)?;
res.push(location)
}
Ok(Some(req::GotoDefinitionResponse::Array(res)))
@ -229,10 +237,7 @@ pub fn handle_parent_module(
let mut res = Vec::new();
for (file_id, symbol) in world.analysis().parent_module(file_id) {
let line_index = world.analysis().file_line_index(file_id);
let location = to_location(
file_id, symbol.node_range,
&world, &line_index
)?;
let location = to_location(file_id, symbol.node_range, &world, &line_index)?;
res.push(location);
}
Ok(res)
@ -259,21 +264,16 @@ pub fn handle_runnables(
let r = req::Runnable {
range: runnable.range.conv_with(&line_index),
label: match &runnable.kind {
RunnableKind::Test { name } =>
format!("test {}", name),
RunnableKind::Bin =>
"run binary".to_string(),
RunnableKind::Test { name } => format!("test {}", name),
RunnableKind::Bin => "run binary".to_string(),
},
bin: "cargo".to_string(),
args,
env: {
let mut m = FxHashMap::default();
m.insert(
"RUST_BACKTRACE".to_string(),
"short".to_string(),
);
m.insert("RUST_BACKTRACE".to_string(), "short".to_string());
m
}
},
};
res.push(r);
}
@ -283,10 +283,16 @@ pub fn handle_runnables(
let spec = if let Some(&crate_id) = world.analysis().crate_for(file_id).first() {
let file_id = world.analysis().crate_root(crate_id);
let path = world.path_map.get_path(file_id);
world.workspaces.iter()
world
.workspaces
.iter()
.filter_map(|ws| {
let tgt = ws.target_by_root(path)?;
Some((tgt.package(ws).name(ws).clone(), tgt.name(ws).clone(), tgt.kind(ws)))
Some((
tgt.package(ws).name(ws).clone(),
tgt.name(ws).clone(),
tgt.kind(ws),
))
})
.next()
} else {
@ -294,22 +300,22 @@ pub fn handle_runnables(
};
let mut res = Vec::new();
match kind {
RunnableKind::Test { name } => {
res.push("test".to_string());
if let Some((pkg_name, tgt_name, tgt_kind)) = spec {
spec_args(pkg_name, tgt_name, tgt_kind, &mut res);
}
res.push("--".to_string());
res.push(name.to_string());
res.push("--nocapture".to_string());
RunnableKind::Test { name } => {
res.push("test".to_string());
if let Some((pkg_name, tgt_name, tgt_kind)) = spec {
spec_args(pkg_name, tgt_name, tgt_kind, &mut res);
}
RunnableKind::Bin => {
res.push("run".to_string());
if let Some((pkg_name, tgt_name, tgt_kind)) = spec {
spec_args(pkg_name, tgt_name, tgt_kind, &mut res);
}
res.push("--".to_string());
res.push(name.to_string());
res.push("--nocapture".to_string());
}
RunnableKind::Bin => {
res.push("run".to_string());
if let Some((pkg_name, tgt_name, tgt_kind)) = spec {
spec_args(pkg_name, tgt_name, tgt_kind, &mut res);
}
}
}
res
}
@ -362,12 +368,13 @@ pub fn handle_completion(
None => return Ok(None),
Some(items) => items,
};
let items = items.into_iter()
let items = items
.into_iter()
.map(|item| {
let mut res = CompletionItem {
label: item.label,
filter_text: item.lookup,
.. Default::default()
..Default::default()
};
if let Some(snip) = item.snippet {
res.insert_text = Some(snip);
@ -389,24 +396,27 @@ pub fn handle_folding_range(
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
let res = Some(world.analysis()
.folding_ranges(file_id)
.into_iter()
.map(|fold| {
let kind = match fold.kind {
FoldKind::Comment => FoldingRangeKind::Comment,
FoldKind::Imports => FoldingRangeKind::Imports
};
let range = fold.range.conv_with(&line_index);
FoldingRange {
start_line: range.start.line,
start_character: Some(range.start.character),
end_line: range.end.line,
end_character: Some(range.start.character),
kind: Some(kind)
}
})
.collect());
let res = Some(
world
.analysis()
.folding_ranges(file_id)
.into_iter()
.map(|fold| {
let kind = match fold.kind {
FoldKind::Comment => FoldingRangeKind::Comment,
FoldKind::Imports => FoldingRangeKind::Imports,
};
let range = fold.range.conv_with(&line_index);
FoldingRange {
start_line: range.start.line,
start_character: Some(range.start.character),
end_line: range.end.line,
end_character: Some(range.start.character),
kind: Some(kind),
}
})
.collect(),
);
Ok(res)
}
@ -422,25 +432,28 @@ pub fn handle_signature_help(
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index);
if let Some((descriptor, active_param)) = world.analysis().resolve_callable(file_id, offset, &token) {
let parameters : Vec<ParameterInformation> =
descriptor.params.iter().map(|param|
ParameterInformation {
label: param.clone(),
documentation: None
}
).collect();
if let Some((descriptor, active_param)) =
world.analysis().resolve_callable(file_id, offset, &token)
{
let parameters: Vec<ParameterInformation> = descriptor
.params
.iter()
.map(|param| ParameterInformation {
label: param.clone(),
documentation: None,
})
.collect();
let sig_info = SignatureInformation {
label: descriptor.label,
documentation: None,
parameters: Some(parameters)
parameters: Some(parameters),
};
Ok(Some(req::SignatureHelp {
signatures: vec![sig_info],
active_signature: Some(0),
active_parameter: active_param.map(|a| a as u64)
active_parameter: active_param.map(|a| a as u64),
}))
} else {
Ok(None)
@ -457,7 +470,10 @@ pub fn handle_code_action(
let range = params.range.conv_with(&line_index);
let assists = world.analysis().assists(file_id, range).into_iter();
let fixes = world.analysis().diagnostics(file_id).into_iter()
let fixes = world
.analysis()
.diagnostics(file_id)
.into_iter()
.filter_map(|d| Some((d.range, d.fix?)))
.filter(|(range, _fix)| contains_offset_nonstrict(*range, range.start()))
.map(|(_range, fix)| fix);
@ -483,7 +499,9 @@ pub fn publish_diagnostics(
) -> Result<req::PublishDiagnosticsParams> {
let uri = world.file_id_to_uri(file_id)?;
let line_index = world.analysis().file_line_index(file_id);
let diagnostics = world.analysis().diagnostics(file_id)
let diagnostics = world
.analysis()
.diagnostics(file_id)
.into_iter()
.map(|d| Diagnostic {
range: d.range.conv_with(&line_index),
@ -492,7 +510,8 @@ pub fn publish_diagnostics(
source: Some("rust-analyzer".to_string()),
message: d.message,
related_information: None,
}).collect();
})
.collect();
Ok(req::PublishDiagnosticsParams { uri, diagnostics })
}
@ -509,10 +528,13 @@ pub fn publish_decorations(
fn highlight(world: &ServerWorld, file_id: FileId) -> Vec<Decoration> {
let line_index = world.analysis().file_line_index(file_id);
world.analysis().highlight(file_id)
world
.analysis()
.highlight(file_id)
.into_iter()
.map(|h| Decoration {
range: h.range.conv_with(&line_index),
tag: h.tag,
}).collect()
})
.collect()
}

View file

@ -1,29 +1,26 @@
mod handlers;
mod subscriptions;
use std::{
path::PathBuf,
};
use std::path::PathBuf;
use serde::{Serialize, de::DeserializeOwned};
use crossbeam_channel::{unbounded, Sender, Receiver};
use rayon::{self, ThreadPool};
use languageserver_types::{NumberOrString};
use ra_analysis::{FileId, JobHandle, JobToken, LibraryData};
use crossbeam_channel::{unbounded, Receiver, Sender};
use gen_lsp_server::{
RawRequest, RawNotification, RawMessage, RawResponse, ErrorCode,
handle_shutdown,
handle_shutdown, ErrorCode, RawMessage, RawNotification, RawRequest, RawResponse,
};
use languageserver_types::NumberOrString;
use ra_analysis::{FileId, JobHandle, JobToken, LibraryData};
use rayon::{self, ThreadPool};
use rustc_hash::FxHashMap;
use serde::{de::DeserializeOwned, Serialize};
use crate::{
main_loop::subscriptions::Subscriptions,
project_model::{workspace_loader, CargoWorkspace},
req,
Result,
vfs::{self, FileEvent},
server_world::{ServerWorldState, ServerWorld},
main_loop::subscriptions::{Subscriptions},
project_model::{CargoWorkspace, workspace_loader},
server_world::{ServerWorld, ServerWorldState},
thread_watcher::Worker,
vfs::{self, FileEvent},
Result,
};
#[derive(Debug)]
@ -147,56 +144,50 @@ fn main_loop_inner(
}
state_changed = true;
}
Event::Ws(ws) => {
match ws {
Ok(ws) => {
let workspaces = vec![ws];
feedback(internal_mode, "workspace loaded", msg_sender);
for ws in workspaces.iter() {
for pkg in ws.packages().filter(|pkg| !pkg.is_member(ws)) {
debug!("sending root, {}", pkg.root(ws).to_path_buf().display());
fs_worker.send(pkg.root(ws).to_path_buf());
}
Event::Ws(ws) => match ws {
Ok(ws) => {
let workspaces = vec![ws];
feedback(internal_mode, "workspace loaded", msg_sender);
for ws in workspaces.iter() {
for pkg in ws.packages().filter(|pkg| !pkg.is_member(ws)) {
debug!("sending root, {}", pkg.root(ws).to_path_buf().display());
fs_worker.send(pkg.root(ws).to_path_buf());
}
state.set_workspaces(workspaces);
state_changed = true;
}
Err(e) => warn!("loading workspace failed: {}", e),
state.set_workspaces(workspaces);
state_changed = true;
}
}
Err(e) => warn!("loading workspace failed: {}", e),
},
Event::Lib(lib) => {
feedback(internal_mode, "library loaded", msg_sender);
state.add_lib(lib);
}
Event::Msg(msg) => {
match msg {
RawMessage::Request(req) => {
let req = match handle_shutdown(req, msg_sender) {
Some(req) => req,
None => return Ok(()),
};
match on_request(state, pending_requests, pool, &task_sender, req)? {
None => (),
Some(req) => {
error!("unknown request: {:?}", req);
let resp = RawResponse::err(
req.id,
ErrorCode::MethodNotFound as i32,
"unknown request".to_string(),
);
msg_sender.send(RawMessage::Response(resp))
}
Event::Msg(msg) => match msg {
RawMessage::Request(req) => {
let req = match handle_shutdown(req, msg_sender) {
Some(req) => req,
None => return Ok(()),
};
match on_request(state, pending_requests, pool, &task_sender, req)? {
None => (),
Some(req) => {
error!("unknown request: {:?}", req);
let resp = RawResponse::err(
req.id,
ErrorCode::MethodNotFound as i32,
"unknown request".to_string(),
);
msg_sender.send(RawMessage::Response(resp))
}
}
RawMessage::Notification(not) => {
on_notification(msg_sender, state, pending_requests, subs, not)?;
state_changed = true;
}
RawMessage::Response(resp) => {
error!("unexpected response: {:?}", resp)
}
}
}
RawMessage::Notification(not) => {
on_notification(msg_sender, state, pending_requests, subs, not)?;
state_changed = true;
}
RawMessage::Response(resp) => error!("unexpected response: {:?}", resp),
},
};
if state_changed {
@ -222,8 +213,7 @@ fn on_task(
}
msg_sender.send(RawMessage::Response(response))
}
Task::Notify(n) =>
msg_sender.send(RawMessage::Notification(n)),
Task::Notify(n) => msg_sender.send(RawMessage::Notification(n)),
}
}
@ -237,7 +227,9 @@ fn on_request(
let mut pool_dispatcher = PoolDispatcher {
req: Some(req),
res: None,
pool, world, sender
pool,
world,
sender,
};
let req = pool_dispatcher
.on::<req::SyntaxTree>(handlers::handle_syntax_tree)?
@ -262,7 +254,7 @@ fn on_request(
let inserted = pending_requests.insert(id, handle).is_none();
assert!(inserted, "duplicate request: {}", id);
Ok(None)
},
}
Err(req) => Ok(Some(req)),
}
}
@ -285,45 +277,53 @@ fn on_notification(
if let Some(handle) = pending_requests.remove(&id) {
handle.cancel();
}
return Ok(())
return Ok(());
}
Err(not) => not,
};
let not = match not.cast::<req::DidOpenTextDocument>() {
Ok(params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path()
let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
let file_id = state.add_mem_file(path, params.text_document.text);
subs.add_sub(file_id);
return Ok(())
return Ok(());
}
Err(not) => not,
};
let not = match not.cast::<req::DidChangeTextDocument>() {
Ok(mut params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path()
let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
let text = params.content_changes.pop()
let text = params
.content_changes
.pop()
.ok_or_else(|| format_err!("empty changes"))?
.text;
state.change_mem_file(path.as_path(), text)?;
return Ok(())
return Ok(());
}
Err(not) => not,
};
let not = match not.cast::<req::DidCloseTextDocument>() {
Ok(params) => {
let uri = params.text_document.uri;
let path = uri.to_file_path()
let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
let file_id = state.remove_mem_file(path.as_path())?;
subs.remove_sub(file_id);
let params = req::PublishDiagnosticsParams { uri, diagnostics: Vec::new() };
let params = req::PublishDiagnosticsParams {
uri,
diagnostics: Vec::new(),
};
let not = RawNotification::new::<req::PublishDiagnostics>(&params);
msg_sender.send(RawMessage::Notification(not));
return Ok(())
return Ok(());
}
Err(not) => not,
};
@ -342,11 +342,12 @@ struct PoolDispatcher<'a> {
impl<'a> PoolDispatcher<'a> {
fn on<'b, R>(
&'b mut self,
f: fn(ServerWorld, R::Params, JobToken) -> Result<R::Result>
f: fn(ServerWorld, R::Params, JobToken) -> Result<R::Result>,
) -> Result<&'b mut Self>
where R: req::Request,
R::Params: DeserializeOwned + Send + 'static,
R::Result: Serialize + 'static,
where
R: req::Request,
R::Params: DeserializeOwned + Send + 'static,
R::Result: Serialize + 'static,
{
let req = match self.req.take() {
None => return Ok(self),
@ -360,16 +361,16 @@ impl<'a> PoolDispatcher<'a> {
self.pool.spawn(move || {
let resp = match f(world, params, token) {
Ok(resp) => RawResponse::ok::<R>(id, &resp),
Err(e) => RawResponse::err(id, ErrorCode::InternalError as i32, e.to_string()),
Err(e) => {
RawResponse::err(id, ErrorCode::InternalError as i32, e.to_string())
}
};
let task = Task::Respond(resp);
sender.send(task);
});
self.res = Some((id, handle));
}
Err(req) => {
self.req = Some(req)
}
Err(req) => self.req = Some(req),
}
Ok(self)
}
@ -392,18 +393,14 @@ fn update_file_notifications_on_threadpool(
pool.spawn(move || {
for file_id in subscriptions {
match handlers::publish_diagnostics(&world, file_id) {
Err(e) => {
error!("failed to compute diagnostics: {:?}", e)
}
Err(e) => error!("failed to compute diagnostics: {:?}", e),
Ok(params) => {
let not = RawNotification::new::<req::PublishDiagnostics>(&params);
sender.send(Task::Notify(not));
}
}
match handlers::publish_decorations(&world, file_id) {
Err(e) => {
error!("failed to compute decorations: {:?}", e)
}
Err(e) => error!("failed to compute decorations: {:?}", e),
Ok(params) => {
let not = RawNotification::new::<req::PublishDecorations>(&params);
sender.send(Task::Notify(not))

View file

@ -1,5 +1,5 @@
use rustc_hash::FxHashSet;
use ra_analysis::FileId;
use rustc_hash::FxHashSet;
pub struct Subscriptions {
subs: FxHashSet<FileId>,
@ -7,7 +7,9 @@ pub struct Subscriptions {
impl Subscriptions {
pub fn new() -> Subscriptions {
Subscriptions { subs: FxHashSet::default() }
Subscriptions {
subs: FxHashSet::default(),
}
}
pub fn add_sub(&mut self, file_id: FileId) {
self.subs.insert(file_id);

View file

@ -1,11 +1,13 @@
use std::path::{PathBuf, Path, Component};
use im;
use relative_path::RelativePath;
use ra_analysis::{FileId, FileResolver};
use relative_path::RelativePath;
use std::path::{Component, Path, PathBuf};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Root {
Workspace, Lib
Workspace,
Lib,
}
#[derive(Debug, Default, Clone)]
@ -21,7 +23,8 @@ impl PathMap {
Default::default()
}
pub fn get_or_insert(&mut self, path: PathBuf, root: Root) -> FileId {
self.path2id.get(path.as_path())
self.path2id
.get(path.as_path())
.map(|&id| id)
.unwrap_or_else(|| {
let id = self.new_file_id();
@ -33,9 +36,7 @@ impl PathMap {
self.path2id.get(path).map(|&id| id)
}
pub fn get_path(&self, file_id: FileId) -> &Path {
self.id2path.get(&file_id)
.unwrap()
.as_path()
self.id2path.get(&file_id).unwrap().as_path()
}
pub fn get_root(&self, file_id: FileId) -> Root {
self.id2root[&file_id]
@ -55,7 +56,12 @@ impl PathMap {
impl FileResolver for PathMap {
fn file_stem(&self, file_id: FileId) -> String {
self.get_path(file_id).file_stem().unwrap().to_str().unwrap().to_string()
self.get_path(file_id)
.file_stem()
.unwrap()
.to_str()
.unwrap()
.to_string()
}
fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
@ -101,10 +107,6 @@ mod test {
let mut m = PathMap::new();
let id1 = m.get_or_insert(PathBuf::from("/foo"), Root::Workspace);
let id2 = m.get_or_insert(PathBuf::from("/foo/bar.rs"), Root::Workspace);
assert_eq!(
m.resolve(id1, &RelativePath::new("bar.rs")),
Some(id2),
)
assert_eq!(m.resolve(id1, &RelativePath::new("bar.rs")), Some(id2),)
}
}

View file

@ -1,13 +1,12 @@
use std::{
path::{Path, PathBuf},
};
use rustc_hash::{FxHashMap, FxHashSet};
use cargo_metadata::{metadata_run, CargoOpt};
use ra_syntax::SmolStr;
use rustc_hash::{FxHashMap, FxHashSet};
use std::path::{Path, PathBuf};
use crate::{
thread_watcher::{ThreadWatcher, Worker},
Result,
thread_watcher::{Worker, ThreadWatcher},
};
#[derive(Debug, Clone)]
@ -39,7 +38,12 @@ struct TargetData {
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TargetKind {
Bin, Lib, Example, Test, Bench, Other,
Bin,
Lib,
Example,
Test,
Bench,
Other,
}
impl Package {
@ -49,7 +53,7 @@ impl Package {
pub fn root(self, ws: &CargoWorkspace) -> &Path {
ws.pkg(self).manifest.parent().unwrap()
}
pub fn targets<'a>(self, ws: &'a CargoWorkspace) -> impl Iterator<Item=Target> + 'a {
pub fn targets<'a>(self, ws: &'a CargoWorkspace) -> impl Iterator<Item = Target> + 'a {
ws.pkg(self).targets.iter().cloned()
}
pub fn is_member(self, ws: &CargoWorkspace) -> bool {
@ -78,13 +82,15 @@ impl CargoWorkspace {
let meta = metadata_run(
Some(cargo_toml.as_path()),
true,
Some(CargoOpt::AllFeatures)
).map_err(|e| format_err!("cargo metadata failed: {}", e))?;
Some(CargoOpt::AllFeatures),
)
.map_err(|e| format_err!("cargo metadata failed: {}", e))?;
let mut pkg_by_id = FxHashMap::default();
let mut packages = Vec::new();
let mut targets = Vec::new();
let ws_members: FxHashSet<String> = meta.workspace_members
let ws_members: FxHashSet<String> = meta
.workspace_members
.into_iter()
.map(|it| it.raw)
.collect();
@ -114,7 +120,7 @@ impl CargoWorkspace {
Ok(CargoWorkspace { packages, targets })
}
pub fn packages<'a>(&'a self) -> impl Iterator<Item=Package> + 'a {
pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + 'a {
(0..self.packages.len()).map(Package)
}
pub fn target_by_root(&self, root: &Path) -> Option<Target> {
@ -155,7 +161,7 @@ impl TargetKind {
"example" => TargetKind::Example,
_ if kind.contains("lib") => TargetKind::Lib,
_ => continue,
}
};
}
TargetKind::Other
}
@ -170,6 +176,6 @@ pub fn workspace_loader() -> (Worker<PathBuf, Result<CargoWorkspace>>, ThreadWat
.into_iter()
.map(|path| CargoWorkspace::from_cargo_metadata(path.as_path()))
.for_each(|it| output_sender.send(it))
}
},
)
}

View file

@ -1,20 +1,13 @@
use languageserver_types::{Location, Position, Range, TextDocumentIdentifier, Url};
use rustc_hash::FxHashMap;
use languageserver_types::{TextDocumentIdentifier, Range, Url, Position, Location};
use url_serde;
pub use languageserver_types::{
request::*, notification::*,
InitializeResult, PublishDiagnosticsParams,
DocumentSymbolParams, DocumentSymbolResponse,
CodeActionParams, ApplyWorkspaceEditParams,
ExecuteCommandParams,
WorkspaceSymbolParams,
TextDocumentPositionParams,
TextEdit,
CompletionParams, CompletionResponse,
DocumentOnTypeFormattingParams,
TextDocumentEdit,
SignatureHelp, Hover
notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CompletionParams,
CompletionResponse, DocumentOnTypeFormattingParams, DocumentSymbolParams,
DocumentSymbolResponse, ExecuteCommandParams, Hover, InitializeResult,
PublishDiagnosticsParams, SignatureHelp, TextDocumentEdit, TextDocumentPositionParams,
TextEdit, WorkspaceSymbolParams,
};
pub enum SyntaxTree {}
@ -28,7 +21,7 @@ impl Request for SyntaxTree {
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct SyntaxTreeParams {
pub text_document: TextDocumentIdentifier
pub text_document: TextDocumentIdentifier,
}
pub enum ExtendSelection {}
@ -94,7 +87,7 @@ pub struct PublishDecorationsParams {
#[serde(rename_all = "camelCase")]
pub struct Decoration {
pub range: Range,
pub tag: &'static str
pub tag: &'static str,
}
pub enum ParentModule {}
@ -167,14 +160,14 @@ pub struct SourceChange {
pub enum FileSystemEdit {
CreateFile {
#[serde(with = "url_serde")]
uri: Url
uri: Url,
},
MoveFile {
#[serde(with = "url_serde")]
src: Url,
#[serde(with = "url_serde")]
dst: Url,
}
},
}
pub enum InternalFeedback {}

View file

@ -1,18 +1,18 @@
use std::{
fs,
path::{PathBuf, Path},
path::{Path, PathBuf},
sync::Arc,
};
use rustc_hash::FxHashMap;
use languageserver_types::Url;
use ra_analysis::{FileId, AnalysisHost, Analysis, CrateGraph, CrateId, LibraryData, FileResolver};
use ra_analysis::{Analysis, AnalysisHost, CrateGraph, CrateId, FileId, FileResolver, LibraryData};
use rustc_hash::FxHashMap;
use crate::{
Result,
path_map::{PathMap, Root},
vfs::{FileEvent, FileEventKind},
project_model::CargoWorkspace,
vfs::{FileEvent, FileEventKind},
Result,
};
#[derive(Debug)]
@ -42,16 +42,15 @@ impl ServerWorldState {
{
let pm = &mut self.path_map;
let mm = &mut self.mem_map;
let changes = events.into_iter()
let changes = events
.into_iter()
.map(|event| {
let text = match event.kind {
FileEventKind::Add(text) => Some(text),
};
(event.path, text)
})
.map(|(path, text)| {
(pm.get_or_insert(path, Root::Workspace), text)
})
.map(|(path, text)| (pm.get_or_insert(path, Root::Workspace), text))
.filter_map(|(id, text)| {
if mm.contains_key(&id) {
mm.insert(id, text);
@ -62,12 +61,17 @@ impl ServerWorldState {
});
self.analysis_host.change_files(changes);
}
self.analysis_host.set_file_resolver(Arc::new(self.path_map.clone()));
self.analysis_host
.set_file_resolver(Arc::new(self.path_map.clone()));
}
pub fn events_to_files(&mut self, events: Vec<FileEvent>) -> (Vec<(FileId, String)>, Arc<FileResolver>) {
pub fn events_to_files(
&mut self,
events: Vec<FileEvent>,
) -> (Vec<(FileId, String)>, Arc<FileResolver>) {
let files = {
let pm = &mut self.path_map;
events.into_iter()
events
.into_iter()
.map(|event| {
let text = match event.kind {
FileEventKind::Add(text) => text,
@ -86,7 +90,8 @@ impl ServerWorldState {
pub fn add_mem_file(&mut self, path: PathBuf, text: String) -> FileId {
let file_id = self.path_map.get_or_insert(path, Root::Workspace);
self.analysis_host.set_file_resolver(Arc::new(self.path_map.clone()));
self.analysis_host
.set_file_resolver(Arc::new(self.path_map.clone()));
self.mem_map.insert(file_id, None);
if self.path_map.get_root(file_id) != Root::Lib {
self.analysis_host.change_file(file_id, Some(text));
@ -95,9 +100,10 @@ impl ServerWorldState {
}
pub fn change_mem_file(&mut self, path: &Path, text: String) -> Result<()> {
let file_id = self.path_map.get_id(path).ok_or_else(|| {
format_err!("change to unknown file: {}", path.display())
})?;
let file_id = self
.path_map
.get_id(path)
.ok_or_else(|| format_err!("change to unknown file: {}", path.display()))?;
if self.path_map.get_root(file_id) != Root::Lib {
self.analysis_host.change_file(file_id, Some(text));
}
@ -105,9 +111,10 @@ impl ServerWorldState {
}
pub fn remove_mem_file(&mut self, path: &Path) -> Result<FileId> {
let file_id = self.path_map.get_id(path).ok_or_else(|| {
format_err!("change to unknown file: {}", path.display())
})?;
let file_id = self
.path_map
.get_id(path)
.ok_or_else(|| format_err!("change to unknown file: {}", path.display()))?;
match self.mem_map.remove(&file_id) {
Some(_) => (),
None => bail!("unmatched close notification"),
@ -122,17 +129,17 @@ impl ServerWorldState {
pub fn set_workspaces(&mut self, ws: Vec<CargoWorkspace>) {
let mut crate_roots = FxHashMap::default();
ws.iter()
.flat_map(|ws| {
ws.packages()
.flat_map(move |pkg| pkg.targets(ws))
.map(move |tgt| tgt.root(ws))
})
.for_each(|root| {
if let Some(file_id) = self.path_map.get_id(root) {
let crate_id = CrateId(crate_roots.len() as u32);
crate_roots.insert(crate_id, file_id);
}
});
.flat_map(|ws| {
ws.packages()
.flat_map(move |pkg| pkg.targets(ws))
.map(move |tgt| tgt.root(ws))
})
.for_each(|root| {
if let Some(file_id) = self.path_map.get_id(root) {
let crate_id = CrateId(crate_roots.len() as u32);
crate_roots.insert(crate_id, file_id);
}
});
let crate_graph = CrateGraph { crate_roots };
self.workspaces = Arc::new(ws);
self.analysis_host.set_crate_graph(crate_graph);
@ -141,7 +148,7 @@ impl ServerWorldState {
ServerWorld {
workspaces: Arc::clone(&self.workspaces),
analysis: self.analysis_host.analysis(),
path_map: self.path_map.clone()
path_map: self.path_map.clone(),
}
}
}
@ -152,9 +159,12 @@ impl ServerWorld {
}
pub fn uri_to_file_id(&self, uri: &Url) -> Result<FileId> {
let path = uri.to_file_path()
let path = uri
.to_file_path()
.map_err(|()| format_err!("invalid uri: {}", uri))?;
self.path_map.get_id(&path).ok_or_else(|| format_err!("unknown file: {}", path.display()))
self.path_map
.get_id(&path)
.ok_or_else(|| format_err!("unknown file: {}", path.display()))
}
pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> {

View file

@ -1,7 +1,8 @@
use std::thread;
use crossbeam_channel::{bounded, unbounded, Sender, Receiver};
use drop_bomb::DropBomb;
use crate::Result;
use crossbeam_channel::{bounded, unbounded, Receiver, Sender};
use drop_bomb::DropBomb;
use std::thread;
pub struct Worker<I, O> {
pub inp: Sender<I>,
@ -50,11 +51,13 @@ impl ThreadWatcher {
info!("waiting for {} to finish ...", self.name);
let name = self.name;
self.bomb.defuse();
let res = self.thread.join()
let res = self
.thread
.join()
.map_err(|_| format_err!("ThreadWatcher {} died", name));
match &res {
Ok(()) => info!("... {} terminated with ok", name),
Err(_) => error!("... {} terminated with err", name)
Err(_) => error!("... {} terminated with err", name),
}
res
}
@ -66,5 +69,9 @@ impl ThreadWatcher {
fn worker_chan<I, O>(buf: usize) -> ((Sender<I>, Receiver<O>), Receiver<I>, Sender<O>) {
let (input_sender, input_receiver) = bounded::<I>(buf);
let (output_sender, output_receiver) = unbounded::<O>();
((input_sender, output_receiver), input_receiver, output_sender)
(
(input_sender, output_receiver),
input_receiver,
output_sender,
)
}

View file

@ -1,14 +1,11 @@
use std::{
path::{PathBuf, Path},
fs,
path::{Path, PathBuf},
};
use walkdir::WalkDir;
use crate::{
thread_watcher::{Worker, ThreadWatcher},
};
use crate::thread_watcher::{ThreadWatcher, Worker};
#[derive(Debug)]
pub struct FileEvent {
@ -24,7 +21,8 @@ pub enum FileEventKind {
pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, ThreadWatcher) {
Worker::<PathBuf, (PathBuf, Vec<FileEvent>)>::spawn(
"roots loader",
128, |input_receiver, output_sender| {
128,
|input_receiver, output_sender| {
input_receiver
.into_iter()
.map(|path| {
@ -34,7 +32,7 @@ pub fn roots_loader() -> (Worker<PathBuf, (PathBuf, Vec<FileEvent>)>, ThreadWatc
(path, events)
})
.for_each(|it| output_sender.send(it))
}
},
)
}