Cargo Format

Run `cargo fmt` and ignore generated files
Jeremy A. Kolb 2018-10-15 17:44:23 -04:00
parent 39cb6c6d3f
commit 61f3a438d3
76 changed files with 1936 additions and 1530 deletions
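For context on the commit message above: the formatting pass is a stock workspace-wide rustfmt run driven by Cargo. The sketch below is a minimal illustration of that workflow; the `ignore` key in rustfmt.toml (a nightly-only rustfmt option) and the generated-file path are assumptions for illustration, not necessarily what this commit actually does.

    # format every crate in the workspace
    cargo fmt --all

    # rustfmt.toml -- hypothetical sketch for keeping rustfmt away from generated sources
    ignore = ["src/ast/generated.rs"]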

View file

@@ -1,16 +1,17 @@
use crate::{
module_map::{ModuleDescriptorQuery, ModuleTreeQuery, ModulesDatabase},
symbol_index::SymbolIndex,
FileId, FileResolverImp,
};
use ra_editor::LineIndex;
use ra_syntax::File;
use rustc_hash::FxHashSet;
use salsa;
use std::{
fmt,
sync::Arc,
hash::{Hash, Hasher},
};
use salsa;
use rustc_hash::FxHashSet;
use ra_syntax::File;
use ra_editor::{LineIndex};
use crate::{
symbol_index::SymbolIndex,
module_map::{ModulesDatabase, ModuleTreeQuery, ModuleDescriptorQuery},
FileId, FileResolverImp,
};
#[derive(Default)]

View file

@@ -1,41 +1,34 @@
use std::{
collections::BTreeMap,
};
use crate::{imp::FileResolverImp, FileId};
use ra_syntax::{
ast::{self, AstNode, NameOwner},
text_utils::is_subrange,
SmolStr,
};
use relative_path::RelativePathBuf;
use ra_syntax::{
SmolStr,
ast::{self, NameOwner, AstNode},
text_utils::is_subrange
};
use crate::{
FileId,
imp::FileResolverImp,
};
use std::collections::BTreeMap;
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct ModuleDescriptor {
pub submodules: Vec<Submodule>
pub submodules: Vec<Submodule>,
}
impl ModuleDescriptor {
pub fn new(root: ast::Root) -> ModuleDescriptor {
let submodules = modules(root)
.map(|(name, _)| Submodule { name })
.collect();
let submodules = modules(root).map(|(name, _)| Submodule { name }).collect();
ModuleDescriptor { submodules } }
ModuleDescriptor { submodules }
}
}
fn modules<'a>(root: ast::Root<'a>) -> impl Iterator<Item=(SmolStr, ast::Module<'a>)> {
root
.modules()
.filter_map(|module| {
let name = module.name()?.text();
if !module.has_semi() {
return None;
}
Some((name, module))
})
fn modules<'a>(root: ast::Root<'a>) -> impl Iterator<Item = (SmolStr, ast::Module<'a>)> {
root.modules().filter_map(|module| {
let name = module.name()?.text();
if !module.has_semi() {
return None;
}
Some((name, module))
})
}
#[derive(Clone, Hash, PartialEq, Eq, Debug)]
@@ -56,7 +49,7 @@ struct Node(usize);
struct NodeData {
file_id: FileId,
links: Vec<Link>,
parents: Vec<Link>
parents: Vec<Link>,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
@@ -69,7 +62,6 @@ struct LinkData {
problem: Option<Problem>,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum Problem {
UnresolvedModule {
@@ -78,16 +70,18 @@ pub enum Problem {
NotDirOwner {
move_to: RelativePathBuf,
candidate: RelativePathBuf,
}
},
}
impl ModuleTreeDescriptor {
pub(crate) fn new<'a>(
files: impl Iterator<Item=(FileId, &'a ModuleDescriptor)> + Clone,
files: impl Iterator<Item = (FileId, &'a ModuleDescriptor)> + Clone,
file_resolver: &FileResolverImp,
) -> ModuleTreeDescriptor {
let mut file_id2node = BTreeMap::new();
let mut nodes: Vec<NodeData> = files.clone().enumerate()
let mut nodes: Vec<NodeData> = files
.clone()
.enumerate()
.map(|(idx, (file_id, _))| {
file_id2node.insert(file_id, Node(idx));
NodeData {
@@ -120,20 +114,19 @@ impl ModuleTreeDescriptor {
points_to,
problem,
})
}
}
ModuleTreeDescriptor {
nodes, links, file_id2node
nodes,
links,
file_id2node,
}
}
pub(crate) fn parent_modules(&self, file_id: FileId) -> Vec<Link> {
let node = self.file_id2node[&file_id];
self.node(node)
.parents
.clone()
self.node(node).parents.clone()
}
pub(crate) fn child_module_by_name(&self, file_id: FileId, name: &str) -> Vec<FileId> {
let node = self.file_id2node[&file_id];
@@ -141,10 +134,18 @@ impl ModuleTreeDescriptor {
.links
.iter()
.filter(|it| it.name(self) == name)
.flat_map(|link| link.points_to(self).iter().map(|&node| self.node(node).file_id))
.flat_map(|link| {
link.points_to(self)
.iter()
.map(|&node| self.node(node).file_id)
})
.collect()
}
pub(crate) fn problems<'a, 'b>(&'b self, file_id: FileId, root: ast::Root<'a>) -> Vec<(ast::Name<'a>, &'b Problem)> {
pub(crate) fn problems<'a, 'b>(
&'b self,
file_id: FileId,
root: ast::Root<'a>,
) -> Vec<(ast::Name<'a>, &'b Problem)> {
let node = self.file_id2node[&file_id];
self.node(node)
.links
@@ -176,7 +177,11 @@ impl Link {
fn points_to(self, tree: &ModuleTreeDescriptor) -> &[Node] {
&tree.link(self).points_to
}
pub(crate) fn bind_source<'a>(self, tree: &ModuleTreeDescriptor, root: ast::Root<'a>) -> ast::Module<'a> {
pub(crate) fn bind_source<'a>(
self,
tree: &ModuleTreeDescriptor,
root: ast::Root<'a>,
) -> ast::Module<'a> {
modules(root)
.filter(|(name, _)| name == &tree.link(self).name)
.next()
@@ -185,22 +190,21 @@ impl Link {
}
}
fn resolve_submodule(
file_id: FileId,
name: &SmolStr,
file_resolver: &FileResolverImp
file_resolver: &FileResolverImp,
) -> (Vec<FileId>, Option<Problem>) {
let mod_name = file_resolver.file_stem(file_id);
let is_dir_owner =
mod_name == "mod" || mod_name == "lib" || mod_name == "main";
let is_dir_owner = mod_name == "mod" || mod_name == "lib" || mod_name == "main";
let file_mod = RelativePathBuf::from(format!("../{}.rs", name));
let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name));
let points_to: Vec<FileId>;
let problem: Option<Problem>;
if is_dir_owner {
points_to = [&file_mod, &dir_mod].iter()
points_to = [&file_mod, &dir_mod]
.iter()
.filter_map(|path| file_resolver.resolve(file_id, path))
.collect();
problem = if points_to.is_empty() {
@@ -223,7 +227,7 @@ fn resolve_submodule(
#[derive(Debug, Clone)]
pub struct FnDescriptor {
pub name: String,
pub label : String,
pub label: String,
pub ret_type: Option<String>,
pub params: Vec<String>,
}
@@ -233,9 +237,11 @@ impl FnDescriptor {
let name = node.name()?.text().to_string();
// Strip the body out for the label.
let label : String = if let Some(body) = node.body() {
let label: String = if let Some(body) = node.body() {
let body_range = body.syntax().range();
let label : String = node.syntax().children()
let label: String = node
.syntax()
.children()
.filter(|child| !is_subrange(body_range, child.range()))
.map(|node| node.text().to_string())
.collect();
@@ -251,7 +257,7 @@ impl FnDescriptor {
name,
ret_type,
params,
label
label,
})
}
@@ -264,9 +270,11 @@ impl FnDescriptor {
// Maybe use param.pat here? See if we can just extract the name?
//res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
res.extend(param_list.params()
.filter_map(|p| p.pat())
.map(|pat| pat.syntax().text().to_string())
res.extend(
param_list
.params()
.filter_map(|p| p.pat())
.map(|pat| pat.syntax().text().to_string()),
);
}
res

View file

@@ -1,32 +1,31 @@
use std::{
sync::{
Arc,
},
hash::{Hash, Hasher},
fmt,
collections::VecDeque,
iter,
};
use std::{
collections::VecDeque,
fmt,
hash::{Hash, Hasher},
iter,
sync::Arc,
};
use ra_editor::{self, find_node_at_offset, resolve_local_name, FileSymbol, LineIndex, LocalEdit};
use ra_syntax::{
ast::{self, ArgListOwner, Expr, NameOwner},
AstNode, File, SmolStr,
SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
};
use relative_path::RelativePath;
use rustc_hash::FxHashSet;
use ra_editor::{self, FileSymbol, LineIndex, find_node_at_offset, LocalEdit, resolve_local_name};
use ra_syntax::{
TextUnit, TextRange, SmolStr, File, AstNode, SyntaxNodeRef,
SyntaxKind::*,
ast::{self, NameOwner, ArgListOwner, Expr},
};
use crate::{
FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
JobToken, CrateGraph, CrateId,
roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot},
descriptors::{FnDescriptor, ModuleTreeDescriptor, Problem},
roots::{ReadonlySourceRoot, SourceRoot, WritableSourceRoot},
CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, JobToken, Position,
Query, SourceChange, SourceFileEdit,
};
#[derive(Clone, Debug)]
pub(crate) struct FileResolverImp {
inner: Arc<FileResolver>
inner: Arc<FileResolver>,
}
impl PartialEq for FileResolverImp {
@@ -35,8 +34,7 @@ impl PartialEq for FileResolverImp {
}
}
impl Eq for FileResolverImp {
}
impl Eq for FileResolverImp {}
impl Hash for FileResolverImp {
fn hash<H: Hasher>(&self, hasher: &mut H) {
@@ -67,17 +65,23 @@ impl Default for FileResolverImp {
fn file_stem(&self, _file_: FileId) -> String {
panic!("file resolver not set")
}
fn resolve(&self, _file_id: FileId, _path: &::relative_path::RelativePath) -> Option<FileId> {
fn resolve(
&self,
_file_id: FileId,
_path: &::relative_path::RelativePath,
) -> Option<FileId> {
panic!("file resolver not set")
}
}
FileResolverImp { inner: Arc::new(DummyResolver) }
FileResolverImp {
inner: Arc::new(DummyResolver),
}
}
}
#[derive(Debug)]
pub(crate) struct AnalysisHostImpl {
data: WorldData
data: WorldData,
}
impl AnalysisHostImpl {
@@ -91,13 +95,13 @@ impl AnalysisHostImpl {
data: self.data.clone(),
}
}
pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) {
self.data_mut()
.root.apply_changes(changes, None);
pub fn change_files(&mut self, changes: &mut dyn Iterator<Item = (FileId, Option<String>)>) {
self.data_mut().root.apply_changes(changes, None);
}
pub fn set_file_resolver(&mut self, resolver: FileResolverImp) {
self.data_mut()
.root.apply_changes(&mut iter::empty(), Some(resolver));
.root
.apply_changes(&mut iter::empty(), Some(resolver));
}
pub fn set_crate_graph(&mut self, graph: CrateGraph) {
let mut visited = FxHashSet::default();
@@ -131,7 +135,12 @@ impl AnalysisImpl {
if self.data.root.contains(file_id) {
return &self.data.root;
}
&**self.data.libs.iter().find(|it| it.contains(file_id)).unwrap()
&**self
.data
.libs
.iter()
.find(|it| it.contains(file_id))
.unwrap()
}
pub fn file_syntax(&self, file_id: FileId) -> File {
self.root(file_id).syntax(file_id)
@@ -142,18 +151,17 @@ impl AnalysisImpl {
pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
let mut buf = Vec::new();
if query.libs {
self.data.libs.iter()
.for_each(|it| it.symbols(&mut buf));
self.data.libs.iter().for_each(|it| it.symbols(&mut buf));
} else {
self.data.root.symbols(&mut buf);
}
query.search(&buf, token)
}
pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
let root = self.root(file_id);
let module_tree = root.module_tree();
module_tree.parent_modules(file_id)
module_tree
.parent_modules(file_id)
.iter()
.map(|link| {
let file_id = link.owner(&module_tree);
@@ -203,15 +211,17 @@ impl AnalysisImpl {
let file = root.syntax(file_id);
let syntax = file.syntax();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
// First try to resolve the symbol locally
if let Some((name, range)) = resolve_local_name(&file, offset, name_ref) {
let mut vec = vec![];
vec.push((file_id, FileSymbol {
name,
node_range: range,
kind : NAME
}));
vec.push((
file_id,
FileSymbol {
name,
node_range: range,
kind: NAME,
},
));
return vec;
} else {
@@ -224,17 +234,21 @@ impl AnalysisImpl {
if module.has_semi() {
let file_ids = self.resolve_module(&*module_tree, file_id, module);
let res = file_ids.into_iter().map(|id| {
let name = module.name()
.map(|n| n.text())
.unwrap_or_else(|| SmolStr::new(""));
let symbol = FileSymbol {
name,
node_range: TextRange::offset_len(0.into(), 0.into()),
kind: MODULE,
};
(id, symbol)
}).collect();
let res = file_ids
.into_iter()
.map(|id| {
let name = module
.name()
.map(|n| n.text())
.unwrap_or_else(|| SmolStr::new(""));
let symbol = FileSymbol {
name,
node_range: TextRange::offset_len(0.into(), 0.into()),
kind: MODULE,
};
(id, symbol)
})
.collect();
return res;
}
@@ -245,12 +259,16 @@ impl AnalysisImpl {
pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
let root = self.root(file_id);
let module_tree = root.module_tree();
let syntax = root.syntax(file_id);
let mut res = ra_editor::diagnostics(&syntax)
.into_iter()
.map(|d| Diagnostic { range: d.range, message: d.msg, fix: None })
.map(|d| Diagnostic {
range: d.range,
message: d.msg,
fix: None,
})
.collect::<Vec<_>>();
for (name_node, problem) in module_tree.problems(file_id, syntax.ast()) {
@@ -273,8 +291,14 @@ impl AnalysisImpl {
}
}
Problem::NotDirOwner { move_to, candidate } => {
let move_file = FileSystemEdit::MoveFile { file: file_id, path: move_to.clone() };
let create_file = FileSystemEdit::CreateFile { anchor: file_id, path: move_to.join(candidate) };
let move_file = FileSystemEdit::MoveFile {
file: file_id,
path: move_to.clone(),
};
let create_file = FileSystemEdit::CreateFile {
anchor: file_id,
path: move_to.join(candidate),
};
let fix = SourceChange {
label: "move file and create module".to_string(),
source_file_edits: Vec::new(),
@@ -297,23 +321,34 @@ impl AnalysisImpl {
let file = self.file_syntax(file_id);
let offset = range.start();
let actions = vec![
("flip comma", ra_editor::flip_comma(&file, offset).map(|f| f())),
("add `#[derive]`", ra_editor::add_derive(&file, offset).map(|f| f())),
(
"flip comma",
ra_editor::flip_comma(&file, offset).map(|f| f()),
),
(
"add `#[derive]`",
ra_editor::add_derive(&file, offset).map(|f| f()),
),
("add impl", ra_editor::add_impl(&file, offset).map(|f| f())),
("introduce variable", ra_editor::introduce_variable(&file, range).map(|f| f())),
(
"introduce variable",
ra_editor::introduce_variable(&file, range).map(|f| f()),
),
];
actions.into_iter()
actions
.into_iter()
.filter_map(|(name, local_edit)| {
Some(SourceChange::from_local_edit(
file_id, name, local_edit?,
))
Some(SourceChange::from_local_edit(file_id, name, local_edit?))
})
.collect()
}
pub fn resolve_callable(&self, file_id: FileId, offset: TextUnit, token: &JobToken)
-> Option<(FnDescriptor, Option<usize>)> {
pub fn resolve_callable(
&self,
file_id: FileId,
offset: TextUnit,
token: &JobToken,
) -> Option<(FnDescriptor, Option<usize>)> {
let root = self.root(file_id);
let file = root.syntax(file_id);
let syntax = file.syntax();
@@ -332,9 +367,7 @@ impl AnalysisImpl {
let mut current_parameter = None;
let num_params = descriptor.params.len();
let has_self = fn_def.param_list()
.and_then(|l| l.self_param())
.is_some();
let has_self = fn_def.param_list().and_then(|l| l.self_param()).is_some();
if num_params == 1 {
if !has_self {
@@ -350,8 +383,11 @@ impl AnalysisImpl {
let start = arg_list.syntax().range().start();
let range_search = TextRange::from_to(start, offset);
let mut commas: usize = arg_list.syntax().text()
.slice(range_search).to_string()
let mut commas: usize = arg_list
.syntax()
.text()
.slice(range_search)
.to_string()
.matches(",")
.count();
@@ -381,7 +417,12 @@ impl AnalysisImpl {
self.world_symbols(query, token)
}
fn resolve_module(&self, module_tree: &ModuleTreeDescriptor, file_id: FileId, module: ast::Module) -> Vec<FileId> {
fn resolve_module(
&self,
module_tree: &ModuleTreeDescriptor,
file_id: FileId,
module: ast::Module,
) -> Vec<FileId> {
let name = match module.name() {
Some(name) => name.text(),
None => return Vec::new(),
@@ -407,15 +448,17 @@ impl SourceChange {
label: label.to_string(),
source_file_edits: vec![file_edit],
file_system_edits: vec![],
cursor_position: edit.cursor_position
.map(|offset| Position { offset, file_id })
cursor_position: edit
.cursor_position
.map(|offset| Position { offset, file_id }),
}
}
}
impl CrateGraph {
fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
let (&crate_id, _) = self.crate_roots
let (&crate_id, _) = self
.crate_roots
.iter()
.find(|(_crate_id, &root_id)| root_id == file_id)?;
Some(crate_id)
@@ -424,7 +467,7 @@ impl CrateGraph {
enum FnCallNode<'a> {
CallExpr(ast::CallExpr<'a>),
MethodCallExpr(ast::MethodCallExpr<'a>)
MethodCallExpr(ast::MethodCallExpr<'a>),
}
impl<'a> FnCallNode<'a> {
@@ -440,27 +483,23 @@ impl<'a> FnCallNode<'a> {
pub fn name_ref(&self) -> Option<ast::NameRef> {
match *self {
FnCallNode::CallExpr(call_expr) => {
Some(match call_expr.expr()? {
Expr::PathExpr(path_expr) => {
path_expr.path()?.segment()?.name_ref()?
},
_ => return None
})
},
FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()? {
Expr::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
_ => return None,
}),
FnCallNode::MethodCallExpr(call_expr) => {
call_expr.syntax().children()
.filter_map(ast::NameRef::cast)
.nth(0)
}
FnCallNode::MethodCallExpr(call_expr) => call_expr
.syntax()
.children()
.filter_map(ast::NameRef::cast)
.nth(0),
}
}
pub fn arg_list(&self) -> Option<ast::ArgList> {
match *self {
FnCallNode::CallExpr(expr) => expr.arg_list(),
FnCallNode::MethodCallExpr(expr) => expr.arg_list()
FnCallNode::MethodCallExpr(expr) => expr.arg_list(),
}
}
}

View file

@@ -14,15 +14,20 @@ impl JobHandle {
pub fn new() -> (JobHandle, JobToken) {
let (sender_alive, receiver_alive) = bounded(0);
let (sender_canceled, receiver_canceled) = bounded(0);
let token = JobToken { _job_alive: sender_alive, job_canceled: receiver_canceled };
let handle = JobHandle { job_alive: receiver_alive, _job_canceled: sender_canceled };
let token = JobToken {
_job_alive: sender_alive,
job_canceled: receiver_canceled,
};
let handle = JobHandle {
job_alive: receiver_alive,
_job_canceled: sender_canceled,
};
(handle, token)
}
pub fn has_completed(&self) -> bool {
is_closed(&self.job_alive)
}
pub fn cancel(self) {
}
pub fn cancel(self) {}
}
impl JobToken {
@@ -31,7 +36,6 @@ impl JobToken {
}
}
// We don't actually send messages through the channels,
// and instead just check if the channel is closed,
// so we use uninhabited enum as a message type

View file

@@ -1,44 +1,40 @@
extern crate parking_lot;
#[macro_use]
extern crate log;
extern crate once_cell;
extern crate ra_syntax;
extern crate ra_editor;
extern crate fst;
extern crate once_cell;
extern crate ra_editor;
extern crate ra_syntax;
extern crate rayon;
extern crate relative_path;
#[macro_use]
extern crate crossbeam_channel;
extern crate im;
extern crate salsa;
extern crate rustc_hash;
extern crate salsa;
mod symbol_index;
mod module_map;
mod imp;
mod job;
mod roots;
mod db;
mod descriptors;
mod imp;
mod job;
mod module_map;
mod roots;
mod symbol_index;
use std::{
sync::Arc,
fmt::Debug,
};
use std::{fmt::Debug, sync::Arc};
use crate::imp::{AnalysisHostImpl, AnalysisImpl, FileResolverImp};
use ra_syntax::{AtomEdit, File, TextRange, TextUnit};
use relative_path::{RelativePath, RelativePathBuf};
use ra_syntax::{File, TextRange, TextUnit, AtomEdit};
use rustc_hash::FxHashMap;
use crate::imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp};
pub use ra_editor::{
StructureNode, LineIndex, FileSymbol,
Runnable, RunnableKind, HighlightedRange, CompletionItem,
Fold, FoldKind
};
pub use crate::{
job::{JobToken, JobHandle},
descriptors::FnDescriptor,
job::{JobHandle, JobToken},
};
pub use ra_editor::{
CompletionItem, FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable,
RunnableKind, StructureNode,
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -59,20 +55,24 @@ pub trait FileResolver: Debug + Send + Sync + 'static {
#[derive(Debug)]
pub struct AnalysisHost {
imp: AnalysisHostImpl
imp: AnalysisHostImpl,
}
impl AnalysisHost {
pub fn new() -> AnalysisHost {
AnalysisHost { imp: AnalysisHostImpl::new() }
AnalysisHost {
imp: AnalysisHostImpl::new(),
}
}
pub fn analysis(&self) -> Analysis {
Analysis { imp: self.imp.analysis() }
Analysis {
imp: self.imp.analysis(),
}
}
pub fn change_file(&mut self, file_id: FileId, text: Option<String>) {
self.change_files(::std::iter::once((file_id, text)));
}
pub fn change_files(&mut self, mut changes: impl Iterator<Item=(FileId, Option<String>)>) {
pub fn change_files(&mut self, mut changes: impl Iterator<Item = (FileId, Option<String>)>) {
self.imp.change_files(&mut changes)
}
pub fn set_file_resolver(&mut self, resolver: Arc<FileResolver>) {
@@ -115,7 +115,7 @@ pub enum FileSystemEdit {
MoveFile {
file: FileId,
path: RelativePathBuf,
}
},
}
#[derive(Debug)]
@@ -144,7 +144,7 @@ impl Query {
only_types: false,
libs: false,
exact: false,
limit: usize::max_value()
limit: usize::max_value(),
}
}
pub fn only_types(&mut self) {
@@ -163,7 +163,7 @@ impl Query {
#[derive(Debug)]
pub struct Analysis {
imp: AnalysisImpl
imp: AnalysisImpl,
}
impl Analysis {
@@ -195,7 +195,11 @@ impl Analysis {
}
pub fn on_eq_typed(&self, file_id: FileId, offset: TextUnit) -> Option<SourceChange> {
let file = self.imp.file_syntax(file_id);
Some(SourceChange::from_local_edit(file_id, "add semicolon", ra_editor::on_eq_typed(&file, offset)?))
Some(SourceChange::from_local_edit(
file_id,
"add semicolon",
ra_editor::on_eq_typed(&file, offset)?,
))
}
pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
let file = self.imp.file_syntax(file_id);
@@ -204,8 +208,14 @@ impl Analysis {
pub fn symbol_search(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
self.imp.world_symbols(query, token)
}
pub fn approximately_resolve_symbol(&self, file_id: FileId, offset: TextUnit, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
self.imp.approximately_resolve_symbol(file_id, offset, token)
pub fn approximately_resolve_symbol(
&self,
file_id: FileId,
offset: TextUnit,
token: &JobToken,
) -> Vec<(FileId, FileSymbol)> {
self.imp
.approximately_resolve_symbol(file_id, offset, token)
}
pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
self.imp.parent_module(file_id)
@@ -239,15 +249,19 @@ impl Analysis {
ra_editor::folding_ranges(&file)
}
pub fn resolve_callable(&self, file_id: FileId, offset: TextUnit, token: &JobToken)
-> Option<(FnDescriptor, Option<usize>)> {
pub fn resolve_callable(
&self,
file_id: FileId,
offset: TextUnit,
token: &JobToken,
) -> Option<(FnDescriptor, Option<usize>)> {
self.imp.resolve_callable(file_id, offset, token)
}
}
#[derive(Debug)]
pub struct LibraryData {
root: roots::ReadonlySourceRoot
root: roots::ReadonlySourceRoot,
}
impl LibraryData {

View file

@@ -1,10 +1,11 @@
use std::sync::Arc;
use crate::{
FileId,
db::{SyntaxDatabase},
db::SyntaxDatabase,
descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
FileId,
};
use std::sync::Arc;
salsa::query_group! {
pub(crate) trait ModulesDatabase: SyntaxDatabase {
fn module_tree(key: ()) -> Arc<ModuleTreeDescriptor> {
@@ -16,7 +17,6 @@ salsa::query_group! {
}
}
fn module_descriptor(db: &impl ModulesDatabase, file_id: FileId) -> Arc<ModuleDescriptor> {
let file = db.file_syntax(file_id);
Arc::new(ModuleDescriptor::new(file.ast()))
@@ -29,6 +29,9 @@ fn module_tree(db: &impl ModulesDatabase, (): ()) -> Arc<ModuleTreeDescriptor> {
let module_descr = db.module_descriptor(file_id);
files.push((file_id, module_descr));
}
let res = ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.resolver);
let res = ModuleTreeDescriptor::new(
files.iter().map(|(file_id, descr)| (*file_id, &**descr)),
&file_set.resolver,
);
Arc::new(res)
}

View file

@@ -1,22 +1,19 @@
use std::{
sync::Arc,
panic,
};
use std::{panic, sync::Arc};
use once_cell::sync::OnceCell;
use rayon::prelude::*;
use salsa::Database;
use rustc_hash::{FxHashMap, FxHashSet};
use ra_editor::LineIndex;
use ra_syntax::File;
use rayon::prelude::*;
use rustc_hash::{FxHashMap, FxHashSet};
use salsa::Database;
use crate::{
FileId,
imp::FileResolverImp,
symbol_index::SymbolIndex,
descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
db::{self, FilesDatabase, SyntaxDatabase},
descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
imp::FileResolverImp,
module_map::ModulesDatabase,
symbol_index::SymbolIndex,
FileId,
};
pub(crate) trait SourceRoot {
@@ -35,7 +32,7 @@ pub(crate) struct WritableSourceRoot {
impl WritableSourceRoot {
pub fn apply_changes(
&mut self,
changes: &mut dyn Iterator<Item=(FileId, Option<String>)>,
changes: &mut dyn Iterator<Item = (FileId, Option<String>)>,
file_resolver: Option<FileResolverImp>,
) {
let mut changed = FxHashSet::default();
@@ -46,22 +43,22 @@ impl WritableSourceRoot {
removed.insert(file_id);
}
Some(text) => {
self.db.query(db::FileTextQuery)
self.db
.query(db::FileTextQuery)
.set(file_id, Arc::new(text));
changed.insert(file_id);
}
}
}
let file_set = self.db.file_set(());
let mut files: FxHashSet<FileId> = file_set
.files
.clone();
let mut files: FxHashSet<FileId> = file_set.files.clone();
for file_id in removed {
files.remove(&file_id);
}
files.extend(changed);
let resolver = file_resolver.unwrap_or_else(|| file_set.resolver.clone());
self.db.query(db::FileSetQuery)
self.db
.query(db::FileSetQuery)
.set((), Arc::new(db::FileSet { files, resolver }));
}
}
@@ -71,9 +68,7 @@ impl SourceRoot for WritableSourceRoot {
self.db.module_tree(())
}
fn contains(&self, file_id: FileId) -> bool {
self.db.file_set(())
.files
.contains(&file_id)
self.db.file_set(()).files.contains(&file_id)
}
fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
self.db.file_lines(file_id)
@@ -83,7 +78,7 @@ impl SourceRoot for WritableSourceRoot {
}
fn symbols<'a>(&'a self, acc: &mut Vec<Arc<SymbolIndex>>) {
let db = &self.db;
let symbols = db.file_set(());
let symbols = symbols
.files
.iter()
@@ -108,12 +103,15 @@ impl FileData {
}
}
fn lines(&self) -> &Arc<LineIndex> {
self.lines.get_or_init(|| Arc::new(LineIndex::new(&self.text)))
self.lines
.get_or_init(|| Arc::new(LineIndex::new(&self.text)))
}
fn syntax(&self) -> &File {
let text = &self.text;
let syntax = &self.syntax;
match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
match panic::catch_unwind(panic::AssertUnwindSafe(|| {
syntax.get_or_init(|| File::parse(text))
})) {
Ok(file) => file,
Err(err) => {
error!("Parser paniced on:\n------\n{}\n------\n", text);
@@ -131,22 +129,23 @@ pub(crate) struct ReadonlySourceRoot {
}
impl ReadonlySourceRoot {
pub(crate) fn new(files: Vec<(FileId, String)>, file_resolver: FileResolverImp) -> ReadonlySourceRoot {
let modules = files.par_iter()
pub(crate) fn new(
files: Vec<(FileId, String)>,
file_resolver: FileResolverImp,
) -> ReadonlySourceRoot {
let modules = files
.par_iter()
.map(|(file_id, text)| {
let syntax = File::parse(text);
let mod_descr = ModuleDescriptor::new(syntax.ast());
(*file_id, syntax, mod_descr)
})
.collect::<Vec<_>>();
let module_tree = ModuleTreeDescriptor::new(
modules.iter().map(|it| (it.0, &it.2)),
&file_resolver,
);
let module_tree =
ModuleTreeDescriptor::new(modules.iter().map(|it| (it.0, &it.2)), &file_resolver);
let symbol_index = SymbolIndex::for_files(
modules.par_iter().map(|it| (it.0, it.1.clone()))
);
let symbol_index =
SymbolIndex::for_files(modules.par_iter().map(|it| (it.0, it.1.clone())));
let file_map: FxHashMap<FileId, FileData> = files
.into_iter()
.map(|(id, text)| (id, FileData::new(text)))

View file

@@ -1,15 +1,16 @@
use std::{
sync::Arc,
hash::{Hash, Hasher},
};
use ra_editor::{FileSymbol, file_symbols};
use crate::{FileId, JobToken, Query};
use fst::{self, Streamer};
use ra_editor::{file_symbols, FileSymbol};
use ra_syntax::{
File,
SyntaxKind::{self, *},
};
use fst::{self, Streamer};
use rayon::prelude::*;
use crate::{Query, FileId, JobToken};
use std::{
hash::{Hash, Hasher},
sync::Arc,
};
#[derive(Debug)]
pub(crate) struct SymbolIndex {
@@ -23,8 +24,7 @@ impl PartialEq for SymbolIndex {
}
}
impl Eq for SymbolIndex {
}
impl Eq for SymbolIndex {}
impl Hash for SymbolIndex {
fn hash<H: Hasher>(&self, hasher: &mut H) {
@@ -33,14 +33,12 @@ impl Hash for SymbolIndex {
}
impl SymbolIndex {
pub(crate) fn for_files(files: impl ParallelIterator<Item=(FileId, File)>) -> SymbolIndex {
pub(crate) fn for_files(files: impl ParallelIterator<Item = (FileId, File)>) -> SymbolIndex {
let mut symbols = files
.flat_map(|(file_id, file)| {
file_symbols(&file)
.into_iter()
.map(move |symbol| {
(symbol.name.as_str().to_lowercase(), (file_id, symbol))
})
.map(move |symbol| (symbol.name.as_str().to_lowercase(), (file_id, symbol)))
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
@@ -48,9 +46,7 @@ impl SymbolIndex {
symbols.dedup_by(|s1, s2| s1.0 == s2.0);
let (names, symbols): (Vec<String>, Vec<(FileId, FileSymbol)>) =
symbols.into_iter().unzip();
let map = fst::Map::from_iter(
names.into_iter().zip(0u64..)
).unwrap();
let map = fst::Map::from_iter(names.into_iter().zip(0u64..)).unwrap();
SymbolIndex { symbols, map }
}
@@ -65,7 +61,6 @@ impl Query {
indices: &[Arc<SymbolIndex>],
token: &JobToken,
) -> Vec<(FileId, FileSymbol)> {
let mut op = fst::map::OpBuilder::new();
for file_symbols in indices.iter() {
let automaton = fst::automaton::Subsequence::new(&self.lowercased);