rename all things

This commit is contained in:
Aleksey Kladov 2018-09-16 12:54:24 +03:00
parent ba0bfeee12
commit b5021411a8
478 changed files with 219 additions and 204 deletions

View file

@ -0,0 +1,152 @@
use std::{
sync::Arc,
any::Any,
hash::{Hash, Hasher},
collections::hash_map::{DefaultHasher, HashMap},
iter,
};
use salsa;
use {FileId, imp::FileResolverImp};
use super::{State, Query, QueryCtx};
/// Type-erased query value: every query output is stored behind an
/// `Arc<Any>` and downcast back to its concrete type at the access points.
pub(super) type Data = Arc<Any + Send + Sync + 'static>;
/// Wrapper around the generic `salsa::Db`, pairing it with a table that
/// maps query type ids back to human-readable names (used for tracing).
#[derive(Debug)]
pub(super) struct Db {
    // Shared via `Arc` so that `with_changes` snapshots can reuse it.
    names: Arc<HashMap<salsa::QueryTypeId, &'static str>>,
    pub(super) imp: salsa::Db<State, Data>,
}
impl Db {
    /// Builds a database from a fully-populated registry, consuming its
    /// query configuration.
    pub(super) fn new(mut reg: QueryRegistry) -> Db {
        // `config` is always `Some` for a registry built by
        // `QueryRegistry::new`; `take` moves it out of the builder.
        let config = reg.config.take().unwrap();
        Db {
            names: Arc::new(reg.names),
            imp: salsa::Db::new(config, State::default())
        }
    }
    /// Returns a new `Db` snapshot with `new_state` installed, telling the
    /// engine which ground inputs became invalid.
    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
        let names = self.names.clone();
        let mut invalidations = salsa::Invalidations::new();
        // One FILE_TEXT input per changed file, keyed by the hash of its id.
        invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
        if resolver_changed {
            // FILE_SET takes `()` as input, so `hash(&())` is its only key.
            invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
        } else {
            invalidations.invalidate(FILE_SET, iter::empty());
        }
        let imp = self.imp.with_ground_data(
            new_state,
            invalidations,
        );
        Db { names, imp }
    }
    /// Maps the trace of executed queries recorded in `ctx` back to their
    /// registered names (used by tests to check incrementality).
    pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
        ctx.trace().into_iter().map(|it| self.names[&it]).collect()
    }
}
/// A query as seen by the registry: how to identify it, how to run it in
/// type-erased form, and how to fetch a result through a `QueryCtx`.
pub(crate) trait EvalQuery {
    type Params;
    type Output;
    /// Stable identifier of this query kind inside the database.
    fn query_type(&self) -> salsa::QueryTypeId;
    /// Type-erased evaluation function handed to salsa.
    fn f(&self) -> salsa::QueryFn<State, Data>;
    /// Computes (or reuses) the query result for the given params.
    fn get(&self, &QueryCtx, Self::Params) -> Arc<Self::Output>;
}
impl<T, R> EvalQuery for Query<T, R>
where
    T: Hash + Send + Sync + 'static,
    R: Hash + Send + Sync + 'static,
{
    type Params = T;
    type Output = R;
    fn query_type(&self) -> salsa::QueryTypeId {
        // `Query.0` is the manually-assigned numeric id of the query.
        salsa::QueryTypeId(self.0)
    }
    fn f(&self) -> salsa::QueryFn<State, Data> {
        let f = self.1;
        // Adapt the strongly-typed query fn to salsa's erased shape:
        // downcast the params, run, then hash the output to fingerprint it.
        Box::new(move |ctx, data| {
            let ctx = QueryCtx { imp: ctx };
            let data: &T = data.downcast_ref().unwrap();
            let res = f(ctx, data);
            let h = hash(&res);
            (Arc::new(res), salsa::OutputFingerprint(h))
        })
    }
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
        // A concrete query instance is (query type, hash of its params).
        let query_id = salsa::QueryId(
            self.query_type(),
            salsa::InputFingerprint(hash(&params)),
        );
        let res = ctx.imp.get(query_id, Arc::new(params));
        res.downcast().unwrap()
    }
}
/// Builder that collects all queries before the `Db` is created.
pub(super) struct QueryRegistry {
    // `Option` so `add` can `take` the config, thread it through the
    // by-value `with_query` API, and put it back.
    config: Option<salsa::QueryConfig<State, Data>>,
    names: HashMap<salsa::QueryTypeId, &'static str>,
}
impl QueryRegistry {
    /// Creates a registry pre-populated with the two ground queries,
    /// FILE_TEXT and FILE_SET, which read directly from the `State`.
    pub(super) fn new() -> QueryRegistry {
        let mut config = salsa::QueryConfig::<State, Data>::new();
        config = config.with_ground_query(FILE_TEXT, Box::new(|state, params| {
            let file_id: &FileId = params.downcast_ref().unwrap();
            let text = state.file_map[file_id].clone();
            let fingerprint = salsa::OutputFingerprint(hash(&text));
            (text, fingerprint)
        }));
        config = config.with_ground_query(FILE_SET, Box::new(|state, _params| {
            let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
            // The fingerprint covers only the ids; resolver changes are
            // signalled separately via invalidations (see `Db::with_changes`).
            let fingerprint = salsa::OutputFingerprint(hash(&file_ids));
            let res = (file_ids, state.file_resolver.clone());
            (Arc::new(res), fingerprint)
        }));
        let mut names = HashMap::new();
        names.insert(FILE_TEXT, "FILE_TEXT");
        names.insert(FILE_SET, "FILE_SET");
        QueryRegistry { config: Some(config), names }
    }
    /// Registers a derived query under a human-readable `name`; query ids
    /// must be unique.
    pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
        let id = q.query_type();
        let prev = self.names.insert(id, name);
        assert!(prev.is_none(), "duplicate query: {:?}", id);
        let config = self.config.take().unwrap();
        self.config = Some(config.with_query(id, q.f()));
    }
}
/// Hashes any `Hash` value with the std `DefaultHasher`, producing the
/// `u64` used for salsa input/output fingerprints.
fn hash<T: Hash>(x: &T) -> u64 {
    let mut h = DefaultHasher::new();
    Hash::hash(x, &mut h);
    h.finish()
}
/// Ground query id for "text of one file" (input: `FileId`).
const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
/// Fetches the current text of `file_id` from the ground data.
pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
    let query_id = salsa::QueryId(
        FILE_TEXT,
        salsa::InputFingerprint(hash(&file_id)),
    );
    let res = ctx.imp.get(query_id, Arc::new(file_id));
    res.downcast().unwrap()
}
/// Ground query id for "all files plus the resolver" (input: `()`).
const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
/// Fetches the set of all known file ids and the current resolver.
pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
    let query_id = salsa::QueryId(
        FILE_SET,
        // The query has no parameters, so the unit hash is the only key.
        salsa::InputFingerprint(hash(&())),
    );
    let res = ctx.imp.get(query_id, Arc::new(()));
    res.downcast().unwrap()
}

View file

@ -0,0 +1,85 @@
mod imp;
use std::{
sync::Arc,
};
use im;
use salsa;
use {FileId, imp::FileResolverImp};
/// Ground (input) data of the database: the text of every file plus the
/// path-resolution strategy. `im::HashMap` makes cloning a whole `State`
/// cheap through structural sharing.
#[derive(Debug, Default, Clone)]
pub(crate) struct State {
    pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
    pub(crate) file_resolver: FileResolverImp
}
/// Public facade over the `imp::Db` salsa wrapper.
#[derive(Debug)]
pub(crate) struct Db {
    imp: imp::Db,
}
/// Borrowed handle passed to query functions, letting them invoke other
/// queries.
#[derive(Clone, Copy)]
pub(crate) struct QueryCtx<'a> {
    imp: &'a salsa::QueryCtx<State, imp::Data>,
}
/// A query definition: a unique numeric id plus the function computing the
/// result from the params.
pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
/// Public wrapper over `imp::QueryRegistry`.
pub(crate) struct QueryRegistry {
    imp: imp::QueryRegistry,
}
impl Default for Db {
    /// Same as `Db::new`: an empty database with all queries registered.
    fn default() -> Db {
        Db::new()
    }
}
impl Db {
    pub(crate) fn new() -> Db {
        let reg = QueryRegistry::new();
        Db { imp: imp::Db::new(reg.imp) }
    }
    /// Read access to the current ground data (file texts + resolver).
    pub(crate) fn state(&self) -> &State {
        self.imp.imp.ground_data()
    }
    /// New `Db` with `new_state` applied; `changed_files` and
    /// `resolver_changed` tell the engine which inputs to invalidate.
    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
        Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
    }
    /// Runs `f` with a fresh query context.
    pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
        let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
        f(ctx)
    }
    /// Like `make_query`, but also returns the names of the queries that
    /// actually executed — used by tests to assert incrementality.
    #[allow(unused)]
    pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
        let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
        let res = f(ctx);
        let trace = self.imp.extract_trace(ctx.imp);
        (res, trace)
    }
}
impl<'a> QueryCtx<'a> {
    /// Computes (or fetches the cached result of) query `q` for `params`.
    pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
        q.get(self, params)
    }
}
/// Text of `file_id` (ground input — see `imp::file_text`).
pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
    imp::file_text(ctx, file_id)
}
/// All known file ids plus the current resolver (ground input).
pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
    imp::file_set(ctx)
}
impl QueryRegistry {
    fn new() -> QueryRegistry {
        let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
        // All query groups are registered up front, before the first `Db`
        // is created.
        ::queries::register_queries(&mut reg);
        ::module_map::register_queries(&mut reg);
        reg
    }
    /// Registers a query under a human-readable `name` (used in traces).
    pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
        self.imp.add(q, name)
    }
}

View file

@ -0,0 +1,220 @@
use std::{
collections::BTreeMap,
};
use relative_path::RelativePathBuf;
use ra_syntax::{
SmolStr,
ast::{self, NameOwner},
};
use {
FileId,
imp::FileResolverImp,
};
/// Summary of a single file's `mod` declarations. `Hash` is derived so the
/// descriptor can be fingerprinted by the query engine.
#[derive(Debug, Hash)]
pub struct ModuleDescriptor {
    pub submodules: Vec<Submodule>
}
impl ModuleDescriptor {
pub fn new(root: ast::Root) -> ModuleDescriptor {
let submodules = modules(root)
.map(|(name, _)| Submodule { name })
.collect();
ModuleDescriptor { submodules } }
}
/// Iterates over the `mod <name>;` declarations of `root`, yielding each
/// module's name together with its syntax node. Nameless modules and
/// inline (`mod foo { .. }`) modules are skipped.
fn modules<'a>(root: ast::Root<'a>) -> impl Iterator<Item=(SmolStr, ast::Module<'a>)> {
    root.modules().filter_map(|module| {
        let name = module.name()?.text();
        if module.has_semi() {
            Some((name, module))
        } else {
            None
        }
    })
}
/// One `mod foo;` declaration, identified by name.
#[derive(Clone, Hash, PartialEq, Eq, Debug)]
pub struct Submodule {
    pub name: SmolStr,
}
/// Whole-project module tree stored as an index-based graph: `Node`s are
/// files, `Link`s are `mod` declarations connecting parent to child.
#[derive(Hash, Debug)]
pub(crate) struct ModuleTreeDescriptor {
    nodes: Vec<NodeData>,
    links: Vec<LinkData>,
    // BTreeMap rather than HashMap: HashMap does not implement `Hash`,
    // and the ordered map keeps the derived `Hash` insertion-order-free.
    file_id2node: BTreeMap<FileId, Node>,
}
/// Index into `ModuleTreeDescriptor::nodes`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Node(usize);
#[derive(Hash, Debug)]
struct NodeData {
    file_id: FileId,
    // Outgoing `mod` declarations of this file.
    links: Vec<Link>,
    // Links from parent modules that point at this file.
    parents: Vec<Link>
}
/// Index into `ModuleTreeDescriptor::links`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub(crate) struct Link(usize);
#[derive(Hash, Debug)]
struct LinkData {
    owner: Node,
    name: SmolStr,
    // Candidate target files; empty when the module is unresolved.
    points_to: Vec<Node>,
    problem: Option<Problem>,
}
/// A diagnosable issue with a `mod` declaration.
#[derive(Clone, Debug, Hash)]
pub enum Problem {
    // `mod foo;` whose file does not exist; `candidate` is the path a fix
    // would create.
    UnresolvedModule {
        candidate: RelativePathBuf,
    },
    // `mod foo;` declared in a file that cannot own submodules; the fix is
    // to move the file to `move_to` and create `candidate`.
    NotDirOwner {
        move_to: RelativePathBuf,
        candidate: RelativePathBuf,
    }
}
impl ModuleTreeDescriptor {
    /// Builds the module graph for a set of files. `files` is iterated
    /// twice (hence the `Clone` bound): once to allocate the nodes, once
    /// to wire up the links.
    pub(crate) fn new<'a>(
        files: impl Iterator<Item=(FileId, &'a ModuleDescriptor)> + Clone,
        file_resolver: &FileResolverImp,
    ) -> ModuleTreeDescriptor {
        let mut file_id2node = BTreeMap::new();
        // Pass 1: one node per file, in iteration order.
        let mut nodes: Vec<NodeData> = files.clone().enumerate()
            .map(|(idx, (file_id, _))| {
                file_id2node.insert(file_id, Node(idx));
                NodeData {
                    file_id,
                    links: Vec::new(),
                    parents: Vec::new(),
                }
            })
            .collect();
        let mut links = Vec::new();
        // Pass 2: one link per `mod` declaration, resolved to candidate
        // files through `file_resolver`.
        for (idx, (file_id, descr)) in files.enumerate() {
            let owner = Node(idx);
            for sub in descr.submodules.iter() {
                let link = Link(links.len());
                nodes[owner.0].links.push(link);
                let (points_to, problem) = resolve_submodule(file_id, &sub.name, file_resolver);
                let points_to = points_to
                    .into_iter()
                    .map(|file_id| {
                        let node = file_id2node[&file_id];
                        // Record the back-edge on the child node.
                        nodes[node.0].parents.push(link);
                        node
                    })
                    .collect();
                links.push(LinkData {
                    owner,
                    name: sub.name.clone(),
                    points_to,
                    problem,
                })
            }
        }
        ModuleTreeDescriptor {
            nodes, links, file_id2node
        }
    }
    /// Links of the `mod` declarations that point at `file_id`.
    pub(crate) fn parent_modules(&self, file_id: FileId) -> Vec<Link> {
        let node = self.file_id2node[&file_id];
        self.node(node)
            .parents
            .clone()
    }
    /// Files that `mod <name>;` inside `file_id` resolves to.
    pub(crate) fn child_module_by_name(&self, file_id: FileId, name: &str) -> Vec<FileId> {
        let node = self.file_id2node[&file_id];
        self.node(node)
            .links
            .iter()
            .filter(|it| it.name(self) == name)
            .flat_map(|link| link.points_to(self).iter().map(|&node| self.node(node).file_id))
            .collect()
    }
    /// Pairs each problematic `mod` declaration in `file_id` with the
    /// `ast::Name` node it came from, for diagnostics.
    pub(crate) fn problems<'a, 'b>(&'b self, file_id: FileId, root: ast::Root<'a>) -> Vec<(ast::Name<'a>, &'b Problem)> {
        let node = self.file_id2node[&file_id];
        self.node(node)
            .links
            .iter()
            .filter_map(|&link| {
                let problem = self.link(link).problem.as_ref()?;
                let name = link.bind_source(self, root).name()?;
                Some((name, problem))
            })
            .collect()
    }
    fn node(&self, node: Node) -> &NodeData {
        &self.nodes[node.0]
    }
    fn link(&self, link: Link) -> &LinkData {
        &self.links[link.0]
    }
}
impl Link {
    pub(crate) fn name(self, tree: &ModuleTreeDescriptor) -> SmolStr {
        tree.link(self).name.clone()
    }
    /// The file containing the `mod` declaration this link represents.
    pub(crate) fn owner(self, tree: &ModuleTreeDescriptor) -> FileId {
        let owner = tree.link(self).owner;
        tree.node(owner).file_id
    }
    fn points_to(self, tree: &ModuleTreeDescriptor) -> &[Node] {
        &tree.link(self).points_to
    }
    /// Finds the syntax node of this link's `mod` declaration in `root`.
    /// Panics if `root` has no declaration with a matching name, so `root`
    /// must be the syntax of the owner file.
    pub(crate) fn bind_source<'a>(self, tree: &ModuleTreeDescriptor, root: ast::Root<'a>) -> ast::Module<'a> {
        modules(root)
            .filter(|(name, _)| name == &tree.link(self).name)
            .next()
            .unwrap()
            .1
    }
}
fn resolve_submodule(
file_id: FileId,
name: &SmolStr,
file_resolver: &FileResolverImp
) -> (Vec<FileId>, Option<Problem>) {
let mod_name = file_resolver.file_stem(file_id);
let is_dir_owner =
mod_name == "mod" || mod_name == "lib" || mod_name == "main";
let file_mod = RelativePathBuf::from(format!("../{}.rs", name));
let dir_mod = RelativePathBuf::from(format!("../{}/mod.rs", name));
let points_to: Vec<FileId>;
let problem: Option<Problem>;
if is_dir_owner {
points_to = [&file_mod, &dir_mod].iter()
.filter_map(|path| file_resolver.resolve(file_id, path))
.collect();
problem = if points_to.is_empty() {
Some(Problem::UnresolvedModule {
candidate: file_mod,
})
} else {
None
}
} else {
points_to = Vec::new();
problem = Some(Problem::NotDirOwner {
move_to: RelativePathBuf::from(format!("../{}/mod.rs", mod_name)),
candidate: file_mod,
});
}
(points_to, problem)
}

View file

@ -0,0 +1,342 @@
use std::{
sync::{
Arc,
atomic::{AtomicBool, Ordering::SeqCst},
},
fmt,
collections::{HashSet, VecDeque},
iter,
};
use relative_path::RelativePath;
use ra_editor::{self, FileSymbol, LineIndex, find_node_at_offset, LocalEdit};
use ra_syntax::{
TextUnit, TextRange, SmolStr, File, AstNode,
SyntaxKind::*,
ast::{self, NameOwner},
};
use {
FileId, FileResolver, Query, Diagnostic, SourceChange, SourceFileEdit, Position, FileSystemEdit,
JobToken, CrateGraph, CrateId,
roots::{SourceRoot, ReadonlySourceRoot, WritableSourceRoot},
descriptors::{ModuleTreeDescriptor, Problem},
};
/// Cheaply-cloneable wrapper around the embedder-supplied `FileResolver`.
#[derive(Clone, Debug)]
pub(crate) struct FileResolverImp {
    inner: Arc<FileResolver>
}
impl FileResolverImp {
    pub(crate) fn new(inner: Arc<FileResolver>) -> FileResolverImp {
        FileResolverImp { inner }
    }
    /// File name of `file_id` without its extension.
    pub(crate) fn file_stem(&self, file_id: FileId) -> String {
        self.inner.file_stem(file_id)
    }
    /// Resolves `path` relative to `file_id`, if such a file exists.
    pub(crate) fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId> {
        self.inner.resolve(file_id, path)
    }
}
impl Default for FileResolverImp {
    /// Placeholder resolver used before the host installs a real one; any
    /// use of it is a bug, so both methods panic.
    fn default() -> FileResolverImp {
        #[derive(Debug)]
        struct DummyResolver;
        impl FileResolver for DummyResolver {
            fn file_stem(&self, _file_: FileId) -> String {
                panic!("file resolver not set")
            }
            fn resolve(&self, _file_id: FileId, _path: &::relative_path::RelativePath) -> Option<FileId> {
                panic!("file resolver not set")
            }
        }
        FileResolverImp { inner: Arc::new(DummyResolver) }
    }
}
/// Mutable owner of the analysis state. `data` sits behind an `Arc` so
/// outstanding `AnalysisImpl` snapshots keep the old state alive while the
/// host moves on (copy-on-write via `Arc::make_mut` in `data_mut`).
#[derive(Debug)]
pub(crate) struct AnalysisHostImpl {
    data: Arc<WorldData>
}
impl AnalysisHostImpl {
    pub fn new() -> AnalysisHostImpl {
        AnalysisHostImpl {
            data: Arc::new(WorldData::default()),
        }
    }
    /// Takes a consistent snapshot of the current state for read-only use.
    pub fn analysis(&self) -> AnalysisImpl {
        AnalysisImpl {
            needs_reindex: AtomicBool::new(false),
            data: self.data.clone(),
        }
    }
    /// Adds/updates (`Some`) or removes (`None`) file texts in the
    /// writable root.
    pub fn change_files(&mut self, changes: &mut dyn Iterator<Item=(FileId, Option<String>)>) {
        let data = self.data_mut();
        data.root = Arc::new(data.root.apply_changes(changes, None));
    }
    pub fn set_file_resolver(&mut self, resolver: FileResolverImp) {
        let data = self.data_mut();
        data.file_resolver = resolver.clone();
        // No file texts change, but the root must learn the new resolver.
        data.root = Arc::new(data.root.apply_changes(&mut iter::empty(), Some(resolver)));
    }
    /// Replaces the crate graph; panics if two crates share a root file.
    pub fn set_crate_graph(&mut self, graph: CrateGraph) {
        let mut visited = HashSet::new();
        for &file_id in graph.crate_roots.values() {
            if !visited.insert(file_id) {
                panic!("duplicate crate root: {:?}", file_id);
            }
        }
        self.data_mut().crate_graph = graph;
    }
    pub fn add_library(&mut self, root: ReadonlySourceRoot) {
        self.data_mut().libs.push(Arc::new(root));
    }
    // Copy-on-write: clones `WorldData` only if a snapshot still holds it.
    fn data_mut(&mut self) -> &mut WorldData {
        Arc::make_mut(&mut self.data)
    }
}
/// Read-only snapshot of the world, handed out to request handlers.
pub(crate) struct AnalysisImpl {
    // NOTE(review): written/copied but never read in this file —
    // presumably a placeholder for future incremental reindexing.
    needs_reindex: AtomicBool,
    data: Arc<WorldData>,
}
// Manual impls: `Debug` shows just the world data, and `Clone` must
// snapshot the atomic flag explicitly.
impl fmt::Debug for AnalysisImpl {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        (&*self.data).fmt(f)
    }
}
impl Clone for AnalysisImpl {
    fn clone(&self) -> AnalysisImpl {
        AnalysisImpl {
            needs_reindex: AtomicBool::new(self.needs_reindex.load(SeqCst)),
            data: Arc::clone(&self.data),
        }
    }
}
impl AnalysisImpl {
    /// Finds the source root (workspace or a library) owning `file_id`.
    /// Panics if the file belongs to no known root.
    fn root(&self, file_id: FileId) -> &SourceRoot {
        if self.data.root.contains(file_id) {
            return &*self.data.root;
        }
        &**self.data.libs.iter().find(|it| it.contains(file_id)).unwrap()
    }
    pub fn file_syntax(&self, file_id: FileId) -> File {
        self.root(file_id).syntax(file_id)
    }
    pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
        self.root(file_id).lines(file_id)
    }
    /// Symbol search over either the libraries (`query.libs`) or the
    /// workspace root.
    pub fn world_symbols(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
        let mut buf = Vec::new();
        if query.libs {
            self.data.libs.iter()
                .for_each(|it| it.symbols(&mut buf));
        } else {
            self.data.root.symbols(&mut buf);
        }
        query.search(&buf, token)
    }
    /// For each `mod` declaration pointing at `file_id`, a symbol for the
    /// declaration site in the parent file.
    pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
        let root = self.root(file_id);
        let module_tree = root.module_tree();
        module_tree.parent_modules(file_id)
            .iter()
            .map(|link| {
                let file_id = link.owner(&module_tree);
                let syntax = root.syntax(file_id);
                let decl = link.bind_source(&module_tree, syntax.ast());
                let sym = FileSymbol {
                    name: link.name(&module_tree),
                    node_range: decl.syntax().range(),
                    kind: MODULE,
                };
                (file_id, sym)
            })
            .collect()
    }
    /// Crates containing `file_id`: BFS over parent modules (cycle-safe
    /// via `visited`) until crate roots are reached.
    pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
        let module_tree = self.root(file_id).module_tree();
        let crate_graph = &self.data.crate_graph;
        let mut res = Vec::new();
        let mut work = VecDeque::new();
        work.push_back(file_id);
        let mut visited = HashSet::new();
        while let Some(id) = work.pop_front() {
            if let Some(crate_id) = crate_graph.crate_id_for_crate_root(id) {
                res.push(crate_id);
                continue;
            }
            let parents = module_tree
                .parent_modules(id)
                .into_iter()
                .map(|link| link.owner(&module_tree))
                .filter(|&id| visited.insert(id));
            work.extend(parents);
        }
        res
    }
    pub fn crate_root(&self, crate_id: CrateId) -> FileId {
        self.data.crate_graph.crate_roots[&crate_id]
    }
    /// Best-effort go-to-definition: a name reference goes through the
    /// symbol index; a name on a `mod foo;` declaration resolves to the
    /// module's file(s).
    pub fn approximately_resolve_symbol(
        &self,
        file_id: FileId,
        offset: TextUnit,
        token: &JobToken,
    ) -> Vec<(FileId, FileSymbol)> {
        let root = self.root(file_id);
        let module_tree = root.module_tree();
        let file = root.syntax(file_id);
        let syntax = file.syntax();
        if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
            return self.index_resolve(name_ref, token);
        }
        if let Some(name) = find_node_at_offset::<ast::Name>(syntax, offset) {
            if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
                if module.has_semi() {
                    let file_ids = self.resolve_module(&*module_tree, file_id, module);
                    let res = file_ids.into_iter().map(|id| {
                        let name = module.name()
                            .map(|n| n.text())
                            .unwrap_or_else(|| SmolStr::new(""));
                        // Zero-length range: point at the start of the
                        // target file.
                        let symbol = FileSymbol {
                            name,
                            node_range: TextRange::offset_len(0.into(), 0.into()),
                            kind: MODULE,
                        };
                        (id, symbol)
                    }).collect();
                    return res;
                }
            }
        }
        vec![]
    }
    /// Syntax errors plus module-tree problems, each with an optional fix.
    pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
        let root = self.root(file_id);
        let module_tree = root.module_tree();
        let syntax = root.syntax(file_id);
        let mut res = ra_editor::diagnostics(&syntax)
            .into_iter()
            .map(|d| Diagnostic { range: d.range, message: d.msg, fix: None })
            .collect::<Vec<_>>();
        for (name_node, problem) in module_tree.problems(file_id, syntax.ast()) {
            let diag = match problem {
                Problem::UnresolvedModule { candidate } => {
                    let create_file = FileSystemEdit::CreateFile {
                        anchor: file_id,
                        path: candidate.clone(),
                    };
                    let fix = SourceChange {
                        label: "create module".to_string(),
                        source_file_edits: Vec::new(),
                        file_system_edits: vec![create_file],
                        cursor_position: None,
                    };
                    Diagnostic {
                        range: name_node.syntax().range(),
                        message: "unresolved module".to_string(),
                        fix: Some(fix),
                    }
                }
                Problem::NotDirOwner { move_to, candidate } => {
                    let move_file = FileSystemEdit::MoveFile { file: file_id, path: move_to.clone() };
                    let create_file = FileSystemEdit::CreateFile { anchor: file_id, path: move_to.join(candidate) };
                    let fix = SourceChange {
                        label: "move file and create module".to_string(),
                        source_file_edits: Vec::new(),
                        file_system_edits: vec![move_file, create_file],
                        cursor_position: None,
                    };
                    Diagnostic {
                        range: name_node.syntax().range(),
                        message: "can't declare module at this location".to_string(),
                        fix: Some(fix),
                    }
                }
            };
            res.push(diag)
        }
        res
    }
    /// All context actions applicable at `range`, ready to apply.
    pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
        let file = self.file_syntax(file_id);
        let offset = range.start();
        // Each ra_editor action returns an `Option` of a closure producing
        // the edit; applicable ones are invoked immediately.
        let actions = vec![
            ("flip comma", ra_editor::flip_comma(&file, offset).map(|f| f())),
            ("add `#[derive]`", ra_editor::add_derive(&file, offset).map(|f| f())),
            ("add impl", ra_editor::add_impl(&file, offset).map(|f| f())),
            ("introduce variable", ra_editor::introduce_variable(&file, range).map(|f| f())),
        ];
        actions.into_iter()
            .filter_map(|(name, local_edit)| {
                Some(SourceChange::from_local_edit(
                    file_id, name, local_edit?,
                ))
            })
            .collect()
    }
    // Exact-match, limited lookup in the workspace symbol index.
    fn index_resolve(&self, name_ref: ast::NameRef, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
        let name = name_ref.text();
        let mut query = Query::new(name.to_string());
        query.exact();
        query.limit(4);
        self.world_symbols(query, token)
    }
    fn resolve_module(&self, module_tree: &ModuleTreeDescriptor, file_id: FileId, module: ast::Module) -> Vec<FileId> {
        let name = match module.name() {
            Some(name) => name.text(),
            None => return Vec::new(),
        };
        module_tree.child_module_by_name(file_id, name.as_str())
    }
}
/// Complete per-snapshot state of the world: one writable source root (the
/// workspace) plus any number of read-only library roots.
#[derive(Default, Clone, Debug)]
struct WorldData {
    file_resolver: FileResolverImp,
    crate_graph: CrateGraph,
    root: Arc<WritableSourceRoot>,
    libs: Vec<Arc<ReadonlySourceRoot>>,
}
impl SourceChange {
    /// Lifts a single-file `LocalEdit` (from ra_editor) into a
    /// `SourceChange`, carrying over the suggested cursor position.
    pub(crate) fn from_local_edit(file_id: FileId, label: &str, edit: LocalEdit) -> SourceChange {
        let file_edit = SourceFileEdit {
            file_id,
            edits: edit.edit.into_atoms(),
        };
        SourceChange {
            label: label.to_string(),
            source_file_edits: vec![file_edit],
            file_system_edits: vec![],
            cursor_position: edit.cursor_position
                .map(|offset| Position { offset, file_id })
        }
    }
}
impl CrateGraph {
    /// Reverse lookup: the crate whose root file is `file_id`, if any.
    fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
        for (&crate_id, &root_id) in self.crate_roots.iter() {
            if root_id == file_id {
                return Some(crate_id);
            }
        }
        None
    }
}

View file

@ -0,0 +1,49 @@
use crossbeam_channel::{bounded, Receiver, Sender};
/// Caller-side half of a cancellation pair. Dropping the handle closes
/// `_job_canceled`, which the job observes as cancellation.
pub struct JobHandle {
    job_alive: Receiver<Never>,
    _job_canceled: Sender<Never>,
}
/// Job-side half: dropping the token closes `_job_alive`, which the handle
/// observes as completion.
pub struct JobToken {
    _job_alive: Sender<Never>,
    job_canceled: Receiver<Never>,
}
impl JobHandle {
    /// Creates a connected handle/token pair. The zero-capacity channels
    /// are never sent on; only their connected/closed state matters.
    pub fn new() -> (JobHandle, JobToken) {
        let (sender_alive, receiver_alive) = bounded(0);
        let (sender_canceled, receiver_canceled) = bounded(0);
        let token = JobToken { _job_alive: sender_alive, job_canceled: receiver_canceled };
        let handle = JobHandle { job_alive: receiver_alive, _job_canceled: sender_canceled };
        (handle, token)
    }
    /// True once the matching `JobToken` has been dropped.
    pub fn has_completed(&self) -> bool {
        is_closed(&self.job_alive)
    }
    /// Cancels by consuming `self`, dropping the cancellation sender.
    pub fn cancel(self) {
    }
}
impl JobToken {
    /// True once the matching `JobHandle` has been dropped or cancelled.
    pub fn is_canceled(&self) -> bool {
        is_closed(&self.job_canceled)
    }
}
// We don't actually send messages through the channels,
// and instead just check if the channel is closed,
// so we use an uninhabited enum as the message type.
enum Never {}
/// Non-blocking check whether `chan`'s other end has been dropped.
fn is_closed(chan: &Receiver<Never>) -> bool {
    select! {
        recv(chan, msg) => match msg {
            // `None` means the channel is closed.
            None => true,
            // A real message is impossible: `Never` has no values.
            Some(never) => match never {}
        }
        default => false,
    }
}

View file

@ -0,0 +1,240 @@
extern crate parking_lot;
#[macro_use]
extern crate log;
extern crate once_cell;
extern crate ra_syntax;
extern crate ra_editor;
extern crate fst;
extern crate rayon;
extern crate relative_path;
#[macro_use]
extern crate crossbeam_channel;
extern crate im;
extern crate salsa;
mod symbol_index;
mod module_map;
mod imp;
mod job;
mod roots;
mod db;
mod queries;
mod descriptors;
use std::{
sync::Arc,
collections::HashMap,
fmt::Debug,
};
use relative_path::{RelativePath, RelativePathBuf};
use ra_syntax::{File, TextRange, TextUnit, AtomEdit};
use imp::{AnalysisImpl, AnalysisHostImpl, FileResolverImp};
pub use ra_editor::{
StructureNode, LineIndex, FileSymbol,
Runnable, RunnableKind, HighlightedRange, CompletionItem,
};
pub use job::{JobToken, JobHandle};
/// Identity of a file, assigned by the embedder.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileId(pub u32);
/// Identity of a crate within the `CrateGraph`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CrateId(pub u32);
/// Maps each crate to its root file.
#[derive(Debug, Clone, Default)]
pub struct CrateGraph {
    pub crate_roots: HashMap<CrateId, FileId>,
}
/// Embedder-provided path semantics; the analysis itself never touches
/// the real file system.
pub trait FileResolver: Debug + Send + Sync + 'static {
    /// File name of `file_id` without its extension.
    fn file_stem(&self, file_id: FileId) -> String;
    /// Resolves `path` relative to `file_id`; `None` if no such file.
    fn resolve(&self, file_id: FileId, path: &RelativePath) -> Option<FileId>;
}
/// Public entry point: owns the analysis state and accepts changes.
#[derive(Debug)]
pub struct AnalysisHost {
    imp: AnalysisHostImpl
}
impl AnalysisHost {
    pub fn new() -> AnalysisHost {
        AnalysisHost { imp: AnalysisHostImpl::new() }
    }
    /// Takes a read-only snapshot for answering queries.
    pub fn analysis(&self) -> Analysis {
        Analysis { imp: self.imp.analysis() }
    }
    /// Sets (`Some`) or removes (`None`) the text of one file.
    pub fn change_file(&mut self, file_id: FileId, text: Option<String>) {
        self.change_files(::std::iter::once((file_id, text)));
    }
    pub fn change_files(&mut self, mut changes: impl Iterator<Item=(FileId, Option<String>)>) {
        self.imp.change_files(&mut changes)
    }
    pub fn set_file_resolver(&mut self, resolver: Arc<FileResolver>) {
        self.imp.set_file_resolver(FileResolverImp::new(resolver));
    }
    pub fn set_crate_graph(&mut self, graph: CrateGraph) {
        self.imp.set_crate_graph(graph)
    }
    /// Attaches a prebuilt read-only library root (see `LibraryData`).
    pub fn add_library(&mut self, data: LibraryData) {
        self.imp.add_library(data.root)
    }
}
/// A proposed change: text edits plus file-system operations, with an
/// optional cursor placement for the editor.
#[derive(Debug)]
pub struct SourceChange {
    pub label: String,
    pub source_file_edits: Vec<SourceFileEdit>,
    pub file_system_edits: Vec<FileSystemEdit>,
    pub cursor_position: Option<Position>,
}
/// Offset-based position inside one file.
#[derive(Debug)]
pub struct Position {
    pub file_id: FileId,
    pub offset: TextUnit,
}
/// Atomic text edits to apply to a single file.
#[derive(Debug)]
pub struct SourceFileEdit {
    pub file_id: FileId,
    pub edits: Vec<AtomEdit>,
}
/// File creation/move, expressed relative to an existing file.
#[derive(Debug)]
pub enum FileSystemEdit {
    CreateFile {
        anchor: FileId,
        path: RelativePathBuf,
    },
    MoveFile {
        file: FileId,
        path: RelativePathBuf,
    }
}
/// A reported problem plus an optional automated fix.
#[derive(Debug)]
pub struct Diagnostic {
    pub message: String,
    pub range: TextRange,
    pub fix: Option<SourceChange>,
}
/// Parameters for a workspace symbol search. Build one with `Query::new`,
/// then narrow it with the setters below.
#[derive(Debug)]
pub struct Query {
    query: String,
    lowercased: String,
    only_types: bool,
    libs: bool,
    exact: bool,
    limit: usize,
}
impl Query {
    /// A query with all flags off and no result limit; the lowercased form
    /// is precomputed once here.
    pub fn new(query: String) -> Query {
        let lowercased = query.to_lowercase();
        Query {
            query,
            lowercased,
            only_types: false,
            libs: false,
            exact: false,
            limit: usize::max_value(),
        }
    }
    /// Sets the `only_types` flag.
    pub fn only_types(&mut self) { self.only_types = true; }
    /// Sets the `libs` flag (search libraries instead of the workspace).
    pub fn libs(&mut self) { self.libs = true; }
    /// Sets the `exact` flag.
    pub fn exact(&mut self) { self.exact = true; }
    /// Caps the number of results.
    pub fn limit(&mut self, limit: usize) { self.limit = limit; }
}
/// Read-only analysis snapshot; cheap to clone (shares state via `Arc`).
#[derive(Clone, Debug)]
pub struct Analysis {
    imp: AnalysisImpl
}
impl Analysis {
    pub fn file_syntax(&self, file_id: FileId) -> File {
        self.imp.file_syntax(file_id).clone()
    }
    pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
        self.imp.file_line_index(file_id)
    }
    // The next few operations are purely syntactic: they work on a `File`
    // or a single file's syntax and delegate straight to ra_editor.
    pub fn extend_selection(&self, file: &File, range: TextRange) -> TextRange {
        ra_editor::extend_selection(file, range).unwrap_or(range)
    }
    pub fn matching_brace(&self, file: &File, offset: TextUnit) -> Option<TextUnit> {
        ra_editor::matching_brace(file, offset)
    }
    /// Debug representation of the file's syntax tree.
    pub fn syntax_tree(&self, file_id: FileId) -> String {
        let file = self.imp.file_syntax(file_id);
        ra_editor::syntax_tree(&file)
    }
    pub fn join_lines(&self, file_id: FileId, range: TextRange) -> SourceChange {
        let file = self.imp.file_syntax(file_id);
        SourceChange::from_local_edit(file_id, "join lines", ra_editor::join_lines(&file, range))
    }
    /// On-typing assist for `=`; `None` when no edit applies at `offset`.
    pub fn on_eq_typed(&self, file_id: FileId, offset: TextUnit) -> Option<SourceChange> {
        let file = self.imp.file_syntax(file_id);
        Some(SourceChange::from_local_edit(file_id, "add semicolon", ra_editor::on_eq_typed(&file, offset)?))
    }
    pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
        let file = self.imp.file_syntax(file_id);
        ra_editor::file_structure(&file)
    }
    // Whole-world operations below delegate to `AnalysisImpl`; the
    // `JobToken` lets long searches be cancelled.
    pub fn symbol_search(&self, query: Query, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
        self.imp.world_symbols(query, token)
    }
    pub fn approximately_resolve_symbol(&self, file_id: FileId, offset: TextUnit, token: &JobToken) -> Vec<(FileId, FileSymbol)> {
        self.imp.approximately_resolve_symbol(file_id, offset, token)
    }
    pub fn parent_module(&self, file_id: FileId) -> Vec<(FileId, FileSymbol)> {
        self.imp.parent_module(file_id)
    }
    pub fn crate_for(&self, file_id: FileId) -> Vec<CrateId> {
        self.imp.crate_for(file_id)
    }
    pub fn crate_root(&self, crate_id: CrateId) -> FileId {
        self.imp.crate_root(crate_id)
    }
    pub fn runnables(&self, file_id: FileId) -> Vec<Runnable> {
        let file = self.imp.file_syntax(file_id);
        ra_editor::runnables(&file)
    }
    pub fn highlight(&self, file_id: FileId) -> Vec<HighlightedRange> {
        let file = self.imp.file_syntax(file_id);
        ra_editor::highlight(&file)
    }
    pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Option<Vec<CompletionItem>> {
        let file = self.imp.file_syntax(file_id);
        ra_editor::scope_completion(&file, offset)
    }
    pub fn assists(&self, file_id: FileId, range: TextRange) -> Vec<SourceChange> {
        self.imp.assists(file_id, range)
    }
    pub fn diagnostics(&self, file_id: FileId) -> Vec<Diagnostic> {
        self.imp.diagnostics(file_id)
    }
}
/// A fully-prepared read-only source root, built up front via `prepare`
/// and then attached to a host with `AnalysisHost::add_library`.
#[derive(Debug)]
pub struct LibraryData {
    root: roots::ReadonlySourceRoot
}
impl LibraryData {
    pub fn prepare(files: Vec<(FileId, String)>, file_resolver: Arc<FileResolver>) -> LibraryData {
        let file_resolver = FileResolverImp::new(file_resolver);
        let root = roots::ReadonlySourceRoot::new(files, file_resolver);
        LibraryData { root }
    }
}

View file

@ -0,0 +1,157 @@
use std::sync::Arc;
use {
FileId,
db::{
Query, QueryRegistry, QueryCtx,
file_set
},
queries::file_syntax,
descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
};
/// Registers the module-map queries with the database.
pub(crate) fn register_queries(reg: &mut QueryRegistry) {
    reg.add(MODULE_DESCR, "MODULE_DESCR");
    reg.add(MODULE_TREE, "MODULE_TREE");
}
/// The module tree for the whole file set.
pub(crate) fn module_tree(ctx: QueryCtx) -> Arc<ModuleTreeDescriptor> {
    ctx.get(MODULE_TREE, ())
}
/// Per-file summary of `mod` declarations (query id 30).
const MODULE_DESCR: Query<FileId, ModuleDescriptor> = Query(30, |ctx, &file_id| {
    let file = file_syntax(ctx, file_id);
    ModuleDescriptor::new(file.ast())
});
/// Whole-project module tree (query id 31); reads the file set plus every
/// file's MODULE_DESCR.
const MODULE_TREE: Query<(), ModuleTreeDescriptor> = Query(31, |ctx, _| {
    let file_set = file_set(ctx);
    let mut files = Vec::new();
    for &file_id in file_set.0.iter() {
        let module_descr = ctx.get(MODULE_DESCR, file_id);
        files.push((file_id, module_descr));
    }
    ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.1)
});
#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use im;
    use relative_path::{RelativePath, RelativePathBuf};
    use {
        db::{Db},
        imp::FileResolverImp,
        FileId, FileResolver,
    };
    use super::*;
    /// In-memory `FileResolver` backed by a map from file id to path.
    #[derive(Debug)]
    struct FileMap(im::HashMap<FileId, RelativePathBuf>);
    impl FileResolver for FileMap {
        fn file_stem(&self, file_id: FileId) -> String {
            self.0[&file_id].file_stem().unwrap().to_string()
        }
        fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
            let path = self.0[&file_id].join(rel).normalize();
            self.0.iter()
                .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
                .next()
        }
    }
    /// Test harness: a database plus the bookkeeping needed to feed it
    /// file additions, removals, and edits.
    struct Fixture {
        next_file_id: u32,
        fm: im::HashMap<FileId, RelativePathBuf>,
        db: Db,
    }
    impl Fixture {
        fn new() -> Fixture {
            Fixture {
                next_file_id: 1,
                fm: im::HashMap::new(),
                db: Db::new(),
            }
        }
        fn add_file(&mut self, path: &str, text: &str) -> FileId {
            assert!(path.starts_with("/"));
            let file_id = FileId(self.next_file_id);
            self.next_file_id += 1;
            self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
            let mut new_state = self.db.state().clone();
            new_state.file_map.insert(file_id, Arc::new(text.to_string()));
            new_state.file_resolver = FileResolverImp::new(
                Arc::new(FileMap(self.fm.clone()))
            );
            // Adding a file changes the file set, hence `true`.
            self.db = self.db.with_changes(new_state, &[file_id], true);
            file_id
        }
        fn remove_file(&mut self, file_id: FileId) {
            self.fm.remove(&file_id);
            let mut new_state = self.db.state().clone();
            new_state.file_map.remove(&file_id);
            new_state.file_resolver = FileResolverImp::new(
                Arc::new(FileMap(self.fm.clone()))
            );
            self.db = self.db.with_changes(new_state, &[file_id], true);
        }
        fn change_file(&mut self, file_id: FileId, new_text: &str) {
            let mut new_state = self.db.state().clone();
            new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
            // Only the text changed; the file set did not.
            self.db = self.db.with_changes(new_state, &[file_id], false);
        }
        /// Asserts both the result of `parent_modules` and how many times
        /// each named query executed (the incrementality check).
        fn check_parent_modules(
            &self,
            file_id: FileId,
            expected: &[FileId],
            queries: &[(&'static str, u64)]
        ) {
            let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
            let actual = tree.parent_modules(file_id)
                .into_iter()
                .map(|link| link.owner(&tree))
                .collect::<Vec<_>>();
            assert_eq!(actual.as_slice(), expected);
            let mut counts = HashMap::new();
            events.into_iter()
                .for_each(|event| *counts.entry(event).or_insert(0) += 1);
            for &(query_id, expected_count) in queries.iter() {
                let actual_count = *counts.get(&query_id).unwrap_or(&0);
                assert_eq!(
                    actual_count,
                    expected_count,
                    "counts for {} differ",
                    query_id,
                )
            }
        }
    }
    #[test]
    fn test_parent_module() {
        let mut f = Fixture::new();
        let foo = f.add_file("/foo.rs", "");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
        let lib = f.add_file("/lib.rs", "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
        // Nothing changed: no per-file descriptor should recompute.
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
        f.change_file(lib, "");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
        f.change_file(lib, "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
        f.change_file(lib, "mod bar;");
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
        f.change_file(lib, "mod foo;");
        f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
        f.remove_file(lib);
        f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
    }
}

View file

@ -0,0 +1,39 @@
use std::sync::Arc;
use ra_syntax::File;
use ra_editor::LineIndex;
use {
FileId,
db::{Query, QueryCtx, QueryRegistry},
symbol_index::SymbolIndex,
};
pub(crate) use db::{file_text, file_set};
/// Parsed syntax tree of `file_id`; the cached tree behind the
/// FILE_SYNTAX query is cloned out here.
pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
    (&*ctx.get(FILE_SYNTAX, file_id)).clone()
}
/// Line index of `file_id` (offset <-> line/column conversion).
pub(crate) fn file_lines(ctx: QueryCtx, file_id: FileId) -> Arc<LineIndex> {
    ctx.get(FILE_LINES, file_id)
}
/// Symbol index of a single file.
pub(crate) fn file_symbols(ctx: QueryCtx, file_id: FileId) -> Arc<SymbolIndex> {
    ctx.get(FILE_SYMBOLS, file_id)
}
// Query 16: parse a file's text into a syntax tree.
const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
    File::parse(file_text(ctx, *file_id).as_str())
});
// Query 17: build the line index for a file's text.
const FILE_LINES: Query<FileId, LineIndex> = Query(17, |ctx, file_id: &FileId| {
    LineIndex::new(file_text(ctx, *file_id).as_str())
});
// Query 18: derive the per-file symbol index from the parsed syntax.
const FILE_SYMBOLS: Query<FileId, SymbolIndex> = Query(18, |ctx, file_id: &FileId| {
    SymbolIndex::for_file(*file_id, file_syntax(ctx, *file_id))
});
/// Registers the syntax-level queries with the registry, pairing each
/// with a human-readable name (the names show up in query traces).
pub(crate) fn register_queries(reg: &mut QueryRegistry) {
    reg.add(FILE_SYNTAX, "FILE_SYNTAX");
    reg.add(FILE_LINES, "FILE_LINES");
    reg.add(FILE_SYMBOLS, "FILE_SYMBOLS");
}

View file

@ -0,0 +1,178 @@
use std::{
collections::HashMap,
sync::Arc,
panic,
};
use once_cell::sync::OnceCell;
use rayon::prelude::*;
use ra_editor::LineIndex;
use ra_syntax::File;
use {
FileId,
imp::FileResolverImp,
symbol_index::SymbolIndex,
descriptors::{ModuleDescriptor, ModuleTreeDescriptor},
db::Db,
};
/// A set of source files together with the derived data the analyzer
/// needs from them: module tree, line indices, syntax trees, and
/// symbol indices.
pub(crate) trait SourceRoot {
    /// Returns `true` if `file_id` belongs to this root.
    fn contains(&self, file_id: FileId) -> bool;
    /// The module tree covering this root's files.
    fn module_tree(&self) -> Arc<ModuleTreeDescriptor>;
    /// Line index (offset <-> line/column) for the given file.
    fn lines(&self, file_id: FileId) -> Arc<LineIndex>;
    /// Parsed syntax tree for the given file.
    fn syntax(&self, file_id: FileId) -> File;
    /// Appends this root's symbol indices to `acc`.
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>);
}
/// A source root that supports incremental updates, backed by the
/// salsa-style query database `Db`. Changes produce a new root that
/// shares unchanged computation results with the old one.
#[derive(Default, Debug)]
pub(crate) struct WritableSourceRoot {
    db: Db,
}
impl WritableSourceRoot {
    /// Applies a batch of text changes (`Some(text)` = create/update,
    /// `None` = delete) and, optionally, a new file resolver, and
    /// returns a new root reflecting them. The database is told which
    /// files changed so it can invalidate only the affected queries.
    pub fn apply_changes(
        &self,
        changes: &mut dyn Iterator<Item=(FileId, Option<String>)>,
        file_resolver: Option<FileResolverImp>,
    ) -> WritableSourceRoot {
        let resolver_changed = file_resolver.is_some();
        let mut touched = Vec::new();
        let mut state = self.db.state().clone();
        for (file_id, text) in changes {
            touched.push(file_id);
            if let Some(text) = text {
                state.file_map.insert(file_id, Arc::new(text));
            } else {
                state.file_map.remove(&file_id);
            }
        }
        if let Some(resolver) = file_resolver {
            state.file_resolver = resolver
        }
        WritableSourceRoot {
            db: self.db.with_changes(state, &touched, resolver_changed)
        }
    }
}
impl SourceRoot for WritableSourceRoot {
    /// Module tree, computed through the query database.
    fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
        self.db.make_query(::module_map::module_tree)
    }
    /// A file is contained iff it is present in the current state's file map.
    fn contains(&self, file_id: FileId) -> bool {
        self.db.state().file_map.contains_key(&file_id)
    }
    /// Line index for `file_id`, via the cached query.
    fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
        self.db.make_query(|ctx| ::queries::file_lines(ctx, file_id))
    }
    /// Syntax tree for `file_id`, via the cached query.
    fn syntax(&self, file_id: FileId) -> File {
        self.db.make_query(|ctx| ::queries::file_syntax(ctx, file_id))
    }
    // Fix: dropped the needless explicit `'a` lifetime — elision yields
    // the identical signature, and it now matches the trait declaration.
    /// Appends the symbol index of every file in the current file set.
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) {
        self.db.make_query(|ctx| {
            let file_set = ::queries::file_set(ctx);
            let syms = file_set.0.iter()
                .map(|file_id| ::queries::file_symbols(ctx, *file_id));
            acc.extend(syms);
        });
    }
}
/// Per-file cache used by `ReadonlySourceRoot`: the raw text plus
/// lazily computed line index and syntax tree.
#[derive(Debug)]
struct FileData {
    text: String,
    // Computed on first access; see `FileData::lines`.
    lines: OnceCell<Arc<LineIndex>>,
    // Computed on first access; see `FileData::syntax`.
    syntax: OnceCell<File>,
}
impl FileData {
fn new(text: String) -> FileData {
FileData {
text,
syntax: OnceCell::new(),
lines: OnceCell::new(),
}
}
fn lines(&self) -> &Arc<LineIndex> {
self.lines.get_or_init(|| Arc::new(LineIndex::new(&self.text)))
}
fn syntax(&self) -> &File {
let text = &self.text;
let syntax = &self.syntax;
match panic::catch_unwind(panic::AssertUnwindSafe(|| syntax.get_or_init(|| File::parse(text)))) {
Ok(file) => file,
Err(err) => {
error!("Parser paniced on:\n------\n{}\n------\n", text);
panic::resume_unwind(err)
}
}
}
}
/// An immutable source root: the symbol index and module tree are
/// computed eagerly in `ReadonlySourceRoot::new`; per-file line index
/// and syntax tree are computed lazily via `FileData`.
#[derive(Debug)]
pub(crate) struct ReadonlySourceRoot {
    symbol_index: Arc<SymbolIndex>,
    file_map: HashMap<FileId, FileData>,
    module_tree: Arc<ModuleTreeDescriptor>,
}
impl ReadonlySourceRoot {
    /// Builds a root from a fixed set of files, eagerly computing
    /// (in parallel, via rayon) each file's syntax tree and module
    /// descriptor, the combined module tree, and the symbol index.
    pub(crate) fn new(files: Vec<(FileId, String)>, file_resolver: FileResolverImp) -> ReadonlySourceRoot {
        // Parse every file and extract its module descriptor in parallel.
        let parsed: Vec<_> = files.par_iter()
            .map(|(file_id, text)| {
                let syntax = File::parse(text);
                let descr = ModuleDescriptor::new(syntax.ast());
                (*file_id, syntax, descr)
            })
            .collect();
        let module_tree = ModuleTreeDescriptor::new(
            parsed.iter().map(|(id, _, descr)| (*id, descr)),
            &file_resolver,
        );
        // The symbol index needs owned `File`s, hence the clone.
        let symbol_index = SymbolIndex::for_files(
            parsed.par_iter().map(|(id, syntax, _)| (*id, syntax.clone()))
        );
        let file_map: HashMap<FileId, FileData> = files
            .into_iter()
            .map(|(id, text)| (id, FileData::new(text)))
            .collect();
        ReadonlySourceRoot {
            symbol_index: Arc::new(symbol_index),
            file_map,
            module_tree: Arc::new(module_tree),
        }
    }
    /// Looks up the per-file cache; panics on an unknown file, which
    /// would indicate a caller bug (callers should check `contains`).
    fn data(&self, file_id: FileId) -> &FileData {
        match self.file_map.get(&file_id) {
            None => panic!("unknown file: {:?}", file_id),
            Some(data) => data,
        }
    }
}
impl SourceRoot for ReadonlySourceRoot {
    /// Membership is a direct file-map lookup.
    fn contains(&self, file_id: FileId) -> bool {
        self.file_map.contains_key(&file_id)
    }
    /// The eagerly computed module tree (cheap refcount bump).
    fn module_tree(&self) -> Arc<ModuleTreeDescriptor> {
        self.module_tree.clone()
    }
    /// Lazily computed, then cached, line index for the file.
    fn lines(&self, file_id: FileId) -> Arc<LineIndex> {
        self.data(file_id).lines().clone()
    }
    /// Lazily computed, then cached, syntax tree for the file.
    fn syntax(&self, file_id: FileId) -> File {
        self.data(file_id).syntax().clone()
    }
    /// This root contributes a single, precomputed symbol index.
    fn symbols(&self, acc: &mut Vec<Arc<SymbolIndex>>) {
        acc.push(self.symbol_index.clone())
    }
}

View file

@ -0,0 +1,94 @@
use std::{
sync::Arc,
hash::{Hash, Hasher},
};
use ra_editor::{FileSymbol, file_symbols};
use ra_syntax::{
File,
SyntaxKind::{self, *},
};
use fst::{self, Streamer};
use rayon::prelude::*;
use {Query, FileId, JobToken};
/// Searchable index over a set of files' symbols: an fst map from
/// lowercased symbol name to a position in `symbols`.
#[derive(Debug)]
pub(crate) struct SymbolIndex {
    // Sorted by lowercased name (see `for_files`); `map` values are
    // indices into this vector.
    symbols: Vec<(FileId, FileSymbol)>,
    map: fst::Map,
}
impl Hash for SymbolIndex {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        // Only `symbols` is hashed: `map` is built purely from
        // `symbols` in `for_files`, so `symbols` alone determines
        // the whole index.
        self.symbols.hash(hasher)
    }
}
impl SymbolIndex {
    /// Builds an index over all symbols in `files`.
    ///
    /// Extraction and sorting run in parallel via rayon. Entries are
    /// keyed by the lowercased symbol name (case-insensitive search),
    /// sorted, and deduplicated before the fst map is built.
    pub(crate) fn for_files(files: impl ParallelIterator<Item=(FileId, File)>) -> SymbolIndex {
        let mut symbols = files
            .flat_map(|(file_id, file)| {
                file_symbols(&file)
                    .into_iter()
                    .map(move |symbol| {
                        // Lowercased name is the search key.
                        (symbol.name.as_str().to_lowercase(), (file_id, symbol))
                    })
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>();
        symbols.par_sort_by(|s1, s2| s1.0.cmp(&s2.0));
        // NOTE(review): dedup keeps only the first of several symbols
        // sharing a lowercased name — presumably because fst map keys
        // must be unique; confirm that dropping the rest is intended.
        symbols.dedup_by(|s1, s2| s1.0 == s2.0);
        let (names, symbols): (Vec<String>, Vec<(FileId, FileSymbol)>) =
            symbols.into_iter().unzip();
        // Keys arrive sorted (required by fst); value i refers back to
        // symbols[i].
        let map = fst::Map::from_iter(
            names.into_iter().zip(0u64..)
        ).unwrap();
        SymbolIndex { symbols, map }
    }
    /// Convenience wrapper: index a single file.
    pub(crate) fn for_file(file_id: FileId, file: File) -> SymbolIndex {
        SymbolIndex::for_files(::rayon::iter::once((file_id, file)))
    }
}
impl Query {
    /// Runs this query against the given symbol indices, returning up
    /// to `self.limit` matching `(file, symbol)` pairs.
    ///
    /// Matching is a case-insensitive subsequence search over each
    /// index's fst map; all searches are merged into one union stream.
    pub(crate) fn search(
        self,
        indices: &[Arc<SymbolIndex>],
        token: &JobToken,
    ) -> Vec<(FileId, FileSymbol)> {
        // One subsequence automaton per index, combined into a union.
        let mut op = fst::map::OpBuilder::new();
        for file_symbols in indices.iter() {
            let automaton = fst::automaton::Subsequence::new(&self.lowercased);
            op = op.add(file_symbols.map.search(automaton))
        }
        let mut stream = op.union();
        let mut res = Vec::new();
        while let Some((_, indexed_values)) = stream.next() {
            // Bail out once enough hits are collected or the job was
            // canceled by the caller.
            if res.len() >= self.limit || token.is_canceled() {
                break;
            }
            for indexed_value in indexed_values {
                // `index` says which input map matched; `value` is the
                // position within that index's `symbols` vector.
                let file_symbols = &indices[indexed_value.index];
                let idx = indexed_value.value as usize;
                let (file_id, symbol) = &file_symbols.symbols[idx];
                // Optional post-filters: type-only and exact-name match.
                if self.only_types && !is_type(symbol.kind) {
                    continue;
                }
                if self.exact && symbol.name != self.query {
                    continue;
                }
                res.push((*file_id, symbol.clone()));
            }
        }
        res
    }
}
fn is_type(kind: SyntaxKind) -> bool {
match kind {
STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_DEF => true,
_ => false,
}
}