Revert "Merge #2629"

This reverts commit cdc9d682b0, reversing
changes made to 90ef070db3.
Aleksey Kladov 2019-12-21 15:04:33 +01:00
parent a1f4c988e4
commit 973b5cf7e2
13 changed files with 166 additions and 71 deletions

View file

@@ -12,8 +12,8 @@ use hir_def::{
     resolver::HasResolver,
     type_ref::{Mutability, TypeRef},
     AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, HasModule, ImplId, LocalEnumVariantId,
-    LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
-    TypeParamId, UnionId,
+    LocalImportId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId,
+    TraitId, TypeAliasId, TypeParamId, UnionId,
 };
 use hir_expand::{
     diagnostics::DiagnosticSink,
@@ -180,11 +180,13 @@ impl Module {
     }

     /// Returns a `ModuleScope`: a set of items, visible in this module.
-    pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef)> {
+    pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef, Option<Import>)> {
         db.crate_def_map(self.id.krate)[self.id.local_id]
             .scope
             .entries()
-            .map(|(name, res)| (name.clone(), res.def.into()))
+            .map(|(name, res)| {
+                (name.clone(), res.def.into(), res.import.map(|id| Import { parent: self, id }))
+            })
             .collect()
     }
@@ -227,10 +229,10 @@ impl Module {
     }
 }

-// pub struct Import {
-//     pub(crate) parent: Module,
-//     pub(crate) id: LocalImportId,
-// }
+pub struct Import {
+    pub(crate) parent: Module,
+    pub(crate) id: LocalImportId,
+}

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct StructField {

View file

@@ -4,8 +4,8 @@ pub use hir_def::db::{
     BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery,
     DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery,
     FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage,
-    LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, StructDataQuery,
-    TraitDataQuery, TypeAliasDataQuery,
+    LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, RawItemsWithSourceMapQuery,
+    StaticDataQuery, StructDataQuery, TraitDataQuery, TypeAliasDataQuery,
 };
 pub use hir_expand::db::{
     AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,

View file

@@ -9,8 +9,8 @@ use hir_def::{
 use ra_syntax::ast;

 use crate::{
-    db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, MacroDef, Module,
-    Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
+    db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, Import, MacroDef,
+    Module, Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
 };

 pub use hir_expand::InFile;
@@ -117,6 +117,18 @@ impl HasSource for ImplBlock {
         self.id.lookup(db).source(db)
     }
 }
+
+impl HasSource for Import {
+    type Ast = Either<ast::UseTree, ast::ExternCrateItem>;
+    /// Returns the syntax of the last path segment corresponding to this import
+    fn source(self, db: &impl DefDatabase) -> InFile<Self::Ast> {
+        let src = self.parent.definition_source(db);
+        let (_, source_map) = db.raw_items_with_source_map(src.file_id);
+        let root = db.parse_or_expand(src.file_id).unwrap();
+        let ptr = source_map.get(self.id);
+        src.with_value(ptr.map_left(|it| it.to_node(&root)).map_right(|it| it.to_node(&root)))
+    }
+}

 impl HasSource for TypeParam {
     type Ast = Either<ast::TraitDef, ast::TypeParam>;

View file

@@ -40,8 +40,8 @@ mod from_source;
 pub use crate::{
     code_model::{
         Adt, AssocItem, AttrDef, Const, Crate, CrateDependency, DefWithBody, Docs, Enum,
-        EnumVariant, FieldSource, Function, GenericDef, HasAttrs, ImplBlock, Local, MacroDef,
-        Module, ModuleDef, ScopeDef, Static, Struct, StructField, Trait, Type, TypeAlias,
+        EnumVariant, FieldSource, Function, GenericDef, HasAttrs, ImplBlock, Import, Local,
+        MacroDef, Module, ModuleDef, ScopeDef, Static, Struct, StructField, Trait, Type, TypeAlias,
         TypeParam, Union, VariantDef,
     },
     from_source::FromSource,

View file

@@ -13,7 +13,10 @@ use crate::{
     docs::Documentation,
     generics::GenericParams,
     lang_item::{LangItemTarget, LangItems},
-    nameres::{raw::RawItems, CrateDefMap},
+    nameres::{
+        raw::{ImportSourceMap, RawItems},
+        CrateDefMap,
+    },
     AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, FunctionId, FunctionLoc,
     GenericDefId, ImplId, ImplLoc, ModuleId, StaticId, StaticLoc, StructId, StructLoc, TraitId,
     TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc,
@@ -43,6 +46,12 @@ pub trait InternDatabase: SourceDatabase {
 #[salsa::query_group(DefDatabaseStorage)]
 pub trait DefDatabase: InternDatabase + AstDatabase {
+    #[salsa::invoke(RawItems::raw_items_with_source_map_query)]
+    fn raw_items_with_source_map(
+        &self,
+        file_id: HirFileId,
+    ) -> (Arc<RawItems>, Arc<ImportSourceMap>);
+
     #[salsa::invoke(RawItems::raw_items_query)]
     fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;
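Note: the restored query pair follows a common salsa idiom — compute the expensive result once and let the cheaper query project out of it (`raw_items` is literally `raw_items_with_source_map(file_id).0`). A minimal, self-contained sketch of that shape, with plain functions instead of salsa queries and hypothetical stand-in types, purely to illustrate the pattern:

use std::sync::Arc;

// Hypothetical stand-ins for the real hir_def types.
#[derive(Debug, Default)]
struct RawItems;
#[derive(Debug, Default)]
struct ImportSourceMap;

/// The "heavy" computation builds the items and their source map together,
/// once per file.
fn raw_items_with_source_map(file_id: u32) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
    let _ = file_id; // a real implementation would parse the file here
    (Arc::new(RawItems::default()), Arc::new(ImportSourceMap::default()))
}

/// The common case only needs the items, so it just projects the first
/// component, mirroring the shape of `RawItems::raw_items_query`.
fn raw_items(file_id: u32) -> Arc<RawItems> {
    raw_items_with_source_map(file_id).0
}

fn main() {
    println!("{:?}", raw_items(0));
}

In the real code salsa memoizes both queries separately, which is also why the new `RawItemsWithSourceMapQuery` has to be added to the sweep list further down in this commit.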

View file

@@ -5,7 +5,7 @@ use hir_expand::name::Name;
 use once_cell::sync::Lazy;
 use rustc_hash::FxHashMap;

-use crate::{per_ns::PerNs, BuiltinType, ImplId, MacroDefId, ModuleDefId, TraitId};
+use crate::{per_ns::PerNs, BuiltinType, ImplId, LocalImportId, MacroDefId, ModuleDefId, TraitId};

 #[derive(Debug, Default, PartialEq, Eq)]
 pub struct ItemScope {
@@ -30,7 +30,7 @@ static BUILTIN_SCOPE: Lazy<FxHashMap<Name, Resolution>> = Lazy::new(|| {
     BuiltinType::ALL
         .iter()
         .map(|(name, ty)| {
-            (name.clone(), Resolution { def: PerNs::types(ty.clone().into()), declaration: false })
+            (name.clone(), Resolution { def: PerNs::types(ty.clone().into()), import: None })
         })
         .collect()
 });
@@ -53,8 +53,10 @@ impl ItemScope {
     }

     pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
-        self.entries().filter(|(_name, res)| res.declaration).flat_map(|(_name, res)| {
-            res.def.take_types().into_iter().chain(res.def.take_values().into_iter())
-        })
+        self.entries()
+            .filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
+            .flat_map(|per_ns| {
+                per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
+            })
     }
@@ -110,26 +112,38 @@ impl ItemScope {
         self.legacy_macros.insert(name, mac);
     }

-    pub(crate) fn push_res(&mut self, name: Name, res: &Resolution, declaration: bool) -> bool {
+    pub(crate) fn push_res(
+        &mut self,
+        name: Name,
+        res: &Resolution,
+        import: Option<LocalImportId>,
+    ) -> bool {
         let mut changed = false;
         let existing = self.items.entry(name.clone()).or_default();

         if existing.def.types.is_none() && res.def.types.is_some() {
             existing.def.types = res.def.types;
-            existing.declaration |= declaration;
+            existing.import = import.or(res.import);
             changed = true;
         }
         if existing.def.values.is_none() && res.def.values.is_some() {
             existing.def.values = res.def.values;
-            existing.declaration |= declaration;
+            existing.import = import.or(res.import);
             changed = true;
         }
         if existing.def.macros.is_none() && res.def.macros.is_some() {
             existing.def.macros = res.def.macros;
-            existing.declaration |= declaration;
+            existing.import = import.or(res.import);
             changed = true;
         }
+        if existing.def.is_none()
+            && res.def.is_none()
+            && existing.import.is_none()
+            && res.import.is_some()
+        {
+            existing.import = res.import;
+        }
         changed
     }
@@ -146,5 +160,6 @@
 pub struct Resolution {
     /// None for unresolved
     pub def: PerNs,
-    pub declaration: bool,
+    /// ident by which this is imported into local scope.
+    pub import: Option<LocalImportId>,
 }
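To make the restored `Resolution` shape concrete: instead of a `declaration: bool` flag, each scope entry remembers which import (if any) introduced the name, and declarations are simply the entries with no import. Below is a deliberately simplified, self-contained toy version of that merge rule — a single-namespace stand-in for `PerNs` and a plain `u32` standing in for `LocalImportId` — not the real hir_def code:

// Toy stand-ins: `Def` for a resolved item, `u32` for LocalImportId.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Def(&'static str);

#[derive(Default, Debug)]
struct Resolution {
    def: Option<Def>,    // single-namespace stand-in for PerNs
    import: Option<u32>, // which import introduced the name, if any
}

impl Resolution {
    // Mirrors the shape of ItemScope::push_res: only fill empty slots,
    // and record the import that provided the definition.
    fn push(&mut self, incoming: &Resolution, import: Option<u32>) -> bool {
        let mut changed = false;
        if self.def.is_none() && incoming.def.is_some() {
            self.def = incoming.def;
            self.import = import.or(incoming.import);
            changed = true;
        }
        // An unresolved import still reserves the name in the scope.
        if self.def.is_none()
            && incoming.def.is_none()
            && self.import.is_none()
            && incoming.import.is_some()
        {
            self.import = incoming.import;
        }
        changed
    }

    // Mirrors ItemScope::declarations: locally declared items carry no import.
    fn is_declaration(&self) -> bool {
        self.import.is_none() && self.def.is_some()
    }
}

fn main() {
    let mut slot = Resolution::default();
    // An import brings `Bar` in first (import id 0)...
    slot.push(&Resolution { def: Some(Def("Bar")), import: None }, Some(0));
    // ...a later resolution for the same name does not overwrite it.
    slot.push(&Resolution { def: Some(Def("Other")), import: None }, Some(1));
    assert_eq!(slot.def, Some(Def("Bar")));
    assert_eq!(slot.import, Some(0));
    assert!(!slot.is_declaration());
}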

View file

@@ -51,6 +51,10 @@ use ra_syntax::{ast, AstNode};
 use crate::body::Expander;
 use crate::builtin_type::BuiltinType;

+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LocalImportId(RawId);
+impl_arena_id!(LocalImportId);
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct ModuleId {
     pub krate: CrateId,

View file

@@ -26,7 +26,8 @@ use crate::{
     path::{ModPath, PathKind},
     per_ns::PerNs,
     AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern,
-    LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
+    LocalImportId, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc,
+    TypeAliasLoc, UnionLoc,
 };

 pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
@@ -92,7 +93,7 @@ impl PartialResolvedImport {
 #[derive(Clone, Debug, Eq, PartialEq)]
 struct ImportDirective {
     module_id: LocalModuleId,
-    import_id: raw::LocalImportId,
+    import_id: LocalImportId,
     import: raw::ImportData,
     status: PartialResolvedImport,
 }
@@ -109,7 +110,7 @@ struct MacroDirective {
 struct DefCollector<'a, DB> {
     db: &'a DB,
     def_map: CrateDefMap,
-    glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, raw::LocalImportId)>>,
+    glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, LocalImportId)>>,
     unresolved_imports: Vec<ImportDirective>,
     resolved_imports: Vec<ImportDirective>,
     unexpanded_macros: Vec<MacroDirective>,
@@ -217,7 +218,8 @@ where
         if export {
             self.update(
                 self.def_map.root,
-                &[(name, Resolution { def: PerNs::macros(macro_), declaration: false })],
+                None,
+                &[(name, Resolution { def: PerNs::macros(macro_), import: None })],
             );
         }
     }
@@ -372,7 +374,7 @@ where
                 // Module scoped macros is included
                 let items = scope.collect_resolutions();
-                self.update(module_id, &items);
+                self.update(module_id, Some(import_id), &items);
             } else {
                 // glob import from same crate => we do an initial
                 // import, and then need to propagate any further
@@ -382,7 +384,7 @@ where
                 // Module scoped macros is included
                 let items = scope.collect_resolutions();
-                self.update(module_id, &items);
+                self.update(module_id, Some(import_id), &items);

                 // record the glob import in case we add further items
                 let glob = self.glob_imports.entry(m.local_id).or_default();
                 if !glob.iter().any(|it| *it == (module_id, import_id)) {
@@ -402,12 +404,12 @@ where
                         let variant = EnumVariantId { parent: e, local_id };
                         let res = Resolution {
                             def: PerNs::both(variant.into(), variant.into()),
-                            declaration: false,
+                            import: Some(import_id),
                         };
                         (name, res)
                     })
                     .collect::<Vec<_>>();
-                self.update(module_id, &resolutions);
+                self.update(module_id, Some(import_id), &resolutions);
             }
             Some(d) => {
                 log::debug!("glob import {:?} from non-module/enum {:?}", import, d);
@@ -429,21 +431,27 @@ where
                         }
                     }

-                    let resolution = Resolution { def, declaration: false };
-                    self.update(module_id, &[(name, resolution)]);
+                    let resolution = Resolution { def, import: Some(import_id) };
+                    self.update(module_id, Some(import_id), &[(name, resolution)]);
                 }
                 None => tested_by!(bogus_paths),
             }
         }
     }

-    fn update(&mut self, module_id: LocalModuleId, resolutions: &[(Name, Resolution)]) {
-        self.update_recursive(module_id, resolutions, 0)
+    fn update(
+        &mut self,
+        module_id: LocalModuleId,
+        import: Option<LocalImportId>,
+        resolutions: &[(Name, Resolution)],
+    ) {
+        self.update_recursive(module_id, import, resolutions, 0)
     }

     fn update_recursive(
         &mut self,
         module_id: LocalModuleId,
+        import: Option<LocalImportId>,
         resolutions: &[(Name, Resolution)],
         depth: usize,
     ) {
@@ -454,7 +462,7 @@ where
         let scope = &mut self.def_map.modules[module_id].scope;
         let mut changed = false;
         for (name, res) in resolutions {
-            changed |= scope.push_res(name.clone(), res, depth == 0 && res.declaration);
+            changed |= scope.push_res(name.clone(), res, import);
         }

         if !changed {
@@ -467,9 +475,9 @@ where
             .flat_map(|v| v.iter())
             .cloned()
             .collect::<Vec<_>>();
-        for (glob_importing_module, _glob_import) in glob_imports {
+        for (glob_importing_module, glob_import) in glob_imports {
             // We pass the glob import so that the tracked import in those modules is that glob import
-            self.update_recursive(glob_importing_module, resolutions, depth + 1);
+            self.update_recursive(glob_importing_module, Some(glob_import), resolutions, depth + 1);
         }
     }
@@ -711,9 +719,9 @@ where
             def: PerNs::types(
                 ModuleId { krate: self.def_collector.def_map.krate, local_id: res }.into(),
             ),
-            declaration: true,
+            import: None,
         };
-        self.def_collector.update(self.module_id, &[(name, resolution)]);
+        self.def_collector.update(self.module_id, None, &[(name, resolution)]);
         res
     }
@@ -783,8 +791,8 @@ where
                 PerNs::types(def.into())
             }
         };
-        let resolution = Resolution { def, declaration: true };
-        self.def_collector.update(self.module_id, &[(name, resolution)])
+        let resolution = Resolution { def, import: None };
+        self.def_collector.update(self.module_id, None, &[(name, resolution)])
     }

     fn collect_derives(&mut self, attrs: &Attrs, def: &raw::DefData) {

View file

@@ -7,24 +7,24 @@
 use std::{ops::Index, sync::Arc};

+use either::Either;
 use hir_expand::{
     ast_id_map::AstIdMap,
     db::AstDatabase,
     hygiene::Hygiene,
     name::{AsName, Name},
 };
-use ra_arena::{impl_arena_id, Arena, RawId};
+use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
 use ra_syntax::{
     ast::{self, AttrsOwner, NameOwner},
-    AstNode,
+    AstNode, AstPtr,
 };
 use test_utils::tested_by;

-use crate::{attr::Attrs, db::DefDatabase, path::ModPath, FileAstId, HirFileId, InFile};
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub(super) struct LocalImportId(RawId);
-impl_arena_id!(LocalImportId);
+use crate::{
+    attr::Attrs, db::DefDatabase, path::ModPath, trace::Trace, FileAstId, HirFileId, InFile,
+    LocalImportId,
+};

 /// `RawItems` is a set of top-level items in a file (except for impls).
 ///
@@ -41,14 +41,35 @@ pub struct RawItems {
     items: Vec<RawItem>,
 }

+#[derive(Debug, Default, PartialEq, Eq)]
+pub struct ImportSourceMap {
+    map: ArenaMap<LocalImportId, ImportSourcePtr>,
+}
+
+type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>;
+
+impl ImportSourceMap {
+    pub fn get(&self, import: LocalImportId) -> ImportSourcePtr {
+        self.map[import].clone()
+    }
+}
+
 impl RawItems {
     pub(crate) fn raw_items_query(
         db: &(impl DefDatabase + AstDatabase),
         file_id: HirFileId,
     ) -> Arc<RawItems> {
+        db.raw_items_with_source_map(file_id).0
+    }
+
+    pub(crate) fn raw_items_with_source_map_query(
+        db: &(impl DefDatabase + AstDatabase),
+        file_id: HirFileId,
+    ) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
         let mut collector = RawItemsCollector {
             raw_items: RawItems::default(),
             source_ast_id_map: db.ast_id_map(file_id),
+            imports: Trace::new(),
             file_id,
             hygiene: Hygiene::new(db, file_id),
         };
@@ -59,8 +80,11 @@ impl RawItems {
                 collector.process_module(None, item_list);
             }
         }
-        let raw_items = collector.raw_items;
-        Arc::new(raw_items)
+        let mut raw_items = collector.raw_items;
+        let (arena, map) = collector.imports.into_arena_and_map();
+        raw_items.imports = arena;
+        let source_map = ImportSourceMap { map };
+        (Arc::new(raw_items), Arc::new(source_map))
     }
@@ -199,6 +223,7 @@ pub(super) struct ImplData {
 struct RawItemsCollector {
     raw_items: RawItems,
+    imports: Trace<LocalImportId, ImportData, ImportSourcePtr>,
     source_ast_id_map: Arc<AstIdMap>,
     file_id: HirFileId,
     hygiene: Hygiene,
@@ -305,7 +330,7 @@ impl RawItemsCollector {
         ModPath::expand_use_item(
             InFile { value: use_item, file_id: self.file_id },
             &self.hygiene,
-            |path, _use_tree, is_glob, alias| {
+            |path, use_tree, is_glob, alias| {
                 let import_data = ImportData {
                     path,
                     alias,
@@ -314,11 +339,11 @@
                     is_extern_crate: false,
                     is_macro_use: false,
                 };
-                buf.push(import_data);
+                buf.push((import_data, Either::Left(AstPtr::new(use_tree))));
             },
         );

-        for import_data in buf {
-            self.push_import(current_module, attrs.clone(), import_data);
+        for (import_data, ptr) in buf {
+            self.push_import(current_module, attrs.clone(), import_data, ptr);
         }
     }
@@ -341,7 +366,12 @@
             is_extern_crate: true,
             is_macro_use,
         };
-        self.push_import(current_module, attrs, import_data);
+        self.push_import(
+            current_module,
+            attrs,
+            import_data,
+            Either::Right(AstPtr::new(&extern_crate)),
+        );
     }
 }
@@ -372,8 +402,14 @@
         self.push_item(current_module, attrs, RawItemKind::Impl(imp))
     }

-    fn push_import(&mut self, current_module: Option<Module>, attrs: Attrs, data: ImportData) {
-        let import = self.raw_items.imports.alloc(data);
+    fn push_import(
+        &mut self,
+        current_module: Option<Module>,
+        attrs: Attrs,
+        data: ImportData,
+        source: ImportSourcePtr,
+    ) {
+        let import = self.imports.alloc(|| source, || data);
         self.push_item(current_module, attrs, RawItemKind::Import(import))
     }

View file

@@ -18,6 +18,10 @@ pub(crate) struct Trace<ID: ArenaId, T, V> {
 }

 impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> {
+    pub(crate) fn new() -> Trace<ID, T, V> {
+        Trace { arena: Some(Arena::default()), map: Some(ArenaMap::default()), len: 0 }
+    }
+
     pub(crate) fn new_for_arena() -> Trace<ID, T, V> {
         Trace { arena: Some(Arena::default()), map: None, len: 0 }
     }
@@ -48,4 +52,8 @@ impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> {
     pub(crate) fn into_map(mut self) -> ArenaMap<ID, V> {
         self.map.take().unwrap()
     }
+
+    pub(crate) fn into_arena_and_map(mut self) -> (Arena<ID, T>, ArenaMap<ID, V>) {
+        (self.arena.take().unwrap(), self.map.take().unwrap())
+    }
 }
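`Trace` is what lets the raw-items collector build the id-allocating arena and the per-id source map in a single pass (see `self.imports.alloc(|| source, || data)` above). A self-contained analogue of that pattern — a plain `Vec` as the arena, `usize` as the id, and a `HashMap` as the side map — looks roughly like this; the real `Trace` additionally supports arena-only and map-only modes (`new_for_arena`, `into_map`):

use std::collections::HashMap;

/// Records a value and an associated "source" for every allocated id.
struct Trace<T, V> {
    arena: Vec<T>,
    map: HashMap<usize, V>,
}

impl<T, V> Trace<T, V> {
    fn new() -> Self {
        Trace { arena: Vec::new(), map: HashMap::new() }
    }

    /// Allocate the next id, storing both the data and its source pointer.
    /// The closure order mirrors the real API: source first, data second.
    fn alloc(&mut self, source: impl FnOnce() -> V, data: impl FnOnce() -> T) -> usize {
        let id = self.arena.len();
        self.arena.push(data());
        self.map.insert(id, source());
        id
    }

    fn into_arena_and_map(self) -> (Vec<T>, HashMap<usize, V>) {
        (self.arena, self.map)
    }
}

fn main() {
    // `ImportData` stand-in on one side, a source "pointer" on the other.
    let mut imports: Trace<&str, &str> = Trace::new();
    let id = imports.alloc(|| "use foo::Bar;", || "import data for Bar");
    let (arena, map) = imports.into_arena_and_map();
    assert_eq!(arena[id], "import data for Bar");
    assert_eq!(map[&id], "use foo::Bar;");
}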

View file

@@ -270,6 +270,7 @@ impl RootDatabase {
         self.query(hir::db::AstIdMapQuery).sweep(sweep);
+        self.query(hir::db::RawItemsWithSourceMapQuery).sweep(sweep);
         self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
         self.query(hir::db::ExprScopesQuery).sweep(sweep);
@@ -308,6 +309,7 @@ impl RootDatabase {
             hir::db::StructDataQuery
             hir::db::EnumDataQuery
             hir::db::TraitDataQuery
+            hir::db::RawItemsWithSourceMapQuery
             hir::db::RawItemsQuery
             hir::db::CrateDefMapQuery
             hir::db::GenericParamsQuery

View file

@@ -1,6 +1,7 @@
 //! FIXME: write short doc here

-use hir::{Adt, PathResolution, ScopeDef};
+use either::Either;
+use hir::{Adt, HasSource, PathResolution};
 use ra_syntax::AstNode;
 use test_utils::tested_by;
@@ -18,15 +19,17 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
     match def {
         hir::ModuleDef::Module(module) => {
             let module_scope = module.scope(ctx.db);
-            for (name, def) in module_scope {
-                if ctx.use_item_syntax.is_some() {
+            for (name, def, import) in module_scope {
                 if let hir::ScopeDef::ModuleDef(hir::ModuleDef::BuiltinType(..)) = def {
+                    if ctx.use_item_syntax.is_some() {
                         tested_by!(dont_complete_primitive_in_use);
                         continue;
                     }
-                    if let ScopeDef::Unknown = def {
-                        if let Some(name_ref) = ctx.name_ref.as_ref() {
-                            if &name_ref.syntax().text() == name.to_string().as_str() {
+                }
+                if Some(module) == ctx.module {
+                    if let Some(import) = import {
+                        if let Either::Left(use_tree) = import.source(ctx.db).value {
+                            if use_tree.syntax().text_range().contains_inclusive(ctx.offset) {
                                 // for `use self::foo<|>`, don't suggest `foo` as a completion
                                 tested_by!(dont_complete_current_use);
                                 continue;
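The restored check boils down to: if a scope entry was brought in by an import whose `use` tree covers the completion offset, it is the very name being typed and should not be offered. A stripped-down, self-contained version of that offset test (a hand-rolled `TextRange` instead of ra_syntax's, purely illustrative of the logic, not the actual completion code):

/// Minimal stand-in for a syntax node's text range.
#[derive(Clone, Copy, Debug)]
struct TextRange {
    start: u32,
    end: u32,
}

impl TextRange {
    fn contains_inclusive(&self, offset: u32) -> bool {
        self.start <= offset && offset <= self.end
    }
}

/// Decide whether a completion candidate should be skipped because the
/// cursor sits inside the `use` tree that introduced it (`use self::foo<|>`).
fn skip_current_use(use_tree_range: Option<TextRange>, offset: u32) -> bool {
    match use_tree_range {
        Some(range) => range.contains_inclusive(offset),
        None => false,
    }
}

fn main() {
    // `use self::foo` spans offsets 0..=13, cursor right after `foo`.
    let range = TextRange { start: 0, end: 13 };
    assert!(skip_current_use(Some(range), 13));
    // A name imported elsewhere in the file is still suggested.
    assert!(!skip_current_use(Some(TextRange { start: 40, end: 60 }), 13));
    // Locally declared items carry no import at all.
    assert!(!skip_current_use(None, 13));
}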

View file

@@ -18,7 +18,6 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) analyzer: hir::SourceAnalyzer,
     pub(super) offset: TextUnit,
     pub(super) token: SyntaxToken,
-    pub(super) name_ref: Option<ast::NameRef>,
     pub(super) module: Option<hir::Module>,
     pub(super) function_syntax: Option<ast::FnDef>,
     pub(super) use_item_syntax: Option<ast::UseItem>,
@@ -69,7 +68,6 @@ impl<'a> CompletionContext<'a> {
             analyzer,
             token,
             offset: position.offset,
-            name_ref: None,
             module,
             function_syntax: None,
             use_item_syntax: None,
@@ -144,8 +142,6 @@
     }

     fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
-        self.name_ref =
-            find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start());
         let name_range = name_ref.syntax().text_range();
         if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() {
             self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);