Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-10-29 02:52:11 +00:00
Merge pull request #19559 from ChayimFriedman2/rust-186
internal: Switch to Rust 1.86.0
commit 55efab890b
81 changed files with 1010 additions and 1258 deletions
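Note on the change: this PR drops base-db's custom `Upcast` trait and the `db.upcast()` calls that went with it, presumably because Rust 1.86.0 stabilized trait upcasting, so a `&dyn DefDatabase` now coerces directly to a supertrait object such as `&dyn ExpandDatabase`. A minimal sketch of that pattern follows, using simplified stand-in traits and a made-up `MockDb` type rather than rust-analyzer's real database traits:

trait ExpandDatabase {
    fn expand(&self) -> String;
}

// `DefDatabase` declares `ExpandDatabase` as a supertrait. With trait
// upcasting stable (Rust 1.86.0+), a `&dyn DefDatabase` coerces to
// `&dyn ExpandDatabase` at the call site, so no `Upcast` helper trait
// and no `db.upcast()` calls are needed.
trait DefDatabase: ExpandDatabase {
    fn def_map(&self) -> String;
}

fn needs_expand(db: &dyn ExpandDatabase) -> String {
    db.expand()
}

fn needs_def(db: &dyn DefDatabase) -> String {
    // Before 1.86 this would have been written `needs_expand(db.upcast())`.
    format!("{} / {}", needs_expand(db), db.def_map())
}

// `MockDb` is a made-up type for this sketch, not part of the PR.
struct MockDb;

impl ExpandDatabase for MockDb {
    fn expand(&self) -> String {
        "expanded".to_owned()
    }
}

impl DefDatabase for MockDb {
    fn def_map(&self) -> String {
        "def map".to_owned()
    }
}

fn main() {
    println!("{}", needs_def(&MockDb));
}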
@@ -43,10 +43,6 @@ macro_rules! impl_intern_key {
     };
 }
 
-pub trait Upcast<T: ?Sized> {
-    fn upcast(&self) -> &T;
-}
-
 pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16;
 pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
 pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;

@@ -46,7 +46,7 @@ impl Attrs {
     }
 
     pub(crate) fn filter(db: &dyn DefDatabase, krate: Crate, raw_attrs: RawAttrs) -> Attrs {
-        Attrs(raw_attrs.filter(db.upcast(), krate))
+        Attrs(raw_attrs.filter(db, krate))
     }
 }
 
@@ -507,7 +507,7 @@ impl AttrsWithOwner {
             // FIXME: We should be never getting `None` here.
             match src.value.get(it.local_id()) {
                Some(val) => RawAttrs::from_attrs_owner(
-                    db.upcast(),
+                    db,
                    src.with_value(val),
                    db.span_map(src.file_id).as_ref(),
                ),

@@ -519,7 +519,7 @@ impl AttrsWithOwner {
             // FIXME: We should be never getting `None` here.
             match src.value.get(it.local_id()) {
                Some(val) => RawAttrs::from_attrs_owner(
-                    db.upcast(),
+                    db,
                    src.with_value(val),
                    db.span_map(src.file_id).as_ref(),
                ),

@@ -531,7 +531,7 @@ impl AttrsWithOwner {
             // FIXME: We should be never getting `None` here.
             match src.value.get(it.local_id) {
                Some(val) => RawAttrs::from_attrs_owner(
-                    db.upcast(),
+                    db,
                    src.with_value(val),
                    db.span_map(src.file_id).as_ref(),
                ),

@@ -544,7 +544,7 @@ impl AttrsWithOwner {
            AttrDefId::UseId(it) => attrs_from_item_tree_loc(db, it),
        };
 
-        let attrs = raw_attrs.filter(db.upcast(), def.krate(db));
+        let attrs = raw_attrs.filter(db, def.krate(db));
        Attrs(attrs)
    }
 
@@ -1,5 +1,5 @@
 //! Defines database & queries for name resolution.
-use base_db::{Crate, RootQueryDb, SourceDatabase, Upcast};
+use base_db::{Crate, RootQueryDb, SourceDatabase};
 use either::Either;
 use hir_expand::{HirFileId, MacroDefId, db::ExpandDatabase};
 use intern::sym;

@@ -100,13 +100,7 @@ pub trait InternDatabase: RootQueryDb {
 }
 
 #[query_group::query_group]
-pub trait DefDatabase:
-    InternDatabase
-    + ExpandDatabase
-    + SourceDatabase
-    + Upcast<dyn ExpandDatabase>
-    + Upcast<dyn RootQueryDb>
-{
+pub trait DefDatabase: InternDatabase + ExpandDatabase + SourceDatabase {
     /// Whether to expand procedural macros during name resolution.
     #[salsa::input]
     fn expand_proc_attr_macros(&self) -> bool;

@@ -381,7 +375,7 @@ fn include_macro_invoc(
        .flat_map(|m| m.scope.iter_macro_invoc())
        .filter_map(|invoc| {
            db.lookup_intern_macro_call(*invoc.1)
-                .include_file_id(db.upcast(), *invoc.1)
+                .include_file_id(db, *invoc.1)
                .map(|x| (*invoc.1, x))
        })
        .collect()

@@ -72,7 +72,7 @@ impl Expander {
        krate: Crate,
        has_attrs: &dyn HasAttrs,
    ) -> Attrs {
-        Attrs::filter(db, krate, RawAttrs::new(db.upcast(), has_attrs, self.span_map.as_ref()))
+        Attrs::filter(db, krate, RawAttrs::new(db, has_attrs, self.span_map.as_ref()))
    }
 
    pub(super) fn is_cfg_enabled(

@@ -103,7 +103,7 @@ impl Expander {
        let result = self.within_limit(db, |this| {
            let macro_call = this.in_file(&macro_call);
            match macro_call.as_call_id_with_errors(
-                db.upcast(),
+                db,
                krate,
                |path| resolver(path).map(|it| db.macro_def(it)),
                eager_callback,

@@ -178,7 +178,7 @@ impl Expander {
            self.recursion_depth = u32::MAX;
            cov_mark::hit!(your_stack_belongs_to_me);
            return ExpandResult::only_err(ExpandError::new(
-                db.macro_arg_considering_derives(call_id, &call_id.lookup(db.upcast()).kind).2,
+                db.macro_arg_considering_derives(call_id, &call_id.lookup(db).kind).2,
                ExpandErrorKind::RecursionOverflow,
            ));
        }

@@ -78,7 +78,7 @@ pub(super) fn lower_path(
                        return None;
                    }
                    break kind = resolve_crate_root(
-                        collector.db.upcast(),
+                        collector.db,
                        collector.expander.ctx_for_range(name_ref.syntax().text_range()),
                    )
                    .map(PathKind::DollarCrate)

@@ -216,7 +216,7 @@ pub(super) fn lower_path(
    let syn_ctxt = collector.expander.ctx_for_range(path.segment()?.syntax().text_range());
    if let Some(macro_call_id) = syn_ctxt.outer_expn(collector.db) {
        if collector.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
-            kind = match resolve_crate_root(collector.db.upcast(), syn_ctxt) {
+            kind = match resolve_crate_root(collector.db, syn_ctxt) {
                Some(crate_root) => PathKind::DollarCrate(crate_root),
                None => PathKind::Crate,
            }

@@ -53,19 +53,18 @@ pub(crate) fn print_body_hir(
    edition: Edition,
 ) -> String {
    let header = match owner {
-        DefWithBodyId::FunctionId(it) => it
-            .lookup(db)
-            .id
-            .resolved(db, |it| format!("fn {}", it.name.display(db.upcast(), edition))),
+        DefWithBodyId::FunctionId(it) => {
+            it.lookup(db).id.resolved(db, |it| format!("fn {}", it.name.display(db, edition)))
+        }
        DefWithBodyId::StaticId(it) => it
            .lookup(db)
            .id
-            .resolved(db, |it| format!("static {} = ", it.name.display(db.upcast(), edition))),
+            .resolved(db, |it| format!("static {} = ", it.name.display(db, edition))),
        DefWithBodyId::ConstId(it) => it.lookup(db).id.resolved(db, |it| {
            format!(
                "const {} = ",
                match &it.name {
-                    Some(name) => name.display(db.upcast(), edition).to_string(),
+                    Some(name) => name.display(db, edition).to_string(),
                    None => "_".to_owned(),
                }
            )

@@ -75,8 +74,8 @@ pub(crate) fn print_body_hir(
            let enum_loc = loc.parent.lookup(db);
            format!(
                "enum {}::{}",
-                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
-                loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
+                enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
+                loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
            )
        }
    };

@@ -162,7 +161,7 @@ pub(crate) fn print_struct(
        wln!(p, "#[fundamental]");
    }
    w!(p, "struct ");
-    w!(p, "{}", name.display(db.upcast(), edition));
+    w!(p, "{}", name.display(db, edition));
    print_generic_params(db, generic_params, &mut p);
    match shape {
        FieldsShape::Record => wln!(p, " {{...}}"),

@@ -219,7 +218,7 @@ pub(crate) fn print_function(
        w!(p, "extern \"{}\" ", abi.as_str());
    }
    w!(p, "fn ");
-    w!(p, "{}", name.display(db.upcast(), edition));
+    w!(p, "{}", name.display(db, edition));
    print_generic_params(db, generic_params, &mut p);
    w!(p, "(");
    for (i, param) in params.iter().enumerate() {

@@ -260,7 +259,7 @@ fn print_where_clauses(db: &dyn DefDatabase, generic_params: &GenericParams, p:
            }
            WherePredicateTypeTarget::TypeOrConstParam(idx) => {
                match generic_params[*idx].name() {
-                    Some(name) => w!(p, "{}", name.display(db.upcast(), p.edition)),
+                    Some(name) => w!(p, "{}", name.display(db, p.edition)),
                    None => w!(p, "Param[{}]", idx.into_raw()),
                }
                w!(p, ": ");

@@ -278,7 +277,7 @@ fn print_where_clauses(db: &dyn DefDatabase, generic_params: &GenericParams, p:
                if i != 0 {
                    w!(p, ", ");
                }
-                w!(p, "{}", lifetime.display(db.upcast(), p.edition));
+                w!(p, "{}", lifetime.display(db, p.edition));
            }
            w!(p, "> ");
            match target {

@@ -289,7 +288,7 @@ fn print_where_clauses(db: &dyn DefDatabase, generic_params: &GenericParams, p:
            }
            WherePredicateTypeTarget::TypeOrConstParam(idx) => {
                match generic_params[*idx].name() {
-                    Some(name) => w!(p, "{}", name.display(db.upcast(), p.edition)),
+                    Some(name) => w!(p, "{}", name.display(db, p.edition)),
                    None => w!(p, "Param[{}]", idx.into_raw()),
                }
                w!(p, ": ");

@@ -313,7 +312,7 @@ fn print_generic_params(db: &dyn DefDatabase, generic_params: &GenericParams, p:
            w!(p, ", ");
        }
        first = false;
-        w!(p, "{}", param.name.display(db.upcast(), p.edition));
+        w!(p, "{}", param.name.display(db, p.edition));
    }
    for (i, param) in generic_params.iter_type_or_consts() {
        if !first {

@@ -321,7 +320,7 @@ fn print_generic_params(db: &dyn DefDatabase, generic_params: &GenericParams, p:
        }
        first = false;
        if let Some(const_param) = param.const_param() {
-            w!(p, "const {}: ", const_param.name.display(db.upcast(), p.edition));
+            w!(p, "const {}: ", const_param.name.display(db, p.edition));
            p.print_type_ref(const_param.ty);
            if let Some(default) = const_param.default {
                w!(p, " = ");

@@ -330,7 +329,7 @@ fn print_generic_params(db: &dyn DefDatabase, generic_params: &GenericParams, p:
        }
        if let Some(type_param) = param.type_param() {
            match &type_param.name {
-                Some(name) => w!(p, "{}", name.display(db.upcast(), p.edition)),
+                Some(name) => w!(p, "{}", name.display(db, p.edition)),
                None => w!(p, "Param[{}]", i.into_raw()),
            }
            if let Some(default) = type_param.default {

@@ -476,7 +475,7 @@ impl Printer<'_> {
                    offset_of
                        .fields
                        .iter()
-                        .format_with(".", |field, f| f(&field.display(self.db.upcast(), edition)))
+                        .format_with(".", |field, f| f(&field.display(self.db, edition)))
                );
            }
            Expr::Path(path) => self.print_path(path),

@@ -498,7 +497,7 @@ impl Printer<'_> {
            }
            Expr::Loop { body, label } => {
                if let Some(lbl) = label {
-                    w!(self, "{}: ", self.store[*lbl].name.display(self.db.upcast(), self.edition));
+                    w!(self, "{}: ", self.store[*lbl].name.display(self.db, self.edition));
                }
                w!(self, "loop ");
                self.print_expr(*body);

@@ -518,7 +517,7 @@ impl Printer<'_> {
            }
            Expr::MethodCall { receiver, method_name, args, generic_args } => {
                self.print_expr(*receiver);
-                w!(self, ".{}", method_name.display(self.db.upcast(), self.edition));
+                w!(self, ".{}", method_name.display(self.db, self.edition));
                if let Some(args) = generic_args {
                    w!(self, "::<");
                    self.print_generic_args(args);

@@ -556,13 +555,13 @@ impl Printer<'_> {
            Expr::Continue { label } => {
                w!(self, "continue");
                if let Some(lbl) = label {
-                    w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition));
+                    w!(self, " {}", self.store[*lbl].name.display(self.db, self.edition));
                }
            }
            Expr::Break { expr, label } => {
                w!(self, "break");
                if let Some(lbl) = label {
-                    w!(self, " {}", self.store[*lbl].name.display(self.db.upcast(), self.edition));
+                    w!(self, " {}", self.store[*lbl].name.display(self.db, self.edition));
                }
                if let Some(expr) = expr {
                    self.whitespace();

@@ -607,7 +606,7 @@ impl Printer<'_> {
                let edition = self.edition;
                self.indented(|p| {
                    for field in &**fields {
-                        w!(p, "{}: ", field.name.display(self.db.upcast(), edition));
+                        w!(p, "{}: ", field.name.display(self.db, edition));
                        p.print_expr(field.expr);
                        wln!(p, ",");
                    }

@@ -621,7 +620,7 @@ impl Printer<'_> {
            }
            Expr::Field { expr, name } => {
                self.print_expr(*expr);
-                w!(self, ".{}", name.display(self.db.upcast(), self.edition));
+                w!(self, ".{}", name.display(self.db, self.edition));
            }
            Expr::Await { expr } => {
                self.print_expr(*expr);

@@ -760,7 +759,7 @@ impl Printer<'_> {
            Expr::Literal(lit) => self.print_literal(lit),
            Expr::Block { id: _, statements, tail, label } => {
                let label = label.map(|lbl| {
-                    format!("{}: ", self.store[lbl].name.display(self.db.upcast(), self.edition))
+                    format!("{}: ", self.store[lbl].name.display(self.db, self.edition))
                });
                self.print_block(label.as_deref(), statements, tail);
            }

@@ -846,7 +845,7 @@ impl Printer<'_> {
        let oneline = matches!(self.line_format, LineFormat::Oneline);
        self.indented(|p| {
            for (idx, arg) in args.iter().enumerate() {
-                let field_name = arg.name.display(self.db.upcast(), edition).to_string();
+                let field_name = arg.name.display(self.db, edition).to_string();
 
                let mut same_name = false;
                if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] {

@@ -1019,7 +1018,7 @@ impl Printer<'_> {
            BindingAnnotation::Ref => "ref ",
            BindingAnnotation::RefMut => "ref mut ",
        };
-        w!(self, "{}{}", mode, name.display(self.db.upcast(), self.edition));
+        w!(self, "{}{}", mode, name.display(self.db, self.edition));
    }
 
    fn print_path(&mut self, path: &Path) {

@@ -1030,7 +1029,7 @@ impl Printer<'_> {
                    let loc = $it.lookup(self.db);
                    let tree = loc.item_tree_id().item_tree(self.db);
                    let name = &tree[loc.id.value].name;
-                    w!(self, "{}", name.display(self.db.upcast(), self.edition));
+                    w!(self, "{}", name.display(self.db, self.edition));
                }};
            }
            match *it {

@@ -1046,7 +1045,7 @@ impl Printer<'_> {
                }
 
                if let Some(s) = s {
-                    w!(self, "::{}", s.display(self.db.upcast(), self.edition));
+                    w!(self, "::{}", s.display(self.db, self.edition));
                }
                return w!(self, ")");
            }

@@ -1088,7 +1087,7 @@ impl Printer<'_> {
                w!(self, "::");
            }
 
-            w!(self, "{}", segment.name.display(self.db.upcast(), self.edition));
+            w!(self, "{}", segment.name.display(self.db, self.edition));
            if let Some(generics) = segment.args_and_bindings {
                w!(self, "::<");
                self.print_generic_args(generics);

@@ -1121,7 +1120,7 @@ impl Printer<'_> {
                w!(self, ", ");
            }
            first = false;
-            w!(self, "{}", binding.name.display(self.db.upcast(), self.edition));
+            w!(self, "{}", binding.name.display(self.db, self.edition));
            if !binding.bounds.is_empty() {
                w!(self, ": ");
                self.print_type_bounds(&binding.bounds);

@@ -1145,7 +1144,7 @@ impl Printer<'_> {
                let generic_params = self.db.generic_params(param.parent());
 
                match generic_params[param.local_id()].name() {
-                    Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)),
+                    Some(name) => w!(self, "{}", name.display(self.db, self.edition)),
                    None => w!(self, "Param[{}]", param.local_id().into_raw()),
                }
            }

@@ -1154,7 +1153,7 @@ impl Printer<'_> {
        match lt_ref {
            LifetimeRef::Static => w!(self, "'static"),
            LifetimeRef::Named(lt) => {
-                w!(self, "{}", lt.display(self.db.upcast(), self.edition))
+                w!(self, "{}", lt.display(self.db, self.edition))
            }
            LifetimeRef::Placeholder => w!(self, "'_"),
            LifetimeRef::Error => w!(self, "'{{error}}"),

@@ -1270,7 +1269,7 @@ impl Printer<'_> {
                "for<{}> ",
                lifetimes
                    .iter()
-                    .map(|it| it.display(self.db.upcast(), self.edition))
+                    .map(|it| it.display(self.db, self.edition))
                    .format(", ")
                    .to_string()
            );

@@ -1286,7 +1285,7 @@ impl Printer<'_> {
            }
            match arg {
                UseArgRef::Name(it) => {
-                    w!(self, "{}", it.display(self.db.upcast(), self.edition))
+                    w!(self, "{}", it.display(self.db, self.edition))
                }
                UseArgRef::Lifetime(it) => self.print_lifetime_ref(it),
            }

@ -68,12 +68,7 @@ impl ImportMap {
|
|||
for (k, v) in self.item_to_info_map.iter() {
|
||||
format_to!(out, "{:?} ({:?}) -> ", k, v.1);
|
||||
for v in &v.0 {
|
||||
format_to!(
|
||||
out,
|
||||
"{}:{:?}, ",
|
||||
v.name.display(db.upcast(), Edition::CURRENT),
|
||||
v.container
|
||||
);
|
||||
format_to!(out, "{}:{:?}, ", v.name.display(db, Edition::CURRENT), v.container);
|
||||
}
|
||||
format_to!(out, "\n");
|
||||
}
|
||||
|
|
@ -483,7 +478,7 @@ fn search_maps(
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use base_db::{RootQueryDb, Upcast};
|
||||
use base_db::RootQueryDb;
|
||||
use expect_test::{Expect, expect};
|
||||
use test_fixture::WithFixture;
|
||||
|
||||
|
|
@ -533,10 +528,10 @@ mod tests {
|
|||
})
|
||||
.expect("could not find crate");
|
||||
|
||||
let actual = search_dependencies(db.upcast(), krate, &query)
|
||||
let actual = search_dependencies(&db, krate, &query)
|
||||
.into_iter()
|
||||
.filter_map(|(dependency, _)| {
|
||||
let dependency_krate = dependency.krate(db.upcast())?;
|
||||
let dependency_krate = dependency.krate(&db)?;
|
||||
let dependency_imports = db.import_map(dependency_krate);
|
||||
|
||||
let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) {
|
||||
|
|
@ -594,7 +589,7 @@ mod tests {
|
|||
Some(format!(
|
||||
"{}::{}",
|
||||
render_path(db, &trait_info[0]),
|
||||
assoc_item_name.display(db.upcast(), Edition::CURRENT)
|
||||
assoc_item_name.display(db, Edition::CURRENT)
|
||||
))
|
||||
}
|
||||
|
||||
|
|
@ -611,7 +606,7 @@ mod tests {
|
|||
|
||||
let map = db.import_map(krate);
|
||||
|
||||
Some(format!("{name}:\n{}\n", map.fmt_for_test(db.upcast())))
|
||||
Some(format!("{name}:\n{}\n", map.fmt_for_test(&db)))
|
||||
})
|
||||
.sorted()
|
||||
.collect::<String>();
|
||||
|
|
@ -634,7 +629,7 @@ mod tests {
|
|||
module = parent;
|
||||
}
|
||||
|
||||
segments.iter().rev().map(|it| it.display(db.upcast(), Edition::CURRENT)).join("::")
|
||||
segments.iter().rev().map(|it| it.display(db, Edition::CURRENT)).join("::")
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
|
|||
|
|
@ -105,7 +105,7 @@ impl ItemTree {
|
|||
let mut item_tree = match_ast! {
|
||||
match syntax {
|
||||
ast::SourceFile(file) => {
|
||||
top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map()));
|
||||
top_attrs = Some(RawAttrs::new(db, &file, ctx.span_map()));
|
||||
ctx.lower_module_items(&file)
|
||||
},
|
||||
ast::MacroItems(items) => {
|
||||
|
|
@ -150,7 +150,7 @@ impl ItemTree {
|
|||
static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
|
||||
|
||||
let loc = block.lookup(db);
|
||||
let block = loc.ast_id.to_node(db.upcast());
|
||||
let block = loc.ast_id.to_node(db);
|
||||
|
||||
let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
|
||||
let mut item_tree = ctx.lower_block(&block);
|
||||
|
|
@ -885,7 +885,7 @@ impl Use {
|
|||
) -> ast::UseTree {
|
||||
// Re-lower the AST item and get the source map.
|
||||
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
|
||||
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
|
||||
let ast = InFile::new(file_id, self.ast_id).to_node(db);
|
||||
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
|
||||
let (_, source_map) = lower::lower_use_tree(db, ast_use_tree, &mut |range| {
|
||||
db.span_map(file_id).span_for_range(range).ctx
|
||||
|
|
@ -902,7 +902,7 @@ impl Use {
|
|||
) -> Arena<ast::UseTree> {
|
||||
// Re-lower the AST item and get the source map.
|
||||
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
|
||||
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
|
||||
let ast = InFile::new(file_id, self.ast_id).to_node(db);
|
||||
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
|
||||
lower::lower_use_tree(db, ast_use_tree, &mut |range| {
|
||||
db.span_map(file_id).span_for_range(range).ctx
|
||||
|
|
|
|||
|
|
@ -95,9 +95,7 @@ impl<'a> Ctx<'a> {
|
|||
}
|
||||
|
||||
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
|
||||
self.tree
|
||||
.attrs
|
||||
.insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map()));
|
||||
self.tree.attrs.insert(AttrOwner::TopLevel, RawAttrs::new(self.db, block, self.span_map()));
|
||||
self.tree.top_level = block
|
||||
.statements()
|
||||
.filter_map(|stmt| match stmt {
|
||||
|
|
@ -146,7 +144,7 @@ impl<'a> Ctx<'a> {
|
|||
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
|
||||
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
|
||||
};
|
||||
let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map());
|
||||
let attrs = RawAttrs::new(self.db, item, self.span_map());
|
||||
self.add_attrs(mod_item.into(), attrs);
|
||||
|
||||
Some(mod_item)
|
||||
|
|
@ -172,7 +170,7 @@ impl<'a> Ctx<'a> {
|
|||
ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
|
||||
ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
|
||||
}?;
|
||||
let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map());
|
||||
let attrs = RawAttrs::new(self.db, item_node, self.span_map());
|
||||
self.add_attrs(
|
||||
match item {
|
||||
AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
|
||||
|
|
@ -217,7 +215,7 @@ impl<'a> Ctx<'a> {
|
|||
for (i, field) in it.fields().enumerate() {
|
||||
let data = self.lower_record_field(&field);
|
||||
fields.push(data);
|
||||
let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map());
|
||||
let attr = RawAttrs::new(self.db, &field, self.span_map());
|
||||
if !attr.is_empty() {
|
||||
attrs.push((i, attr))
|
||||
}
|
||||
|
|
@ -231,7 +229,7 @@ impl<'a> Ctx<'a> {
|
|||
for (i, field) in it.fields().enumerate() {
|
||||
let data = self.lower_tuple_field(i, &field);
|
||||
fields.push(data);
|
||||
let attr = RawAttrs::new(self.db.upcast(), &field, self.span_map());
|
||||
let attr = RawAttrs::new(self.db, &field, self.span_map());
|
||||
if !attr.is_empty() {
|
||||
attrs.push((i, attr))
|
||||
}
|
||||
|
|
@ -299,10 +297,7 @@ impl<'a> Ctx<'a> {
|
|||
let start = self.next_variant_idx();
|
||||
for variant in variants.variants() {
|
||||
let idx = self.lower_variant(&variant);
|
||||
self.add_attrs(
|
||||
id(idx).into(),
|
||||
RawAttrs::new(self.db.upcast(), &variant, self.span_map()),
|
||||
);
|
||||
self.add_attrs(id(idx).into(), RawAttrs::new(self.db, &variant, self.span_map()));
|
||||
}
|
||||
let end = self.next_variant_idx();
|
||||
FileItemTreeId(start)..FileItemTreeId(end)
|
||||
|
|
@ -465,7 +460,7 @@ impl<'a> Ctx<'a> {
|
|||
let span_map = self.span_map();
|
||||
let path = m.path()?;
|
||||
let range = path.syntax().text_range();
|
||||
let path = Interned::new(ModPath::from_src(self.db.upcast(), path, &mut |range| {
|
||||
let path = Interned::new(ModPath::from_src(self.db, path, &mut |range| {
|
||||
span_map.span_for_range(range).ctx
|
||||
})?);
|
||||
let ast_id = self.source_ast_id_map.ast_id(m);
|
||||
|
|
@ -508,7 +503,7 @@ impl<'a> Ctx<'a> {
|
|||
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
|
||||
ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
|
||||
};
|
||||
let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map());
|
||||
let attrs = RawAttrs::new(self.db, &item, self.span_map());
|
||||
self.add_attrs(mod_item.into(), attrs);
|
||||
Some(mod_item)
|
||||
})
|
||||
|
|
@ -559,7 +554,7 @@ impl UseTreeLowering<'_> {
|
|||
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
|
||||
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
|
||||
Some(path) => {
|
||||
match ModPath::from_src(self.db.upcast(), path, span_for_range) {
|
||||
match ModPath::from_src(self.db, path, span_for_range) {
|
||||
Some(it) => Some(it),
|
||||
None => return None, // FIXME: report errors somewhere
|
||||
}
|
||||
|
|
@ -580,7 +575,7 @@ impl UseTreeLowering<'_> {
|
|||
} else {
|
||||
let is_glob = tree.star_token().is_some();
|
||||
let path = match tree.path() {
|
||||
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, span_for_range)?),
|
||||
Some(path) => Some(ModPath::from_src(self.db, path, span_for_range)?),
|
||||
None => None,
|
||||
};
|
||||
let alias = tree.rename().map(|a| {
|
||||
|
|
@ -639,7 +634,7 @@ fn visibility_from_ast(
|
|||
let Some(node) = node else { return private_vis() };
|
||||
let path = match node.kind() {
|
||||
ast::VisibilityKind::In(path) => {
|
||||
let path = ModPath::from_src(db.upcast(), path, span_for_range);
|
||||
let path = ModPath::from_src(db, path, span_for_range);
|
||||
match path {
|
||||
None => return private_vis(),
|
||||
Some(path) => path,
|
||||
|
|
|
|||
|
|
@ -96,7 +96,7 @@ impl Printer<'_> {
|
|||
self,
|
||||
"#{}[{}{}]{}",
|
||||
inner,
|
||||
attr.path.display(self.db.upcast(), self.edition),
|
||||
attr.path.display(self.db, self.edition),
|
||||
attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
|
||||
separated_by,
|
||||
);
|
||||
|
|
@ -112,7 +112,7 @@ impl Printer<'_> {
|
|||
fn print_visibility(&mut self, vis: RawVisibilityId) {
|
||||
match &self.tree[vis] {
|
||||
RawVisibility::Module(path, _expl) => {
|
||||
w!(self, "pub({}) ", path.display(self.db.upcast(), self.edition))
|
||||
w!(self, "pub({}) ", path.display(self.db, self.edition))
|
||||
}
|
||||
RawVisibility::Public => w!(self, "pub "),
|
||||
};
|
||||
|
|
@ -135,7 +135,7 @@ impl Printer<'_> {
|
|||
w!(this, "unsafe ");
|
||||
}
|
||||
|
||||
wln!(this, "{},", name.display(self.db.upcast(), edition));
|
||||
wln!(this, "{},", name.display(self.db, edition));
|
||||
}
|
||||
});
|
||||
w!(self, "}}");
|
||||
|
|
@ -152,7 +152,7 @@ impl Printer<'_> {
|
|||
if *is_unsafe {
|
||||
w!(this, "unsafe ");
|
||||
}
|
||||
wln!(this, "{},", name.display(self.db.upcast(), edition));
|
||||
wln!(this, "{},", name.display(self.db, edition));
|
||||
}
|
||||
});
|
||||
w!(self, ")");
|
||||
|
|
@ -164,20 +164,20 @@ impl Printer<'_> {
|
|||
fn print_use_tree(&mut self, use_tree: &UseTree) {
|
||||
match &use_tree.kind {
|
||||
UseTreeKind::Single { path, alias } => {
|
||||
w!(self, "{}", path.display(self.db.upcast(), self.edition));
|
||||
w!(self, "{}", path.display(self.db, self.edition));
|
||||
if let Some(alias) = alias {
|
||||
w!(self, " as {}", alias.display(self.edition));
|
||||
}
|
||||
}
|
||||
UseTreeKind::Glob { path } => {
|
||||
if let Some(path) = path {
|
||||
w!(self, "{}::", path.display(self.db.upcast(), self.edition));
|
||||
w!(self, "{}::", path.display(self.db, self.edition));
|
||||
}
|
||||
w!(self, "*");
|
||||
}
|
||||
UseTreeKind::Prefixed { prefix, list } => {
|
||||
if let Some(prefix) = prefix {
|
||||
w!(self, "{}::", prefix.display(self.db.upcast(), self.edition));
|
||||
w!(self, "{}::", prefix.display(self.db, self.edition));
|
||||
}
|
||||
w!(self, "{{");
|
||||
for (i, tree) in list.iter().enumerate() {
|
||||
|
|
@ -207,7 +207,7 @@ impl Printer<'_> {
|
|||
let ExternCrate { name, alias, visibility, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
w!(self, "extern crate {}", name.display(self.db.upcast(), self.edition));
|
||||
w!(self, "extern crate {}", name.display(self.db, self.edition));
|
||||
if let Some(alias) = alias {
|
||||
w!(self, " as {}", alias.display(self.edition));
|
||||
}
|
||||
|
|
@ -232,13 +232,13 @@ impl Printer<'_> {
|
|||
let Function { name, visibility, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
wln!(self, "fn {};", name.display(self.db.upcast(), self.edition));
|
||||
wln!(self, "fn {};", name.display(self.db, self.edition));
|
||||
}
|
||||
ModItem::Struct(it) => {
|
||||
let Struct { visibility, name, fields, shape: kind, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
w!(self, "struct {}", name.display(self.db.upcast(), self.edition));
|
||||
w!(self, "struct {}", name.display(self.db, self.edition));
|
||||
self.print_fields(FieldParent::Struct(it), *kind, fields);
|
||||
if matches!(kind, FieldsShape::Record) {
|
||||
wln!(self);
|
||||
|
|
@ -250,7 +250,7 @@ impl Printer<'_> {
|
|||
let Union { name, visibility, fields, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
w!(self, "union {}", name.display(self.db.upcast(), self.edition));
|
||||
w!(self, "union {}", name.display(self.db, self.edition));
|
||||
self.print_fields(FieldParent::Union(it), FieldsShape::Record, fields);
|
||||
wln!(self);
|
||||
}
|
||||
|
|
@ -258,14 +258,14 @@ impl Printer<'_> {
|
|||
let Enum { name, visibility, variants, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
w!(self, "enum {}", name.display(self.db.upcast(), self.edition));
|
||||
w!(self, "enum {}", name.display(self.db, self.edition));
|
||||
let edition = self.edition;
|
||||
self.indented(|this| {
|
||||
for variant in FileItemTreeId::range_iter(variants.clone()) {
|
||||
let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant];
|
||||
this.print_ast_id(ast_id.erase());
|
||||
this.print_attrs_of(variant, "\n");
|
||||
w!(this, "{}", name.display(self.db.upcast(), edition));
|
||||
w!(this, "{}", name.display(self.db, edition));
|
||||
this.print_fields(FieldParent::EnumVariant(variant), *kind, fields);
|
||||
wln!(this, ",");
|
||||
}
|
||||
|
|
@ -278,7 +278,7 @@ impl Printer<'_> {
|
|||
self.print_visibility(*visibility);
|
||||
w!(self, "const ");
|
||||
match name {
|
||||
Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)),
|
||||
Some(name) => w!(self, "{}", name.display(self.db, self.edition)),
|
||||
None => w!(self, "_"),
|
||||
}
|
||||
wln!(self, " = _;");
|
||||
|
|
@ -288,7 +288,7 @@ impl Printer<'_> {
|
|||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
w!(self, "static ");
|
||||
w!(self, "{}", name.display(self.db.upcast(), self.edition));
|
||||
w!(self, "{}", name.display(self.db, self.edition));
|
||||
w!(self, " = _;");
|
||||
wln!(self);
|
||||
}
|
||||
|
|
@ -296,7 +296,7 @@ impl Printer<'_> {
|
|||
let Trait { name, visibility, items, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
w!(self, "trait {} {{", name.display(self.db.upcast(), self.edition));
|
||||
w!(self, "trait {} {{", name.display(self.db, self.edition));
|
||||
self.indented(|this| {
|
||||
for item in &**items {
|
||||
this.print_mod_item((*item).into());
|
||||
|
|
@ -308,7 +308,7 @@ impl Printer<'_> {
|
|||
let TraitAlias { name, visibility, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
wln!(self, "trait {} = ..;", name.display(self.db.upcast(), self.edition));
|
||||
wln!(self, "trait {} = ..;", name.display(self.db, self.edition));
|
||||
}
|
||||
ModItem::Impl(it) => {
|
||||
let Impl { items, ast_id } = &self.tree[it];
|
||||
|
|
@ -325,7 +325,7 @@ impl Printer<'_> {
|
|||
let TypeAlias { name, visibility, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
w!(self, "type {}", name.display(self.db.upcast(), self.edition));
|
||||
w!(self, "type {}", name.display(self.db, self.edition));
|
||||
w!(self, ";");
|
||||
wln!(self);
|
||||
}
|
||||
|
|
@ -333,7 +333,7 @@ impl Printer<'_> {
|
|||
let Mod { name, visibility, kind, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
w!(self, "mod {}", name.display(self.db.upcast(), self.edition));
|
||||
w!(self, "mod {}", name.display(self.db, self.edition));
|
||||
match kind {
|
||||
ModKind::Inline { items } => {
|
||||
w!(self, " {{");
|
||||
|
|
@ -358,22 +358,18 @@ impl Printer<'_> {
|
|||
ctxt,
|
||||
expand_to
|
||||
);
|
||||
wln!(self, "{}!(...);", path.display(self.db.upcast(), self.edition));
|
||||
wln!(self, "{}!(...);", path.display(self.db, self.edition));
|
||||
}
|
||||
ModItem::MacroRules(it) => {
|
||||
let MacroRules { name, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
wln!(
|
||||
self,
|
||||
"macro_rules! {} {{ ... }}",
|
||||
name.display(self.db.upcast(), self.edition)
|
||||
);
|
||||
wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db, self.edition));
|
||||
}
|
||||
ModItem::Macro2(it) => {
|
||||
let Macro2 { name, visibility, ast_id } = &self.tree[it];
|
||||
self.print_ast_id(ast_id.erase());
|
||||
self.print_visibility(*visibility);
|
||||
wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast(), self.edition));
|
||||
wln!(self, "macro {} {{ ... }}", name.display(self.db, self.edition));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -640,7 +640,7 @@ impl GeneralConstId {
|
|||
GeneralConstId::StaticId(it) => {
|
||||
let loc = it.lookup(db);
|
||||
let tree = loc.item_tree_id().item_tree(db);
|
||||
let name = tree[loc.id.value].name.display(db.upcast(), Edition::CURRENT);
|
||||
let name = tree[loc.id.value].name.display(db, Edition::CURRENT);
|
||||
name.to_string()
|
||||
}
|
||||
GeneralConstId::ConstId(const_id) => {
|
||||
|
|
@ -648,7 +648,7 @@ impl GeneralConstId {
|
|||
let tree = loc.item_tree_id().item_tree(db);
|
||||
tree[loc.id.value].name.as_ref().map_or_else(
|
||||
|| "_".to_owned(),
|
||||
|name| name.display(db.upcast(), Edition::CURRENT).to_string(),
|
||||
|name| name.display(db, Edition::CURRENT).to_string(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -336,11 +336,11 @@ impl ModuleOrigin {
|
|||
&ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
|
||||
definition_tree_id.file_id(),
|
||||
ModuleSource::Module(
|
||||
AstId::new(definition_tree_id.file_id(), definition).to_node(db.upcast()),
|
||||
AstId::new(definition_tree_id.file_id(), definition).to_node(db),
|
||||
),
|
||||
),
|
||||
ModuleOrigin::BlockExpr { block, .. } => {
|
||||
InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db.upcast())))
|
||||
InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -607,12 +607,12 @@ impl DefMap {
|
|||
) {
|
||||
format_to!(buf, "{}\n", path);
|
||||
|
||||
map.modules[module].scope.dump(db.upcast(), buf);
|
||||
map.modules[module].scope.dump(db, buf);
|
||||
|
||||
for (name, child) in
|
||||
map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
|
||||
{
|
||||
let path = format!("{path}::{}", name.display(db.upcast(), Edition::LATEST));
|
||||
let path = format!("{path}::{}", name.display(db, Edition::LATEST));
|
||||
buf.push('\n');
|
||||
go(buf, db, map, &path, *child);
|
||||
}
|
||||
|
|
@ -748,17 +748,14 @@ impl ModuleData {
|
|||
&ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
|
||||
InFile::new(
|
||||
definition.into(),
|
||||
ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID)
|
||||
.to_range(db.upcast()),
|
||||
ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID).to_range(db),
|
||||
)
|
||||
}
|
||||
&ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
|
||||
definition_tree_id.file_id(),
|
||||
AstId::new(definition_tree_id.file_id(), definition).to_range(db.upcast()),
|
||||
AstId::new(definition_tree_id.file_id(), definition).to_range(db),
|
||||
),
|
||||
ModuleOrigin::BlockExpr { block, .. } => {
|
||||
InFile::new(block.file_id, block.to_range(db.upcast()))
|
||||
}
|
||||
ModuleOrigin::BlockExpr { block, .. } => InFile::new(block.file_id, block.to_range(db)),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -766,7 +763,7 @@ impl ModuleData {
|
|||
/// `None` for the crate root or block.
|
||||
pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> {
|
||||
let decl = self.origin.declaration()?;
|
||||
let value = decl.to_node(db.upcast());
|
||||
let value = decl.to_node(db);
|
||||
Some(InFile { file_id: decl.file_id, value })
|
||||
}
|
||||
|
||||
|
|
@ -774,7 +771,7 @@ impl ModuleData {
|
|||
/// `None` for the crate root or block.
|
||||
pub fn declaration_source_range(&self, db: &dyn DefDatabase) -> Option<InFile<TextRange>> {
|
||||
let decl = self.origin.declaration()?;
|
||||
Some(InFile { file_id: decl.file_id, value: decl.to_range(db.upcast()) })
|
||||
Some(InFile { file_id: decl.file_id, value: decl.to_range(db) })
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -259,7 +259,7 @@ impl<'a> AssocItemCollector<'a> {
|
|||
.map(|it| self.db.macro_def(it))
|
||||
};
|
||||
match macro_call_as_call_id(
|
||||
self.db.upcast(),
|
||||
self.db,
|
||||
&AstIdWithPath::new(tree_id.file_id(), ast_id, Clone::clone(path)),
|
||||
ctxt,
|
||||
expand_to,
|
||||
|
|
|
|||
|
|
@ -121,7 +121,7 @@ pub(super) fn attr_macro_as_call_id(
|
|||
};
|
||||
|
||||
def.make_call(
|
||||
db.upcast(),
|
||||
db,
|
||||
krate,
|
||||
MacroCallKind::Attr {
|
||||
ast_id: item_attr.ast_id,
|
||||
|
|
@ -146,7 +146,7 @@ pub(super) fn derive_macro_as_call_id(
|
|||
.filter(|(_, def_id)| def_id.is_derive())
|
||||
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?;
|
||||
let call_id = def_id.make_call(
|
||||
db.upcast(),
|
||||
db,
|
||||
krate,
|
||||
MacroCallKind::Derive {
|
||||
ast_id: item_attr.ast_id,
|
||||
|
|
|
|||
|
|
@ -286,14 +286,13 @@ impl DefCollector<'_> {
|
|||
crate_data.rustc_coherence_is_core = true;
|
||||
}
|
||||
() if *attr_name == sym::feature.clone() => {
|
||||
let features = attr
|
||||
.parse_path_comma_token_tree(self.db.upcast())
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter_map(|(feat, _)| match feat.segments() {
|
||||
[name] => Some(name.symbol().clone()),
|
||||
_ => None,
|
||||
});
|
||||
let features =
|
||||
attr.parse_path_comma_token_tree(self.db).into_iter().flatten().filter_map(
|
||||
|(feat, _)| match feat.segments() {
|
||||
[name] => Some(name.symbol().clone()),
|
||||
_ => None,
|
||||
},
|
||||
);
|
||||
crate_data.unstable_features.extend(features);
|
||||
}
|
||||
() if *attr_name == sym::register_attr.clone() => {
|
||||
|
|
@ -549,7 +548,7 @@ impl DefCollector<'_> {
|
|||
types => {
|
||||
tracing::debug!(
|
||||
"could not resolve prelude path `{}` to module (resolved to {:?})",
|
||||
path.display(self.db.upcast(), Edition::LATEST),
|
||||
path.display(self.db, Edition::LATEST),
|
||||
types
|
||||
);
|
||||
}
|
||||
|
|
@ -809,7 +808,7 @@ impl DefCollector<'_> {
|
|||
}
|
||||
|
||||
fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
|
||||
let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast(), Edition::LATEST))
|
||||
let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db, Edition::LATEST))
|
||||
.entered();
|
||||
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
|
||||
let res = self.def_map.resolve_path_fp_with_macro(
|
||||
|
|
@ -1256,7 +1255,7 @@ impl DefCollector<'_> {
|
|||
match &directive.kind {
|
||||
MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
|
||||
let call_id = macro_call_as_call_id(
|
||||
self.db.upcast(),
|
||||
self.db,
|
||||
ast_id,
|
||||
*call_site,
|
||||
*expand_to,
|
||||
|
|
@ -1410,7 +1409,7 @@ impl DefCollector<'_> {
|
|||
|
||||
let ast_id = ast_id.with_value(ast_adt_id);
|
||||
|
||||
match attr.parse_path_comma_token_tree(self.db.upcast()) {
|
||||
match attr.parse_path_comma_token_tree(self.db) {
|
||||
Some(derive_macros) => {
|
||||
let call_id = call_id();
|
||||
let mut len = 0;
|
||||
|
|
@ -1517,7 +1516,7 @@ impl DefCollector<'_> {
|
|||
|
||||
let item_tree = self.db.file_item_tree(file_id);
|
||||
|
||||
let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db.upcast()) {
|
||||
let mod_dir = if macro_call_id.as_macro_file().is_include_macro(self.db) {
|
||||
ModDir::root()
|
||||
} else {
|
||||
self.mod_dirs[&module_id].clone()
|
||||
|
|
@ -1543,7 +1542,7 @@ impl DefCollector<'_> {
|
|||
MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
|
||||
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
|
||||
let macro_call_as_call_id = macro_call_as_call_id(
|
||||
self.db.upcast(),
|
||||
self.db,
|
||||
ast_id,
|
||||
*call_site,
|
||||
*expand_to,
|
||||
|
|
@ -1991,8 +1990,7 @@ impl ModCollector<'_, '_> {
|
|||
cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
|
||||
let mut single_imports = Vec::new();
|
||||
for attr in macro_use_attrs {
|
||||
let Some(paths) = attr.parse_path_comma_token_tree(self.def_collector.db.upcast())
|
||||
else {
|
||||
let Some(paths) = attr.parse_path_comma_token_tree(self.def_collector.db) else {
|
||||
// `#[macro_use]` (without any paths) found, forget collected names and just import
|
||||
// all visible macros.
|
||||
self.def_collector.import_macros_from_extern_crate(
|
||||
|
|
@ -2222,7 +2220,7 @@ impl ModCollector<'_, '_> {
|
|||
}
|
||||
tracing::debug!(
|
||||
"non-builtin attribute {}",
|
||||
attr.path.display(self.def_collector.db.upcast(), Edition::LATEST)
|
||||
attr.path.display(self.def_collector.db, Edition::LATEST)
|
||||
);
|
||||
|
||||
let ast_id = AstIdWithPath::new(
|
||||
|
|
@ -2359,8 +2357,8 @@ impl ModCollector<'_, '_> {
|
|||
stdx::always!(
|
||||
name == mac.name,
|
||||
"built-in macro {} has #[rustc_builtin_macro] which declares different name {}",
|
||||
mac.name.display(self.def_collector.db.upcast(), Edition::LATEST),
|
||||
name.display(self.def_collector.db.upcast(), Edition::LATEST),
|
||||
mac.name.display(self.def_collector.db, Edition::LATEST),
|
||||
name.display(self.def_collector.db, Edition::LATEST),
|
||||
);
|
||||
helpers_opt = Some(helpers);
|
||||
}
|
||||
|
|
@ -2424,7 +2422,7 @@ impl ModCollector<'_, '_> {
|
|||
let mut eager_callback_buffer = vec![];
|
||||
// Case 1: try to resolve macro calls with single-segment name and expand macro_rules
|
||||
if let Ok(res) = macro_call_as_call_id_with_eager(
|
||||
db.upcast(),
|
||||
db,
|
||||
ast_id.ast_id,
|
||||
&ast_id.path,
|
||||
ctxt,
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
//! This module resolves `mod foo;` declaration to file.
|
||||
use arrayvec::ArrayVec;
|
||||
use base_db::{AnchoredPath, RootQueryDb};
|
||||
use base_db::AnchoredPath;
|
||||
use hir_expand::{HirFileIdExt, name::Name};
|
||||
use span::EditionedFileId;
|
||||
|
||||
|
|
@ -77,11 +77,10 @@ impl ModDir {
|
|||
}
|
||||
};
|
||||
|
||||
let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
|
||||
let orig_file_id = file_id.original_file_respecting_includes(db);
|
||||
for candidate in candidate_files.iter() {
|
||||
let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
|
||||
if let Some(file_id) = base_db::Upcast::<dyn RootQueryDb>::upcast(db).resolve_path(path)
|
||||
{
|
||||
if let Some(file_id) = db.resolve_path(path) {
|
||||
let is_mod_rs = candidate.ends_with("/mod.rs");
|
||||
|
||||
let root_dir_owner = is_mod_rs || attr_path.is_some();
|
||||
|
|
|
|||
|
|
@ -4,9 +4,9 @@ use std::{fmt, panic, sync::Mutex};
|
|||
|
||||
use base_db::{
|
||||
Crate, CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, RootQueryDb,
|
||||
SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, Upcast,
|
||||
SourceDatabase, SourceRoot, SourceRootId, SourceRootInput,
|
||||
};
|
||||
use hir_expand::{InFile, db::ExpandDatabase, files::FilePosition};
|
||||
use hir_expand::{InFile, files::FilePosition};
|
||||
use salsa::{AsDynDatabase, Durability};
|
||||
use span::{EditionedFileId, FileId};
|
||||
use syntax::{AstNode, algo, ast};
|
||||
|
|
@ -44,32 +44,6 @@ impl Default for TestDB {
|
|||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn ExpandDatabase> for TestDB {
|
||||
#[inline]
|
||||
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn DefDatabase> for TestDB {
|
||||
#[inline]
|
||||
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn RootQueryDb> for TestDB {
|
||||
fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDatabase> for TestDB {
|
||||
fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl salsa::Database for TestDB {
|
||||
fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
|
||||
|
|
@ -158,10 +132,8 @@ impl TestDB {
|
|||
}
|
||||
|
||||
pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
|
||||
let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
|
||||
|
||||
for &krate in self.relevant_crates(file_id).iter() {
|
||||
let crate_def_map = db.crate_def_map(krate);
|
||||
let crate_def_map = self.crate_def_map(krate);
|
||||
for (local_id, data) in crate_def_map.modules() {
|
||||
if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
|
||||
return crate_def_map.module_id(local_id);
|
||||
|
|
@ -172,10 +144,8 @@ impl TestDB {
|
|||
}
|
||||
|
||||
pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
|
||||
let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
|
||||
|
||||
let file_module = self.module_for_file(position.file_id.file_id());
|
||||
let mut def_map = file_module.def_map(db);
|
||||
let mut def_map = file_module.def_map(self);
|
||||
let module = self.mod_at_position(&def_map, position);
|
||||
|
||||
def_map = match self.block_at_position(&def_map, position) {
|
||||
|
|
@ -198,11 +168,10 @@ impl TestDB {
|
|||
|
||||
/// Finds the smallest/innermost module in `def_map` containing `position`.
|
||||
fn mod_at_position(&self, def_map: &DefMap, position: FilePosition) -> LocalModuleId {
|
||||
let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
|
||||
let mut size = None;
|
||||
let mut res = DefMap::ROOT;
|
||||
for (module, data) in def_map.modules() {
|
||||
let src = data.definition_source(db);
|
||||
let src = data.definition_source(self);
|
||||
if src.file_id != position.file_id {
|
||||
continue;
|
||||
}
|
||||
|
|
@ -238,18 +207,17 @@ impl TestDB {
|
|||
}
|
||||
|
||||
fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<Arc<DefMap>> {
|
||||
let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
|
||||
// Find the smallest (innermost) function in `def_map` containing the cursor.
|
||||
let mut size = None;
|
||||
let mut fn_def = None;
|
||||
for (_, module) in def_map.modules() {
|
||||
let file_id = module.definition_source(db).file_id;
|
||||
let file_id = module.definition_source(self).file_id;
|
||||
if file_id != position.file_id {
|
||||
continue;
|
||||
}
|
||||
for decl in module.scope.declarations() {
|
||||
if let ModuleDefId::FunctionId(it) = decl {
|
||||
let range = it.lookup(db).source(db).value.syntax().text_range();
|
||||
let range = it.lookup(self).source(self).value.syntax().text_range();
|
||||
|
||||
if !range.contains(position.offset) {
|
||||
continue;
|
||||
|
|
@ -275,13 +243,13 @@ impl TestDB {
|
|||
|
||||
// Find the innermost block expression that has a `DefMap`.
|
||||
let def_with_body = fn_def?.into();
|
||||
let source_map = db.body_with_source_map(def_with_body).1;
|
||||
let scopes = db.expr_scopes(def_with_body);
|
||||
let source_map = self.body_with_source_map(def_with_body).1;
|
||||
let scopes = self.expr_scopes(def_with_body);
|
||||
|
||||
let editioned_file_id_wrapper =
|
||||
base_db::EditionedFileId::new(db.as_dyn_database(), position.file_id);
|
||||
base_db::EditionedFileId::new(self.as_dyn_database(), position.file_id);
|
||||
|
||||
let root_syntax_node = db.parse(editioned_file_id_wrapper).syntax_node();
|
||||
let root_syntax_node = self.parse(editioned_file_id_wrapper).syntax_node();
|
||||
let scope_iter =
|
||||
algo::ancestors_at_offset(&root_syntax_node, position.offset).filter_map(|node| {
|
||||
let block = ast::BlockExpr::cast(node)?;
|
||||
|
|
@ -298,7 +266,7 @@ impl TestDB {
|
|||
let mut containing_blocks =
|
||||
scopes.scope_chain(Some(scope)).filter_map(|scope| scopes.block(scope));
|
||||
|
||||
if let Some(block) = containing_blocks.next().map(|block| db.block_def_map(block)) {
|
||||
if let Some(block) = containing_blocks.next().map(|block| self.block_def_map(block)) {
|
||||
return Some(block);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -209,12 +209,12 @@ impl TyBuilder<()> {
|
|||
}
|
||||
|
||||
pub fn placeholder_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
|
||||
let params = generics(db.upcast(), def.into());
|
||||
let params = generics(db, def.into());
|
||||
params.placeholder_subst(db)
|
||||
}
|
||||
|
||||
pub fn unknown_subst(db: &dyn HirDatabase, def: impl Into<GenericDefId>) -> Substitution {
|
||||
let params = generics(db.upcast(), def.into());
|
||||
let params = generics(db, def.into());
|
||||
Substitution::from_iter(
|
||||
Interner,
|
||||
params.iter_id().map(|id| match id {
|
||||
|
|
@ -233,7 +233,7 @@ impl TyBuilder<()> {
|
|||
def: impl Into<GenericDefId>,
|
||||
parent_subst: Option<Substitution>,
|
||||
) -> TyBuilder<()> {
|
||||
let generics = generics(db.upcast(), def.into());
|
||||
let generics = generics(db, def.into());
|
||||
assert!(generics.parent_generics().is_some() == parent_subst.is_some());
|
||||
let params = generics
|
||||
.iter_self()
|
||||
|
|
@ -259,9 +259,8 @@ impl TyBuilder<()> {
|
|||
/// This method prepopulates the builder with placeholder substitution of `parent`, so you
|
||||
/// should only push exactly 3 `GenericArg`s before building.
|
||||
pub fn subst_for_coroutine(db: &dyn HirDatabase, parent: DefWithBodyId) -> TyBuilder<()> {
|
||||
let parent_subst = parent
|
||||
.as_generic_def_id(db.upcast())
|
||||
.map(|p| generics(db.upcast(), p).placeholder_subst(db));
|
||||
let parent_subst =
|
||||
parent.as_generic_def_id(db).map(|p| generics(db, p).placeholder_subst(db));
|
||||
// These represent resume type, yield type, and return type of coroutine.
|
||||
let params = std::iter::repeat_n(ParamKind::Type, 3).collect();
|
||||
TyBuilder::new((), params, parent_subst)
|
||||
|
|
@ -274,13 +273,13 @@ impl TyBuilder<()> {
|
|||
) -> Substitution {
|
||||
let sig_ty = sig_ty.cast(Interner);
|
||||
let self_subst = iter::once(&sig_ty);
|
||||
let Some(parent) = parent.as_generic_def_id(db.upcast()) else {
|
||||
let Some(parent) = parent.as_generic_def_id(db) else {
|
||||
return Substitution::from_iter(Interner, self_subst);
|
||||
};
|
||||
Substitution::from_iter(
|
||||
Interner,
|
||||
self_subst
|
||||
.chain(generics(db.upcast(), parent).placeholder_subst(db).iter(Interner))
|
||||
.chain(generics(db, parent).placeholder_subst(db).iter(Interner))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
|
|
|
|||
|
|
@ -426,19 +426,19 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
|
|||
|
||||
fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String {
|
||||
let id = from_chalk_trait_id(trait_id);
|
||||
self.db.trait_signature(id).name.display(self.db.upcast(), self.edition()).to_string()
|
||||
self.db.trait_signature(id).name.display(self.db, self.edition()).to_string()
|
||||
}
|
||||
fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
|
||||
let edition = self.edition();
|
||||
match adt_id {
|
||||
hir_def::AdtId::StructId(id) => {
|
||||
self.db.struct_signature(id).name.display(self.db.upcast(), edition).to_string()
|
||||
self.db.struct_signature(id).name.display(self.db, edition).to_string()
|
||||
}
|
||||
hir_def::AdtId::EnumId(id) => {
|
||||
self.db.enum_signature(id).name.display(self.db.upcast(), edition).to_string()
|
||||
self.db.enum_signature(id).name.display(self.db, edition).to_string()
|
||||
}
|
||||
hir_def::AdtId::UnionId(id) => {
|
||||
self.db.union_signature(id).name.display(self.db.upcast(), edition).to_string()
|
||||
self.db.union_signature(id).name.display(self.db, edition).to_string()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -448,7 +448,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
|
|||
}
|
||||
fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
|
||||
let id = self.db.associated_ty_data(from_assoc_type_id(assoc_ty_id)).name;
|
||||
self.db.type_alias_signature(id).name.display(self.db.upcast(), self.edition()).to_string()
|
||||
self.db.type_alias_signature(id).name.display(self.db, self.edition()).to_string()
|
||||
}
|
||||
fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
|
||||
format!("Opaque_{:?}", opaque_ty_id.0)
|
||||
|
|
@ -537,13 +537,13 @@ impl ChalkContext<'_> {
|
|||
// `impl_datum` relies on that and will panic if the trait can't be resolved.
|
||||
let in_deps = self.db.trait_impls_in_deps(self.krate);
|
||||
let in_self = self.db.trait_impls_in_crate(self.krate);
|
||||
let trait_module = trait_id.module(self.db.upcast());
|
||||
let trait_module = trait_id.module(self.db);
|
||||
let type_module = match self_ty_fp {
|
||||
Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())),
|
||||
Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db)),
|
||||
Some(TyFingerprint::ForeignType(type_id)) => {
|
||||
Some(from_foreign_def_id(type_id).module(self.db.upcast()))
|
||||
Some(from_foreign_def_id(type_id).module(self.db))
|
||||
}
|
||||
Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())),
|
||||
Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db)),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
|
|
@ -605,15 +605,15 @@ pub(crate) fn associated_ty_data_query(
|
|||
type_alias: TypeAliasId,
|
||||
) -> Arc<AssociatedTyDatum> {
|
||||
debug!("associated_ty_data {:?}", type_alias);
|
||||
let trait_ = match type_alias.lookup(db.upcast()).container {
|
||||
let trait_ = match type_alias.lookup(db).container {
|
||||
ItemContainerId::TraitId(t) => t,
|
||||
_ => panic!("associated type not in trait"),
|
||||
};
|
||||
|
||||
// Lower bounds -- we could/should maybe move this to a separate query in `lower`
|
||||
let type_alias_data = db.type_alias_signature(type_alias);
|
||||
let generic_params = generics(db.upcast(), type_alias.into());
|
||||
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db.upcast());
|
||||
let generic_params = generics(db, type_alias.into());
|
||||
let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
|
||||
let mut ctx =
|
||||
crate::TyLoweringContext::new(db, &resolver, &type_alias_data.store, type_alias.into())
|
||||
.with_type_param_mode(crate::lower::ParamLoweringMode::Variable);
|
||||
|
|
@ -671,11 +671,11 @@ pub(crate) fn trait_datum_query(
let trait_ = from_chalk_trait_id(trait_id);
let trait_data = db.trait_signature(trait_);
debug!("trait {:?} = {:?}", trait_id, trait_data.name);
let generic_params = generics(db.upcast(), trait_.into());
let generic_params = generics(db, trait_.into());
let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let flags = rust_ir::TraitFlags {
auto: trait_data.flags.contains(TraitFlags::IS_AUTO),
upstream: trait_.lookup(db.upcast()).container.krate() != krate,
upstream: trait_.lookup(db).container.krate() != krate,
non_enumerable: true,
coinductive: false, // only relevant for Chalk testing
// FIXME: set these flags correctly

@ -754,7 +754,7 @@ pub(crate) fn adt_datum_query(
chalk_ir::AdtId(adt_id): AdtId,
) -> Arc<AdtDatum> {
debug!("adt_datum {:?}", adt_id);
let generic_params = generics(db.upcast(), adt_id.into());
let generic_params = generics(db, adt_id.into());
let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst);

@ -771,14 +771,14 @@ pub(crate) fn adt_datum_query(
hir_def::AdtId::EnumId(_) => (false, false),
};
let flags = rust_ir::AdtFlags {
upstream: adt_id.module(db.upcast()).krate() != krate,
upstream: adt_id.module(db).krate() != krate,
fundamental,
phantom_data,
};

// this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it
let _variant_id_to_fields = |id: VariantId| {
let variant_data = &id.variant_data(db.upcast());
let variant_data = &id.variant_data(db);
let fields = if variant_data.fields().is_empty() {
vec![]
} else {

@ -842,10 +842,10 @@ fn impl_def_datum(db: &dyn HirDatabase, krate: Crate, impl_id: hir_def::ImplId)
.0;
let impl_data = db.impl_signature(impl_id);

let generic_params = generics(db.upcast(), impl_id.into());
let generic_params = generics(db, impl_id.into());
let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let trait_ = trait_ref.hir_trait_id();
let impl_type = if impl_id.lookup(db.upcast()).container.krate() == krate {
let impl_type = if impl_id.lookup(db).container.krate() == krate {
rust_ir::ImplType::Local
} else {
rust_ir::ImplType::External

@ -896,7 +896,7 @@ fn type_alias_associated_ty_value(
type_alias: TypeAliasId,
) -> Arc<AssociatedTyValue> {
let type_alias_data = db.type_alias_signature(type_alias);
let impl_id = match type_alias.lookup(db.upcast()).container {
let impl_id = match type_alias.lookup(db).container {
ItemContainerId::ImplId(it) => it,
_ => panic!("assoc ty value should be in impl"),
};

@ -925,8 +925,8 @@ pub(crate) fn fn_def_datum_query(
db: &dyn HirDatabase,
callable_def: CallableDefId,
) -> Arc<FnDefDatum> {
let generic_def = GenericDefId::from_callable(db.upcast(), callable_def);
let generic_params = generics(db.upcast(), generic_def);
let generic_def = GenericDefId::from_callable(db, callable_def);
let generic_params = generics(db, generic_def);
let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders();
let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST);
let where_clauses = convert_where_clauses(db, generic_def, &bound_vars);

@ -960,7 +960,7 @@ pub(crate) fn fn_def_variance_query(
) -> Variances {
Variances::from_iter(
Interner,
db.variances_of(GenericDefId::from_callable(db.upcast(), callable_def))
db.variances_of(GenericDefId::from_callable(db, callable_def))
.as_deref()
.unwrap_or_default()
.iter()

@ -1023,8 +1023,7 @@ pub(super) fn generic_predicate_to_inline_bound(
Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound)))
}
WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
let generics =
generics(db.upcast(), from_assoc_type_id(projection_ty.associated_ty_id).into());
let generics = generics(db, from_assoc_type_id(projection_ty.associated_ty_id).into());
let (assoc_args, trait_args) =
projection_ty.substitution.as_slice(Interner).split_at(generics.len_self());
let (self_ty, args_no_self) =

@ -191,7 +191,7 @@ impl TyExt for Ty {
match *self.kind(Interner) {
TyKind::Adt(AdtId(adt), ..) => Some(adt.into()),
TyKind::FnDef(callable, ..) => Some(GenericDefId::from_callable(
db.upcast(),
db,
db.lookup_intern_callable_def(callable.into()),
)),
TyKind::AssociatedType(type_alias, ..) => Some(from_assoc_type_id(type_alias).into()),

@ -250,7 +250,7 @@ impl TyExt for Ty {
TyKind::OpaqueType(opaque_ty_id, subst) => {
match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) {
ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => {
let krate = def.module(db.upcast()).krate();
let krate = def.module(db).krate();
if let Some(future_trait) =
db.lang_item(krate, LangItem::Future).and_then(|item| item.as_trait())
{

@ -348,17 +348,12 @@ impl TyExt for Ty {

fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> {
match self.kind(Interner) {
TyKind::AssociatedType(id, ..) => {
match from_assoc_type_id(*id).lookup(db.upcast()).container {
ItemContainerId::TraitId(trait_id) => Some(trait_id),
_ => None,
}
}
TyKind::AssociatedType(id, ..) => match from_assoc_type_id(*id).lookup(db).container {
ItemContainerId::TraitId(trait_id) => Some(trait_id),
_ => None,
},
TyKind::Alias(AliasTy::Projection(projection_ty)) => {
match from_assoc_type_id(projection_ty.associated_ty_id)
.lookup(db.upcast())
.container
{
match from_assoc_type_id(projection_ty.associated_ty_id).lookup(db).container {
ItemContainerId::TraitId(trait_id) => Some(trait_id),
_ => None,
}

@ -368,7 +363,7 @@ impl TyExt for Ty {
}

fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool {
let crate_id = owner.module(db.upcast()).krate();
let crate_id = owner.module(db).krate();
let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait())
else {
return false;

@ -422,7 +417,7 @@ pub trait ProjectionTyExt {
impl ProjectionTyExt for ProjectionTy {
fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
// FIXME: something like `Split` trait from chalk-solve might be nice.
let generics = generics(db.upcast(), from_assoc_type_id(self.associated_ty_id).into());
let generics = generics(db, from_assoc_type_id(self.associated_ty_id).into());
let substitution = Substitution::from_iter(
Interner,
self.substitution.iter(Interner).skip(generics.len_self()),

@ -431,7 +426,7 @@ impl ProjectionTyExt for ProjectionTy {
}

fn trait_(&self, db: &dyn HirDatabase) -> TraitId {
match from_assoc_type_id(self.associated_ty_id).lookup(db.upcast()).container {
match from_assoc_type_id(self.associated_ty_id).lookup(db).container {
ItemContainerId::TraitId(it) => it,
_ => panic!("projection ty without parent trait"),
}

@ -104,7 +104,7 @@ pub(crate) fn path_to_const<'g>(
debruijn: DebruijnIndex,
expected_ty: Ty,
) -> Option<Const> {
match resolver.resolve_path_in_value_ns_fully(db.upcast(), path, HygieneId::ROOT) {
match resolver.resolve_path_in_value_ns_fully(db, path, HygieneId::ROOT) {
Some(ValueNs::GenericParam(p)) => {
let ty = db.const_param_ty(p);
let value = match mode {

@ -263,7 +263,7 @@ pub(crate) fn const_eval_query(
db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
}
GeneralConstId::StaticId(s) => {
let krate = s.module(db.upcast()).krate();
let krate = s.module(db).krate();
db.monomorphized_mir_body(s.into(), subst, TraitEnvironment::empty(krate))?
}
};

@ -290,7 +290,7 @@ pub(crate) fn const_eval_discriminant_variant(
) -> Result<i128, ConstEvalError> {
let def = variant_id.into();
let body = db.body(def);
let loc = variant_id.lookup(db.upcast());
let loc = variant_id.lookup(db);
if body.exprs[body.body_expr] == Expr::Missing {
let prev_idx = loc.index.checked_sub(1);
let value = match prev_idx {

@ -3,7 +3,7 @@

use std::sync;

use base_db::{Crate, Upcast, impl_intern_key};
use base_db::{Crate, impl_intern_key};
use hir_def::{
AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId,

@ -29,7 +29,7 @@ use crate::{
};

#[query_group::query_group]
pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> + std::fmt::Debug {
pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke_actual(crate::infer::infer_query)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
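The hunk above drops the `Upcast<dyn DefDatabase>` supertrait bound from `HirDatabase`, matching the removal of the `Upcast` helper trait elsewhere in this commit. The sketch below is not from the rust-analyzer codebase; it is a minimal illustration of the language feature that presumably makes this possible: trait-object upcasting coercion, stabilized in Rust 1.86.0, which lets a `&dyn` subtrait object coerce to its supertrait object without an explicit `upcast()` call. The trait and type names here are made up for the example.

trait ExpandDb {
    fn expand(&self) -> &'static str;
}

// `DefDb` declares `ExpandDb` as a supertrait, mirroring how the database
// traits in this diff build on one another.
trait DefDb: ExpandDb {
    fn def(&self) -> &'static str;
}

struct Db;

impl ExpandDb for Db {
    fn expand(&self) -> &'static str {
        "expand"
    }
}

impl DefDb for Db {
    fn def(&self) -> &'static str {
        "def"
    }
}

fn takes_expand(db: &dyn ExpandDb) -> &'static str {
    db.expand()
}

fn main() {
    let db: &dyn DefDb = &Db;
    // On Rust 1.86+ the `&dyn DefDb` argument coerces to `&dyn ExpandDb`
    // automatically; before that, code like this needed a manual upcast helper.
    println!("{}", takes_expand(db));
}
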
@ -154,7 +154,7 @@ impl<'a> DeclValidator<'a> {

fn validate_module(&mut self, module_id: ModuleId) {
// Check the module name.
let Some(module_name) = module_id.name(self.db.upcast()) else { return };
let Some(module_name) = module_id.name(self.db) else { return };
let Some(module_name_replacement) =
to_lower_snake_case(module_name.as_str()).map(|new_name| Replacement {
current_name: module_name,

@ -164,8 +164,8 @@ impl<'a> DeclValidator<'a> {
else {
return;
};
let module_data = &module_id.def_map(self.db.upcast())[module_id.local_id];
let Some(module_src) = module_data.declaration_source(self.db.upcast()) else {
let module_data = &module_id.def_map(self.db)[module_id.local_id];
let Some(module_src) = module_data.declaration_source(self.db) else {
return;
};
self.create_incorrect_case_diagnostic_for_ast_node(

@ -188,7 +188,7 @@ impl<'a> DeclValidator<'a> {
}

fn validate_func(&mut self, func: FunctionId) {
let container = func.lookup(self.db.upcast()).container;
let container = func.lookup(self.db).container;
if matches!(container, ItemContainerId::ExternBlockId(_)) {
cov_mark::hit!(extern_func_incorrect_case_ignored);
return;

@ -259,7 +259,7 @@ impl<'a> DeclValidator<'a> {
let Some(ptr) = source_ptr.value.cast::<ast::IdentPat>() else {
continue;
};
let root = source_ptr.file_syntax(self.db.upcast());
let root = source_ptr.file_syntax(self.db);
let ident_pat = ptr.to_node(&root);
let Some(parent) = ident_pat.syntax().parent() else {
continue;

@ -287,7 +287,7 @@ impl<'a> DeclValidator<'a> {
}

fn edition(&self, id: impl HasModule) -> span::Edition {
let krate = id.krate(self.db.upcast());
let krate = id.krate(self.db);
krate.data(self.db).edition
}

@ -331,8 +331,8 @@ impl<'a> DeclValidator<'a> {
return;
}

let struct_loc = struct_id.lookup(self.db.upcast());
let struct_src = struct_loc.source(self.db.upcast());
let struct_loc = struct_id.lookup(self.db);
let struct_src = struct_loc.source(self.db);

let Some(ast::FieldList::RecordFieldList(struct_fields_list)) =
struct_src.value.field_list()

@ -421,8 +421,8 @@ impl<'a> DeclValidator<'a> {
return;
}

let enum_loc = enum_id.lookup(self.db.upcast());
let enum_src = enum_loc.source(self.db.upcast());
let enum_loc = enum_id.lookup(self.db);
let enum_src = enum_loc.source(self.db);

let Some(enum_variants_list) = enum_src.value.variant_list() else {
always!(

@ -492,8 +492,8 @@ impl<'a> DeclValidator<'a> {
return;
}

let variant_loc = variant_id.lookup(self.db.upcast());
let variant_src = variant_loc.source(self.db.upcast());
let variant_loc = variant_id.lookup(self.db);
let variant_src = variant_loc.source(self.db);

let Some(ast::FieldList::RecordFieldList(variant_fields_list)) =
variant_src.value.field_list()

@ -540,7 +540,7 @@ impl<'a> DeclValidator<'a> {
}

fn validate_const(&mut self, const_id: ConstId) {
let container = const_id.lookup(self.db.upcast()).container;
let container = const_id.lookup(self.db).container;
if self.is_trait_impl_container(container) {
cov_mark::hit!(trait_impl_assoc_const_incorrect_case_ignored);
return;

@ -574,7 +574,7 @@ impl<'a> DeclValidator<'a> {
}

fn validate_type_alias(&mut self, type_alias_id: TypeAliasId) {
let container = type_alias_id.lookup(self.db.upcast()).container;
let container = type_alias_id.lookup(self.db).container;
if self.is_trait_impl_container(container) {
cov_mark::hit!(trait_impl_assoc_type_incorrect_case_ignored);
return;

@ -607,19 +607,16 @@ impl<'a> DeclValidator<'a> {
CaseType::UpperCamelCase => to_camel_case,
};
let edition = self.edition(item_id);
let Some(replacement) = to_expected_case_type(
&name.display(self.db.upcast(), edition).to_smolstr(),
)
.map(|new_name| Replacement {
current_name: name.clone(),
suggested_text: new_name,
expected_case,
}) else {
let Some(replacement) =
to_expected_case_type(&name.display(self.db, edition).to_smolstr()).map(|new_name| {
Replacement { current_name: name.clone(), suggested_text: new_name, expected_case }
})
else {
return;
};

let item_loc = item_id.lookup(self.db.upcast());
let item_src = item_loc.source(self.db.upcast());
let item_loc = item_id.lookup(self.db);
let item_src = item_loc.source(self.db);
self.create_incorrect_case_diagnostic_for_ast_node(
replacement,
item_src.file_id,

@ -647,13 +644,13 @@ impl<'a> DeclValidator<'a> {
return;
};

let edition = file_id.original_file(self.db.upcast()).edition();
let edition = file_id.original_file(self.db).edition();
let diagnostic = IncorrectCase {
file: file_id,
ident_type,
ident: AstPtr::new(&name_ast),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.display(self.db.upcast(), edition).to_string(),
ident_text: replacement.current_name.display(self.db, edition).to_string(),
suggested_text: replacement.suggested_text,
};

@ -164,9 +164,8 @@ impl ExprValidator {
|
|||
None => return,
|
||||
};
|
||||
|
||||
let checker = filter_map_next_checker.get_or_insert_with(|| {
|
||||
FilterMapNextChecker::new(&self.owner.resolver(db.upcast()), db)
|
||||
});
|
||||
let checker = filter_map_next_checker
|
||||
.get_or_insert_with(|| FilterMapNextChecker::new(&self.owner.resolver(db), db));
|
||||
|
||||
if checker.check(call_id, receiver, &callee).is_some() {
|
||||
self.diagnostics.push(BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap {
|
||||
|
|
@ -191,7 +190,7 @@ impl ExprValidator {
|
|||
return;
|
||||
}
|
||||
|
||||
let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db);
|
||||
let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
|
||||
|
||||
let pattern_arena = Arena::new();
|
||||
let mut m_arms = Vec::with_capacity(arms.len());
|
||||
|
|
@ -264,7 +263,7 @@ impl ExprValidator {
|
|||
scrut_ty,
|
||||
witnesses,
|
||||
m_arms.is_empty(),
|
||||
self.owner.krate(db.upcast()),
|
||||
self.owner.krate(db),
|
||||
),
|
||||
});
|
||||
}
|
||||
|
|
@ -288,17 +287,16 @@ impl ExprValidator {
|
|||
match &self.body[scrutinee_expr] {
|
||||
Expr::UnaryOp { op: UnaryOp::Deref, .. } => false,
|
||||
Expr::Path(path) => {
|
||||
let value_or_partial =
|
||||
self.owner.resolver(db.upcast()).resolve_path_in_value_ns_fully(
|
||||
db.upcast(),
|
||||
path,
|
||||
self.body.expr_path_hygiene(scrutinee_expr),
|
||||
);
|
||||
let value_or_partial = self.owner.resolver(db).resolve_path_in_value_ns_fully(
|
||||
db,
|
||||
path,
|
||||
self.body.expr_path_hygiene(scrutinee_expr),
|
||||
);
|
||||
value_or_partial.is_none_or(|v| !matches!(v, ValueNs::StaticId(_)))
|
||||
}
|
||||
Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) {
|
||||
TyKind::Adt(adt, ..)
|
||||
if db.adt_datum(self.owner.krate(db.upcast()), *adt).kind == AdtKind::Union =>
|
||||
if db.adt_datum(self.owner.krate(db), *adt).kind == AdtKind::Union =>
|
||||
{
|
||||
false
|
||||
}
|
||||
|
|
@ -319,7 +317,7 @@ impl ExprValidator {
|
|||
return;
|
||||
};
|
||||
let pattern_arena = Arena::new();
|
||||
let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db);
|
||||
let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
|
||||
for stmt in &**statements {
|
||||
let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else {
|
||||
continue;
|
||||
|
|
@ -359,7 +357,7 @@ impl ExprValidator {
|
|||
ty,
|
||||
witnesses,
|
||||
false,
|
||||
self.owner.krate(db.upcast()),
|
||||
self.owner.krate(db),
|
||||
),
|
||||
});
|
||||
}
|
||||
|
|
@ -438,7 +436,7 @@ impl ExprValidator {
|
|||
let Ok(source_ptr) = source_map.expr_syntax(id) else {
|
||||
return;
|
||||
};
|
||||
let root = source_ptr.file_syntax(db.upcast());
|
||||
let root = source_ptr.file_syntax(db);
|
||||
let either::Left(ast::Expr::IfExpr(if_expr)) =
|
||||
source_ptr.value.to_node(&root)
|
||||
else {
|
||||
|
|
@ -490,7 +488,7 @@ impl FilterMapNextChecker {
|
|||
{
|
||||
Some(next_function_id) => (
|
||||
Some(next_function_id),
|
||||
match next_function_id.lookup(db.upcast()).container {
|
||||
match next_function_id.lookup(db).container {
|
||||
ItemContainerId::TraitId(iterator_trait_id) => {
|
||||
let iterator_trait_items = &db.trait_items(iterator_trait_id).items;
|
||||
iterator_trait_items.iter().find_map(|(name, it)| match it {
|
||||
|
|
@ -558,7 +556,7 @@ pub fn record_literal_missing_fields(
|
|||
return None;
|
||||
}
|
||||
|
||||
let variant_data = variant_def.variant_data(db.upcast());
|
||||
let variant_data = variant_def.variant_data(db);
|
||||
|
||||
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
|
||||
let missed_fields: Vec<LocalFieldId> = variant_data
|
||||
|
|
@ -588,7 +586,7 @@ pub fn record_pattern_missing_fields(
|
|||
return None;
|
||||
}
|
||||
|
||||
let variant_data = variant_def.variant_data(db.upcast());
|
||||
let variant_data = variant_def.variant_data(db);
|
||||
|
||||
let specified_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
|
||||
let missed_fields: Vec<LocalFieldId> = variant_data
|
||||
|
|
|
|||
|
|
@ -157,7 +157,7 @@ impl<'a> PatCtxt<'a> {
|
|||
(BindingMode::Ref(_), _) => {
|
||||
never!(
|
||||
"`ref {}` has wrong type {:?}",
|
||||
name.display(self.db.upcast(), Edition::LATEST),
|
||||
name.display(self.db, Edition::LATEST),
|
||||
ty
|
||||
);
|
||||
self.errors.push(PatternError::UnexpectedType);
|
||||
|
|
@ -169,13 +169,13 @@ impl<'a> PatCtxt<'a> {
|
|||
}
|
||||
|
||||
hir_def::hir::Pat::TupleStruct { ref args, ellipsis, .. } if variant.is_some() => {
|
||||
let expected_len = variant.unwrap().variant_data(self.db.upcast()).fields().len();
|
||||
let expected_len = variant.unwrap().variant_data(self.db).fields().len();
|
||||
let subpatterns = self.lower_tuple_subpats(args, expected_len, ellipsis);
|
||||
self.lower_variant_or_leaf(pat, ty, subpatterns)
|
||||
}
|
||||
|
||||
hir_def::hir::Pat::Record { ref args, .. } if variant.is_some() => {
|
||||
let variant_data = variant.unwrap().variant_data(self.db.upcast());
|
||||
let variant_data = variant.unwrap().variant_data(self.db);
|
||||
let subpatterns = args
|
||||
.iter()
|
||||
.map(|field| {
|
||||
|
|
@ -304,7 +304,7 @@ impl HirDisplay for Pat {
|
|||
PatKind::Wild => write!(f, "_"),
|
||||
PatKind::Never => write!(f, "!"),
|
||||
PatKind::Binding { name, subpattern } => {
|
||||
write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", name.display(f.db, f.edition()))?;
|
||||
if let Some(subpattern) = subpattern {
|
||||
write!(f, " @ ")?;
|
||||
subpattern.hir_fmt(f)?;
|
||||
|
|
@ -324,28 +324,28 @@ impl HirDisplay for Pat {
|
|||
if let Some(variant) = variant {
|
||||
match variant {
|
||||
VariantId::EnumVariantId(v) => {
|
||||
let loc = v.lookup(f.db.upcast());
|
||||
let loc = v.lookup(f.db);
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
f.db.enum_variants(loc.parent).variants[loc.index as usize]
|
||||
.1
|
||||
.display(f.db.upcast(), f.edition())
|
||||
.display(f.db, f.edition())
|
||||
)?;
|
||||
}
|
||||
VariantId::StructId(s) => write!(
|
||||
f,
|
||||
"{}",
|
||||
f.db.struct_signature(s).name.display(f.db.upcast(), f.edition())
|
||||
f.db.struct_signature(s).name.display(f.db, f.edition())
|
||||
)?,
|
||||
VariantId::UnionId(u) => write!(
|
||||
f,
|
||||
"{}",
|
||||
f.db.union_signature(u).name.display(f.db.upcast(), f.edition())
|
||||
f.db.union_signature(u).name.display(f.db, f.edition())
|
||||
)?,
|
||||
};
|
||||
|
||||
let variant_data = variant.variant_data(f.db.upcast());
|
||||
let variant_data = variant.variant_data(f.db);
|
||||
if variant_data.shape == FieldsShape::Record {
|
||||
write!(f, " {{ ")?;
|
||||
|
||||
|
|
@ -361,7 +361,7 @@ impl HirDisplay for Pat {
|
|||
"{}: ",
|
||||
variant_data.fields()[p.field]
|
||||
.name
|
||||
.display(f.db.upcast(), f.edition())
|
||||
.display(f.db, f.edition())
|
||||
)?;
|
||||
p.pattern.hir_fmt(f)
|
||||
})
|
||||
|
|
@ -376,8 +376,8 @@ impl HirDisplay for Pat {
|
|||
}
|
||||
}
|
||||
|
||||
let num_fields = variant
|
||||
.map_or(subpatterns.len(), |v| v.variant_data(f.db.upcast()).fields().len());
|
||||
let num_fields =
|
||||
variant.map_or(subpatterns.len(), |v| v.variant_data(f.db).fields().len());
|
||||
if num_fields != 0 || variant.is_none() {
|
||||
write!(f, "(")?;
|
||||
let subpats = (0..num_fields).map(|i| {
|
||||
|
|
|
|||
|
|
@ -44,7 +44,7 @@ impl EnumVariantContiguousIndex {
|
|||
fn from_enum_variant_id(db: &dyn HirDatabase, target_evid: EnumVariantId) -> Self {
|
||||
// Find the index of this variant in the list of variants.
|
||||
use hir_def::Lookup;
|
||||
let i = target_evid.lookup(db.upcast()).index as usize;
|
||||
let i = target_evid.lookup(db).index as usize;
|
||||
EnumVariantContiguousIndex(i)
|
||||
}
|
||||
|
||||
|
|
@ -105,7 +105,7 @@ impl<'db> MatchCheckCtx<'db> {
|
|||
|
||||
/// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
|
||||
fn is_foreign_non_exhaustive(&self, adt: hir_def::AdtId) -> bool {
|
||||
let is_local = adt.krate(self.db.upcast()) == self.module.krate();
|
||||
let is_local = adt.krate(self.db) == self.module.krate();
|
||||
!is_local && self.db.attrs(adt.into()).by_key(&sym::non_exhaustive).exists()
|
||||
}
|
||||
|
||||
|
|
@ -139,7 +139,7 @@ impl<'db> MatchCheckCtx<'db> {
|
|||
let (_, substs) = ty.as_adt().unwrap();
|
||||
|
||||
let field_tys = self.db.field_types(variant);
|
||||
let fields_len = variant.variant_data(self.db.upcast()).fields().len() as u32;
|
||||
let fields_len = variant.variant_data(self.db).fields().len() as u32;
|
||||
|
||||
(0..fields_len).map(|idx| LocalFieldId::from_raw(idx.into())).map(move |fid| {
|
||||
let ty = field_tys[fid].clone().substitute(Interner, substs);
|
||||
|
|
@ -222,7 +222,7 @@ impl<'db> MatchCheckCtx<'db> {
|
|||
}
|
||||
};
|
||||
let variant = Self::variant_id_for_adt(self.db, &ctor, adt).unwrap();
|
||||
arity = variant.variant_data(self.db.upcast()).fields().len();
|
||||
arity = variant.variant_data(self.db).fields().len();
|
||||
}
|
||||
_ => {
|
||||
never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty);
|
||||
|
|
@ -341,7 +341,7 @@ impl PatCx for MatchCheckCtx<'_> {
|
|||
1
|
||||
} else {
|
||||
let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
|
||||
variant.variant_data(self.db.upcast()).fields().len()
|
||||
variant.variant_data(self.db).fields().len()
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
|
|
@ -389,8 +389,7 @@ impl PatCx for MatchCheckCtx<'_> {
|
|||
.map(move |(fid, ty)| {
|
||||
let is_visible = || {
|
||||
matches!(adt, hir_def::AdtId::EnumId(..))
|
||||
|| visibilities[fid]
|
||||
.is_visible_from(self.db.upcast(), self.module)
|
||||
|| visibilities[fid].is_visible_from(self.db, self.module)
|
||||
};
|
||||
let is_uninhabited = self.is_uninhabited(&ty);
|
||||
let private_uninhabited = is_uninhabited && !is_visible();
|
||||
|
|
@ -493,13 +492,13 @@ impl PatCx for MatchCheckCtx<'_> {
|
|||
// if let Some(variant) = variant {
|
||||
// match variant {
|
||||
// VariantId::EnumVariantId(v) => {
|
||||
// write!(f, "{}", db.enum_variant_data(v).name.display(db.upcast()))?;
|
||||
// write!(f, "{}", db.enum_variant_data(v).name.display(db))?;
|
||||
// }
|
||||
// VariantId::StructId(s) => {
|
||||
// write!(f, "{}", db.struct_data(s).name.display(db.upcast()))?
|
||||
// write!(f, "{}", db.struct_data(s).name.display(db))?
|
||||
// }
|
||||
// VariantId::UnionId(u) => {
|
||||
// write!(f, "{}", db.union_data(u).name.display(db.upcast()))?
|
||||
// write!(f, "{}", db.union_data(u).name.display(db))?
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
|
|
|||
|
|
@ -127,7 +127,7 @@ pub fn unsafe_operations(
|
|||
}
|
||||
};
|
||||
let mut visitor = UnsafeVisitor::new(db, infer, body, def, &mut visitor_callback);
|
||||
_ = visitor.resolver.update_to_inner_scope(db.upcast(), def, current);
|
||||
_ = visitor.resolver.update_to_inner_scope(db, def, current);
|
||||
visitor.walk_expr(current);
|
||||
}
|
||||
|
||||
|
|
@ -154,7 +154,7 @@ impl<'a> UnsafeVisitor<'a> {
|
|||
def: DefWithBodyId,
|
||||
unsafe_expr_cb: &'a mut dyn FnMut(UnsafeDiagnostic),
|
||||
) -> Self {
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let resolver = def.resolver(db);
|
||||
let def_target_features = match def {
|
||||
DefWithBodyId::FunctionId(func) => TargetFeatures::from_attrs(&db.attrs(func.into())),
|
||||
_ => TargetFeatures::default(),
|
||||
|
|
@ -200,7 +200,7 @@ impl<'a> UnsafeVisitor<'a> {
|
|||
}
|
||||
|
||||
fn walk_pats_top(&mut self, pats: impl Iterator<Item = PatId>, parent_expr: ExprId) {
|
||||
let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.def, parent_expr);
|
||||
let guard = self.resolver.update_to_inner_scope(self.db, self.def, parent_expr);
|
||||
pats.for_each(|pat| self.walk_pat(pat));
|
||||
self.resolver.reset_to_guard(guard);
|
||||
}
|
||||
|
|
@ -268,8 +268,7 @@ impl<'a> UnsafeVisitor<'a> {
|
|||
}
|
||||
}
|
||||
Expr::Path(path) => {
|
||||
let guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.def, current);
|
||||
let guard = self.resolver.update_to_inner_scope(self.db, self.def, current);
|
||||
self.mark_unsafe_path(current.into(), path);
|
||||
self.resolver.reset_to_guard(guard);
|
||||
}
|
||||
|
|
@ -357,8 +356,7 @@ impl<'a> UnsafeVisitor<'a> {
|
|||
|
||||
fn mark_unsafe_path(&mut self, node: ExprOrPatId, path: &Path) {
|
||||
let hygiene = self.body.expr_or_pat_path_hygiene(node);
|
||||
let value_or_partial =
|
||||
self.resolver.resolve_path_in_value_ns(self.db.upcast(), path, hygiene);
|
||||
let value_or_partial = self.resolver.resolve_path_in_value_ns(self.db, path, hygiene);
|
||||
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
|
||||
let static_data = self.db.static_signature(id);
|
||||
if static_data.flags.contains(StaticFlags::MUTABLE) {
|
||||
|
|
|
|||
|
|
@ -570,7 +570,7 @@ impl HirDisplay for ProjectionTy {
|
|||
if !f.bounds_formatting_ctx.contains(self) {
|
||||
let db = f.db;
|
||||
let id = from_placeholder_idx(db, *idx);
|
||||
let generics = generics(db.upcast(), id.parent);
|
||||
let generics = generics(db, id.parent);
|
||||
|
||||
let substs = generics.placeholder_subst(db);
|
||||
let bounds = db
|
||||
|
|
@ -619,7 +619,7 @@ impl HirDisplay for ProjectionTy {
|
|||
">::{}",
|
||||
f.db.type_alias_signature(from_assoc_type_id(self.associated_ty_id))
|
||||
.name
|
||||
.display(f.db.upcast(), f.edition())
|
||||
.display(f.db, f.edition())
|
||||
)?;
|
||||
let proj_params_count =
|
||||
self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
|
||||
|
|
@ -656,21 +656,16 @@ impl HirDisplay for Const {
|
|||
ConstValue::InferenceVar(..) => write!(f, "#c#"),
|
||||
ConstValue::Placeholder(idx) => {
|
||||
let id = from_placeholder_idx(f.db, *idx);
|
||||
let generics = generics(f.db.upcast(), id.parent);
|
||||
let generics = generics(f.db, id.parent);
|
||||
let param_data = &generics[id.local_id];
|
||||
write!(f, "{}", param_data.name().unwrap().display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", param_data.name().unwrap().display(f.db, f.edition()))?;
|
||||
Ok(())
|
||||
}
|
||||
ConstValue::Concrete(c) => match &c.interned {
|
||||
ConstScalar::Bytes(b, m) => render_const_scalar(f, b, m, &data.ty),
|
||||
ConstScalar::UnevaluatedConst(c, parameters) => {
|
||||
write!(f, "{}", c.name(f.db.upcast()))?;
|
||||
hir_fmt_generics(
|
||||
f,
|
||||
parameters.as_slice(Interner),
|
||||
c.generic_def(f.db.upcast()),
|
||||
None,
|
||||
)?;
|
||||
write!(f, "{}", c.name(f.db))?;
|
||||
hir_fmt_generics(f, parameters.as_slice(Interner), c.generic_def(f.db), None)?;
|
||||
Ok(())
|
||||
}
|
||||
ConstScalar::Unknown => f.write_char('_'),
|
||||
|
|
@ -790,7 +785,7 @@ fn render_const_scalar(
|
|||
TyKind::Adt(adt, _) if b.len() == 2 * size_of::<usize>() => match adt.0 {
|
||||
hir_def::AdtId::StructId(s) => {
|
||||
let data = f.db.struct_signature(s);
|
||||
write!(f, "&{}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "&{}", data.name.display(f.db, f.edition()))?;
|
||||
Ok(())
|
||||
}
|
||||
_ => f.write_str("<unsized-enum-or-union>"),
|
||||
|
|
@ -848,7 +843,7 @@ fn render_const_scalar(
|
|||
match adt.0 {
|
||||
hir_def::AdtId::StructId(s) => {
|
||||
let data = f.db.struct_signature(s);
|
||||
write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", data.name.display(f.db, f.edition()))?;
|
||||
let field_types = f.db.field_types(s.into());
|
||||
render_variant_after_name(
|
||||
&f.db.variant_fields(s.into()),
|
||||
|
|
@ -862,11 +857,7 @@ fn render_const_scalar(
|
|||
)
|
||||
}
|
||||
hir_def::AdtId::UnionId(u) => {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
f.db.union_signature(u).name.display(f.db.upcast(), f.edition())
|
||||
)
|
||||
write!(f, "{}", f.db.union_signature(u).name.display(f.db, f.edition()))
|
||||
}
|
||||
hir_def::AdtId::EnumId(e) => {
|
||||
let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else {
|
||||
|
|
@ -877,13 +868,13 @@ fn render_const_scalar(
|
|||
else {
|
||||
return f.write_str("<failed-to-detect-variant>");
|
||||
};
|
||||
let loc = var_id.lookup(f.db.upcast());
|
||||
let loc = var_id.lookup(f.db);
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
f.db.enum_variants(loc.parent).variants[loc.index as usize]
|
||||
.1
|
||||
.display(f.db.upcast(), f.edition())
|
||||
.display(f.db, f.edition())
|
||||
)?;
|
||||
let field_types = f.db.field_types(var_id.into());
|
||||
render_variant_after_name(
|
||||
|
|
@ -969,11 +960,11 @@ fn render_variant_after_name(
|
|||
if matches!(data.shape, FieldsShape::Record) {
|
||||
write!(f, " {{")?;
|
||||
if let Some((id, data)) = it.next() {
|
||||
write!(f, " {}: ", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, " {}: ", data.name.display(f.db, f.edition()))?;
|
||||
render_field(f, id)?;
|
||||
}
|
||||
for (id, data) in it {
|
||||
write!(f, ", {}: ", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, ", {}: ", data.name.display(f.db, f.edition()))?;
|
||||
render_field(f, id)?;
|
||||
}
|
||||
write!(f, " }}")?;
|
||||
|
|
@ -1059,7 +1050,7 @@ impl HirDisplay for Ty {
|
|||
bounds.iter().any(|bound| {
|
||||
if let WhereClause::Implemented(trait_ref) = bound.skip_binders() {
|
||||
let trait_ = trait_ref.hir_trait_id();
|
||||
fn_traits(db.upcast(), trait_).any(|it| it == trait_)
|
||||
fn_traits(db, trait_).any(|it| it == trait_)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
|
@ -1087,8 +1078,7 @@ impl HirDisplay for Ty {
|
|||
|
||||
// Don't count Sized but count when it absent
|
||||
// (i.e. when explicit ?Sized bound is set).
|
||||
let default_sized =
|
||||
SizedByDefault::Sized { anchor: func.krate(db.upcast()) };
|
||||
let default_sized = SizedByDefault::Sized { anchor: func.krate(db) };
|
||||
let sized_bounds = bounds
|
||||
.skip_binders()
|
||||
.iter()
|
||||
|
|
@ -1098,7 +1088,7 @@ impl HirDisplay for Ty {
|
|||
WhereClause::Implemented(trait_ref)
|
||||
if default_sized.is_sized_trait(
|
||||
trait_ref.hir_trait_id(),
|
||||
db.upcast(),
|
||||
db,
|
||||
),
|
||||
)
|
||||
})
|
||||
|
|
@ -1166,32 +1156,28 @@ impl HirDisplay for Ty {
|
|||
write!(f, "fn ")?;
|
||||
f.start_location_link(def.into());
|
||||
match def {
|
||||
CallableDefId::FunctionId(ff) => write!(
|
||||
f,
|
||||
"{}",
|
||||
db.function_signature(ff).name.display(f.db.upcast(), f.edition())
|
||||
)?,
|
||||
CallableDefId::StructId(s) => write!(
|
||||
f,
|
||||
"{}",
|
||||
db.struct_signature(s).name.display(f.db.upcast(), f.edition())
|
||||
)?,
|
||||
CallableDefId::FunctionId(ff) => {
|
||||
write!(f, "{}", db.function_signature(ff).name.display(f.db, f.edition()))?
|
||||
}
|
||||
CallableDefId::StructId(s) => {
|
||||
write!(f, "{}", db.struct_signature(s).name.display(f.db, f.edition()))?
|
||||
}
|
||||
CallableDefId::EnumVariantId(e) => {
|
||||
let loc = e.lookup(db.upcast());
|
||||
let loc = e.lookup(db);
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
db.enum_variants(loc.parent).variants[loc.index as usize]
|
||||
.1
|
||||
.display(db.upcast(), f.edition())
|
||||
.display(db, f.edition())
|
||||
)?
|
||||
}
|
||||
};
|
||||
f.end_location_link();
|
||||
|
||||
if parameters.len(Interner) > 0 {
|
||||
let generic_def_id = GenericDefId::from_callable(db.upcast(), def);
|
||||
let generics = generics(db.upcast(), generic_def_id);
|
||||
let generic_def_id = GenericDefId::from_callable(db, def);
|
||||
let generics = generics(db, generic_def_id);
|
||||
let (parent_len, self_param, type_, const_, impl_, lifetime) =
|
||||
generics.provenance_split();
|
||||
let parameters = parameters.as_slice(Interner);
|
||||
|
|
@ -1252,11 +1238,11 @@ impl HirDisplay for Ty {
|
|||
hir_def::AdtId::UnionId(it) => db.union_signature(it).name.clone(),
|
||||
hir_def::AdtId::EnumId(it) => db.enum_signature(it).name.clone(),
|
||||
};
|
||||
write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", name.display(f.db, f.edition()))?;
|
||||
}
|
||||
DisplayKind::SourceCode { target_module_id: module_id, allow_opaque: _ } => {
|
||||
if let Some(path) = find_path::find_path(
|
||||
db.upcast(),
|
||||
db,
|
||||
ItemInNs::Types((*def_id).into()),
|
||||
module_id,
|
||||
PrefixKind::Plain,
|
||||
|
|
@ -1269,7 +1255,7 @@ impl HirDisplay for Ty {
|
|||
allow_unstable: true,
|
||||
},
|
||||
) {
|
||||
write!(f, "{}", path.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", path.display(f.db, f.edition()))?;
|
||||
} else {
|
||||
return Err(HirDisplayError::DisplaySourceCodeError(
|
||||
DisplaySourceCodeError::PathNotFound,
|
||||
|
|
@ -1285,7 +1271,7 @@ impl HirDisplay for Ty {
|
|||
}
|
||||
TyKind::AssociatedType(assoc_type_id, parameters) => {
|
||||
let type_alias = from_assoc_type_id(*assoc_type_id);
|
||||
let trait_ = match type_alias.lookup(db.upcast()).container {
|
||||
let trait_ = match type_alias.lookup(db).container {
|
||||
ItemContainerId::TraitId(it) => it,
|
||||
_ => panic!("not an associated type"),
|
||||
};
|
||||
|
|
@ -1295,12 +1281,12 @@ impl HirDisplay for Ty {
|
|||
// Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
|
||||
if f.display_kind.is_test() {
|
||||
f.start_location_link(trait_.into());
|
||||
write!(f, "{}", trait_data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", trait_data.name.display(f.db, f.edition()))?;
|
||||
f.end_location_link();
|
||||
write!(f, "::")?;
|
||||
|
||||
f.start_location_link(type_alias.into());
|
||||
write!(f, "{}", type_alias_data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", type_alias_data.name.display(f.db, f.edition()))?;
|
||||
f.end_location_link();
|
||||
// Note that the generic args for the associated type come before those for the
|
||||
// trait (including the self type).
|
||||
|
|
@ -1318,7 +1304,7 @@ impl HirDisplay for Ty {
|
|||
let alias = from_foreign_def_id(*type_alias);
|
||||
let type_alias = db.type_alias_signature(alias);
|
||||
f.start_location_link(alias.into());
|
||||
write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", type_alias.name.display(f.db, f.edition()))?;
|
||||
f.end_location_link();
|
||||
}
|
||||
TyKind::OpaqueType(opaque_ty_id, parameters) => {
|
||||
|
|
@ -1335,7 +1321,7 @@ impl HirDisplay for Ty {
|
|||
let data =
|
||||
(*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
|
||||
let bounds = data.substitute(Interner, ¶meters);
|
||||
let krate = func.krate(db.upcast());
|
||||
let krate = func.krate(db);
|
||||
write_bounds_like_dyn_trait_with_prefix(
|
||||
f,
|
||||
"impl",
|
||||
|
|
@ -1350,7 +1336,7 @@ impl HirDisplay for Ty {
|
|||
db.type_alias_impl_traits(alias).expect("impl trait id without data");
|
||||
let data = (*datas).as_ref().map(|it| it.impl_traits[idx].bounds.clone());
|
||||
let bounds = data.substitute(Interner, ¶meters);
|
||||
let krate = alias.krate(db.upcast());
|
||||
let krate = alias.krate(db);
|
||||
write_bounds_like_dyn_trait_with_prefix(
|
||||
f,
|
||||
"impl",
|
||||
|
|
@ -1361,7 +1347,7 @@ impl HirDisplay for Ty {
|
|||
}
|
||||
ImplTraitId::AsyncBlockTypeImplTrait(body, ..) => {
|
||||
let future_trait = db
|
||||
.lang_item(body.module(db.upcast()).krate(), LangItem::Future)
|
||||
.lang_item(body.module(db).krate(), LangItem::Future)
|
||||
.and_then(LangItemTarget::as_trait);
|
||||
let output = future_trait.and_then(|t| {
|
||||
db.trait_items(t).associated_type_by_name(&Name::new_symbol_root(
|
||||
|
|
@ -1442,7 +1428,7 @@ impl HirDisplay for Ty {
|
|||
}
|
||||
TyKind::Placeholder(idx) => {
|
||||
let id = from_placeholder_idx(db, *idx);
|
||||
let generics = generics(db.upcast(), id.parent);
|
||||
let generics = generics(db, id.parent);
|
||||
let param_data = &generics[id.local_id];
|
||||
match param_data {
|
||||
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
|
||||
|
|
@ -1453,7 +1439,7 @@ impl HirDisplay for Ty {
|
|||
p.name
|
||||
.clone()
|
||||
.unwrap_or_else(Name::missing)
|
||||
.display(f.db.upcast(), f.edition())
|
||||
.display(f.db, f.edition())
|
||||
)?
|
||||
}
|
||||
TypeParamProvenance::ArgumentImplTrait => {
|
||||
|
|
@ -1475,7 +1461,7 @@ impl HirDisplay for Ty {
|
|||
WhereClause::LifetimeOutlives(_) => false,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let krate = id.parent.module(db.upcast()).krate();
|
||||
let krate = id.parent.module(db).krate();
|
||||
write_bounds_like_dyn_trait_with_prefix(
|
||||
f,
|
||||
"impl",
|
||||
|
|
@ -1486,7 +1472,7 @@ impl HirDisplay for Ty {
|
|||
}
|
||||
},
|
||||
TypeOrConstParamData::ConstParamData(p) => {
|
||||
write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", p.name.display(f.db, f.edition()))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1525,7 +1511,7 @@ impl HirDisplay for Ty {
|
|||
let data =
|
||||
(*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
|
||||
let bounds = data.substitute(Interner, &opaque_ty.substitution);
|
||||
let krate = func.krate(db.upcast());
|
||||
let krate = func.krate(db);
|
||||
write_bounds_like_dyn_trait_with_prefix(
|
||||
f,
|
||||
"impl",
|
||||
|
|
@ -1540,7 +1526,7 @@ impl HirDisplay for Ty {
|
|||
let data =
|
||||
(*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
|
||||
let bounds = data.substitute(Interner, &opaque_ty.substitution);
|
||||
let krate = alias.krate(db.upcast());
|
||||
let krate = alias.krate(db);
|
||||
write_bounds_like_dyn_trait_with_prefix(
|
||||
f,
|
||||
"impl",
|
||||
|
|
@ -1788,7 +1774,7 @@ fn write_bounds_like_dyn_trait(
|
|||
match p.skip_binders() {
|
||||
WhereClause::Implemented(trait_ref) => {
|
||||
let trait_ = trait_ref.hir_trait_id();
|
||||
if default_sized.is_sized_trait(trait_, f.db.upcast()) {
|
||||
if default_sized.is_sized_trait(trait_, f.db) {
|
||||
is_sized = true;
|
||||
if matches!(default_sized, SizedByDefault::Sized { .. }) {
|
||||
// Don't print +Sized, but rather +?Sized if absent.
|
||||
|
|
@ -1796,7 +1782,7 @@ fn write_bounds_like_dyn_trait(
|
|||
}
|
||||
}
|
||||
if !is_fn_trait {
|
||||
is_fn_trait = fn_traits(f.db.upcast(), trait_).any(|it| it == trait_);
|
||||
is_fn_trait = fn_traits(f.db, trait_).any(|it| it == trait_);
|
||||
}
|
||||
if !is_fn_trait && angle_open {
|
||||
write!(f, ">")?;
|
||||
|
|
@ -1809,11 +1795,7 @@ fn write_bounds_like_dyn_trait(
|
|||
// existential) here, which is the only thing that's
|
||||
// possible in actual Rust, and hence don't print it
|
||||
f.start_location_link(trait_.into());
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
f.db.trait_signature(trait_).name.display(f.db.upcast(), f.edition())
|
||||
)?;
|
||||
write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?;
|
||||
f.end_location_link();
|
||||
if is_fn_trait {
|
||||
if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
|
||||
|
|
@ -1887,10 +1869,10 @@ fn write_bounds_like_dyn_trait(
|
|||
let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id);
|
||||
let type_alias = f.db.type_alias_signature(assoc_ty_id);
|
||||
f.start_location_link(assoc_ty_id.into());
|
||||
write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", type_alias.name.display(f.db, f.edition()))?;
|
||||
f.end_location_link();
|
||||
|
||||
let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
|
||||
let proj_arg_count = generics(f.db, assoc_ty_id.into()).len_self();
|
||||
if proj_arg_count > 0 {
|
||||
write!(f, "<")?;
|
||||
hir_fmt_generic_arguments(
|
||||
|
|
@ -1938,7 +1920,7 @@ impl HirDisplay for TraitRef {
|
|||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
let trait_ = self.hir_trait_id();
|
||||
f.start_location_link(trait_.into());
|
||||
write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", f.db.trait_signature(trait_).name.display(f.db, f.edition()))?;
|
||||
f.end_location_link();
|
||||
let substs = self.substitution.as_slice(Interner);
|
||||
hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner))
|
||||
|
|
@ -1969,7 +1951,7 @@ impl HirDisplay for WhereClause {
|
|||
write!(
|
||||
f,
|
||||
"{}",
|
||||
f.db.type_alias_signature(type_alias).name.display(f.db.upcast(), f.edition()),
|
||||
f.db.type_alias_signature(type_alias).name.display(f.db, f.edition()),
|
||||
)?;
|
||||
f.end_location_link();
|
||||
write!(f, " = ")?;
|
||||
|
|
@ -2004,9 +1986,9 @@ impl HirDisplay for LifetimeData {
|
|||
match self {
|
||||
LifetimeData::Placeholder(idx) => {
|
||||
let id = lt_from_placeholder_idx(f.db, *idx);
|
||||
let generics = generics(f.db.upcast(), id.parent);
|
||||
let generics = generics(f.db, id.parent);
|
||||
let param_data = &generics[id.local_id];
|
||||
write!(f, "{}", param_data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", param_data.name.display(f.db, f.edition()))?;
|
||||
Ok(())
|
||||
}
|
||||
_ if f.display_kind.is_source_code() => write!(f, "'_"),
|
||||
|
|
@ -2048,14 +2030,14 @@ pub fn write_visibility(
|
|||
match vis {
|
||||
Visibility::Public => write!(f, "pub "),
|
||||
Visibility::Module(vis_id, _) => {
|
||||
let def_map = module_id.def_map(f.db.upcast());
|
||||
let def_map = module_id.def_map(f.db);
|
||||
let root_module_id = def_map.module_id(DefMap::ROOT);
|
||||
if vis_id == module_id {
|
||||
// pub(self) or omitted
|
||||
Ok(())
|
||||
} else if root_module_id == vis_id {
|
||||
write!(f, "pub(crate) ")
|
||||
} else if module_id.containing_module(f.db.upcast()) == Some(vis_id) {
|
||||
} else if module_id.containing_module(f.db) == Some(vis_id) {
|
||||
write!(f, "pub(super) ")
|
||||
} else {
|
||||
write!(f, "pub(in ...) ")
|
||||
|
|
@ -2109,7 +2091,7 @@ impl HirDisplayWithExpressionStore for LifetimeRef {
|
|||
_store: &ExpressionStore,
|
||||
) -> Result<(), HirDisplayError> {
|
||||
match self {
|
||||
LifetimeRef::Named(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition())),
|
||||
LifetimeRef::Named(name) => write!(f, "{}", name.display(f.db, f.edition())),
|
||||
LifetimeRef::Static => write!(f, "'static"),
|
||||
LifetimeRef::Placeholder => write!(f, "'_"),
|
||||
LifetimeRef::Error => write!(f, "'{{error}}"),
|
||||
|
|
@ -2127,7 +2109,7 @@ impl HirDisplayWithExpressionStore for TypeRefId {
|
|||
TypeRef::TypeParam(param) => {
|
||||
let generic_params = f.db.generic_params(param.parent());
|
||||
match generic_params[param.local_id()].name() {
|
||||
Some(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition()))?,
|
||||
Some(name) => write!(f, "{}", name.display(f.db, f.edition()))?,
|
||||
None => {
|
||||
write!(f, "impl ")?;
|
||||
f.write_joined(
|
||||
|
|
@ -2208,7 +2190,7 @@ impl HirDisplayWithExpressionStore for TypeRefId {
|
|||
for index in 0..function_parameters.len() {
|
||||
let (param_name, param_type) = &function_parameters[index];
|
||||
if let Some(name) = param_name {
|
||||
write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}: ", name.display(f.db, f.edition()))?;
|
||||
}
|
||||
|
||||
param_type.hir_fmt(f, store)?;
|
||||
|
|
@ -2277,7 +2259,7 @@ impl HirDisplayWithExpressionStore for TypeBound {
|
|||
write!(
|
||||
f,
|
||||
"for<{}> ",
|
||||
lifetimes.iter().map(|it| it.display(f.db.upcast(), edition)).format(", ")
|
||||
lifetimes.iter().map(|it| it.display(f.db, edition)).format(", ")
|
||||
)?;
|
||||
store[*path].hir_fmt(f, store)
|
||||
}
|
||||
|
|
@ -2287,7 +2269,7 @@ impl HirDisplayWithExpressionStore for TypeBound {
|
|||
for (idx, arg) in args.iter().enumerate() {
|
||||
match arg {
|
||||
UseArgRef::Lifetime(lt) => lt.hir_fmt(f, store)?,
|
||||
UseArgRef::Name(n) => write!(f, "{}", n.display(f.db.upcast(), edition))?,
|
||||
UseArgRef::Name(n) => write!(f, "{}", n.display(f.db, edition))?,
|
||||
}
|
||||
if idx != last {
|
||||
write!(f, ", ")?;
|
||||
|
|
@ -2361,7 +2343,7 @@ impl HirDisplayWithExpressionStore for Path {
|
|||
if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
|
||||
write!(f, "::")?;
|
||||
}
|
||||
write!(f, "{}", segment.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", segment.name.display(f.db, f.edition()))?;
|
||||
if let Some(generic_args) = segment.args_and_bindings {
|
||||
// We should be in type context, so format as `Foo<Bar>` instead of `Foo::<Bar>`.
|
||||
// Do we actually format expressions?
|
||||
|
|
@ -2414,7 +2396,7 @@ impl HirDisplayWithExpressionStore for Path {
|
|||
} else {
|
||||
write!(f, ", ")?;
|
||||
}
|
||||
write!(f, "{}", binding.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", binding.name.display(f.db, f.edition()))?;
|
||||
match &binding.type_ref {
|
||||
Some(ty) => {
|
||||
write!(f, " = ")?;
|
||||
|
|
@ -2461,7 +2443,7 @@ impl HirDisplayWithExpressionStore for hir_def::expr_store::path::GenericArg {
|
|||
match self {
|
||||
hir_def::expr_store::path::GenericArg::Type(ty) => ty.hir_fmt(f, store),
|
||||
hir_def::expr_store::path::GenericArg::Const(_c) => {
|
||||
// write!(f, "{}", c.display(f.db.upcast(), f.edition()))
|
||||
// write!(f, "{}", c.display(f.db, f.edition()))
|
||||
write!(f, "<expr>")
|
||||
}
|
||||
hir_def::expr_store::path::GenericArg::Lifetime(lifetime) => lifetime.hir_fmt(f, store),
|
||||
|
|
|
|||
|
|
@ -53,7 +53,7 @@ pub fn dyn_compatibility(
|
|||
db: &dyn HirDatabase,
|
||||
trait_: TraitId,
|
||||
) -> Option<DynCompatibilityViolation> {
|
||||
for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
|
||||
for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() {
|
||||
if db.dyn_compatibility_of_trait(super_trait).is_some() {
|
||||
return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait));
|
||||
}
|
||||
|
|
@ -70,7 +70,7 @@ pub fn dyn_compatibility_with_callback<F>(
|
|||
where
|
||||
F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
|
||||
{
|
||||
for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
|
||||
for super_trait in all_super_traits(db, trait_).into_iter().skip(1).rev() {
|
||||
if db.dyn_compatibility_of_trait(super_trait).is_some() {
|
||||
cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?;
|
||||
}
|
||||
|
|
@ -124,12 +124,12 @@ pub fn dyn_compatibility_of_trait_query(
|
|||
}
|
||||
|
||||
fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool {
|
||||
let krate = def.module(db.upcast()).krate();
|
||||
let krate = def.module(db).krate();
|
||||
let Some(sized) = db.lang_item(krate, LangItem::Sized).and_then(|l| l.as_trait()) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let Some(trait_self_param_idx) = trait_self_param_idx(db.upcast(), def) else {
|
||||
let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
|
|
@ -254,7 +254,7 @@ fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
|
|||
outer_binder: DebruijnIndex,
|
||||
allow_self_projection: AllowSelfProjection,
|
||||
) -> bool {
|
||||
let Some(trait_self_param_idx) = trait_self_param_idx(db.upcast(), def) else {
|
||||
let Some(trait_self_param_idx) = trait_self_param_idx(db, def) else {
|
||||
return false;
|
||||
};
|
||||
struct IllegalSelfTypeVisitor<'a> {
|
||||
|
|
@ -288,8 +288,7 @@ fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
|
|||
AllowSelfProjection::Yes => {
|
||||
let trait_ = proj.trait_(self.db);
|
||||
if self.super_traits.is_none() {
|
||||
self.super_traits =
|
||||
Some(all_super_traits(self.db.upcast(), self.trait_));
|
||||
self.super_traits = Some(all_super_traits(self.db, self.trait_));
|
||||
}
|
||||
if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) {
|
||||
ControlFlow::Continue(())
|
||||
|
|
@ -345,7 +344,7 @@ where
|
|||
})
|
||||
}
|
||||
AssocItemId::TypeAliasId(it) => {
|
||||
let def_map = db.crate_def_map(trait_.krate(db.upcast()));
|
||||
let def_map = db.crate_def_map(trait_.krate(db));
|
||||
if def_map.is_unstable_feature_enabled(&intern::sym::generic_associated_type_extended) {
|
||||
ControlFlow::Continue(())
|
||||
} else {
|
||||
|
|
@ -419,7 +418,7 @@ where
|
|||
}
|
||||
|
||||
let predicates = &*db.generic_predicates_without_parent(func.into());
|
||||
let trait_self_idx = trait_self_param_idx(db.upcast(), func.into());
|
||||
let trait_self_idx = trait_self_param_idx(db, func.into());
|
||||
for pred in predicates {
|
||||
let pred = pred.skip_binders().skip_binders();
|
||||
|
||||
|
|
@ -466,7 +465,7 @@ fn receiver_is_dispatchable(
|
|||
func: FunctionId,
|
||||
sig: &Binders<CallableSig>,
|
||||
) -> bool {
|
||||
let Some(trait_self_idx) = trait_self_param_idx(db.upcast(), func.into()) else {
|
||||
let Some(trait_self_idx) = trait_self_param_idx(db, func.into()) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
|
|
@ -484,14 +483,14 @@ fn receiver_is_dispatchable(
|
|||
return true;
|
||||
}
|
||||
|
||||
let placeholder_subst = generics(db.upcast(), func.into()).placeholder_subst(db);
|
||||
let placeholder_subst = generics(db, func.into()).placeholder_subst(db);
|
||||
|
||||
let substituted_sig = sig.clone().substitute(Interner, &placeholder_subst);
|
||||
let Some(receiver_ty) = substituted_sig.params().first() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let krate = func.module(db.upcast()).krate();
|
||||
let krate = func.module(db).krate();
|
||||
let traits = (
|
||||
db.lang_item(krate, LangItem::Unsize).and_then(|it| it.as_trait()),
|
||||
db.lang_item(krate, LangItem::DispatchFromDyn).and_then(|it| it.as_trait()),
|
||||
|
|
@ -550,8 +549,8 @@ fn receiver_is_dispatchable(
|
|||
}
|
||||
|
||||
fn receiver_for_self_ty(db: &dyn HirDatabase, func: FunctionId, ty: Ty) -> Option<Ty> {
|
||||
let generics = generics(db.upcast(), func.into());
|
||||
let trait_self_idx = trait_self_param_idx(db.upcast(), func.into())?;
|
||||
let generics = generics(db, func.into());
|
||||
let trait_self_idx = trait_self_param_idx(db, func.into())?;
|
||||
let subst = generics.placeholder_subst(db);
|
||||
let subst = Substitution::from_iter(
|
||||
Interner,
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
|
|||
/// The entry point of type inference.
|
||||
pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
|
||||
let _p = tracing::info_span!("infer_query").entered();
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let resolver = def.resolver(db);
|
||||
let body = db.body(def);
|
||||
let mut ctx = InferenceContext::new(db, def, &body, resolver);
|
||||
|
||||
|
|
@ -99,7 +99,7 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
|
|||
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_signature(s)),
|
||||
DefWithBodyId::VariantId(v) => {
|
||||
ctx.return_ty = TyBuilder::builtin(
|
||||
match db.enum_signature(v.lookup(db.upcast()).parent).variant_body_type() {
|
||||
match db.enum_signature(v.lookup(db).parent).variant_body_type() {
|
||||
hir_def::layout::IntegerType::Pointer(signed) => match signed {
|
||||
true => BuiltinType::Int(BuiltinInt::Isize),
|
||||
false => BuiltinType::Uint(BuiltinUint::Usize),
|
||||
|
|
@ -702,10 +702,10 @@ impl<'a> InferenceContext<'a> {
|
|||
DefWithBodyId::FunctionId(it) => it.into(),
|
||||
DefWithBodyId::StaticId(it) => it.into(),
|
||||
DefWithBodyId::ConstId(it) => it.into(),
|
||||
DefWithBodyId::VariantId(it) => it.lookup(db.upcast()).parent.into(),
|
||||
DefWithBodyId::VariantId(it) => it.lookup(db).parent.into(),
|
||||
},
|
||||
body,
|
||||
traits_in_scope: resolver.traits_in_scope(db.upcast()),
|
||||
traits_in_scope: resolver.traits_in_scope(db),
|
||||
resolver,
|
||||
diverges: Diverges::Maybe,
|
||||
breakables: Vec::new(),
|
||||
|
|
@ -721,7 +721,7 @@ impl<'a> InferenceContext<'a> {
|
|||
}
|
||||
|
||||
pub(crate) fn generics(&self) -> &Generics {
|
||||
self.generics.get_or_init(|| crate::generics::generics(self.db.upcast(), self.generic_def))
|
||||
self.generics.get_or_init(|| crate::generics::generics(self.db, self.generic_def))
|
||||
}
|
||||
|
||||
// FIXME: This function should be private in module. It is currently only used in the consteval, since we need
|
||||
|
|
@ -1470,7 +1470,7 @@ impl<'a> InferenceContext<'a> {
|
|||
) -> Ty {
|
||||
match assoc_ty {
|
||||
Some(res_assoc_ty) => {
|
||||
let trait_ = match res_assoc_ty.lookup(self.db.upcast()).container {
|
||||
let trait_ = match res_assoc_ty.lookup(self.db).container {
|
||||
hir_def::ItemContainerId::TraitId(trait_) => trait_,
|
||||
_ => panic!("resolve_associated_type called with non-associated type"),
|
||||
};
|
||||
|
|
@ -1523,7 +1523,7 @@ impl<'a> InferenceContext<'a> {
|
|||
ValueNs::EnumVariantId(var) => {
|
||||
let substs = path_ctx.substs_from_path(var.into(), true);
|
||||
drop(ctx);
|
||||
let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
|
||||
let ty = self.db.ty(var.lookup(self.db).parent.into());
|
||||
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
|
||||
return (ty, Some(var.into()));
|
||||
}
|
||||
|
|
@ -1566,12 +1566,12 @@ impl<'a> InferenceContext<'a> {
|
|||
TypeNs::EnumVariantId(var) => {
|
||||
let substs = path_ctx.substs_from_path(var.into(), true);
|
||||
drop(ctx);
|
||||
let ty = self.db.ty(var.lookup(self.db.upcast()).parent.into());
|
||||
let ty = self.db.ty(var.lookup(self.db).parent.into());
|
||||
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
|
||||
forbid_unresolved_segments((ty, Some(var.into())), unresolved)
|
||||
}
|
||||
TypeNs::SelfType(impl_id) => {
|
||||
let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
|
||||
let generics = crate::generics::generics(self.db, impl_id.into());
|
||||
let substs = generics.placeholder_subst(self.db);
|
||||
let mut ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
|
||||
|
||||
|
|
@ -1757,7 +1757,7 @@ impl<'a> InferenceContext<'a> {
|
|||
let ItemContainerId::TraitId(trait_) = self
|
||||
.resolve_lang_item(LangItem::IntoFutureIntoFuture)?
|
||||
.as_function()?
|
||||
.lookup(self.db.upcast())
|
||||
.lookup(self.db)
|
||||
.container
|
||||
else {
|
||||
return None;
|
||||
|
|
|
|||
|
|
@ -44,11 +44,7 @@ impl CastTy {
|
|||
return None;
|
||||
};
|
||||
let enum_data = table.db.enum_variants(id);
|
||||
if enum_data.is_payload_free(table.db.upcast()) {
|
||||
Some(Self::Int(Int::CEnum))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
if enum_data.is_payload_free(table.db) { Some(Self::Int(Int::CEnum)) } else { None }
|
||||
}
|
||||
TyKind::Raw(m, ty) => Some(Self::Ptr(ty.clone(), *m)),
|
||||
TyKind::Function(_) => Some(Self::FnPtr),
|
||||
|
|
@ -335,7 +335,7 @@ impl InferenceContext<'_> {
|
|||
// Search for a predicate like `<$self as FnX<Args>>::Output == Ret`
|
||||
|
||||
let fn_traits: SmallVec<[ChalkTraitId; 3]> =
|
||||
utils::fn_traits(self.db.upcast(), self.owner.module(self.db.upcast()).krate())
|
||||
utils::fn_traits(self.db, self.owner.module(self.db).krate())
|
||||
.map(to_chalk_trait_id)
|
||||
.collect();
|
||||
|
||||
|
|
@ -386,7 +386,7 @@ impl InferenceContext<'_> {
|
|||
projected_ty: &Ty,
|
||||
) -> Option<FnSubst<Interner>> {
|
||||
let container =
|
||||
from_assoc_type_id(projection_ty.associated_ty_id).lookup(self.db.upcast()).container;
|
||||
from_assoc_type_id(projection_ty.associated_ty_id).lookup(self.db).container;
|
||||
let trait_ = match container {
|
||||
hir_def::ItemContainerId::TraitId(trait_) => trait_,
|
||||
_ => return None,
|
||||
|
|
@ -567,7 +567,7 @@ impl HirPlace {
|
|||
|_, _, _| {
|
||||
unreachable!("Closure field only happens in MIR");
|
||||
},
|
||||
ctx.owner.module(ctx.db.upcast()).krate(),
|
||||
ctx.owner.module(ctx.db).krate(),
|
||||
);
|
||||
}
|
||||
ty
|
||||
|
|
@ -641,7 +641,7 @@ impl CapturedItem {
|
|||
match proj {
|
||||
ProjectionElem::Deref => {}
|
||||
ProjectionElem::Field(Either::Left(f)) => {
|
||||
let variant_data = f.parent.variant_data(db.upcast());
|
||||
let variant_data = f.parent.variant_data(db);
|
||||
match variant_data.shape {
|
||||
FieldsShape::Record => {
|
||||
result.push('_');
|
||||
|
|
@ -668,7 +668,7 @@ impl CapturedItem {
|
|||
}
|
||||
}
|
||||
}
|
||||
if is_raw_identifier(&result, owner.module(db.upcast()).krate().data(db).edition) {
|
||||
if is_raw_identifier(&result, owner.module(db).krate().data(db).edition) {
|
||||
result.insert_str(0, "r#");
|
||||
}
|
||||
result
|
||||
|
|
@ -676,20 +676,20 @@ impl CapturedItem {
|
|||
|
||||
pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
|
||||
let body = db.body(owner);
|
||||
let krate = owner.krate(db.upcast());
|
||||
let krate = owner.krate(db);
|
||||
let edition = krate.data(db).edition;
|
||||
let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string();
|
||||
let mut result = body[self.place.local].name.display(db, edition).to_string();
|
||||
for proj in &self.place.projections {
|
||||
match proj {
|
||||
// In source code autoderef kicks in.
|
||||
ProjectionElem::Deref => {}
|
||||
ProjectionElem::Field(Either::Left(f)) => {
|
||||
let variant_data = f.parent.variant_data(db.upcast());
|
||||
let variant_data = f.parent.variant_data(db);
|
||||
match variant_data.shape {
|
||||
FieldsShape::Record => format_to!(
|
||||
result,
|
||||
".{}",
|
||||
variant_data.fields()[f.local_id].name.display(db.upcast(), edition)
|
||||
variant_data.fields()[f.local_id].name.display(db, edition)
|
||||
),
|
||||
FieldsShape::Tuple => format_to!(
|
||||
result,
|
||||
|
|
@ -732,9 +732,9 @@ impl CapturedItem {
|
|||
|
||||
pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
|
||||
let body = db.body(owner);
|
||||
let krate = owner.krate(db.upcast());
|
||||
let krate = owner.krate(db);
|
||||
let edition = krate.data(db).edition;
|
||||
let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string();
|
||||
let mut result = body[self.place.local].name.display(db, edition).to_string();
|
||||
let mut field_need_paren = false;
|
||||
for proj in &self.place.projections {
|
||||
match proj {
|
||||
|
|
@ -746,7 +746,7 @@ impl CapturedItem {
|
|||
if field_need_paren {
|
||||
result = format!("({result})");
|
||||
}
|
||||
let variant_data = f.parent.variant_data(db.upcast());
|
||||
let variant_data = f.parent.variant_data(db);
|
||||
let field = match variant_data.shape {
|
||||
FieldsShape::Record => {
|
||||
variant_data.fields()[f.local_id].name.as_str().to_owned()
|
||||
|
|
@ -880,8 +880,8 @@ impl InferenceContext<'_> {
|
|||
return None;
|
||||
}
|
||||
let hygiene = self.body.expr_or_pat_path_hygiene(id);
|
||||
self.resolver.resolve_path_in_value_ns_fully(self.db.upcast(), path, hygiene).and_then(
|
||||
|result| match result {
|
||||
self.resolver.resolve_path_in_value_ns_fully(self.db, path, hygiene).and_then(|result| {
|
||||
match result {
|
||||
ValueNs::LocalBinding(binding) => {
|
||||
let mir_span = match id {
|
||||
ExprOrPatId::ExprId(id) => MirSpan::ExprId(id),
|
||||
|
|
@ -891,8 +891,8 @@ impl InferenceContext<'_> {
|
|||
Some(HirPlace { local: binding, projections: Vec::new() })
|
||||
}
|
||||
_ => None,
|
||||
},
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
|
||||
|
|
@ -901,7 +901,7 @@ impl InferenceContext<'_> {
|
|||
match &self.body[tgt_expr] {
|
||||
Expr::Path(p) => {
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
|
||||
let result = self.path_place(p, tgt_expr.into());
|
||||
self.resolver.reset_to_guard(resolver_guard);
|
||||
return result;
|
||||
|
|
@ -1263,7 +1263,7 @@ impl InferenceContext<'_> {
|
|||
&Expr::Assignment { target, value } => {
|
||||
self.walk_expr(value);
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
|
||||
match self.place_of_expr(value) {
|
||||
Some(rhs_place) => {
|
||||
self.inside_assignment = true;
|
||||
|
|
@ -1322,7 +1322,7 @@ impl InferenceContext<'_> {
|
|||
| Pat::Or(_) => (),
|
||||
Pat::TupleStruct { .. } | Pat::Record { .. } => {
|
||||
if let Some(variant) = self.result.variant_resolution_for_pat(p) {
|
||||
let adt = variant.adt_id(self.db.upcast());
|
||||
let adt = variant.adt_id(self.db);
|
||||
let is_multivariant = match adt {
|
||||
hir_def::AdtId::EnumId(e) => self.db.enum_variants(e).variants.len() != 1,
|
||||
_ => false,
|
||||
|
|
@ -1413,7 +1413,7 @@ impl InferenceContext<'_> {
|
|||
|_, _, _| {
|
||||
unreachable!("Closure field only happens in MIR");
|
||||
},
|
||||
self.owner.module(self.db.upcast()).krate(),
|
||||
self.owner.module(self.db).krate(),
|
||||
);
|
||||
if ty.as_raw_ptr().is_some() || ty.is_union() {
|
||||
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
|
||||
|
|
@ -197,11 +197,7 @@ impl InferenceContext<'_> {
|
|||
Expr::Path(Path::Normal(path)) => path.type_anchor.is_none(),
|
||||
Expr::Path(path) => self
|
||||
.resolver
|
||||
.resolve_path_in_value_ns_fully(
|
||||
self.db.upcast(),
|
||||
path,
|
||||
self.body.expr_path_hygiene(expr),
|
||||
)
|
||||
.resolve_path_in_value_ns_fully(self.db, path, self.body.expr_path_hygiene(expr))
|
||||
.is_none_or(|res| matches!(res, ValueNs::LocalBinding(_) | ValueNs::StaticId(_))),
|
||||
Expr::Underscore => true,
|
||||
Expr::UnaryOp { op: UnaryOp::Deref, .. } => true,
|
||||
|
|
@ -543,16 +539,15 @@ impl InferenceContext<'_> {
|
|||
_ if fields.is_empty() => {}
|
||||
Some(def) => {
|
||||
let field_types = self.db.field_types(def);
|
||||
let variant_data = def.variant_data(self.db.upcast());
|
||||
let variant_data = def.variant_data(self.db);
|
||||
let visibilities = self.db.field_visibilities(def);
|
||||
for field in fields.iter() {
|
||||
let field_def = {
|
||||
match variant_data.field(&field.name) {
|
||||
Some(local_id) => {
|
||||
if !visibilities[local_id].is_visible_from(
|
||||
self.db.upcast(),
|
||||
self.resolver.module(),
|
||||
) {
|
||||
if !visibilities[local_id]
|
||||
.is_visible_from(self.db, self.resolver.module())
|
||||
{
|
||||
self.push_diagnostic(
|
||||
InferenceDiagnostic::NoSuchField {
|
||||
field: field.expr.into(),
|
||||
|
|
@ -744,7 +739,7 @@ impl InferenceContext<'_> {
|
|||
} else {
|
||||
let rhs_ty = self.infer_expr(value, &Expectation::none(), ExprIsRead::Yes);
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, tgt_expr);
|
||||
self.inside_assignment = true;
|
||||
self.infer_top_pat(target, &rhs_ty, None);
|
||||
self.inside_assignment = false;
|
||||
|
|
@ -1030,7 +1025,7 @@ impl InferenceContext<'_> {
|
|||
}
|
||||
|
||||
fn infer_expr_path(&mut self, path: &Path, id: ExprOrPatId, scope_id: ExprId) -> Ty {
|
||||
let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, scope_id);
|
||||
let g = self.resolver.update_to_inner_scope(self.db, self.owner, scope_id);
|
||||
let ty = match self.infer_path(path, id) {
|
||||
Some(ty) => ty,
|
||||
None => {
|
||||
|
|
@ -1403,7 +1398,7 @@ impl InferenceContext<'_> {
|
|||
expected: &Expectation,
|
||||
) -> Ty {
|
||||
let coerce_ty = expected.coercion_target_type(&mut self.table);
|
||||
let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
|
||||
let g = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
|
||||
let prev_env = block_id.map(|block_id| {
|
||||
let prev_env = self.table.trait_env.clone();
|
||||
TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
|
||||
|
|
@ -1576,7 +1571,7 @@ impl InferenceContext<'_> {
|
|||
_ => return None,
|
||||
};
|
||||
let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
|
||||
.is_visible_from(self.db.upcast(), self.resolver.module());
|
||||
.is_visible_from(self.db, self.resolver.module());
|
||||
if !is_visible {
|
||||
if private_field.is_none() {
|
||||
private_field = Some((field_id, parameters));
|
||||
|
|
@ -1663,7 +1658,7 @@ impl InferenceContext<'_> {
|
|||
match resolved {
|
||||
Some((adjust, func, _)) => {
|
||||
let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
|
||||
let generics = generics(self.db.upcast(), func.into());
|
||||
let generics = generics(self.db, func.into());
|
||||
let substs = self.substs_for_method_call(generics, None);
|
||||
self.write_expr_adj(receiver, adjustments);
|
||||
self.write_method_resolution(tgt_expr, func, substs.clone());
|
||||
|
|
@ -1816,7 +1811,7 @@ impl InferenceContext<'_> {
|
|||
let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
|
||||
self.write_expr_adj(receiver, adjustments);
|
||||
|
||||
let generics = generics(self.db.upcast(), func.into());
|
||||
let generics = generics(self.db, func.into());
|
||||
let substs = self.substs_for_method_call(generics, generic_args);
|
||||
self.write_method_resolution(tgt_expr, func, substs.clone());
|
||||
self.check_method_call(
|
||||
|
|
@ -1867,7 +1862,7 @@ impl InferenceContext<'_> {
|
|||
|
||||
let recovered = match assoc_func_with_same_name {
|
||||
Some(f) => {
|
||||
let generics = generics(self.db.upcast(), f.into());
|
||||
let generics = generics(self.db, f.into());
|
||||
let substs = self.substs_for_method_call(generics, generic_args);
|
||||
let f = self
|
||||
.db
|
||||
|
|
@ -2136,7 +2131,7 @@ impl InferenceContext<'_> {
|
|||
if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
|
||||
let def: CallableDefId = from_chalk(self.db, *fn_def);
|
||||
let generic_predicates =
|
||||
self.db.generic_predicates(GenericDefId::from_callable(self.db.upcast(), def));
|
||||
self.db.generic_predicates(GenericDefId::from_callable(self.db, def));
|
||||
for predicate in generic_predicates.iter() {
|
||||
let (predicate, binders) = predicate
|
||||
.clone()
|
||||
|
|
@ -2148,10 +2143,10 @@ impl InferenceContext<'_> {
|
|||
// add obligation for trait implementation, if this is a trait method
|
||||
match def {
|
||||
CallableDefId::FunctionId(f) => {
|
||||
if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
|
||||
if let ItemContainerId::TraitId(trait_) = f.lookup(self.db).container {
|
||||
// construct a TraitRef
|
||||
let params_len = parameters.len(Interner);
|
||||
let trait_params_len = generics(self.db.upcast(), trait_.into()).len();
|
||||
let trait_params_len = generics(self.db, trait_.into()).len();
|
||||
let substs = Substitution::from_iter(
|
||||
Interner,
|
||||
// The generic parameters for the trait come after those for the
|
||||
|
|
@ -38,7 +38,7 @@ impl InferenceContext<'_> {
|
|||
decl: Option<DeclContext>,
|
||||
) -> Ty {
|
||||
let (ty, def) = self.resolve_variant(id.into(), path, true);
|
||||
let var_data = def.map(|it| it.variant_data(self.db.upcast()));
|
||||
let var_data = def.map(|it| it.variant_data(self.db));
|
||||
if let Some(variant) = def {
|
||||
self.write_variant_resolution(id.into(), variant);
|
||||
}
|
||||
|
|
@ -60,7 +60,7 @@ impl InferenceContext<'_> {
|
|||
_ if subs.is_empty() => {}
|
||||
Some(def) => {
|
||||
let field_types = self.db.field_types(def);
|
||||
let variant_data = def.variant_data(self.db.upcast());
|
||||
let variant_data = def.variant_data(self.db);
|
||||
let visibilities = self.db.field_visibilities(def);
|
||||
|
||||
let (pre, post) = match ellipsis {
|
||||
|
|
@ -79,7 +79,7 @@ impl InferenceContext<'_> {
|
|||
match variant_data.field(&Name::new_tuple_field(i)) {
|
||||
Some(local_id) => {
|
||||
if !visibilities[local_id]
|
||||
.is_visible_from(self.db.upcast(), self.resolver.module())
|
||||
.is_visible_from(self.db, self.resolver.module())
|
||||
{
|
||||
// FIXME(DIAGNOSE): private tuple field
|
||||
}
|
||||
|
|
@ -129,7 +129,7 @@ impl InferenceContext<'_> {
|
|||
_ if subs.len() == 0 => {}
|
||||
Some(def) => {
|
||||
let field_types = self.db.field_types(def);
|
||||
let variant_data = def.variant_data(self.db.upcast());
|
||||
let variant_data = def.variant_data(self.db);
|
||||
let visibilities = self.db.field_visibilities(def);
|
||||
|
||||
let substs = ty.as_adt().map(TupleExt::tail);
|
||||
|
|
@ -139,7 +139,7 @@ impl InferenceContext<'_> {
|
|||
match variant_data.field(&name) {
|
||||
Some(local_id) => {
|
||||
if !visibilities[local_id]
|
||||
.is_visible_from(self.db.upcast(), self.resolver.module())
|
||||
.is_visible_from(self.db, self.resolver.module())
|
||||
{
|
||||
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
|
||||
field: inner.into(),
|
||||
|
|
@ -594,8 +594,7 @@ impl InferenceContext<'_> {
|
|||
}
|
||||
|
||||
let len = before.len() + suffix.len();
|
||||
let size =
|
||||
consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db.upcast()));
|
||||
let size = consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db));
|
||||
|
||||
let elem_ty = self.table.new_type_var();
|
||||
let array_ty = TyKind::Array(elem_ty.clone(), size).intern(Interner);
|
||||
|
|
@ -67,7 +67,7 @@ impl InferenceContext<'_> {
|
|||
};
|
||||
}
|
||||
ValueNs::ImplSelf(impl_id) => {
|
||||
let generics = crate::generics::generics(self.db.upcast(), impl_id.into());
|
||||
let generics = crate::generics::generics(self.db, impl_id.into());
|
||||
let substs = generics.placeholder_subst(self.db);
|
||||
let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
|
||||
return if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() {
|
||||
|
|
@ -128,7 +128,7 @@ impl InferenceContext<'_> {
|
|||
}
|
||||
|
||||
let parent_substs = self_subst.or_else(|| {
|
||||
let generics = generics(self.db.upcast(), generic_def);
|
||||
let generics = generics(self.db, generic_def);
|
||||
let parent_params_len = generics.parent_generics()?.len();
|
||||
let parent_args = &substs[substs.len() - parent_params_len..];
|
||||
Some(Substitution::from_iter(Interner, parent_args))
|
||||
|
|
@ -255,13 +255,13 @@ impl InferenceContext<'_> {
|
|||
|
||||
// We need to add `Self: Trait` obligation when `def` is a trait assoc item.
|
||||
let container = match def {
|
||||
GenericDefId::FunctionId(id) => id.lookup(self.db.upcast()).container,
|
||||
GenericDefId::ConstId(id) => id.lookup(self.db.upcast()).container,
|
||||
GenericDefId::FunctionId(id) => id.lookup(self.db).container,
|
||||
GenericDefId::ConstId(id) => id.lookup(self.db).container,
|
||||
_ => return,
|
||||
};
|
||||
|
||||
if let ItemContainerId::TraitId(trait_) = container {
|
||||
let param_len = generics(self.db.upcast(), def).len_self();
|
||||
let param_len = generics(self.db, def).len_self();
|
||||
let parent_subst =
|
||||
Substitution::from_iter(Interner, subst.iter(Interner).skip(param_len));
|
||||
let trait_ref =
|
||||
|
|
@ -351,10 +351,8 @@ impl InferenceContext<'_> {
|
|||
let (item, visible) = res?;
|
||||
|
||||
let (def, container) = match item {
|
||||
AssocItemId::FunctionId(f) => {
|
||||
(ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
|
||||
}
|
||||
AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container),
|
||||
AssocItemId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container),
|
||||
AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container),
|
||||
AssocItemId::TypeAliasId(_) => unreachable!(),
|
||||
};
|
||||
let substs = match container {
|
||||
|
|
@ -139,7 +139,7 @@ impl UninhabitedFrom<'_> {
|
|||
ty: &Binders<Ty>,
|
||||
subst: &Substitution,
|
||||
) -> ControlFlow<VisiblyUninhabited> {
|
||||
if vis.is_none_or(|it| it.is_visible_from(self.db.upcast(), self.target_mod)) {
|
||||
if vis.is_none_or(|it| it.is_visible_from(self.db, self.target_mod)) {
|
||||
let ty = ty.clone().substitute(Interner, subst);
|
||||
ty.visit_with(self, DebruijnIndex::INNERMOST)
|
||||
} else {
|
||||
|
|
@ -301,7 +301,7 @@ impl Hash for ConstScalar {
|
|||
|
||||
/// Return an index of a parameter in the generic type parameter list by its id.
|
||||
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
|
||||
generics::generics(db.upcast(), id.parent).type_or_const_param_idx(id)
|
||||
generics::generics(db, id.parent).type_or_const_param_idx(id)
|
||||
}
|
||||
|
||||
pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
|
||||
|
|
@ -243,7 +243,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
}
|
||||
|
||||
fn generics(&self) -> &Generics {
|
||||
self.generics.get_or_init(|| generics(self.db.upcast(), self.def))
|
||||
self.generics.get_or_init(|| generics(self.db, self.def))
|
||||
}
|
||||
|
||||
pub fn lower_ty_ext(&mut self, type_ref_id: TypeRefId) -> (Ty, Option<TypeNs>) {
|
||||
|
|
@ -356,8 +356,7 @@ impl<'a> TyLoweringContext<'a> {
|
|||
|a| ImplTraitId::TypeAliasImplTrait(a, idx),
|
||||
);
|
||||
let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
|
||||
let generics =
|
||||
generics(self.db.upcast(), origin.either(|f| f.into(), |a| a.into()));
|
||||
let generics = generics(self.db, origin.either(|f| f.into(), |a| a.into()));
|
||||
let parameters = generics.bound_vars_subst(self.db, self.in_binders);
|
||||
TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
|
||||
}
|
||||
|
|
@ -771,7 +770,7 @@ fn named_associated_type_shorthand_candidates<R>(
|
|||
// FIXME: A `HirDatabase` query may be handy if this process is needed in more
|
||||
// places. It'd be almost identical as `impl_trait_query` where `resolver` would be
|
||||
// of `def` instead of `impl_id`.
|
||||
let starting_idx = generics(db.upcast(), def).len_self();
|
||||
let starting_idx = generics(db, def).len_self();
|
||||
let subst = TyBuilder::subst_for_def(db, impl_id, None)
|
||||
.fill_with_bound_vars(DebruijnIndex::INNERMOST, starting_idx)
|
||||
.build();
|
||||
|
|
@ -797,9 +796,9 @@ fn named_associated_type_shorthand_candidates<R>(
|
|||
}
|
||||
// Handle `Self::Type` referring to own associated type in trait definitions
|
||||
if let GenericDefId::TraitId(trait_id) = param_id.parent() {
|
||||
let trait_generics = generics(db.upcast(), trait_id.into());
|
||||
let trait_generics = generics(db, trait_id.into());
|
||||
if trait_generics[param_id.local_id()].is_trait_self() {
|
||||
let def_generics = generics(db.upcast(), def);
|
||||
let def_generics = generics(db, def);
|
||||
let starting_idx = match def {
|
||||
GenericDefId::TraitId(_) => 0,
|
||||
// `def` is an item within trait. We need to substitute `BoundVar`s but
|
||||
|
|
@ -839,13 +838,11 @@ pub(crate) fn field_types_with_diagnostics_query(
|
|||
) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics) {
|
||||
let var_data = db.variant_fields(variant_id);
|
||||
let (resolver, def): (_, GenericDefId) = match variant_id {
|
||||
VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
|
||||
VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
|
||||
VariantId::EnumVariantId(it) => {
|
||||
(it.resolver(db.upcast()), it.lookup(db.upcast()).parent.into())
|
||||
}
|
||||
VariantId::StructId(it) => (it.resolver(db), it.into()),
|
||||
VariantId::UnionId(it) => (it.resolver(db), it.into()),
|
||||
VariantId::EnumVariantId(it) => (it.resolver(db), it.lookup(db).parent.into()),
|
||||
};
|
||||
let generics = generics(db.upcast(), def);
|
||||
let generics = generics(db, def);
|
||||
let mut res = ArenaMap::default();
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &var_data.store, def)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
|
|
@ -869,8 +866,8 @@ pub(crate) fn generic_predicates_for_param_query(
|
|||
param_id: TypeOrConstParamId,
|
||||
assoc_name: Option<Name>,
|
||||
) -> GenericPredicates {
|
||||
let generics = generics(db.upcast(), def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let generics = generics(db, def);
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, generics.store(), def)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
|
||||
|
|
@ -903,12 +900,12 @@ pub(crate) fn generic_predicates_for_param_query(
|
|||
|
||||
let Some(assoc_name) = &assoc_name else { return true };
|
||||
let Some(TypeNs::TraitId(tr)) =
|
||||
resolver.resolve_path_in_type_ns_fully(db.upcast(), path)
|
||||
resolver.resolve_path_in_type_ns_fully(db, path)
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
all_super_traits(db.upcast(), tr).iter().any(|tr| {
|
||||
all_super_traits(db, tr).iter().any(|tr| {
|
||||
db.trait_items(*tr).items.iter().any(|(name, item)| {
|
||||
matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
|
||||
})
|
||||
|
|
@ -968,8 +965,8 @@ pub(crate) fn trait_environment_for_body_query(
|
|||
db: &dyn HirDatabase,
|
||||
def: DefWithBodyId,
|
||||
) -> Arc<TraitEnvironment> {
|
||||
let Some(def) = def.as_generic_def_id(db.upcast()) else {
|
||||
let krate = def.module(db.upcast()).krate();
|
||||
let Some(def) = def.as_generic_def_id(db) else {
|
||||
let krate = def.module(db).krate();
|
||||
return TraitEnvironment::empty(krate);
|
||||
};
|
||||
db.trait_environment(def)
|
||||
|
|
@ -979,8 +976,8 @@ pub(crate) fn trait_environment_query(
|
|||
db: &dyn HirDatabase,
|
||||
def: GenericDefId,
|
||||
) -> Arc<TraitEnvironment> {
|
||||
let generics = generics(db.upcast(), def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let generics = generics(db, def);
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, generics.store(), def)
|
||||
.with_type_param_mode(ParamLoweringMode::Placeholder);
|
||||
let mut traits_in_scope = Vec::new();
|
||||
|
|
@ -1001,7 +998,7 @@ pub(crate) fn trait_environment_query(
|
|||
}
|
||||
}
|
||||
|
||||
if let Some(trait_id) = def.assoc_trait_container(db.upcast()) {
|
||||
if let Some(trait_id) = def.assoc_trait_container(db) {
|
||||
// add `Self: Trait<T1, T2, ...>` to the environment in trait
|
||||
// function default implementations (and speculative code
|
||||
// inside consts or type aliases)
|
||||
|
|
@ -1076,8 +1073,8 @@ fn generic_predicates_filtered_by<F>(
|
|||
where
|
||||
F: Fn(&WherePredicate, GenericDefId) -> bool,
|
||||
{
|
||||
let generics = generics(db.upcast(), def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let generics = generics(db, def);
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, generics.store(), def)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
|
||||
|
|
@ -1128,7 +1125,7 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
|
|||
.lang_item(resolver.krate(), LangItem::Sized)
|
||||
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id))?;
|
||||
|
||||
let trait_self_idx = trait_self_param_idx(db.upcast(), def);
|
||||
let trait_self_idx = trait_self_param_idx(db, def);
|
||||
|
||||
Some(
|
||||
substitution
|
||||
|
|
@ -1172,11 +1169,11 @@ pub(crate) fn generic_defaults_with_diagnostics_query(
|
|||
db: &dyn HirDatabase,
|
||||
def: GenericDefId,
|
||||
) -> (GenericDefaults, Diagnostics) {
|
||||
let generic_params = generics(db.upcast(), def);
|
||||
let generic_params = generics(db, def);
|
||||
if generic_params.len() == 0 {
|
||||
return (GenericDefaults(None), None);
|
||||
}
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let resolver = def.resolver(db);
|
||||
let parent_start_idx = generic_params.len_self();
|
||||
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, generic_params.store(), def)
|
||||
|
|
@ -1249,7 +1246,7 @@ pub(crate) fn generic_defaults_with_diagnostics_recover(
|
|||
_cycle: &Cycle,
|
||||
def: GenericDefId,
|
||||
) -> (GenericDefaults, Diagnostics) {
|
||||
let generic_params = generics(db.upcast(), def);
|
||||
let generic_params = generics(db, def);
|
||||
if generic_params.len() == 0 {
|
||||
return (GenericDefaults(None), None);
|
||||
}
|
||||
|
|
@ -1268,7 +1265,7 @@ pub(crate) fn generic_defaults_with_diagnostics_recover(
|
|||
|
||||
fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
|
||||
let data = db.function_signature(def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx_params = TyLoweringContext::new(db, &resolver, &data.store, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let params = data.params.iter().map(|&tr| ctx_params.lower_ty(tr));
|
||||
|
|
@ -1282,7 +1279,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
|
|||
}
|
||||
None => TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner),
|
||||
};
|
||||
let generics = generics(db.upcast(), def.into());
|
||||
let generics = generics(db, def.into());
|
||||
let sig = CallableSig::from_params_and_return(
|
||||
params,
|
||||
ret,
|
||||
|
|
@ -1296,7 +1293,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
|
|||
/// Build the declared type of a function. This should not need to look at the
|
||||
/// function body.
|
||||
fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
|
||||
let generics = generics(db.upcast(), def.into());
|
||||
let generics = generics(db, def.into());
|
||||
let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
|
||||
make_binders(
|
||||
db,
|
||||
|
|
@ -1308,8 +1305,8 @@ fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
|
|||
/// Build the declared type of a const.
|
||||
fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
|
||||
let data = db.const_signature(def);
|
||||
let generics = generics(db.upcast(), def.into());
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let generics = generics(db, def.into());
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &data.store, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
|
||||
|
|
@ -1319,7 +1316,7 @@ fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
|
|||
/// Build the declared type of a static.
|
||||
fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
|
||||
let data = db.static_signature(def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &data.store, def.into());
|
||||
|
||||
Binders::empty(Interner, ctx.lower_ty(data.type_ref))
|
||||
|
|
@ -1328,7 +1325,7 @@ fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
|
|||
fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
|
||||
let struct_data = db.variant_fields(def.into());
|
||||
let fields = struct_data.fields();
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &struct_data.store, def.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let params = fields.iter().map(|(_, field)| ctx.lower_ty(field.type_ref));
|
||||
|
|
@ -1346,7 +1343,7 @@ fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option<Bi
|
|||
FieldsShape::Record => None,
|
||||
FieldsShape::Unit => Some(type_for_adt(db, def.into())),
|
||||
FieldsShape::Tuple => {
|
||||
let generics = generics(db.upcast(), AdtId::from(def).into());
|
||||
let generics = generics(db, AdtId::from(def).into());
|
||||
let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
|
||||
Some(make_binders(
|
||||
db,
|
||||
|
|
@ -1360,8 +1357,8 @@ fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Option<Bi
|
|||
fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
|
||||
let var_data = db.variant_fields(def.into());
|
||||
let fields = var_data.fields();
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let parent = def.lookup(db.upcast()).parent;
|
||||
let resolver = def.resolver(db);
|
||||
let parent = def.lookup(db).parent;
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &var_data.store, parent.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let params = fields.iter().map(|(_, field)| ctx.lower_ty(field.type_ref));
|
||||
|
|
@ -1377,12 +1374,12 @@ fn type_for_enum_variant_constructor(
|
|||
db: &dyn HirDatabase,
|
||||
def: EnumVariantId,
|
||||
) -> Option<Binders<Ty>> {
|
||||
let e = def.lookup(db.upcast()).parent;
|
||||
let e = def.lookup(db).parent;
|
||||
match db.variant_fields(def.into()).shape {
|
||||
FieldsShape::Record => None,
|
||||
FieldsShape::Unit => Some(type_for_adt(db, e.into())),
|
||||
FieldsShape::Tuple => {
|
||||
let generics = generics(db.upcast(), e.into());
|
||||
let generics = generics(db, e.into());
|
||||
let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
|
||||
Some(make_binders(
|
||||
db,
|
||||
|
|
@ -1404,12 +1401,12 @@ pub(crate) fn type_for_adt_recovery(
|
|||
_cycle: &salsa::Cycle,
|
||||
adt: AdtId,
|
||||
) -> Binders<Ty> {
|
||||
let generics = generics(db.upcast(), adt.into());
|
||||
let generics = generics(db, adt.into());
|
||||
make_binders(db, &generics, TyKind::Error.intern(Interner))
|
||||
}
|
||||
|
||||
fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
|
||||
let generics = generics(db.upcast(), adt.into());
|
||||
let generics = generics(db, adt.into());
|
||||
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
|
||||
let ty = TyKind::Adt(crate::AdtId(adt), subst).intern(Interner);
|
||||
make_binders(db, &generics, ty)
|
||||
|
|
@ -1419,13 +1416,13 @@ pub(crate) fn type_for_type_alias_with_diagnostics_query(
|
|||
db: &dyn HirDatabase,
|
||||
t: TypeAliasId,
|
||||
) -> (Binders<Ty>, Diagnostics) {
|
||||
let generics = generics(db.upcast(), t.into());
|
||||
let generics = generics(db, t.into());
|
||||
let type_alias_data = db.type_alias_signature(t);
|
||||
let mut diags = None;
|
||||
let inner = if type_alias_data.flags.contains(TypeAliasFlags::IS_EXTERN) {
|
||||
TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner)
|
||||
} else {
|
||||
let resolver = t.resolver(db.upcast());
|
||||
let resolver = t.resolver(db);
|
||||
let alias = db.type_alias_signature(t);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &alias.store, t.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
|
||||
|
|
@ -1446,7 +1443,7 @@ pub(crate) fn type_for_type_alias_with_diagnostics_query_recover(
|
|||
_cycle: &salsa::Cycle,
|
||||
adt: TypeAliasId,
|
||||
) -> (Binders<Ty>, Diagnostics) {
|
||||
let generics = generics(db.upcast(), adt.into());
|
||||
let generics = generics(db, adt.into());
|
||||
(make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
|
||||
}
|
||||
|
||||
|
|
@ -1475,7 +1472,7 @@ impl ValueTyDefId {
|
|||
Self::FunctionId(id) => id.into(),
|
||||
Self::StructId(id) => id.into(),
|
||||
Self::UnionId(id) => id.into(),
|
||||
Self::EnumVariantId(var) => var.lookup(db.upcast()).parent.into(),
|
||||
Self::EnumVariantId(var) => var.lookup(db).parent.into(),
|
||||
Self::ConstId(id) => id.into(),
|
||||
Self::StaticId(id) => id.into(),
|
||||
}
|
||||
|
|
@ -1514,8 +1511,8 @@ pub(crate) fn impl_self_ty_with_diagnostics_query(
|
|||
impl_id: ImplId,
|
||||
) -> (Binders<Ty>, Diagnostics) {
|
||||
let impl_data = db.impl_signature(impl_id);
|
||||
let resolver = impl_id.resolver(db.upcast());
|
||||
let generics = generics(db.upcast(), impl_id.into());
|
||||
let resolver = impl_id.resolver(db);
|
||||
let generics = generics(db, impl_id.into());
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &impl_data.store, impl_id.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
(
|
||||
|
|
@ -1535,7 +1532,7 @@ pub(crate) fn const_param_ty_with_diagnostics_query(
|
|||
) -> (Ty, Diagnostics) {
|
||||
let (parent_data, store) = db.generic_params_and_store(def.parent());
|
||||
let data = &parent_data[def.local_id()];
|
||||
let resolver = def.parent().resolver(db.upcast());
|
||||
let resolver = def.parent().resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &store, def.parent());
|
||||
let ty = match data {
|
||||
TypeOrConstParamData::TypeParamData(_) => {
|
||||
|
|
@ -1552,7 +1549,7 @@ pub(crate) fn impl_self_ty_with_diagnostics_recover(
|
|||
_cycle: &salsa::Cycle,
|
||||
impl_id: ImplId,
|
||||
) -> (Binders<Ty>, Diagnostics) {
|
||||
let generics = generics(db.upcast(), (impl_id).into());
|
||||
let generics = generics(db, (impl_id).into());
|
||||
(make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
|
||||
}
|
||||
|
||||
|
|
@ -1565,7 +1562,7 @@ pub(crate) fn impl_trait_with_diagnostics_query(
|
|||
impl_id: ImplId,
|
||||
) -> Option<(Binders<TraitRef>, Diagnostics)> {
|
||||
let impl_data = db.impl_signature(impl_id);
|
||||
let resolver = impl_id.resolver(db.upcast());
|
||||
let resolver = impl_id.resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &impl_data.store, impl_id.into())
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
|
||||
|
|
@ -1580,14 +1577,14 @@ pub(crate) fn return_type_impl_traits(
|
|||
) -> Option<Arc<Binders<ImplTraits>>> {
|
||||
// FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
|
||||
let data = db.function_signature(def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx_ret = TyLoweringContext::new(db, &resolver, &data.store, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
if let Some(ret_type) = data.ret_type {
|
||||
let _ret = ctx_ret.lower_ty(ret_type);
|
||||
}
|
||||
let generics = generics(db.upcast(), def.into());
|
||||
let generics = generics(db, def.into());
|
||||
let return_type_impl_traits =
|
||||
ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data };
|
||||
if return_type_impl_traits.impl_traits.is_empty() {
|
||||
|
|
@ -1602,7 +1599,7 @@ pub(crate) fn type_alias_impl_traits(
|
|||
def: hir_def::TypeAliasId,
|
||||
) -> Option<Arc<Binders<ImplTraits>>> {
|
||||
let data = db.type_alias_signature(def);
|
||||
let resolver = def.resolver(db.upcast());
|
||||
let resolver = def.resolver(db);
|
||||
let mut ctx = TyLoweringContext::new(db, &resolver, &data.store, def.into())
|
||||
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
|
||||
.with_type_param_mode(ParamLoweringMode::Variable);
|
||||
|
|
@ -1613,7 +1610,7 @@ pub(crate) fn type_alias_impl_traits(
|
|||
if type_alias_impl_traits.impl_traits.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let generics = generics(db.upcast(), def.into());
|
||||
let generics = generics(db, def.into());
|
||||
Some(Arc::new(make_binders(db, &generics, type_alias_impl_traits)))
|
||||
}
|
||||
}
|
||||
|
|
@ -185,7 +185,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
None,
|
||||
);
|
||||
let len_self =
|
||||
generics(self.ctx.db.upcast(), associated_ty.into()).len_self();
|
||||
generics(self.ctx.db, associated_ty.into()).len_self();
|
||||
let substitution = Substitution::from_iter(
|
||||
Interner,
|
||||
substitution
|
||||
|
|
@ -265,7 +265,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
}
|
||||
}
|
||||
TypeNs::AdtSelfType(adt) => {
|
||||
let generics = generics(self.ctx.db.upcast(), adt.into());
|
||||
let generics = generics(self.ctx.db, adt.into());
|
||||
let substs = match self.ctx.type_param_mode {
|
||||
ParamLoweringMode::Placeholder => generics.placeholder_subst(self.ctx.db),
|
||||
ParamLoweringMode::Variable => {
|
||||
|
|
@ -327,10 +327,8 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
}
|
||||
|
||||
pub(crate) fn resolve_path_in_type_ns(&mut self) -> Option<(TypeNs, Option<usize>)> {
|
||||
let (resolution, remaining_index, _, prefix_info) = self
|
||||
.ctx
|
||||
.resolver
|
||||
.resolve_path_in_type_ns_with_prefix_info(self.ctx.db.upcast(), self.path)?;
|
||||
let (resolution, remaining_index, _, prefix_info) =
|
||||
self.ctx.resolver.resolve_path_in_type_ns_with_prefix_info(self.ctx.db, self.path)?;
|
||||
|
||||
let segments = self.segments;
|
||||
if segments.is_empty() || matches!(self.path, Path::LangItem(..)) {
|
||||
|
|
@ -385,7 +383,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
hygiene_id: HygieneId,
|
||||
) -> Option<ResolveValueResult> {
|
||||
let (res, prefix_info) = self.ctx.resolver.resolve_path_in_value_ns_with_prefix_info(
|
||||
self.ctx.db.upcast(),
|
||||
self.ctx.db,
|
||||
self.path,
|
||||
hygiene_id,
|
||||
)?;
|
||||
|
|
@ -510,8 +508,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
let substs = self.substs_from_path_segment(associated_ty.into(), false, None);
|
||||
|
||||
let len_self =
|
||||
crate::generics::generics(self.ctx.db.upcast(), associated_ty.into())
|
||||
.len_self();
|
||||
crate::generics::generics(self.ctx.db, associated_ty.into()).len_self();
|
||||
|
||||
let substs = Substitution::from_iter(
|
||||
Interner,
|
||||
|
|
@ -583,7 +580,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
self.current_or_prev_segment = penultimate;
|
||||
}
|
||||
}
|
||||
var.lookup(self.ctx.db.upcast()).parent.into()
|
||||
var.lookup(self.ctx.db).parent.into()
|
||||
}
|
||||
};
|
||||
let result = self.substs_from_path_segment(generic_def, infer_args, None);
|
||||
|
|
@ -639,7 +636,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
// - Lifetime parameters
|
||||
// - Type or Const parameters
|
||||
// - Parent parameters
|
||||
let def_generics = generics(self.ctx.db.upcast(), def);
|
||||
let def_generics = generics(self.ctx.db, def);
|
||||
let (
|
||||
parent_params,
|
||||
self_param,
|
||||
|
|
@ -742,7 +739,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
// ignore them.
|
||||
let is_assoc_ty = || match def {
|
||||
GenericDefId::TypeAliasId(id) => {
|
||||
matches!(id.lookup(self.ctx.db.upcast()).container, ItemContainerId::TraitId(_))
|
||||
matches!(id.lookup(self.ctx.db).container, ItemContainerId::TraitId(_))
|
||||
}
|
||||
_ => false,
|
||||
};
|
||||
|
|
@ -816,7 +813,7 @@ impl<'a, 'b> PathLoweringContext<'a, 'b> {
|
|||
false, // this is not relevant
|
||||
Some(super_trait_ref.self_type_parameter(Interner)),
|
||||
);
|
||||
let generics = generics(self.ctx.db.upcast(), associated_ty.into());
|
||||
let generics = generics(self.ctx.db, associated_ty.into());
|
||||
let self_params = generics.len_self();
|
||||
let substitution = Substitution::from_iter(
|
||||
Interner,
|
||||
|
|
@ -213,7 +213,7 @@ impl TraitImpls {
|
|||
// const _: () = { ... };
|
||||
for konst in module_data.scope.unnamed_consts() {
|
||||
let body = db.body(konst.into());
|
||||
for (_, block_def_map) in body.blocks(db.upcast()) {
|
||||
for (_, block_def_map) in body.blocks(db) {
|
||||
Self::collect_def_map(db, map, &block_def_map);
|
||||
}
|
||||
}
|
||||
|
|
@ -336,7 +336,7 @@ impl InherentImpls {
|
|||
// const _: () = { ... };
|
||||
for konst in module_data.scope.unnamed_consts() {
|
||||
let body = db.body(konst.into());
|
||||
for (_, block_def_map) in body.blocks(db.upcast()) {
|
||||
for (_, block_def_map) in body.blocks(db) {
|
||||
self.collect_def_map(db, &block_def_map);
|
||||
}
|
||||
}
|
||||
|
|
@ -399,7 +399,7 @@ pub fn def_crates(db: &dyn HirDatabase, ty: &Ty, cur_crate: Crate) -> Option<Sma
|
|||
Some(if rustc_has_incoherent_inherent_impls {
|
||||
db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Adt(def_id))
|
||||
} else {
|
||||
smallvec![def_id.module(db.upcast()).krate()]
|
||||
smallvec![def_id.module(db).krate()]
|
||||
})
|
||||
}
|
||||
&TyKind::Foreign(id) => {
|
||||
|
|
@ -412,7 +412,7 @@ pub fn def_crates(db: &dyn HirDatabase, ty: &Ty, cur_crate: Crate) -> Option<Sma
|
|||
{
|
||||
db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(id))
|
||||
} else {
|
||||
smallvec![alias.module(db.upcast()).krate()]
|
||||
smallvec![alias.module(db).krate()]
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
@ -426,7 +426,7 @@ pub fn def_crates(db: &dyn HirDatabase, ty: &Ty, cur_crate: Crate) -> Option<Sma
|
|||
{
|
||||
db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Dyn(trait_id))
|
||||
} else {
|
||||
smallvec![trait_id.module(db.upcast()).krate()]
|
||||
smallvec![trait_id.module(db).krate()]
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
@ -620,7 +620,7 @@ pub fn lookup_impl_const(
|
|||
const_id: ConstId,
|
||||
subs: Substitution,
|
||||
) -> (ConstId, Substitution) {
|
||||
let trait_id = match const_id.lookup(db.upcast()).container {
|
||||
let trait_id = match const_id.lookup(db).container {
|
||||
ItemContainerId::TraitId(id) => id,
|
||||
_ => return (const_id, subs),
|
||||
};
|
||||
|
|
@ -648,7 +648,7 @@ pub fn is_dyn_method(
|
|||
func: FunctionId,
|
||||
fn_subst: Substitution,
|
||||
) -> Option<usize> {
|
||||
let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
|
||||
let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else {
|
||||
return None;
|
||||
};
|
||||
let trait_params = db.generic_params(trait_id.into()).len();
|
||||
|
|
@ -667,7 +667,7 @@ pub fn is_dyn_method(
|
|||
.map(|it| it.skip_binders())
|
||||
.flat_map(|it| match it {
|
||||
WhereClause::Implemented(tr) => {
|
||||
all_super_traits(db.upcast(), from_chalk_trait_id(tr.trait_id))
|
||||
all_super_traits(db, from_chalk_trait_id(tr.trait_id))
|
||||
}
|
||||
_ => smallvec![],
|
||||
})
|
||||
|
|
@ -690,7 +690,7 @@ pub(crate) fn lookup_impl_method_query(
|
|||
func: FunctionId,
|
||||
fn_subst: Substitution,
|
||||
) -> (FunctionId, Substitution) {
|
||||
let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else {
|
||||
let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else {
|
||||
return (func, fn_subst);
|
||||
};
|
||||
let trait_params = db.generic_params(trait_id.into()).len();
|
||||
|
|
@ -729,13 +729,11 @@ fn lookup_impl_assoc_item_for_trait_ref(
|
|||
let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
|
||||
let impls = db.trait_impls_in_deps(env.krate);
|
||||
|
||||
let trait_module = hir_trait_id.module(db.upcast());
|
||||
let trait_module = hir_trait_id.module(db);
|
||||
let type_module = match self_ty_fp {
|
||||
TyFingerprint::Adt(adt_id) => Some(adt_id.module(db.upcast())),
|
||||
TyFingerprint::ForeignType(type_id) => {
|
||||
Some(from_foreign_def_id(type_id).module(db.upcast()))
|
||||
}
|
||||
TyFingerprint::Dyn(trait_id) => Some(trait_id.module(db.upcast())),
|
||||
TyFingerprint::Adt(adt_id) => Some(adt_id.module(db)),
|
||||
TyFingerprint::ForeignType(type_id) => Some(from_foreign_def_id(type_id).module(db)),
|
||||
TyFingerprint::Dyn(trait_id) => Some(trait_id.module(db)),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
|
|
@ -810,9 +808,9 @@ fn is_inherent_impl_coherent(
|
|||
| TyKind::Str
|
||||
| TyKind::Scalar(_) => def_map.is_rustc_coherence_is_core(),
|
||||
|
||||
&TyKind::Adt(AdtId(adt), _) => adt.module(db.upcast()).krate() == def_map.krate(),
|
||||
&TyKind::Adt(AdtId(adt), _) => adt.module(db).krate() == def_map.krate(),
|
||||
TyKind::Dyn(it) => it.principal_id().is_some_and(|trait_id| {
|
||||
from_chalk_trait_id(trait_id).module(db.upcast()).krate() == def_map.krate()
|
||||
from_chalk_trait_id(trait_id).module(db).krate() == def_map.krate()
|
||||
}),
|
||||
|
||||
_ => true,
|
||||
|
|
@ -883,12 +881,12 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
|
|||
return true;
|
||||
};
|
||||
|
||||
let local_crate = impl_.lookup(db.upcast()).container.krate();
|
||||
let local_crate = impl_.lookup(db).container.krate();
|
||||
let is_local = |tgt_crate| tgt_crate == local_crate;
|
||||
|
||||
let trait_ref = impl_trait.substitute(Interner, &substs);
|
||||
let trait_id = from_chalk_trait_id(trait_ref.trait_id);
|
||||
if is_local(trait_id.module(db.upcast()).krate()) {
|
||||
if is_local(trait_id.module(db).krate()) {
|
||||
// trait to be implemented is local
|
||||
return true;
|
||||
}
|
||||
|
|
@ -920,11 +918,11 @@ pub fn check_orphan_rules(db: &dyn HirDatabase, impl_: ImplId) -> bool {
|
|||
// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
|
||||
let is_not_orphan = trait_ref.substitution.type_parameters(Interner).any(|ty| {
|
||||
match unwrap_fundamental(ty).kind(Interner) {
|
||||
&TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()),
|
||||
&TyKind::Adt(AdtId(id), _) => is_local(id.module(db).krate()),
|
||||
TyKind::Error => true,
|
||||
TyKind::Dyn(it) => it.principal_id().is_some_and(|trait_id| {
|
||||
is_local(from_chalk_trait_id(trait_id).module(db.upcast()).krate())
|
||||
}),
|
||||
TyKind::Dyn(it) => it
|
||||
.principal_id()
|
||||
.is_some_and(|trait_id| is_local(from_chalk_trait_id(trait_id).module(db).krate())),
|
||||
_ => false,
|
||||
}
|
||||
});
|
||||
|
|
@ -1303,7 +1301,7 @@ fn iterate_inherent_methods(
|
|||
let env = table.trait_env.clone();
|
||||
let traits = env
|
||||
.traits_in_scope_from_clauses(self_ty.clone())
|
||||
.flat_map(|t| all_super_traits(db.upcast(), t));
|
||||
.flat_map(|t| all_super_traits(db, t));
|
||||
iterate_inherent_trait_methods(
|
||||
self_ty,
|
||||
table,
|
||||
|
|
@ -1316,7 +1314,7 @@ fn iterate_inherent_methods(
|
|||
}
|
||||
TyKind::Dyn(_) => {
|
||||
if let Some(principal_trait) = self_ty.dyn_trait() {
|
||||
let traits = all_super_traits(db.upcast(), principal_trait);
|
||||
let traits = all_super_traits(db, principal_trait);
|
||||
iterate_inherent_trait_methods(
|
||||
self_ty,
|
||||
table,
|
||||
|
|
@ -1507,7 +1505,7 @@ fn is_valid_impl_method_candidate(
|
|||
check_that!(name.is_none_or(|n| n == item_name));
|
||||
|
||||
if let Some(from_module) = visible_from_module {
|
||||
if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) {
|
||||
if !db.const_visibility(c).is_visible_from(db, from_module) {
|
||||
cov_mark::hit!(const_candidate_not_visible);
|
||||
return IsValidCandidate::NotVisible;
|
||||
}
|
||||
|
|
@ -1596,7 +1594,7 @@ fn is_valid_impl_fn_candidate(
|
|||
let data = db.function_signature(fn_id);
|
||||
|
||||
if let Some(from_module) = visible_from_module {
|
||||
if !db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module) {
|
||||
if !db.function_visibility(fn_id).is_visible_from(db, from_module) {
|
||||
cov_mark::hit!(autoderef_candidate_not_visible);
|
||||
return IsValidCandidate::NotVisible;
|
||||
}
|
||||
|
|
@ -132,7 +132,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
|
|||
ty,
|
||||
db,
|
||||
make_fetch_closure_field(db),
|
||||
body.owner.module(db.upcast()).krate(),
|
||||
body.owner.module(db).krate(),
|
||||
);
|
||||
}
|
||||
if is_dereference_of_ref
|
||||
|
|
@ -223,7 +223,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
|
|||
ty,
|
||||
db,
|
||||
make_fetch_closure_field(db),
|
||||
body.owner.module(db.upcast()).krate(),
|
||||
body.owner.module(db).krate(),
|
||||
);
|
||||
}
|
||||
if !ty.clone().is_copy(db, body.owner)
|
||||
|
|
@ -369,12 +369,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio
|
|||
}
|
||||
ProjectionElem::OpaqueCast(_) => (),
|
||||
}
|
||||
ty = proj.projected_ty(
|
||||
ty,
|
||||
db,
|
||||
make_fetch_closure_field(db),
|
||||
body.owner.module(db.upcast()).krate(),
|
||||
);
|
||||
ty = proj.projected_ty(ty, db, make_fetch_closure_field(db), body.owner.module(db).krate());
|
||||
}
|
||||
if is_part_of { ProjectionCase::DirectPart } else { ProjectionCase::Direct }
|
||||
}
|
||||
|
|
@ -419,10 +414,7 @@ fn ever_initialized_map(
|
|||
let Some(terminator) = &block.terminator else {
|
||||
never!(
|
||||
"Terminator should be none only in construction.\nThe body:\n{}",
|
||||
body.pretty_print(
|
||||
db,
|
||||
DisplayTarget::from_crate(db, body.owner.krate(db.upcast()))
|
||||
)
|
||||
body.pretty_print(db, DisplayTarget::from_crate(db, body.owner.krate(db)))
|
||||
);
|
||||
return;
|
||||
};
|
||||
|
|
@ -373,7 +373,7 @@ impl MirEvalError {
|
|||
writeln!(
|
||||
f,
|
||||
"In function {} ({:?})",
|
||||
function_name.name.display(db.upcast(), display_target.edition),
|
||||
function_name.name.display(db, display_target.edition),
|
||||
func
|
||||
)?;
|
||||
}
|
||||
|
|
@ -407,7 +407,7 @@ impl MirEvalError {
|
|||
},
|
||||
MirSpan::Unknown => continue,
|
||||
};
|
||||
let file_id = span.file_id.original_file(db.upcast());
|
||||
let file_id = span.file_id.original_file(db);
|
||||
let text_range = span.value.text_range();
|
||||
writeln!(f, "{}", span_formatter(file_id.file_id(), text_range))?;
|
||||
}
|
||||
|
|
@ -423,9 +423,9 @@ impl MirEvalError {
|
|||
}
|
||||
MirEvalError::MirLowerError(func, err) => {
|
||||
let function_name = db.function_signature(*func);
|
||||
let self_ = match func.lookup(db.upcast()).container {
|
||||
let self_ = match func.lookup(db).container {
|
||||
ItemContainerId::ImplId(impl_id) => Some({
|
||||
let generics = crate::generics::generics(db.upcast(), impl_id.into());
|
||||
let generics = crate::generics::generics(db, impl_id.into());
|
||||
let substs = generics.placeholder_subst(db);
|
||||
db.impl_self_ty(impl_id)
|
||||
.substitute(Interner, &substs)
|
||||
|
|
@ -433,10 +433,7 @@ impl MirEvalError {
|
|||
.to_string()
|
||||
}),
|
||||
ItemContainerId::TraitId(it) => Some(
|
||||
db.trait_signature(it)
|
||||
.name
|
||||
.display(db.upcast(), display_target.edition)
|
||||
.to_string(),
|
||||
db.trait_signature(it).name.display(db, display_target.edition).to_string(),
|
||||
),
|
||||
_ => None,
|
||||
};
|
||||
|
|
@ -445,7 +442,7 @@ impl MirEvalError {
|
|||
"MIR lowering for function `{}{}{}` ({:?}) failed due:",
|
||||
self_.as_deref().unwrap_or_default(),
|
||||
if self_.is_some() { "::" } else { "" },
|
||||
function_name.name.display(db.upcast(), display_target.edition),
|
||||
function_name.name.display(db, display_target.edition),
|
||||
func
|
||||
)?;
|
||||
err.pretty_print(f, db, span_formatter, display_target)?;
|
||||
|
|
@ -628,7 +625,7 @@ impl Evaluator<'_> {
|
|||
assert_placeholder_ty_is_unused: bool,
|
||||
trait_env: Option<Arc<TraitEnvironment>>,
|
||||
) -> Result<Evaluator<'_>> {
|
||||
let crate_id = owner.module(db.upcast()).krate();
|
||||
let crate_id = owner.module(db).krate();
|
||||
let target_data_layout = match db.target_data_layout(crate_id) {
|
||||
Ok(target_data_layout) => target_data_layout,
|
||||
Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),
|
||||
|
|
@ -821,7 +818,7 @@ impl Evaluator<'_> {
|
|||
Variants::Multiple { variants, .. } => {
|
||||
&variants[match f.parent {
|
||||
hir_def::VariantId::EnumVariantId(it) => {
|
||||
RustcEnumVariantIdx(it.lookup(self.db.upcast()).index as usize)
|
||||
RustcEnumVariantIdx(it.lookup(self.db).index as usize)
|
||||
}
|
||||
_ => {
|
||||
return Err(MirEvalError::InternalError(
|
||||
|
|
@ -1790,11 +1787,11 @@ impl Evaluator<'_> {
|
|||
subst: Substitution,
|
||||
locals: &Locals,
|
||||
) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> {
|
||||
let adt = it.adt_id(self.db.upcast());
|
||||
let adt = it.adt_id(self.db);
|
||||
if let DefWithBodyId::VariantId(f) = locals.body.owner {
|
||||
if let VariantId::EnumVariantId(it) = it {
|
||||
if let AdtId::EnumId(e) = adt {
|
||||
if f.lookup(self.db.upcast()).parent == e {
|
||||
if f.lookup(self.db).parent == e {
|
||||
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
|
||||
// infinite sized type errors) we use a dummy layout
|
||||
let i = self.const_eval_discriminant(it)?;
|
||||
|
|
@ -1812,7 +1809,7 @@ impl Evaluator<'_> {
|
|||
_ => not_supported!("multi variant layout for non-enums"),
|
||||
};
|
||||
let mut discriminant = self.const_eval_discriminant(enum_variant_id)?;
|
||||
let lookup = enum_variant_id.lookup(self.db.upcast());
|
||||
let lookup = enum_variant_id.lookup(self.db);
|
||||
let rustc_enum_variant_idx = RustcEnumVariantIdx(lookup.index as usize);
|
||||
let variant_layout = variants[rustc_enum_variant_idx].clone();
|
||||
let have_tag = match tag_encoding {
|
||||
|
|
@ -1919,7 +1916,7 @@ impl Evaluator<'_> {
|
|||
.db
|
||||
.const_eval(const_id, subst, Some(self.trait_env.clone()))
|
||||
.map_err(|e| {
|
||||
let name = const_id.name(self.db.upcast());
|
||||
let name = const_id.name(self.db);
|
||||
MirEvalError::ConstEvalError(name, Box::new(e))
|
||||
})?;
|
||||
if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value {
|
||||
|
|
@ -2070,7 +2067,7 @@ impl Evaluator<'_> {
|
|||
}
|
||||
if let DefWithBodyId::VariantId(f) = locals.body.owner {
|
||||
if let Some((AdtId::EnumId(e), _)) = ty.as_adt() {
|
||||
if f.lookup(self.db.upcast()).parent == e {
|
||||
if f.lookup(self.db).parent == e {
|
||||
// Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and
|
||||
// infinite sized type errors) we use a dummy size
|
||||
return Ok(Some((16, 16)));
|
||||
|
|
@ -2781,14 +2778,14 @@ impl Evaluator<'_> {
|
|||
match r {
|
||||
Ok(r) => Ok(r),
|
||||
Err(e) => {
|
||||
let db = self.db.upcast();
|
||||
let db = self.db;
|
||||
let loc = variant.lookup(db);
|
||||
let enum_loc = loc.parent.lookup(db);
|
||||
let edition = self.crate_id.data(self.db).edition;
|
||||
let name = format!(
|
||||
"{}::{}",
|
||||
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
|
||||
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
|
||||
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
|
||||
loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
|
||||
);
|
||||
Err(MirEvalError::ConstEvalError(name, Box::new(e)))
|
||||
}
|
||||
|
|
@ -2921,9 +2918,9 @@ pub fn render_const_using_debug_impl(
|
|||
drop_flags: DropFlags::default(),
|
||||
};
|
||||
let data = evaluator.allocate_const_in_heap(locals, c)?;
|
||||
let resolver = owner.resolver(db.upcast());
|
||||
let resolver = owner.resolver(db);
|
||||
let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully(
|
||||
db.upcast(),
|
||||
db,
|
||||
&hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![core::fmt::Debug]),
|
||||
) else {
|
||||
not_supported!("core::fmt::Debug not found");
|
||||
|
|
@ -2954,7 +2951,7 @@ pub fn render_const_using_debug_impl(
|
|||
evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a2.to_bytes())?;
|
||||
evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?;
|
||||
let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully(
|
||||
db.upcast(),
|
||||
db,
|
||||
&hir_def::expr_store::path::Path::from_known_path_with_no_generic(path![std::fmt::format]),
|
||||
HygieneId::ROOT,
|
||||
) else {
|
||||
|
|
|
|||
|
|
@ -63,10 +63,10 @@ impl Evaluator<'_> {
|
|||
// Keep this around for a bit until extern "rustc-intrinsic" abis are no longer used
|
||||
|| (match &function_data.abi {
|
||||
Some(abi) => *abi == sym::rust_dash_intrinsic,
|
||||
None => match def.lookup(self.db.upcast()).container {
|
||||
None => match def.lookup(self.db).container {
|
||||
hir_def::ItemContainerId::ExternBlockId(block) => {
|
||||
let id = block.lookup(self.db.upcast()).id;
|
||||
id.item_tree(self.db.upcast())[id.value].abi.as_ref()
|
||||
let id = block.lookup(self.db).id;
|
||||
id.item_tree(self.db)[id.value].abi.as_ref()
|
||||
== Some(&sym::rust_dash_intrinsic)
|
||||
}
|
||||
_ => false,
|
||||
|
|
@ -85,10 +85,10 @@ impl Evaluator<'_> {
|
|||
|| attrs.by_key(&sym::rustc_intrinsic_must_be_overridden).exists(),
|
||||
);
|
||||
}
|
||||
let is_extern_c = match def.lookup(self.db.upcast()).container {
|
||||
let is_extern_c = match def.lookup(self.db).container {
|
||||
hir_def::ItemContainerId::ExternBlockId(block) => {
|
||||
let id = block.lookup(self.db.upcast()).id;
|
||||
id.item_tree(self.db.upcast())[id.value].abi.as_ref() == Some(&sym::C)
|
||||
let id = block.lookup(self.db).id;
|
||||
id.item_tree(self.db)[id.value].abi.as_ref() == Some(&sym::C)
|
||||
}
|
||||
_ => false,
|
||||
};
|
||||
|
|
@ -124,7 +124,7 @@ impl Evaluator<'_> {
|
|||
destination.write_from_bytes(self, &result)?;
|
||||
return Ok(true);
|
||||
}
|
||||
if let ItemContainerId::TraitId(t) = def.lookup(self.db.upcast()).container {
|
||||
if let ItemContainerId::TraitId(t) = def.lookup(self.db).container {
|
||||
if self.db.lang_attr(t.into()) == Some(LangItem::Clone) {
|
||||
let [self_ty] = generic_args.as_slice(Interner) else {
|
||||
not_supported!("wrong generic arg count for clone");
|
||||
|
|
@ -154,8 +154,7 @@ impl Evaluator<'_> {
|
|||
) -> Result<Option<FunctionId>> {
|
||||
// `PanicFmt` is redirected to `ConstPanicFmt`
|
||||
if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) {
|
||||
let resolver =
|
||||
self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast());
|
||||
let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db);
|
||||
|
||||
let Some(hir_def::lang_item::LangItemTarget::Function(const_panic_fmt)) =
|
||||
self.db.lang_item(resolver.krate(), LangItem::ConstPanicFmt)
|
||||
|
|
@ -828,14 +827,14 @@ impl Evaluator<'_> {
|
|||
};
|
||||
let ty_name = match ty.display_source_code(
|
||||
self.db,
|
||||
locals.body.owner.module(self.db.upcast()),
|
||||
locals.body.owner.module(self.db),
|
||||
true,
|
||||
) {
|
||||
Ok(ty_name) => ty_name,
|
||||
// Fallback to human readable display in case of `Err`. Ideally we want to use `display_source_code` to
|
||||
// render full paths.
|
||||
Err(_) => {
|
||||
let krate = locals.body.owner.krate(self.db.upcast());
|
||||
let krate = locals.body.owner.krate(self.db);
|
||||
ty.display(self.db, DisplayTarget::from_crate(self.db, krate)).to_string()
|
||||
}
|
||||
};
|
||||
|
|
|
|||
|
|
@ -177,7 +177,7 @@ impl MirLowerError {
|
|||
writeln!(
|
||||
f,
|
||||
"Missing function definition for {}",
|
||||
body.pretty_print_expr(db.upcast(), *owner, *it, display_target.edition)
|
||||
body.pretty_print_expr(db, *owner, *it, display_target.edition)
|
||||
)?;
|
||||
}
|
||||
MirLowerError::HasErrors => writeln!(f, "Type inference result contains errors")?,
|
||||
|
|
@ -193,10 +193,7 @@ impl MirLowerError {
|
|||
writeln!(
|
||||
f,
|
||||
"Generic arg not provided for {}",
|
||||
param
|
||||
.name()
|
||||
.unwrap_or(&Name::missing())
|
||||
.display(db.upcast(), display_target.edition)
|
||||
param.name().unwrap_or(&Name::missing()).display(db, display_target.edition)
|
||||
)?;
|
||||
writeln!(f, "Provided args: [")?;
|
||||
for g in subst.iter(Interner) {
|
||||
|
|
@ -288,7 +285,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
owner,
|
||||
closures: vec![],
|
||||
};
|
||||
let resolver = owner.resolver(db.upcast());
|
||||
let resolver = owner.resolver(db);
|
||||
|
||||
MirLowerCtx {
|
||||
result: mir,
|
||||
|
|
@ -413,7 +410,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
}
|
||||
Expr::Missing => {
|
||||
if let DefWithBodyId::FunctionId(f) = self.owner {
|
||||
let assoc = f.lookup(self.db.upcast());
|
||||
let assoc = f.lookup(self.db);
|
||||
if let ItemContainerId::TraitId(t) = assoc.container {
|
||||
let name = &self.db.function_signature(f).name;
|
||||
return Err(MirLowerError::TraitFunctionDefinition(t, name.clone()));
|
||||
|
|
@ -422,54 +419,53 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
Err(MirLowerError::IncompleteExpr)
|
||||
}
|
||||
Expr::Path(p) => {
|
||||
let pr = if let Some((assoc, subst)) =
|
||||
self.infer.assoc_resolutions_for_expr(expr_id)
|
||||
{
|
||||
match assoc {
|
||||
hir_def::AssocItemId::ConstId(c) => {
|
||||
self.lower_const(
|
||||
c.into(),
|
||||
current,
|
||||
place,
|
||||
subst,
|
||||
expr_id.into(),
|
||||
self.expr_ty_without_adjust(expr_id),
|
||||
)?;
|
||||
return Ok(Some(current));
|
||||
let pr =
|
||||
if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) {
|
||||
match assoc {
|
||||
hir_def::AssocItemId::ConstId(c) => {
|
||||
self.lower_const(
|
||||
c.into(),
|
||||
current,
|
||||
place,
|
||||
subst,
|
||||
expr_id.into(),
|
||||
self.expr_ty_without_adjust(expr_id),
|
||||
)?;
|
||||
return Ok(Some(current));
|
||||
}
|
||||
hir_def::AssocItemId::FunctionId(_) => {
|
||||
// FnDefs are zero sized, no action is needed.
|
||||
return Ok(Some(current));
|
||||
}
|
||||
hir_def::AssocItemId::TypeAliasId(_) => {
|
||||
// FIXME: If it is unreachable, use proper error instead of `not_supported`.
|
||||
not_supported!("associated functions and types")
|
||||
}
|
||||
}
|
||||
hir_def::AssocItemId::FunctionId(_) => {
|
||||
// FnDefs are zero sized, no action is needed.
|
||||
return Ok(Some(current));
|
||||
} else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
|
||||
match variant {
|
||||
VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
|
||||
VariantId::StructId(s) => ValueNs::StructId(s),
|
||||
VariantId::UnionId(_) => implementation_error!("Union variant as path"),
|
||||
}
|
||||
hir_def::AssocItemId::TypeAliasId(_) => {
|
||||
// FIXME: If it is unreachable, use proper error instead of `not_supported`.
|
||||
not_supported!("associated functions and types")
|
||||
}
|
||||
}
|
||||
} else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
|
||||
match variant {
|
||||
VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
|
||||
VariantId::StructId(s) => ValueNs::StructId(s),
|
||||
VariantId::UnionId(_) => implementation_error!("Union variant as path"),
|
||||
}
|
||||
} else {
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
|
||||
let hygiene = self.body.expr_path_hygiene(expr_id);
|
||||
let result = self
|
||||
.resolver
|
||||
.resolve_path_in_value_ns_fully(self.db.upcast(), p, hygiene)
|
||||
.ok_or_else(|| {
|
||||
MirLowerError::unresolved_path(
|
||||
self.db,
|
||||
p,
|
||||
DisplayTarget::from_crate(self.db, self.krate()),
|
||||
self.body,
|
||||
)
|
||||
})?;
|
||||
self.resolver.reset_to_guard(resolver_guard);
|
||||
result
|
||||
};
|
||||
} else {
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
|
||||
let hygiene = self.body.expr_path_hygiene(expr_id);
|
||||
let result = self
|
||||
.resolver
|
||||
.resolve_path_in_value_ns_fully(self.db, p, hygiene)
|
||||
.ok_or_else(|| {
|
||||
MirLowerError::unresolved_path(
|
||||
self.db,
|
||||
p,
|
||||
DisplayTarget::from_crate(self.db, self.krate()),
|
||||
self.body,
|
||||
)
|
||||
})?;
|
||||
self.resolver.reset_to_guard(resolver_guard);
|
||||
result
|
||||
};
|
||||
match pr {
|
||||
ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => {
|
||||
let Some((temp, current)) =
|
||||
|
|
@ -513,10 +509,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
Ok(Some(current))
|
||||
}
|
||||
ValueNs::GenericParam(p) => {
|
||||
let Some(def) = self.owner.as_generic_def_id(self.db.upcast()) else {
|
||||
let Some(def) = self.owner.as_generic_def_id(self.db) else {
|
||||
not_supported!("owner without generic def id");
|
||||
};
|
||||
let generics = generics(self.db.upcast(), def);
|
||||
let generics = generics(self.db, def);
|
||||
let ty = self.expr_ty_without_adjust(expr_id);
|
||||
self.push_assignment(
|
||||
current,
|
||||
|
|
@ -577,7 +573,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
};
|
||||
self.push_fake_read(current, cond_place, expr_id.into());
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
|
||||
let (then_target, else_target) =
|
||||
self.pattern_match(current, None, cond_place, *pat)?;
|
||||
self.resolver.reset_to_guard(resolver_guard);
|
||||
|
|
@ -693,7 +689,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
let (func_id, generic_args) =
|
||||
self.infer.method_resolution(expr_id).ok_or_else(|| {
|
||||
MirLowerError::UnresolvedMethod(
|
||||
method_name.display(self.db.upcast(), self.edition()).to_string(),
|
||||
method_name.display(self.db, self.edition()).to_string(),
|
||||
)
|
||||
})?;
|
||||
let func = Operand::from_fn(self.db, func_id, generic_args);
|
||||
|
|
@ -715,7 +711,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
self.push_fake_read(current, cond_place, expr_id.into());
|
||||
let mut end = None;
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
|
||||
for MatchArm { pat, guard, expr } in arms.iter() {
|
||||
let (then, mut otherwise) =
|
||||
self.pattern_match(current, None, cond_place, *pat)?;
|
||||
|
|
@ -1129,7 +1125,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
};
|
||||
self.push_fake_read(current, value, expr_id.into());
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
|
||||
current = self.pattern_match_assignment(current, value, target)?;
|
||||
self.resolver.reset_to_guard(resolver_guard);
|
||||
Ok(Some(current))
|
||||
|
|
@ -1328,7 +1324,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
}
|
||||
|
||||
fn placeholder_subst(&mut self) -> Substitution {
|
||||
match self.owner.as_generic_def_id(self.db.upcast()) {
|
||||
match self.owner.as_generic_def_id(self.db) {
|
||||
Some(it) => TyBuilder::placeholder_subst(self.db, it),
|
||||
None => Substitution::empty(Interner),
|
||||
}
|
||||
|
|
@ -1369,13 +1365,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
MirLowerError::unresolved_path(
|
||||
self.db,
|
||||
c,
|
||||
DisplayTarget::from_crate(db, owner.krate(db.upcast())),
|
||||
DisplayTarget::from_crate(db, owner.krate(db)),
|
||||
self.body,
|
||||
)
|
||||
};
|
||||
let pr = self
|
||||
.resolver
|
||||
.resolve_path_in_value_ns(self.db.upcast(), c, HygieneId::ROOT)
|
||||
.resolve_path_in_value_ns(self.db, c, HygieneId::ROOT)
|
||||
.ok_or_else(unresolved_name)?;
|
||||
match pr {
|
||||
ResolveValueResult::ValueNs(v, _) => {
|
||||
|
|
@ -1475,7 +1471,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
// We can't evaluate constant with substitution now, as generics are not monomorphized in lowering.
|
||||
intern_const_scalar(ConstScalar::UnevaluatedConst(const_id, subst), ty)
|
||||
} else {
|
||||
let name = const_id.name(self.db.upcast());
|
||||
let name = const_id.name(self.db);
|
||||
self.db
|
||||
.const_eval(const_id, subst, None)
|
||||
.map_err(|e| MirLowerError::ConstEvalError(name.into(), Box::new(e)))?
|
||||
|
|
@ -1708,7 +1704,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
}
|
||||
|
||||
fn is_uninhabited(&self, expr_id: ExprId) -> bool {
|
||||
is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db.upcast()))
|
||||
is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db))
|
||||
}
|
||||
|
||||
/// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
|
||||
|
|
@ -1730,7 +1726,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
}
|
||||
|
||||
fn resolve_lang_item(&self, item: LangItem) -> Result<LangItemTarget> {
|
||||
let crate_id = self.owner.module(self.db.upcast()).krate();
|
||||
let crate_id = self.owner.module(self.db).krate();
|
||||
self.db.lang_item(crate_id, item).ok_or(MirLowerError::LangItemNotFound(item))
|
||||
}
|
||||
|
||||
|
|
@ -1758,11 +1754,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
self.push_fake_read(current, init_place, span);
|
||||
// Using the initializer for the resolver scope is good enough for us, as it cannot create new declarations
|
||||
// and has all declarations of the `let`.
|
||||
let resolver_guard = self.resolver.update_to_inner_scope(
|
||||
self.db.upcast(),
|
||||
self.owner,
|
||||
*expr_id,
|
||||
);
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, *expr_id);
|
||||
(current, else_block) =
|
||||
self.pattern_match(current, None, init_place, *pat)?;
|
||||
self.resolver.reset_to_guard(resolver_guard);
|
||||
|
|
@ -1906,13 +1899,13 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
Ok(r) => Ok(r),
|
||||
Err(e) => {
|
||||
let edition = self.edition();
|
||||
let db = self.db.upcast();
|
||||
let db = self.db;
|
||||
let loc = variant.lookup(db);
|
||||
let enum_loc = loc.parent.lookup(db);
|
||||
let name = format!(
|
||||
"{}::{}",
|
||||
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
|
||||
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
|
||||
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db, edition),
|
||||
loc.id.item_tree(db)[loc.id.value].name.display(db, edition),
|
||||
);
|
||||
Err(MirLowerError::ConstEvalError(name.into(), Box::new(e)))
|
||||
}
|
||||
|
|
@ -1924,7 +1917,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
|
|||
}
|
||||
|
||||
fn krate(&self) -> Crate {
|
||||
self.owner.krate(self.db.upcast())
|
||||
self.owner.krate(self.db)
|
||||
}
|
||||
|
||||
fn display_target(&self) -> DisplayTarget {
|
||||
|
|
@ -2046,7 +2039,7 @@ pub fn mir_body_for_closure_query(
|
|||
let Some(sig) = ClosureSubst(substs).sig_ty().callable_sig(db) else {
|
||||
implementation_error!("closure has not callable sig");
|
||||
};
|
||||
let resolver_guard = ctx.resolver.update_to_inner_scope(db.upcast(), owner, expr);
|
||||
let resolver_guard = ctx.resolver.update_to_inner_scope(db, owner, expr);
|
||||
let current = ctx.lower_params_and_bindings(
|
||||
args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())),
|
||||
None,
|
||||
|
|
@ -2120,27 +2113,27 @@ pub fn mir_body_for_closure_query(
|
|||
}
|
||||
|
||||
pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
|
||||
let krate = def.krate(db.upcast());
|
||||
let krate = def.krate(db);
|
||||
let edition = krate.data(db).edition;
|
||||
let detail = match def {
|
||||
DefWithBodyId::FunctionId(it) => {
|
||||
db.function_signature(it).name.display(db.upcast(), edition).to_string()
|
||||
db.function_signature(it).name.display(db, edition).to_string()
|
||||
}
|
||||
DefWithBodyId::StaticId(it) => {
|
||||
db.static_signature(it).name.display(db.upcast(), edition).to_string()
|
||||
db.static_signature(it).name.display(db, edition).to_string()
|
||||
}
|
||||
DefWithBodyId::ConstId(it) => db
|
||||
.const_signature(it)
|
||||
.name
|
||||
.clone()
|
||||
.unwrap_or_else(Name::missing)
|
||||
.display(db.upcast(), edition)
|
||||
.display(db, edition)
|
||||
.to_string(),
|
||||
DefWithBodyId::VariantId(it) => {
|
||||
let loc = it.lookup(db.upcast());
|
||||
let loc = it.lookup(db);
|
||||
db.enum_variants(loc.parent).variants[loc.index as usize]
|
||||
.1
|
||||
.display(db.upcast(), edition)
|
||||
.display(db, edition)
|
||||
.to_string()
|
||||
}
|
||||
};
|
||||
|
|
|
|||
|
|
@ -136,10 +136,9 @@ impl MirLowerCtx<'_> {
|
|||
match &self.body.exprs[expr_id] {
|
||||
Expr::Path(p) => {
|
||||
let resolver_guard =
|
||||
self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr_id);
|
||||
self.resolver.update_to_inner_scope(self.db, self.owner, expr_id);
|
||||
let hygiene = self.body.expr_path_hygiene(expr_id);
|
||||
let resolved =
|
||||
self.resolver.resolve_path_in_value_ns_fully(self.db.upcast(), p, hygiene);
|
||||
let resolved = self.resolver.resolve_path_in_value_ns_fully(self.db, p, hygiene);
|
||||
self.resolver.reset_to_guard(resolver_guard);
|
||||
let Some(pr) = resolved else {
|
||||
return try_rvalue(self);
|
||||
|
|
|
|||
|
|
@ -355,7 +355,7 @@ impl MirLowerCtx<'_> {
|
|||
let hygiene = self.body.pat_path_hygiene(pattern);
|
||||
let pr = self
|
||||
.resolver
|
||||
.resolve_path_in_value_ns(self.db.upcast(), p, hygiene)
|
||||
.resolve_path_in_value_ns(self.db, p, hygiene)
|
||||
.ok_or_else(unresolved_name)?;
|
||||
|
||||
if let (
|
||||
|
|
|
|||
|
|
@ -77,7 +77,7 @@ impl FallibleTypeFolder<Interner> for Filler<'_> {
|
|||
owner: self.owner,
|
||||
trait_env: self.trait_env.clone(),
|
||||
subst: &subst,
|
||||
generics: Some(generics(self.db.upcast(), func.into())),
|
||||
generics: Some(generics(self.db, func.into())),
|
||||
};
|
||||
filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
|
||||
}
|
||||
|
|
@ -305,7 +305,7 @@ pub fn monomorphized_mir_body_query(
|
|||
subst: Substitution,
|
||||
trait_env: Arc<crate::TraitEnvironment>,
|
||||
) -> Result<Arc<MirBody>, MirLowerError> {
|
||||
let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
|
||||
let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
|
||||
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
|
||||
let body = db.mir_body(owner)?;
|
||||
let mut body = (*body).clone();
|
||||
|
|
@ -331,7 +331,7 @@ pub fn monomorphized_mir_body_for_closure_query(
|
|||
trait_env: Arc<crate::TraitEnvironment>,
|
||||
) -> Result<Arc<MirBody>, MirLowerError> {
|
||||
let InternedClosure(owner, _) = db.lookup_intern_closure(closure);
|
||||
let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
|
||||
let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
|
||||
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
|
||||
let body = db.mir_body_for_closure(closure)?;
|
||||
let mut body = (*body).clone();
|
||||
|
|
@ -347,7 +347,7 @@ pub fn monomorphize_mir_body_bad(
|
|||
trait_env: Arc<crate::TraitEnvironment>,
|
||||
) -> Result<MirBody, MirLowerError> {
|
||||
let owner = body.owner;
|
||||
let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
|
||||
let generics = owner.as_generic_def_id(db).map(|g_def| generics(db, g_def));
|
||||
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
|
||||
filler.fill_body(&mut body)?;
|
||||
Ok(body)
|
||||
|
|
|
|||
|
|
@ -44,15 +44,11 @@ impl MirBody {
|
|||
ctx.for_body(|this| match ctx.body.owner {
|
||||
hir_def::DefWithBodyId::FunctionId(id) => {
|
||||
let data = db.function_signature(id);
|
||||
w!(this, "fn {}() ", data.name.display(db.upcast(), this.display_target.edition));
|
||||
w!(this, "fn {}() ", data.name.display(db, this.display_target.edition));
|
||||
}
|
||||
hir_def::DefWithBodyId::StaticId(id) => {
|
||||
let data = db.static_signature(id);
|
||||
w!(
|
||||
this,
|
||||
"static {}: _ = ",
|
||||
data.name.display(db.upcast(), this.display_target.edition)
|
||||
);
|
||||
w!(this, "static {}: _ = ", data.name.display(db, this.display_target.edition));
|
||||
}
|
||||
hir_def::DefWithBodyId::ConstId(id) => {
|
||||
let data = db.const_signature(id);
|
||||
|
|
@ -62,21 +58,21 @@ impl MirBody {
|
|||
data.name
|
||||
.as_ref()
|
||||
.unwrap_or(&Name::missing())
|
||||
.display(db.upcast(), this.display_target.edition)
|
||||
.display(db, this.display_target.edition)
|
||||
);
|
||||
}
|
||||
hir_def::DefWithBodyId::VariantId(id) => {
|
||||
let loc = id.lookup(db.upcast());
|
||||
let enum_loc = loc.parent.lookup(db.upcast());
|
||||
let loc = id.lookup(db);
|
||||
let enum_loc = loc.parent.lookup(db);
|
||||
w!(
|
||||
this,
|
||||
"enum {}::{} = ",
|
||||
enum_loc.id.item_tree(db.upcast())[enum_loc.id.value]
|
||||
enum_loc.id.item_tree(db)[enum_loc.id.value]
|
||||
.name
|
||||
.display(db.upcast(), this.display_target.edition),
|
||||
loc.id.item_tree(db.upcast())[loc.id.value]
|
||||
.display(db, this.display_target.edition),
|
||||
loc.id.item_tree(db)[loc.id.value]
|
||||
.name
|
||||
.display(db.upcast(), this.display_target.edition),
|
||||
.display(db, this.display_target.edition),
|
||||
)
|
||||
}
|
||||
});
|
||||
|
|
@ -131,7 +127,7 @@ impl HirDisplay for LocalName {
|
|||
match self {
|
||||
LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
|
||||
LocalName::Binding(n, l) => {
|
||||
write!(f, "{}_{}", n.display(f.db.upcast(), f.edition()), u32::from(l.into_raw()))
|
||||
write!(f, "{}_{}", n.display(f.db, f.edition()), u32::from(l.into_raw()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -336,23 +332,19 @@ impl<'a> MirPrettyCtx<'a> {
|
|||
hir_def::VariantId::EnumVariantId(e) => {
|
||||
w!(this, "(");
|
||||
f(this, local, head);
|
||||
let loc = e.lookup(this.db.upcast());
|
||||
let loc = e.lookup(this.db);
|
||||
w!(
|
||||
this,
|
||||
" as {}).{}",
|
||||
this.db.enum_variants(loc.parent).variants[loc.index as usize]
|
||||
.1
|
||||
.display(this.db.upcast(), this.display_target.edition),
|
||||
name.display(this.db.upcast(), this.display_target.edition)
|
||||
.display(this.db, this.display_target.edition),
|
||||
name.display(this.db, this.display_target.edition)
|
||||
);
|
||||
}
|
||||
hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
|
||||
f(this, local, head);
|
||||
w!(
|
||||
this,
|
||||
".{}",
|
||||
name.display(this.db.upcast(), this.display_target.edition)
|
||||
);
|
||||
w!(this, ".{}", name.display(this.db, this.display_target.edition));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,11 +4,10 @@ use std::{fmt, panic, sync::Mutex};
|
|||
|
||||
use base_db::{
|
||||
CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, RootQueryDb, SourceDatabase,
|
||||
SourceRoot, SourceRootId, SourceRootInput, Upcast,
|
||||
SourceRoot, SourceRootId, SourceRootInput,
|
||||
};
|
||||
|
||||
use hir_def::{ModuleId, db::DefDatabase};
|
||||
use hir_expand::db::ExpandDatabase;
|
||||
use rustc_hash::FxHashMap;
|
||||
use salsa::{AsDynDatabase, Durability};
|
||||
use span::{EditionedFileId, FileId};
|
||||
|
|
@ -47,30 +46,6 @@ impl fmt::Debug for TestDB {
|
|||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn ExpandDatabase> for TestDB {
|
||||
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn DefDatabase> for TestDB {
|
||||
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn RootQueryDb> for TestDB {
|
||||
fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Upcast<dyn SourceDatabase> for TestDB {
|
||||
fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
impl SourceDatabase for TestDB {
|
||||
fn file_text(&self, file_id: base_db::FileId) -> FileText {
|
||||
|
|
|
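// Minimal sketch of the adapter pattern deleted in the `TestDB` hunk above
// (placeholder names; the real trait hierarchy is larger). One `Upcast` impl
// used to be required per database type and per target trait; with dyn
// supertrait upcasting a plain coercion covers both routes shown in `main`.
trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait ExpandDb {
    fn expand(&self) -> &'static str;
}

trait DefDb: ExpandDb {}

struct TestDb;

impl ExpandDb for TestDb {
    fn expand(&self) -> &'static str {
        "expanded"
    }
}

impl DefDb for TestDb {}

// The kind of boilerplate impl the hunk removes.
impl Upcast<dyn ExpandDb> for TestDb {
    fn upcast(&self) -> &(dyn ExpandDb + 'static) {
        self
    }
}

fn use_expand(db: &dyn ExpandDb) -> &'static str {
    db.expand()
}

fn main() {
    let db = TestDb;
    // Old route, through the adapter trait:
    assert_eq!(use_expand(db.upcast()), "expanded");
    // New route, relying on `&dyn DefDb` upcasting to `&dyn ExpandDb`
    // (assumes a toolchain with dyn supertrait upcasting, Rust 1.86+):
    let wide: &dyn DefDb = &db;
    assert_eq!(use_expand(wide), "expanded");
}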
|||
|
|
@ -25,7 +25,7 @@ impl DebugContext<'_> {
|
|||
AdtId::UnionId(it) => self.0.union_signature(it).name.clone(),
|
||||
AdtId::EnumId(it) => self.0.enum_signature(it).name.clone(),
|
||||
};
|
||||
name.display(self.0.upcast(), Edition::LATEST).fmt(f)?;
|
||||
name.display(self.0, Edition::LATEST).fmt(f)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
@ -36,7 +36,7 @@ impl DebugContext<'_> {
|
|||
) -> Result<(), fmt::Error> {
|
||||
let trait_: hir_def::TraitId = from_chalk_trait_id(id);
|
||||
let trait_data = self.0.trait_signature(trait_);
|
||||
trait_data.name.display(self.0.upcast(), Edition::LATEST).fmt(f)?;
|
||||
trait_data.name.display(self.0, Edition::LATEST).fmt(f)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
@ -47,7 +47,7 @@ impl DebugContext<'_> {
|
|||
) -> Result<(), fmt::Error> {
|
||||
let type_alias: TypeAliasId = from_assoc_type_id(id);
|
||||
let type_alias_data = self.0.type_alias_signature(type_alias);
|
||||
let trait_ = match type_alias.lookup(self.0.upcast()).container {
|
||||
let trait_ = match type_alias.lookup(self.0).container {
|
||||
ItemContainerId::TraitId(t) => t,
|
||||
_ => panic!("associated type not in trait"),
|
||||
};
|
||||
|
|
@ -55,8 +55,8 @@ impl DebugContext<'_> {
|
|||
write!(
|
||||
fmt,
|
||||
"{}::{}",
|
||||
trait_data.name.display(self.0.upcast(), Edition::LATEST),
|
||||
type_alias_data.name.display(self.0.upcast(), Edition::LATEST)
|
||||
trait_data.name.display(self.0, Edition::LATEST),
|
||||
type_alias_data.name.display(self.0, Edition::LATEST)
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -68,7 +68,7 @@ impl DebugContext<'_> {
|
|||
) -> Result<(), fmt::Error> {
|
||||
let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
|
||||
let type_alias_data = self.0.type_alias_signature(type_alias);
|
||||
let trait_ = match type_alias.lookup(self.0.upcast()).container {
|
||||
let trait_ = match type_alias.lookup(self.0).container {
|
||||
ItemContainerId::TraitId(t) => t,
|
||||
_ => panic!("associated type not in trait"),
|
||||
};
|
||||
|
|
@ -76,7 +76,7 @@ impl DebugContext<'_> {
|
|||
let trait_ref = projection_ty.trait_ref(self.0);
|
||||
let trait_params = trait_ref.substitution.as_slice(Interner);
|
||||
let self_ty = trait_ref.self_type_parameter(Interner);
|
||||
write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast(), Edition::LATEST))?;
|
||||
write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0, Edition::LATEST))?;
|
||||
if trait_params.len() > 1 {
|
||||
write!(
|
||||
fmt,
|
||||
|
|
@ -84,7 +84,7 @@ impl DebugContext<'_> {
|
|||
trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))),
|
||||
)?;
|
||||
}
|
||||
write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast(), Edition::LATEST))?;
|
||||
write!(fmt, ">::{}", type_alias_data.name.display(self.0, Edition::LATEST))?;
|
||||
|
||||
let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len();
|
||||
let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count];
|
||||
|
|
@ -109,16 +109,16 @@ impl DebugContext<'_> {
|
|||
CallableDefId::FunctionId(ff) => self.0.function_signature(ff).name.clone(),
|
||||
CallableDefId::StructId(s) => self.0.struct_signature(s).name.clone(),
|
||||
CallableDefId::EnumVariantId(e) => {
|
||||
let loc = e.lookup(self.0.upcast());
|
||||
let loc = e.lookup(self.0);
|
||||
self.0.enum_variants(loc.parent).variants[loc.index as usize].1.clone()
|
||||
}
|
||||
};
|
||||
match def {
|
||||
CallableDefId::FunctionId(_) => {
|
||||
write!(fmt, "{{fn {}}}", name.display(self.0.upcast(), Edition::LATEST))
|
||||
write!(fmt, "{{fn {}}}", name.display(self.0, Edition::LATEST))
|
||||
}
|
||||
CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
|
||||
write!(fmt, "{{ctor {}}}", name.display(self.0.upcast(), Edition::LATEST))
|
||||
write!(fmt, "{{ctor {}}}", name.display(self.0, Edition::LATEST))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -117,7 +117,7 @@ pub(crate) fn trait_solve_query(
|
|||
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => db
|
||||
.trait_signature(it.hir_trait_id())
|
||||
.name
|
||||
.display(db.upcast(), Edition::LATEST)
|
||||
.display(db, Edition::LATEST)
|
||||
.to_string(),
|
||||
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(),
|
||||
_ => "??".to_owned(),
|
||||
|
|
|
|||
|
|
@ -296,12 +296,12 @@ pub fn is_fn_unsafe_to_call(
|
|||
}
|
||||
}
|
||||
|
||||
let loc = func.lookup(db.upcast());
|
||||
let loc = func.lookup(db);
|
||||
match loc.container {
|
||||
hir_def::ItemContainerId::ExternBlockId(block) => {
|
||||
let id = block.lookup(db.upcast()).id;
|
||||
let id = block.lookup(db).id;
|
||||
let is_intrinsic_block =
|
||||
id.item_tree(db.upcast())[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic);
|
||||
id.item_tree(db)[id.value].abi.as_ref() == Some(&sym::rust_dash_intrinsic);
|
||||
if is_intrinsic_block {
|
||||
// legacy intrinsics
|
||||
// extern "rust-intrinsic" intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
|
|||
_ => return None,
|
||||
}
|
||||
|
||||
let generics = generics(db.upcast(), def);
|
||||
let generics = generics(db, def);
|
||||
let count = generics.len();
|
||||
if count == 0 {
|
||||
return None;
|
||||
|
|
@ -60,7 +60,7 @@ pub(crate) fn variances_of_cycle(
|
|||
_cycle: &Cycle,
|
||||
def: GenericDefId,
|
||||
) -> Option<Arc<[Variance]>> {
|
||||
let generics = generics(db.upcast(), def);
|
||||
let generics = generics(db, def);
|
||||
let count = generics.len();
|
||||
|
||||
if count == 0 {
|
||||
|
|
|
|||
|
|
@ -33,7 +33,7 @@ macro_rules! impl_has_attrs {
|
|||
impl HasAttrs for $def {
|
||||
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
|
||||
let def = AttrDefId::$def_id(self.into());
|
||||
AttrsWithOwner::new(db.upcast(), def)
|
||||
AttrsWithOwner::new(db, def)
|
||||
}
|
||||
fn attr_id(self) -> AttrDefId {
|
||||
AttrDefId::$def_id(self.into())
|
||||
|
|
@ -95,7 +95,7 @@ impl HasAttrs for AssocItem {
|
|||
impl HasAttrs for crate::Crate {
|
||||
fn attrs(self, db: &dyn HirDatabase) -> AttrsWithOwner {
|
||||
let def = AttrDefId::ModuleId(self.root_module().id);
|
||||
AttrsWithOwner::new(db.upcast(), def)
|
||||
AttrsWithOwner::new(db, def)
|
||||
}
|
||||
fn attr_id(self) -> AttrDefId {
|
||||
AttrDefId::ModuleId(self.root_module().id)
|
||||
|
|
@ -119,27 +119,27 @@ fn resolve_doc_path_on_(
|
|||
ns: Option<Namespace>,
|
||||
) -> Option<DocLinkDef> {
|
||||
let resolver = match attr_id {
|
||||
AttrDefId::ModuleId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::FieldId(it) => it.parent.resolver(db.upcast()),
|
||||
AttrDefId::AdtId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::FunctionId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::EnumVariantId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::StaticId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::ConstId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::TraitId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::TraitAliasId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::TypeAliasId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::ImplId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::UseId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::MacroId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()),
|
||||
AttrDefId::ModuleId(it) => it.resolver(db),
|
||||
AttrDefId::FieldId(it) => it.parent.resolver(db),
|
||||
AttrDefId::AdtId(it) => it.resolver(db),
|
||||
AttrDefId::FunctionId(it) => it.resolver(db),
|
||||
AttrDefId::EnumVariantId(it) => it.resolver(db),
|
||||
AttrDefId::StaticId(it) => it.resolver(db),
|
||||
AttrDefId::ConstId(it) => it.resolver(db),
|
||||
AttrDefId::TraitId(it) => it.resolver(db),
|
||||
AttrDefId::TraitAliasId(it) => it.resolver(db),
|
||||
AttrDefId::TypeAliasId(it) => it.resolver(db),
|
||||
AttrDefId::ImplId(it) => it.resolver(db),
|
||||
AttrDefId::ExternBlockId(it) => it.resolver(db),
|
||||
AttrDefId::UseId(it) => it.resolver(db),
|
||||
AttrDefId::MacroId(it) => it.resolver(db),
|
||||
AttrDefId::ExternCrateId(it) => it.resolver(db),
|
||||
AttrDefId::GenericParamId(_) => return None,
|
||||
};
|
||||
|
||||
let mut modpath = doc_modpath_from_str(link)?;
|
||||
|
||||
let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
|
||||
let resolved = resolver.resolve_module_path_in_items(db, &modpath);
|
||||
if resolved.is_none() {
|
||||
let last_name = modpath.pop_segment()?;
|
||||
resolve_assoc_or_field(db, resolver, modpath, last_name, ns)
|
||||
|
|
@ -168,7 +168,7 @@ fn resolve_assoc_or_field(
|
|||
let path = Path::from_known_path_with_no_generic(path);
|
||||
// FIXME: This does not handle `Self` on trait definitions, which we should resolve to the
|
||||
// trait itself.
|
||||
let base_def = resolver.resolve_path_in_type_ns_fully(db.upcast(), &path)?;
|
||||
let base_def = resolver.resolve_path_in_type_ns_fully(db, &path)?;
|
||||
|
||||
let ty = match base_def {
|
||||
TypeNs::SelfType(id) => Impl::from(id).self_ty(db),
|
||||
|
|
@ -255,7 +255,7 @@ fn resolve_impl_trait_item(
|
|||
let environment = resolver
|
||||
.generic_def()
|
||||
.map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
|
||||
let traits_in_scope = resolver.traits_in_scope(db.upcast());
|
||||
let traits_in_scope = resolver.traits_in_scope(db);
|
||||
|
||||
let mut result = None;
|
||||
|
||||
|
|
|
|||
|
|
@ -433,7 +433,7 @@ impl AnyDiagnostic {
|
|||
) -> Option<AnyDiagnostic> {
|
||||
match diagnostic {
|
||||
BodyValidationDiagnostic::RecordMissingFields { record, variant, missed_fields } => {
|
||||
let variant_data = variant.variant_data(db.upcast());
|
||||
let variant_data = variant.variant_data(db);
|
||||
let missed_fields = missed_fields
|
||||
.into_iter()
|
||||
.map(|idx| variant_data.fields()[idx].name.clone())
|
||||
|
|
@ -444,7 +444,7 @@ impl AnyDiagnostic {
|
|||
Either::Right(record_pat) => source_map.pat_syntax(record_pat).ok()?,
|
||||
};
|
||||
let file = record.file_id;
|
||||
let root = record.file_syntax(db.upcast());
|
||||
let root = record.file_syntax(db);
|
||||
match record.value.to_node(&root) {
|
||||
Either::Left(ast::Expr::RecordExpr(record_expr)) => {
|
||||
if record_expr.record_expr_field_list().is_some() {
|
||||
|
|
@ -493,7 +493,7 @@ impl AnyDiagnostic {
|
|||
BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
|
||||
match source_map.expr_syntax(match_expr) {
|
||||
Ok(source_ptr) => {
|
||||
let root = source_ptr.file_syntax(db.upcast());
|
||||
let root = source_ptr.file_syntax(db);
|
||||
if let Either::Left(ast::Expr::MatchExpr(match_expr)) =
|
||||
&source_ptr.value.to_node(&root)
|
||||
{
|
||||
|
|
|
|||
|
|
@ -88,7 +88,7 @@ impl HirDisplay for Function {
|
|||
if let Some(abi) = &data.abi {
|
||||
write!(f, "extern \"{}\" ", abi.as_str())?;
|
||||
}
|
||||
write!(f, "fn {}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "fn {}", data.name.display(f.db, f.edition()))?;
|
||||
|
||||
write_generic_params(GenericDefId::FunctionId(self.id), f)?;
|
||||
|
||||
|
|
@ -112,8 +112,7 @@ impl HirDisplay for Function {
|
|||
}
|
||||
|
||||
let pat_id = body.params[param.idx - body.self_param.is_some() as usize];
|
||||
let pat_str =
|
||||
body.pretty_print_pat(db.upcast(), self.id.into(), pat_id, true, f.edition());
|
||||
let pat_str = body.pretty_print_pat(db, self.id.into(), pat_id, true, f.edition());
|
||||
f.write_str(&pat_str)?;
|
||||
|
||||
f.write_str(": ")?;
|
||||
|
|
@ -194,7 +193,7 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi
|
|||
|
||||
if let Some(trait_) = impl_.trait_(db) {
|
||||
let trait_data = db.trait_signature(trait_.id);
|
||||
write!(f, " {} for", trait_data.name.display(db.upcast(), f.edition()))?;
|
||||
write!(f, " {} for", trait_data.name.display(db, f.edition()))?;
|
||||
}
|
||||
|
||||
f.write_char(' ')?;
|
||||
|
|
@ -245,7 +244,7 @@ impl HirDisplay for Struct {
|
|||
// FIXME: Render repr if its set explicitly?
|
||||
write_visibility(module_id, self.visibility(f.db), f)?;
|
||||
f.write_str("struct ")?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
|
||||
let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
|
||||
write_generic_params(def_id, f)?;
|
||||
|
||||
|
|
@ -284,7 +283,7 @@ impl HirDisplay for Enum {
|
|||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||
f.write_str("enum ")?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
|
||||
let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
|
||||
write_generic_params(def_id, f)?;
|
||||
|
||||
|
|
@ -301,7 +300,7 @@ impl HirDisplay for Union {
|
|||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||
f.write_str("union ")?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
|
||||
let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
|
||||
write_generic_params(def_id, f)?;
|
||||
|
||||
|
|
@ -361,7 +360,7 @@ fn write_variants(
|
|||
} else {
|
||||
f.write_str("{\n")?;
|
||||
for variant in &variants[..count] {
|
||||
write!(f, " {}", variant.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, " {}", variant.name(f.db).display(f.db, f.edition()))?;
|
||||
match variant.kind(f.db) {
|
||||
StructKind::Tuple => {
|
||||
let fields_str =
|
||||
|
|
@ -390,21 +389,21 @@ fn write_variants(
|
|||
impl HirDisplay for Field {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
|
||||
write!(f, "{}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}: ", self.name(f.db).display(f.db, f.edition()))?;
|
||||
self.ty(f.db).hir_fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl HirDisplay for TupleField {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write!(f, "pub {}: ", self.name().display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "pub {}: ", self.name().display(f.db, f.edition()))?;
|
||||
self.ty(f.db).hir_fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl HirDisplay for Variant {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
|
||||
let data = f.db.variant_fields(self.id.into());
|
||||
match data.shape {
|
||||
FieldsShape::Unit => {}
|
||||
|
|
@ -442,7 +441,7 @@ impl HirDisplay for ExternCrateDecl {
|
|||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||
f.write_str("extern crate ")?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", self.name(f.db).display(f.db, f.edition()))?;
|
||||
if let Some(alias) = self.alias(f.db) {
|
||||
write!(f, " as {}", alias.display(f.edition()))?;
|
||||
}
|
||||
|
|
@ -496,7 +495,7 @@ impl HirDisplay for TypeParam {
|
|||
match param_data {
|
||||
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
|
||||
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
|
||||
write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast(), f.edition()))?
|
||||
write!(f, "{}", p.name.clone().unwrap().display(f.db, f.edition()))?
|
||||
}
|
||||
TypeParamProvenance::ArgumentImplTrait => {
|
||||
return write_bounds_like_dyn_trait_with_prefix(
|
||||
|
|
@ -509,7 +508,7 @@ impl HirDisplay for TypeParam {
|
|||
}
|
||||
},
|
||||
TypeOrConstParamData::ConstParamData(p) => {
|
||||
write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", p.name.display(f.db, f.edition()))?;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -543,13 +542,13 @@ impl HirDisplay for TypeParam {
|
|||
|
||||
impl HirDisplay for LifetimeParam {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))
|
||||
write!(f, "{}", self.name(f.db).display(f.db, f.edition()))
|
||||
}
|
||||
}
|
||||
|
||||
impl HirDisplay for ConstParam {
|
||||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write!(f, "const {}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "const {}: ", self.name(f.db).display(f.db, f.edition()))?;
|
||||
self.ty(f.db).hir_fmt(f)
|
||||
}
|
||||
}
|
||||
|
|
@ -581,7 +580,7 @@ fn write_generic_params(
|
|||
};
|
||||
for (_, lifetime) in params.iter_lt() {
|
||||
delim(f)?;
|
||||
write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", lifetime.name.display(f.db, f.edition()))?;
|
||||
}
|
||||
for (_, ty) in params.iter_type_or_consts() {
|
||||
if let Some(name) = &ty.name() {
|
||||
|
|
@ -591,7 +590,7 @@ fn write_generic_params(
|
|||
continue;
|
||||
}
|
||||
delim(f)?;
|
||||
write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}", name.display(f.db, f.edition()))?;
|
||||
if let Some(default) = &ty.default {
|
||||
f.write_str(" = ")?;
|
||||
default.hir_fmt(f, &store)?;
|
||||
|
|
@ -599,7 +598,7 @@ fn write_generic_params(
|
|||
}
|
||||
TypeOrConstParamData::ConstParamData(c) => {
|
||||
delim(f)?;
|
||||
write!(f, "const {}: ", name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "const {}: ", name.display(f.db, f.edition()))?;
|
||||
c.ty.hir_fmt(f, &store)?;
|
||||
|
||||
if let Some(default) = &c.default {
|
||||
|
|
@ -657,7 +656,7 @@ fn write_where_predicates(
|
|||
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
|
||||
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f, store),
|
||||
WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
|
||||
Some(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition())),
|
||||
Some(name) => write!(f, "{}", name.display(f.db, f.edition())),
|
||||
None => f.write_str("{unnamed}"),
|
||||
},
|
||||
};
|
||||
|
|
@ -691,8 +690,7 @@ fn write_where_predicates(
|
|||
bound.hir_fmt(f, store)?;
|
||||
}
|
||||
ForLifetime { lifetimes, target, bound } => {
|
||||
let lifetimes =
|
||||
lifetimes.iter().map(|it| it.display(f.db.upcast(), f.edition())).join(", ");
|
||||
let lifetimes = lifetimes.iter().map(|it| it.display(f.db, f.edition())).join(", ");
|
||||
write!(f, "for<{lifetimes}> ")?;
|
||||
write_target(target, f)?;
|
||||
f.write_str(": ")?;
|
||||
|
|
@ -726,7 +724,7 @@ impl HirDisplay for Const {
|
|||
let data = db.const_signature(self.id);
|
||||
f.write_str("const ")?;
|
||||
match &data.name {
|
||||
Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?,
|
||||
Some(name) => write!(f, "{}: ", name.display(f.db, f.edition()))?,
|
||||
None => f.write_str("_: ")?,
|
||||
}
|
||||
data.type_ref.hir_fmt(f, &data.store)?;
|
||||
|
|
@ -742,7 +740,7 @@ impl HirDisplay for Static {
|
|||
if data.flags.contains(StaticFlags::MUTABLE) {
|
||||
f.write_str("mut ")?;
|
||||
}
|
||||
write!(f, "{}: ", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "{}: ", data.name.display(f.db, f.edition()))?;
|
||||
data.type_ref.hir_fmt(f, &data.store)?;
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -802,7 +800,7 @@ fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), Hi
|
|||
if data.flags.contains(TraitFlags::IS_AUTO) {
|
||||
f.write_str("auto ")?;
|
||||
}
|
||||
write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "trait {}", data.name.display(f.db, f.edition()))?;
|
||||
write_generic_params(GenericDefId::TraitId(trait_.id), f)?;
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -811,7 +809,7 @@ impl HirDisplay for TraitAlias {
|
|||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||
let data = f.db.trait_alias_signature(self.id);
|
||||
write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "trait {}", data.name.display(f.db, f.edition()))?;
|
||||
let def_id = GenericDefId::TraitAliasId(self.id);
|
||||
write_generic_params(def_id, f)?;
|
||||
f.write_str(" = ")?;
|
||||
|
|
@ -827,7 +825,7 @@ impl HirDisplay for TypeAlias {
|
|||
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
|
||||
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
|
||||
let data = f.db.type_alias_signature(self.id);
|
||||
write!(f, "type {}", data.name.display(f.db.upcast(), f.edition()))?;
|
||||
write!(f, "type {}", data.name.display(f.db, f.edition()))?;
|
||||
let def_id = GenericDefId::TypeAliasId(self.id);
|
||||
write_generic_params(def_id, f)?;
|
||||
if !data.bounds.is_empty() {
|
||||
|
|
@ -858,7 +856,7 @@ impl HirDisplay for Module {
|
|||
}
|
||||
}
|
||||
match self.name(f.db) {
|
||||
Some(name) => write!(f, "mod {}", name.display(f.db.upcast(), f.edition())),
|
||||
Some(name) => write!(f, "mod {}", name.display(f.db, f.edition())),
|
||||
None => f.write_str("mod {unknown}"),
|
||||
}
|
||||
}
|
||||
|
|
@ -880,6 +878,6 @@ impl HirDisplay for Macro {
|
|||
hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"),
|
||||
hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"),
|
||||
}?;
|
||||
write!(f, " {}", self.name(f.db).display(f.db.upcast(), f.edition()))
|
||||
write!(f, " {}", self.name(f.db).display(f.db, f.edition()))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -36,23 +36,23 @@ pub trait HasSource {
|
|||
impl Module {
|
||||
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
|
||||
pub fn definition_source(self, db: &dyn HirDatabase) -> InFile<ModuleSource> {
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
def_map[self.id.local_id].definition_source(db.upcast())
|
||||
let def_map = self.id.def_map(db);
|
||||
def_map[self.id.local_id].definition_source(db)
|
||||
}
|
||||
|
||||
/// Returns a node which defines this module. That is, a file or a `mod foo {}` with items.
|
||||
pub fn definition_source_range(self, db: &dyn HirDatabase) -> InFile<TextRange> {
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
def_map[self.id.local_id].definition_source_range(db.upcast())
|
||||
let def_map = self.id.def_map(db);
|
||||
def_map[self.id.local_id].definition_source_range(db)
|
||||
}
|
||||
|
||||
pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId {
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
let def_map = self.id.def_map(db);
|
||||
def_map[self.id.local_id].definition_source_file_id()
|
||||
}
|
||||
|
||||
pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool {
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
let def_map = self.id.def_map(db);
|
||||
match def_map[self.id.local_id].origin {
|
||||
ModuleOrigin::File { is_mod_rs, .. } => is_mod_rs,
|
||||
_ => false,
|
||||
|
|
@ -60,7 +60,7 @@ impl Module {
|
|||
}
|
||||
|
||||
pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<EditionedFileId> {
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
let def_map = self.id.def_map(db);
|
||||
match def_map[self.id.local_id].origin {
|
||||
ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition, .. } => {
|
||||
Some(definition)
|
||||
|
|
@ -70,22 +70,22 @@ impl Module {
|
|||
}
|
||||
|
||||
pub fn is_inline(self, db: &dyn HirDatabase) -> bool {
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
let def_map = self.id.def_map(db);
|
||||
def_map[self.id.local_id].origin.is_inline()
|
||||
}
|
||||
|
||||
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
|
||||
/// `None` for the crate root.
|
||||
pub fn declaration_source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Module>> {
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
def_map[self.id.local_id].declaration_source(db.upcast())
|
||||
let def_map = self.id.def_map(db);
|
||||
def_map[self.id.local_id].declaration_source(db)
|
||||
}
|
||||
|
||||
/// Returns a text range which declares this module, either a `mod foo;` or a `mod foo {}`.
|
||||
/// `None` for the crate root.
|
||||
pub fn declaration_source_range(self, db: &dyn HirDatabase) -> Option<InFile<TextRange>> {
|
||||
let def_map = self.id.def_map(db.upcast());
|
||||
def_map[self.id.local_id].declaration_source_range(db.upcast())
|
||||
let def_map = self.id.def_map(db);
|
||||
def_map[self.id.local_id].declaration_source_range(db)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -93,7 +93,7 @@ impl HasSource for Field {
|
|||
type Ast = FieldSource;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
let var = VariantId::from(self.parent);
|
||||
let src = var.child_source(db.upcast());
|
||||
let src = var.child_source(db);
|
||||
let field_source = src.map(|it| match it[self.id].clone() {
|
||||
Either::Left(it) => FieldSource::Pos(it),
|
||||
Either::Right(it) => FieldSource::Named(it),
|
||||
|
|
@ -124,96 +124,88 @@ impl HasSource for VariantDef {
|
|||
impl HasSource for Struct {
|
||||
type Ast = ast::Struct;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for Union {
|
||||
type Ast = ast::Union;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for Enum {
|
||||
type Ast = ast::Enum;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for Variant {
|
||||
type Ast = ast::Variant;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<ast::Variant>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for Function {
|
||||
type Ast = ast::Fn;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for Const {
|
||||
type Ast = ast::Const;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for Static {
|
||||
type Ast = ast::Static;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for Trait {
|
||||
type Ast = ast::Trait;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for TraitAlias {
|
||||
type Ast = ast::TraitAlias;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for TypeAlias {
|
||||
type Ast = ast::TypeAlias;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
impl HasSource for Macro {
|
||||
type Ast = Either<ast::Macro, ast::Fn>;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
match self.id {
|
||||
MacroId::Macro2Id(it) => Some(
|
||||
it.lookup(db.upcast())
|
||||
.source(db.upcast())
|
||||
.map(ast::Macro::MacroDef)
|
||||
.map(Either::Left),
|
||||
),
|
||||
MacroId::MacroRulesId(it) => Some(
|
||||
it.lookup(db.upcast())
|
||||
.source(db.upcast())
|
||||
.map(ast::Macro::MacroRules)
|
||||
.map(Either::Left),
|
||||
),
|
||||
MacroId::ProcMacroId(it) => {
|
||||
Some(it.lookup(db.upcast()).source(db.upcast()).map(Either::Right))
|
||||
MacroId::Macro2Id(it) => {
|
||||
Some(it.lookup(db).source(db).map(ast::Macro::MacroDef).map(Either::Left))
|
||||
}
|
||||
MacroId::MacroRulesId(it) => {
|
||||
Some(it.lookup(db).source(db).map(ast::Macro::MacroRules).map(Either::Left))
|
||||
}
|
||||
MacroId::ProcMacroId(it) => Some(it.lookup(db).source(db).map(Either::Right)),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl HasSource for Impl {
|
||||
type Ast = ast::Impl;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSource for TypeOrConstParam {
|
||||
type Ast = Either<ast::TypeOrConstParam, ast::TraitOrAlias>;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
let child_source = self.id.parent.child_source(db.upcast());
|
||||
let child_source = self.id.parent.child_source(db);
|
||||
child_source.map(|it| it.get(self.id.local_id).cloned()).transpose()
|
||||
}
|
||||
}
|
||||
|
|
@ -221,7 +213,7 @@ impl HasSource for TypeOrConstParam {
|
|||
impl HasSource for LifetimeParam {
|
||||
type Ast = ast::LifetimeParam;
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
let child_source = self.id.parent.child_source(db.upcast());
|
||||
let child_source = self.id.parent.child_source(db);
|
||||
child_source.map(|it| it.get(self.id.local_id).cloned()).transpose()
|
||||
}
|
||||
}
|
||||
|
|
@ -291,7 +283,7 @@ impl HasSource for Label {
|
|||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
let (_body, source_map) = db.body_with_source_map(self.parent);
|
||||
let src = source_map.label_syntax(self.label_id);
|
||||
let root = src.file_syntax(db.upcast());
|
||||
let root = src.file_syntax(db);
|
||||
Some(src.map(|ast| ast.to_node(&root)))
|
||||
}
|
||||
}
|
||||
|
|
@ -300,7 +292,7 @@ impl HasSource for ExternCrateDecl {
|
|||
type Ast = ast::ExternCrate;
|
||||
|
||||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
Some(self.id.lookup(db.upcast()).source(db.upcast()))
|
||||
Some(self.id.lookup(db).source(db))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -309,7 +301,7 @@ impl HasSource for InlineAsmOperand {
|
|||
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
|
||||
let source_map = db.body_with_source_map(self.owner).1;
|
||||
if let Ok(src) = source_map.expr_syntax(self.expr) {
|
||||
let root = src.file_syntax(db.upcast());
|
||||
let root = src.file_syntax(db);
|
||||
return src
|
||||
.map(|ast| match ast.to_node(&root) {
|
||||
Either::Left(ast::Expr::AsmExpr(asm)) => asm
|
||||
|
|
|
|||
File diff suppressed because it is too large
|
|
@ -349,7 +349,7 @@ impl<'db> SemanticsImpl<'db> {
ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
let file_id = declaration_tree_id.file_id();
let in_file = InFile::new(file_id, declaration);
let node = in_file.to_node(self.db.upcast());
let node = in_file.to_node(self.db);
let root = find_root(node.syntax());
self.cache(root, file_id);
Some(in_file.with_value(node.syntax().clone()))

@ -358,10 +358,8 @@ impl<'db> SemanticsImpl<'db> {
}
}
HirFileIdRepr::MacroFile(macro_file) => {
let node = self
.db
.lookup_intern_macro_call(macro_file.macro_call_id)
.to_node(self.db.upcast());
let node =
self.db.lookup_intern_macro_call(macro_file.macro_call_id).to_node(self.db);
let root = find_root(&node.value);
self.cache(root, node.file_id);
Some(node)

@ -372,8 +370,8 @@ impl<'db> SemanticsImpl<'db> {
/// Returns the `SyntaxNode` of the module. If this is a file module, returns
/// the `SyntaxNode` of the *definition* file, not of the *declaration*.
pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
let def_map = module.id.def_map(self.db.upcast());
let definition = def_map[module.id.local_id].origin.definition_source(self.db.upcast());
let def_map = module.id.def_map(self.db);
let definition = def_map[module.id.local_id].origin.definition_source(self.db);
let definition = definition.map(|it| it.node());
let root_node = find_root(&definition.value);
self.cache(root_node, definition.file_id);

@ -412,7 +410,7 @@ impl<'db> SemanticsImpl<'db> {
self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate
}
};
hir_expand::check_cfg_attr_value(self.db.upcast(), attr, krate)
hir_expand::check_cfg_attr_value(self.db, attr, krate)
}
/// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy

@ -565,7 +563,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = InFile::new(analyzer.file_id, actual_macro_call);
let macro_file = analyzer.expansion(macro_call)?;
hir_expand::db::expand_speculative(
self.db.upcast(),
self.db,
macro_file.macro_call_id,
speculative_args.syntax(),
token_to_map,

@ -579,7 +577,7 @@ impl<'db> SemanticsImpl<'db> {
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
hir_expand::db::expand_speculative(
self.db.upcast(),
self.db,
macro_file.macro_call_id,
speculative_args,
token_to_map,

@ -597,7 +595,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = self.wrap_node_infile(actual_macro_call.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call.as_ref()))?;
hir_expand::db::expand_speculative(
self.db.upcast(),
self.db,
macro_call_id,
speculative_args.syntax(),
token_to_map,

@ -616,7 +614,7 @@ impl<'db> SemanticsImpl<'db> {
ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
})?;
hir_expand::db::expand_speculative(
self.db.upcast(),
self.db,
macro_call_id,
speculative_args.syntax(),
token_to_map,

@ -627,7 +625,7 @@ impl<'db> SemanticsImpl<'db> {
/// and returns the conflicting locals.
pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &str) -> Vec<Local> {
let body = self.db.body(to_be_renamed.parent);
let resolver = to_be_renamed.parent.resolver(self.db.upcast());
let resolver = to_be_renamed.parent.resolver(self.db);
let starting_expr =
body.binding_owners.get(&to_be_renamed.binding_id).copied().unwrap_or(body.body_expr);
let mut visitor = RenameConflictsVisitor {

@ -891,7 +889,7 @@ impl<'db> SemanticsImpl<'db> {
let token = self.wrap_token_infile(token);
if let Ok(token) = token.clone().into_real_file() {
self.descend_into_macros_impl(token, &mut |t, ctx| {
if !ctx.is_opaque(self.db.upcast()) {
if !ctx.is_opaque(self.db) {
// Don't descend into opaque contexts
res.push(t);
}

@ -924,7 +922,7 @@ impl<'db> SemanticsImpl<'db> {
let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
let matches = (kind == mapped_kind || any_ident_match())
&& text == value.text()
&& !ctx.is_opaque(self.db.upcast());
&& !ctx.is_opaque(self.db);
if matches {
r.push(value);
}

@ -1097,7 +1095,7 @@ impl<'db> SemanticsImpl<'db> {
filter_duplicates(tokens, text_range);
process_expansion_for_token(&mut stack, file_id).or(file_id
.eager_arg(self.db.upcast())
.eager_arg(self.db)
.and_then(|arg| {
// also descend into eager expansions
process_expansion_for_token(&mut stack, arg.as_macro_file())

@ -1229,21 +1227,19 @@ impl<'db> SemanticsImpl<'db> {
/// macro file the node resides in.
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
let node = self.find_file(node);
node.original_file_range_rooted(self.db.upcast())
node.original_file_range_rooted(self.db)
}
/// Attempts to map the node out of macro expanded files returning the original file range.
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
let node = self.find_file(node);
node.original_file_range_opt(self.db.upcast())
.filter(|(_, ctx)| ctx.is_root())
.map(TupleExt::head)
node.original_file_range_opt(self.db).filter(|(_, ctx)| ctx.is_root()).map(TupleExt::head)
}
/// Attempts to map the node out of macro expanded files.
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map(
self.wrap_node_infile(node).original_ast_node_rooted(self.db).map(
|InRealFile { file_id, value }| {
self.cache(find_root(value.syntax()), file_id.into());
value

@ -1255,7 +1251,7 @@ impl<'db> SemanticsImpl<'db> {
/// This only work for attribute expansions, as other ones do not have nodes as input.
pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node_rooted(self.db.upcast()).map(
InFile::new(file_id, node).original_syntax_node_rooted(self.db).map(
|InRealFile { file_id, value }| {
self.cache(find_root(&value), file_id.into());
value

@ -1266,7 +1262,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
let root = self.parse_or_expand(src.file_id);
let node = src.map(|it| it.to_node(&root));
node.as_ref().original_file_range_rooted(self.db.upcast())
node.as_ref().original_file_range_rooted(self.db)
}
fn token_ancestors_with_macros(

@ -1338,7 +1334,7 @@ impl<'db> SemanticsImpl<'db> {
hir_def::type_ref::TypeRef::Path(path) => path,
_ => return None,
};
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path)? {
match analyze.resolver.resolve_path_in_type_ns_fully(self.db, path)? {
TypeNs::TraitId(trait_id) => Some(trait_id.into()),
_ => None,
}

@ -1613,7 +1609,7 @@ impl<'db> SemanticsImpl<'db> {
path: &ModPath,
) -> Option<impl Iterator<Item = ItemInNs>> {
let analyze = self.analyze(scope)?;
let items = analyze.resolver.resolve_module_path_in_items(self.db.upcast(), path);
let items = analyze.resolver.resolve_module_path_in_items(self.db, path);
Some(items.iter_items().map(|(item, _)| item.into()))
}

@ -1754,7 +1750,7 @@ impl<'db> SemanticsImpl<'db> {
ChildContainer::GenericDefId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it, node, offset));
}
ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
ChildContainer::ModuleId(it) => it.resolver(self.db),
};
Some(SourceAnalyzer::new_for_resolver(resolver, node))
}

@ -1867,7 +1863,7 @@ fn macro_call_to_macro_id(
) -> Option<MacroId> {
use span::HirFileIdRepr;
let db: &dyn ExpandDatabase = ctx.db.upcast();
let db: &dyn ExpandDatabase = ctx.db;
let loc = db.lookup_intern_macro_call(macro_call_id);
match loc.def.ast_id() {

@ -2002,12 +1998,12 @@ impl SemanticsScope<'_> {
/// Note: `VisibleTraits` should be treated as an opaque type, passed into `Type
pub fn visible_traits(&self) -> VisibleTraits {
let resolver = &self.resolver;
VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
VisibleTraits(resolver.traits_in_scope(self.db))
}
/// Calls the passed closure `f` on all names in scope.
pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
let scope = self.resolver.names_in_scope(self.db.upcast());
let scope = self.resolver.names_in_scope(self.db);
for (name, entries) in scope {
for entry in entries {
let def = match entry {

@ -2071,7 +2067,7 @@ impl SemanticsScope<'_> {
}
pub fn resolve_mod_path(&self, path: &ModPath) -> impl Iterator<Item = ItemInNs> + use<> {
let items = self.resolver.resolve_module_path_in_items(self.db.upcast(), path);
let items = self.resolver.resolve_module_path_in_items(self.db, path);
items.iter_items().map(|(item, _)| item.into())
}

@ -2100,7 +2096,7 @@ impl SemanticsScope<'_> {
}
pub fn extern_crate_decls(&self) -> impl Iterator<Item = Name> + '_ {
self.resolver.extern_crate_decls_in_scope(self.db.upcast())
self.resolver.extern_crate_decls_in_scope(self.db)
}
pub fn has_same_self_type(&self, other: &SemanticsScope<'_>) -> bool {

@ -2136,7 +2132,7 @@ impl RenameConflictsVisitor<'_> {
if let Some(name) = path.as_ident() {
if *name.symbol() == self.new_name {
if let Some(conflicting) = self.resolver.rename_will_conflict_with_renamed(
self.db.upcast(),
self.db,
name,
path,
self.body.expr_or_pat_path_hygiene(node),

@ -2147,7 +2143,7 @@ impl RenameConflictsVisitor<'_> {
} else if *name.symbol() == self.old_name {
if let Some(conflicting) =
self.resolver.rename_will_conflict_with_another_variable(
self.db.upcast(),
self.db,
name,
path,
self.body.expr_or_pat_path_hygiene(node),

@ -2165,12 +2161,12 @@ impl RenameConflictsVisitor<'_> {
fn rename_conflicts(&mut self, expr: ExprId) {
match &self.body[expr] {
Expr::Path(path) => {
let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
self.resolve_path(expr.into(), path);
self.resolver.reset_to_guard(guard);
}
&Expr::Assignment { target, .. } => {
let guard = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
let guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr);
self.body.walk_pats(target, &mut |pat| {
if let Pat::Path(path) = &self.body[pat] {
self.resolve_path(pat.into(), path);
@ -38,7 +38,7 @@ impl ChildBySource for TraitId {
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
},
);
data.items.iter().for_each(|&(_, item)| {

@ -53,7 +53,7 @@ impl ChildBySource for ImplId {
// FIXME: Macro calls
data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
},
);
data.items.iter().for_each(|&(_, item)| {

@ -84,7 +84,7 @@ impl ChildBySource for ItemScope {
.for_each(|konst| insert_item_loc(db, res, file_id, konst, keys::CONST));
self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, call_id)| {
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db.upcast()), call_id);
res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
},
);
self.legacy_macros().for_each(|(_, ids)| {

@ -99,7 +99,7 @@ impl ChildBySource for ItemScope {
});
self.derive_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, calls)| {
let adt = ast_id.to_node(db.upcast());
let adt = ast_id.to_node(db);
calls.for_each(|(attr_id, call_id, calls)| {
if let Some((_, Either::Left(attr))) =
collect_attrs(&adt).nth(attr_id.ast_index())

@ -112,7 +112,7 @@ impl ChildBySource for ItemScope {
);
self.iter_macro_invoc().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, &call)| {
let ast = ast_id.to_ptr(db.upcast());
let ast = ast_id.to_ptr(db);
res[keys::MACRO_CALL].insert(ast, call);
},
);

@ -204,7 +204,7 @@ impl ChildBySource for DefWithBodyId {
// All block expressions are merged into the same map, because they logically all add
// inner items to the containing `DefWithBodyId`.
def_map[DefMap::ROOT].scope.child_by_source_to(db, res, file_id);
res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db.upcast()), block);
res[keys::BLOCK].insert(block.lookup(db).ast_id.to_ptr(db), block);
}
}
}
@ -85,7 +85,6 @@
//! active crate for a given position, and then provide an API to resolve all
//! syntax nodes against this specific crate.
use base_db::{RootQueryDb, Upcast};
use either::Either;
use hir_def::{
AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,

@ -144,7 +143,7 @@ impl SourceToDefCache {
return m;
}
self.included_file_cache.insert(file, None);
for &crate_id in Upcast::<dyn RootQueryDb>::upcast(db).relevant_crates(file.into()).iter() {
for &crate_id in db.relevant_crates(file.into()).iter() {
db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
});

@ -158,7 +157,7 @@ impl SourceToDefCache {
macro_file: MacroFileId,
) -> &ExpansionInfo {
self.expansion_info_cache.entry(macro_file).or_insert_with(|| {
let exp_info = macro_file.expansion_info(db.upcast());
let exp_info = macro_file.expansion_info(db);
let InMacroFile { file_id, value } = exp_info.expanded();
Self::cache(&mut self.root_to_file_cache, value, file_id.into());

@ -179,8 +178,7 @@ impl SourceToDefCtx<'_, '_> {
self.cache.file_to_def_cache.entry(file).or_insert_with(|| {
let mut mods = SmallVec::new();
for &crate_id in Upcast::<dyn RootQueryDb>::upcast(self.db).relevant_crates(file).iter()
{
for &crate_id in self.db.relevant_crates(file).iter() {
// Note: `mod` declarations in block modules cannot be supported here
let crate_def_map = self.db.crate_def_map(crate_id);
let n_mods = mods.len();

@ -202,10 +200,10 @@ impl SourceToDefCtx<'_, '_> {
.insert(file_id, Some(MacroFileId { macro_call_id }));
modules(
macro_call_id
.lookup(self.db.upcast())
.lookup(self.db)
.kind
.file_id()
.original_file(self.db.upcast())
.original_file(self.db)
.file_id(),
)
}),

@ -235,20 +233,20 @@ impl SourceToDefCtx<'_, '_> {
self.module_to_def(parent_declaration.as_ref())
}
None => {
let file_id = src.file_id.original_file(self.db.upcast());
let file_id = src.file_id.original_file(self.db);
self.file_to_def(file_id.file_id()).first().copied()
}
}?;
let child_name = src.value.name()?.as_name();
let def_map = parent_module.def_map(self.db.upcast());
let def_map = parent_module.def_map(self.db);
let &child_id = def_map[parent_module.local_id].children.get(&child_name)?;
Some(def_map.module_id(child_id))
}
pub(super) fn source_file_to_def(&mut self, src: InFile<&ast::SourceFile>) -> Option<ModuleId> {
let _p = tracing::info_span!("source_file_to_def").entered();
let file_id = src.file_id.original_file(self.db.upcast());
let file_id = src.file_id.original_file(self.db);
self.file_to_def(file_id.file_id()).first().copied()
}

@ -528,10 +526,8 @@ impl SourceToDefCtx<'_, '_> {
return Some(def);
}
let def = self
.file_to_def(src.file_id.original_file(self.db.upcast()).file_id())
.first()
.copied()?;
let def =
self.file_to_def(src.file_id.original_file(self.db).file_id()).first().copied()?;
Some(def.into())
}

@ -754,7 +750,6 @@ impl_from! {
impl ChildContainer {
fn child_by_source(self, db: &dyn HirDatabase, file_id: HirFileId) -> DynMap {
let _p = tracing::info_span!("ChildContainer::child_by_source").entered();
let db = db.upcast();
match self {
ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
@ -125,7 +125,7 @@ impl SourceAnalyzer {
scope_for_offset(db, &scopes, &source_map, node.file_id, offset)
}
};
let resolver = resolver_for_scope(db.upcast(), def, scope);
let resolver = resolver_for_scope(db, def, scope);
SourceAnalyzer {
resolver,
body_or_sig: Some(BodyOrSig::Body { def, body, source_map, infer }),

@ -140,7 +140,7 @@ impl SourceAnalyzer {
_offset: Option<TextSize>,
) -> SourceAnalyzer {
let (_params, store, source_map) = db.generic_params_and_store_and_source_map(def);
let resolver = def.resolver(db.upcast());
let resolver = def.resolver(db);
SourceAnalyzer {
resolver,
body_or_sig: Some(BodyOrSig::Sig { _def: def, store, source_map }),

@ -155,7 +155,7 @@ impl SourceAnalyzer {
_offset: Option<TextSize>,
) -> SourceAnalyzer {
let (fields, source_map) = db.variant_fields_with_source_map(def);
let resolver = def.resolver(db.upcast());
let resolver = def.resolver(db);
SourceAnalyzer {
resolver,
body_or_sig: Some(BodyOrSig::VariantFields {

@ -500,7 +500,7 @@ impl SourceAnalyzer {
(RangeOp::Inclusive, None, None) => return None,
(RangeOp::Inclusive, Some(_), None) => return None,
};
self.resolver.resolve_known_struct(db.upcast(), &path)
self.resolver.resolve_known_struct(db, &path)
}
pub(crate) fn resolve_range_expr(

@ -520,7 +520,7 @@ impl SourceAnalyzer {
(RangeOp::Inclusive, None, None) => return None,
(RangeOp::Inclusive, Some(_), None) => return None,
};
self.resolver.resolve_known_struct(db.upcast(), &path)
self.resolver.resolve_known_struct(db, &path)
}
pub(crate) fn resolve_await_to_poll(

@ -532,7 +532,7 @@ impl SourceAnalyzer {
let into_future_trait = self
.resolver
.resolve_known_trait(db.upcast(), &path![core::future::IntoFuture])
.resolve_known_trait(db, &path![core::future::IntoFuture])
.map(Trait::from);
if let Some(into_future_trait) = into_future_trait {

@ -666,7 +666,7 @@ impl SourceAnalyzer {
let ty = self.ty_of_expr(try_expr.expr()?)?;
let op_fn = db.lang_item(self.resolver.krate(), LangItem::TryTraitBranch)?.as_function()?;
let op_trait = match op_fn.lookup(db.upcast()).container {
let op_trait = match op_fn.lookup(db).container {
ItemContainerId::TraitId(id) => id,
_ => return None,
};

@ -697,7 +697,7 @@ impl SourceAnalyzer {
once(local_name.clone()),
));
match self.resolver.resolve_path_in_value_ns_fully(
db.upcast(),
db,
&path,
name_hygiene(db, InFile::new(self.file_id, ast_name.syntax())),
) {

@ -709,7 +709,7 @@ impl SourceAnalyzer {
};
let (adt, subst) = self.infer()?.type_of_expr_or_pat(expr_id)?.as_adt()?;
let variant = self.infer()?.variant_resolution_for_expr_or_pat(expr_id)?;
let variant_data = variant.variant_data(db.upcast());
let variant_data = variant.variant_data(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
let field_ty =
db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);

@ -730,7 +730,7 @@ impl SourceAnalyzer {
let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
let pat_id = self.pat_id(&record_pat.into())?;
let variant = self.infer()?.variant_resolution_for_pat(pat_id.as_pat()?)?;
let variant_data = variant.variant_data(db.upcast());
let variant_data = variant.variant_data(db);
let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
let (adt, subst) = self.infer()?.type_of_pat.get(pat_id.as_pat()?)?.as_adt()?;
let field_ty =

@ -750,7 +750,7 @@ impl SourceAnalyzer {
let bs = self.store_sm()?;
bs.expansion(macro_call).and_then(|it| {
// FIXME: Block def maps
let def = it.macro_call_id.lookup(db.upcast()).def;
let def = it.macro_call_id.lookup(db).def;
db.crate_def_map(def.krate)
.macro_def_to_macro_id
.get(&def.kind.erased_ast_id())
@ -930,7 +930,7 @@ impl SourceAnalyzer {
}
// FIXME: collectiong here shouldnt be necessary?
let mut collector = ExprCollector::new(db.upcast(), self.resolver.module(), self.file_id);
let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id);
let hir_path = collector.lower_path(path.clone(), &mut |_| TypeRef::Error)?;
let parent_hir_path =
path.parent_path().and_then(|p| collector.lower_path(p, &mut |_| TypeRef::Error));

@ -979,10 +979,8 @@ impl SourceAnalyzer {
// ```
Some(it) if matches!(it, PathResolution::Def(ModuleDef::BuiltinType(_))) => {
if let Some(mod_path) = hir_path.mod_path() {
if let Some(ModuleDefId::ModuleId(id)) = self
.resolver
.resolve_module_path_in_items(db.upcast(), mod_path)
.take_types()
if let Some(ModuleDefId::ModuleId(id)) =
self.resolver.resolve_module_path_in_items(db, mod_path).take_types()
{
let parent_hir_name =
parent_hir_path.segments().get(1).map(|it| it.name);

@ -1372,7 +1370,7 @@ fn scope_for(
source_map: &BodySourceMap,
node: InFile<&SyntaxNode>,
) -> Option<ScopeId> {
node.ancestors_with_macros(db.upcast())
node.ancestors_with_macros(db)
.take_while(|it| {
!ast::Item::can_cast(it.kind())
|| ast::MacroCall::can_cast(it.kind())

@ -1400,12 +1398,11 @@ fn scope_for_offset(
}
// FIXME handle attribute expansion
let source =
iter::successors(file_id.macro_file().map(|it| it.call_node(db.upcast())), |it| {
Some(it.file_id.macro_file()?.call_node(db.upcast()))
})
.find(|it| it.file_id == from_file)
.filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?;
let source = iter::successors(file_id.macro_file().map(|it| it.call_node(db)), |it| {
Some(it.file_id.macro_file()?.call_node(db))
})
.find(|it| it.file_id == from_file)
.filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?;
Some((source.text_range(), scope))
})
.filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())

@ -1435,7 +1432,7 @@ fn adjust(
if source.file_id != from_file {
return None;
}
let root = source.file_syntax(db.upcast());
let root = source.file_syntax(db);
let node = source.value.to_node(&root);
Some((node.syntax().text_range(), scope))
})

@ -1474,7 +1471,7 @@ pub(crate) fn resolve_hir_path_as_attr_macro(
path: &Path,
) -> Option<Macro> {
resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Attr))
.resolve_path_as_macro(db, path.mod_path()?, Some(MacroSubNs::Attr))
.map(|(it, _)| it)
.map(Into::into)
}

@ -1495,7 +1492,7 @@ fn resolve_hir_path_(
res.map(|ty_ns| (ty_ns, path.segments().first()))
}),
None => {
let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db, path)?;
match remaining_idx {
Some(remaining_idx) => {
if remaining_idx + 1 == path.segments().len() {

@ -1554,14 +1551,14 @@ fn resolve_hir_path_(
let items = || {
resolver
.resolve_module_path_in_items(db.upcast(), path.mod_path()?)
.resolve_module_path_in_items(db, path.mod_path()?)
.take_types()
.map(|it| PathResolution::Def(it.into()))
};
let macros = || {
resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, None)
.resolve_path_as_macro(db, path.mod_path()?, None)
.map(|(def, _)| PathResolution::Def(ModuleDef::Macro(def.into())))
};

@ -1577,7 +1574,7 @@ fn resolve_hir_value_path(
path: &Path,
hygiene: HygieneId,
) -> Option<PathResolution> {
resolver.resolve_path_in_value_ns_fully(db.upcast(), path, hygiene).and_then(|val| {
resolver.resolve_path_in_value_ns_fully(db, path, hygiene).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(binding_id) => {
let var = Local { parent: body_owner?, binding_id };

@ -1622,7 +1619,7 @@ fn resolve_hir_path_qualifier(
res.map(|ty_ns| (ty_ns, path.segments().first()))
}),
None => {
let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
let (ty, remaining_idx, _) = resolver.resolve_path_in_type_ns(db, path)?;
match remaining_idx {
Some(remaining_idx) => {
if remaining_idx + 1 == path.segments().len() {

@ -1677,7 +1674,7 @@ fn resolve_hir_path_qualifier(
})()
.or_else(|| {
resolver
.resolve_module_path_in_items(db.upcast(), path.mod_path()?)
.resolve_module_path_in_items(db, path.mod_path()?)
.take_types()
.map(|it| PathResolution::Def(it.into()))
})
@ -178,7 +178,7 @@ impl<'a> SymbolCollector<'a> {
let mut push_import = |this: &mut Self, i: ImportId, name: &Name, def: ModuleDefId, vis| {
let source = import_child_source_cache
.entry(i.use_)
.or_insert_with(|| i.use_.child_source(this.db.upcast()));
.or_insert_with(|| i.use_.child_source(this.db));
let Some(use_tree_src) = source.value.get(i.idx) else { return };
let rename = use_tree_src.rename().and_then(|rename| rename.name());
let name_syntax = match rename {

@ -209,8 +209,8 @@ impl<'a> SymbolCollector<'a> {
let push_extern_crate =
|this: &mut Self, i: ExternCrateId, name: &Name, def: ModuleDefId, vis| {
let loc = i.lookup(this.db.upcast());
let source = loc.source(this.db.upcast());
let loc = i.lookup(this.db);
let source = loc.source(this.db);
let rename = source.value.rename().and_then(|rename| rename.name());
let name_syntax = match rename {

@ -237,7 +237,7 @@ impl<'a> SymbolCollector<'a> {
});
};
let def_map = module_id.def_map(self.db.upcast());
let def_map = module_id.def_map(self.db);
let scope = &def_map[module_id.local_id].scope;
for impl_id in scope.impls() {

@ -290,7 +290,7 @@ impl<'a> SymbolCollector<'a> {
for (name, id) in scope.legacy_macros() {
for &id in id {
if id.module(self.db.upcast()) == module_id {
if id.module(self.db) == module_id {
match id {
MacroId::Macro2Id(id) => self.push_decl(id, name, false, None),
MacroId::MacroRulesId(id) => self.push_decl(id, name, false, None),

@ -306,7 +306,7 @@ impl<'a> SymbolCollector<'a> {
let body = self.db.body(body_id);
// Descend into the blocks and enqueue collection of all modules within.
for (_, def_map) in body.blocks(self.db.upcast()) {
for (_, def_map) in body.blocks(self.db) {
for (id, _) in def_map.modules() {
self.work.push(SymbolCollectorWork {
module_id: def_map.module_id(id),

@ -374,8 +374,8 @@ impl<'a> SymbolCollector<'a> {
<L as Lookup>::Data: HasSource,
<<L as Lookup>::Data as HasSource>::Value: HasName,
{
let loc = id.lookup(self.db.upcast());
let source = loc.source(self.db.upcast());
let loc = id.lookup(self.db);
let source = loc.source(self.db);
let Some(name_node) = source.value.name() else { return Complete::Yes };
let def = ModuleDef::from(id.into());
let dec_loc = DeclarationLocation {

@ -419,10 +419,10 @@ impl<'a> SymbolCollector<'a> {
}
fn push_module(&mut self, module_id: ModuleId, name: &Name) {
let def_map = module_id.def_map(self.db.upcast());
let def_map = module_id.def_map(self.db);
let module_data = &def_map[module_id.local_id];
let Some(declaration) = module_data.origin.declaration() else { return };
let module = declaration.to_node(self.db.upcast());
let module = declaration.to_node(self.db);
let Some(name_node) = module.name() else { return };
let dec_loc = DeclarationLocation {
hir_file_id: declaration.file_id,
@ -22,7 +22,7 @@ fn mod_item_path(
) -> Option<ModPath> {
let db = sema_scope.db;
let m = sema_scope.module();
m.find_path(db.upcast(), *def, cfg)
m.find_path(db, *def, cfg)
}
/// Helper function to get path to `ModuleDef` as string

@ -33,7 +33,7 @@ fn mod_item_path_str(
edition: Edition,
) -> Result<String, DisplaySourceCodeError> {
let path = mod_item_path(sema_scope, def, cfg);
path.map(|it| it.display(sema_scope.db.upcast(), edition).to_string())
path.map(|it| it.display(sema_scope.db, edition).to_string())
.ok_or(DisplaySourceCodeError::PathNotFound)
}

@ -111,15 +111,15 @@ impl Expr {
container_name(container, sema_scope, cfg, edition, display_target)?;
let const_name = it
.name(db)
.map(|c| c.display(db.upcast(), edition).to_string())
.map(|c| c.display(db, edition).to_string())
.unwrap_or(String::new());
Ok(format!("{container_name}::{const_name}"))
}
None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
},
Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
Expr::Local(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()),
Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()),
Expr::Local(it) => Ok(it.name(db).display(db, edition).to_string()),
Expr::ConstParam(it) => Ok(it.name(db).display(db, edition).to_string()),
Expr::FamousType { value, .. } => Ok(value.to_string()),
Expr::Function { func, params, .. } => {
let args = params

@ -133,7 +133,7 @@ impl Expr {
Some(container) => {
let container_name =
container_name(container, sema_scope, cfg, edition, display_target)?;
let fn_name = func.name(db).display(db.upcast(), edition).to_string();
let fn_name = func.name(db).display(db, edition).to_string();
Ok(format!("{container_name}::{fn_name}({args})"))
}
None => {

@ -147,7 +147,7 @@ impl Expr {
return Ok(many_formatter(&target.ty(db)));
}
let func_name = func.name(db).display(db.upcast(), edition).to_string();
let func_name = func.name(db).display(db, edition).to_string();
let self_param = func.self_param(db).unwrap();
let target_str =
target.gen_source_code(sema_scope, many_formatter, cfg, display_target)?;

@ -199,7 +199,7 @@ impl Expr {
.map(|(a, f)| {
let tmp = format!(
"{}: {}",
f.name(db).display(db.upcast(), edition),
f.name(db).display(db, edition),
a.gen_source_code(
sema_scope,
many_formatter,

@ -241,7 +241,7 @@ impl Expr {
.map(|(a, f)| {
let tmp = format!(
"{}: {}",
f.name(db).display(db.upcast(), edition),
f.name(db).display(db, edition),
a.gen_source_code(
sema_scope,
many_formatter,

@ -279,7 +279,7 @@ impl Expr {
let strukt =
expr.gen_source_code(sema_scope, many_formatter, cfg, display_target)?;
let field = field.name(db).display(db.upcast(), edition).to_string();
let field = field.name(db).display(db, edition).to_string();
Ok(format!("{strukt}.{field}"))
}
Expr::Reference(expr) => {

@ -387,7 +387,7 @@ fn container_name(
let self_ty = imp.self_ty(sema_scope.db);
// Should it be guaranteed that `mod_item_path` always exists?
match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) {
Some(path) => path.display(sema_scope.db.upcast(), edition).to_string(),
Some(path) => path.display(sema_scope.db, edition).to_string(),
None => self_ty.display(sema_scope.db, display_target).to_string(),
}
}
@ -248,11 +248,8 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
let mut names_in_scope = vec![];
semantics_scope.process_all_names(&mut |name, _| {
names_in_scope.push(
name.display(
semantics_scope.db.upcast(),
semantics_scope.krate().edition(semantics_scope.db),
)
.to_string(),
name.display(semantics_scope.db, semantics_scope.krate().edition(semantics_scope.db))
.to_string(),
)
});
@ -159,11 +159,7 @@ fn target_data_for_def(
let in_file_syntax = source.syntax();
let file_id = in_file_syntax.file_id;
let range = in_file_syntax.value.text_range();
Some((
ast::AnyHasVisibility::new(source.value),
range,
file_id.original_file(db.upcast()).file_id(),
))
Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db).file_id()))
}
let target_name;

@ -203,7 +199,7 @@ fn target_data_for_def(
hir::ModuleDef::Module(m) => {
target_name = m.name(db);
let in_file_source = m.declaration_source(db)?;
let file_id = in_file_source.file_id.original_file(db.upcast());
let file_id = in_file_source.file_id.original_file(db);
let range = in_file_source.value.syntax().text_range();
(ast::AnyHasVisibility::new(in_file_source.value), range, file_id.file_id())
}
@ -3,7 +3,6 @@ use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
use ide_db::{
FileId,
assists::AssistId,
base_db::Upcast,
defs::{Definition, NameRefClass},
};
use syntax::{

@ -123,7 +122,7 @@ fn target_data_for_generate_constant(
return None;
}
let in_file_source = current_module.definition_source(ctx.sema.db);
let file_id = in_file_source.file_id.original_file(ctx.sema.db.upcast());
let file_id = in_file_source.file_id.original_file(ctx.sema.db);
match in_file_source.value {
hir::ModuleSource::Module(module_node) => {
let indent = IndentLevel::from_node(module_node.syntax());
@ -1166,7 +1166,7 @@ fn next_space_for_fn_in_module(
target_module: hir::Module,
) -> (FileId, GeneratedFunctionTarget) {
let module_source = target_module.definition_source(db);
let file = module_source.file_id.original_file(db.upcast());
let file = module_source.file_id.original_file(db);
let assist_item = match &module_source.value {
hir::ModuleSource::SourceFile(it) => match it.items().last() {
Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()),
@ -191,7 +191,7 @@ fn render_record_as_pat(
format!(
"{name} {{ {}{} }}",
fields.enumerate().format_with(", ", |(idx, field), f| {
f(&format_args!("{}${}", field.name(db).display(db.upcast(), edition), idx + 1))
f(&format_args!("{}${}", field.name(db).display(db, edition), idx + 1))
}),
if fields_omitted { ", .." } else { "" },
name = name
@ -52,11 +52,11 @@ use std::{fmt, mem::ManuallyDrop};
use base_db::{
CrateGraphBuilder, CratesMap, FileSourceRootInput, FileText, Files, RootQueryDb,
SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, Upcast, query_group,
SourceDatabase, SourceRoot, SourceRootId, SourceRootInput, query_group,
};
use hir::{
FilePositionWrapper, FileRangeWrapper,
db::{DefDatabase, ExpandDatabase, HirDatabase},
db::{DefDatabase, ExpandDatabase},
};
use triomphe::Arc;

@ -116,39 +116,6 @@ impl fmt::Debug for RootDatabase {
}
}
impl Upcast<dyn ExpandDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
self
}
}
impl Upcast<dyn DefDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn DefDatabase + 'static) {
self
}
}
impl Upcast<dyn HirDatabase> for RootDatabase {
#[inline]
fn upcast(&self) -> &(dyn HirDatabase + 'static) {
self
}
}
impl Upcast<dyn RootQueryDb> for RootDatabase {
fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
self
}
}
impl Upcast<dyn SourceDatabase> for RootDatabase {
fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
self
}
}
#[salsa::db]
impl SourceDatabase for RootDatabase {
fn file_text(&self, file_id: vfs::FileId) -> FileText {
@ -210,7 +210,7 @@ impl<'a> PathTransform<'a> {
.flat_map(|it| it.lifetime_params(db))
.zip(self.substs.lifetimes.clone())
.filter_map(|(k, v)| {
Some((k.name(db).display(db.upcast(), target_edition).to_string(), v.lifetime()?))
Some((k.name(db).display(db, target_edition).to_string(), v.lifetime()?))
})
.collect();
let ctx = Ctx {

@ -325,7 +325,7 @@ impl Ctx<'_> {
allow_unstable: true,
};
let found_path = self.target_module.find_path(
self.source_scope.db.upcast(),
self.source_scope.db,
hir::ModuleDef::Trait(trait_ref),
cfg,
)?;

@ -384,8 +384,7 @@ impl Ctx<'_> {
prefer_absolute: false,
allow_unstable: true,
};
let found_path =
self.target_module.find_path(self.source_scope.db.upcast(), def, cfg)?;
let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?;
let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update();
if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) {
if let Some(segment) = res.segment() {

@ -425,7 +424,7 @@ impl Ctx<'_> {
allow_unstable: true,
};
let found_path = self.target_module.find_path(
self.source_scope.db.upcast(),
self.source_scope.db,
ModuleDef::from(adt),
cfg,
)?;
@ -27,7 +27,7 @@ use std::{
ops::ControlFlow,
};
use base_db::{RootQueryDb, SourceDatabase, SourceRootId, Upcast};
use base_db::{RootQueryDb, SourceDatabase, SourceRootId};
use fst::{Automaton, Streamer, raw::IndexedValue};
use hir::{
Crate, Module,

@ -97,7 +97,7 @@ impl Query {
}
#[query_group::query_group]
pub trait SymbolsDatabase: HirDatabase + SourceDatabase + Upcast<dyn HirDatabase> {
pub trait SymbolsDatabase: HirDatabase + SourceDatabase {
/// The symbol index for a given module. These modules should only be in source roots that
/// are inside local_roots.
fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;

@ -123,11 +123,11 @@ pub trait SymbolsDatabase: HirDatabase + SourceDatabase + Upcast<dyn HirDatabase
fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Arc<SymbolIndex> {
let _p = tracing::info_span!("library_symbols").entered();
let mut symbol_collector = SymbolCollector::new(db.upcast());
let mut symbol_collector = SymbolCollector::new(db);
db.source_root_crates(source_root_id)
.iter()
.flat_map(|&krate| Crate::from(krate).modules(db.upcast()))
.flat_map(|&krate| Crate::from(krate).modules(db))
// we specifically avoid calling other SymbolsDatabase queries here, even though they do the same thing,
// as the index for a library is not going to really ever change, and we do not want to store each
// the module or crate indices for those in salsa unless we need to.

@ -139,12 +139,12 @@ fn library_symbols(db: &dyn SymbolsDatabase, source_root_id: SourceRootId) -> Ar
fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
let _p = tracing::info_span!("module_symbols").entered();
Arc::new(SymbolIndex::new(SymbolCollector::new_module(db.upcast(), module)))
Arc::new(SymbolIndex::new(SymbolCollector::new_module(db, module)))
}
pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolIndex>]> {
let _p = tracing::info_span!("crate_symbols").entered();
krate.modules(db.upcast()).into_iter().map(|module| db.module_symbols(module)).collect()
krate.modules(db).into_iter().map(|module| db.module_symbols(module)).collect()
}
// Feature: Workspace Symbol
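The `SymbolsDatabase` hunks above show the other half of the cleanup: trait definitions no longer need an extra `Upcast<dyn HirDatabase>` bound just so functions taking `&dyn SymbolsDatabase` can hand the database to code that wants the supertrait object. A small sketch of the simplified bound, again with illustrative names rather than the real API:

```rust
trait HirDb {
    fn symbol_count(&self) -> usize;
}

// The query trait now carries only the plain supertrait bound; before the
// migration it also needed something like `+ Upcast<dyn HirDb>`.
trait SymbolsDb: HirDb {
    fn index_size(&self) -> usize;
}

// A helper written against the supertrait object.
fn collect(db: &dyn HirDb) -> usize {
    db.symbol_count()
}

// A `&dyn SymbolsDb` upcasts to `&dyn HirDb` directly on Rust 1.86+,
// which is exactly the conversion `db.upcast()` used to spell out.
fn module_symbols(db: &dyn SymbolsDb) -> usize {
    collect(db) + db.index_size()
}

struct Db;

impl HirDb for Db {
    fn symbol_count(&self) -> usize {
        42
    }
}

impl SymbolsDb for Db {
    fn index_size(&self) -> usize {
        1
    }
}

fn main() {
    println!("{}", module_symbols(&Db));
}
```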
@ -175,7 +175,7 @@ fn make_ty(
edition: Edition,
) -> ast::Type {
let ty_str = match ty.as_adt() {
Some(adt) => adt.name(db).display(db.upcast(), edition).to_string(),
Some(adt) => adt.name(db).display(db, edition).to_string(),
None => {
ty.display_source_code(db, module.into(), false).ok().unwrap_or_else(|| "_".to_owned())
}
@ -6,9 +6,7 @@ use hir::{DefMap, InFile, ModuleSource, db::DefDatabase};
use ide_db::base_db::RootQueryDb;
use ide_db::text_edit::TextEdit;
use ide_db::{
FileId, FileRange, LineIndexDatabase,
base_db::{SourceDatabase, Upcast},
source_change::SourceChange,
FileId, FileRange, LineIndexDatabase, base_db::SourceDatabase, source_change::SourceChange,
};
use paths::Utf8Component;
use syntax::{

@ -101,7 +99,7 @@ fn fixes(
};
// check crate roots, i.e. main.rs, lib.rs, ...
let relevant_crates = Upcast::<dyn RootQueryDb>::upcast(db).relevant_crates(file_id);
let relevant_crates = db.relevant_crates(file_id);
'crates: for &krate in &*relevant_crates {
let crate_def_map = ctx.sema.db.crate_def_map(krate);

@ -150,7 +148,7 @@ fn fixes(
paths.into_iter().find_map(|path| source_root.file_for_path(&path))
})?;
stack.pop();
let relevant_crates = Upcast::<dyn RootQueryDb>::upcast(db).relevant_crates(parent_id);
let relevant_crates = db.relevant_crates(parent_id);
'crates: for &krate in relevant_crates.iter() {
let crate_def_map = ctx.sema.db.crate_def_map(krate);
let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
@ -234,6 +234,8 @@ fn _format(
file_id: FileId,
expansion: &str,
) -> Option<String> {
use ide_db::base_db::RootQueryDb;
// hack until we get hygiene working (same character amount to preserve formatting as much as possible)
const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
const BUILTIN_REPLACE: &str = "builtin__POUND";

@ -247,9 +249,8 @@ fn _format(
};
let expansion = format!("{prefix}{expansion}{suffix}");
let upcast_db = ide_db::base_db::Upcast::<dyn ide_db::base_db::RootQueryDb>::upcast(db);
let &crate_id = upcast_db.relevant_crates(file_id).iter().next()?;
let edition = crate_id.data(upcast_db).edition;
let &crate_id = db.relevant_crates(file_id).iter().next()?;
let edition = crate_id.data(db).edition;
#[allow(clippy::disallowed_methods)]
let mut cmd = std::process::Command::new(toolchain::Tool::Rustfmt.path());
@ -11,7 +11,7 @@ use hir::{
};
use ide_db::{
RootDatabase, SymbolKind,
base_db::{AnchoredPath, RootQueryDb, SourceDatabase, Upcast},
base_db::{AnchoredPath, SourceDatabase},
defs::{Definition, IdentClass},
famous_defs::FamousDefs,
helpers::pick_best_token,

@ -222,8 +222,7 @@ fn try_lookup_include_path(
}
let path = token.value.value().ok()?;
let file_id = Upcast::<dyn RootQueryDb>::upcast(sema.db)
.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
let size = sema.db.file_text(file_id).text(sema.db).len().try_into().ok()?;
Some(NavigationTarget {
file_id,
@ -1,5 +1,5 @@
use hir::GenericParam;
use ide_db::{RootDatabase, base_db::Upcast, defs::Definition, helpers::pick_best_token};
use ide_db::{RootDatabase, defs::Definition, helpers::pick_best_token};
use syntax::{AstNode, SyntaxKind::*, SyntaxToken, T, ast, match_ast};
use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};

@ -87,7 +87,7 @@ pub(crate) fn goto_type_definition(
ast::Pat(it) => sema.type_of_pat(&it)?.original,
ast::SelfParam(it) => sema.type_of_self(&it)?,
ast::Type(it) => sema.resolve_type(&it)?,
ast::RecordField(it) => sema.to_def(&it)?.ty(db.upcast()),
ast::RecordField(it) => sema.to_def(&it)?.ty(db),
// can't match on RecordExprField directly as `ast::Expr` will match an iteration too early otherwise
ast::NameRef(it) => {
if let Some(record_field) = ast::RecordExprField::for_name_ref(&it) {
@ -66,8 +66,7 @@ use hir::{ChangeWithProcMacros, sym};
use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, Upcast,
VfsPath,
CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath,
salsa::{AsDynDatabase, Cancelled},
},
prime_caches, symbol_index,

@ -623,10 +622,7 @@ impl Analysis {
/// Returns crates that this file *might* belong to.
pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<Crate>> {
self.with_db(|db| {
let db = Upcast::<dyn RootQueryDb>::upcast(db);
db.relevant_crates(file_id).iter().copied().collect()
})
self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
}
/// Returns the edition of the given crate.
@ -1,7 +1,7 @@
use hir::{Semantics, db::DefDatabase};
use ide_db::{
FileId, FilePosition, RootDatabase,
base_db::{Crate, RootQueryDb, Upcast},
base_db::{Crate, RootQueryDb},
};
use itertools::Itertools;
use syntax::{

@ -54,9 +54,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
/// This returns `Vec` because a module may be included from several places.
pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<Crate> {
let root_db = Upcast::<dyn RootQueryDb>::upcast(db);
root_db
.relevant_crates(file_id)
db.relevant_crates(file_id)
.iter()
.copied()
.filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())
@ -86,6 +86,6 @@ fn full_name_of_item(db: &dyn HirDatabase, module: Module, name: Name) -> String
.rev()
.filter_map(|it| it.name(db))
.chain(Some(name))
.map(|it| it.display(db.upcast(), Edition::LATEST).to_string())
.map(|it| it.display(db, Edition::LATEST).to_string())
.join("::")
}