Merge commit '457b966b17' into sync-from-ra

Laurențiu Nicola 2023-12-11 11:16:01 +02:00
parent 5285df4f6c
commit f532576ac5
263 changed files with 9788 additions and 6258 deletions

crates/base-db/Cargo.toml

@ -12,12 +12,10 @@ rust-version.workspace = true
doctest = false
[dependencies]
rust-analyzer-salsa = "0.17.0-pre.3"
rustc-hash = "1.1.0"
triomphe.workspace = true
la-arena.workspace = true
rust-analyzer-salsa.workspace = true
rustc-hash.workspace = true
triomphe.workspace = true
# local deps
cfg.workspace = true

crates/base-db/src/fixture.rs

@ -8,11 +8,12 @@ use test_utils::{
ESCAPED_CURSOR_MARKER,
};
use triomphe::Arc;
use tt::token_id::{Leaf, Subtree, TokenTree};
use tt::{Leaf, Subtree, TokenTree};
use vfs::{file_set::FileSet, VfsPath};
use crate::{
input::{CrateName, CrateOrigin, LangCrateOrigin},
span::SpanData,
Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env,
FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId,
@ -134,7 +135,7 @@ impl ChangeFixture {
let mut file_set = FileSet::default();
let mut current_source_root_kind = SourceRootKind::Local;
let mut file_id = FileId(0);
let mut file_id = FileId::from_raw(0);
let mut roots = Vec::new();
let mut file_position = None;
@ -209,7 +210,7 @@ impl ChangeFixture {
let path = VfsPath::new_virtual_path(meta.path);
file_set.insert(file_id, path);
files.push(file_id);
file_id.0 += 1;
file_id = FileId::from_raw(file_id.index() + 1);
}
if crates.is_empty() {
@ -254,7 +255,7 @@ impl ChangeFixture {
if let Some(mini_core) = mini_core {
let core_file = file_id;
file_id.0 += 1;
file_id = FileId::from_raw(file_id.index() + 1);
let mut fs = FileSet::default();
fs.insert(core_file, VfsPath::new_virtual_path("/sysroot/core/lib.rs".to_string()));
@ -295,7 +296,6 @@ impl ChangeFixture {
let mut proc_macros = ProcMacros::default();
if !proc_macro_names.is_empty() {
let proc_lib_file = file_id;
file_id.0 += 1;
proc_macro_defs.extend(default_test_proc_macros());
let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macro_defs);
@ -539,10 +539,13 @@ struct IdentityProcMacroExpander;
impl ProcMacroExpander for IdentityProcMacroExpander {
fn expand(
&self,
subtree: &Subtree,
_: Option<&Subtree>,
subtree: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@ -553,10 +556,13 @@ struct AttributeInputReplaceProcMacroExpander;
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
fn expand(
&self,
_: &Subtree,
attrs: Option<&Subtree>,
_: &Subtree<SpanData>,
attrs: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
attrs
.cloned()
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
@ -568,11 +574,14 @@ struct MirrorProcMacroExpander;
impl ProcMacroExpander for MirrorProcMacroExpander {
fn expand(
&self,
input: &Subtree,
_: Option<&Subtree>,
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
fn traverse(input: &Subtree) -> Subtree {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let mut token_trees = vec![];
for tt in input.token_trees.iter().rev() {
let tt = match tt {
@ -595,13 +604,16 @@ struct ShortenProcMacroExpander;
impl ProcMacroExpander for ShortenProcMacroExpander {
fn expand(
&self,
input: &Subtree,
_: Option<&Subtree>,
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
return Ok(traverse(input));
fn traverse(input: &Subtree) -> Subtree {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let token_trees = input
.token_trees
.iter()
@ -613,7 +625,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
Subtree { delimiter: input.delimiter, token_trees }
}
fn modify_leaf(leaf: &Leaf) -> Leaf {
fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> {
let mut leaf = leaf.clone();
match &mut leaf {
Leaf::Literal(it) => {

crates/base-db/src/input.rs

@ -13,9 +13,10 @@ use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use triomphe::Arc;
use tt::token_id::Subtree;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
use crate::span::SpanData;
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been built yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
@ -242,6 +243,9 @@ impl CrateDisplayName {
}
}
// FIXME: These should not be defined in here. Why does base-db know about proc-macros?
// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
@ -255,10 +259,13 @@ pub enum ProcMacroKind {
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
subtree: &Subtree,
attrs: Option<&Subtree>,
subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
) -> Result<Subtree, ProcMacroExpansionError>;
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
}
#[derive(Debug)]
@ -323,7 +330,9 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
// FIXME: These things should not be per crate! These are more per workspace crate graph level things
// FIXME: These things should not be per crate! These are more per workspace crate graph level
// things. This info does need to be somewhat present, though, to prevent deduplication from
// happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
}
@ -871,7 +880,7 @@ mod tests {
fn detect_cyclic_dependency_indirect() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -884,7 +893,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -897,7 +906,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
FileId(3u32),
FileId::from_raw(3u32),
Edition2018,
None,
None,
@ -933,7 +942,7 @@ mod tests {
fn detect_cyclic_dependency_direct() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -946,7 +955,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -976,7 +985,7 @@ mod tests {
fn it_works() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -989,7 +998,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,
@ -1002,7 +1011,7 @@ mod tests {
None,
);
let crate3 = graph.add_crate_root(
FileId(3u32),
FileId::from_raw(3u32),
Edition2018,
None,
None,
@ -1032,7 +1041,7 @@ mod tests {
fn dashes_are_normalized() {
let mut graph = CrateGraph::default();
let crate1 = graph.add_crate_root(
FileId(1u32),
FileId::from_raw(1u32),
Edition2018,
None,
None,
@ -1045,7 +1054,7 @@ mod tests {
None,
);
let crate2 = graph.add_crate_root(
FileId(2u32),
FileId::from_raw(2u32),
Edition2018,
None,
None,

crates/base-db/src/lib.rs

@ -1,10 +1,11 @@
//! base_db defines basic database traits. The concrete DB is defined by ide.
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#![warn(rust_2018_idioms, unused_lifetimes)]
mod input;
mod change;
pub mod fixture;
pub mod span;
use std::panic;
@ -12,14 +13,13 @@ use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
use triomphe::Arc;
pub use crate::input::DependencyKind;
pub use crate::{
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
},
};
pub use salsa::{self, Cancelled};
@ -68,8 +68,7 @@ pub trait FileLoader {
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
// Parses the file into the syntax tree.
#[salsa::invoke(parse_query)]
/// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
/// The crate graph.
@ -81,7 +80,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
fn proc_macros(&self) -> Arc<ProcMacros>;
}
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
let text = db.file_text(file_id);
SourceFile::parse(&text)

crates/base-db/src/span.rs (new file, 203 lines)

@ -0,0 +1,203 @@
//! File and span related types.
// FIXME: This should probably be moved into its own crate.
use std::fmt;
use salsa::InternId;
use tt::SyntaxContext;
use vfs::FileId;
pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
// The first index is always the root node's AstId
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SyntaxContextId(InternId);
impl fmt::Debug for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if *self == Self::SELF_REF {
f.debug_tuple("SyntaxContextId")
.field(&{
#[derive(Debug)]
#[allow(non_camel_case_types)]
struct SELF_REF;
SELF_REF
})
.finish()
} else {
f.debug_tuple("SyntaxContextId").field(&self.0).finish()
}
}
}
crate::impl_intern_key!(SyntaxContextId);
impl fmt::Display for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.as_u32())
}
}
impl SyntaxContext for SyntaxContextId {
const DUMMY: Self = Self::ROOT;
}
// inherent trait impls please tyvm
impl SyntaxContextId {
pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
// veykril(HACK): FIXME salsa doesn't let us fetch the id of the input currently being allocated, so
// we need a special value that behaves as the current context.
pub const SELF_REF: Self =
SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
pub fn is_root(self) -> bool {
self == Self::ROOT
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpanAnchor {
pub file_id: FileId,
pub ast_id: ErasedFileAstId,
}
impl fmt::Debug for SpanAnchor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
}
}
impl tt::SpanAnchor for SpanAnchor {
const DUMMY: Self = SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID };
}
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust are macros: each macro can be thought of as producing a "temporary
/// file". To assign an id to such a file, we use the id of the macro call that
/// produced the file. So, a `HirFileId` is either a `FileId` (source code
/// written by user), or a `MacroCallId` (source code produced by macro).
///
/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
/// containing the call plus the offset of the macro call in the file. Note that
/// this is a recursive definition! However, the size_of of `HirFileId` is
/// finite (because everything bottoms out at the real `FileId`) and small
/// (`MacroCallId` uses the location interning. You can check details here:
/// <https://en.wikipedia.org/wiki/String_interning>).
///
/// The two variants are encoded in a single u32, differentiated by the MSB.
/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
/// `MacroCallId`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct HirFileId(u32);
impl From<HirFileId> for u32 {
fn from(value: HirFileId) -> Self {
value.0
}
}
impl From<MacroCallId> for HirFileId {
fn from(value: MacroCallId) -> Self {
value.as_file()
}
}
impl fmt::Debug for HirFileId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.repr().fmt(f)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFileId {
pub macro_call_id: MacroCallId,
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroCallId(salsa::InternId);
crate::impl_intern_key!(MacroCallId);
impl MacroCallId {
pub fn as_file(self) -> HirFileId {
MacroFileId { macro_call_id: self }.into()
}
pub fn as_macro_file(self) -> MacroFileId {
MacroFileId { macro_call_id: self }
}
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum HirFileIdRepr {
FileId(FileId),
MacroFile(MacroFileId),
}
impl fmt::Debug for HirFileIdRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.index()).finish(),
Self::MacroFile(arg0) => {
f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
}
}
}
}
impl From<FileId> for HirFileId {
fn from(id: FileId) -> Self {
assert!(id.index() < Self::MAX_FILE_ID);
HirFileId(id.index())
}
}
impl From<MacroFileId> for HirFileId {
fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
let id = id.as_u32();
assert!(id < Self::MAX_FILE_ID);
HirFileId(id | Self::MACRO_FILE_TAG_MASK)
}
}
impl HirFileId {
const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
#[inline]
pub fn is_macro(self) -> bool {
self.0 & Self::MACRO_FILE_TAG_MASK != 0
}
#[inline]
pub fn macro_file(self) -> Option<MacroFileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => None,
_ => Some(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
#[inline]
pub fn file_id(self) -> Option<FileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => Some(FileId::from_raw(self.0)),
_ => None,
}
}
#[inline]
pub fn repr(self) -> HirFileIdRepr {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => HirFileIdRepr::FileId(FileId::from_raw(self.0)),
_ => HirFileIdRepr::MacroFile(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
}
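
A minimal standalone sketch of the `HirFileId` encoding described in the doc comment above, using plain u32 arithmetic rather than rust-analyzer's actual types; the helper names (`encode_file_id`, `encode_macro_call`, `decode`) are hypothetical and exist only for illustration.

// Sketch: the MSB of the u32 tags the variant (0 = FileId, 1 = MacroCallId).
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

// Assumed helper: a raw file index is stored unchanged (MSB stays 0).
fn encode_file_id(file_index: u32) -> u32 {
    assert!(file_index & MACRO_FILE_TAG_MASK == 0);
    file_index
}

// Assumed helper: a macro call's intern id gets the MSB set.
fn encode_macro_call(macro_call_id: u32) -> u32 {
    assert!(macro_call_id & MACRO_FILE_TAG_MASK == 0);
    macro_call_id | MACRO_FILE_TAG_MASK
}

// Decoding mirrors HirFileId::repr(): check the MSB, then clear it to recover the raw id.
fn decode(repr: u32) -> (bool, u32) {
    let is_macro = repr & MACRO_FILE_TAG_MASK != 0;
    (is_macro, if is_macro { repr ^ MACRO_FILE_TAG_MASK } else { repr })
}

fn main() {
    assert_eq!(decode(encode_file_id(42)), (false, 42));
    assert_eq!(decode(encode_macro_call(7)), (true, 7));
}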