Lukas Wirth 2023-11-25 17:10:18 +01:00
parent ab8f12e169
commit 92d447f976
26 changed files with 102 additions and 123 deletions

View file

@@ -13,7 +13,7 @@ fn assert_parse_result(input: &str, expected: DocExpr) {
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
SpanMapRef::RealSpanMap(&RealSpanMap::empty(FileId(0))),
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))),
);
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);

View file

@@ -131,7 +131,7 @@ impl Expander {
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id);
Path::from_src(path, &ctx)
Path::from_src(&ctx, path)
}
fn within_limit<F, T: ast::AstNode>(

View file

@@ -40,7 +40,7 @@ impl<'a> LowerCtx<'a> {
}
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
Path::from_src(ast, self)
Path::from_src(self, ast)
}
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {

View file

@@ -11,6 +11,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
let krate = {
let crate_graph = db.crate_graph();
// Some of these tests use minicore/proc-macros which will be injected as the first crate
crate_graph.iter().last().unwrap()
};
{

View file

@@ -96,8 +96,8 @@ pub enum GenericArg {
impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
lower::lower_path(path, ctx)
pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option<Path> {
lower::lower_path(ctx, path)
}
/// Converts a known mod path to `Path`.

View file

@@ -16,12 +16,9 @@ use crate::{
type_ref::{LifetimeRef, TypeBound, TypeRef},
};
// fn resolve_crate_root
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
// FIXME: flip the params
pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut type_anchor = None;
let mut segments = Vec::new();
@@ -36,18 +33,15 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
let name = if name_ref.text() == "$crate" {
kind = resolve_crate_root(
if name_ref.text() == "$crate" {
break kind = resolve_crate_root(
ctx.db.upcast(),
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate);
break;
} else {
name_ref.as_name()
};
}
let name = name_ref.as_name();
let args = segment
.generic_arg_list()
.and_then(|it| lower_generic_args(ctx, it))
@@ -82,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
Path::from_src(trait_ref.path()?, ctx)?
Path::from_src(ctx, trait_ref.path()?)?
else {
return None;
};
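
Aside from the editor, not part of the commit: the `break kind = resolve_crate_root(...)` line above leans on `break` accepting an expression inside a `loop`; the assignment runs first and the loop then breaks with `()`. A minimal sketch of that shape, using a hypothetical `classify` helper rather than the real lowering code:

    fn classify(segments: &[&str]) -> &'static str {
        let mut kind = "plain";
        let mut i = 0;
        loop {
            if i == segments.len() {
                break;
            }
            if segments[i] == "$crate" {
                // Assign, then break -- the same shape as `break kind = resolve_crate_root(..)`.
                break kind = "dollar-crate";
            }
            i += 1;
        }
        kind
    }

    fn main() {
        assert_eq!(classify(&["a", "$crate", "b"]), "dollar-crate");
        assert_eq!(classify(&["a", "b"]), "plain");
    }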

View file

@@ -17,10 +17,10 @@ use profile::Count;
use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
pub use base_db::span::ErasedFileAstId;
use crate::db;
pub use base_db::span::ErasedFileAstId;
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.

View file

@@ -293,13 +293,8 @@ impl Attr {
if tts.is_empty() {
return None;
}
// FIXME: Absolutely wrong
let call_site = match tts.first().unwrap() {
tt::TokenTree::Leaf(l) => l.span().ctx,
tt::TokenTree::Subtree(s) => s.delimiter.open.ctx,
};
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here.
// here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
token_trees: tts.to_vec(),
@@ -313,6 +308,7 @@ impl Attr {
return None;
}
let path = meta.path()?;
let call_site = span_map.span_for_range(path.syntax().text_range()).ctx;
Some((
ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
call_site,

View file

@@ -553,7 +553,7 @@ pub(crate) fn include_arg_to_tt(
let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
};
let path = parse_string(&arg.0)?;
let path = parse_string(&arg)?;
let file_id = relative_file(db, *arg_id, &path, false)?;
// why are we not going through a SyntaxNode here?

View file

@@ -190,15 +190,16 @@ pub fn expand_speculative(
speculative_args: &SyntaxNode,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
// FIXME spanmaps
let loc = db.lookup_intern_macro_call(actual_macro_call);
// Build the subtree and token mapping for the speculative args
let _censor = censor_for_macro_input(&loc, speculative_args);
let span_map = RealSpanMap::absolute(SpanAnchor::DUMMY.file_id);
let span_map = SpanMapRef::RealSpanMap(&span_map);
let mut tt = mbe::syntax_node_to_token_tree(
speculative_args,
// we don't leak these spans into any query so its fine to make them absolute
SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)),
span_map,
);
let attr_arg = match loc.kind {
@@ -216,10 +217,7 @@ pub fn expand_speculative(
}?;
match attr.token_tree() {
Some(token_tree) => {
let mut tree = syntax_node_to_token_tree(
token_tree.syntax(),
SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)),
);
let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
tree.delimiter = tt::Delimiter::UNSPECIFIED;
Some(tree)
@@ -243,12 +241,7 @@ pub fn expand_speculative(
MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
expander.expand(
db,
actual_macro_call,
&adt,
SpanMapRef::RealSpanMap(&RealSpanMap::empty(SpanAnchor::DUMMY.file_id)),
)
expander.expand(db, actual_macro_call, &adt, span_map)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
@@ -305,6 +298,8 @@ fn parse_or_expand_with_err(
}
}
// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFileId,
@@ -330,6 +325,18 @@ fn parse_macro_expansion_error(
.map(|it| it.0.errors().to_vec().into_boxed_slice())
}
fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
}
}
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
@@ -361,32 +368,22 @@ fn macro_arg(
.then(|| loc.eager.as_deref())
.flatten()
{
ValueResult::ok(Some(Arc::new(arg.0.clone())))
ValueResult::ok(Some(arg.clone()))
} else {
//FIXME: clean this up, the ast id map lookup is done twice here
let (parse, map) = match loc.kind.file_id().repr() {
HirFileIdRepr::FileId(file_id) => {
let syntax = db.parse(file_id).to_syntax();
(syntax, SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
};
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let syntax = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let offset = node.syntax().text_range().start();
match node.token_tree().map(|it| it.syntax().clone()) {
match node.token_tree() {
Some(tt) => {
if let Some(e) = mismatched_delimiters(&tt) {
let tt = tt.syntax();
if let Some(e) = mismatched_delimiters(tt) {
return ValueResult::only_err(e);
}
tt
tt.clone()
}
None => {
return ValueResult::only_err(Arc::new(Box::new([
@@ -479,17 +476,8 @@ fn decl_macro_expander(
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
let (root, map) = match id.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
// FIXME: Arc
// FIXME: id.to_ptr duplicated, expensive
(db.parse(file_id).syntax_node(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse.syntax_node(), SpanMap::ExpansionSpanMap(map))
}
};
let (root, map) = parse_with_map(db, id.file_id);
let root = root.syntax_node();
let transparency = |node| {
// ... would be nice to have the item tree here
@@ -568,21 +556,8 @@ fn macro_expand(
let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => {
// FIXME: add firewall query for this?
let hir_file_id = loc.kind.file_id();
let (root, map) = match hir_file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
// FIXME: query for span map
(
db.parse(file_id).syntax_node(),
SpanMap::RealSpanMap(db.real_span_map(file_id)),
)
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse.syntax_node(), SpanMap::ExpansionSpanMap(map))
}
};
let (root, map) = parse_with_map(db, loc.kind.file_id());
let root = root.syntax_node();
let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
let node = ast_id.to_ptr(db).to_node(&root);
@@ -710,9 +685,9 @@ fn token_tree_to_syntax_node(
ExpandTo::Type => mbe::TopEntryPoint::Type,
ExpandTo::Expr => mbe::TopEntryPoint::Expr,
};
let mut tm = mbe::token_tree_to_syntax_node(tt, entry_point);
let (parse, mut span_map) = mbe::token_tree_to_syntax_node(tt, entry_point);
// FIXME: now what the hell is going on here
tm.1.span_map.sort_by(|(_, a), (_, b)| {
span_map.span_map.sort_by(|(_, a), (_, b)| {
a.anchor.file_id.cmp(&b.anchor.file_id).then_with(|| {
let map = db.ast_id_map(a.anchor.file_id.into());
map.get_erased(a.anchor.ast_id)
@@ -721,7 +696,7 @@ fn token_tree_to_syntax_node(
.cmp(&map.get_erased(b.anchor.ast_id).text_range().start())
})
});
tm
(parse, span_map)
}
fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
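
Aside from the editor, not part of the commit: the sort above orders span-map entries first by anchor file id and then by the anchor node's start offset. A tiny sketch of the `cmp(..).then_with(..)` idiom on stand-in tuples (hypothetical data, not rust-analyzer types):

    fn main() {
        // (anchor file id, anchor node start offset)
        let mut entries: Vec<(u32, u32)> = vec![(1, 40), (0, 10), (1, 5), (0, 3)];
        entries.sort_by(|a, b| a.0.cmp(&b.0).then_with(|| a.1.cmp(&b.1)));
        assert_eq!(entries, vec![(0, 3), (0, 10), (1, 5), (1, 40)]);
    }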

View file

@@ -81,7 +81,7 @@ pub fn expand_eager_macro_input(
// FIXME: Spans!
let mut subtree = mbe::syntax_node_to_token_tree(
&expanded_eager_input,
RealSpanMap::empty(<SpanAnchor as tt::SpanAnchor>::DUMMY.file_id),
RealSpanMap::absolute(<SpanAnchor as tt::SpanAnchor>::DUMMY.file_id),
);
subtree.delimiter = crate::tt::Delimiter::UNSPECIFIED;
@@ -89,11 +89,7 @@ pub fn expand_eager_macro_input(
let loc = MacroCallLoc {
def,
krate,
eager: Some(Box::new(EagerCallInfo {
arg: Arc::new((subtree,)),
arg_id,
error: err.clone(),
})),
eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
call_site,
};

View file

@@ -8,7 +8,7 @@ use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
use crate::db::ExpandDatabase;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContextData {
pub outer_expn: Option<MacroCallId>,
pub outer_transparency: Transparency,

View file

@@ -135,7 +135,7 @@ pub enum MacroDefKind {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct EagerCallInfo {
/// The expanded argument of the eager macro.
arg: Arc<(tt::Subtree,)>,
arg: Arc<tt::Subtree>,
/// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
arg_id: MacroCallId,
error: Option<ExpandError>,
@@ -537,8 +537,6 @@ impl MacroCallKind {
FileRange { range, file_id }
}
// FIXME: -> InFile<SyntaxNode> it should be impossible for the token tree to be missing at
// this point!
fn arg(&self, db: &dyn db::ExpandDatabase) -> InFile<Option<SyntaxNode>> {
match self {
MacroCallKind::FnLike { ast_id, .. } => {
@@ -561,7 +559,6 @@ impl MacroCallKind {
pub struct ExpansionInfo {
pub expanded: InMacroFile<SyntaxNode>,
/// The argument TokenTree or item for attributes
// FIXME: Can this ever be `None`?
arg: InFile<Option<SyntaxNode>>,
/// The `macro_rules!` or attribute input.
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,

View file

@@ -77,7 +77,8 @@ pub struct RealSpanMap {
}
impl RealSpanMap {
pub fn empty(file_id: FileId) -> Self {
/// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
pub fn absolute(file_id: FileId) -> Self {
RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) }
}
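
Aside from the editor, not part of the commit: `RealSpanMap::absolute` holds a single pair anchoring offset 0 to the root AST id, so every looked-up range stays relative to the start of the file, i.e. absolute. A simplified sketch of how such a pairs table could answer a range query, with hypothetical `PairsSpanMap`/`Span` types rather than the real implementation:

    #[derive(Debug, PartialEq)]
    struct Span {
        anchor_ast_id: u32, // the root AST id in the "absolute" case
        range: std::ops::Range<u32>,
    }

    struct PairsSpanMap {
        // (offset where the bucket starts, AST id the bucket is anchored to)
        pairs: Vec<(u32, u32)>,
    }

    impl PairsSpanMap {
        fn absolute() -> Self {
            const ROOT_AST_ID: u32 = 0;
            PairsSpanMap { pairs: vec![(0, ROOT_AST_ID)] }
        }

        fn span_for_range(&self, range: std::ops::Range<u32>) -> Span {
            // Last pair whose start offset is <= the query start.
            let idx = self.pairs.partition_point(|&(start, _)| start <= range.start) - 1;
            let (anchor_offset, anchor_ast_id) = self.pairs[idx];
            Span {
                anchor_ast_id,
                range: (range.start - anchor_offset)..(range.end - anchor_offset),
            }
        }
    }

    fn main() {
        let map = PairsSpanMap::absolute();
        // With only the root anchor, the returned range equals the query range.
        assert_eq!(map.span_for_range(10..14), Span { anchor_ast_id: 0, range: 10..14 });
    }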

View file

@@ -1738,7 +1738,7 @@ impl HirDisplay for TypeRef {
);
let macro_call = macro_call.to_node(f.db.upcast());
match macro_call.path() {
Some(path) => match Path::from_src(path, &ctx) {
Some(path) => match Path::from_src(&ctx, path) {
Some(path) => path.hir_fmt(f)?,
None => write!(f, "{{macro}}")?,
},

View file

@@ -241,7 +241,7 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
ModPath::from_src(
db.upcast(),
ast_path,
SpanMapRef::RealSpanMap(&RealSpanMap::empty(FileId(0))),
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))),
)
};

View file

@@ -3,10 +3,27 @@
//! we didn't do that.
//!
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::*;
pub use hir_def::db::{
AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery,
ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery,
CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery,
ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery,
Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery,
StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery,
TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery,
UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery,
};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
ExpandProcMacroQuery, InternMacroCallQuery, MacroArgQuery, MacroExpandQuery,
ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
ExpandProcMacroQuery, IncludeExpandQuery, InternMacroCallQuery, MacroArgQuery,
MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
};
pub use hir_ty::db::*;
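
Aside from the editor, not part of the commit: replacing `pub use hir_def::db::*;` with an explicit list means a newly added query can no longer slip into this facade unnoticed, which matters for the LRU caching mentioned in the module docs above. A minimal sketch of that trade-off with hypothetical query names:

    mod inner {
        pub struct FooQuery;
        pub struct BarQuery;
        pub struct NewlyAddedQuery; // would have leaked through `pub use inner::*;`
    }

    // Before: `pub use inner::*;` silently grows the public surface.
    // After: spell out exactly what downstream code may rely on.
    pub use inner::{BarQuery, FooQuery};

    fn main() {
        let _ = (FooQuery, BarQuery);
        // `NewlyAddedQuery` stays reachable only as `inner::NewlyAddedQuery`
        // inside this crate; it is not re-exported.
    }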

View file

@@ -137,7 +137,13 @@ pub use {
// These are negative re-exports: pub using these names is forbidden, they
// should remain private to hir internals.
#[allow(unused)]
use {hir_def::path::Path, hir_expand::name::AsName};
use {
hir_def::path::Path,
hir_expand::{
name::AsName,
span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
},
};
/// hir::Crate describes a single crate. It's the main interface with which
/// a crate's dependencies interact. Mostly, it should be just a proxy for the
@@ -3483,11 +3489,6 @@ impl Impl {
self.id.lookup(db.upcast()).container.into()
}
pub fn as_builtin_derive_attr(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
let src = self.source(db)?;
src.file_id.as_builtin_derive_attr_node(db.upcast())
}
pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
let src = self.source(db)?;

View file

@@ -868,7 +868,7 @@ impl<'db> SemanticsImpl<'db> {
let analyze = self.analyze(path.syntax())?;
let span_map = self.db.span_map(analyze.file_id);
let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
let hir_path = Path::from_src(path.clone(), &ctx)?;
let hir_path = Path::from_src(&ctx, path.clone())?;
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
TypeNs::TraitId(id) => Some(Trait { id }),
_ => None,
@@ -1466,7 +1466,7 @@ impl SemanticsScope<'_> {
/// necessary a heuristic, as it doesn't take hygiene into account.
pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
let path = Path::from_src(path.clone(), &ctx)?;
let path = Path::from_src(&ctx, path.clone())?;
resolve_hir_path(self.db, &self.resolver, &path)
}

View file

@@ -483,7 +483,7 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>,
) -> Option<Macro> {
let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id);
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
self.resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
.map(|(it, _)| it.into())
@@ -596,7 +596,7 @@ impl SourceAnalyzer {
// This must be a normal source file rather than macro file.
let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id));
let hir_path = Path::from_src(path.clone(), &ctx)?;
let hir_path = Path::from_src(&ctx, path.clone())?;
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.

View file

@@ -683,7 +683,7 @@ pub(super) fn complete_name_ref(
ctx: &CompletionContext<'_>,
NameRefContext { nameref, kind }: &NameRefContext,
) {
match dbg!(kind) {
match kind {
NameRefKind::Path(path_ctx) => {
flyimport::import_on_the_fly_path(acc, ctx, path_ctx);
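
Aside from the editor, not part of the commit: the removed `dbg!` is transparent to the value it wraps -- it prints to stderr and returns its argument -- which is why `match dbg!(kind)` behaved exactly like `match kind`, just noisier. A quick illustration:

    fn main() {
        let kind = 3;
        let doubled = dbg!(kind) * 2; // prints `kind = 3` to stderr and evaluates to 3
        assert_eq!(doubled, 6);
    }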

View file

@@ -97,12 +97,14 @@ impl RootDatabase {
// ExpandDatabase
hir::db::AstIdMapQuery
hir::db::ParseMacroExpansionQuery
hir::db::DeclMacroExpanderQuery
hir::db::ExpandProcMacroQuery
hir::db::IncludeExpandQuery
hir::db::InternMacroCallQuery
hir::db::MacroArgQuery
hir::db::DeclMacroExpanderQuery
hir::db::MacroExpandQuery
hir::db::ExpandProcMacroQuery
hir::db::ParseMacroExpansionQuery
hir::db::RealSpanMapQuery
// DefDatabase
hir::db::FileItemTreeQuery
@@ -142,6 +144,13 @@ impl RootDatabase {
hir::db::FunctionVisibilityQuery
hir::db::ConstVisibilityQuery
hir::db::CrateSupportsNoStdQuery
hir::db::BlockItemTreeQueryQuery
hir::db::ExternCrateDeclDataQuery
hir::db::LangAttrQuery
hir::db::InternAnonymousConstQuery
hir::db::InternExternCrateQuery
hir::db::InternInTypeConstQuery
hir::db::InternUseQuery
// HirDatabase
hir::db::InferQueryQuery

View file

@@ -166,7 +166,6 @@ fn hover_simple(
} else {
sema.descend_into_macros_with_same_text(original_token.clone(), offset)
};
dbg!(&descended);
let descended = || descended.iter();
let result = descended()

View file

@@ -565,7 +565,6 @@ impl<SpanMap> Converter<SpanMap> {
#[derive(Debug)]
enum SynToken {
Ordinary(SyntaxToken),
// FIXME is this supposed to be `Punct`?
Punct(SyntaxToken, usize),
}

View file

@@ -6,12 +6,6 @@ use stdx::itertools::Itertools;
use syntax::TextRange;
use tt::Span;
// pub type HirFile = u32;
// pub type FileRange = (HirFile, TextRange);
// Option<MacroCallId>, LocalSyntaxContet
// pub type SyntaxContext = ();
// pub type LocalSyntaxContext = u32;
/// Maps absolute text ranges for the corresponding file to the relevant span data.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
// FIXME: Rename to SpanMap

View file

@@ -120,7 +120,7 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
Ok(())
}
/*
/*TODO
#[cfg(test)]
mod tests {