diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index c9145f7d21..bb67fa63a1 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -508,9 +508,7 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
 
-    pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
-        let file_id = self.find_file(adt.syntax()).file_id;
-        let adt = InFile::new(file_id, adt);
+    pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
         self.with_ctx(|ctx| ctx.has_derives(adt))
     }
 
@@ -551,10 +549,8 @@ impl<'db> SemanticsImpl<'db> {
         res.is_empty().not().then_some(res)
     }
 
-    pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
-        let file_id = self.find_file(item.syntax()).file_id;
-        let src = InFile::new(file_id, item);
-        self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
+    pub fn is_attr_macro_call(&self, item: InFile<&ast::Item>) -> bool {
+        self.with_ctx(|ctx| ctx.item_to_macro_call(item).is_some())
     }
 
     /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
@@ -1526,8 +1522,13 @@ impl<'db> SemanticsImpl<'db> {
         self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
     }
 
+    // FIXME: Replace this with `resolve_macro_call2`
     pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
         let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+        self.resolve_macro_call2(macro_call)
+    }
+
+    pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
         self.with_ctx(|ctx| {
             ctx.macro_call_to_macro_call(macro_call)
                 .and_then(|call| macro_call_to_macro_id(ctx, call))
@@ -1538,8 +1539,8 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
 
-    pub fn is_proc_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
-        self.resolve_macro_call(macro_call)
+    pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
+        self.resolve_macro_call2(macro_call)
             .is_some_and(|m| matches!(m.id, MacroId::ProcMacroId(..)))
     }
 
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index 1853e3a340..6c5e81ae66 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -7,7 +7,6 @@ mod escape;
 mod format;
 mod highlight;
 mod inject;
-mod macro_;
 
 mod html;
 #[cfg(test)]
@@ -15,14 +14,14 @@ mod tests;
 
 use std::ops::ControlFlow;
 
-use hir::{InRealFile, Name, Semantics};
+use hir::{InFile, InRealFile, Name, Semantics};
 use ide_db::{FxHashMap, Ranker, RootDatabase, SymbolKind};
 use span::EditionedFileId;
 use syntax::{
     ast::{self, IsString},
     AstNode, AstToken, NodeOrToken,
     SyntaxKind::*,
-    SyntaxNode, TextRange, WalkEvent, T,
+    SyntaxNode, SyntaxToken, TextRange, WalkEvent, T,
 };
 
 use crate::{
@@ -30,7 +29,6 @@ use crate::{
         escape::{highlight_escape_byte, highlight_escape_char, highlight_escape_string},
         format::highlight_format_string,
         highlights::Highlights,
-        macro_::MacroHighlighter,
         tags::Highlight,
     },
     FileId, HlMod, HlOperator, HlPunct, HlTag,
@@ -221,7 +219,7 @@ pub(crate) fn highlight(
         Some(it) => it.krate(),
         None => return hl.to_vec(),
     };
-    traverse(&mut hl, &sema, config, file_id, &root, krate, range_to_highlight);
+    traverse(&mut hl, &sema, config, InRealFile::new(file_id, &root), krate, range_to_highlight);
     hl.to_vec()
 }
 
@@ -229,8 +227,7 @@ fn traverse(
     hl: &mut Highlights,
     sema: &Semantics<'_, RootDatabase>,
     config: HighlightConfig,
-    file_id: EditionedFileId,
-    root: &SyntaxNode,
+    InRealFile { file_id, value: root }: InRealFile<&SyntaxNode>,
     krate: hir::Crate,
     range_to_highlight: TextRange,
 ) {
@@ -252,8 +249,6 @@ fn traverse(
 
     let mut tt_level = 0;
     let mut attr_or_derive_item = None;
-    let mut current_macro: Option<ast::Macro> = None;
-    let mut macro_highlighter = MacroHighlighter::default();
 
     // FIXME: these are not perfectly accurate, we determine them by the real file's syntax tree
     // an attribute nested in a macro call will not emit `inside_attribute`
@@ -263,7 +258,8 @@ fn traverse(
 
     // Walk all nodes, keeping track of whether we are inside a macro or not.
    // If in macro, expand it first and highlight the expanded code.
-    for event in root.preorder_with_tokens() {
+    let mut preorder = root.preorder_with_tokens();
+    while let Some(event) = preorder.next() {
         use WalkEvent::{Enter, Leave};
         let range = match &event {
@@ -275,16 +271,11 @@ fn traverse(
             continue;
         }
 
-        // set macro and attribute highlighting states
         match event.clone() {
-            Enter(NodeOrToken::Node(node))
-                if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
-            {
+            Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
                 tt_level += 1;
             }
-            Leave(NodeOrToken::Node(node))
-                if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
-            {
+            Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
                 tt_level -= 1;
             }
             Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
@@ -297,28 +288,19 @@ fn traverse(
             Enter(NodeOrToken::Node(node)) => {
                 if let Some(item) = ast::Item::cast(node.clone()) {
                     match item {
-                        ast::Item::MacroRules(mac) => {
-                            macro_highlighter.init();
-                            current_macro = Some(mac.into());
-                            continue;
-                        }
-                        ast::Item::MacroDef(mac) => {
-                            macro_highlighter.init();
-                            current_macro = Some(mac.into());
-                            continue;
-                        }
                         ast::Item::Fn(_) | ast::Item::Const(_) | ast::Item::Static(_) => {
                             bindings_shadow_count.clear()
                         }
                         ast::Item::MacroCall(ref macro_call) => {
                             inside_macro_call = true;
-                            inside_proc_macro_call = sema.is_proc_macro_call(macro_call);
+                            inside_proc_macro_call =
+                                sema.is_proc_macro_call(InFile::new(file_id.into(), macro_call));
                         }
                         _ => (),
                     }
                     if attr_or_derive_item.is_none() {
-                        if sema.is_attr_macro_call(&item) {
+                        if sema.is_attr_macro_call(InFile::new(file_id.into(), &item)) {
                             attr_or_derive_item = Some(AttrOrDerive::Attr(item));
                         } else {
                             let adt = match item {
@@ -328,7 +310,10 @@ fn traverse(
                                 _ => None,
                             };
                             match adt {
-                                Some(adt) if sema.is_derive_annotated(&adt) => {
+                                Some(adt)
+                                    if sema
+                                        .is_derive_annotated(InFile::new(file_id.into(), &adt)) =>
+                                {
                                     attr_or_derive_item =
                                         Some(AttrOrDerive::Derive(ast::Item::from(adt)));
                                 }
@@ -340,16 +325,6 @@ fn traverse(
             }
             Leave(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
                 match ast::Item::cast(node.clone()) {
-                    Some(ast::Item::MacroRules(mac)) => {
-                        assert_eq!(current_macro, Some(mac.into()));
-                        current_macro = None;
-                        macro_highlighter = MacroHighlighter::default();
-                    }
-                    Some(ast::Item::MacroDef(mac)) => {
-                        assert_eq!(current_macro, Some(mac.into()));
-                        current_macro = None;
-                        macro_highlighter = MacroHighlighter::default();
-                    }
                     Some(item)
                         if attr_or_derive_item.as_ref().is_some_and(|it| *it.item() == item) =>
                     {
@@ -379,12 +354,6 @@ fn traverse(
             }
         };
 
-        if current_macro.is_some() {
-            if let Some(tok) = element.as_token() {
-                macro_highlighter.advance(tok);
-            }
-        }
-
         let element = match element.clone() {
             NodeOrToken::Node(n) => match ast::NameLike::cast(n) {
                 Some(n) => NodeOrToken::Node(n),
@@ -392,7 +361,7 @@ fn traverse(
             },
             NodeOrToken::Token(t) => NodeOrToken::Token(t),
         };
-        let token = element.as_token().cloned();
+        let original_token = element.as_token().cloned();
 
         // Descending tokens into macros is expensive even if no descending occurs, so make sure
         // that we actually are in a position where descending is possible.
@@ -405,144 +374,52 @@ fn traverse(
         let descended_element = if in_macro {
             // Attempt to descend tokens into macro-calls.
-            let res = match element {
-                NodeOrToken::Token(token) if token.kind() != COMMENT => {
-                    let ranker = Ranker::from_token(&token);
-
-                    let mut t = None;
-                    let mut r = 0;
-                    sema.descend_into_macros_breakable(
-                        InRealFile::new(file_id, token.clone()),
-                        |tok, _ctx| {
-                            // FIXME: Consider checking ctx transparency for being opaque?
-                            let tok = tok.value;
-                            let my_rank = ranker.rank_token(&tok);
-
-                            if my_rank >= Ranker::MAX_RANK {
-                                // a rank of 0b1110 means that we have found a maximally interesting
-                                // token so stop early.
-                                t = Some(tok);
-                                return ControlFlow::Break(());
-                            }
-
-                            // r = r.max(my_rank);
-                            // t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok));
-                            match &mut t {
-                                Some(prev) if r < my_rank => {
-                                    *prev = tok;
-                                    r = my_rank;
-                                }
-                                Some(_) => (),
-                                None => {
-                                    r = my_rank;
-                                    t = Some(tok)
-                                }
-                            }
-                            ControlFlow::Continue(())
-                        },
-                    );
-
-                    let token = t.unwrap_or(token);
-                    match token.parent().and_then(ast::NameLike::cast) {
-                        // Remap the token into the wrapping single token nodes
-                        Some(parent) => match (token.kind(), parent.syntax().kind()) {
-                            (T![self] | T![ident], NAME | NAME_REF) => NodeOrToken::Node(parent),
-                            (T![self] | T![super] | T![crate] | T![Self], NAME_REF) => {
-                                NodeOrToken::Node(parent)
-                            }
-                            (INT_NUMBER, NAME_REF) => NodeOrToken::Node(parent),
-                            (LIFETIME_IDENT, LIFETIME) => NodeOrToken::Node(parent),
-                            _ => NodeOrToken::Token(token),
-                        },
-                        None => NodeOrToken::Token(token),
-                    }
-                }
-                e => e,
-            };
-            res
+            match element {
+                NodeOrToken::Token(token) => descend_token(sema, file_id, token),
+                n => n,
+            }
         } else {
             element
         };
 
-        // FIXME: do proper macro def highlighting https://github.com/rust-lang/rust-analyzer/issues/6232
-        // Skip metavariables from being highlighted to prevent keyword highlighting in them
-        if descended_element.as_token().and_then(|t| macro_highlighter.highlight(t)).is_some() {
-            continue;
-        }
-
         // string highlight injections, note this does not use the descended element as proc-macros
         // can rewrite string literals which invalidates our indices
-        if let (Some(token), Some(descended_token)) = (token, descended_element.as_token()) {
-            if ast::String::can_cast(token.kind()) && ast::String::can_cast(descended_token.kind())
-            {
-                let string = ast::String::cast(token);
-                let string_to_highlight = ast::String::cast(descended_token.clone());
-                if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
-                    if string.is_raw()
-                        && inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
-                    {
-                        continue;
-                    }
-                    highlight_format_string(
-                        hl,
-                        sema,
-                        krate,
-                        &string,
-                        &expanded_string,
-                        range,
-                        file_id.edition(),
-                    );
-
-                    if !string.is_raw() {
-                        highlight_escape_string(hl, &string, range.start());
-                    }
-                }
-            } else if ast::ByteString::can_cast(token.kind())
-                && ast::ByteString::can_cast(descended_token.kind())
-            {
-                if let Some(byte_string) = ast::ByteString::cast(token) {
-                    if !byte_string.is_raw() {
-                        highlight_escape_string(hl, &byte_string, range.start());
-                    }
-                }
-            } else if ast::CString::can_cast(token.kind())
-                && ast::CString::can_cast(descended_token.kind())
-            {
-                if let Some(c_string) = ast::CString::cast(token) {
-                    if !c_string.is_raw() {
-                        highlight_escape_string(hl, &c_string, range.start());
-                    }
-                }
-            } else if ast::Char::can_cast(token.kind())
-                && ast::Char::can_cast(descended_token.kind())
-            {
-                let Some(char) = ast::Char::cast(token) else {
-                    continue;
-                };
-
-                highlight_escape_char(hl, &char, range.start())
-            } else if ast::Byte::can_cast(token.kind())
-                && ast::Byte::can_cast(descended_token.kind())
-            {
-                let Some(byte) = ast::Byte::cast(token) else {
-                    continue;
-                };
-
-                highlight_escape_byte(hl, &byte, range.start())
+        if let (Some(original_token), Some(descended_token)) =
+            (original_token, descended_element.as_token())
+        {
+            let control_flow = string_injections(
+                hl,
+                sema,
+                config,
+                file_id,
+                krate,
+                original_token,
+                descended_token,
+            );
+            if control_flow.is_break() {
+                continue;
            }
         }
 
         let element = match descended_element {
-            NodeOrToken::Node(name_like) => highlight::name_like(
-                sema,
-                krate,
-                &mut bindings_shadow_count,
-                config.syntactic_name_ref_highlighting,
-                name_like,
-                file_id.edition(),
-            ),
+            NodeOrToken::Node(name_like) => {
+                let hl = highlight::name_like(
+                    sema,
+                    krate,
+                    &mut bindings_shadow_count,
+                    config.syntactic_name_ref_highlighting,
+                    name_like,
+                    file_id.edition(),
+                );
+                if hl.is_some() && !in_macro {
+                    // skip highlighting the contained token of our name-like node
+                    // as that would potentially overwrite our result
+                    preorder.skip_subtree();
+                }
+                hl
+            }
             NodeOrToken::Token(token) => {
-                highlight::token(sema, token, file_id.edition()).zip(Some(None))
+                highlight::token(sema, token, file_id.edition(), tt_level > 0).zip(Some(None))
             }
         };
         if let Some((mut highlight, binding_hash)) = element {
@@ -551,13 +428,6 @@ fn traverse(
                 // let the editor do its highlighting for these tokens instead
                 continue;
             }
-            if highlight.tag == HlTag::UnresolvedReference
-                && matches!(attr_or_derive_item, Some(AttrOrDerive::Derive(_)) if inside_attribute)
-            {
-                // do not emit unresolved references in derive helpers if the token mapping maps to
-                // something unresolvable. FIXME: There should be a way to prevent that
-                continue;
-            }
 
             // apply config filtering
             if !filter_by_config(&mut highlight, config) {
@@ -579,6 +449,115 @@ fn traverse(
     }
 }
 
+fn string_injections(
+    hl: &mut Highlights,
+    sema: &Semantics<'_, RootDatabase>,
+    config: HighlightConfig,
+    file_id: EditionedFileId,
+    krate: hir::Crate,
+    token: SyntaxToken,
+    descended_token: &SyntaxToken,
+) -> ControlFlow<()> {
+    if ast::String::can_cast(token.kind()) && ast::String::can_cast(descended_token.kind()) {
+        let string = ast::String::cast(token);
+        let string_to_highlight = ast::String::cast(descended_token.clone());
+        if let Some((string, descended_string)) = string.zip(string_to_highlight) {
+            if string.is_raw()
+                && inject::ra_fixture(hl, sema, config, &string, &descended_string).is_some()
+            {
+                return ControlFlow::Break(());
+            }
+            highlight_format_string(hl, sema, krate, &string, &descended_string, file_id.edition());
+
+            if !string.is_raw() {
+                highlight_escape_string(hl, &string);
+            }
+        }
+    } else if ast::ByteString::can_cast(token.kind())
+        && ast::ByteString::can_cast(descended_token.kind())
+    {
+        if let Some(byte_string) = ast::ByteString::cast(token) {
+            if !byte_string.is_raw() {
+                highlight_escape_string(hl, &byte_string);
+            }
+        }
+    } else if ast::CString::can_cast(token.kind()) && ast::CString::can_cast(descended_token.kind())
+    {
+        if let Some(c_string) = ast::CString::cast(token) {
+            if !c_string.is_raw() {
+                highlight_escape_string(hl, &c_string);
+            }
+        }
+    } else if ast::Char::can_cast(token.kind()) && ast::Char::can_cast(descended_token.kind()) {
+        let Some(char) = ast::Char::cast(token) else {
+            return ControlFlow::Break(());
+        };
+
+        highlight_escape_char(hl, &char)
+    } else if ast::Byte::can_cast(token.kind()) && ast::Byte::can_cast(descended_token.kind()) {
+        let Some(byte) = ast::Byte::cast(token) else {
+            return ControlFlow::Break(());
+        };
+
+        highlight_escape_byte(hl, &byte)
+    }
+    ControlFlow::Continue(())
+}
+
+fn descend_token(
+    sema: &Semantics<'_, RootDatabase>,
+    file_id: EditionedFileId,
+    token: SyntaxToken,
+) -> NodeOrToken<ast::NameLike, SyntaxToken> {
+    if token.kind() == COMMENT {
+        return NodeOrToken::Token(token);
+    }
+    let ranker = Ranker::from_token(&token);
+
+    let mut t = None;
+    let mut r = 0;
+    sema.descend_into_macros_breakable(InRealFile::new(file_id, token.clone()), |tok, _ctx| {
+        // FIXME: Consider checking ctx transparency for being opaque?
+        let tok = tok.value;
+        let my_rank = ranker.rank_token(&tok);
+
+        if my_rank >= Ranker::MAX_RANK {
+            // a rank of 0b1110 means that we have found a maximally interesting
+            // token so stop early.
+            t = Some(tok);
+            return ControlFlow::Break(());
+        }
+
+        // r = r.max(my_rank);
+        // t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok));
+        match &mut t {
+            Some(prev) if r < my_rank => {
+                *prev = tok;
+                r = my_rank;
+            }
+            Some(_) => (),
+            None => {
+                r = my_rank;
+                t = Some(tok)
+            }
+        }
+        ControlFlow::Continue(())
+    });
+
+    let token = t.unwrap_or(token);
+    match token.parent().and_then(ast::NameLike::cast) {
+        // Remap the token into the wrapping single token nodes
+        Some(parent) => match (token.kind(), parent.syntax().kind()) {
+            (T![self] | T![ident], NAME | NAME_REF) => NodeOrToken::Node(parent),
+            (T![self] | T![super] | T![crate] | T![Self], NAME_REF) => NodeOrToken::Node(parent),
+            (INT_NUMBER, NAME_REF) => NodeOrToken::Node(parent),
+            (LIFETIME_IDENT, LIFETIME) => NodeOrToken::Node(parent),
+            _ => NodeOrToken::Token(token),
+        },
+        None => NodeOrToken::Token(token),
+    }
+}
+
 fn filter_by_config(highlight: &mut Highlight, config: HighlightConfig) -> bool {
     match &mut highlight.tag {
         HlTag::StringLiteral if !config.strings => return false,
diff --git a/crates/ide/src/syntax_highlighting/escape.rs b/crates/ide/src/syntax_highlighting/escape.rs
index 552ce9cd8c..094f88f3a8 100644
--- a/crates/ide/src/syntax_highlighting/escape.rs
+++ b/crates/ide/src/syntax_highlighting/escape.rs
@@ -4,12 +4,9 @@ use crate::{HlRange, HlTag};
 use syntax::ast::{Byte, Char, IsString};
 use syntax::{AstToken, TextRange, TextSize};
 
-pub(super) fn highlight_escape_string<T: IsString>(
-    stack: &mut Highlights,
-    string: &T,
-    start: TextSize,
-) {
+pub(super) fn highlight_escape_string<T: IsString>(stack: &mut Highlights, string: &T) {
     let text = string.text();
+    let start = string.syntax().text_range().start();
     string.escaped_char_ranges(&mut |piece_range, char| {
         if text[piece_range.start().into()..].starts_with('\\') {
             let highlight = match char {
@@ -25,7 +22,7 @@ pub(super) fn highlight_escape_string(
     });
 }
 
-pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start: TextSize) {
+pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char) {
     if char.value().is_err() {
         // We do not emit invalid escapes highlighting here. The lexer would likely be in a bad
         // state and this token contains junk, since `'` is not a reliable delimiter (consider
@@ -42,11 +39,14 @@ pub(super) fn highlight_escape_char(stack: &mut Highlights, char: &Char, start:
         return;
     };
 
-    let range = TextRange::at(start + TextSize::from(1), TextSize::from(text.len() as u32));
+    let range = TextRange::at(
+        char.syntax().text_range().start() + TextSize::from(1),
+        TextSize::from(text.len() as u32),
+    );
     stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
 }
 
-pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte, start: TextSize) {
+pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte) {
     if byte.value().is_err() {
         // See `highlight_escape_char` for why no error highlighting here.
         return;
@@ -61,6 +61,9 @@ pub(super) fn highlight_escape_byte(stack: &mut Highlights, byte: &Byte, start:
         return;
     };
 
-    let range = TextRange::at(start + TextSize::from(2), TextSize::from(text.len() as u32));
+    let range = TextRange::at(
+        byte.syntax().text_range().start() + TextSize::from(2),
+        TextSize::from(text.len() as u32),
+    );
     stack.add(HlRange { range, highlight: HlTag::EscapeSequence.into(), binding_hash: None })
 }
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index 43a6bdad7e..c63043621c 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -5,7 +5,7 @@ use ide_db::{
     SymbolKind,
 };
 use span::Edition;
-use syntax::{ast, TextRange};
+use syntax::{ast, AstToken};
 
 use crate::{
     syntax_highlighting::{highlight::highlight_def, highlights::Highlights},
@@ -18,15 +18,15 @@ pub(super) fn highlight_format_string(
     krate: hir::Crate,
     string: &ast::String,
     expanded_string: &ast::String,
-    range: TextRange,
     edition: Edition,
 ) {
     if is_format_string(expanded_string) {
+        let start = string.syntax().text_range().start();
         // FIXME: Replace this with the HIR info we have now.
         lex_format_specifiers(string, &mut |piece_range, kind| {
             if let Some(highlight) = highlight_format_specifier(kind) {
                 stack.add(HlRange {
-                    range: piece_range + range.start(),
+                    range: piece_range + start,
                     highlight: highlight.into(),
                     binding_hash: None,
                 });
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index 194fde1160..316fa5b6e9 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -23,6 +23,7 @@ pub(super) fn token(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
     edition: Edition,
+    in_tt: bool,
 ) -> Option<Highlight> {
     if let Some(comment) = ast::Comment::cast(token.clone()) {
         let h = HlTag::Comment;
@@ -40,13 +41,20 @@ pub(super) fn token(
         INT_NUMBER | FLOAT_NUMBER => HlTag::NumericLiteral.into(),
         BYTE => HlTag::ByteLiteral.into(),
         CHAR => HlTag::CharLiteral.into(),
-        IDENT if token.parent().and_then(ast::TokenTree::cast).is_some() => {
+        IDENT if in_tt => {
             // from this point on we are inside a token tree, this only happens for identifiers
             // that were not mapped down into macro invocations
             HlTag::None.into()
         }
         p if p.is_punct() => punctuation(sema, token, p),
-        k if k.is_keyword(edition) => keyword(sema, token, k)?,
+        k if k.is_keyword(edition) => {
+            if in_tt && token.prev_token().is_some_and(|t| t.kind() == T![$]) {
+                // we are likely within a macro definition where our keyword is a fragment name
+                HlTag::None.into()
+            } else {
+                keyword(sema, token, k)?
+            }
+        }
         _ => return None,
     };
     Some(highlight)
@@ -214,12 +222,6 @@ fn keyword(
         T![true] | T![false] => HlTag::BoolLiteral.into(),
         // crate is handled just as a token if it's in an `extern crate`
         T![crate] if parent_matches::<ast::ExternCrate>(&token) => h,
-        // self, crate, super and `Self` are handled as either a Name or NameRef already, unless they
-        // are inside unmapped token trees
-        T![self] | T![crate] | T![super] | T![Self] if parent_matches::<ast::NameRef>(&token) => {
-            return None
-        }
-        T![self] if parent_matches::<ast::Name>(&token) => return None,
         T![ref] => match token.parent().and_then(ast::IdentPat::cast) {
             Some(ident) if sema.is_unsafe_ident_pat(&ident) => h | HlMod::Unsafe,
             _ => h,
diff --git a/crates/ide/src/syntax_highlighting/macro_.rs b/crates/ide/src/syntax_highlighting/macro_.rs
deleted file mode 100644
index b441b4cc90..0000000000
--- a/crates/ide/src/syntax_highlighting/macro_.rs
+++ /dev/null
@@ -1,128 +0,0 @@
-//! Syntax highlighting for macro_rules!.
-use syntax::{SyntaxKind, SyntaxToken, TextRange, T};
-
-use crate::{HlRange, HlTag};
-
-#[derive(Default)]
-pub(super) struct MacroHighlighter {
-    state: Option<MacroMatcherParseState>,
-}
-
-impl MacroHighlighter {
-    pub(super) fn init(&mut self) {
-        self.state = Some(MacroMatcherParseState::default());
-    }
-
-    pub(super) fn advance(&mut self, token: &SyntaxToken) {
-        if let Some(state) = self.state.as_mut() {
-            update_macro_state(state, token);
-        }
-    }
-
-    pub(super) fn highlight(&self, token: &SyntaxToken) -> Option<HlRange> {
-        if let Some(state) = self.state.as_ref() {
-            if matches!(state.rule_state, RuleState::Matcher | RuleState::Expander) {
-                if let Some(range) = is_metavariable(token) {
-                    return Some(HlRange {
-                        range,
-                        highlight: HlTag::UnresolvedReference.into(),
-                        binding_hash: None,
-                    });
-                }
-            }
-        }
-        None
-    }
-}
-
-struct MacroMatcherParseState {
-    /// Opening and corresponding closing bracket of the matcher or expander of the current rule
-    paren_ty: Option<(SyntaxKind, SyntaxKind)>,
-    paren_level: usize,
-    rule_state: RuleState,
-    /// Whether we are inside the outer `{` `}` macro block that holds the rules
-    in_invoc_body: bool,
-}
-
-impl Default for MacroMatcherParseState {
-    fn default() -> Self {
-        MacroMatcherParseState {
-            paren_ty: None,
-            paren_level: 0,
-            in_invoc_body: false,
-            rule_state: RuleState::None,
-        }
-    }
-}
-
-#[derive(Copy, Clone, Debug, PartialEq)]
-enum RuleState {
-    Matcher,
-    Expander,
-    Between,
-    None,
-}
-
-impl RuleState {
-    fn transition(&mut self) {
-        *self = match self {
-            RuleState::Matcher => RuleState::Between,
-            RuleState::Expander => RuleState::None,
-            RuleState::Between => RuleState::Expander,
-            RuleState::None => RuleState::Matcher,
-        };
-    }
-}
-
-fn update_macro_state(state: &mut MacroMatcherParseState, tok: &SyntaxToken) {
-    if !state.in_invoc_body {
-        if tok.kind() == T!['{'] || tok.kind() == T!['('] {
-            state.in_invoc_body = true;
-        }
-        return;
-    }
-
-    match state.paren_ty {
-        Some((open, close)) => {
-            if tok.kind() == open {
-                state.paren_level += 1;
-            } else if tok.kind() == close {
-                state.paren_level -= 1;
-                if state.paren_level == 0 {
-                    state.rule_state.transition();
-                    state.paren_ty = None;
-                }
-            }
-        }
-        None => {
-            match tok.kind() {
-                T!['('] => {
-                    state.paren_ty = Some((T!['('], T![')']));
-                }
-                T!['{'] => {
-                    state.paren_ty = Some((T!['{'], T!['}']));
-                }
-                T!['['] => {
-                    state.paren_ty = Some((T!['['], T![']']));
-                }
-                _ => (),
-            }
-            if state.paren_ty.is_some() {
-                state.paren_level = 1;
-                state.rule_state.transition();
-            }
-        }
-    }
-}
-
-fn is_metavariable(token: &SyntaxToken) -> Option<TextRange> {
-    match token.kind() {
-        kind if kind.is_any_identifier() => {
-            if let Some(_dollar) = token.prev_token().filter(|t| t.kind() == T![$]) {
-                return Some(token.text_range());
-            }
-        }
-        _ => (),
-    };
-    None
-}
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index eb77c14c2a..5b5c51a363 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -156,7 +156,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 }
 
 /// ```
-/// macro_rules! noop { ($expr:expr) => { $expr }}
+/// macro_rules! noop { ($expr:expr) => { $expr }}
 /// noop!(1);
 /// ```
 macro_rules! noop {
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index 9477d0d1b8..b5e950ffc2 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -151,7 +151,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
     let bar = Foo::baz;
 
     let baz = (-42,);
-    let baz = -baz.0;
+    let baz = -baz.0;
 
     let _ = !true;