Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-28 21:05:02 +00:00)
Rename range to by_kind

parent e16f3a5ee2
commit 2ea1cfd780

3 changed files with 4 additions and 4 deletions
@@ -227,7 +227,7 @@ impl ExpansionInfo {
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);
 
-        let range = self.exp_map.range_by_token(token_id)?.range(token.value.kind())?;
+        let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
 
         let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
 
@@ -248,7 +248,7 @@ impl ExpansionInfo {
             }
         };
 
-        let range = token_map.range_by_token(token_id)?.range(token.value.kind())?;
+        let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
         let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
             .into_token()?;
         Some((tt.with_value(token), origin))
@@ -19,7 +19,7 @@ pub enum TokenTextRange {
 }
 
 impl TokenTextRange {
-    pub fn range(self, kind: SyntaxKind) -> Option<TextRange> {
+    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
         match self {
             TokenTextRange::Token(it) => Some(it),
             TokenTextRange::Delimiter(open, close) => match kind {
@@ -102,7 +102,7 @@ macro_rules! foobar {
     let (expansion, (token_map, content)) = expand_and_map(&rules, "foobar!(baz);");
 
     let get_text = |id, kind| -> String {
-        content[token_map.range_by_token(id).unwrap().range(kind).unwrap()].to_string()
+        content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
     };
 
     assert_eq!(expansion.token_trees.len(), 4);
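For orientation, here is a minimal, self-contained sketch of the renamed accessor. It is not the real rust-analyzer code: SyntaxKind, TextRange, and the parenthesis kinds below are simplified stand-ins, and the body of the Delimiter arm (cut off in the hunk above) is assumed to pick the opening or closing range based on the token kind.

// Minimal sketch, not the real rust-analyzer types: SyntaxKind and
// TextRange are stand-ins so the example compiles on its own.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum SyntaxKind {
    LParen,
    RParen,
    Ident,
}

// Stand-in for the real TextRange type.
type TextRange = std::ops::Range<u32>;

#[derive(Clone, Debug)]
enum TokenTextRange {
    // An ordinary token maps to a single range in the original text.
    Token(TextRange),
    // A delimiter pair carries two ranges: the opening and the closing token.
    Delimiter(TextRange, TextRange),
}

impl TokenTextRange {
    // Renamed from `range` to `by_kind`: the caller passes the kind of the
    // token it is looking at, which disambiguates a delimiter pair.
    fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
        match self {
            TokenTextRange::Token(it) => Some(it),
            TokenTextRange::Delimiter(open, close) => match kind {
                // Assumption: opening punctuation selects the first range,
                // closing punctuation the second; anything else is a mismatch.
                SyntaxKind::LParen => Some(open),
                SyntaxKind::RParen => Some(close),
                _ => None,
            },
        }
    }
}

fn main() {
    let delim = TokenTextRange::Delimiter(0..1, 5..6);
    assert_eq!(delim.clone().by_kind(SyntaxKind::LParen), Some(0..1));
    assert_eq!(delim.by_kind(SyntaxKind::Ident), None);
}

The rename also makes the call sites above read more naturally: range_by_token(id)?.by_kind(kind)? says that the kind selects which of the mapped ranges applies, rather than suggesting a plain getter.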