Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-30 13:51:31 +00:00
rename range -> text_range
Commit f3bdbec1b6, parent 6d5d82e412
54 changed files with 219 additions and 192 deletions
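The commit is a mechanical rename of the span accessor on syntax nodes and tokens: every call site that previously asked for .range() now calls .text_range(). A minimal sketch of the call-site pattern, assuming the ra_syntax crate of this era; the helper name widest_node_with_same_range is hypothetical and not part of the commit:

    use ra_syntax::{SyntaxNode, TextRange};

    // Hypothetical helper (not part of the commit) showing the renamed
    // accessor; the completion-context and extend-selection hunks below use
    // this exact "shallowest ancestor covering the same text" pattern.
    fn widest_node_with_same_range(node: &SyntaxNode) -> TextRange {
        // `text_range()` is the new name for the node's span in the file;
        // before this commit the same call was spelled `node.range()`.
        let range = node.text_range();
        node.ancestors()
            .take_while(|it| it.text_range() == range)
            .last()
            .map(|it| it.text_range())
            .unwrap_or(range)
    }

The hunks below apply that one-for-one substitution across call info, completions, diagnostics, navigation targets, file structure, extend selection, folding, goto definition/type definition/implementation, hover, join lines, matching brace, rename, runnables, symbol indexing, syntax highlighting, the syntax tree view, and the typing assists; a few call chains are rewrapped where the longer name pushed them over the line limit.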
@@ -47,7 +47,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
 // Number of arguments specified at the call site
 let num_args_at_callsite = arg_list.args().count();

-let arg_list_range = arg_list.syntax().range();
+let arg_list_range = arg_list.syntax().text_range();
 if !arg_list_range.contains_inclusive(position.offset) {
 tested_by!(call_info_bad_offset);
 return None;

@@ -57,7 +57,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
 num_args_at_callsite,
 arg_list
 .args()
-.take_while(|arg| arg.syntax().range().end() < position.offset)
+.take_while(|arg| arg.syntax().text_range().end() < position.offset)
 .count(),
 );

@@ -88,7 +88,7 @@ fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
 .visit::<ast::LoopExpr, _>(|it| it.loop_body())
 .accept(&node);
 if let Some(Some(body)) = loop_body {
-if leaf.range().is_subrange(&body.syntax().range()) {
+if leaf.text_range().is_subrange(&body.syntax().text_range()) {
 return true;
 }
 }

@@ -26,7 +26,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
 if Some(module) == ctx.module {
 if let Some(import) = res.import {
 if let Either::A(use_tree) = module.import_source(ctx.db, import) {
-if use_tree.syntax().range().contains_inclusive(ctx.offset) {
+if use_tree.syntax().text_range().contains_inclusive(ctx.offset) {
 // for `use self::foo<|>`, don't suggest `foo` as a completion
 tested_by!(dont_complete_current_use);
 continue;

@@ -12,7 +12,7 @@ use ra_text_edit::TextEditBuilder;
 fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder {
 let edit = {
 let receiver_range =
-ctx.dot_receiver.as_ref().expect("no receiver available").syntax().range();
+ctx.dot_receiver.as_ref().expect("no receiver available").syntax().text_range();
 let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end());
 let mut builder = TextEditBuilder::default();
 builder.replace(delete_range, snippet.to_string());

@@ -78,7 +78,7 @@ impl<'a> CompletionContext<'a> {
 pub(crate) fn source_range(&self) -> TextRange {
 match self.token.kind() {
 // workaroud when completion is triggered by trigger characters.
-IDENT => self.token.range(),
+IDENT => self.token.text_range(),
 _ => TextRange::offset_len(self.offset, 0.into()),
 }
 }

@@ -123,13 +123,17 @@ impl<'a> CompletionContext<'a> {
 }

 fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
-let name_range = name_ref.syntax().range();
+let name_range = name_ref.syntax().text_range();
 if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() {
 self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);
 }

-let top_node =
-name_ref.syntax().ancestors().take_while(|it| it.range() == name_range).last().unwrap();
+let top_node = name_ref
+.syntax()
+.ancestors()
+.take_while(|it| it.text_range() == name_range)
+.last()
+.unwrap();

 match top_node.parent().map(|it| it.kind()) {
 Some(SOURCE_FILE) | Some(ITEM_LIST) => {

@@ -180,23 +184,27 @@ impl<'a> CompletionContext<'a> {
 .ancestors()
 .find_map(|node| {
 if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
-return Some(stmt.syntax().range() == name_ref.syntax().range());
+return Some(
+stmt.syntax().text_range() == name_ref.syntax().text_range(),
+);
 }
 if let Some(block) = ast::Block::cast(node) {
 return Some(
-block.expr().map(|e| e.syntax().range())
-== Some(name_ref.syntax().range()),
+block.expr().map(|e| e.syntax().text_range())
+== Some(name_ref.syntax().text_range()),
 );
 }
 None
 })
 .unwrap_or(false);

-if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
+if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) {
 if let Some(if_expr) =
 find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off)
 {
-if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
+if if_expr.syntax().text_range().end()
+< name_ref.syntax().text_range().start()
+{
 self.after_if = true;
 }
 }

@@ -208,14 +216,14 @@ impl<'a> CompletionContext<'a> {
 // ident, so it should have the same range in the non-modified file
 self.dot_receiver = field_expr
 .expr()
-.map(|e| e.syntax().range())
+.map(|e| e.syntax().text_range())
 .and_then(|r| find_node_with_range(original_file.syntax(), r));
 }
 if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) {
 // As above
 self.dot_receiver = method_call_expr
 .expr()
-.map(|e| e.syntax().range())
+.map(|e| e.syntax().text_range())
 .and_then(|r| find_node_with_range(original_file.syntax(), r));
 self.is_call = true;
 }

@@ -229,6 +237,6 @@ fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Op
 fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
 match node.ancestors().filter_map(N::cast).next() {
 None => false,
-Some(n) => n.syntax().range() == node.range(),
+Some(n) => n.syntax().text_range() == node.text_range(),
 }
 }

@@ -96,7 +96,7 @@ fn check_unnecessary_braces_in_use_statement(
 ) -> Option<()> {
 let use_tree_list = ast::UseTreeList::cast(node.clone())?;
 if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
-let range = use_tree_list.syntax().range();
+let range = use_tree_list.syntax().text_range();
 let edit =
 text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree)
 .unwrap_or_else(|| {

@@ -126,8 +126,8 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
 ) -> Option<TextEdit> {
 let use_tree_list_node = single_use_tree.syntax().parent()?;
 if single_use_tree.path()?.segment()?.syntax().first_child_or_token()?.kind() == T![self] {
-let start = use_tree_list_node.prev_sibling_or_token()?.range().start();
-let end = use_tree_list_node.range().end();
+let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
+let end = use_tree_list_node.text_range().end();
 let range = TextRange::from_to(start, end);
 let mut edit_builder = TextEditBuilder::default();
 edit_builder.delete(range);

@@ -149,12 +149,12 @@ fn check_struct_shorthand_initialization(
 let field_expr = expr.syntax().text().to_string();
 if field_name == field_expr {
 let mut edit_builder = TextEditBuilder::default();
-edit_builder.delete(named_field.syntax().range());
-edit_builder.insert(named_field.syntax().range().start(), field_name);
+edit_builder.delete(named_field.syntax().text_range());
+edit_builder.insert(named_field.syntax().text_range().start(), field_name);
 let edit = edit_builder.finish();

 acc.push(Diagnostic {
-range: named_field.syntax().range(),
+range: named_field.syntax().text_range(),
 message: "Shorthand struct initialization".to_string(),
 severity: Severity::WeakWarning,
 fix: Some(SourceChange::source_file_edit(

@@ -275,7 +275,7 @@ impl NavigationTarget {
 ) -> NavigationTarget {
 //FIXME: use `_` instead of empty string
 let name = node.name().map(|it| it.text().clone()).unwrap_or_default();
-let focus_range = node.name().map(|it| it.syntax().range());
+let focus_range = node.name().map(|it| it.syntax().text_range());
 NavigationTarget::from_syntax(file_id, name, focus_range, node.syntax(), docs, description)
 }

@@ -291,7 +291,7 @@ impl NavigationTarget {
 file_id,
 name,
 kind: node.kind(),
-full_range: node.range(),
+full_range: node.text_range(),
 focus_range,
 // ptr: Some(LocalSyntaxPtr::new(node)),
 container_name: None,

@@ -73,8 +73,8 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
 Some(StructureNode {
 parent: None,
 label: name.text().to_string(),
-navigation_range: name.syntax().range(),
-node_range: node.syntax().range(),
+navigation_range: name.syntax().text_range(),
+node_range: node.syntax().text_range(),
 kind: node.syntax().kind(),
 detail,
 deprecated: node.attrs().filter_map(|x| x.as_named()).any(|x| x == "deprecated"),

@@ -140,8 +140,8 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
 let node = StructureNode {
 parent: None,
 label,
-navigation_range: target_type.syntax().range(),
-node_range: im.syntax().range(),
+navigation_range: target_type.syntax().text_range(),
+node_range: im.syntax().text_range(),
 kind: im.syntax().kind(),
 detail: None,
 deprecated: false,

@@ -42,19 +42,20 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 TokenAtOffset::None => return None,
 TokenAtOffset::Single(l) => {
 if string_kinds.contains(&l.kind()) {
-extend_single_word_in_comment_or_string(&l, offset).unwrap_or_else(|| l.range())
+extend_single_word_in_comment_or_string(&l, offset)
+.unwrap_or_else(|| l.text_range())
 } else {
-l.range()
+l.text_range()
 }
 }
-TokenAtOffset::Between(l, r) => pick_best(l, r).range(),
+TokenAtOffset::Between(l, r) => pick_best(l, r).text_range(),
 };
 return Some(leaf_range);
 };
 let node = match find_covering_element(root, range) {
 SyntaxElement::Token(token) => {
-if token.range() != range {
-return Some(token.range());
+if token.text_range() != range {
+return Some(token.text_range());
 }
 if let Some(comment) = ast::Comment::cast(token.clone()) {
 if let Some(range) = extend_comments(comment) {

@@ -65,12 +66,12 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 }
 SyntaxElement::Node(node) => node,
 };
-if node.range() != range {
-return Some(node.range());
+if node.text_range() != range {
+return Some(node.text_range());
 }

 // Using shallowest node with same range allows us to traverse siblings.
-let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();
+let node = node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap();

 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
 if let Some(range) = extend_list_item(&node) {

@@ -78,7 +79,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange
 }
 }

-node.parent().map(|it| it.range())
+node.parent().map(|it| it.text_range())
 }

 fn extend_single_word_in_comment_or_string(

@@ -86,7 +87,7 @@ fn extend_single_word_in_comment_or_string(
 offset: TextUnit,
 ) -> Option<TextRange> {
 let text: &str = leaf.text();
-let cursor_position: u32 = (offset - leaf.range().start()).into();
+let cursor_position: u32 = (offset - leaf.text_range().start()).into();

 let (before, after) = text.split_at(cursor_position as usize);

@@ -104,31 +105,31 @@ fn extend_single_word_in_comment_or_string(
 if range.is_empty() {
 None
 } else {
-Some(range + leaf.range().start())
+Some(range + leaf.text_range().start())
 }
 }

 fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange {
 let ws_text = ws.text();
-let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start();
-let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start();
+let suffix = TextRange::from_to(offset, ws.text_range().end()) - ws.text_range().start();
+let prefix = TextRange::from_to(ws.text_range().start(), offset) - ws.text_range().start();
 let ws_suffix = &ws_text.as_str()[suffix];
 let ws_prefix = &ws_text.as_str()[prefix];
 if ws_text.contains('\n') && !ws_suffix.contains('\n') {
 if let Some(node) = ws.next_sibling_or_token() {
 let start = match ws_prefix.rfind('\n') {
-Some(idx) => ws.range().start() + TextUnit::from((idx + 1) as u32),
-None => node.range().start(),
+Some(idx) => ws.text_range().start() + TextUnit::from((idx + 1) as u32),
+None => node.text_range().start(),
 };
-let end = if root.text().char_at(node.range().end()) == Some('\n') {
-node.range().end() + TextUnit::of_char('\n')
+let end = if root.text().char_at(node.text_range().end()) == Some('\n') {
+node.text_range().end() + TextUnit::of_char('\n')
 } else {
-node.range().end()
+node.text_range().end()
 };
 return TextRange::from_to(start, end);
 }
 }
-ws.range()
+ws.text_range()
 }

 fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken {

@@ -161,7 +162,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
 }

 if let Some(comma_node) = nearby_comma(node, Direction::Prev) {
-return Some(TextRange::from_to(comma_node.range().start(), node.range().end()));
+return Some(TextRange::from_to(comma_node.text_range().start(), node.text_range().end()));
 }
 if let Some(comma_node) = nearby_comma(node, Direction::Next) {
 // Include any following whitespace when comma if after list item.

@@ -171,7 +172,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option<TextRange> {
 .filter(|node| is_single_line_ws(node))
 .unwrap_or(comma_node);

-return Some(TextRange::from_to(node.range().start(), final_node.range().end()));
+return Some(TextRange::from_to(node.text_range().start(), final_node.text_range().end()));
 }

 None

@@ -181,7 +182,10 @@ fn extend_comments(comment: ast::Comment) -> Option<TextRange> {
 let prev = adj_comments(&comment, Direction::Prev);
 let next = adj_comments(&comment, Direction::Next);
 if prev != next {
-Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end()))
+Some(TextRange::from_to(
+prev.syntax().text_range().start(),
+next.syntax().text_range().end(),
+))
 } else {
 None
 }

@@ -35,7 +35,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
 SyntaxElement::Token(token) => token.text().contains('\n'),
 };
 if is_multiline {
-res.push(Fold { range: element.range(), kind });
+res.push(Fold { range: element.text_range(), kind });
 continue;
 }
 }

@@ -132,7 +132,7 @@ fn contiguous_range_for_group_unless(
 }

 if first != &last {
-Some(TextRange::from_to(first.range().start(), last.range().end()))
+Some(TextRange::from_to(first.text_range().start(), last.text_range().end()))
 } else {
 // The group consists of only one element, therefore it cannot be folded
 None

@@ -178,7 +178,10 @@ fn contiguous_range_for_comment(
 }

 if first != last {
-Some(TextRange::from_to(first.syntax().range().start(), last.syntax().range().end()))
+Some(TextRange::from_to(
+first.syntax().text_range().start(),
+last.syntax().text_range().end(),
+))
 } else {
 // The group consists of only one element, therefore it cannot be folded
 None

@@ -23,11 +23,11 @@ pub(crate) fn goto_definition(
 let syntax = parse.tree().syntax().clone();
 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, position.offset) {
 let navs = reference_definition(db, position.file_id, &name_ref).to_vec();
-return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec()));
+return Some(RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec()));
 }
 if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
 let navs = name_definition(db, position.file_id, &name)?;
-return Some(RangeInfo::new(name.syntax().range(), navs));
+return Some(RangeInfo::new(name.syntax().text_range(), navs));
 }
 None
 }

@@ -32,7 +32,7 @@ pub(crate) fn goto_type_definition(
 let adt_def = analyzer.autoderef(db, ty).find_map(|ty| ty.as_adt().map(|adt| adt.0))?;

 let nav = NavigationTarget::from_adt_def(db, adt_def);
-Some(RangeInfo::new(node.range(), vec![nav]))
+Some(RangeInfo::new(node.text_range(), vec![nav]))
 }

 #[cfg(test)]

@@ -172,7 +172,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
 }

 if !res.is_empty() {
-range = Some(name_ref.syntax().range())
+range = Some(name_ref.syntax().text_range())
 }
 } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
 if let Some(parent) = name.syntax().parent() {

@@ -210,7 +210,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
 }

 if !res.is_empty() && range.is_none() {
-range = Some(name.syntax().range());
+range = Some(name.syntax().text_range());
 }
 }

@@ -218,9 +218,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
 let node = ancestors_at_offset(file.syntax(), position.offset).find(|n| {
 ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()
 })?;
-let frange = FileRange { file_id: position.file_id, range: node.range() };
+let frange = FileRange { file_id: position.file_id, range: node.text_range() };
 res.extend(type_of(db, frange).map(rust_code_markup));
-range = Some(node.range());
+range = Some(node.text_range());
 }

 let range = range?;

@@ -246,7 +246,7 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
 // if we picked identifier, expand to pattern/expression
 let node = leaf_node
 .ancestors()
-.take_while(|it| it.range() == leaf_node.range())
+.take_while(|it| it.text_range() == leaf_node.text_range())
 .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
 let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
 let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))

@@ -15,12 +15,12 @@ pub(crate) fn goto_implementation(

 if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
 return Some(RangeInfo::new(
-nominal_def.syntax().range(),
+nominal_def.syntax().text_range(),
 impls_for_def(db, &nominal_def, module)?,
 ));
 } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
 return Some(RangeInfo::new(
-trait_def.syntax().range(),
+trait_def.syntax().text_range(),
 impls_for_trait(db, &trait_def, module)?,
 ));
 }

@@ -28,14 +28,14 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit {
 };
 let mut edit = TextEditBuilder::default();
 for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
-let range = match range.intersection(&token.range()) {
+let range = match range.intersection(&token.text_range()) {
 Some(range) => range,
 None => continue,
-} - token.range().start();
+} - token.text_range().start();
 let text = token.text();
 for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
 let pos: TextUnit = (pos as u32).into();
-let off = token.range().start() + range.start() + pos;
+let off = token.text_range().start() + range.start() + pos;
 if !edit.invalidates_offset(off) {
 remove_newline(&mut edit, &token, off);
 }

@@ -49,7 +49,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
 if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
 // The node is either the first or the last in the file
 let suff = &token.text()[TextRange::from_to(
-offset - token.range().start() + TextUnit::of_char('\n'),
+offset - token.text_range().start() + TextUnit::of_char('\n'),
 TextUnit::of_str(token.text()),
 )];
 let spaces = suff.bytes().take_while(|&b| b == b' ').count();

@@ -86,7 +86,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
 let next = token.next_sibling_or_token().unwrap();
 if is_trailing_comma(prev.kind(), next.kind()) {
 // Removes: trailing comma, newline (incl. surrounding whitespace)
-edit.delete(TextRange::from_to(prev.range().start(), token.range().end()));
+edit.delete(TextRange::from_to(prev.text_range().start(), token.text_range().end()));
 } else if prev.kind() == T![,] && next.kind() == T!['}'] {
 // Removes: comma, newline (incl. surrounding whitespace)
 let space = if let Some(left) = prev.prev_sibling_or_token() {

@@ -95,7 +95,7 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
 " "
 };
 edit.replace(
-TextRange::from_to(prev.range().start(), token.range().end()),
+TextRange::from_to(prev.text_range().start(), token.text_range().end()),
 space.to_string(),
 );
 } else if let (Some(_), Some(next)) = (

@@ -104,12 +104,12 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
 ) {
 // Removes: newline (incl. surrounding whitespace), start of the next comment
 edit.delete(TextRange::from_to(
-token.range().start(),
-next.syntax().range().start() + TextUnit::of_str(next.prefix()),
+token.text_range().start(),
+next.syntax().text_range().start() + TextUnit::of_str(next.prefix()),
 ));
 } else {
 // Remove newline but add a computed amount of whitespace characters
-edit.replace(token.range(), compute_ws(prev.kind(), next.kind()).to_string());
+edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string());
 }
 }

@@ -125,7 +125,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Op
 let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
 let expr = extract_trivial_expression(&block)?;

-let block_range = block_expr.syntax().range();
+let block_range = block_expr.syntax().text_range();
 let mut buf = expr.syntax().text().to_string();

 // Match block needs to have a comma after the block

@@ -143,7 +143,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Op
 fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
 let use_tree_list = ast::UseTreeList::cast(token.parent())?;
 let (tree,) = use_tree_list.use_trees().collect_tuple()?;
-edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string());
+edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string());
 Some(())
 }

@@ -12,7 +12,7 @@ pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
 let parent = brace_node.parent();
 let matching_kind = BRACES[brace_idx ^ 1];
 let matching_node = parent.children_with_tokens().find(|node| node.kind() == matching_kind)?;
-Some(matching_node.range().start())
+Some(matching_node.text_range().start())
 }

 #[cfg(test)]

@@ -166,7 +166,7 @@ fn rename_mod(
 file_id: position.file_id,
 edit: {
 let mut builder = ra_text_edit::TextEditBuilder::default();
-builder.replace(ast_name.syntax().range(), new_name.into());
+builder.replace(ast_name.syntax().text_range(), new_name.into());
 builder.finish()
 },
 };

@@ -47,7 +47,7 @@ fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> {
 } else {
 return None;
 };
-Some(Runnable { range: fn_def.syntax().range(), kind })
+Some(Runnable { range: fn_def.syntax().text_range(), kind })
 }

 fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> {

@@ -62,7 +62,7 @@ fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Opti
 if !has_test_function {
 return None;
 }
-let range = module.syntax().range();
+let range = module.syntax().text_range();
 let module = hir::source_binder::module_from_child_node(db, file_id, module.syntax())?;

 let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::");

@@ -271,7 +271,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec
 fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
 fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
 let name = node.name()?;
-let name_range = name.syntax().range();
+let name_range = name.syntax().text_range();
 let name = name.text().clone();
 let ptr = SyntaxNodePtr::new(node.syntax());

@@ -189,11 +189,11 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
 if let Some(segment) = path.segment() {
 if let Some(name_ref) = segment.name_ref() {
 highlighted.insert(name_ref.syntax().clone().into());
-let range_start = name_ref.syntax().range().start();
-let mut range_end = name_ref.syntax().range().end();
+let range_start = name_ref.syntax().text_range().start();
+let mut range_end = name_ref.syntax().text_range().end();
 for sibling in path.syntax().siblings_with_tokens(Direction::Next) {
 match sibling.kind() {
-T![!] | IDENT => range_end = sibling.range().end(),
+T![!] | IDENT => range_end = sibling.text_range().end(),
 _ => (),
 }
 }

@@ -209,7 +209,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
 continue;
 }
 };
-res.push(HighlightedRange { range: node.range(), tag, binding_hash })
+res.push(HighlightedRange { range: node.text_range(), tag, binding_hash })
 }
 res
 }

@@ -239,9 +239,9 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
 buf.push_str("<pre><code>");
 let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token());
 for token in tokens {
-could_intersect.retain(|it| token.range().start() <= it.range.end());
+could_intersect.retain(|it| token.text_range().start() <= it.range.end());
 while let Some(r) = ranges.get(frontier) {
-if r.range.start() <= token.range().end() {
+if r.range.start() <= token.text_range().end() {
 could_intersect.push(r);
 frontier += 1;
 } else {

@@ -251,7 +251,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
 let text = html_escape(&token.text());
 let ranges = could_intersect
 .iter()
-.filter(|it| token.range().is_subrange(&it.range))
+.filter(|it| token.text_range().is_subrange(&it.range))
 .collect::<Vec<_>>();
 if ranges.is_empty() {
 buf.push_str(&text);

@@ -45,7 +45,7 @@ fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<

 fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
 // Range of the full node
-let node_range = node.range();
+let node_range = node.text_range();
 let text = node.text().to_string();

 // We start at some point inside the node

@@ -24,7 +24,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour

 let prefix = comment.prefix();
 if position.offset
-< comment.syntax().range().start() + TextUnit::of_str(prefix) + TextUnit::from(1)
+< comment.syntax().text_range().start() + TextUnit::of_str(prefix) + TextUnit::from(1)
 {
 return None;
 }

@@ -45,7 +45,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
 }

 fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
-let ws = match find_token_at_offset(file.syntax(), token.range().start()) {
+let ws = match find_token_at_offset(file.syntax(), token.text_range().start()) {
 TokenAtOffset::Between(l, r) => {
 assert!(r == *token);
 l

@@ -71,7 +71,7 @@ pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
 return None;
 }
 if let Some(expr) = let_stmt.initializer() {
-let expr_range = expr.syntax().range();
+let expr_range = expr.syntax().text_range();
 if expr_range.contains(eq_offset) && eq_offset != expr_range.start() {
 return None;
 }

@@ -81,7 +81,7 @@ pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
 } else {
 return None;
 }
-let offset = let_stmt.syntax().range().end();
+let offset = let_stmt.syntax().text_range().end();
 let mut edit = TextEditBuilder::default();
 edit.insert(offset, ";".to_string());
 Some(edit.finish())