Mirror of https://github.com/rust-lang/rust-analyzer.git

streamline API

Commit d52ee59a71 (parent 62be91b82d)
10 changed files with 33 additions and 37 deletions
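The change, visible in every hunk below, replaces the free function ra_syntax::algo::find_token_at_offset(node, offset) with an inherent method, node.token_at_offset(offset), and moves the TokenAtOffset re-export from the algo module to the crate root. A minimal before/after sketch of a caller; token_at_offset and left_biased are confirmed by the hunks, the helper itself is made up:

use ra_syntax::{AstNode, SourceFile, SyntaxToken, TextUnit};

// Resolve the token under the cursor, preferring the token on the left
// when the offset falls exactly on a token boundary.
fn token_under_cursor(file: &SourceFile, offset: TextUnit) -> Option<SyntaxToken> {
    // Before this commit (free function from ra_syntax::algo):
    //     find_token_at_offset(file.syntax(), offset).left_biased()
    // After (method on SyntaxNode, returning a TokenAtOffset):
    file.syntax().token_at_offset(offset).left_biased()
}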
@@ -1,6 +1,6 @@
 use hir::source_binder;
 use ra_syntax::{
-    algo::{find_covering_element, find_node_at_offset, find_token_at_offset},
+    algo::{find_covering_element, find_node_at_offset},
     ast, AstNode, Parse, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,
@@ -48,7 +48,7 @@ impl<'a> CompletionContext<'a> {
     ) -> Option<CompletionContext<'a>> {
         let module = source_binder::module_from_position(db, position);
         let token =
-            find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?;
+            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
         let analyzer =
             hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
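The left_biased() call survives the rewrite unchanged: when the completion offset sits exactly between two tokens (cursor right after an identifier, say), token_at_offset yields both neighbours, and completion wants the token just typed, i.e. the left one. A hedged illustration; right_biased is assumed to be the mirror-image accessor:

use ra_syntax::{AstNode, SourceFile, TextUnit};

// For `let foo<|> = 1;` with the cursor at the end of `foo`, the offset
// touches two tokens: `foo` on the left and whitespace on the right.
fn show_bias(file: &SourceFile, offset: TextUnit) {
    let tokens = file.syntax().token_at_offset(offset);
    let left = tokens.clone().left_biased(); // Some(`foo`) -- what completion inspects
    let right = tokens.right_biased();       // Some(` `)
    eprintln!("left: {:?}, right: {:?}", left, right);
}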
@@ -1,10 +1,10 @@
 use ra_db::SourceDatabase;
 use ra_syntax::{
-    algo::{find_covering_element, find_token_at_offset, TokenAtOffset},
+    algo::find_covering_element,
     ast::{self, AstNode, AstToken},
     Direction, NodeOrToken,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, TextRange, TextUnit, T,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };

 use crate::{db::RootDatabase, FileRange};
@@ -34,7 +34,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange>
     if range.is_empty() {
         let offset = range.start();
-        let mut leaves = find_token_at_offset(root, offset);
+        let mut leaves = root.token_at_offset(offset);
         if leaves.clone().all(|it| it.kind() == WHITESPACE) {
             return Some(extend_ws(root, leaves.next()?, offset));
         }
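extend_selection leans on the fact that TokenAtOffset is also a cloneable iterator over the at-most-two tokens touching the offset, so it can test all candidates and then consume one; that is why only the construction changes here. The same pattern in isolation, as a hypothetical helper (every call is taken from the hunk above):

use ra_syntax::{AstNode, SourceFile, SyntaxKind::WHITESPACE, SyntaxToken, TextUnit};

// Return the first token at `offset`, but only if every token touching the
// offset is whitespace -- mirroring the `leaves.clone().all(..)` check above.
fn whitespace_at(file: &SourceFile, offset: TextUnit) -> Option<SyntaxToken> {
    let mut leaves = file.syntax().token_at_offset(offset);
    if leaves.clone().all(|it| it.kind() == WHITESPACE) {
        leaves.next()
    } else {
        None
    }
}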
@@ -1,5 +1,5 @@
 use ra_db::SourceDatabase;
-use ra_syntax::{algo::find_token_at_offset, ast, AstNode};
+use ra_syntax::{ast, AstNode};

 use crate::{db::RootDatabase, FilePosition, NavigationTarget, RangeInfo};
@@ -9,7 +9,7 @@ pub(crate) fn goto_type_definition(
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let parse = db.parse(position.file_id);

-    let node = find_token_at_offset(parse.tree().syntax(), position.offset).find_map(|token| {
+    let node = parse.tree().syntax().token_at_offset(position.offset).find_map(|token| {
         token
             .parent()
             .ancestors()
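Because TokenAtOffset implements Iterator, Iterator::find_map applies directly, as in the hunk above: try each candidate token, climb its ancestors, stop at the first hit. A reduced sketch of that shape; ast::Expr is an arbitrary target chosen for the example, not what goto_type_definition actually looks for:

use ra_syntax::{ast, AstNode, SourceFile, TextUnit};

// Innermost expression containing whichever token at `offset` has one.
fn expr_at_offset(file: &SourceFile, offset: TextUnit) -> Option<ast::Expr> {
    file.syntax()
        .token_at_offset(offset)
        .find_map(|token| token.parent().ancestors().find_map(ast::Expr::cast))
}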
@@ -1,9 +1,11 @@
-use ra_syntax::{algo::find_token_at_offset, ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};
+use ra_syntax::{ast::AstNode, SourceFile, SyntaxKind, TextUnit, T};

 pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
     const BRACES: &[SyntaxKind] =
         &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>]];
-    let (brace_node, brace_idx) = find_token_at_offset(file.syntax(), offset)
+    let (brace_node, brace_idx) = file
+        .syntax()
+        .token_at_offset(offset)
         .filter_map(|node| {
             let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
             Some((node, idx))
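matching_brace is a public entry point, so a quick usage sketch may help; the SourceFile::parse/tree plumbing and the concrete offsets are assumptions based on the other hunks in this commit, not shown here:

use ra_syntax::{SourceFile, TextUnit};

// Calls the crate-local matching_brace defined above.
fn demo() {
    let parse = SourceFile::parse("fn main() { }");
    let file = parse.tree();
    // Offset 10 points at `{`; its match `}` starts at offset 12.
    assert_eq!(
        matching_brace(&file, TextUnit::from_usize(10)),
        Some(TextUnit::from_usize(12))
    );
}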
@@ -1,11 +1,11 @@
 use ra_db::{FilePosition, SourceDatabase};
 use ra_fmt::leading_indent;
 use ra_syntax::{
-    algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
+    algo::find_node_at_offset,
     ast::{self, AstToken},
     AstNode, SmolStr, SourceFile,
     SyntaxKind::*,
-    SyntaxToken, TextRange, TextUnit,
+    SyntaxToken, TextRange, TextUnit, TokenAtOffset,
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};
@@ -14,7 +14,9 @@ use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
     let parse = db.parse(position.file_id);
     let file = parse.tree();
-    let comment = find_token_at_offset(file.syntax(), position.offset)
+    let comment = file
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Comment::cast)?;
@@ -45,7 +47,7 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
 }

 fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
-    let ws = match find_token_at_offset(file.syntax(), token.text_range().start()) {
+    let ws = match file.syntax().token_at_offset(token.text_range().start()) {
         TokenAtOffset::Between(l, r) => {
             assert!(r == *token);
             l
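node_indent needs specifically the token to the left of `token`, so instead of iterating it matches on the shape of TokenAtOffset directly. The three variants spelled out in a hypothetical helper; only Between is exercised above, the None/Single arms are illustrative:

use ra_syntax::{AstNode, SourceFile, SyntaxToken, TextUnit, TokenAtOffset};

// What sits at `offset`: nothing, the inside of one token, or a boundary
// between two tokens (where we pick the left one, as node_indent does).
fn token_left_of(file: &SourceFile, offset: TextUnit) -> Option<SyntaxToken> {
    match file.syntax().token_at_offset(offset) {
        TokenAtOffset::None => None,              // offset outside the tree
        TokenAtOffset::Single(t) => Some(t),      // offset strictly inside `t`
        TokenAtOffset::Between(l, _r) => Some(l), // offset on a token boundary
    }
}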
@@ -91,7 +93,10 @@ pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
     let parse = db.parse(position.file_id);
     assert_eq!(parse.tree().syntax().text().char_at(position.offset), Some('.'));

-    let whitespace = find_token_at_offset(parse.tree().syntax(), position.offset)
+    let whitespace = parse
+        .tree()
+        .syntax()
+        .token_at_offset(position.offset)
         .left_biased()
         .and_then(ast::Whitespace::cast)?;