Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-11-01 04:18:20 +00:00)

Merge commit '37f84c101b' into sync-from-ra

parent 6502421771
commit 4704881b64

311 changed files with 13700 additions and 9110 deletions
@@ -213,6 +213,28 @@ pub trait AttrsOwnerEdit: ast::HasAttrs {
            }
        }
    }

    fn add_attr(&self, attr: ast::Attr) {
        add_attr(self.syntax(), attr);

        fn add_attr(node: &SyntaxNode, attr: ast::Attr) {
            let indent = IndentLevel::from_node(node);
            attr.reindent_to(indent);

            let after_attrs_and_comments = node
                .children_with_tokens()
                .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
                .map_or(Position::first_child_of(node), |it| Position::before(it));

            ted::insert_all(
                after_attrs_and_comments,
                vec![
                    attr.syntax().clone().into(),
                    make::tokens::whitespace(&format!("\n{indent}")).into(),
                ],
            )
        }
    }
}

impl<T: ast::HasAttrs> AttrsOwnerEdit for T {}
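An illustrative usage sketch of the new `add_attr` helper, which attaches an outer attribute to any mutable node implementing `ast::HasAttrs`. It assumes the `attr_outer`/`meta_path` constructors added later in this commit; the `#[inline]` target is a placeholder.

// Illustrative only: attach `#[inline]` to the first function in a parsed file.
let file = SourceFile::parse("fn f() {}").tree().clone_for_update();
let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();
let attr = make::attr_outer(make::meta_path(make::path_from_text("inline")));
func.add_attr(attr.clone_for_update());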
@@ -676,12 +698,6 @@ fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
    }
}

impl ast::StmtList {
    pub fn push_front(&self, statement: ast::Stmt) {
        ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax());
    }
}

impl ast::VariantList {
    pub fn add_variant(&self, variant: ast::Variant) {
        let (indent, position) = match self.variants().last() {
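An illustrative sketch of `StmtList::push_front`, assuming both the block and the inserted statement are made mutable via `clone_for_update`; the statement itself is a placeholder.

// Illustrative only: prepend a statement to a freshly built block.
let block = make::block_expr(std::iter::empty(), Some(make::expr_unit())).clone_for_update();
let stmt: ast::Stmt = make::expr_stmt(make::expr_unit()).into();
block.stmt_list().unwrap().push_front(stmt.clone_for_update());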
@@ -732,6 +748,27 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
    Some(())
}

pub trait HasVisibilityEdit: ast::HasVisibility {
    fn set_visibility(&self, visbility: ast::Visibility) {
        match self.visibility() {
            Some(current_visibility) => {
                ted::replace(current_visibility.syntax(), visbility.syntax())
            }
            None => {
                let vis_before = self
                    .syntax()
                    .children_with_tokens()
                    .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
                    .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap());

                ted::insert(ted::Position::before(vis_before), visbility.syntax());
            }
        }
    }
}

impl<T: ast::HasVisibility> HasVisibilityEdit for T {}

pub trait Indent: AstNode + Clone + Sized {
    fn indent_level(&self) -> IndentLevel {
        IndentLevel::from_node(self.syntax())
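An illustrative sketch of the new `set_visibility` editing helper, assuming a mutable parse tree; the function name and `pub` visibility are placeholders.

// Illustrative only: make a parsed function `pub`.
let file = SourceFile::parse("fn f() {}").tree().clone_for_update();
let func = file.syntax().descendants().find_map(ast::Fn::cast).unwrap();
func.set_visibility(make::visibility_pub().clone_for_update());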
@@ -10,6 +10,8 @@
//! `parse(format!())` we use internally is an implementation detail -- long
//! term, it will be replaced with direct tree manipulation.
use itertools::Itertools;
use parser::T;
use rowan::NodeOrToken;
use stdx::{format_to, never};

use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken};
@@ -447,6 +449,21 @@ pub fn block_expr(
    ast_from_text(&format!("fn f() {buf}"))
}

pub fn async_move_block_expr(
    stmts: impl IntoIterator<Item = ast::Stmt>,
    tail_expr: Option<ast::Expr>,
) -> ast::BlockExpr {
    let mut buf = "async move {\n".to_string();
    for stmt in stmts.into_iter() {
        format_to!(buf, "    {stmt}\n");
    }
    if let Some(tail_expr) = tail_expr {
        format_to!(buf, "    {tail_expr}\n");
    }
    buf += "}";
    ast_from_text(&format!("const _: () = {buf};"))
}

pub fn tail_only_block_expr(tail_expr: ast::Expr) -> ast::BlockExpr {
    ast_from_text(&format!("fn f() {{ {tail_expr} }}"))
}
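An illustrative sketch of the two new block constructors; the unit tail expression is a placeholder.

// Illustrative only: `async move { () }` and `{ () }` built from the new constructors.
let async_block = make::async_move_block_expr(std::iter::empty(), Some(make::expr_unit()));
let tail_block = make::tail_only_block_expr(make::expr_unit());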
@@ -848,6 +865,36 @@ pub fn param_list(
    ast_from_text(&list)
}

pub fn trait_(
    is_unsafe: bool,
    ident: &str,
    gen_params: Option<ast::GenericParamList>,
    where_clause: Option<ast::WhereClause>,
    assoc_items: ast::AssocItemList,
) -> ast::Trait {
    let mut text = String::new();

    if is_unsafe {
        format_to!(text, "unsafe ");
    }

    format_to!(text, "trait {ident}");

    if let Some(gen_params) = gen_params {
        format_to!(text, "{} ", gen_params.to_string());
    } else {
        text.push(' ');
    }

    if let Some(where_clause) = where_clause {
        format_to!(text, "{} ", where_clause.to_string());
    }

    format_to!(text, "{}", assoc_items.to_string());

    ast_from_text(&text)
}

pub fn type_bound(bound: &str) -> ast::TypeBound {
    ast_from_text(&format!("fn f<T: {bound}>() {{ }}"))
}
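An illustrative sketch of the new `trait_` constructor, building an empty unsafe trait; the name `Marker` is a placeholder.

// Illustrative only: renders roughly as `unsafe trait Marker {}`.
let tr = make::trait_(true, "Marker", None, None, make::assoc_item_list());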
@@ -985,6 +1032,41 @@ pub fn struct_(
    ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",))
}

pub fn attr_outer(meta: ast::Meta) -> ast::Attr {
    ast_from_text(&format!("#[{meta}]"))
}

pub fn attr_inner(meta: ast::Meta) -> ast::Attr {
    ast_from_text(&format!("#![{meta}]"))
}

pub fn meta_expr(path: ast::Path, expr: ast::Expr) -> ast::Meta {
    ast_from_text(&format!("#[{path} = {expr}]"))
}

pub fn meta_token_tree(path: ast::Path, tt: ast::TokenTree) -> ast::Meta {
    ast_from_text(&format!("#[{path}{tt}]"))
}

pub fn meta_path(path: ast::Path) -> ast::Meta {
    ast_from_text(&format!("#[{path}]"))
}

pub fn token_tree(
    delimiter: SyntaxKind,
    tt: Vec<NodeOrToken<ast::TokenTree, SyntaxToken>>,
) -> ast::TokenTree {
    let (l_delimiter, r_delimiter) = match delimiter {
        T!['('] => ('(', ')'),
        T!['['] => ('[', ']'),
        T!['{'] => ('{', '}'),
        _ => panic!("invalid delimiter `{delimiter:?}`"),
    };
    let tt = tt.into_iter().join("");

    ast_from_text(&format!("tt!{l_delimiter}{tt}{r_delimiter}"))
}

#[track_caller]
fn ast_from_text<N: AstNode>(text: &str) -> N {
    let parse = SourceFile::parse(text);
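An illustrative sketch of the new attribute and meta constructors; the `path` key, the string literal, and `no_std` are placeholders.

// Illustrative only: builds `#[path = "lib.rs"]` and `#![no_std]`.
let outer = make::attr_outer(make::meta_expr(
    make::path_from_text("path"),
    make::expr_literal("\"lib.rs\"").into(),
));
let inner = make::attr_inner(make::meta_path(make::path_from_text("no_std")));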
@@ -1022,6 +1104,17 @@ pub mod tokens {
        )
    });

    pub fn semicolon() -> SyntaxToken {
        SOURCE_FILE
            .tree()
            .syntax()
            .clone_for_update()
            .descendants_with_tokens()
            .filter_map(|it| it.into_token())
            .find(|it| it.kind() == SEMICOLON)
            .unwrap()
    }

    pub fn single_space() -> SyntaxToken {
        SOURCE_FILE
            .tree()
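An illustrative check of the new detached semicolon token helper.

// Illustrative only: a free-standing `;` token, suitable for `ted` insertions.
let semi = make::tokens::semicolon();
assert_eq!(semi.text(), ";");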
@@ -171,6 +171,109 @@ impl SourceFile {
    }
}

impl ast::TokenTree {
    pub fn reparse_as_expr(self) -> Parse<ast::Expr> {
        let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);

        let mut parser_input = parser::Input::default();
        let mut was_joint = false;
        for t in tokens {
            let kind = t.kind();
            if kind.is_trivia() {
                was_joint = false
            } else {
                if kind == SyntaxKind::IDENT {
                    let token_text = t.text();
                    let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
                        .unwrap_or(SyntaxKind::IDENT);
                    parser_input.push_ident(contextual_kw);
                } else {
                    if was_joint {
                        parser_input.was_joint();
                    }
                    parser_input.push(kind);
                    // Tag the token as joint if it is float with a fractional part
                    // we use this jointness to inform the parser about what token split
                    // event to emit when we encounter a float literal in a field access
                    if kind == SyntaxKind::FLOAT_NUMBER && !t.text().ends_with('.') {
                        parser_input.was_joint();
                    }
                }
                was_joint = true;
            }
        }

        let parser_output = parser::TopEntryPoint::Expr.parse(&parser_input);

        let mut tokens =
            self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
        let mut text = String::new();
        let mut pos = TextSize::from(0);
        let mut builder = SyntaxTreeBuilder::default();
        for event in parser_output.iter() {
            match event {
                parser::Step::Token { kind, n_input_tokens } => {
                    let mut token = tokens.next().unwrap();
                    while token.kind().is_trivia() {
                        let text = token.text();
                        pos += TextSize::from(text.len() as u32);
                        builder.token(token.kind(), text);

                        token = tokens.next().unwrap();
                    }
                    text.push_str(token.text());
                    for _ in 1..n_input_tokens {
                        let token = tokens.next().unwrap();
                        text.push_str(token.text());
                    }

                    pos += TextSize::from(text.len() as u32);
                    builder.token(kind, &text);
                    text.clear();
                }
                parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
                    let token = tokens.next().unwrap();
                    let text = token.text();

                    match text.split_once('.') {
                        Some((left, right)) => {
                            assert!(!left.is_empty());
                            builder.start_node(SyntaxKind::NAME_REF);
                            builder.token(SyntaxKind::INT_NUMBER, left);
                            builder.finish_node();

                            // here we move the exit up, the original exit has been deleted in process
                            builder.finish_node();

                            builder.token(SyntaxKind::DOT, ".");

                            if has_pseudo_dot {
                                assert!(right.is_empty(), "{left}.{right}");
                            } else {
                                builder.start_node(SyntaxKind::NAME_REF);
                                builder.token(SyntaxKind::INT_NUMBER, right);
                                builder.finish_node();

                                // the parser creates an unbalanced start node, we are required to close it here
                                builder.finish_node();
                            }
                        }
                        None => unreachable!(),
                    }
                    pos += TextSize::from(text.len() as u32);
                }
                parser::Step::Enter { kind } => builder.start_node(kind),
                parser::Step::Exit => builder.finish_node(),
                parser::Step::Error { msg } => builder.error(msg.to_owned(), pos),
            }
        }

        let (green, errors) = builder.finish_raw();

        Parse { green, errors: Arc::new(errors), _ty: PhantomData }
    }
}

/// Matches a `SyntaxNode` against an `ast` type.
///
/// # Example:
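An illustrative sketch of the new `reparse_as_expr` entry point, pulling a token tree out of a macro call; the input string is a placeholder.

// Illustrative only: reparse the token tree of `m!(1 + 2 * 3)` as an expression.
let file = SourceFile::parse("fn f() { m!(1 + 2 * 3); }").tree();
let tt = file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let expr = tt.reparse_as_expr().tree();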