Mirror of https://github.com/rust-lang/rust-analyzer.git

Commit 2cf5d8811a (parent 454e481422)
Raise edition one more level

13 changed files with 61 additions and 35 deletions

@@ -172,7 +172,7 @@ pub use crate::ast::SourceFile;
 impl SourceFile {
     pub fn parse(text: &str) -> Parse<SourceFile> {
         let _p = tracing::span!(tracing::Level::INFO, "SourceFile::parse").entered();
-        let (green, errors) = parsing::parse_text(text);
+        let (green, errors) = parsing::parse_text(text, parser::Edition::Edition2021);
         let root = SyntaxNode::new_root(green.clone());

         assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
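
Note, not part of the diff: SourceFile::parse keeps its public signature in this hunk; only its internal call into parsing::parse_text now names the edition explicitly. A minimal caller-side sketch under that assumption, referring to the crate as `syntax`:

    // Caller-side usage is unchanged; the 2021 edition is still chosen inside parse().
    let parse = syntax::SourceFile::parse("fn main() {}");
    let file = parse.tree();
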
@@ -185,7 +185,10 @@ impl SourceFile {
 }

 impl ast::TokenTree {
-    pub fn reparse_as_comma_separated_expr(self) -> Parse<ast::MacroEagerInput> {
+    pub fn reparse_as_comma_separated_expr(
+        self,
+        edition: parser::Edition,
+    ) -> Parse<ast::MacroEagerInput> {
         let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);

         let mut parser_input = parser::Input::default();

@@ -219,8 +222,7 @@ impl ast::TokenTree {
             }
         }

-        let parser_output = parser::TopEntryPoint::MacroEagerInput
-            .parse(&parser_input, parser::Edition::Edition2021);
+        let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input, edition);

         let mut tokens =
             self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
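
Note, not part of the diff: after the two hunks above, the edition for reparse_as_comma_separated_expr is supplied by the caller rather than hardcoded inside the method. A hedged sketch of an updated call site, assuming `tt` is an ast::TokenTree obtained elsewhere:

    // Hypothetical caller; the explicit edition argument replaces the previously hardcoded Edition2021.
    let reparsed = tt.reparse_as_comma_separated_expr(parser::Edition::Edition2021);
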
@@ -9,12 +9,11 @@ use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};

 pub(crate) use crate::parsing::reparsing::incremental_reparse;

-pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::span!(tracing::Level::INFO, "parse_text").entered();
     let lexed = parser::LexedStr::new(text);
     let parser_input = lexed.to_input();
-    let parser_output =
-        parser::TopEntryPoint::SourceFile.parse(&parser_input, parser::Edition::Edition2021);
+    let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
 }
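
Note, not part of the diff: parse_text is crate-internal, so the visible effect of this hunk is that callers such as SourceFile::parse (first hunk) now choose the edition and pass it down. A minimal sketch of the updated internal call, using the names from the hunks above:

    // Inside the syntax crate; any other parser::Edition variant could be passed the same way.
    let (green, errors) = parse_text(text, parser::Edition::Edition2021);
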
@@ -26,7 +26,9 @@ pub(crate) fn incremental_reparse(
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }

-    if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
+    if let Some((green, new_errors, old_range)) =
+        reparse_block(node, edit, parser::Edition::Edition2021)
+    {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
     None

@@ -84,6 +86,7 @@ fn reparse_token(
 fn reparse_block(
     root: &SyntaxNode,
     edit: &Indel,
+    edition: parser::Edition,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
     let text = get_text_after_edit(node.clone().into(), edit);

@@ -94,7 +97,7 @@ fn reparse_block(
         return None;
     }

-    let tree_traversal = reparser.parse(&parser_input, parser::Edition::Edition2021);
+    let tree_traversal = reparser.parse(&parser_input, edition);

     let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);

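
Note, not part of the diff: the last three hunks thread the edition through the incremental-reparse path. incremental_reparse still hardcodes Edition2021 at its own level (hence the commit title: the hardcoded edition is raised one more level up the call chain), passes it into reparse_block, and reparse_block forwards it to the block reparser. A rough summary of the resulting flow, with unrelated arguments elided:

    // incremental_reparse            -- still picks parser::Edition::Edition2021 at this level
    //   -> reparse_block(node, edit, edition)
    //        -> reparser.parse(&parser_input, edition)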