Add always disabled gen parse support

Lukas Wirth 2024-07-17 10:49:12 +02:00
parent 983c9c122e
commit 7011094685
32 changed files with 376 additions and 55 deletions

View file

@@ -352,13 +352,22 @@ pub enum BlockModifier {
     Unsafe(SyntaxToken),
     Try(SyntaxToken),
     Const(SyntaxToken),
+    AsyncGen(SyntaxToken),
+    Gen(SyntaxToken),
     Label(ast::Label),
 }

 impl ast::BlockExpr {
     pub fn modifier(&self) -> Option<BlockModifier> {
-        self.async_token()
-            .map(BlockModifier::Async)
+        self.gen_token()
+            .map(|v| {
+                if self.async_token().is_some() {
+                    BlockModifier::AsyncGen(v)
+                } else {
+                    BlockModifier::Gen(v)
+                }
+            })
+            .or_else(|| self.async_token().map(BlockModifier::Async))
             .or_else(|| self.unsafe_token().map(BlockModifier::Unsafe))
             .or_else(|| self.try_token().map(BlockModifier::Try))
             .or_else(|| self.const_token().map(BlockModifier::Const))
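
As a reading aid (not part of the commit): with the two new variants, callers of `modifier()` can distinguish `gen`, `async gen`, and plain `async` blocks. A minimal sketch, assuming `BlockModifier` is reachable as `syntax::ast::BlockModifier` and that `block` is an already-parsed `ast::BlockExpr`:

    use syntax::ast::{self, BlockModifier};

    // Hypothetical helper, not part of this commit: report whether a block
    // expression is a `gen { .. }` or `async gen { .. }` block.
    fn is_gen_block(block: &ast::BlockExpr) -> bool {
        matches!(
            block.modifier(),
            Some(BlockModifier::Gen(_) | BlockModifier::AsyncGen(_))
        )
    }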

View file

@@ -184,6 +184,10 @@ impl BlockExpr {
     #[inline]
     pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
     #[inline]
+    pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) }
+    #[inline]
+    pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
+    #[inline]
     pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
@@ -239,6 +243,17 @@ impl CastExpr {
     pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
 }

+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ClosureBinder {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ClosureBinder {
+    #[inline]
+    pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+    #[inline]
+    pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+}
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ClosureExpr {
     pub(crate) syntax: SyntaxNode,
@@ -246,7 +261,7 @@ pub struct ClosureExpr {
 impl ast::HasAttrs for ClosureExpr {}
 impl ClosureExpr {
     #[inline]
-    pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+    pub fn closure_binder(&self) -> Option<ClosureBinder> { support::child(&self.syntax) }
     #[inline]
     pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
     #[inline]
@@ -256,7 +271,7 @@ impl ClosureExpr {
     #[inline]
     pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
     #[inline]
-    pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+    pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) }
     #[inline]
     pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
     #[inline]
@@ -2446,6 +2461,20 @@ impl AstNode for CastExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl AstNode for ClosureBinder {
+    #[inline]
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_BINDER }
+    #[inline]
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    #[inline]
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
 impl AstNode for ClosureExpr {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
@@ -5512,6 +5541,11 @@ impl std::fmt::Display for CastExpr {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
+impl std::fmt::Display for ClosureBinder {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
 impl std::fmt::Display for ClosureExpr {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
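
For orientation (not from the commit itself): the regenerated nodes route a closure's `for<..>` generics through the new `ClosureBinder` node instead of exposing `generic_param_list` on `ClosureExpr` directly. A hedged sketch of how a consumer might read them now, assuming both items are exported from `syntax::ast`:

    use syntax::ast;

    // Illustrative only: the `for<'a, ..>` parameter list now lives one level
    // deeper, behind the ClosureBinder child of the closure expression.
    fn binder_generics(closure: &ast::ClosureExpr) -> Option<ast::GenericParamList> {
        closure.closure_binder()?.generic_param_list()
    }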

View file

@@ -11,7 +11,7 @@ pub(crate) use crate::parsing::reparsing::incremental_reparse;

 pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::info_span!("parse_text").entered();
-    let lexed = parser::LexedStr::new(text);
+    let lexed = parser::LexedStr::new(edition, text);
     let parser_input = lexed.to_input();
     let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
@@ -24,7 +24,7 @@ pub(crate) fn parse_text_at(
     edition: parser::Edition,
 ) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::info_span!("parse_text_at").entered();
-    let lexed = parser::LexedStr::new(text);
+    let lexed = parser::LexedStr::new(edition, text);
     let parser_input = lexed.to_input();
     let parser_output = entry.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
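
Both entry points now hand the edition to the lexer as well as to the parser. A rough sketch of the resulting call shape (illustrative only; it reuses `parser::Edition::CURRENT` the way the reparser below does):

    // Illustrative only: lexing is edition-aware, so edition-dependent tokens
    // such as the contextual `gen` keyword can be classified while lexing.
    fn lex_and_parse(text: &str) {
        let lexed = parser::LexedStr::new(parser::Edition::CURRENT, text);
        let input = lexed.to_input();
        // In the real code the output is fed to build_tree together with `lexed`.
        let _output = parser::TopEntryPoint::SourceFile.parse(&input, parser::Edition::CURRENT);
    }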

View file

@@ -6,7 +6,7 @@
 //!   - otherwise, we search for the nearest `{}` block which contains the edit
 //!     and try to parse only this block.

-use parser::Reparser;
+use parser::{Edition, Reparser};
 use text_edit::Indel;

 use crate::{
@@ -51,7 +51,8 @@ fn reparse_token(
     }

     let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
-    let (new_token_kind, new_err) = parser::LexedStr::single_token(&new_text)?;
+    let (new_token_kind, new_err) =
+        parser::LexedStr::single_token(Edition::CURRENT, &new_text)?;

     if new_token_kind != prev_token_kind
         || (new_token_kind == IDENT && is_contextual_kw(&new_text))
@@ -64,7 +65,8 @@
     // `b` no longer remains an identifier, but becomes a part of byte string literal
     if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) {
         new_text.push(next_char);
-        let token_with_next_char = parser::LexedStr::single_token(&new_text);
+        let token_with_next_char =
+            parser::LexedStr::single_token(Edition::CURRENT, &new_text);
         if let Some((_kind, _error)) = token_with_next_char {
             return None;
         }
@@ -91,7 +93,7 @@ fn reparse_block(
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
     let text = get_text_after_edit(node.clone().into(), edit);

-    let lexed = parser::LexedStr::new(text.as_str());
+    let lexed = parser::LexedStr::new(Edition::CURRENT, text.as_str());
     let parser_input = lexed.to_input();
     if !is_balanced(&lexed) {
         return None;
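
To round off the reparsing changes (a sketch, not from the commit): `LexedStr::single_token` also takes the edition now, and the call sites above suggest it returns the lexed kind plus an optional error. Under that assumption, a single-token re-lex check could look like:

    use parser::{Edition, LexedStr, SyntaxKind};

    // Illustrative only: re-lex an edited snippet under the current edition and
    // return its kind if it still forms exactly one token.
    fn single_token_kind(new_text: &str) -> Option<SyntaxKind> {
        LexedStr::single_token(Edition::CURRENT, new_text).map(|(kind, _err)| kind)
    }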