Merge commit '3b7c7f97e4' into sync-from-ra

Laurențiu Nicola 2023-11-08 08:15:03 +02:00
parent 6eaf3f8bb2
commit d1d111d09e
177 changed files with 14930 additions and 2099 deletions

@@ -23,7 +23,7 @@ indexmap = "2.0.0"
smol_str.workspace = true
triomphe.workspace = true
rustc_lexer.workspace = true
rustc-dependencies.workspace = true
parser.workspace = true
profile.workspace = true
@@ -41,4 +41,4 @@ test-utils.workspace = true
sourcegen.workspace = true
[features]
in-rust-tree = []
in-rust-tree = ["rustc-dependencies/in-rust-tree"]
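
The new feature entry above forwards this crate's "in-rust-tree" feature to the rustc-dependencies facade crate, so enabling it here also switches that dependency to the in-tree compiler crates. A minimal sketch of how such a forwarded feature is consumed on the Rust side; the cfg_attr line mirrors the one added to lib.rs later in this diff, while the extern crate line is illustrative rather than taken from the diff:

    // Only request the nightly-only rustc_private feature when the Cargo
    // feature is enabled; otherwise the crate builds on stable toolchains.
    #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]

    // Illustrative: with rustc_private available, in-tree crates are pulled in
    // via `extern crate` rather than a Cargo dependency.
    #[cfg(feature = "in-rust-tree")]
    extern crate rustc_lexer;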

@@ -224,7 +224,7 @@ pub trait AttrsOwnerEdit: ast::HasAttrs {
let after_attrs_and_comments = node
.children_with_tokens()
.find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
.map_or(Position::first_child_of(node), |it| Position::before(it));
.map_or(Position::first_child_of(node), Position::before);
ted::insert_all(
after_attrs_and_comments,
@@ -433,7 +433,9 @@ impl ast::UseTree {
if &path == prefix && self.use_tree_list().is_none() {
if self.star_token().is_some() {
// path$0::* -> *
self.coloncolon_token().map(ted::remove);
if let Some(a) = self.coloncolon_token() {
ted::remove(a)
}
ted::remove(prefix.syntax());
} else {
// path$0 -> self
@@ -460,7 +462,9 @@ impl ast::UseTree {
for p in successors(parent.parent_path(), |it| it.parent_path()) {
p.segment()?;
}
prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove);
if let Some(a) = prefix.parent_path().and_then(|p| p.coloncolon_token()) {
ted::remove(a)
}
ted::remove(prefix.syntax());
Some(())
}
@@ -976,7 +980,9 @@ enum Foo {
fn check_add_variant(before: &str, expected: &str, variant: ast::Variant) {
let enum_ = ast_mut_from_text::<ast::Enum>(before);
enum_.variant_list().map(|it| it.add_variant(variant));
if let Some(it) = enum_.variant_list() {
it.add_variant(variant)
}
let after = enum_.to_string();
assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(after.trim()));
}
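
The edit_in_place changes above all replace Option::map calls that were run purely for their side effect with explicit if let blocks, so no unused Option<()> value is built and discarded. A standalone sketch of the pattern (std only; names are illustrative):

    fn remove_token(token: char) {
        println!("removed {token}");
    }

    fn main() {
        let colon: Option<char> = Some(':');
        // Before: colon.map(remove_token); -- builds and discards an Option<()>.
        // After: the side effect is explicit and nothing is discarded.
        if let Some(t) = colon {
            remove_token(t);
        }
    }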

@@ -433,7 +433,6 @@ pub fn record_field(
ast_from_text(&format!("struct S {{ {visibility}{name}: {ty}, }}"))
}
// TODO
pub fn block_expr(
stmts: impl IntoIterator<Item = ast::Stmt>,
tail_expr: Option<ast::Expr>,
@@ -853,6 +852,10 @@ pub fn self_param() -> ast::SelfParam {
ast_from_text("fn f(&self) { }")
}
pub fn mut_self_param() -> ast::SelfParam {
ast_from_text("fn f(&mut self) { }")
}
pub fn ret_type(ty: ast::Type) -> ast::RetType {
ast_from_text(&format!("fn f() -> {ty} {{ }}"))
}
@@ -973,6 +976,11 @@ pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::T
ast_from_text(&format!("struct f({visibility}{ty});"))
}
pub fn variant_list(variants: impl IntoIterator<Item = ast::Variant>) -> ast::VariantList {
let variants = variants.into_iter().join(", ");
ast_from_text(&format!("enum f {{ {variants} }}"))
}
pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
let field_list = match field_list {
None => String::new(),
@@ -1037,6 +1045,19 @@ pub fn struct_(
ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",))
}
pub fn enum_(
visibility: Option<ast::Visibility>,
enum_name: ast::Name,
variant_list: ast::VariantList,
) -> ast::Enum {
let visibility = match visibility {
None => String::new(),
Some(it) => format!("{it} "),
};
ast_from_text(&format!("{visibility}enum {enum_name} {variant_list}"))
}
pub fn attr_outer(meta: ast::Meta) -> ast::Attr {
ast_from_text(&format!("#[{meta}]"))
}
@@ -1149,6 +1170,16 @@ pub mod tokens {
lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
}
pub fn ident(text: &str) -> SyntaxToken {
assert_eq!(text.trim(), text);
let path: ast::Path = super::ext::ident_path(text);
path.syntax()
.descendants_with_tokens()
.filter_map(|it| it.into_token())
.find(|it| it.kind() == IDENT)
.unwrap()
}
pub fn single_newline() -> SyntaxToken {
let res = SOURCE_FILE
.tree()
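
The make module gains constructors for mutable self parameters, variant lists, enums, and bare ident tokens. A hedged usage sketch for the new enum helpers: make::name is an existing helper in this module (not shown in the diff), and the rendered text in the comment is an assumption based on the format strings above:

    use syntax::ast::{self, make};

    fn color_enum() -> ast::Enum {
        let variants = make::variant_list([
            make::variant(make::name("Red"), None),
            make::variant(make::name("Green"), None),
        ]);
        // Should render roughly as: enum Color { Red, Green }
        make::enum_(None, make::name("Color"), variants)
    }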

@@ -2,6 +2,8 @@
use std::borrow::Cow;
use rustc_dependencies::lexer as rustc_lexer;
use rustc_lexer::unescape::{
unescape_byte, unescape_c_string, unescape_char, unescape_literal, CStrUnit, Mode,
};
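
Here (and in the validation.rs hunk at the end of this diff) the direct rustc_lexer import is routed through the rustc-dependencies facade, which exposes one stable path regardless of whether vendored crates.io copies or the in-tree rustc_private crates are in use. An illustrative sketch of that facade shape; the module layout and crate names are assumptions, not the actual rustc-dependencies source:

    // Facade crate sketch: one public module path, two backends behind a feature.
    #[cfg(not(feature = "in-rust-tree"))]
    pub mod lexer {
        pub use vendored_rustc_lexer::*; // hypothetical crates.io copy
    }

    #[cfg(feature = "in-rust-tree")]
    pub mod lexer {
        pub use rustc_lexer::*; // the rustc_private crate, via extern crate
    }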

@@ -19,6 +19,7 @@
//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
#[allow(unused)]
@@ -74,7 +75,7 @@ pub use smol_str::SmolStr;
#[derive(Debug, PartialEq, Eq)]
pub struct Parse<T> {
green: GreenNode,
errors: Arc<Vec<SyntaxError>>,
errors: Arc<[SyntaxError]>,
_ty: PhantomData<fn() -> T>,
}
@@ -86,7 +87,7 @@ impl<T> Clone for Parse<T> {
impl<T> Parse<T> {
fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
Parse { green, errors: Arc::new(errors), _ty: PhantomData }
Parse { green, errors: errors.into(), _ty: PhantomData }
}
pub fn syntax_node(&self) -> SyntaxNode {
@@ -106,7 +107,7 @@ impl<T: AstNode> Parse<T> {
T::cast(self.syntax_node()).unwrap()
}
pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
pub fn ok(self) -> Result<T, Arc<[SyntaxError]>> {
if self.errors.is_empty() {
Ok(self.tree())
} else {
@@ -143,7 +144,7 @@ impl Parse<SourceFile> {
parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
|(green_node, errors, _reparsed_range)| Parse {
green: green_node,
errors: Arc::new(errors),
errors: errors.into(),
_ty: PhantomData,
},
)
@@ -167,7 +168,7 @@ impl SourceFile {
errors.extend(validation::validate(&root));
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
Parse { green, errors: Arc::new(errors), _ty: PhantomData }
Parse { green, errors: errors.into(), _ty: PhantomData }
}
}
@@ -181,29 +182,27 @@ impl ast::TokenTree {
let kind = t.kind();
if kind.is_trivia() {
was_joint = false
} else if kind == SyntaxKind::IDENT {
let token_text = t.text();
let contextual_kw =
SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
parser_input.push_ident(contextual_kw);
} else {
if kind == SyntaxKind::IDENT {
let token_text = t.text();
let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
.unwrap_or(SyntaxKind::IDENT);
parser_input.push_ident(contextual_kw);
} else {
if was_joint {
if was_joint {
parser_input.was_joint();
}
parser_input.push(kind);
// Tag the token as joint if it is float with a fractional part
// we use this jointness to inform the parser about what token split
// event to emit when we encounter a float literal in a field access
if kind == SyntaxKind::FLOAT_NUMBER {
if !t.text().ends_with('.') {
parser_input.was_joint();
}
parser_input.push(kind);
// Tag the token as joint if it is float with a fractional part
// we use this jointness to inform the parser about what token split
// event to emit when we encounter a float literal in a field access
if kind == SyntaxKind::FLOAT_NUMBER {
if !t.text().ends_with('.') {
parser_input.was_joint();
} else {
was_joint = false;
}
} else {
was_joint = true;
was_joint = false;
}
} else {
was_joint = true;
}
}
}
@@ -276,7 +275,7 @@ impl ast::TokenTree {
let (green, errors) = builder.finish_raw();
Parse { green, errors: Arc::new(errors), _ty: PhantomData }
Parse { green, errors: errors.into(), _ty: PhantomData }
}
}
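
The representation change above swaps Parse<T>'s error storage from Arc<Vec<SyntaxError>> to Arc<[SyntaxError]>: the slice lives directly in the Arc allocation instead of behind the Vec's extra heap buffer, and the error list is immutable after parsing anyway. A standalone sketch of the conversion (std only; the error type here is a stand-in):

    use std::sync::Arc;

    fn main() {
        let errors: Vec<String> = vec!["unexpected token".to_owned()];
        // From<Vec<T>> for Arc<[T]> moves the elements into the Arc's own
        // allocation, so `.into()` is all the call sites above need.
        let shared: Arc<[String]> = errors.into();
        assert_eq!(shared.len(), 1);
    }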

@@ -73,6 +73,10 @@ impl<N: AstNode> AstPtr<N> {
Some(AstPtr { raw: self.raw, _ty: PhantomData })
}
pub fn kind(&self) -> parser::SyntaxKind {
self.raw.kind()
}
pub fn upcast<M: AstNode>(self) -> AstPtr<M>
where
N: Into<M>,
@@ -84,6 +88,20 @@ impl<N: AstNode> AstPtr<N> {
pub fn try_from_raw(raw: SyntaxNodePtr) -> Option<AstPtr<N>> {
N::can_cast(raw.kind()).then_some(AstPtr { raw, _ty: PhantomData })
}
pub fn wrap_left<R>(self) -> AstPtr<either::Either<N, R>>
where
either::Either<N, R>: AstNode,
{
AstPtr { raw: self.raw, _ty: PhantomData }
}
pub fn wrap_right<L>(self) -> AstPtr<either::Either<L, N>>
where
either::Either<L, N>: AstNode,
{
AstPtr { raw: self.raw, _ty: PhantomData }
}
}
impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
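
kind() exposes the pointed-to node's SyntaxKind, and wrap_left/wrap_right widen an AstPtr<N> into an AstPtr<Either<..>> without touching the underlying raw pointer. A hedged usage sketch; it assumes, as the where-clauses above require, that Either of two AstNode types itself implements AstNode:

    use either::Either;
    use syntax::{ast, AstPtr};

    // Widen a pointer to a struct into a pointer that may be a struct or an enum.
    fn widen(ptr: AstPtr<ast::Struct>) -> AstPtr<Either<ast::Struct, ast::Enum>> {
        ptr.wrap_left::<ast::Enum>()
    }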

@@ -17,11 +17,11 @@ use crate::{ast, fuzz, AstNode, SourceFile, SyntaxError};
#[test]
fn parse_smoke_test() {
let code = r##"
let code = r#"
fn main() {
println!("Hello, world!")
}
"##;
"#;
let parse = SourceFile::parse(code);
// eprintln!("{:#?}", parse.syntax_node());
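
The smoke test's raw string is downgraded from r##"…"## to r#"…"#, which is enough because its body never contains the "# terminator sequence. A standalone illustration (std only):

    fn main() {
        // One '#' suffices unless the body itself contains the "# sequence.
        let simple = r#"println!("Hello, world!")"#;
        let tricky = r##"this body contains "# and so needs two hashes"##;
        println!("{simple}\n{tricky}");
    }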

@@ -622,7 +622,7 @@ fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
}
fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
if lower_seperated_list(acc, grammar, label, rule) {
if lower_separated_list(acc, grammar, label, rule) {
return;
}
@@ -688,7 +688,7 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
}
// (T (',' T)* ','?)
fn lower_seperated_list(
fn lower_separated_list(
acc: &mut Vec<Field>,
grammar: &Grammar,
label: Option<&String>,

@@ -5,7 +5,7 @@
mod block;
use rowan::Direction;
use rustc_lexer::unescape::{self, unescape_literal, Mode};
use rustc_dependencies::lexer::unescape::{self, unescape_literal, Mode};
use crate::{
algo,