internal: overhaul code generation

* Keep codegen adjacent to the relevant crates.
* Remove codegen deps from xtask, speeding up from-source installation.

This regresses the release process a bit, as it now needs to run the
tests (and, by extension, compile the code).
Aleksey Kladov 2021-07-03 22:11:03 +03:00
parent 668d061245
commit 58d2ece88a
31 changed files with 686 additions and 659 deletions
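The new scheme turns each generator into a test: the generator builds the file contents in memory and hands them to `ensure_file_contents`, which fails if the checked-in file is stale. This is why releasing now requires running the tests. A minimal sketch of that pattern, assuming a helper along the lines of the `ensure_file_contents` used in the files below:

use std::{fs, path::Path};

/// Assert that `file` already holds `contents`; otherwise rewrite it and
/// fail, so the next `cargo test` run passes with the refreshed file.
fn ensure_file_contents(file: &Path, contents: &str) {
    let on_disk = fs::read_to_string(file).unwrap_or_default();
    if on_disk != contents {
        fs::write(file, contents).expect("can't update generated file");
        panic!("{} was stale and has been regenerated", file.display());
    }
}

#[test]
fn generated_code_is_fresh() {
    // In the real generators, `contents` comes from the grammar, doc
    // comments, etc.; a literal stands in here.
    let contents = "// generated\n";
    ensure_file_contents(Path::new("src/generated.rs"), contents);
}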

@@ -1,167 +0,0 @@
//! Generates `generated_assists.adoc` documentation and the assist doc-tests.
use std::{fmt, path::Path};
use xshell::write_file;
use crate::{
codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, PREAMBLE},
project_root, rust_files_in, Result,
};
pub(crate) fn generate_assists_tests() -> Result<()> {
let assists = Assist::collect()?;
generate_tests(&assists)
}
pub(crate) fn generate_assists_docs() -> Result<()> {
let assists = Assist::collect()?;
let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
let dst = project_root().join("docs/user/generated_assists.adoc");
write_file(dst, &contents)?;
Ok(())
}
#[derive(Debug)]
struct Assist {
id: String,
location: Location,
doc: String,
before: String,
after: String,
}
impl Assist {
fn collect() -> Result<Vec<Assist>> {
let mut res = Vec::new();
for path in rust_files_in(&project_root().join("crates/ide_assists/src/handlers")) {
collect_file(&mut res, path.as_path())?;
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
return Ok(res);
fn collect_file(acc: &mut Vec<Assist>, path: &Path) -> Result<()> {
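// Each assist handler documents itself with a comment block of this shape
// (illustrative, modeled on `flip_comma`; `$0` marks the cursor position):
//
//     // Assist: flip_comma
//     //
//     // Flips two comma-separated items.
//     //
//     // ```
//     // fn main() { ((1, 2),$0 (3, 4)); }
//     // ```
//     // ->
//     // ```
//     // fn main() { ((3, 4), (1, 2)); }
//     // ```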
let text = xshell::read_file(path)?;
let comment_blocks = extract_comment_blocks_with_empty_lines("Assist", &text);
for block in comment_blocks {
// FIXME: doesn't support blank lines yet, need to tweak
// `extract_comment_blocks` for that.
let id = block.id;
assert!(
id.chars().all(|it| it.is_ascii_lowercase() || it == '_'),
"invalid assist id: {:?}",
id
);
let mut lines = block.contents.iter();
let doc = take_until(lines.by_ref(), "```").trim().to_string();
assert!(
doc.chars().next().unwrap().is_ascii_uppercase() && doc.ends_with('.'),
"\n\n{}: assist docs should be proper sentences, with capitalization and a full stop at the end.\n\n{}\n\n",
id, doc,
);
let before = take_until(lines.by_ref(), "```");
assert_eq!(lines.next().unwrap().as_str(), "->");
assert_eq!(lines.next().unwrap().as_str(), "```");
let after = take_until(lines.by_ref(), "```");
let location = Location::new(path.to_path_buf(), block.line);
acc.push(Assist { id, location, doc, before, after })
}
fn take_until<'a>(lines: impl Iterator<Item = &'a String>, marker: &str) -> String {
let mut buf = Vec::new();
for line in lines {
if line == marker {
break;
}
buf.push(line.clone());
}
buf.join("\n")
}
Ok(())
}
}
}
impl fmt::Display for Assist {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let before = self.before.replace("$0", "┃"); // Unicode pseudo-graphics bar
let after = self.after.replace("$0", "┃");
writeln!(
f,
"[discrete]\n=== `{}`
**Source:** {}
{}
.Before
```rust
{}```
.After
```rust
{}```",
self.id,
self.location,
self.doc,
hide_hash_comments(&before),
hide_hash_comments(&after)
)
}
}
fn generate_tests(assists: &[Assist]) -> Result<()> {
let mut buf = String::from("use super::check_doc_test;\n");
for assist in assists.iter() {
let test = format!(
r######"
#[test]
fn doctest_{}() {{
check_doc_test(
"{}",
r#####"
{}"#####, r#####"
{}"#####)
}}
"######,
assist.id,
assist.id,
reveal_hash_comments(&assist.before),
reveal_hash_comments(&assist.after)
);
buf.push_str(&test)
}
let buf = reformat(&buf)?;
codegen::ensure_file_contents(
&project_root().join("crates/ide_assists/src/tests/generated.rs"),
&buf,
)
}
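// Lines starting with `# ` follow the rustdoc hidden-line convention: they
// are stripped from the rendered docs (`hide_hash_comments`) but restored in
// the generated doc-tests (`reveal_hash_comments`), so examples can carry
// setup code without cluttering the manual.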
fn hide_hash_comments(text: &str) -> String {
text.split('\n') // want final newline
.filter(|&it| !(it.starts_with("# ") || it == "#"))
.map(|it| format!("{}\n", it))
.collect()
}
fn reveal_hash_comments(text: &str) -> String {
text.split('\n') // want final newline
.map(|it| {
if let Some(stripped) = it.strip_prefix("# ") {
stripped
} else if it == "#" {
""
} else {
it
}
})
.map(|it| format!("{}\n", it))
.collect()
}

@@ -1,76 +0,0 @@
//! Generates `generated_diagnostic.adoc` documentation.
use std::{fmt, path::PathBuf};
use xshell::write_file;
use crate::{
codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
project_root, rust_files, Result,
};
pub(crate) fn generate_diagnostic_docs() -> Result<()> {
let diagnostics = Diagnostic::collect()?;
let contents =
diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
let dst = project_root().join("docs/user/generated_diagnostic.adoc");
write_file(&dst, &contents)?;
Ok(())
}
#[derive(Debug)]
struct Diagnostic {
id: String,
location: Location,
doc: String,
}
impl Diagnostic {
fn collect() -> Result<Vec<Diagnostic>> {
let mut res = Vec::new();
for path in rust_files() {
collect_file(&mut res, path)?;
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
return Ok(res);
fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> Result<()> {
let text = xshell::read_file(&path)?;
let comment_blocks = extract_comment_blocks_with_empty_lines("Diagnostic", &text);
for block in comment_blocks {
let id = block.id;
if let Err(msg) = is_valid_diagnostic_name(&id) {
panic!("invalid diagnostic name: {:?}:\n {}", id, msg)
}
let doc = block.contents.join("\n");
let location = Location::new(path.clone(), block.line);
acc.push(Diagnostic { id, location, doc })
}
Ok(())
}
}
}
fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
let diagnostic = diagnostic.trim();
if diagnostic.find(char::is_whitespace).is_some() {
return Err("Diagnostic names can't contain whitespace symbols".into());
}
if diagnostic.chars().any(|c| c.is_ascii_uppercase()) {
return Err("Diagnostic names can't contain uppercase symbols".into());
}
if diagnostic.chars().any(|c| !c.is_ascii()) {
return Err("Diagnostic can't contain non-ASCII symbols".into());
}
Ok(())
}
impl fmt::Display for Diagnostic {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
}
}

@@ -1,79 +0,0 @@
//! Generates `generated_features.adoc` documentation.
use std::{fmt, path::PathBuf};
use xshell::write_file;
use crate::{
codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
project_root, rust_files, Result,
};
pub(crate) fn generate_feature_docs() -> Result<()> {
let features = Feature::collect()?;
let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
let dst = project_root().join("docs/user/generated_features.adoc");
write_file(&dst, &contents)?;
Ok(())
}
#[derive(Debug)]
struct Feature {
id: String,
location: Location,
doc: String,
}
impl Feature {
fn collect() -> Result<Vec<Feature>> {
let mut res = Vec::new();
for path in rust_files() {
collect_file(&mut res, path)?;
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
return Ok(res);
fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> Result<()> {
let text = xshell::read_file(&path)?;
let comment_blocks = extract_comment_blocks_with_empty_lines("Feature", &text);
for block in comment_blocks {
let id = block.id;
if let Err(msg) = is_valid_feature_name(&id) {
panic!("invalid feature name: {:?}:\n {}", id, msg)
}
let doc = block.contents.join("\n");
let location = Location::new(path.clone(), block.line);
acc.push(Feature { id, location, doc })
}
Ok(())
}
}
}
fn is_valid_feature_name(feature: &str) -> Result<(), String> {
'word: for word in feature.split_whitespace() {
for &short in ["to", "and"].iter() {
if word == short {
continue 'word;
}
}
for &short in ["To", "And"].iter() {
if word == short {
return Err(format!("Don't capitalize {:?}", word));
}
}
if !word.starts_with(char::is_uppercase) {
return Err(format!("Capitalize {:?}", word));
}
}
Ok(())
}
impl fmt::Display for Feature {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
}
}

@@ -1,170 +0,0 @@
//! Generates descriptor structures for unstable features from the Unstable Book.
use std::borrow::Cow;
use std::fmt::Write;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
use xshell::{cmd, read_file};
use crate::codegen::{ensure_file_contents, project_root, reformat, Result};
pub(crate) fn generate_lint_completions() -> Result<()> {
if !project_root().join("./target/rust").exists() {
cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
}
let mut contents = String::from(
r#"pub struct Lint {
pub label: &'static str,
pub description: &'static str,
}
"#,
);
generate_lint_descriptor(&mut contents)?;
contents.push('\n');
generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?;
contents.push('\n');
cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
let contents = reformat(&contents)?;
let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
ensure_file_contents(destination.as_path(), &contents)?;
Ok(())
}
fn generate_lint_descriptor(buf: &mut String) -> Result<()> {
let stdout = cmd!("rustc -W help").read()?;
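// Assumed shape of `rustc -W help` output (the marker strings below are
// offsets into it):
//
//     name  default  meaning
//     ----  -------  -------
//     ... lints ...
//
//     Lint groups provided by rustc:
//
//     name  sub-lints
//     ----  ---------
//     ... groups ...
//
//     Lint tools like Clippy can provide additional lints and lint groups.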
let start_lints =
stdout.find("----  -------  -------").ok_or_else(|| anyhow::format_err!("can't find the start of the lint table in `rustc -W help`"))?;
let start_lint_groups =
stdout.find("----  ---------").ok_or_else(|| anyhow::format_err!("can't find the start of the lint-group table in `rustc -W help`"))?;
let end_lints =
stdout.find("Lint groups provided by rustc:").ok_or_else(|| anyhow::format_err!("can't find the end of the lint table in `rustc -W help`"))?;
let end_lint_groups = stdout
.find("Lint tools like Clippy can provide additional lints and lint groups.")
.ok_or_else(|| anyhow::format_err!("can't find the end of the lint-group table in `rustc -W help`"))?;
buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
buf.push('\n');
let mut lints = stdout[start_lints..end_lints]
.lines()
.skip(1)
.filter(|l| !l.is_empty())
.map(|line| {
let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
let (_default_level, description) =
rest.trim().split_once(char::is_whitespace).unwrap();
(name.trim(), Cow::Borrowed(description.trim()))
})
.collect::<Vec<_>>();
lints.extend(
stdout[start_lint_groups..end_lint_groups].lines().skip(1).filter(|l| !l.is_empty()).map(
|line| {
let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
(name.trim(), format!("lint group for: {}", lints.trim()).into())
},
),
);
lints.sort_by(|(ident, _), (ident2, _)| ident.cmp(ident2));
lints.into_iter().for_each(|(name, description)| {
push_lint_completion(buf, &name.replace("-", "_"), &description)
});
buf.push_str("];\n");
Ok(())
}
fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
buf.push('\n');
let mut vec = ["language-features", "library-features"]
.iter()
.flat_map(|it| WalkDir::new(src_dir.join(it)))
.filter_map(|e| e.ok())
.filter(|entry| {
// Get all `.md` files
entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
})
.map(|entry| {
let path = entry.path();
let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
let doc = read_file(path).unwrap();
(feature_ident, doc)
})
.collect::<Vec<_>>();
vec.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
vec.into_iter()
.for_each(|(feature_ident, doc)| push_lint_completion(buf, &feature_ident, &doc));
buf.push_str("];\n");
Ok(())
}
#[derive(Default)]
struct ClippyLint {
help: String,
id: String,
}
fn unescape(s: &str) -> String {
s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
}
fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
let file_content = read_file(path)?;
let mut clippy_lints: Vec<ClippyLint> = vec![];
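// Scan the pretty-printed JSON line by line instead of pulling in a JSON
// parser; this assumes `"id"` and `"What it does"` each occupy one line,
// as they do in the published lints.json.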
for line in file_content.lines().map(|line| line.trim()) {
if line.starts_with(r#""id":"#) {
let clippy_lint = ClippyLint {
id: line
.strip_prefix(r#""id": ""#)
.expect("should be prefixed by id")
.strip_suffix(r#"","#)
.expect("should be suffixed by comma")
.into(),
help: String::new(),
};
clippy_lints.push(clippy_lint)
} else if line.starts_with(r#""What it does":"#) {
// Typical line to strip: "What it does": "Here is my useful content",
let prefix_to_strip = r#""What it does": ""#;
let suffix_to_strip = r#"","#;
let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist");
clippy_lint.help = line
.strip_prefix(prefix_to_strip)
.expect("should be prefixed by what it does")
.strip_suffix(suffix_to_strip)
.map(unescape)
.expect("should be suffixed by comma");
}
}
clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
buf.push('\n');
clippy_lints.into_iter().for_each(|clippy_lint| {
let lint_ident = format!("clippy::{}", clippy_lint.id);
let doc = clippy_lint.help;
push_lint_completion(buf, &lint_ident, &doc);
});
buf.push_str("];\n");
Ok(())
}
fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
writeln!(
buf,
r###" Lint {{
label: "{}",
description: r##"{}"##
}},"###,
label, description
)
.unwrap();
}

@@ -1,132 +0,0 @@
//! This module greps the parser's code for specially formatted comments and turns
//! them into tests.
use std::{
collections::HashMap,
fs, iter,
path::{Path, PathBuf},
};
use crate::{
codegen::{ensure_file_contents, extract_comment_blocks},
project_root, Result,
};
pub(crate) fn generate_parser_tests() -> Result<()> {
let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?;
fn install_tests(tests: &HashMap<String, Test>, into: &str) -> Result<()> {
let tests_dir = project_root().join(into);
if !tests_dir.is_dir() {
fs::create_dir_all(&tests_dir)?;
}
// ok is never actually read, but it needs to be specified to create a Test in existing_tests
let existing = existing_tests(&tests_dir, true)?;
for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
panic!("Test is deleted: {}", t);
}
let mut new_idx = existing.len() + 1;
for (name, test) in tests {
let path = match existing.get(name) {
Some((path, _test)) => path.clone(),
None => {
let file_name = format!("{:04}_{}.rs", new_idx, name);
new_idx += 1;
tests_dir.join(file_name)
}
};
ensure_file_contents(&path, &test.text)?;
}
Ok(())
}
install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok")?;
install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err")
}
#[derive(Debug)]
struct Test {
name: String,
text: String,
ok: bool,
}
#[derive(Default, Debug)]
struct Tests {
ok: HashMap<String, Test>,
err: HashMap<String, Test>,
}
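// Parser tests are special comments in the grammar code (illustrative):
//
//     // test block_expr
//     // fn f() { {}; }
//
// The first line names the test, the rest is the snippet to parse;
// `test_err` marks snippets that must fail to parse.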
fn collect_tests(s: &str) -> Vec<Test> {
let mut res = Vec::new();
for comment_block in extract_comment_blocks(s) {
let first_line = &comment_block[0];
let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
(name.to_string(), true)
} else if let Some(name) = first_line.strip_prefix("test_err ") {
(name.to_string(), false)
} else {
continue;
};
let text: String = comment_block[1..]
.iter()
.cloned()
.chain(iter::once(String::new()))
.collect::<Vec<_>>()
.join("\n");
assert!(!text.trim().is_empty() && text.ends_with('\n'));
res.push(Test { name, text, ok })
}
res
}
fn tests_from_dir(dir: &Path) -> Result<Tests> {
let mut res = Tests::default();
for entry in ::walkdir::WalkDir::new(dir) {
let entry = entry.unwrap();
if !entry.file_type().is_file() {
continue;
}
if entry.path().extension().unwrap_or_default() != "rs" {
continue;
}
process_file(&mut res, entry.path())?;
}
let grammar_rs = dir.parent().unwrap().join("grammar.rs");
process_file(&mut res, &grammar_rs)?;
return Ok(res);
fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
let text = fs::read_to_string(path)?;
for test in collect_tests(&text) {
if test.ok {
if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
anyhow::bail!("Duplicate test: {}", old_test.name);
}
} else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
anyhow::bail!("Duplicate test: {}", old_test.name);
}
}
Ok(())
}
}
fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
let mut res = HashMap::new();
for file in fs::read_dir(dir)? {
let file = file?;
let path = file.path();
if path.extension().unwrap_or_default() != "rs" {
continue;
}
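// File names follow the `NNNN_test_name.rs` pattern, so drop the four-digit
// index plus `_` (5 bytes) and the trailing `.rs` (3 bytes) to get the name.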
let name = {
let file_name = path.file_name().unwrap().to_str().unwrap();
file_name[5..file_name.len() - 3].to_string()
};
let text = xshell::read_file(&path)?;
let test = Test { name: name.clone(), text, ok };
if let Some(old) = res.insert(name, (path, test)) {
println!("Duplicate test: {:?}", old);
}
}
Ok(res)
}

@@ -1,747 +0,0 @@
//! This module generates the AST datatypes used by rust-analyzer.
//!
//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
//! wrappers around `SyntaxNode` which implement `syntax::AstNode`.
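//!
//! For a node `Foo`, the generated wrapper looks roughly like this
//! (illustrative; the real output is produced by `generate_nodes` below):
//!
//!     pub struct Foo { pub(crate) syntax: SyntaxNode }
//!     impl AstNode for Foo {
//!         fn can_cast(kind: SyntaxKind) -> bool { kind == FOO }
//!         fn cast(syntax: SyntaxNode) -> Option<Self> { /* kind check + wrap */ }
//!         fn syntax(&self) -> &SyntaxNode { &self.syntax }
//!     }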
use std::{
collections::{BTreeSet, HashSet},
fmt::Write,
};
use proc_macro2::{Punct, Spacing};
use quote::{format_ident, quote};
use ungrammar::{rust_grammar, Grammar, Rule};
use crate::{
ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC},
codegen::{ensure_file_contents, reformat},
project_root, Result,
};
pub(crate) fn generate_syntax() -> Result<()> {
let grammar = rust_grammar();
let ast = lower(&grammar);
let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs");
let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?;
ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds)?;
let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs");
let contents = generate_tokens(&ast)?;
ensure_file_contents(ast_tokens_file.as_path(), &contents)?;
let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs");
let contents = generate_nodes(KINDS_SRC, &ast)?;
ensure_file_contents(ast_nodes_file.as_path(), &contents)?;
Ok(())
}
fn generate_tokens(grammar: &AstSrc) -> Result<String> {
let tokens = grammar.tokens.iter().map(|token| {
let name = format_ident!("{}", token);
let kind = format_ident!("{}", to_upper_snake_case(token));
quote! {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct #name {
pub(crate) syntax: SyntaxToken,
}
impl std::fmt::Display for #name {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.syntax, f)
}
}
impl AstToken for #name {
fn can_cast(kind: SyntaxKind) -> bool { kind == #kind }
fn cast(syntax: SyntaxToken) -> Option<Self> {
if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
}
fn syntax(&self) -> &SyntaxToken { &self.syntax }
}
}
});
let pretty = reformat(
&quote! {
use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
#(#tokens)*
}
.to_string(),
)?
.replace("#[derive", "\n#[derive");
Ok(pretty)
}
fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
.nodes
.iter()
.map(|node| {
let name = format_ident!("{}", node.name);
let kind = format_ident!("{}", to_upper_snake_case(&node.name));
let traits = node.traits.iter().map(|trait_name| {
let trait_name = format_ident!("{}", trait_name);
quote!(impl ast::#trait_name for #name {})
});
let methods = node.fields.iter().map(|field| {
let method_name = field.method_name();
let ty = field.ty();
if field.is_many() {
quote! {
pub fn #method_name(&self) -> AstChildren<#ty> {
support::children(&self.syntax)
}
}
} else if let Some(token_kind) = field.token_kind() {
quote! {
pub fn #method_name(&self) -> Option<#ty> {
support::token(&self.syntax, #token_kind)
}
}
} else {
quote! {
pub fn #method_name(&self) -> Option<#ty> {
support::child(&self.syntax)
}
}
}
});
(
quote! {
#[pretty_doc_comment_placeholder_workaround]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct #name {
pub(crate) syntax: SyntaxNode,
}
#(#traits)*
impl #name {
#(#methods)*
}
},
quote! {
impl AstNode for #name {
fn can_cast(kind: SyntaxKind) -> bool {
kind == #kind
}
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
},
)
})
.unzip();
let (enum_defs, enum_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
.enums
.iter()
.map(|en| {
let variants: Vec<_> = en.variants.iter().map(|var| format_ident!("{}", var)).collect();
let name = format_ident!("{}", en.name);
let kinds: Vec<_> = variants
.iter()
.map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
.collect();
let traits = en.traits.iter().map(|trait_name| {
let trait_name = format_ident!("{}", trait_name);
quote!(impl ast::#trait_name for #name {})
});
let ast_node = if en.name == "Stmt" {
quote! {}
} else {
quote! {
impl AstNode for #name {
fn can_cast(kind: SyntaxKind) -> bool {
match kind {
#(#kinds)|* => true,
_ => false,
}
}
fn cast(syntax: SyntaxNode) -> Option<Self> {
let res = match syntax.kind() {
#(
#kinds => #name::#variants(#variants { syntax }),
)*
_ => return None,
};
Some(res)
}
fn syntax(&self) -> &SyntaxNode {
match self {
#(
#name::#variants(it) => &it.syntax,
)*
}
}
}
}
};
(
quote! {
#[pretty_doc_comment_placeholder_workaround]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum #name {
#(#variants(#variants),)*
}
#(#traits)*
},
quote! {
#(
impl From<#variants> for #name {
fn from(node: #variants) -> #name {
#name::#variants(node)
}
}
)*
#ast_node
},
)
})
.unzip();
let enum_names = grammar.enums.iter().map(|it| &it.name);
let node_names = grammar.nodes.iter().map(|it| &it.name);
let display_impls =
enum_names.chain(node_names.clone()).map(|it| format_ident!("{}", it)).map(|name| {
quote! {
impl std::fmt::Display for #name {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
}
});
let defined_nodes: HashSet<_> = node_names.collect();
for node in kinds
.nodes
.iter()
.map(|kind| to_pascal_case(kind))
.filter(|name| !defined_nodes.iter().any(|&it| it == name))
{
drop(node)
// TODO: restore this
// eprintln!("Warning: node {} not defined in ast source", node);
}
let ast = quote! {
use crate::{
SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
ast::{self, AstNode, AstChildren, support},
T,
};
#(#node_defs)*
#(#enum_defs)*
#(#node_boilerplate_impls)*
#(#enum_boilerplate_impls)*
#(#display_impls)*
};
let ast = ast.to_string().replace("T ! [", "T![");
let mut res = String::with_capacity(ast.len() * 2);
let mut docs =
grammar.nodes.iter().map(|it| &it.doc).chain(grammar.enums.iter().map(|it| &it.doc));
for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
res.push_str(chunk);
if let Some(doc) = docs.next() {
write_doc_comment(doc, &mut res);
}
}
let pretty = reformat(&res)?;
Ok(pretty)
}
fn write_doc_comment(contents: &[String], dest: &mut String) {
for line in contents {
writeln!(dest, "///{}", line).unwrap();
}
}
fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
.punct
.iter()
.filter(|(token, _name)| token.len() == 1)
.map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
.unzip();
let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
if "{}[]()".contains(token) {
let c = token.chars().next().unwrap();
quote! { #c }
} else {
let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
quote! { #(#cs)* }
}
});
let punctuation =
grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();
let full_keywords_values = &grammar.keywords;
let full_keywords =
full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
let all_keywords_values =
grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
let all_keywords = all_keywords_values
.iter()
.map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
.collect::<Vec<_>>();
let literals =
grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
let ast = quote! {
#![allow(bad_style, missing_docs, unreachable_pub)]
/// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[repr(u16)]
pub enum SyntaxKind {
// Technical SyntaxKinds: they appear temporarily during parsing,
// but never end up in the final tree
#[doc(hidden)]
TOMBSTONE,
#[doc(hidden)]
EOF,
#(#punctuation,)*
#(#all_keywords,)*
#(#literals,)*
#(#tokens,)*
#(#nodes,)*
// Technical kind so that we can cast from u16 safely
#[doc(hidden)]
__LAST,
}
use self::SyntaxKind::*;
impl SyntaxKind {
pub fn is_keyword(self) -> bool {
match self {
#(#all_keywords)|* => true,
_ => false,
}
}
pub fn is_punct(self) -> bool {
match self {
#(#punctuation)|* => true,
_ => false,
}
}
pub fn is_literal(self) -> bool {
match self {
#(#literals)|* => true,
_ => false,
}
}
pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
let kw = match ident {
#(#full_keywords_values => #full_keywords,)*
_ => return None,
};
Some(kw)
}
pub fn from_char(c: char) -> Option<SyntaxKind> {
let tok = match c {
#(#single_byte_tokens_values => #single_byte_tokens,)*
_ => return None,
};
Some(tok)
}
}
#[macro_export]
macro_rules! T {
#([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
#([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
[lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT };
[ident] => { $crate::SyntaxKind::IDENT };
[shebang] => { $crate::SyntaxKind::SHEBANG };
}
};
reformat(&ast.to_string())
}
fn to_upper_snake_case(s: &str) -> String {
let mut buf = String::with_capacity(s.len());
let mut prev = false;
for c in s.chars() {
if c.is_ascii_uppercase() && prev {
buf.push('_')
}
prev = true;
buf.push(c.to_ascii_uppercase());
}
buf
}
fn to_lower_snake_case(s: &str) -> String {
let mut buf = String::with_capacity(s.len());
let mut prev = false;
for c in s.chars() {
if c.is_ascii_uppercase() && prev {
buf.push('_')
}
prev = true;
buf.push(c.to_ascii_lowercase());
}
buf
}
fn to_pascal_case(s: &str) -> String {
let mut buf = String::with_capacity(s.len());
let mut prev_is_underscore = true;
for c in s.chars() {
if c == '_' {
prev_is_underscore = true;
} else if prev_is_underscore {
buf.push(c.to_ascii_uppercase());
prev_is_underscore = false;
} else {
buf.push(c.to_ascii_lowercase());
}
}
buf
}
fn pluralize(s: &str) -> String {
format!("{}s", s)
}
impl Field {
fn is_many(&self) -> bool {
matches!(self, Field::Node { cardinality: Cardinality::Many, .. })
}
fn token_kind(&self) -> Option<proc_macro2::TokenStream> {
match self {
Field::Token(token) => {
let token: proc_macro2::TokenStream = token.parse().unwrap();
Some(quote! { T![#token] })
}
_ => None,
}
}
fn method_name(&self) -> proc_macro2::Ident {
match self {
Field::Token(name) => {
let name = match name.as_str() {
";" => "semicolon",
"->" => "thin_arrow",
"'{'" => "l_curly",
"'}'" => "r_curly",
"'('" => "l_paren",
"')'" => "r_paren",
"'['" => "l_brack",
"']'" => "r_brack",
"<" => "l_angle",
">" => "r_angle",
"=" => "eq",
"!" => "excl",
"*" => "star",
"&" => "amp",
"_" => "underscore",
"." => "dot",
".." => "dotdot",
"..." => "dotdotdot",
"..=" => "dotdoteq",
"=>" => "fat_arrow",
"@" => "at",
":" => "colon",
"::" => "coloncolon",
"#" => "pound",
"?" => "question_mark",
"," => "comma",
"|" => "pipe",
_ => name,
};
format_ident!("{}_token", name)
}
Field::Node { name, .. } => {
if name == "type" {
format_ident!("ty")
} else {
format_ident!("{}", name)
}
}
}
}
fn ty(&self) -> proc_macro2::Ident {
match self {
Field::Token(_) => format_ident!("SyntaxToken"),
Field::Node { ty, .. } => format_ident!("{}", ty),
}
}
}
fn lower(grammar: &Grammar) -> AstSrc {
let mut res = AstSrc::default();
res.tokens = "Whitespace Comment String ByteString IntNumber FloatNumber"
.split_ascii_whitespace()
.map(|it| it.to_string())
.collect::<Vec<_>>();
let nodes = grammar.iter().collect::<Vec<_>>();
for &node in &nodes {
let name = grammar[node].name.clone();
let rule = &grammar[node].rule;
match lower_enum(grammar, rule) {
Some(variants) => {
let enum_src = AstEnumSrc { doc: Vec::new(), name, traits: Vec::new(), variants };
res.enums.push(enum_src);
}
None => {
let mut fields = Vec::new();
lower_rule(&mut fields, grammar, None, rule);
res.nodes.push(AstNodeSrc { doc: Vec::new(), name, traits: Vec::new(), fields });
}
}
}
deduplicate_fields(&mut res);
extract_enums(&mut res);
extract_struct_traits(&mut res);
extract_enum_traits(&mut res);
res
}
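// A grammar rule lowers to an enum only when it is a plain alternation of
// node rules (stray `;` tokens are tolerated); everything else becomes a
// struct whose fields `lower_rule` accumulates.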
fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
let alternatives = match rule {
Rule::Alt(it) => it,
_ => return None,
};
let mut variants = Vec::new();
for alternative in alternatives {
match alternative {
Rule::Node(it) => variants.push(grammar[*it].name.clone()),
Rule::Token(it) if grammar[*it].name == ";" => (),
_ => return None,
}
}
Some(variants)
}
fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
if lower_comma_list(acc, grammar, label, rule) {
return;
}
match rule {
Rule::Node(node) => {
let ty = grammar[*node].name.clone();
let name = label.cloned().unwrap_or_else(|| to_lower_snake_case(&ty));
let field = Field::Node { name, ty, cardinality: Cardinality::Optional };
acc.push(field);
}
Rule::Token(token) => {
assert!(label.is_none());
let mut name = grammar[*token].name.clone();
if name != "int_number" && name != "string" {
if "[]{}()".contains(&name) {
name = format!("'{}'", name);
}
let field = Field::Token(name);
acc.push(field);
}
}
Rule::Rep(inner) => {
if let Rule::Node(node) = &**inner {
let ty = grammar[*node].name.clone();
let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
let field = Field::Node { name, ty, cardinality: Cardinality::Many };
acc.push(field);
return;
}
todo!("{:?}", rule)
}
Rule::Labeled { label: l, rule } => {
assert!(label.is_none());
let manually_implemented = matches!(
l.as_str(),
"lhs"
| "rhs"
| "then_branch"
| "else_branch"
| "start"
| "end"
| "op"
| "index"
| "base"
| "value"
| "trait"
| "self_ty"
);
if manually_implemented {
return;
}
lower_rule(acc, grammar, Some(l), rule);
}
Rule::Seq(rules) | Rule::Alt(rules) => {
for rule in rules {
lower_rule(acc, grammar, label, rule)
}
}
Rule::Opt(rule) => lower_rule(acc, grammar, label, rule),
}
}
// (T (',' T)* ','?)
fn lower_comma_list(
acc: &mut Vec<Field>,
grammar: &Grammar,
label: Option<&String>,
rule: &Rule,
) -> bool {
let rule = match rule {
Rule::Seq(it) => it,
_ => return false,
};
let (node, repeat, trailing_comma) = match rule.as_slice() {
[Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_comma)] => {
(node, repeat, trailing_comma)
}
_ => return false,
};
let repeat = match &**repeat {
Rule::Seq(it) => it,
_ => return false,
};
match repeat.as_slice() {
[comma, Rule::Node(n)] if comma == &**trailing_comma && n == node => (),
_ => return false,
}
let ty = grammar[*node].name.clone();
let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
let field = Field::Node { name, ty, cardinality: Cardinality::Many };
acc.push(field);
true
}
fn deduplicate_fields(ast: &mut AstSrc) {
for node in &mut ast.nodes {
let mut i = 0;
'outer: while i < node.fields.len() {
for j in 0..i {
let f1 = &node.fields[i];
let f2 = &node.fields[j];
if f1 == f2 {
node.fields.remove(i);
continue 'outer;
}
}
i += 1;
}
}
}
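// If a node's fields cover every variant of some enum, collapse them into a
// single optional field of the enum type instead of one field per variant.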
fn extract_enums(ast: &mut AstSrc) {
for node in &mut ast.nodes {
for enm in &ast.enums {
let mut to_remove = Vec::new();
for (i, field) in node.fields.iter().enumerate() {
let ty = field.ty().to_string();
if enm.variants.iter().any(|it| it == &ty) {
to_remove.push(i);
}
}
if to_remove.len() == enm.variants.len() {
node.remove_field(to_remove);
let ty = enm.name.clone();
let name = to_lower_snake_case(&ty);
node.fields.push(Field::Node { name, ty, cardinality: Cardinality::Optional });
}
}
}
}
fn extract_struct_traits(ast: &mut AstSrc) {
let traits: &[(&str, &[&str])] = &[
("AttrsOwner", &["attrs"]),
("NameOwner", &["name"]),
("VisibilityOwner", &["visibility"]),
("GenericParamsOwner", &["generic_param_list", "where_clause"]),
("TypeBoundsOwner", &["type_bound_list", "colon_token"]),
("ModuleItemOwner", &["items"]),
("LoopBodyOwner", &["label", "loop_body"]),
("ArgListOwner", &["arg_list"]),
];
for node in &mut ast.nodes {
for (name, methods) in traits {
extract_struct_trait(node, name, methods);
}
}
}
fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) {
let mut to_remove = Vec::new();
for (i, field) in node.fields.iter().enumerate() {
let method_name = field.method_name().to_string();
if methods.iter().any(|&it| it == method_name) {
to_remove.push(i);
}
}
if to_remove.len() == methods.len() {
node.traits.push(trait_name.to_string());
node.remove_field(to_remove);
}
}
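// An enum implements exactly the traits that all of its variants implement,
// i.e. the intersection of the variants' trait sets.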
fn extract_enum_traits(ast: &mut AstSrc) {
for enm in &mut ast.enums {
if enm.name == "Stmt" {
continue;
}
let nodes = &ast.nodes;
let mut variant_traits = enm
.variants
.iter()
.map(|var| nodes.iter().find(|it| &it.name == var).unwrap())
.map(|node| node.traits.iter().cloned().collect::<BTreeSet<_>>());
let mut enum_traits = match variant_traits.next() {
Some(it) => it,
None => continue,
};
for traits in variant_traits {
enum_traits = enum_traits.intersection(&traits).cloned().collect();
}
enm.traits = enum_traits.into_iter().collect();
}
}
impl AstNodeSrc {
fn remove_field(&mut self, to_remove: Vec<usize>) {
to_remove.into_iter().rev().for_each(|idx| {
self.fields.remove(idx);
});
}
}