Cleanup feature generation

Aleksey Kladov 2020-08-18 19:31:06 +02:00
parent f18f9da7d8
commit 27ccc95c60
12 changed files with 95 additions and 115 deletions

View file

@@ -32,7 +32,7 @@ struct Assist {
 impl Assist {
     fn collect() -> Result<Vec<Assist>> {
         let mut res = Vec::new();
-        for path in rust_files(&project_root().join(codegen::ASSISTS_DIR)) {
+        for path in rust_files(&project_root().join("crates/assists/src/handlers")) {
             collect_file(&mut res, path.as_path())?;
         }
         res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
@@ -135,7 +135,7 @@ r#####"
         buf.push_str(&test)
     }
     let buf = reformat(buf)?;
-    codegen::update(&project_root().join(codegen::ASSISTS_TESTS), &buf, mode)
+    codegen::update(&project_root().join("crates/assists/src/tests/generated.rs"), &buf, mode)
 }
 
 fn hide_hash_comments(text: &str) -> String {
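
For context on the loop above: `rust_files` only needs to yield every `.rs` file under the handlers directory. A minimal sketch of such a helper, assuming a walkdir-based implementation (the real helper lives elsewhere in xtask and is not part of this diff):

use std::path::{Path, PathBuf};

use walkdir::WalkDir;

// Sketch only: yields every `.rs` file below `dir`.
fn rust_files(dir: &Path) -> impl Iterator<Item = PathBuf> {
    WalkDir::new(dir)
        .into_iter()
        .filter_map(|entry| entry.ok())
        .map(|entry| entry.into_path())
        .filter(|path| path.extension().map_or(false, |ext| ext == "rs"))
}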

View file

@@ -0,0 +1,50 @@
+//! Generates descriptors structure for unstable feature from Unstable Book
+use std::path::{Path, PathBuf};
+
+use quote::quote;
+use walkdir::WalkDir;
+
+use crate::{
+    codegen::{project_root, reformat, update, Mode, Result},
+    not_bash::{fs2, run},
+};
+
+pub fn generate_features(mode: Mode) -> Result<()> {
+    if !Path::new("./target/rust").exists() {
+        run!("git clone https://github.com/rust-lang/rust ./target/rust")?;
+    }
+
+    let contents = generate_descriptor("./target/rust/src/doc/unstable-book/src".into())?;
+
+    let destination = project_root().join("crates/ide/src/completion/generated_features.rs");
+    update(destination.as_path(), &contents, mode)?;
+
+    Ok(())
+}
+
+fn generate_descriptor(src_dir: PathBuf) -> Result<String> {
+    let definitions = ["language-features", "library-features"]
+        .iter()
+        .flat_map(|it| WalkDir::new(src_dir.join(it)))
+        .filter_map(|e| e.ok())
+        .filter(|entry| {
+            // Get all `.md ` files
+            entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
+        })
+        .map(|entry| {
+            let path = entry.path();
+            let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
+            let doc = fs2::read_to_string(path).unwrap();
+
+            quote! { LintCompletion { label: #feature_ident, description: #doc } }
+        });
+
+    let ts = quote! {
+        use crate::completion::complete_attribute::LintCompletion;
+
+        pub(super) const FEATURES: &[LintCompletion] = &[
+            #(#definitions),*
+        ];
+    };
+    reformat(ts)
+}
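
For reference, the output of this new generator is expected to have roughly the following shape (an illustrative sketch of `crates/ide/src/completion/generated_features.rs`; the actual labels and descriptions are pulled from the cloned Unstable Book, and `reformat` may prepend its own preamble):

use crate::completion::complete_attribute::LintCompletion;

pub(super) const FEATURES: &[LintCompletion] = &[
    // Illustrative entry; one element is emitted per `.md` file found under
    // language-features/ and library-features/.
    LintCompletion {
        label: "box_syntax",
        description: "# `box_syntax`\n\nThe tracking issue for this feature is ...",
    },
];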

View file

@@ -8,12 +8,12 @@ use std::{
 };
 
 use crate::{
-    codegen::{self, extract_comment_blocks, update, Mode},
+    codegen::{extract_comment_blocks, update, Mode},
     project_root, Result,
 };
 
 pub fn generate_parser_tests(mode: Mode) -> Result<()> {
-    let tests = tests_from_dir(&project_root().join(Path::new(codegen::GRAMMAR_DIR)))?;
+    let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?;
     fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> {
         let tests_dir = project_root().join(into);
         if !tests_dir.is_dir() {
@@ -39,8 +39,8 @@ pub fn generate_parser_tests(mode: Mode) -> Result<()> {
         }
         Ok(())
     }
-    install_tests(&tests.ok, codegen::OK_INLINE_TESTS_DIR, mode)?;
-    install_tests(&tests.err, codegen::ERR_INLINE_TESTS_DIR, mode)
+    install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok", mode)?;
+    install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err", mode)
 }
 
 #[derive(Debug)]
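
As a reminder of what feeds this generator: `tests_from_dir` collects `// test <name>` comment blocks from the grammar sources, and `install_tests` writes each block out as a standalone file in the ok/err directories above. An illustrative example of the convention (not taken from this commit; the file name scheme is assumed):

// Inside a grammar function in crates/parser/src/grammar/, an inline test
// looks like this (illustrative):
//
// test block_expr
// fn foo() {
//     let x = { 1 };
// }
//
// The generator installs the commented-out source as a file such as
// crates/syntax/test_data/parser/inline/ok/0001_block_expr.rs.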

View file

@@ -14,7 +14,7 @@ use ungrammar::{rust_grammar, Grammar, Rule};
 
 use crate::{
     ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC},
-    codegen::{self, reformat, update, Mode},
+    codegen::{reformat, update, Mode},
     project_root, Result,
 };
 
@@ -22,15 +22,15 @@ pub fn generate_syntax(mode: Mode) -> Result<()> {
     let grammar = rust_grammar();
     let ast = lower(&grammar);
 
-    let syntax_kinds_file = project_root().join(codegen::SYNTAX_KINDS);
+    let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs");
     let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?;
     update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?;
 
-    let ast_tokens_file = project_root().join(codegen::AST_TOKENS);
+    let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs");
     let contents = generate_tokens(&ast)?;
     update(ast_tokens_file.as_path(), &contents, mode)?;
 
-    let ast_nodes_file = project_root().join(codegen::AST_NODES);
+    let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs");
     let contents = generate_nodes(KINDS_SRC, &ast)?;
     update(ast_nodes_file.as_path(), &contents, mode)?;
 
View file

@@ -1,61 +0,0 @@
-//! Generates descriptors structure for unstable feature from Unstable Book
-use crate::codegen::{self, project_root, Mode, Result};
-use crate::codegen::{reformat, update};
-use crate::not_bash::{fs2, pushd, run};
-use proc_macro2::TokenStream;
-use quote::quote;
-use std::path::PathBuf;
-use walkdir::WalkDir;
-
-fn generate_descriptor(src_dir: PathBuf) -> Result<TokenStream> {
-    let files = WalkDir::new(src_dir.join("language-features"))
-        .into_iter()
-        .chain(WalkDir::new(src_dir.join("library-features")))
-        .filter_map(|e| e.ok())
-        .filter(|entry| {
-            // Get all `.md ` files
-            entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
-        })
-        .collect::<Vec<_>>();
-
-    let definitions = files
-        .iter()
-        .map(|entry| {
-            let path = entry.path();
-            let feature_ident =
-                format!("{}", path.file_stem().unwrap().to_str().unwrap().replace("-", "_"));
-            let doc = format!("{}", std::fs::read_to_string(path).unwrap());
-
-            quote! { LintCompletion { label: #feature_ident, description: #doc } }
-        })
-        .collect::<Vec<_>>();
-
-    let ts = quote! {
-        use crate::completion::LintCompletion;
-        pub(crate) const UNSTABLE_FEATURE_DESCRIPTOR: &[LintCompletion] = &[
-            #(#definitions),*
-        ];
-    };
-    Ok(ts)
-}
-
-pub fn generate_unstable_future_descriptor(mode: Mode) -> Result<()> {
-    let path = project_root().join(codegen::STORAGE);
-    fs2::create_dir_all(path.clone())?;
-
-    let _d = pushd(path.clone());
-    run!("git init")?;
-    run!("git remote add -f origin {}", codegen::REPOSITORY_URL)?;
-    run!("git pull origin master")?;
-
-    let src_dir = path.join(codegen::REPO_PATH);
-    let content = generate_descriptor(src_dir)?.to_string();
-    let contents = reformat(content)?;
-    let destination = project_root().join(codegen::UNSTABLE_FEATURE);
-    update(destination.as_path(), &contents, mode)?;
-
-    Ok(())
-}
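
The through-line of the commit: the `codegen::*` path constants referenced on the removed and changed lines above (ASSISTS_DIR, ASSISTS_TESTS, GRAMMAR_DIR, the inline-test directories, SYNTAX_KINDS, AST_TOKENS, AST_NODES, plus the git-related STORAGE, REPOSITORY_URL, REPO_PATH, and UNSTABLE_FEATURE used only by the deleted generator) are no longer needed, because each path is now written out at its single point of use. Judging from the replacement literals, the dropped declarations would have looked roughly like this; a reconstruction for illustration, not part of the diff shown:

// Reconstructed from the inlined literals above; previously in the xtask codegen module.
pub const GRAMMAR_DIR: &str = "crates/parser/src/grammar";
pub const OK_INLINE_TESTS_DIR: &str = "crates/syntax/test_data/parser/inline/ok";
pub const ERR_INLINE_TESTS_DIR: &str = "crates/syntax/test_data/parser/inline/err";
pub const SYNTAX_KINDS: &str = "crates/parser/src/syntax_kind/generated.rs";
pub const AST_NODES: &str = "crates/syntax/src/ast/generated/nodes.rs";
pub const AST_TOKENS: &str = "crates/syntax/src/ast/generated/tokens.rs";
pub const ASSISTS_DIR: &str = "crates/assists/src/handlers";
pub const ASSISTS_TESTS: &str = "crates/assists/src/tests/generated.rs";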