mirror of
https://github.com/rust-lang/rust-analyzer.git
synced 2025-09-28 12:54:58 +00:00
Rewrite inline parser test infra to generate proper Rust test cases
This commit is contained in:
parent
ef462ca88e
commit
7a5bf92b89
503 changed files with 955 additions and 163 deletions
|
@ -6,68 +6,132 @@ use std::{
|
|||
collections::HashMap,
|
||||
fs, iter,
|
||||
path::{Path, PathBuf},
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
use anyhow::Result;
|
||||
use itertools::Itertools as _;
|
||||
|
||||
use crate::{
|
||||
codegen::{ensure_file_contents, CommentBlock},
|
||||
codegen::{ensure_file_contents, reformat, CommentBlock},
|
||||
project_root,
|
||||
util::list_rust_files,
|
||||
};
|
||||
|
||||
const PARSER_CRATE_ROOT: &str = "crates/parser";
|
||||
const PARSER_TEST_DATA: &str = "crates/parser/test_data";
|
||||
const PARSER_TEST_DATA_INLINE: &str = "crates/parser/test_data/parser/inline";
|
||||
|
||||
pub(crate) fn generate(check: bool) {
|
||||
let grammar_dir = project_root().join(Path::new("crates/parser/src/grammar"));
|
||||
let tests = tests_from_dir(&grammar_dir);
|
||||
let tests = tests_from_dir(
|
||||
&project_root().join(Path::new(&format!("{PARSER_CRATE_ROOT}/src/grammar"))),
|
||||
);
|
||||
|
||||
install_tests(&tests.ok, "crates/parser/test_data/parser/inline/ok", check);
|
||||
install_tests(&tests.err, "crates/parser/test_data/parser/inline/err", check);
|
||||
let mut some_file_was_updated = false;
|
||||
some_file_was_updated |=
|
||||
install_tests(&tests.ok, &format!("{PARSER_TEST_DATA_INLINE}/ok"), check).unwrap();
|
||||
some_file_was_updated |=
|
||||
install_tests(&tests.err, &format!("{PARSER_TEST_DATA_INLINE}/err"), check).unwrap();
|
||||
|
||||
fn install_tests(tests: &HashMap<String, Test>, into: &str, check: bool) {
|
||||
let tests_dir = project_root().join(into);
|
||||
if !tests_dir.is_dir() {
|
||||
fs::create_dir_all(&tests_dir).unwrap();
|
||||
}
|
||||
// ok is never actually read, but it needs to be specified to create a Test in existing_tests
|
||||
let existing = existing_tests(&tests_dir, true);
|
||||
if let Some(t) = existing.keys().find(|&t| !tests.contains_key(t)) {
|
||||
panic!("Test is deleted: {t}");
|
||||
}
|
||||
if some_file_was_updated {
|
||||
let _ = fs::File::open(&format!("{PARSER_CRATE_ROOT}/src/tests.rs"))
|
||||
.unwrap()
|
||||
.set_modified(SystemTime::now());
|
||||
|
||||
let mut new_idx = existing.len() + 1;
|
||||
for (name, test) in tests {
|
||||
let path = match existing.get(name) {
|
||||
Some((path, _test)) => path.clone(),
|
||||
None => {
|
||||
let file_name = format!("{new_idx:04}_{name}.rs");
|
||||
new_idx += 1;
|
||||
tests_dir.join(file_name)
|
||||
let ok_tests = tests.ok.keys().sorted().map(|k| {
|
||||
let test_name = quote::format_ident!("{}", k);
|
||||
let test_file = format!("test_data/parser/inline/ok/{test_name}.rs");
|
||||
quote::quote! {
|
||||
#[test]
|
||||
fn #test_name() {
|
||||
run_and_expect_no_errors(#test_file);
|
||||
}
|
||||
};
|
||||
ensure_file_contents(crate::flags::CodegenType::ParserTests, &path, &test.text, check);
|
||||
}
|
||||
});
|
||||
let err_tests = tests.err.keys().sorted().map(|k| {
|
||||
let test_name = quote::format_ident!("{}", k);
|
||||
let test_file = format!("test_data/parser/inline/err/{test_name}.rs");
|
||||
quote::quote! {
|
||||
#[test]
|
||||
fn #test_name() {
|
||||
run_and_expect_errors(#test_file);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let output = quote::quote! {
|
||||
mod ok {
|
||||
use crate::tests::run_and_expect_no_errors;
|
||||
#(#ok_tests)*
|
||||
}
|
||||
mod err {
|
||||
use crate::tests::run_and_expect_errors;
|
||||
#(#err_tests)*
|
||||
}
|
||||
};
|
||||
|
||||
let pretty = reformat(output.to_string());
|
||||
ensure_file_contents(
|
||||
crate::flags::CodegenType::ParserTests,
|
||||
format!("{PARSER_TEST_DATA}/generated/runner.rs").as_ref(),
|
||||
&pretty,
|
||||
check,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn install_tests(tests: &HashMap<String, Test>, into: &str, check: bool) -> Result<bool> {
|
||||
let tests_dir = project_root().join(into);
|
||||
if !tests_dir.is_dir() {
|
||||
fs::create_dir_all(&tests_dir)?;
|
||||
}
|
||||
let existing = existing_tests(&tests_dir, TestKind::Ok)?;
|
||||
if let Some((t, (path, _))) = existing.iter().find(|&(t, _)| !tests.contains_key(t)) {
|
||||
panic!("Test `{t}` is deleted: {}", path.display());
|
||||
}
|
||||
|
||||
let mut some_file_was_updated = false;
|
||||
|
||||
for (name, test) in tests {
|
||||
let path = match existing.get(name) {
|
||||
Some((path, _test)) => path.clone(),
|
||||
None => tests_dir.join(name).with_extension("rs"),
|
||||
};
|
||||
if ensure_file_contents(crate::flags::CodegenType::ParserTests, &path, &test.text, check) {
|
||||
some_file_was_updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(some_file_was_updated)
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Test {
|
||||
name: String,
|
||||
text: String,
|
||||
ok: bool,
|
||||
pub name: String,
|
||||
pub text: String,
|
||||
pub kind: TestKind,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum TestKind {
|
||||
Ok,
|
||||
Err,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
struct Tests {
|
||||
ok: HashMap<String, Test>,
|
||||
err: HashMap<String, Test>,
|
||||
pub ok: HashMap<String, Test>,
|
||||
pub err: HashMap<String, Test>,
|
||||
}
|
||||
|
||||
fn collect_tests(s: &str) -> Vec<Test> {
|
||||
let mut res = Vec::new();
|
||||
for comment_block in CommentBlock::extract_untagged(s) {
|
||||
let first_line = &comment_block.contents[0];
|
||||
let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
|
||||
(name.to_owned(), true)
|
||||
let (name, kind) = if let Some(name) = first_line.strip_prefix("test ") {
|
||||
(name.to_owned(), TestKind::Ok)
|
||||
} else if let Some(name) = first_line.strip_prefix("test_err ") {
|
||||
(name.to_owned(), false)
|
||||
(name.to_owned(), TestKind::Err)
|
||||
} else {
|
||||
continue;
|
||||
};
|
||||
|
@ -78,7 +142,7 @@ fn collect_tests(s: &str) -> Vec<Test> {
|
|||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
assert!(!text.trim().is_empty() && text.ends_with('\n'));
|
||||
res.push(Test { name, text, ok })
|
||||
res.push(Test { name, text, kind })
|
||||
}
|
||||
res
|
||||
}
|
||||
|
@ -96,7 +160,7 @@ fn tests_from_dir(dir: &Path) -> Tests {
|
|||
let text = fs::read_to_string(path).unwrap();
|
||||
|
||||
for test in collect_tests(&text) {
|
||||
if test.ok {
|
||||
if let TestKind::Ok = test.kind {
|
||||
if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
|
||||
panic!("Duplicate test: {}", old_test.name);
|
||||
}
|
||||
|
@ -107,25 +171,22 @@ fn tests_from_dir(dir: &Path) -> Tests {
|
|||
}
|
||||
}
|
||||
|
||||
fn existing_tests(dir: &Path, ok: bool) -> HashMap<String, (PathBuf, Test)> {
|
||||
let mut res = HashMap::default();
|
||||
for file in fs::read_dir(dir).unwrap() {
|
||||
let file = file.unwrap();
|
||||
let path = file.path();
|
||||
if path.extension().unwrap_or_default() != "rs" {
|
||||
continue;
|
||||
}
|
||||
let name = {
|
||||
let file_name = path.file_name().unwrap().to_str().unwrap();
|
||||
file_name[5..file_name.len() - 3].to_string()
|
||||
};
|
||||
let text = fs::read_to_string(&path).unwrap();
|
||||
let test = Test { name: name.clone(), text, ok };
|
||||
if let Some(old) = res.insert(name, (path, test)) {
|
||||
println!("Duplicate test: {old:?}");
|
||||
fn existing_tests(dir: &Path, ok: TestKind) -> Result<HashMap<String, (PathBuf, Test)>> {
|
||||
let mut res = HashMap::new();
|
||||
for file in fs::read_dir(dir)? {
|
||||
let path = file?.path();
|
||||
let rust_file = path.extension().and_then(|ext| ext.to_str()) == Some("rs");
|
||||
|
||||
if rust_file {
|
||||
let name = path.file_stem().map(|x| x.to_string_lossy().to_string()).unwrap();
|
||||
let text = fs::read_to_string(&path)?;
|
||||
let test = Test { name: name.clone(), text, kind: ok };
|
||||
if let Some(old) = res.insert(name, (path, test)) {
|
||||
println!("Duplicate test: {:?}", old);
|
||||
}
|
||||
}
|
||||
}
|
||||
res
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue