7918: Generalize file ensuring infrastructure r=matklad a=matklad

bors r+
🤖

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2021-03-08 18:52:08 +00:00 committed by GitHub
commit 8b7e82b012
16 changed files with 158 additions and 144 deletions
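
In short, this change drops the old `Mode::Overwrite`/`Mode::Verify` plumbing from the codegen machinery and replaces it with `ensure_file_contents` helpers (one in `test_utils`, one in `xtask`): a test regenerates its output, rewrites the file on disk if it is stale, and then fails so the refreshed file gets committed. A minimal sketch of the resulting test pattern, assuming the `test_utils` API shown in the diff below (the test name, generator, and output path are made up for illustration):

```rust
use std::path::Path;
use test_utils::{ensure_file_contents, project_root};

// Hypothetical generator: any function that produces the expected file contents.
fn generate_example_doc() -> String {
    "== Example generated docs\n".to_string()
}

#[test]
fn example_doc_is_fresh() {
    let contents = generate_example_doc();
    // Compares with newline normalization, rewrites the (hypothetical) file if it
    // differs, and panics so CI stays red until the updated file is committed.
    ensure_file_contents(&project_root().join("docs/user/example.adoc"), &contents);
}
```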

View file

@@ -859,12 +859,12 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
 mod tests {
     use std::fs;
-    use test_utils::project_dir;
+    use test_utils::{ensure_file_contents, project_root};
     use super::*;
     #[test]
-    fn schema_in_sync_with_package_json() {
+    fn generate_package_json_config() {
         let s = Config::json_schema();
         let schema = format!("{:#}", s);
         let mut schema = schema
@@ -877,7 +877,7 @@ mod tests {
             .to_string();
         schema.push_str(",\n");
-        let package_json_path = project_dir().join("editors/code/package.json");
+        let package_json_path = project_root().join("editors/code/package.json");
         let mut package_json = fs::read_to_string(&package_json_path).unwrap();
         let start_marker = " \"$generated-start\": false,\n";
@@ -885,19 +885,18 @@ mod tests {
         let start = package_json.find(start_marker).unwrap() + start_marker.len();
         let end = package_json.find(end_marker).unwrap();
         let p = remove_ws(&package_json[start..end]);
         let s = remove_ws(&schema);
         if !p.contains(&s) {
             package_json.replace_range(start..end, &schema);
-            fs::write(&package_json_path, &mut package_json).unwrap();
-            panic!("new config, updating package.json")
+            ensure_file_contents(&package_json_path, &package_json)
         }
     }
     #[test]
-    fn schema_in_sync_with_docs() {
-        let docs_path = project_dir().join("docs/user/generated_config.adoc");
+    fn generate_config_documentation() {
+        let docs_path = project_root().join("docs/user/generated_config.adoc");
         let current = fs::read_to_string(&docs_path).unwrap();
         let expected = ConfigData::manual();

View file

@@ -7,7 +7,7 @@ use std::{
 use ast::NameOwner;
 use expect_test::expect_file;
 use rayon::prelude::*;
-use test_utils::{bench, bench_fixture, project_dir, skip_slow_tests};
+use test_utils::{bench, bench_fixture, project_root, skip_slow_tests};
 use crate::{ast, fuzz, tokenize, AstNode, SourceFile, SyntaxError, TextRange, TextSize, Token};
@@ -153,7 +153,7 @@ fn reparse_fuzz_tests() {
 /// Test that Rust-analyzer can parse and validate the rust-analyzer
 #[test]
 fn self_hosting_parsing() {
-    let dir = project_dir().join("crates");
+    let dir = project_root().join("crates");
     let files = walkdir::WalkDir::new(dir)
         .into_iter()
         .filter_entry(|entry| {
@@ -193,7 +193,7 @@ fn self_hosting_parsing() {
 }
 fn test_data_dir() -> PathBuf {
-    project_dir().join("crates/syntax/test_data")
+    project_root().join("crates/syntax/test_data")
 }
 fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) {

View file

@@ -4,7 +4,7 @@ use std::fs;
 use stdx::format_to;
-use crate::project_dir;
+use crate::project_root;
 pub fn big_struct() -> String {
     let n = 1_000;
@@ -32,11 +32,11 @@ struct S{} {{
 }
 pub fn glorious_old_parser() -> String {
-    let path = project_dir().join("bench_data/glorious_old_parser");
+    let path = project_root().join("bench_data/glorious_old_parser");
     fs::read_to_string(&path).unwrap()
 }
 pub fn numerous_macro_rules() -> String {
-    let path = project_dir().join("bench_data/numerous_macro_rules");
+    let path = project_root().join("bench_data/numerous_macro_rules");
     fs::read_to_string(&path).unwrap()
 }

View file

@@ -14,11 +14,11 @@ mod fixture;
 use std::{
     convert::{TryFrom, TryInto},
     env, fs,
-    path::PathBuf,
+    path::{Path, PathBuf},
 };
 use profile::StopWatch;
-use stdx::lines_with_ends;
+use stdx::{is_ci, lines_with_ends};
 use text_size::{TextRange, TextSize};
 pub use dissimilar::diff as __diff;
@@ -288,14 +288,14 @@ pub fn skip_slow_tests() -> bool {
     if should_skip {
         eprintln!("ignoring slow test")
     } else {
-        let path = project_dir().join("./target/.slow_tests_cookie");
+        let path = project_root().join("./target/.slow_tests_cookie");
         fs::write(&path, ".").unwrap();
     }
     should_skip
 }
 /// Returns the path to the root directory of `rust-analyzer` project.
-pub fn project_dir() -> PathBuf {
+pub fn project_root() -> PathBuf {
     let dir = env!("CARGO_MANIFEST_DIR");
     PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
 }
@@ -353,3 +353,39 @@ pub fn bench(label: &'static str) -> impl Drop {
     Bencher { sw: StopWatch::start(), label }
 }
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and then fails the test.
+pub fn ensure_file_contents(file: &Path, contents: &str) {
+    if let Err(()) = try_ensure_file_contents(file, contents) {
+        panic!("Some files were not up-to-date");
+    }
+}
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and return an Error.
+pub fn try_ensure_file_contents(file: &Path, contents: &str) -> Result<(), ()> {
+    match std::fs::read_to_string(file) {
+        Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => {
+            return Ok(())
+        }
+        _ => (),
+    }
+    let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
+    eprintln!(
+        "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
+        display_path.display()
+    );
+    if is_ci() {
+        eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n");
+    }
+    if let Some(parent) = file.parent() {
+        let _ = std::fs::create_dir_all(parent);
+    }
+    std::fs::write(file, contents).unwrap();
+    Err(())
+}
+fn normalize_newlines(s: &str) -> String {
+    s.replace("\r\n", "\n")
+}
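
The panicking `ensure_file_contents` stops a test at the first stale file, while `try_ensure_file_contents` still rewrites the file but only reports the mismatch through its `Result`. A plausible, purely illustrative caller that checks several generated files in one pass and fails once at the end, assuming the `test_utils` API above:

```rust
use std::path::Path;
use test_utils::try_ensure_file_contents;

// Hypothetical batch check: update every stale file, then fail a single time,
// so one test run refreshes all outputs instead of stopping at the first.
fn ensure_all_files(files: &[(&Path, &str)]) {
    let mut up_to_date = true;
    for &(path, contents) in files {
        up_to_date &= try_ensure_file_contents(path, contents).is_ok();
    }
    if !up_to_date {
        panic!("some generated files were stale and have been updated");
    }
}
```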

View file

@@ -308,9 +308,8 @@ This sections talks about the things which are everywhere and nowhere in particu
 ### Code generation
 Some of the components of this repository are generated through automatic processes.
-`cargo xtask codegen` runs all generation tasks.
+Generated code is updated automatically on `cargo test`.
 Generated code is generally committed to the git repository.
-There are tests to check that the generated code is fresh.
 In particular, we generate:
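
Concretely, the freshness checks become ordinary tests that call the generators directly, so plain `cargo test` both verifies and rewrites the generated files. A sketch of one such test, following the xtask tidy tests later in this diff (only the `use` line is added here for context):

```rust
use crate::codegen;

#[test]
fn generate_grammar() {
    // Regenerates the parser and AST files; ensure_file_contents rewrites any
    // stale file and returns an error, which makes this test fail until the
    // refreshed output is committed.
    codegen::generate_syntax().unwrap()
}
```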

View file

@@ -7,68 +7,66 @@
 mod gen_syntax;
 mod gen_parser_tests;
-mod gen_lint_completions;
 mod gen_assists_docs;
 mod gen_feature_docs;
+mod gen_lint_completions;
 mod gen_diagnostic_docs;
 use std::{
     fmt, mem,
     path::{Path, PathBuf},
 };
-use xshell::{cmd, pushenv, read_file, write_file};
-use crate::{ensure_rustfmt, flags, project_root, Result};
+use xshell::{cmd, pushenv};
+use crate::{ensure_rustfmt, project_root, Result};
 pub(crate) use self::{
-    gen_assists_docs::{generate_assists_docs, generate_assists_tests},
-    gen_diagnostic_docs::generate_diagnostic_docs,
-    gen_feature_docs::generate_feature_docs,
-    gen_lint_completions::generate_lint_completions,
-    gen_parser_tests::generate_parser_tests,
-    gen_syntax::generate_syntax,
+    gen_assists_docs::generate_assists_tests, gen_lint_completions::generate_lint_completions,
+    gen_parser_tests::generate_parser_tests, gen_syntax::generate_syntax,
 };
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub(crate) enum Mode {
-    Overwrite,
-    Verify,
+pub(crate) fn docs() -> Result<()> {
+    // We don't commit docs to the repo, so we can just overwrite them.
+    gen_assists_docs::generate_assists_docs()?;
+    gen_feature_docs::generate_feature_docs()?;
+    gen_diagnostic_docs::generate_diagnostic_docs()?;
+    Ok(())
 }
-impl flags::Codegen {
-    pub(crate) fn run(self) -> Result<()> {
-        if self.features {
-            generate_lint_completions(Mode::Overwrite)?;
-        }
-        generate_syntax(Mode::Overwrite)?;
-        generate_parser_tests(Mode::Overwrite)?;
-        generate_assists_tests(Mode::Overwrite)?;
-        generate_assists_docs(Mode::Overwrite)?;
-        generate_feature_docs(Mode::Overwrite)?;
-        generate_diagnostic_docs(Mode::Overwrite)?;
-        Ok(())
-    }
+#[allow(unused)]
+fn used() {
+    generate_parser_tests();
+    generate_assists_tests();
+    generate_syntax();
+    generate_lint_completions();
 }
-/// A helper to update file on disk if it has changed.
-/// With verify = false,
-fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> {
-    match read_file(path) {
-        Ok(old_contents) if normalize(&old_contents) == normalize(contents) => {
-            return Ok(());
+/// Checks that the `file` has the specified `contents`. If that is not the
+/// case, updates the file and then fails the test.
+pub(crate) fn ensure_file_contents(file: &Path, contents: &str) -> Result<()> {
+    match std::fs::read_to_string(file) {
+        Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => {
+            return Ok(())
         }
         _ => (),
     }
-    if mode == Mode::Verify {
-        anyhow::bail!("`{}` is not up-to-date", path.display());
+    let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
+    eprintln!(
+        "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
+        display_path.display()
+    );
+    if std::env::var("CI").is_ok() {
+        eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n");
     }
-    eprintln!("updating {}", path.display());
-    write_file(path, contents)?;
-    return Ok(());
-    fn normalize(s: &str) -> String {
-        s.replace("\r\n", "\n")
-    }
+    if let Some(parent) = file.parent() {
+        let _ = std::fs::create_dir_all(parent);
+    }
+    std::fs::write(file, contents).unwrap();
+    anyhow::bail!("some file were not up to date")
 }
+fn normalize_newlines(s: &str) -> String {
+    s.replace("\r\n", "\n")
+}
 const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/src/codegen`";

View file

@@ -2,22 +2,25 @@
 use std::{fmt, path::Path};
+use xshell::write_file;
 use crate::{
-    codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, Mode, PREAMBLE},
+    codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, PREAMBLE},
     project_root, rust_files_in, Result,
 };
-pub(crate) fn generate_assists_tests(mode: Mode) -> Result<()> {
+pub(crate) fn generate_assists_tests() -> Result<()> {
     let assists = Assist::collect()?;
-    generate_tests(&assists, mode)
+    generate_tests(&assists)
 }
-pub(crate) fn generate_assists_docs(mode: Mode) -> Result<()> {
+pub(crate) fn generate_assists_docs() -> Result<()> {
     let assists = Assist::collect()?;
     let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
     let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
     let dst = project_root().join("docs/user/generated_assists.adoc");
-    codegen::update(&dst, &contents, mode)
+    write_file(dst, &contents)?;
+    Ok(())
 }
 #[derive(Debug)]
@@ -111,7 +114,7 @@ impl fmt::Display for Assist {
     }
 }
-fn generate_tests(assists: &[Assist], mode: Mode) -> Result<()> {
+fn generate_tests(assists: &[Assist]) -> Result<()> {
     let mut buf = String::from("use super::check_doc_test;\n");
     for assist in assists.iter() {
@@ -135,7 +138,10 @@ r#####"
         buf.push_str(&test)
     }
     let buf = reformat(&buf)?;
-    codegen::update(&project_root().join("crates/ide_assists/src/tests/generated.rs"), &buf, mode)
+    codegen::ensure_file_contents(
+        &project_root().join("crates/ide_assists/src/tests/generated.rs"),
+        &buf,
+    )
 }
 fn hide_hash_comments(text: &str) -> String {

View file

@@ -2,18 +2,20 @@
 use std::{fmt, path::PathBuf};
+use xshell::write_file;
 use crate::{
-    codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode, PREAMBLE},
+    codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
     project_root, rust_files, Result,
 };
-pub(crate) fn generate_diagnostic_docs(mode: Mode) -> Result<()> {
+pub(crate) fn generate_diagnostic_docs() -> Result<()> {
     let diagnostics = Diagnostic::collect()?;
     let contents =
         diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
     let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
     let dst = project_root().join("docs/user/generated_diagnostic.adoc");
-    codegen::update(&dst, &contents, mode)?;
+    write_file(&dst, &contents)?;
     Ok(())
 }

View file

@@ -2,17 +2,19 @@
 use std::{fmt, path::PathBuf};
+use xshell::write_file;
 use crate::{
-    codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode, PREAMBLE},
+    codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
     project_root, rust_files, Result,
 };
-pub(crate) fn generate_feature_docs(mode: Mode) -> Result<()> {
+pub(crate) fn generate_feature_docs() -> Result<()> {
     let features = Feature::collect()?;
     let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
     let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
     let dst = project_root().join("docs/user/generated_features.adoc");
-    codegen::update(&dst, &contents, mode)?;
+    write_file(&dst, &contents)?;
     Ok(())
 }

View file

@@ -5,13 +5,10 @@ use std::path::{Path, PathBuf};
 use walkdir::WalkDir;
 use xshell::{cmd, read_file};
-use crate::{
-    codegen::{project_root, reformat, update, Mode, Result},
-    run_rustfmt,
-};
-pub(crate) fn generate_lint_completions(mode: Mode) -> Result<()> {
-    if !Path::new("./target/rust").exists() {
+use crate::codegen::{ensure_file_contents, project_root, reformat, Result};
+pub(crate) fn generate_lint_completions() -> Result<()> {
+    if !project_root().join("./target/rust").exists() {
         cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
     }
@@ -25,8 +22,7 @@ pub(crate) fn generate_lint_completions(mode: Mode) -> Result<()> {
     let destination =
         project_root().join("crates/ide_completion/src/generated_lint_completions.rs");
-    update(destination.as_path(), &contents, mode)?;
-    run_rustfmt(mode)?;
+    ensure_file_contents(destination.as_path(), &contents)?;
     Ok(())
 }

View file

@@ -8,13 +8,13 @@ use std::{
 };
 use crate::{
-    codegen::{extract_comment_blocks, update, Mode},
+    codegen::{ensure_file_contents, extract_comment_blocks},
     project_root, Result,
 };
-pub(crate) fn generate_parser_tests(mode: Mode) -> Result<()> {
+pub(crate) fn generate_parser_tests() -> Result<()> {
     let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?;
-    fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> {
+    fn install_tests(tests: &HashMap<String, Test>, into: &str) -> Result<()> {
         let tests_dir = project_root().join(into);
         if !tests_dir.is_dir() {
             fs::create_dir_all(&tests_dir)?;
@@ -35,12 +35,12 @@ pub(crate) fn generate_parser_tests(mode: Mode) -> Result<()> {
                     tests_dir.join(file_name)
                 }
             };
-            update(&path, &test.text, mode)?;
+            ensure_file_contents(&path, &test.text)?;
         }
         Ok(())
     }
-    install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok", mode)?;
-    install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err", mode)
+    install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok")?;
+    install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err")
 }
#[derive(Debug)] #[derive(Debug)]

View file

@@ -14,25 +14,25 @@ use ungrammar::{rust_grammar, Grammar, Rule};
 use crate::{
     ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC},
-    codegen::{reformat, update, Mode},
+    codegen::{ensure_file_contents, reformat},
     project_root, Result,
 };
-pub(crate) fn generate_syntax(mode: Mode) -> Result<()> {
+pub(crate) fn generate_syntax() -> Result<()> {
     let grammar = rust_grammar();
     let ast = lower(&grammar);
     let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs");
     let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?;
-    update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?;
+    ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds)?;
     let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs");
     let contents = generate_tokens(&ast)?;
-    update(ast_tokens_file.as_path(), &contents, mode)?;
+    ensure_file_contents(ast_tokens_file.as_path(), &contents)?;
     let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs");
     let contents = generate_nodes(KINDS_SRC, &ast)?;
-    update(ast_nodes_file.as_path(), &contents, mode)?;
+    ensure_file_contents(ast_nodes_file.as_path(), &contents)?;
     Ok(())
 }

View file

@@ -27,10 +27,6 @@ xflags::xflags! {
             optional --jemalloc
         }
-        cmd codegen {
-            optional --features
-        }
         cmd lint {}
         cmd fuzz-tests {}
         cmd pre-cache {}
@@ -67,7 +63,6 @@ pub struct Xtask {
 pub enum XtaskCmd {
     Help(Help),
     Install(Install),
-    Codegen(Codegen),
     Lint(Lint),
     FuzzTests(FuzzTests),
     PreCache(PreCache),
@@ -92,11 +87,6 @@ pub struct Install {
     pub jemalloc: bool,
 }
-#[derive(Debug)]
-pub struct Codegen {
-    pub features: bool,
-}
 #[derive(Debug)]
 pub struct Lint;

View file

@@ -28,7 +28,7 @@ use std::{
 use walkdir::{DirEntry, WalkDir};
 use xshell::{cmd, cp, pushd, pushenv};
-use crate::{codegen::Mode, dist::DistCmd};
+use crate::dist::DistCmd;
 fn main() -> Result<()> {
     let _d = pushd(project_root())?;
@@ -40,7 +40,6 @@ fn main() -> Result<()> {
             return Ok(());
         }
         flags::XtaskCmd::Install(cmd) => cmd.run(),
-        flags::XtaskCmd::Codegen(cmd) => cmd.run(),
         flags::XtaskCmd::Lint(_) => run_clippy(),
         flags::XtaskCmd::FuzzTests(_) => run_fuzzer(),
         flags::XtaskCmd::PreCache(cmd) => cmd.run(),
@@ -85,18 +84,6 @@ fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
     files_in(path, "rs")
 }
-fn run_rustfmt(mode: Mode) -> Result<()> {
-    let _dir = pushd(project_root())?;
-    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
-    ensure_rustfmt()?;
-    let check = match mode {
-        Mode::Overwrite => &[][..],
-        Mode::Verify => &["--", "--check"],
-    };
-    cmd!("cargo fmt {check...}").run()?;
-    Ok(())
-}
 fn ensure_rustfmt() -> Result<()> {
     let out = cmd!("rustfmt --version").read()?;
     if !out.contains("stable") {

View file

@@ -2,7 +2,7 @@ use std::fmt::Write;
 use xshell::{cmd, cp, pushd, read_dir, write_file};
-use crate::{codegen, date_iso, flags, is_release_tag, project_root, Mode, Result};
+use crate::{codegen, date_iso, flags, is_release_tag, project_root, Result};
 impl flags::Release {
     pub(crate) fn run(self) -> Result<()> {
@@ -12,8 +12,7 @@ impl flags::Release {
             cmd!("git reset --hard tags/nightly").run()?;
             cmd!("git push").run()?;
         }
-        codegen::generate_assists_docs(Mode::Overwrite)?;
-        codegen::generate_feature_docs(Mode::Overwrite)?;
+        codegen::docs()?;
         let website_root = project_root().join("../rust-analyzer.github.io");
         let changelog_dir = website_root.join("./thisweek/_posts");

View file

@@ -3,48 +3,48 @@ use std::{
     path::{Path, PathBuf},
 };
-use xshell::{cmd, read_file};
-use crate::{
-    cargo_files,
-    codegen::{self, Mode},
-    project_root, run_rustfmt, rust_files,
-};
+use xshell::{cmd, pushd, pushenv, read_file};
+use crate::{cargo_files, codegen, project_root, rust_files};
 #[test]
-fn generated_grammar_is_fresh() {
-    if let Err(error) = codegen::generate_syntax(Mode::Verify) {
-        panic!("{}. Please update it by running `cargo xtask codegen`", error);
-    }
+fn generate_grammar() {
+    codegen::generate_syntax().unwrap()
 }
 #[test]
-fn generated_tests_are_fresh() {
-    if let Err(error) = codegen::generate_parser_tests(Mode::Verify) {
-        panic!("{}. Please update tests by running `cargo xtask codegen`", error);
-    }
+fn generate_parser_tests() {
+    codegen::generate_parser_tests().unwrap()
 }
 #[test]
-fn generated_assists_are_fresh() {
-    if let Err(error) = codegen::generate_assists_tests(Mode::Verify) {
-        panic!("{}. Please update assists by running `cargo xtask codegen`", error);
-    }
+fn generate_assists_tests() {
+    codegen::generate_assists_tests().unwrap();
+}
+/// This clones rustc repo, and so is not worth to keep up-to-date. We update
+/// manually by un-ignoring the test from time to time.
+#[test]
+#[ignore]
+fn generate_lint_completions() {
+    codegen::generate_lint_completions().unwrap()
 }
 #[test]
 fn check_code_formatting() {
-    if let Err(error) = run_rustfmt(Mode::Verify) {
-        panic!("{}. Please format the code by running `cargo format`", error);
+    let _dir = pushd(project_root()).unwrap();
+    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
+    crate::ensure_rustfmt().unwrap();
+    let res = cmd!("cargo fmt -- --check").run();
+    if !res.is_ok() {
+        let _ = cmd!("cargo fmt").run();
     }
+    res.unwrap()
 }
 #[test]
-fn smoke_test_docs_generation() {
-    // We don't commit docs to the repo, so we can just overwrite in tests.
-    codegen::generate_assists_docs(Mode::Overwrite).unwrap();
-    codegen::generate_feature_docs(Mode::Overwrite).unwrap();
-    codegen::generate_diagnostic_docs(Mode::Overwrite).unwrap();
+fn smoke_test_generate_documentation() {
+    codegen::docs().unwrap()
 }
 #[test]