Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-29 05:15:04 +00:00
Merge #9476

9476: internal: overhaul codegen r=matklad a=matklad

bors r+

🤖

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>

commit 336194c09b
32 changed files with 767 additions and 725 deletions
crates/ide_assists/Cargo.toml
@@ -24,4 +24,5 @@ hir = { path = "../hir", version = "0.0.0" }
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
 expect-test = "1.1"
crates/ide_assists/src/tests.rs
@@ -1,3 +1,4 @@
+mod sourcegen;
 mod generated;
 
 use expect_test::expect;
crates/ide_assists/src/tests/generated.rs
@@ -1,4 +1,4 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_assists_docs`, do not edit by hand.
 
 use super::check_doc_test;
crates/ide_assists/src/tests/sourcegen.rs (new file, 171 lines)
@@ -0,0 +1,171 @@
//! Generates `assists.md` documentation.

use std::{fmt, fs, path::Path};

use test_utils::project_root;

#[test]
fn sourcegen_assists_docs() {
    let assists = Assist::collect();

    {
        // Generate doctests.

        let mut buf = "
use super::check_doc_test;
"
        .to_string();
        for assist in assists.iter() {
            let test = format!(
                r######"
#[test]
fn doctest_{}() {{
    check_doc_test(
        "{}",
        r#####"
{}"#####, r#####"
{}"#####)
}}
"######,
                assist.id,
                assist.id,
                reveal_hash_comments(&assist.before),
                reveal_hash_comments(&assist.after)
            );

            buf.push_str(&test)
        }
        let buf = sourcegen::add_preamble("sourcegen_assists_docs", sourcegen::reformat(buf));
        sourcegen::ensure_file_contents(
            &project_root().join("crates/ide_assists/src/tests/generated.rs"),
            &buf,
        );
    }

    {
        // Generate assists manual. Note that we do _not_ commit manual to the
        // git repo. Instead, `cargo xtask release` runs this test before making
        // a release.

        let contents = sourcegen::add_preamble(
            "sourcegen_assists_docs",
            assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"),
        );
        let dst = project_root().join("docs/user/generated_assists.adoc");
        fs::write(dst, contents).unwrap();
    }
}

#[derive(Debug)]
struct Assist {
    id: String,
    location: sourcegen::Location,
    doc: String,
    before: String,
    after: String,
}

impl Assist {
    fn collect() -> Vec<Assist> {
        let handlers_dir = project_root().join("crates/ide_assists/src/handlers");

        let mut res = Vec::new();
        for path in sourcegen::list_rust_files(&handlers_dir) {
            collect_file(&mut res, path.as_path());
        }
        res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
        return res;

        fn collect_file(acc: &mut Vec<Assist>, path: &Path) {
            let text = fs::read_to_string(path).unwrap();
            let comment_blocks = sourcegen::CommentBlock::extract("Assist", &text);

            for block in comment_blocks {
                // FIXME: doesn't support blank lines yet, need to tweak
                // `extract_comment_blocks` for that.
                let id = block.id;
                assert!(
                    id.chars().all(|it| it.is_ascii_lowercase() || it == '_'),
                    "invalid assist id: {:?}",
                    id
                );
                let mut lines = block.contents.iter();

                let doc = take_until(lines.by_ref(), "```").trim().to_string();
                assert!(
                    doc.chars().next().unwrap().is_ascii_uppercase() && doc.ends_with('.'),
                    "\n\n{}: assist docs should be proper sentences, with capitalization and a full stop at the end.\n\n{}\n\n",
                    id, doc,
                );

                let before = take_until(lines.by_ref(), "```");

                assert_eq!(lines.next().unwrap().as_str(), "->");
                assert_eq!(lines.next().unwrap().as_str(), "```");
                let after = take_until(lines.by_ref(), "```");
                let location = sourcegen::Location { file: path.to_path_buf(), line: block.line };
                acc.push(Assist { id, location, doc, before, after })
            }
        }

        fn take_until<'a>(lines: impl Iterator<Item = &'a String>, marker: &str) -> String {
            let mut buf = Vec::new();
            for line in lines {
                if line == marker {
                    break;
                }
                buf.push(line.clone());
            }
            buf.join("\n")
        }
    }
}

impl fmt::Display for Assist {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let before = self.before.replace("$0", "┃"); // Unicode pseudo-graphics bar
        let after = self.after.replace("$0", "┃");
        writeln!(
            f,
            "[discrete]\n=== `{}`
**Source:** {}

{}

.Before
```rust
{}```

.After
```rust
{}```",
            self.id,
            self.location,
            self.doc,
            hide_hash_comments(&before),
            hide_hash_comments(&after)
        )
    }
}

fn hide_hash_comments(text: &str) -> String {
    text.split('\n') // want final newline
        .filter(|&it| !(it.starts_with("# ") || it == "#"))
        .map(|it| format!("{}\n", it))
        .collect()
}

fn reveal_hash_comments(text: &str) -> String {
    text.split('\n') // want final newline
        .map(|it| {
            if let Some(stripped) = it.strip_prefix("# ") {
                stripped
            } else if it == "#" {
                ""
            } else {
                it
            }
        })
        .map(|it| format!("{}\n", it))
        .collect()
}
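For context, `Assist::collect` above parses `// Assist:` comment blocks out of the handler sources. A sketch of the shape it expects, shown on a hypothetical handler (the assist id, doc sentence, and snippets here are illustrative, not quoted from the repo):

```rust
// Assist: flip_comma
//
// Flips comma-separated items.
//
// ```
// fn main() {
//     ((1, 2)$0, (3, 4));
// }
// ```
// ->
// ```
// fn main() {
//     ((3, 4), (1, 2));
// }
// ```
pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
    // handler body elided in this sketch
    None
}
```

The assertions in `collect_file` enforce the conventions: the id must be lowercase with underscores, and the doc must read as a proper sentence, capitalized and ending in a full stop. The `$0` cursor marker is rendered as a `┃` bar in the generated manual.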
crates/ide_completion/Cargo.toml
@@ -29,5 +29,8 @@ profile = { path = "../profile", version = "0.0.0" }
 hir = { path = "../hir", version = "0.0.0" }
 
 [dev-dependencies]
-test_utils = { path = "../test_utils" }
 expect-test = "1.1"
+xshell = "0.1"
+
+test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
crates/ide_completion/src/tests.rs
@@ -10,6 +10,7 @@ mod items;
 mod pattern;
 mod type_pos;
 mod predicate;
+mod sourcegen;
 
 use std::mem;
crates/ide_completion/src/tests/sourcegen.rs (new file, 168 lines)
@@ -0,0 +1,168 @@
//! Generates descriptors structure for unstable feature from Unstable Book
use std::{
    borrow::Cow,
    fs,
    path::{Path, PathBuf},
};

use stdx::format_to;
use test_utils::project_root;
use xshell::cmd;

/// This clones rustc repo, and so is not worth to keep up-to-date. We update
/// manually by un-ignoring the test from time to time.
#[test]
#[ignore]
fn sourcegen_lint_completions() {
    let rust_repo = project_root().join("./target/rust");
    if !rust_repo.exists() {
        cmd!("git clone --depth=1 https://github.com/rust-lang/rust {rust_repo}").run().unwrap();
    }

    let mut contents = r"
pub struct Lint {
    pub label: &'static str,
    pub description: &'static str,
}
"
    .to_string();
    generate_lint_descriptor(&mut contents);
    contents.push('\n');

    generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into());
    contents.push('\n');

    cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run().unwrap();
    generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"));

    let contents =
        sourcegen::add_preamble("sourcegen_lint_completions", sourcegen::reformat(contents));

    let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
    sourcegen::ensure_file_contents(destination.as_path(), &contents);
}

fn generate_lint_descriptor(buf: &mut String) {
    let stdout = cmd!("rustc -W help").read().unwrap();
    let start_lints = stdout.find("----  -------  -------").unwrap();
    let start_lint_groups = stdout.find("----  ---------").unwrap();
    let end_lints = stdout.find("Lint groups provided by rustc:").unwrap();
    let end_lint_groups = stdout
        .find("Lint tools like Clippy can provide additional lints and lint groups.")
        .unwrap();
    buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
    buf.push('\n');
    let mut lints = stdout[start_lints..end_lints]
        .lines()
        .skip(1)
        .filter(|l| !l.is_empty())
        .map(|line| {
            let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
            let (_default_level, description) =
                rest.trim().split_once(char::is_whitespace).unwrap();
            (name.trim(), Cow::Borrowed(description.trim()))
        })
        .collect::<Vec<_>>();
    lints.extend(
        stdout[start_lint_groups..end_lint_groups].lines().skip(1).filter(|l| !l.is_empty()).map(
            |line| {
                let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
                (name.trim(), format!("lint group for: {}", lints.trim()).into())
            },
        ),
    );

    lints.sort_by(|(ident, _), (ident2, _)| ident.cmp(ident2));
    lints.into_iter().for_each(|(name, description)| {
        push_lint_completion(buf, &name.replace("-", "_"), &description)
    });
    buf.push_str("];\n");
}

fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) {
    let mut features = ["language-features", "library-features"]
        .iter()
        .flat_map(|it| sourcegen::list_files(&src_dir.join(it)))
        .filter(|path| {
            // Get all `.md` files
            path.extension().unwrap_or_default().to_str().unwrap_or_default() == "md"
        })
        .map(|path| {
            let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
            let doc = fs::read_to_string(path).unwrap();
            (feature_ident, doc)
        })
        .collect::<Vec<_>>();
    features.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));

    buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
    for (feature_ident, doc) in features.into_iter() {
        push_lint_completion(buf, &feature_ident, &doc)
    }
    buf.push('\n');
    buf.push_str("];\n");
}

#[derive(Default)]
struct ClippyLint {
    help: String,
    id: String,
}

fn unescape(s: &str) -> String {
    s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
}

fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
    let file_content = std::fs::read_to_string(path).unwrap();
    let mut clippy_lints: Vec<ClippyLint> = Vec::new();

    for line in file_content.lines().map(|line| line.trim()) {
        if line.starts_with(r#""id":"#) {
            let clippy_lint = ClippyLint {
                id: line
                    .strip_prefix(r#""id": ""#)
                    .expect("should be prefixed by id")
                    .strip_suffix(r#"","#)
                    .expect("should be suffixed by comma")
                    .into(),
                help: String::new(),
            };
            clippy_lints.push(clippy_lint)
        } else if line.starts_with(r#""What it does":"#) {
            // Typical line to strip: "What is doest": "Here is my useful content",
            let prefix_to_strip = r#""What it does": ""#;
            let suffix_to_strip = r#"","#;

            let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist");
            clippy_lint.help = line
                .strip_prefix(prefix_to_strip)
                .expect("should be prefixed by what it does")
                .strip_suffix(suffix_to_strip)
                .map(unescape)
                .expect("should be suffixed by comma");
        }
    }
    clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));

    buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
    buf.push('\n');
    for clippy_lint in clippy_lints.into_iter() {
        let lint_ident = format!("clippy::{}", clippy_lint.id);
        let doc = clippy_lint.help;
        push_lint_completion(buf, &lint_ident, &doc);
    }
    buf.push_str("];\n");
}

fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
    format_to!(
        buf,
        r###"    Lint {{
        label: "{}",
        description: r##"{}"##
    }},"###,
        label,
        description
    );
}
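For reference, each `push_lint_completion` call appends one `Lint` literal to the buffer, so after `sourcegen::reformat` the generated file is a long array of entries shaped like the sketch below (the values here are illustrative; the real entries appear in the `generated_lints.rs` diff that follows):

```rust
Lint {
    label: "absolute_paths_not_starting_with_crate",
    description: r##"fully qualified paths that start with a module name instead of `crate`, `self`, or an extern crate name"##,
},
```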
crates/ide_db/src/helpers/generated_lints.rs
@@ -1,10 +1,9 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_lint_completions`, do not edit by hand.
 
 pub struct Lint {
     pub label: &'static str,
     pub description: &'static str,
 }
 
 pub const DEFAULT_LINTS: &[Lint] = &[
     Lint {
         label: "absolute_paths_not_starting_with_crate",
@@ -71,8 +70,8 @@ pub const DEFAULT_LINTS: &[Lint] = &[
         description: r##"detects when an null pointer is dereferenced"##,
     },
     Lint {
-        label: "disjoint_capture_migration",
-        description: r##"Drop reorder and auto traits error because of `capture_disjoint_fields`"##,
+        label: "disjoint_capture_drop_reorder",
+        description: r##"Drop reorder because of `capture_disjoint_fields`"##,
     },
     Lint { label: "drop_bounds", description: r##"bounds of the form `T: Drop` are useless"## },
     Lint {
@@ -98,7 +97,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
     },
     Lint {
         label: "future_incompatible",
-        description: r##"lint group for: keyword-idents, anonymous-parameters, ellipsis-inclusive-range-patterns, forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, tyvar-behind-raw-pointer, bare-trait-objects, absolute-paths-not-starting-with-crate, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, array-into-iter"##,
+        description: r##"lint group for: keyword-idents, anonymous-parameters, forbidden-lint-groups, illegal-floating-point-literal-pattern, private-in-public, pub-use-of-private-extern-crate, invalid-type-param-default, const-err, unaligned-references, patterns-in-fns-without-body, missing-fragment-specifier, late-bound-lifetime-arguments, order-dependent-trait-objects, coherence-leak-check, tyvar-behind-raw-pointer, absolute-paths-not-starting-with-crate, unstable-name-collisions, where-clauses-object-safety, proc-macro-derive-resolution-fallback, macro-expanded-macro-exports-accessed-by-absolute-paths, ill-formed-attribute-input, conflicting-repr-hints, ambiguous-associated-items, mutable-borrow-reservation-conflict, indirect-structural-match, pointer-structural-match, nontrivial-structural-match, soft-unstable, cenum-impl-drop-cast, const-evaluatable-unchecked, uninhabited-static, unsupported-naked-functions, semicolon-in-expressions-from-macros, legacy-derive-helpers, proc-macro-back-compat, array-into-iter"##,
     },
     Lint {
         label: "ill_formed_attribute_input",
@@ -139,7 +138,7 @@ pub const DEFAULT_LINTS: &[Lint] = &[
     },
     Lint {
         label: "invalid_value",
-        description: r##"an invalid value is being created (such as a null reference)"##,
+        description: r##"an invalid value is being created (such as a NULL reference)"##,
     },
     Lint {
         label: "irrefutable_let_patterns",
@@ -291,10 +290,6 @@ pub const DEFAULT_LINTS: &[Lint] = &[
         label: "rust_2018_idioms",
         description: r##"lint group for: bare-trait-objects, unused-extern-crates, ellipsis-inclusive-range-patterns, elided-lifetimes-in-paths, explicit-outlives-requirements"##,
     },
-    Lint {
-        label: "rust_2021_compatibility",
-        description: r##"lint group for: ellipsis-inclusive-range-patterns, bare-trait-objects"##,
-    },
     Lint {
         label: "semicolon_in_expressions_from_macros",
         description: r##"trailing semicolon in macro body used as expression"##,
@@ -804,6 +799,7 @@ Inline assembly is currently supported on the following architectures:
 - Hexagon
 - MIPS32r2 and MIPS64r2
 - wasm32
+- BPF
 
 ## Basic usage
@@ -1229,7 +1225,7 @@ reg_spec := <register class> / "<explicit register>"
 operand_expr := expr / "_" / expr "=>" expr / expr "=>" "_"
 reg_operand := dir_spec "(" reg_spec ")" operand_expr
 operand := reg_operand / "const" const_expr / "sym" path
-option := "pure" / "nomem" / "readonly" / "preserves_flags" / "noreturn" / "nostack" / "att_syntax"
+option := "pure" / "nomem" / "readonly" / "preserves_flags" / "noreturn" / "nostack" / "att_syntax" / "raw"
 options := "options(" option *["," option] [","] ")"
 asm := "asm!(" format_string *("," format_string) *("," [ident "="] operand) ["," options] [","] ")"
 ```
@@ -1344,6 +1340,8 @@ Here is the list of currently supported register classes:
 | PowerPC | `reg_nonzero` | | `r[1-31]` | `b` |
 | PowerPC | `freg` | `f[0-31]` | `f` |
 | wasm32 | `local` | None\* | `r` |
+| BPF | `reg` | `r[0-10]` | `r` |
+| BPF | `wreg` | `w[0-10]` | `w` |
 
 > **Note**: On x86 we treat `reg_byte` differently from `reg` because the compiler can allocate `al` and `ah` separately whereas `reg` reserves the whole register.
 >
@@ -1389,6 +1387,8 @@ Each register class has constraints on which value types they can be used with.
 | PowerPC | `reg_nonzero` | None | `i8`, `i16`, `i32` |
 | PowerPC | `freg` | None | `f32`, `f64` |
 | wasm32 | `local` | None | `i8` `i16` `i32` `i64` `f32` `f64` |
+| BPF | `reg` | None | `i8` `i16` `i32` `i64` |
+| BPF | `wreg` | `alu32` | `i8` `i16` `i32` |
 
 > **Note**: For the purposes of the above table pointers, function pointers and `isize`/`usize` are treated as the equivalent integer type (`i16`/`i32`/`i64` depending on the target).
@@ -1448,6 +1448,7 @@ Some registers have multiple names. These are all treated by the compiler as ide
 | Hexagon | `r29` | `sp` |
 | Hexagon | `r30` | `fr` |
 | Hexagon | `r31` | `lr` |
+| BPF | `r[0-10]` | `w[0-10]` |
 
 Some registers cannot be used for input or output operands:
@@ -1549,6 +1550,7 @@ Currently the following options are defined:
 - `noreturn`: The `asm` block never returns, and its return type is defined as `!` (never). Behavior is undefined if execution falls through past the end of the asm code. A `noreturn` asm block behaves just like a function which doesn't return; notably, local variables in scope are not dropped before it is invoked.
 - `nostack`: The `asm` block does not push data to the stack, or write to the stack red-zone (if supported by the target). If this option is *not* used then the stack pointer is guaranteed to be suitably aligned (according to the target ABI) for a function call.
 - `att_syntax`: This option is only valid on x86, and causes the assembler to use the `.att_syntax prefix` mode of the GNU assembler. Register operands are substituted in with a leading `%`.
+- `raw`: This causes the template string to be parsed as a raw assembly string, with no special handling for `{` and `}`. This is primarily useful when including raw assembly code from an external file using `include_str!`.
 
 The compiler performs some additional checks on options:
 - The `nomem` and `readonly` options are mutually exclusive: it is a compile-time error to specify both.
@@ -3801,6 +3803,39 @@ fn cheap_clone<T: CheapToClone>(t: T) -> T {
 
 This is expected to replace the unstable `overlapping_marker_traits`
 feature, which applied to all empty traits (without needing an opt-in).
 "##,
     },
+    Lint {
+        label: "more_qualified_paths",
+        description: r##"# `more_qualified_paths`
+
+The `more_qualified_paths` feature can be used in order to enable the
+use of qualified paths in patterns.
+
+## Example
+
+```rust
+#![feature(more_qualified_paths)]
+
+fn main() {
+    // destructure through a qualified path
+    let <Foo as A>::Assoc { br } = StructStruct { br: 2 };
+}
+
+struct StructStruct {
+    br: i8,
+}
+
+struct Foo;
+
+trait A {
+    type Assoc;
+}
+
+impl A for Foo {
+    type Assoc = StructStruct;
+}
+```
+"##,
+    },
     Lint {
@@ -4681,60 +4716,6 @@ let result: Result<i32, ParseIntError> = try {
 };
 assert!(result.is_err());
 ```
 "##,
     },
-    Lint {
-        label: "try_trait",
-        description: r##"# `try_trait`
-
-The tracking issue for this feature is: [#42327]
-
-[#42327]: https://github.com/rust-lang/rust/issues/42327
-
-------------------------
-
-This introduces a new trait `Try` for extending the `?` operator to types
-other than `Result` (a part of [RFC 1859]). The trait provides the canonical
-way to _view_ a type in terms of a success/failure dichotomy. This will
-allow `?` to supplant the `try_opt!` macro on `Option` and the `try_ready!`
-macro on `Poll`, among other things.
-
-[RFC 1859]: https://github.com/rust-lang/rfcs/pull/1859
-
-Here's an example implementation of the trait:
-
-```rust,ignore (cannot-reimpl-Try)
-/// A distinct type to represent the `None` value of an `Option`.
-///
-/// This enables using the `?` operator on `Option`; it's rarely useful alone.
-#[derive(Debug)]
-#[unstable(feature = "try_trait", issue = "42327")]
-pub struct None { _priv: () }
-
-#[unstable(feature = "try_trait", issue = "42327")]
-impl<T> ops::Try for Option<T> {
-    type Ok = T;
-    type Error = None;
-
-    fn into_result(self) -> Result<T, None> {
-        self.ok_or(None { _priv: () })
-    }
-
-    fn from_ok(v: T) -> Self {
-        Some(v)
-    }
-
-    fn from_error(_: None) -> Self {
-        None
-    }
-}
-```
-
-Note the `Error` associated type here is a new marker. The `?` operator
-allows interconversion between different `Try` implementers only when
-the error type can be converted `Into` the error type of the enclosing
-function (or catch block). Having a distinct error type (as opposed to
-just `()`, or similar) restricts this to where it's semantically meaningful.
-"##,
-    },
     Lint {
@@ -5035,6 +5016,10 @@ checked."##,
         label: "clippy::almost_swapped",
         description: r##"Checks for `foo = bar; bar = foo` sequences."##,
     },
+    Lint {
+        label: "clippy::append_instead_of_extend",
+        description: r##"Checks for occurrences where one vector gets extended instead of append"##,
+    },
     Lint {
         label: "clippy::approx_constant",
         description: r##"Checks for floating point literals that approximate
@@ -5371,6 +5356,25 @@ explicitly or vice versa."##,
         label: "clippy::disallowed_method",
         description: r##"Denies the configured methods and functions in clippy.toml"##,
     },
+    Lint {
+        label: "clippy::disallowed_script_idents",
+        description: r##"Checks for usage of unicode scripts other than those explicitly allowed
+by the lint config.
+
+This lint doesn't take into account non-text scripts such as `Unknown` and `Linear_A`.
+It also ignores the `Common` script type.
+While configuring, be sure to use official script name [aliases] from
+[the list of supported scripts][supported_scripts].
+
+See also: [`non_ascii_idents`].
+
+[aliases]: http://www.unicode.org/reports/tr24/tr24-31.html#Script_Value_Aliases
+[supported_scripts]: https://www.unicode.org/iso15924/iso15924-codes.html"##,
+    },
+    Lint {
+        label: "clippy::disallowed_type",
+        description: r##"Denies the configured types in clippy.toml."##,
+    },
     Lint {
         label: "clippy::diverging_sub_expression",
         description: r##"Checks for diverging calls that are not match arms or
@@ -6147,6 +6151,11 @@ used to clamp values, but switched so that the result is constant."##,
         label: "clippy::missing_docs_in_private_items",
         description: r##"Warns if there is missing doc for any documentable item
(public or private)."##,
     },
+    Lint {
+        label: "clippy::missing_enforced_import_renames",
+        description: r##"Checks for imports that do not rename the item as specified
+in the `enforce-import-renames` config option."##,
+    },
     Lint {
         label: "clippy::missing_errors_doc",
@@ -6372,6 +6381,10 @@ concisely."##,
         description: r##"Checks for duplicate open options as well as combinations
that make no sense."##,
     },
+    Lint {
+        label: "clippy::nonstandard_macro_braces",
+        description: r##"Checks that common macros are used with consistent bracing."##,
+    },
     Lint {
         label: "clippy::not_unsafe_ptr_arg_deref",
         description: r##"Checks for public functions that dereference raw pointer
@@ -6560,6 +6573,7 @@ upper bound, e.g., `x..(y+1)`."##,
         label: "clippy::rc_buffer",
         description: r##"Checks for `Rc<T>` and `Arc<T>` when `T` is a mutable buffer type such as `String` or `Vec`."##,
     },
+    Lint { label: "clippy::rc_mutex", description: r##"Checks for `Rc<Mutex<T>>`."## },
     Lint {
         label: "clippy::redundant_allocation",
         description: r##"Checks for use of redundant allocations anywhere in the code."##,
crates/ide_diagnostics/Cargo.toml
@@ -27,3 +27,4 @@ ide_db = { path = "../ide_db", version = "0.0.0" }
 expect-test = "1.1"
 
 test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
crates/ide_diagnostics/src/lib.rs
@@ -49,6 +49,9 @@ mod handlers {
     pub(crate) mod unlinked_file;
 }
 
+#[cfg(test)]
+mod tests;
+
 use hir::{diagnostics::AnyDiagnostic, Semantics};
 use ide_db::{
     assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
@@ -223,152 +226,3 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
         source_change: None,
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use expect_test::Expect;
-    use ide_db::{
-        assists::AssistResolveStrategy,
-        base_db::{fixture::WithFixture, SourceDatabaseExt},
-        RootDatabase,
-    };
-    use stdx::trim_indent;
-    use test_utils::{assert_eq_text, extract_annotations};
-
-    use crate::{DiagnosticsConfig, Severity};
-
-    /// Takes a multi-file input fixture with annotated cursor positions,
-    /// and checks that:
-    /// * a diagnostic is produced
-    /// * the first diagnostic fix trigger range touches the input cursor position
-    /// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
-    #[track_caller]
-    pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
-        check_nth_fix(0, ra_fixture_before, ra_fixture_after);
-    }
-    /// Takes a multi-file input fixture with annotated cursor positions,
-    /// and checks that:
-    /// * a diagnostic is produced
-    /// * every diagnostic fixes trigger range touches the input cursor position
-    /// * that the contents of the file containing the cursor match `after` after each diagnostic fix is applied
-    pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
-        for (i, ra_fixture_after) in ra_fixtures_after.iter().enumerate() {
-            check_nth_fix(i, ra_fixture_before, ra_fixture_after)
-        }
-    }
-
-    #[track_caller]
-    fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
-        let after = trim_indent(ra_fixture_after);
-
-        let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
-        let diagnostic = super::diagnostics(
-            &db,
-            &DiagnosticsConfig::default(),
-            &AssistResolveStrategy::All,
-            file_position.file_id,
-        )
-        .pop()
-        .expect("no diagnostics");
-        let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
-        let actual = {
-            let source_change = fix.source_change.as_ref().unwrap();
-            let file_id = *source_change.source_file_edits.keys().next().unwrap();
-            let mut actual = db.file_text(file_id).to_string();
-
-            for edit in source_change.source_file_edits.values() {
-                edit.apply(&mut actual);
-            }
-            actual
-        };
-
-        assert_eq_text!(&after, &actual);
-        assert!(
-            fix.target.contains_inclusive(file_position.offset),
-            "diagnostic fix range {:?} does not touch cursor position {:?}",
-            fix.target,
-            file_position.offset
-        );
-    }
-
-    /// Checks that there's a diagnostic *without* fix at `$0`.
-    pub(crate) fn check_no_fix(ra_fixture: &str) {
-        let (db, file_position) = RootDatabase::with_position(ra_fixture);
-        let diagnostic = super::diagnostics(
-            &db,
-            &DiagnosticsConfig::default(),
-            &AssistResolveStrategy::All,
-            file_position.file_id,
-        )
-        .pop()
-        .unwrap();
-        assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
-    }
-
-    pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
-        let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
-        let diagnostics = super::diagnostics(
-            &db,
-            &DiagnosticsConfig::default(),
-            &AssistResolveStrategy::All,
-            file_id,
-        );
-        expect.assert_debug_eq(&diagnostics)
-    }
-
-    #[track_caller]
-    pub(crate) fn check_diagnostics(ra_fixture: &str) {
-        let mut config = DiagnosticsConfig::default();
-        config.disabled.insert("inactive-code".to_string());
-        check_diagnostics_with_config(config, ra_fixture)
-    }
-
-    #[track_caller]
-    pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
-        let (db, files) = RootDatabase::with_many_files(ra_fixture);
-        for file_id in files {
-            let diagnostics =
-                super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
-
-            let expected = extract_annotations(&*db.file_text(file_id));
-            let mut actual = diagnostics
-                .into_iter()
-                .map(|d| {
-                    let mut annotation = String::new();
-                    if let Some(fixes) = &d.fixes {
-                        assert!(!fixes.is_empty());
-                        annotation.push_str("💡 ")
-                    }
-                    annotation.push_str(match d.severity {
-                        Severity::Error => "error",
-                        Severity::WeakWarning => "weak",
-                    });
-                    annotation.push_str(": ");
-                    annotation.push_str(&d.message);
-                    (d.range, annotation)
-                })
-                .collect::<Vec<_>>();
-            actual.sort_by_key(|(range, _)| range.start());
-            assert_eq!(expected, actual);
-        }
-    }
-
-    #[test]
-    fn test_disabled_diagnostics() {
-        let mut config = DiagnosticsConfig::default();
-        config.disabled.insert("unresolved-module".into());
-
-        let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
-
-        let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
-        assert!(diagnostics.is_empty());
-
-        let diagnostics = super::diagnostics(
-            &db,
-            &DiagnosticsConfig::default(),
-            &AssistResolveStrategy::All,
-            file_id,
-        );
-        assert!(!diagnostics.is_empty());
-    }
-}
crates/ide_diagnostics/src/tests.rs (new file, 146 lines)
@@ -0,0 +1,146 @@
mod sourcegen;

use expect_test::Expect;
use ide_db::{
    assists::AssistResolveStrategy,
    base_db::{fixture::WithFixture, SourceDatabaseExt},
    RootDatabase,
};
use stdx::trim_indent;
use test_utils::{assert_eq_text, extract_annotations};

use crate::{DiagnosticsConfig, Severity};

/// Takes a multi-file input fixture with annotated cursor positions,
/// and checks that:
/// * a diagnostic is produced
/// * the first diagnostic fix trigger range touches the input cursor position
/// * that the contents of the file containing the cursor match `after` after the diagnostic fix is applied
#[track_caller]
pub(crate) fn check_fix(ra_fixture_before: &str, ra_fixture_after: &str) {
    check_nth_fix(0, ra_fixture_before, ra_fixture_after);
}
/// Takes a multi-file input fixture with annotated cursor positions,
/// and checks that:
/// * a diagnostic is produced
/// * every diagnostic fixes trigger range touches the input cursor position
/// * that the contents of the file containing the cursor match `after` after each diagnostic fix is applied
pub(crate) fn check_fixes(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) {
    for (i, ra_fixture_after) in ra_fixtures_after.iter().enumerate() {
        check_nth_fix(i, ra_fixture_before, ra_fixture_after)
    }
}

#[track_caller]
fn check_nth_fix(nth: usize, ra_fixture_before: &str, ra_fixture_after: &str) {
    let after = trim_indent(ra_fixture_after);

    let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
    let diagnostic = super::diagnostics(
        &db,
        &DiagnosticsConfig::default(),
        &AssistResolveStrategy::All,
        file_position.file_id,
    )
    .pop()
    .expect("no diagnostics");
    let fix = &diagnostic.fixes.expect("diagnostic misses fixes")[nth];
    let actual = {
        let source_change = fix.source_change.as_ref().unwrap();
        let file_id = *source_change.source_file_edits.keys().next().unwrap();
        let mut actual = db.file_text(file_id).to_string();

        for edit in source_change.source_file_edits.values() {
            edit.apply(&mut actual);
        }
        actual
    };

    assert_eq_text!(&after, &actual);
    assert!(
        fix.target.contains_inclusive(file_position.offset),
        "diagnostic fix range {:?} does not touch cursor position {:?}",
        fix.target,
        file_position.offset
    );
}

/// Checks that there's a diagnostic *without* fix at `$0`.
pub(crate) fn check_no_fix(ra_fixture: &str) {
    let (db, file_position) = RootDatabase::with_position(ra_fixture);
    let diagnostic = super::diagnostics(
        &db,
        &DiagnosticsConfig::default(),
        &AssistResolveStrategy::All,
        file_position.file_id,
    )
    .pop()
    .unwrap();
    assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
}

pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
    let (db, file_id) = RootDatabase::with_single_file(ra_fixture);
    let diagnostics = super::diagnostics(
        &db,
        &DiagnosticsConfig::default(),
        &AssistResolveStrategy::All,
        file_id,
    );
    expect.assert_debug_eq(&diagnostics)
}

#[track_caller]
pub(crate) fn check_diagnostics(ra_fixture: &str) {
    let mut config = DiagnosticsConfig::default();
    config.disabled.insert("inactive-code".to_string());
    check_diagnostics_with_config(config, ra_fixture)
}

#[track_caller]
pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixture: &str) {
    let (db, files) = RootDatabase::with_many_files(ra_fixture);
    for file_id in files {
        let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);

        let expected = extract_annotations(&*db.file_text(file_id));
        let mut actual = diagnostics
            .into_iter()
            .map(|d| {
                let mut annotation = String::new();
                if let Some(fixes) = &d.fixes {
                    assert!(!fixes.is_empty());
                    annotation.push_str("💡 ")
                }
                annotation.push_str(match d.severity {
                    Severity::Error => "error",
                    Severity::WeakWarning => "weak",
                });
                annotation.push_str(": ");
                annotation.push_str(&d.message);
                (d.range, annotation)
            })
            .collect::<Vec<_>>();
        actual.sort_by_key(|(range, _)| range.start());
        assert_eq!(expected, actual);
    }
}

#[test]
fn test_disabled_diagnostics() {
    let mut config = DiagnosticsConfig::default();
    config.disabled.insert("unresolved-module".into());

    let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);

    let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
    assert!(diagnostics.is_empty());

    let diagnostics = super::diagnostics(
        &db,
        &DiagnosticsConfig::default(),
        &AssistResolveStrategy::All,
        file_id,
    );
    assert!(!diagnostics.is_empty());
}
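As a usage sketch, fixtures for `check_diagnostics` embed the expected output as `//^^^` annotations, which `extract_annotations` turns into the `(range, message)` pairs compared above (the module name and exact message here are illustrative):

```rust
#[test]
fn unresolved_module_smoke_test() {
    check_diagnostics(
        r#"
mod does_not_exist;
//^^^^^^^^^^^^^^^^^ error: unresolved module
"#,
    );
}
```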
crates/ide_diagnostics/src/tests/sourcegen.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
//! Generates `assists.md` documentation.

use std::{fmt, fs, io, path::PathBuf};

use sourcegen::project_root;

#[test]
fn sourcegen_diagnostic_docs() {
    let diagnostics = Diagnostic::collect().unwrap();
    let contents =
        diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
    let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents);
    let dst = project_root().join("docs/user/generated_diagnostic.adoc");
    fs::write(&dst, &contents).unwrap();
}

#[derive(Debug)]
struct Diagnostic {
    id: String,
    location: sourcegen::Location,
    doc: String,
}

impl Diagnostic {
    fn collect() -> io::Result<Vec<Diagnostic>> {
        let handlers_dir = project_root().join("crates/ide_diagnostics/src/handlers");

        let mut res = Vec::new();
        for path in sourcegen::list_rust_files(&handlers_dir) {
            collect_file(&mut res, path)?;
        }
        res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
        return Ok(res);

        fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> io::Result<()> {
            let text = fs::read_to_string(&path)?;
            let comment_blocks = sourcegen::CommentBlock::extract("Diagnostic", &text);

            for block in comment_blocks {
                let id = block.id;
                if let Err(msg) = is_valid_diagnostic_name(&id) {
                    panic!("invalid diagnostic name: {:?}:\n  {}", id, msg)
                }
                let doc = block.contents.join("\n");
                let location = sourcegen::Location { file: path.clone(), line: block.line };
                acc.push(Diagnostic { id, location, doc })
            }

            Ok(())
        }
    }
}

fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
    let diagnostic = diagnostic.trim();
    if diagnostic.find(char::is_whitespace).is_some() {
        return Err("Diagnostic names can't contain whitespace symbols".into());
    }
    if diagnostic.chars().any(|c| c.is_ascii_uppercase()) {
        return Err("Diagnostic names can't contain uppercase symbols".into());
    }
    if diagnostic.chars().any(|c| !c.is_ascii()) {
        return Err("Diagnostic can't contain non-ASCII symbols".into());
    }

    Ok(())
}

impl fmt::Display for Diagnostic {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
    }
}
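The `// Diagnostic:` blocks this test collects sit on top of the diagnostic handlers. A sketch of the expected shape, on a hypothetical handler (the wording is illustrative):

```rust
// Diagnostic: unresolved-module
//
// This diagnostic is triggered if rust-analyzer is unable to discover the
// referred module.
pub(crate) fn unresolved_module(/* ... */) {
    // handler body elided in this sketch
}
```

Note that `is_valid_diagnostic_name` keeps ids lowercase, ASCII, and free of whitespace, which is why diagnostic names use `-` separators.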
crates/parser/src/syntax_kind/generated.rs
@@ -1,4 +1,4 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_ast`, do not edit by hand.
 
 #![allow(bad_style, missing_docs, unreachable_pub)]
 #[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."]
crates/rust-analyzer/Cargo.toml
@@ -66,7 +66,9 @@ jemallocator = { version = "0.4.1", package = "tikv-jemallocator", optional = tr
 
 [dev-dependencies]
 expect-test = "1.1"
 
+test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
 mbe = { path = "../mbe" }
 tt = { path = "../tt" }
crates/rust-analyzer/tests/slow-tests/main.rs
@@ -8,6 +8,7 @@
 //! specific JSON shapes here -- there's little value in such tests, as we can't
 //! be sure without a real client anyway.
 
+mod sourcegen;
 mod testdir;
 mod support;
crates/rust-analyzer/tests/slow-tests/sourcegen.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
//! Generates `assists.md` documentation.

use std::{fmt, fs, io, path::PathBuf};

#[test]
fn sourcegen_feature_docs() {
    let features = Feature::collect().unwrap();
    let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
    let contents = format!(
        "
// Generated file, do not edit by hand, see `sourcegen_feature_docs`.
{}
",
        contents.trim()
    );
    let dst = sourcegen::project_root().join("docs/user/generated_features.adoc");
    fs::write(&dst, &contents).unwrap();
}

#[derive(Debug)]
struct Feature {
    id: String,
    location: sourcegen::Location,
    doc: String,
}

impl Feature {
    fn collect() -> io::Result<Vec<Feature>> {
        let crates_dir = sourcegen::project_root().join("crates");

        let mut res = Vec::new();
        for path in sourcegen::list_rust_files(&crates_dir) {
            collect_file(&mut res, path)?;
        }
        res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
        return Ok(res);

        fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> io::Result<()> {
            let text = std::fs::read_to_string(&path)?;
            let comment_blocks = sourcegen::CommentBlock::extract("Feature", &text);

            for block in comment_blocks {
                let id = block.id;
                if let Err(msg) = is_valid_feature_name(&id) {
                    panic!("invalid feature name: {:?}:\n  {}", id, msg)
                }
                let doc = block.contents.join("\n");
                let location = sourcegen::Location { file: path.clone(), line: block.line };
                acc.push(Feature { id, location, doc })
            }

            Ok(())
        }
    }
}

fn is_valid_feature_name(feature: &str) -> Result<(), String> {
    'word: for word in feature.split_whitespace() {
        for &short in ["to", "and"].iter() {
            if word == short {
                continue 'word;
            }
        }
        for &short in ["To", "And"].iter() {
            if word == short {
                return Err(format!("Don't capitalize {:?}", word));
            }
        }
        if !word.starts_with(char::is_uppercase) {
            return Err(format!("Capitalize {:?}", word));
        }
    }
    Ok(())
}

impl fmt::Display for Feature {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
    }
}
crates/sourcegen/Cargo.toml (new file, 13 lines)
@@ -0,0 +1,13 @@
[package]
name = "sourcegen"
version = "0.0.0"
description = "TBD"
license = "MIT OR Apache-2.0"
authors = ["rust-analyzer developers"]
edition = "2018"

[lib]
doctest = false

[dependencies]
xshell = "0.1"
crates/sourcegen/src/lib.rs (new file, 195 lines)
@@ -0,0 +1,195 @@
//! rust-analyzer relies heavily on source code generation.
//!
//! Things like feature documentation or assist tests are implemented by
//! processing rust-analyzer's own source code and generating the appropriate
//! output. See `sourcegen_` tests in various crates.
//!
//! This crate contains utilities to make this kind of source-gen easy.

use std::{
    fmt, fs, mem,
    path::{Path, PathBuf},
};

use xshell::{cmd, pushenv};

pub fn list_rust_files(dir: &Path) -> Vec<PathBuf> {
    let mut res = list_files(dir);
    res.retain(|it| {
        it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs")
    });
    res
}

pub fn list_files(dir: &Path) -> Vec<PathBuf> {
    let mut res = Vec::new();
    let mut work = vec![dir.to_path_buf()];
    while let Some(dir) = work.pop() {
        for entry in dir.read_dir().unwrap() {
            let entry = entry.unwrap();
            let file_type = entry.file_type().unwrap();
            let path = entry.path();
            let is_hidden =
                path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
            if !is_hidden {
                if file_type.is_dir() {
                    work.push(path)
                } else if file_type.is_file() {
                    res.push(path)
                }
            }
        }
    }
    res
}

pub struct CommentBlock {
    pub id: String,
    pub line: usize,
    pub contents: Vec<String>,
}

impl CommentBlock {
    pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
        assert!(tag.starts_with(char::is_uppercase));

        let tag = format!("{}:", tag);
        let mut res = Vec::new();
        for (line, mut block) in do_extract_comment_blocks(text, true) {
            let first = block.remove(0);
            if let Some(id) = first.strip_prefix(&tag) {
                let id = id.trim().to_string();
                let block = CommentBlock { id, line, contents: block };
                res.push(block);
            }
        }
        res
    }

    pub fn extract_untagged(text: &str) -> Vec<CommentBlock> {
        let mut res = Vec::new();
        for (line, block) in do_extract_comment_blocks(text, false) {
            let id = String::new();
            let block = CommentBlock { id, line, contents: block };
            res.push(block);
        }
        res
    }
}

fn do_extract_comment_blocks(
    text: &str,
    allow_blocks_with_empty_lines: bool,
) -> Vec<(usize, Vec<String>)> {
    let mut res = Vec::new();

    let prefix = "// ";
    let lines = text.lines().map(str::trim_start);

    let mut block = (0, vec![]);
    for (line_num, line) in lines.enumerate() {
        if line == "//" && allow_blocks_with_empty_lines {
            block.1.push(String::new());
            continue;
        }

        let is_comment = line.starts_with(prefix);
        if is_comment {
            block.1.push(line[prefix.len()..].to_string());
        } else {
            if !block.1.is_empty() {
                res.push(mem::take(&mut block));
            }
            block.0 = line_num + 2;
        }
    }
    if !block.1.is_empty() {
        res.push(block)
    }
    res
}

#[derive(Debug)]
pub struct Location {
    pub file: PathBuf,
    pub line: usize,
}

impl fmt::Display for Location {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
        let path = path.replace('\\', "/");
        let name = self.file.file_name().unwrap();
        write!(
            f,
            "https://github.com/rust-analyzer/rust-analyzer/blob/master/{}#L{}[{}]",
            path,
            self.line,
            name.to_str().unwrap()
        )
    }
}

fn ensure_rustfmt() {
    let version = cmd!("rustfmt --version").read().unwrap_or_default();
    if !version.contains("stable") {
        panic!(
            "Failed to run rustfmt from toolchain 'stable'. \
             Please run `rustup component add rustfmt --toolchain stable` to install it.",
        )
    }
}

pub fn reformat(text: String) -> String {
    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
    ensure_rustfmt();
    let rustfmt_toml = project_root().join("rustfmt.toml");
    let mut stdout = cmd!("rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
        .stdin(text)
        .read()
        .unwrap();
    if !stdout.ends_with('\n') {
        stdout.push('\n');
    }
    stdout
}

pub fn add_preamble(generator: &'static str, mut text: String) -> String {
    let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator);
    text.insert_str(0, &preamble);
    text
}

/// Checks that the `file` has the specified `contents`. If that is not the
/// case, updates the file and then fails the test.
pub fn ensure_file_contents(file: &Path, contents: &str) {
    if let Ok(old_contents) = fs::read_to_string(file) {
        if normalize_newlines(&old_contents) == normalize_newlines(contents) {
            // File is already up to date.
            return;
        }
    }

    let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
    eprintln!(
        "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
        display_path.display()
    );
    if std::env::var("CI").is_ok() {
        eprintln!("    NOTE: run `cargo test` locally and commit the updated files\n");
    }
    if let Some(parent) = file.parent() {
        let _ = fs::create_dir_all(parent);
    }
    fs::write(file, contents).unwrap();
    panic!("some file was not up to date and has been updated, simply re-run the tests")
}

fn normalize_newlines(s: &str) -> String {
    s.replace("\r\n", "\n")
}

pub fn project_root() -> PathBuf {
    let dir = env!("CARGO_MANIFEST_DIR");
    PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
}
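Putting the pieces together, a downstream `sourcegen_` test built on this API looks roughly like the sketch below (hypothetical crate paths; `Example` is an invented tag and `generate` logic is a stand-in):

```rust
#[test]
fn sourcegen_example() {
    let root = sourcegen::project_root();
    let text = std::fs::read_to_string(root.join("crates/example/src/lib.rs")).unwrap();

    // Pick up every `// Example: <id>` comment block from the source.
    let blocks = sourcegen::CommentBlock::extract("Example", &text);
    let generated: String = blocks
        .iter()
        .map(|it| format!("pub const {}: () = ();\n", it.id.replace('-', "_").to_uppercase()))
        .collect();

    // Format, stamp with a "do not edit" preamble, and fail the test (after
    // rewriting the file in place) if the checked-in copy is stale.
    let generated = sourcegen::add_preamble("sourcegen_example", sourcegen::reformat(generated));
    sourcegen::ensure_file_contents(&root.join("crates/example/src/generated.rs"), &generated);
}
```

The `ensure_file_contents` flow is what makes `cargo test` self-updating: the first run rewrites the stale file and panics; re-running the tests then passes.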
crates/syntax/Cargo.toml
@@ -28,7 +28,11 @@ parser = { path = "../parser", version = "0.0.0" }
 profile = { path = "../profile", version = "0.0.0" }
 
 [dev-dependencies]
-test_utils = { path = "../test_utils" }
-walkdir = "2.3.1"
 rayon = "1"
 expect-test = "1.1"
+proc-macro2 = "1.0.8"
+quote = "1.0.2"
+ungrammar = "=1.14"
+
+test_utils = { path = "../test_utils" }
+sourcegen = { path = "../sourcegen" }
crates/syntax/src/ast/generated/nodes.rs
@@ -1,4 +1,4 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_ast`, do not edit by hand.
 
 use crate::{
     ast::{self, support, AstChildren, AstNode},
crates/syntax/src/ast/generated/tokens.rs
@@ -1,4 +1,4 @@
-//! Generated file, do not edit by hand, see `xtask/src/codegen`
+//! Generated by `sourcegen_ast`, do not edit by hand.
 
 use crate::{
     ast::AstToken,
crates/syntax/src/tests.rs
@@ -1,3 +1,7 @@
+mod sourcegen_tests;
+mod sourcegen_ast;
+mod ast_src;
+
 use std::{
     fmt::Write,
     fs,
@@ -152,20 +156,14 @@ fn reparse_fuzz_tests() {
 /// Test that Rust-analyzer can parse and validate the rust-analyzer
 #[test]
 fn self_hosting_parsing() {
-    let dir = project_root().join("crates");
-    let files = walkdir::WalkDir::new(dir)
-        .into_iter()
-        .filter_entry(|entry| {
-            // Get all files which are not in the crates/syntax/test_data folder
-            !entry.path().components().any(|component| component.as_os_str() == "test_data")
-        })
-        .map(|e| e.unwrap())
-        .filter(|entry| {
-            // Get all `.rs` files
-            !entry.path().is_dir() && (entry.path().extension().unwrap_or_default() == "rs")
-        })
-        .map(|entry| entry.into_path())
-        .collect::<Vec<_>>();
+    let crates_dir = project_root().join("crates");
+
+    let mut files = ::sourcegen::list_rust_files(&crates_dir);
+    files.retain(|path| {
+        // Get all files which are not in the crates/syntax/test_data folder
+        !path.components().any(|component| component.as_os_str() == "test_data")
+    });
+
     assert!(
         files.len() > 100,
         "self_hosting_parsing found too few files - is it running in the right directory?"
crates/syntax/src/tests/ast_src.rs (new file, 258 lines)
@@ -0,0 +1,258 @@
//! Defines input for code generation process.
|
||||
|
||||
pub(crate) struct KindsSrc<'a> {
|
||||
pub(crate) punct: &'a [(&'a str, &'a str)],
|
||||
pub(crate) keywords: &'a [&'a str],
|
||||
pub(crate) contextual_keywords: &'a [&'a str],
|
||||
pub(crate) literals: &'a [&'a str],
|
||||
pub(crate) tokens: &'a [&'a str],
|
||||
pub(crate) nodes: &'a [&'a str],
|
||||
}

pub(crate) const KINDS_SRC: KindsSrc = KindsSrc {
    punct: &[
        (";", "SEMICOLON"),
        (",", "COMMA"),
        ("(", "L_PAREN"),
        (")", "R_PAREN"),
        ("{", "L_CURLY"),
        ("}", "R_CURLY"),
        ("[", "L_BRACK"),
        ("]", "R_BRACK"),
        ("<", "L_ANGLE"),
        (">", "R_ANGLE"),
        ("@", "AT"),
        ("#", "POUND"),
        ("~", "TILDE"),
        ("?", "QUESTION"),
        ("$", "DOLLAR"),
        ("&", "AMP"),
        ("|", "PIPE"),
        ("+", "PLUS"),
        ("*", "STAR"),
        ("/", "SLASH"),
        ("^", "CARET"),
        ("%", "PERCENT"),
        ("_", "UNDERSCORE"),
        (".", "DOT"),
        ("..", "DOT2"),
        ("...", "DOT3"),
        ("..=", "DOT2EQ"),
        (":", "COLON"),
        ("::", "COLON2"),
        ("=", "EQ"),
        ("==", "EQ2"),
        ("=>", "FAT_ARROW"),
        ("!", "BANG"),
        ("!=", "NEQ"),
        ("-", "MINUS"),
        ("->", "THIN_ARROW"),
        ("<=", "LTEQ"),
        (">=", "GTEQ"),
        ("+=", "PLUSEQ"),
        ("-=", "MINUSEQ"),
        ("|=", "PIPEEQ"),
        ("&=", "AMPEQ"),
        ("^=", "CARETEQ"),
        ("/=", "SLASHEQ"),
        ("*=", "STAREQ"),
        ("%=", "PERCENTEQ"),
        ("&&", "AMP2"),
        ("||", "PIPE2"),
        ("<<", "SHL"),
        (">>", "SHR"),
        ("<<=", "SHLEQ"),
        (">>=", "SHREQ"),
    ],
    keywords: &[
        "as", "async", "await", "box", "break", "const", "continue", "crate", "dyn", "else",
        "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro",
        "match", "mod", "move", "mut", "pub", "ref", "return", "self", "static", "struct", "super",
        "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield",
    ],
    contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules"],
    literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"],
    tokens: &[
        "ERROR",
        "IDENT",
        "WHITESPACE",
        "LIFETIME_IDENT",
        "COMMENT",
        "SHEBANG",
        "L_DOLLAR",
        "R_DOLLAR",
    ],
    nodes: &[
        "SOURCE_FILE",
        "STRUCT",
        "UNION",
        "ENUM",
        "FN",
        "RET_TYPE",
        "EXTERN_CRATE",
        "MODULE",
        "USE",
        "STATIC",
        "CONST",
        "TRAIT",
        "IMPL",
        "TYPE_ALIAS",
        "MACRO_CALL",
        "MACRO_RULES",
        "MACRO_ARM",
        "TOKEN_TREE",
        "MACRO_DEF",
        "PAREN_TYPE",
        "TUPLE_TYPE",
        "MACRO_TYPE",
        "NEVER_TYPE",
        "PATH_TYPE",
        "PTR_TYPE",
        "ARRAY_TYPE",
        "SLICE_TYPE",
        "REF_TYPE",
        "INFER_TYPE",
        "FN_PTR_TYPE",
        "FOR_TYPE",
        "IMPL_TRAIT_TYPE",
        "DYN_TRAIT_TYPE",
        "OR_PAT",
        "PAREN_PAT",
        "REF_PAT",
        "BOX_PAT",
        "IDENT_PAT",
        "WILDCARD_PAT",
        "REST_PAT",
        "PATH_PAT",
        "RECORD_PAT",
        "RECORD_PAT_FIELD_LIST",
        "RECORD_PAT_FIELD",
        "TUPLE_STRUCT_PAT",
        "TUPLE_PAT",
        "SLICE_PAT",
        "RANGE_PAT",
        "LITERAL_PAT",
        "MACRO_PAT",
        "CONST_BLOCK_PAT",
        // atoms
        "TUPLE_EXPR",
        "ARRAY_EXPR",
        "PAREN_EXPR",
        "PATH_EXPR",
        "CLOSURE_EXPR",
        "IF_EXPR",
        "WHILE_EXPR",
        "CONDITION",
        "LOOP_EXPR",
        "FOR_EXPR",
        "CONTINUE_EXPR",
        "BREAK_EXPR",
        "LABEL",
        "BLOCK_EXPR",
        "RETURN_EXPR",
        "YIELD_EXPR",
        "MATCH_EXPR",
        "MATCH_ARM_LIST",
        "MATCH_ARM",
        "MATCH_GUARD",
        "RECORD_EXPR",
        "RECORD_EXPR_FIELD_LIST",
        "RECORD_EXPR_FIELD",
        "EFFECT_EXPR",
        "BOX_EXPR",
        // postfix
        "CALL_EXPR",
        "INDEX_EXPR",
        "METHOD_CALL_EXPR",
        "FIELD_EXPR",
        "AWAIT_EXPR",
        "TRY_EXPR",
        "CAST_EXPR",
        // unary
        "REF_EXPR",
        "PREFIX_EXPR",
        "RANGE_EXPR", // just weird
        "BIN_EXPR",
        "EXTERN_BLOCK",
        "EXTERN_ITEM_LIST",
        "VARIANT",
        "RECORD_FIELD_LIST",
        "RECORD_FIELD",
        "TUPLE_FIELD_LIST",
        "TUPLE_FIELD",
        "VARIANT_LIST",
        "ITEM_LIST",
        "ASSOC_ITEM_LIST",
        "ATTR",
        "META",
        "USE_TREE",
        "USE_TREE_LIST",
        "PATH",
        "PATH_SEGMENT",
        "LITERAL",
        "RENAME",
        "VISIBILITY",
        "WHERE_CLAUSE",
        "WHERE_PRED",
        "ABI",
        "NAME",
        "NAME_REF",
        "LET_STMT",
        "EXPR_STMT",
        "GENERIC_PARAM_LIST",
        "GENERIC_PARAM",
        "LIFETIME_PARAM",
        "TYPE_PARAM",
        "CONST_PARAM",
        "GENERIC_ARG_LIST",
        "LIFETIME",
        "LIFETIME_ARG",
        "TYPE_ARG",
        "ASSOC_TYPE_ARG",
        "CONST_ARG",
        "PARAM_LIST",
        "PARAM",
        "SELF_PARAM",
        "ARG_LIST",
        "TYPE_BOUND",
        "TYPE_BOUND_LIST",
        // macro related
        "MACRO_ITEMS",
        "MACRO_STMTS",
    ],
};

#[derive(Default, Debug)]
pub(crate) struct AstSrc {
    pub(crate) tokens: Vec<String>,
    pub(crate) nodes: Vec<AstNodeSrc>,
    pub(crate) enums: Vec<AstEnumSrc>,
}

#[derive(Debug)]
pub(crate) struct AstNodeSrc {
    pub(crate) doc: Vec<String>,
    pub(crate) name: String,
    pub(crate) traits: Vec<String>,
    pub(crate) fields: Vec<Field>,
}

#[derive(Debug, Eq, PartialEq)]
pub(crate) enum Field {
    Token(String),
    Node { name: String, ty: String, cardinality: Cardinality },
}

#[derive(Debug, Eq, PartialEq)]
pub(crate) enum Cardinality {
    Optional,
    Many,
}
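
// Illustrative only (not consumed by the build): lowering the ungrammar rule
// `RetType = '->' Type` yields `Field::Token("->".into())` plus
// `Field::Node { name: "type".into(), ty: "Type".into(), cardinality: Cardinality::Optional }`;
// see `lower_rule` in `sourcegen_ast.rs`.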

#[derive(Debug)]
pub(crate) struct AstEnumSrc {
    pub(crate) doc: Vec<String>,
    pub(crate) name: String,
    pub(crate) traits: Vec<String>,
    pub(crate) variants: Vec<String>,
}

747
crates/syntax/src/tests/sourcegen_ast.rs
Normal file

@ -0,0 +1,747 @@
//! This module generates the AST datatypes used by rust-analyzer.
//!
//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
//! wrappers around `SyntaxNode` which implement `syntax::AstNode`.
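//!
//! As a rough sketch (not the literal output), a grammar node such as
//! `ParenExpr` comes out as:
//!
//! ```text
//! pub struct ParenExpr { pub(crate) syntax: SyntaxNode }
//! impl AstNode for ParenExpr { /* can_cast / cast / syntax */ }
//! ```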

use std::{
    collections::{BTreeSet, HashSet},
    fmt::Write,
};

use proc_macro2::{Punct, Spacing};
use quote::{format_ident, quote};
use ungrammar::{rust_grammar, Grammar, Rule};

use crate::tests::ast_src::{
    AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC,
};

#[test]
fn sourcegen_ast() {
    let grammar = rust_grammar();
    let ast = lower(&grammar);

    let syntax_kinds_file =
        sourcegen::project_root().join("crates/parser/src/syntax_kind/generated.rs");
    let syntax_kinds = generate_syntax_kinds(KINDS_SRC);
    sourcegen::ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds);

    let ast_tokens_file =
        sourcegen::project_root().join("crates/syntax/src/ast/generated/tokens.rs");
    let contents = generate_tokens(&ast);
    sourcegen::ensure_file_contents(ast_tokens_file.as_path(), &contents);

    let ast_nodes_file = sourcegen::project_root().join("crates/syntax/src/ast/generated/nodes.rs");
    let contents = generate_nodes(KINDS_SRC, &ast);
    sourcegen::ensure_file_contents(ast_nodes_file.as_path(), &contents);
}

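// Generates the contents of `tokens.rs`: one newtype wrapper per token kind in
// `AstSrc::tokens`, each implementing `AstToken` and `Display`.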
fn generate_tokens(grammar: &AstSrc) -> String {
    let tokens = grammar.tokens.iter().map(|token| {
        let name = format_ident!("{}", token);
        let kind = format_ident!("{}", to_upper_snake_case(token));
        quote! {
            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
            pub struct #name {
                pub(crate) syntax: SyntaxToken,
            }
            impl std::fmt::Display for #name {
                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                    std::fmt::Display::fmt(&self.syntax, f)
                }
            }
            impl AstToken for #name {
                fn can_cast(kind: SyntaxKind) -> bool { kind == #kind }
                fn cast(syntax: SyntaxToken) -> Option<Self> {
                    if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
                }
                fn syntax(&self) -> &SyntaxToken { &self.syntax }
            }
        }
    });

    sourcegen::add_preamble(
        "sourcegen_ast",
        sourcegen::reformat(
            quote! {
                use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
                #(#tokens)*
            }
            .to_string(),
        ),
    )
    .replace("#[derive", "\n#[derive")
}

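// Generates node structs, enum wrappers, and their `AstNode` boilerplate. The
// `pretty_doc_comment_placeholder_workaround` attribute is a marker that is
// spliced out and replaced with real doc comments near the end of this
// function.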
fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
    let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
        .nodes
        .iter()
        .map(|node| {
            let name = format_ident!("{}", node.name);
            let kind = format_ident!("{}", to_upper_snake_case(&node.name));
            let traits = node.traits.iter().map(|trait_name| {
                let trait_name = format_ident!("{}", trait_name);
                quote!(impl ast::#trait_name for #name {})
            });

            let methods = node.fields.iter().map(|field| {
                let method_name = field.method_name();
                let ty = field.ty();

                if field.is_many() {
                    quote! {
                        pub fn #method_name(&self) -> AstChildren<#ty> {
                            support::children(&self.syntax)
                        }
                    }
                } else if let Some(token_kind) = field.token_kind() {
                    quote! {
                        pub fn #method_name(&self) -> Option<#ty> {
                            support::token(&self.syntax, #token_kind)
                        }
                    }
                } else {
                    quote! {
                        pub fn #method_name(&self) -> Option<#ty> {
                            support::child(&self.syntax)
                        }
                    }
                }
            });
            (
                quote! {
                    #[pretty_doc_comment_placeholder_workaround]
                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
                    pub struct #name {
                        pub(crate) syntax: SyntaxNode,
                    }

                    #(#traits)*

                    impl #name {
                        #(#methods)*
                    }
                },
                quote! {
                    impl AstNode for #name {
                        fn can_cast(kind: SyntaxKind) -> bool {
                            kind == #kind
                        }
                        fn cast(syntax: SyntaxNode) -> Option<Self> {
                            if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
                        }
                        fn syntax(&self) -> &SyntaxNode { &self.syntax }
                    }
                },
            )
        })
        .unzip();

    let (enum_defs, enum_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
        .enums
        .iter()
        .map(|en| {
            let variants: Vec<_> = en.variants.iter().map(|var| format_ident!("{}", var)).collect();
            let name = format_ident!("{}", en.name);
            let kinds: Vec<_> = variants
                .iter()
                .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
                .collect();
            let traits = en.traits.iter().map(|trait_name| {
                let trait_name = format_ident!("{}", trait_name);
                quote!(impl ast::#trait_name for #name {})
            });

            let ast_node = if en.name == "Stmt" {
                quote! {}
            } else {
                quote! {
                    impl AstNode for #name {
                        fn can_cast(kind: SyntaxKind) -> bool {
                            match kind {
                                #(#kinds)|* => true,
                                _ => false,
                            }
                        }
                        fn cast(syntax: SyntaxNode) -> Option<Self> {
                            let res = match syntax.kind() {
                                #(
                                    #kinds => #name::#variants(#variants { syntax }),
                                )*
                                _ => return None,
                            };
                            Some(res)
                        }
                        fn syntax(&self) -> &SyntaxNode {
                            match self {
                                #(
                                    #name::#variants(it) => &it.syntax,
                                )*
                            }
                        }
                    }
                }
            };

            (
                quote! {
                    #[pretty_doc_comment_placeholder_workaround]
                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
                    pub enum #name {
                        #(#variants(#variants),)*
                    }

                    #(#traits)*
                },
                quote! {
                    #(
                        impl From<#variants> for #name {
                            fn from(node: #variants) -> #name {
                                #name::#variants(node)
                            }
                        }
                    )*
                    #ast_node
                },
            )
        })
        .unzip();

    let enum_names = grammar.enums.iter().map(|it| &it.name);
    let node_names = grammar.nodes.iter().map(|it| &it.name);

    let display_impls =
        enum_names.chain(node_names.clone()).map(|it| format_ident!("{}", it)).map(|name| {
            quote! {
                impl std::fmt::Display for #name {
                    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        std::fmt::Display::fmt(self.syntax(), f)
                    }
                }
            }
        });

    let defined_nodes: HashSet<_> = node_names.collect();

    for node in kinds
        .nodes
        .iter()
        .map(|kind| to_pascal_case(kind))
        .filter(|name| !defined_nodes.iter().any(|&it| it == name))
    {
        drop(node)
        // FIXME: restore this
        // eprintln!("Warning: node {} not defined in ast source", node);
    }

    let ast = quote! {
        use crate::{
            SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
            ast::{self, AstNode, AstChildren, support},
            T,
        };

        #(#node_defs)*
        #(#enum_defs)*
        #(#node_boilerplate_impls)*
        #(#enum_boilerplate_impls)*
        #(#display_impls)*
    };

    let ast = ast.to_string().replace("T ! [", "T![");

    let mut res = String::with_capacity(ast.len() * 2);

    let mut docs =
        grammar.nodes.iter().map(|it| &it.doc).chain(grammar.enums.iter().map(|it| &it.doc));

    for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
        res.push_str(chunk);
        if let Some(doc) = docs.next() {
            write_doc_comment(doc, &mut res);
        }
    }

    sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(res))
}

fn write_doc_comment(contents: &[String], dest: &mut String) {
    for line in contents {
        writeln!(dest, "///{}", line).unwrap();
    }
}

fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
    let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
        .punct
        .iter()
        .filter(|(token, _name)| token.len() == 1)
        .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
        .unzip();

    let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
        if "{}[]()".contains(token) {
            let c = token.chars().next().unwrap();
            quote! { #c }
        } else {
            let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
            quote! { #(#cs)* }
        }
    });
    let punctuation =
        grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();

    let full_keywords_values = &grammar.keywords;
    let full_keywords =
        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));

    let all_keywords_values =
        grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
    let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
    let all_keywords = all_keywords_values
        .iter()
        .map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
        .collect::<Vec<_>>();

    let literals =
        grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();

    let ast = quote! {
        #![allow(bad_style, missing_docs, unreachable_pub)]
        /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`.
        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
        #[repr(u16)]
        pub enum SyntaxKind {
            // Technical SyntaxKinds: they appear temporarily during parsing,
            // but never end up in the final tree
            #[doc(hidden)]
            TOMBSTONE,
            #[doc(hidden)]
            EOF,
            #(#punctuation,)*
            #(#all_keywords,)*
            #(#literals,)*
            #(#tokens,)*
            #(#nodes,)*

            // Technical kind so that we can cast from u16 safely
            #[doc(hidden)]
            __LAST,
        }
        use self::SyntaxKind::*;

        impl SyntaxKind {
            pub fn is_keyword(self) -> bool {
                match self {
                    #(#all_keywords)|* => true,
                    _ => false,
                }
            }

            pub fn is_punct(self) -> bool {
                match self {
                    #(#punctuation)|* => true,
                    _ => false,
                }
            }

            pub fn is_literal(self) -> bool {
                match self {
                    #(#literals)|* => true,
                    _ => false,
                }
            }

            pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
                let kw = match ident {
                    #(#full_keywords_values => #full_keywords,)*
                    _ => return None,
                };
                Some(kw)
            }

            pub fn from_char(c: char) -> Option<SyntaxKind> {
                let tok = match c {
                    #(#single_byte_tokens_values => #single_byte_tokens,)*
                    _ => return None,
                };
                Some(tok)
            }
        }

        #[macro_export]
        macro_rules! T {
            #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
            #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
            [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT };
            [ident] => { $crate::SyntaxKind::IDENT };
            [shebang] => { $crate::SyntaxKind::SHEBANG };
        }
    };

    sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(ast.to_string()))
}

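/// e.g. "LetStmt" becomes "LET_STMT".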
fn to_upper_snake_case(s: &str) -> String {
    let mut buf = String::with_capacity(s.len());
    let mut prev = false;
    for c in s.chars() {
        if c.is_ascii_uppercase() && prev {
            buf.push('_')
        }
        prev = true;

        buf.push(c.to_ascii_uppercase());
    }
    buf
}

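/// e.g. "LetStmt" becomes "let_stmt".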
fn to_lower_snake_case(s: &str) -> String {
    let mut buf = String::with_capacity(s.len());
    let mut prev = false;
    for c in s.chars() {
        if c.is_ascii_uppercase() && prev {
            buf.push('_')
        }
        prev = true;

        buf.push(c.to_ascii_lowercase());
    }
    buf
}

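/// e.g. "let_stmt" becomes "LetStmt".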
fn to_pascal_case(s: &str) -> String {
    let mut buf = String::with_capacity(s.len());
    let mut prev_is_underscore = true;
    for c in s.chars() {
        if c == '_' {
            prev_is_underscore = true;
        } else if prev_is_underscore {
            buf.push(c.to_ascii_uppercase());
            prev_is_underscore = false;
        } else {
            buf.push(c.to_ascii_lowercase());
        }
    }
    buf
}

fn pluralize(s: &str) -> String {
    format!("{}s", s)
}

impl Field {
    fn is_many(&self) -> bool {
        matches!(self, Field::Node { cardinality: Cardinality::Many, .. })
    }
    fn token_kind(&self) -> Option<proc_macro2::TokenStream> {
        match self {
            Field::Token(token) => {
                let token: proc_macro2::TokenStream = token.parse().unwrap();
                Some(quote! { T![#token] })
            }
            _ => None,
        }
    }
    fn method_name(&self) -> proc_macro2::Ident {
        match self {
            Field::Token(name) => {
                let name = match name.as_str() {
                    ";" => "semicolon",
                    "->" => "thin_arrow",
                    "'{'" => "l_curly",
                    "'}'" => "r_curly",
                    "'('" => "l_paren",
                    "')'" => "r_paren",
                    "'['" => "l_brack",
                    "']'" => "r_brack",
                    "<" => "l_angle",
                    ">" => "r_angle",
                    "=" => "eq",
                    "!" => "excl",
                    "*" => "star",
                    "&" => "amp",
                    "_" => "underscore",
                    "." => "dot",
                    ".." => "dotdot",
                    "..." => "dotdotdot",
                    "..=" => "dotdoteq",
                    "=>" => "fat_arrow",
                    "@" => "at",
                    ":" => "colon",
                    "::" => "coloncolon",
                    "#" => "pound",
                    "?" => "question_mark",
                    "," => "comma",
                    "|" => "pipe",
                    _ => name,
                };
                format_ident!("{}_token", name)
            }
            Field::Node { name, .. } => {
                if name == "type" {
                    format_ident!("ty")
                } else {
                    format_ident!("{}", name)
                }
            }
        }
    }
    fn ty(&self) -> proc_macro2::Ident {
        match self {
            Field::Token(_) => format_ident!("SyntaxToken"),
            Field::Node { ty, .. } => format_ident!("{}", ty),
        }
    }
}

fn lower(grammar: &Grammar) -> AstSrc {
    let mut res = AstSrc::default();

    res.tokens = "Whitespace Comment String ByteString IntNumber FloatNumber"
        .split_ascii_whitespace()
        .map(|it| it.to_string())
        .collect::<Vec<_>>();

    let nodes = grammar.iter().collect::<Vec<_>>();

    for &node in &nodes {
        let name = grammar[node].name.clone();
        let rule = &grammar[node].rule;
        match lower_enum(grammar, rule) {
            Some(variants) => {
                let enum_src = AstEnumSrc { doc: Vec::new(), name, traits: Vec::new(), variants };
                res.enums.push(enum_src);
            }
            None => {
                let mut fields = Vec::new();
                lower_rule(&mut fields, grammar, None, rule);
                res.nodes.push(AstNodeSrc { doc: Vec::new(), name, traits: Vec::new(), fields });
            }
        }
    }

    deduplicate_fields(&mut res);
    extract_enums(&mut res);
    extract_struct_traits(&mut res);
    extract_enum_traits(&mut res);
    res
}

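// Returns `Some(variants)` when the rule is a plain alternation of nodes,
// which lowers to an `enum`; stray `;` tokens are skipped, and anything else
// disqualifies the rule.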
fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
    let alternatives = match rule {
        Rule::Alt(it) => it,
        _ => return None,
    };
    let mut variants = Vec::new();
    for alternative in alternatives {
        match alternative {
            Rule::Node(it) => variants.push(grammar[*it].name.clone()),
            Rule::Token(it) if grammar[*it].name == ";" => (),
            _ => return None,
        }
    }
    Some(variants)
}

fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
    if lower_comma_list(acc, grammar, label, rule) {
        return;
    }

    match rule {
        Rule::Node(node) => {
            let ty = grammar[*node].name.clone();
            let name = label.cloned().unwrap_or_else(|| to_lower_snake_case(&ty));
            let field = Field::Node { name, ty, cardinality: Cardinality::Optional };
            acc.push(field);
        }
        Rule::Token(token) => {
            assert!(label.is_none());
            let mut name = grammar[*token].name.clone();
            if name != "int_number" && name != "string" {
                if "[]{}()".contains(&name) {
                    name = format!("'{}'", name);
                }
                let field = Field::Token(name);
                acc.push(field);
            }
        }
        Rule::Rep(inner) => {
            if let Rule::Node(node) = &**inner {
                let ty = grammar[*node].name.clone();
                let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
                let field = Field::Node { name, ty, cardinality: Cardinality::Many };
                acc.push(field);
                return;
            }
            panic!("unhandled rule: {:?}", rule)
        }
        Rule::Labeled { label: l, rule } => {
            assert!(label.is_none());
            let manually_implemented = matches!(
                l.as_str(),
                "lhs"
                    | "rhs"
                    | "then_branch"
                    | "else_branch"
                    | "start"
                    | "end"
                    | "op"
                    | "index"
                    | "base"
                    | "value"
                    | "trait"
                    | "self_ty"
            );
            if manually_implemented {
                return;
            }
            lower_rule(acc, grammar, Some(l), rule);
        }
        Rule::Seq(rules) | Rule::Alt(rules) => {
            for rule in rules {
                lower_rule(acc, grammar, label, rule)
            }
        }
        Rule::Opt(rule) => lower_rule(acc, grammar, label, rule),
    }
}

// (T (',' T)* ','?)
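// e.g. a rule shaped like `Path (',' Path)* ','?` would lower to a single
// `Many` field named `paths` (illustrative; the shape check itself is below).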
fn lower_comma_list(
    acc: &mut Vec<Field>,
    grammar: &Grammar,
    label: Option<&String>,
    rule: &Rule,
) -> bool {
    let rule = match rule {
        Rule::Seq(it) => it,
        _ => return false,
    };
    let (node, repeat, trailing_comma) = match rule.as_slice() {
        [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_comma)] => {
            (node, repeat, trailing_comma)
        }
        _ => return false,
    };
    let repeat = match &**repeat {
        Rule::Seq(it) => it,
        _ => return false,
    };
    match repeat.as_slice() {
        [comma, Rule::Node(n)] if comma == &**trailing_comma && n == node => (),
        _ => return false,
    }
    let ty = grammar[*node].name.clone();
    let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
    let field = Field::Node { name, ty, cardinality: Cardinality::Many };
    acc.push(field);
    true
}

fn deduplicate_fields(ast: &mut AstSrc) {
    for node in &mut ast.nodes {
        let mut i = 0;
        'outer: while i < node.fields.len() {
            for j in 0..i {
                let f1 = &node.fields[i];
                let f2 = &node.fields[j];
                if f1 == f2 {
                    node.fields.remove(i);
                    continue 'outer;
                }
            }
            i += 1;
        }
    }
}

fn extract_enums(ast: &mut AstSrc) {
    for node in &mut ast.nodes {
        for enm in &ast.enums {
            let mut to_remove = Vec::new();
            for (i, field) in node.fields.iter().enumerate() {
                let ty = field.ty().to_string();
                if enm.variants.iter().any(|it| it == &ty) {
                    to_remove.push(i);
                }
            }
            if to_remove.len() == enm.variants.len() {
                node.remove_field(to_remove);
                let ty = enm.name.clone();
                let name = to_lower_snake_case(&ty);
                node.fields.push(Field::Node { name, ty, cardinality: Cardinality::Optional });
            }
        }
    }
}

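// If a node has every accessor method that one of the owner traits below
// provides, drop those fields and record the trait impl instead.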
fn extract_struct_traits(ast: &mut AstSrc) {
    let traits: &[(&str, &[&str])] = &[
        ("AttrsOwner", &["attrs"]),
        ("NameOwner", &["name"]),
        ("VisibilityOwner", &["visibility"]),
        ("GenericParamsOwner", &["generic_param_list", "where_clause"]),
        ("TypeBoundsOwner", &["type_bound_list", "colon_token"]),
        ("ModuleItemOwner", &["items"]),
        ("LoopBodyOwner", &["label", "loop_body"]),
        ("ArgListOwner", &["arg_list"]),
    ];

    for node in &mut ast.nodes {
        for (name, methods) in traits {
            extract_struct_trait(node, name, methods);
        }
    }
}

fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) {
    let mut to_remove = Vec::new();
    for (i, field) in node.fields.iter().enumerate() {
        let method_name = field.method_name().to_string();
        if methods.iter().any(|&it| it == method_name) {
            to_remove.push(i);
        }
    }
    if to_remove.len() == methods.len() {
        node.traits.push(trait_name.to_string());
        node.remove_field(to_remove);
    }
}

fn extract_enum_traits(ast: &mut AstSrc) {
    for enm in &mut ast.enums {
        if enm.name == "Stmt" {
            continue;
        }
        let nodes = &ast.nodes;
        let mut variant_traits = enm
            .variants
            .iter()
            .map(|var| nodes.iter().find(|it| &it.name == var).unwrap())
            .map(|node| node.traits.iter().cloned().collect::<BTreeSet<_>>());

        let mut enum_traits = match variant_traits.next() {
            Some(it) => it,
            None => continue,
        };
        for traits in variant_traits {
            enum_traits = enum_traits.intersection(&traits).cloned().collect();
        }
        enm.traits = enum_traits.into_iter().collect();
    }
}

impl AstNodeSrc {
    fn remove_field(&mut self, to_remove: Vec<usize>) {
        to_remove.into_iter().rev().for_each(|idx| {
            self.fields.remove(idx);
        });
    }
}

124
crates/syntax/src/tests/sourcegen_tests.rs
Normal file

@ -0,0 +1,124 @@
//! This module greps the parser's code for specially formatted comments and
//! turns them into tests.
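//!
//! The recognized shape (see `collect_tests` below) is a comment block whose
//! first line is `test <name>` or `test_err <name>`, with the following lines
//! forming the test's source text, e.g. (hypothetical name):
//!
//! ```text
//! // test fn_item
//! // fn f() {}
//! ```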

use std::{
    fs, iter,
    path::{Path, PathBuf},
};

use rustc_hash::FxHashMap;

#[test]
fn sourcegen_parser_tests() {
    let grammar_dir = sourcegen::project_root().join(Path::new("crates/parser/src/grammar"));
    let tests = tests_from_dir(&grammar_dir);

    install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok");
    install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err");

    fn install_tests(tests: &FxHashMap<String, Test>, into: &str) {
        let tests_dir = sourcegen::project_root().join(into);
        if !tests_dir.is_dir() {
            fs::create_dir_all(&tests_dir).unwrap();
        }
        // ok is never actually read, but it needs to be specified to create a Test in existing_tests
        let existing = existing_tests(&tests_dir, true);
        for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
            panic!("Test is deleted: {}", t);
        }

        let mut new_idx = existing.len() + 1;
        for (name, test) in tests {
            let path = match existing.get(name) {
                Some((path, _test)) => path.clone(),
                None => {
                    let file_name = format!("{:04}_{}.rs", new_idx, name);
                    new_idx += 1;
                    tests_dir.join(file_name)
                }
            };
            sourcegen::ensure_file_contents(&path, &test.text);
        }
    }
}

#[derive(Debug)]
struct Test {
    name: String,
    text: String,
    ok: bool,
}

#[derive(Default, Debug)]
struct Tests {
    ok: FxHashMap<String, Test>,
    err: FxHashMap<String, Test>,
}

fn collect_tests(s: &str) -> Vec<Test> {
    let mut res = Vec::new();
    for comment_block in sourcegen::CommentBlock::extract_untagged(s) {
        let first_line = &comment_block.contents[0];
        let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
            (name.to_string(), true)
        } else if let Some(name) = first_line.strip_prefix("test_err ") {
            (name.to_string(), false)
        } else {
            continue;
        };
        let text: String = comment_block.contents[1..]
            .iter()
            .cloned()
            .chain(iter::once(String::new()))
            .collect::<Vec<_>>()
            .join("\n");
        assert!(!text.trim().is_empty() && text.ends_with('\n'));
        res.push(Test { name, text, ok })
    }
    res
}

fn tests_from_dir(dir: &Path) -> Tests {
    let mut res = Tests::default();
    for entry in sourcegen::list_rust_files(dir) {
        process_file(&mut res, entry.as_path());
    }
    let grammar_rs = dir.parent().unwrap().join("grammar.rs");
    process_file(&mut res, &grammar_rs);
    return res;

    fn process_file(res: &mut Tests, path: &Path) {
        let text = fs::read_to_string(path).unwrap();

        for test in collect_tests(&text) {
            if test.ok {
                if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
                    panic!("Duplicate test: {}", old_test.name);
                }
            } else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
                panic!("Duplicate test: {}", old_test.name);
            }
        }
    }
}

fn existing_tests(dir: &Path, ok: bool) -> FxHashMap<String, (PathBuf, Test)> {
    let mut res = FxHashMap::default();
    for file in fs::read_dir(dir).unwrap() {
        let file = file.unwrap();
        let path = file.path();
        if path.extension().unwrap_or_default() != "rs" {
            continue;
        }
        let name = {
            let file_name = path.file_name().unwrap().to_str().unwrap();
            file_name[5..file_name.len() - 3].to_string()
        };
        let text = fs::read_to_string(&path).unwrap();
        let test = Test { name: name.clone(), text, ok };
        if let Some(old) = res.insert(name, (path, test)) {
            println!("Duplicate test: {:?}", old);
        }
    }
    res
}