Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-10-03 07:04:49 +00:00.
Merge #1337

1337: Move syntax errors out of syntax tree r=matklad a=matklad

I am not really sure if it's a good idea, but `SyntaxError`s do not really belong to a `SyntaxTree`. So let's just store them on the side?

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
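Editor's note: the new `Parse` type itself is defined in `ra_syntax` and does not appear in the hunks below, so the following is only a sketch inferred from the call sites in this diff (`parse.tree`, `parse.errors`, `parse.ok()`, `parse.reparse(..)`); the exact field types and the `ok` signature are assumptions, not the verbatim definition.

```rust
use ra_syntax::{SourceFile, SyntaxError, TreeArc};

/// Sketch of the result of `SourceFile::parse` / `db.parse` after this change,
/// inferred from how callers in this commit use it; not the verbatim definition.
pub struct Parse {
    /// A syntax tree is always produced, even for invalid input.
    pub tree: TreeArc<SourceFile>,
    /// Syntax errors now live next to the tree instead of inside it.
    pub errors: Vec<SyntaxError>,
}

impl Parse {
    /// Convenience used by the tests below: yield the tree only when the input
    /// parsed cleanly, which is why they call `.ok().unwrap()`.
    pub fn ok(self) -> Result<TreeArc<SourceFile>, Vec<SyntaxError>> {
        if self.errors.is_empty() {
            Ok(self.tree)
        } else {
            Err(self.errors)
        }
    }
}
```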
Commit 7a1cae59ac, 89 changed files with 705 additions and 687 deletions.
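Most of the churn in the 89 files below is mechanical: callers that used to get a `TreeArc<SourceFile>` from `db.parse` or `SourceFile::parse` now get a `Parse` and pick the piece they need. A hedged before/after sketch (illustrative only, not a hunk from this commit):

```rust
use ra_db::{FileId, SourceDatabase};

// Illustrative only; not a hunk from this commit.
fn example(db: &impl SourceDatabase, file_id: FileId) {
    // Before this commit, `db.parse` returned the tree directly and the tree
    // carried its own errors:
    //     let file = db.parse(file_id);      // TreeArc<SourceFile>
    //     let errors = file.errors();
    //
    // After this commit, the tree and the errors travel separately:
    let parse = db.parse(file_id);
    let _tree = parse.tree;        // just the syntax tree
    let _errors = &parse.errors;   // syntax errors stored on the side

    // Tests that want to assert the input parsed cleanly use the `ok()` helper:
    let _strict = db.parse(file_id).ok().unwrap();
}
```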
@@ -71,7 +71,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
     where
         F: FnOnce(AssistCtx<DB>) -> T,
     {
-        let source_file = &db.parse(frange.file_id);
+        let source_file = &db.parse(frange.file_id).tree;
         let assist =
             if should_compute_edit { Assist::Resolved(vec![]) } else { Assist::Unresolved(vec![]) };
 
|
@ -283,7 +283,7 @@ impl AstBuilder<ast::NameRef> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
|
fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> {
|
||||||
let file = SourceFile::parse(text);
|
let file = SourceFile::parse(text).tree;
|
||||||
let res = file.syntax().descendants().find_map(N::cast).unwrap().to_owned();
|
let res = file.syntax().descendants().find_map(N::cast).unwrap().to_owned();
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
@ -292,7 +292,7 @@ mod tokens {
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*, T};
|
use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*, T};
|
||||||
|
|
||||||
static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;"));
|
static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;").tree);
|
||||||
|
|
||||||
pub(crate) fn comma() -> SyntaxToken<'static> {
|
pub(crate) fn comma() -> SyntaxToken<'static> {
|
||||||
SOURCE_FILE
|
SOURCE_FILE
|
||||||
|
@ -326,7 +326,7 @@ mod tokens {
|
||||||
|
|
||||||
impl WsBuilder {
|
impl WsBuilder {
|
||||||
pub(crate) fn new(text: &str) -> WsBuilder {
|
pub(crate) fn new(text: &str) -> WsBuilder {
|
||||||
WsBuilder(SourceFile::parse(text))
|
WsBuilder(SourceFile::parse(text).ok().unwrap())
|
||||||
}
|
}
|
||||||
pub(crate) fn ws(&self) -> SyntaxToken<'_> {
|
pub(crate) fn ws(&self) -> SyntaxToken<'_> {
|
||||||
self.0.syntax().first_child_or_token().unwrap().as_token().unwrap()
|
self.0.syntax().first_child_or_token().unwrap().as_token().unwrap()
|
||||||
|
|
|
@ -34,7 +34,7 @@ fn main() -> Result<()> {
|
||||||
if !matches.is_present("no-dump") {
|
if !matches.is_present("no-dump") {
|
||||||
println!("{}", file.syntax().debug_dump());
|
println!("{}", file.syntax().debug_dump());
|
||||||
}
|
}
|
||||||
::std::mem::forget(file);
|
std::mem::forget(file);
|
||||||
}
|
}
|
||||||
("symbols", _) => {
|
("symbols", _) => {
|
||||||
let file = file()?;
|
let file = file()?;
|
||||||
|
@ -60,11 +60,11 @@ fn main() -> Result<()> {
|
||||||
|
|
||||||
fn file() -> Result<TreeArc<SourceFile>> {
|
fn file() -> Result<TreeArc<SourceFile>> {
|
||||||
let text = read_stdin()?;
|
let text = read_stdin()?;
|
||||||
Ok(SourceFile::parse(&text))
|
Ok(SourceFile::parse(&text).tree)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn read_stdin() -> Result<String> {
|
fn read_stdin() -> Result<String> {
|
||||||
let mut buff = String::new();
|
let mut buff = String::new();
|
||||||
::std::io::stdin().read_to_string(&mut buff)?;
|
std::io::stdin().read_to_string(&mut buff)?;
|
||||||
Ok(buff)
|
Ok(buff)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -4,7 +4,7 @@ mod input;
 
 use std::{panic, sync::Arc};
 
-use ra_syntax::{TextUnit, TextRange, SourceFile, TreeArc};
+use ra_syntax::{TextUnit, TextRange, SourceFile, Parse};
 use relative_path::RelativePathBuf;
 use ra_prof::profile;
 
@@ -74,7 +74,7 @@ pub trait SourceDatabase: CheckCanceled + std::fmt::Debug {
     fn file_text(&self, file_id: FileId) -> Arc<String>;
     // Parses the file into the syntax tree.
    #[salsa::invoke(parse_query)]
-    fn parse(&self, file_id: FileId) -> TreeArc<SourceFile>;
+    fn parse(&self, file_id: FileId) -> Parse;
     /// Path to a file, relative to the root of its source root.
    #[salsa::input]
     fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf;
@@ -98,7 +98,7 @@ fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc<Vec<Cra
     Arc::new(res)
 }
 
-fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> TreeArc<SourceFile> {
+fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> Parse {
     let _p = profile("parse_query");
     let text = db.file_text(file_id);
     SourceFile::parse(&*text)
@ -116,7 +116,7 @@ impl ModuleSource {
|
||||||
) -> ModuleSource {
|
) -> ModuleSource {
|
||||||
match (file_id, decl_id) {
|
match (file_id, decl_id) {
|
||||||
(Some(file_id), _) => {
|
(Some(file_id), _) => {
|
||||||
let source_file = db.parse(file_id);
|
let source_file = db.parse(file_id).tree;
|
||||||
ModuleSource::SourceFile(source_file)
|
ModuleSource::SourceFile(source_file)
|
||||||
}
|
}
|
||||||
(None, Some(item_id)) => {
|
(None, Some(item_id)) => {
|
||||||
|
|
|
@ -190,7 +190,7 @@ mod tests {
|
||||||
};
|
};
|
||||||
|
|
||||||
let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
|
let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
|
||||||
let file = db.parse(file_id);
|
let file = db.parse(file_id).ok().unwrap();
|
||||||
let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
|
let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
|
||||||
let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
|
let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
|
||||||
|
|
||||||
|
@ -288,7 +288,7 @@ mod tests {
|
||||||
let (off, code) = extract_offset(code);
|
let (off, code) = extract_offset(code);
|
||||||
|
|
||||||
let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
|
let (db, _source_root, file_id) = MockDatabase::with_single_file(&code);
|
||||||
let file = db.parse(file_id);
|
let file = db.parse(file_id).ok().unwrap();
|
||||||
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
|
let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
|
||||||
.expect("failed to find a name at the target offset");
|
.expect("failed to find a name at the target offset");
|
||||||
let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
|
let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
|
||||||
|
|
|
@ -75,7 +75,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
|
||||||
}
|
}
|
||||||
let source_map = self.func.body_source_map(db);
|
let source_map = self.func.body_source_map(db);
|
||||||
let file_id = self.func.source(db).0;
|
let file_id = self.func.source(db).0;
|
||||||
let source_file = db.parse(file_id.original_file(db));
|
let source_file = db.parse(file_id.original_file(db)).tree;
|
||||||
if let Some(field_list_node) = source_map
|
if let Some(field_list_node) = source_map
|
||||||
.expr_syntax(id)
|
.expr_syntax(id)
|
||||||
.map(|ptr| ptr.to_node(source_file.syntax()))
|
.map(|ptr| ptr.to_node(source_file.syntax()))
|
||||||
|
|
|
@@ -64,7 +64,7 @@ impl HirFileId {
         db.check_canceled();
         let _p = profile("parse_or_expand_query");
         match file_id.0 {
-            HirFileIdRepr::File(file_id) => Some(db.parse(file_id).syntax().to_owned()),
+            HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree.syntax().to_owned()),
             HirFileIdRepr::Macro(macro_file) => {
                 let macro_call_id = macro_file.macro_call_id;
                 let tt = db
@ -46,7 +46,7 @@ pub fn module_from_declaration(
|
||||||
|
|
||||||
/// Locates the module by position in the source code.
|
/// Locates the module by position in the source code.
|
||||||
pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> {
|
pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Module> {
|
||||||
let file = db.parse(position.file_id);
|
let file = db.parse(position.file_id).tree;
|
||||||
match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) {
|
match find_node_at_offset::<ast::Module>(file.syntax(), position.offset) {
|
||||||
Some(m) if !m.has_semi() => module_from_inline(db, position.file_id.into(), m),
|
Some(m) if !m.has_semi() => module_from_inline(db, position.file_id.into(), m),
|
||||||
_ => module_from_file_id(db, position.file_id.into()),
|
_ => module_from_file_id(db, position.file_id.into()),
|
||||||
|
|
|
@ -59,7 +59,7 @@ fn test() {
|
||||||
let b: usize = 1;
|
let b: usize = 1;
|
||||||
let c = b;
|
let c = b;
|
||||||
}
|
}
|
||||||
}"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[11; 71) '{ ...= b; }': ()
|
[11; 71) '{ ...= b; }': ()
|
||||||
[21; 22) 'a': isize
|
[21; 22) 'a': isize
|
||||||
|
@ -85,7 +85,7 @@ fn test() {
|
||||||
a();
|
a();
|
||||||
b::c();
|
b::c();
|
||||||
}
|
}
|
||||||
}"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[15; 20) '{ 1 }': u32
|
[15; 20) '{ 1 }': u32
|
||||||
[17; 18) '1': u32
|
[17; 18) '1': u32
|
||||||
|
@ -1004,7 +1004,7 @@ fn infer_tuple_struct_generics() {
|
||||||
assert_snapshot_matches!(
|
assert_snapshot_matches!(
|
||||||
infer(r#"
|
infer(r#"
|
||||||
struct A<T>(T);
|
struct A<T>(T);
|
||||||
enum Option<T> { Some(T), None };
|
enum Option<T> { Some(T), None }
|
||||||
use Option::*;
|
use Option::*;
|
||||||
|
|
||||||
fn test() {
|
fn test() {
|
||||||
|
@ -1017,22 +1017,24 @@ fn test() {
|
||||||
}
|
}
|
||||||
"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[77; 185) '{ ...one; }': ()
|
⋮
|
||||||
[83; 84) 'A': A<i32>(T) -> A<T>
|
⋮[76; 184) '{ ...one; }': ()
|
||||||
[83; 88) 'A(42)': A<i32>
|
⋮[82; 83) 'A': A<i32>(T) -> A<T>
|
||||||
[85; 87) '42': i32
|
⋮[82; 87) 'A(42)': A<i32>
|
||||||
[94; 95) 'A': A<u128>(T) -> A<T>
|
⋮[84; 86) '42': i32
|
||||||
[94; 103) 'A(42u128)': A<u128>
|
⋮[93; 94) 'A': A<u128>(T) -> A<T>
|
||||||
[96; 102) '42u128': u128
|
⋮[93; 102) 'A(42u128)': A<u128>
|
||||||
[109; 113) 'Some': Some<&str>(T) -> Option<T>
|
⋮[95; 101) '42u128': u128
|
||||||
[109; 118) 'Some("x")': Option<&str>
|
⋮[108; 112) 'Some': Some<&str>(T) -> Option<T>
|
||||||
[114; 117) '"x"': &str
|
⋮[108; 117) 'Some("x")': Option<&str>
|
||||||
[124; 136) 'Option::Some': Some<&str>(T) -> Option<T>
|
⋮[113; 116) '"x"': &str
|
||||||
[124; 141) 'Option...e("x")': Option<&str>
|
⋮[123; 135) 'Option::Some': Some<&str>(T) -> Option<T>
|
||||||
[137; 140) '"x"': &str
|
⋮[123; 140) 'Option...e("x")': Option<&str>
|
||||||
[147; 151) 'None': Option<{unknown}>
|
⋮[136; 139) '"x"': &str
|
||||||
[161; 162) 'x': Option<i64>
|
⋮[146; 150) 'None': Option<{unknown}>
|
||||||
[178; 182) 'None': Option<i64>"###
|
⋮[160; 161) 'x': Option<i64>
|
||||||
|
⋮[177; 181) 'None': Option<i64>
|
||||||
|
"###
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1268,7 +1270,7 @@ impl Struct {
|
||||||
const FOO: u32 = 1;
|
const FOO: u32 = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
enum Enum;
|
enum Enum {}
|
||||||
|
|
||||||
impl Enum {
|
impl Enum {
|
||||||
const BAR: u32 = 2;
|
const BAR: u32 = 2;
|
||||||
|
@ -1291,16 +1293,18 @@ fn test() {
|
||||||
}
|
}
|
||||||
"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[52; 53) '1': u32
|
⋮
|
||||||
[103; 104) '2': u32
|
⋮[52; 53) '1': u32
|
||||||
[211; 212) '5': u32
|
⋮[105; 106) '2': u32
|
||||||
[227; 305) '{ ...:ID; }': ()
|
⋮[213; 214) '5': u32
|
||||||
[237; 238) 'x': u32
|
⋮[229; 307) '{ ...:ID; }': ()
|
||||||
[241; 252) 'Struct::FOO': u32
|
⋮[239; 240) 'x': u32
|
||||||
[262; 263) 'y': u32
|
⋮[243; 254) 'Struct::FOO': u32
|
||||||
[266; 275) 'Enum::BAR': u32
|
⋮[264; 265) 'y': u32
|
||||||
[285; 286) 'z': {unknown}
|
⋮[268; 277) 'Enum::BAR': u32
|
||||||
[289; 302) 'TraitTest::ID': {unknown}"###
|
⋮[287; 288) 'z': {unknown}
|
||||||
|
⋮[291; 304) 'TraitTest::ID': {unknown}
|
||||||
|
"###
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1308,7 +1312,7 @@ fn test() {
|
||||||
fn infer_associated_method_struct() {
|
fn infer_associated_method_struct() {
|
||||||
assert_snapshot_matches!(
|
assert_snapshot_matches!(
|
||||||
infer(r#"
|
infer(r#"
|
||||||
struct A { x: u32 };
|
struct A { x: u32 }
|
||||||
|
|
||||||
impl A {
|
impl A {
|
||||||
fn new() -> A {
|
fn new() -> A {
|
||||||
|
@ -1321,15 +1325,17 @@ fn test() {
|
||||||
}
|
}
|
||||||
"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[50; 76) '{ ... }': A
|
⋮
|
||||||
[60; 70) 'A { x: 0 }': A
|
⋮[49; 75) '{ ... }': A
|
||||||
[67; 68) '0': u32
|
⋮[59; 69) 'A { x: 0 }': A
|
||||||
[89; 123) '{ ...a.x; }': ()
|
⋮[66; 67) '0': u32
|
||||||
[99; 100) 'a': A
|
⋮[88; 122) '{ ...a.x; }': ()
|
||||||
[103; 109) 'A::new': fn new() -> A
|
⋮[98; 99) 'a': A
|
||||||
[103; 111) 'A::new()': A
|
⋮[102; 108) 'A::new': fn new() -> A
|
||||||
[117; 118) 'a': A
|
⋮[102; 110) 'A::new()': A
|
||||||
[117; 120) 'a.x': u32"###
|
⋮[116; 117) 'a': A
|
||||||
|
⋮[116; 119) 'a.x': u32
|
||||||
|
"###
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1337,7 +1343,7 @@ fn test() {
|
||||||
fn infer_associated_method_enum() {
|
fn infer_associated_method_enum() {
|
||||||
assert_snapshot_matches!(
|
assert_snapshot_matches!(
|
||||||
infer(r#"
|
infer(r#"
|
||||||
enum A { B, C };
|
enum A { B, C }
|
||||||
|
|
||||||
impl A {
|
impl A {
|
||||||
pub fn b() -> A {
|
pub fn b() -> A {
|
||||||
|
@ -1355,19 +1361,21 @@ fn test() {
|
||||||
}
|
}
|
||||||
"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[48; 68) '{ ... }': A
|
⋮
|
||||||
[58; 62) 'A::B': A
|
⋮[47; 67) '{ ... }': A
|
||||||
[89; 109) '{ ... }': A
|
⋮[57; 61) 'A::B': A
|
||||||
[99; 103) 'A::C': A
|
⋮[88; 108) '{ ... }': A
|
||||||
[122; 179) '{ ... c; }': ()
|
⋮[98; 102) 'A::C': A
|
||||||
[132; 133) 'a': A
|
⋮[121; 178) '{ ... c; }': ()
|
||||||
[136; 140) 'A::b': fn b() -> A
|
⋮[131; 132) 'a': A
|
||||||
[136; 142) 'A::b()': A
|
⋮[135; 139) 'A::b': fn b() -> A
|
||||||
[148; 149) 'a': A
|
⋮[135; 141) 'A::b()': A
|
||||||
[159; 160) 'c': A
|
⋮[147; 148) 'a': A
|
||||||
[163; 167) 'A::c': fn c() -> A
|
⋮[158; 159) 'c': A
|
||||||
[163; 169) 'A::c()': A
|
⋮[162; 166) 'A::c': fn c() -> A
|
||||||
[175; 176) 'c': A"###
|
⋮[162; 168) 'A::c()': A
|
||||||
|
⋮[174; 175) 'c': A
|
||||||
|
"###
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1540,7 +1548,7 @@ fn test() {
|
||||||
fn infer_type_alias() {
|
fn infer_type_alias() {
|
||||||
assert_snapshot_matches!(
|
assert_snapshot_matches!(
|
||||||
infer(r#"
|
infer(r#"
|
||||||
struct A<X, Y> { x: X, y: Y };
|
struct A<X, Y> { x: X, y: Y }
|
||||||
type Foo = A<u32, i128>;
|
type Foo = A<u32, i128>;
|
||||||
type Bar<T> = A<T, u128>;
|
type Bar<T> = A<T, u128>;
|
||||||
type Baz<U, V> = A<V, U>;
|
type Baz<U, V> = A<V, U>;
|
||||||
|
@ -1554,22 +1562,24 @@ fn test(x: Foo, y: Bar<&str>, z: Baz<i8, u8>) {
|
||||||
}
|
}
|
||||||
"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[117; 118) 'x': A<u32, i128>
|
⋮
|
||||||
[125; 126) 'y': A<&str, u128>
|
⋮[116; 117) 'x': A<u32, i128>
|
||||||
[139; 140) 'z': A<u8, i8>
|
⋮[124; 125) 'y': A<&str, u128>
|
||||||
[155; 212) '{ ...z.y; }': ()
|
⋮[138; 139) 'z': A<u8, i8>
|
||||||
[161; 162) 'x': A<u32, i128>
|
⋮[154; 211) '{ ...z.y; }': ()
|
||||||
[161; 164) 'x.x': u32
|
⋮[160; 161) 'x': A<u32, i128>
|
||||||
[170; 171) 'x': A<u32, i128>
|
⋮[160; 163) 'x.x': u32
|
||||||
[170; 173) 'x.y': i128
|
⋮[169; 170) 'x': A<u32, i128>
|
||||||
[179; 180) 'y': A<&str, u128>
|
⋮[169; 172) 'x.y': i128
|
||||||
[179; 182) 'y.x': &str
|
⋮[178; 179) 'y': A<&str, u128>
|
||||||
[188; 189) 'y': A<&str, u128>
|
⋮[178; 181) 'y.x': &str
|
||||||
[188; 191) 'y.y': u128
|
⋮[187; 188) 'y': A<&str, u128>
|
||||||
[197; 198) 'z': A<u8, i8>
|
⋮[187; 190) 'y.y': u128
|
||||||
[197; 200) 'z.x': u8
|
⋮[196; 197) 'z': A<u8, i8>
|
||||||
[206; 207) 'z': A<u8, i8>
|
⋮[196; 199) 'z.x': u8
|
||||||
[206; 209) 'z.y': i8"###
|
⋮[205; 206) 'z': A<u8, i8>
|
||||||
|
⋮[205; 208) 'z.y': i8
|
||||||
|
"###
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1578,7 +1588,7 @@ fn test(x: Foo, y: Bar<&str>, z: Baz<i8, u8>) {
|
||||||
fn recursive_type_alias() {
|
fn recursive_type_alias() {
|
||||||
assert_snapshot_matches!(
|
assert_snapshot_matches!(
|
||||||
infer(r#"
|
infer(r#"
|
||||||
struct A<X> {};
|
struct A<X> {}
|
||||||
type Foo = Foo;
|
type Foo = Foo;
|
||||||
type Bar = A<Bar>;
|
type Bar = A<Bar>;
|
||||||
fn test(x: Foo) {}
|
fn test(x: Foo) {}
|
||||||
|
@ -1795,18 +1805,21 @@ fn infer_std_crash_3() {
|
||||||
assert_snapshot_matches!(
|
assert_snapshot_matches!(
|
||||||
infer(r#"
|
infer(r#"
|
||||||
pub fn compute() {
|
pub fn compute() {
|
||||||
match _ {
|
match nope!() {
|
||||||
SizeSkeleton::Pointer { non_zero: true, tail } => {}
|
SizeSkeleton::Pointer { non_zero: true, tail } => {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[18; 102) '{ ... } }': ()
|
⋮
|
||||||
[24; 100) 'match ... }': ()
|
⋮[18; 108) '{ ... } }': ()
|
||||||
[42; 88) 'SizeSk...tail }': {unknown}
|
⋮[24; 106) 'match ... }': ()
|
||||||
[76; 80) 'true': {unknown}
|
⋮[30; 37) 'nope!()': {unknown}
|
||||||
[82; 86) 'tail': {unknown}
|
⋮[48; 94) 'SizeSk...tail }': {unknown}
|
||||||
[92; 94) '{}': ()"###
|
⋮[82; 86) 'true': {unknown}
|
||||||
|
⋮[88; 92) 'tail': {unknown}
|
||||||
|
⋮[98; 100) '{}': ()
|
||||||
|
"###
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1817,20 +1830,21 @@ fn infer_std_crash_4() {
|
||||||
infer(r#"
|
infer(r#"
|
||||||
pub fn primitive_type() {
|
pub fn primitive_type() {
|
||||||
match *self {
|
match *self {
|
||||||
BorrowedRef { type_: box Primitive(p), ..} => {},
|
BorrowedRef { type_: Primitive(p), ..} => {},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[25; 110) '{ ... } }': ()
|
⋮
|
||||||
[31; 108) 'match ... }': ()
|
⋮[25; 106) '{ ... } }': ()
|
||||||
[37; 42) '*self': {unknown}
|
⋮[31; 104) 'match ... }': ()
|
||||||
[38; 42) 'self': {unknown}
|
⋮[37; 42) '*self': {unknown}
|
||||||
[53; 95) 'Borrow...), ..}': {unknown}
|
⋮[38; 42) 'self': {unknown}
|
||||||
[74; 77) 'box': {unknown}
|
⋮[53; 91) 'Borrow...), ..}': {unknown}
|
||||||
[78; 87) 'Primitive': {unknown}
|
⋮[74; 86) 'Primitive(p)': {unknown}
|
||||||
[88; 89) 'p': {unknown}
|
⋮[84; 85) 'p': {unknown}
|
||||||
[99; 101) '{}': ()"###
|
⋮[95; 97) '{}': ()
|
||||||
|
"###
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2304,8 +2318,8 @@ trait Into<T> {
|
||||||
fn into(self) -> T;
|
fn into(self) -> T;
|
||||||
}
|
}
|
||||||
struct S;
|
struct S;
|
||||||
impl Into<u32> for S;
|
impl Into<u32> for S {}
|
||||||
impl Into<u64> for S;
|
impl Into<u64> for S {}
|
||||||
fn test() {
|
fn test() {
|
||||||
let x: u32 = S.into();
|
let x: u32 = S.into();
|
||||||
let y: u64 = S.into();
|
let y: u64 = S.into();
|
||||||
|
@ -2313,18 +2327,20 @@ fn test() {
|
||||||
}
|
}
|
||||||
"#),
|
"#),
|
||||||
@r###"
|
@r###"
|
||||||
[29; 33) 'self': Self
|
⋮
|
||||||
[107; 198) '{ ...(S); }': ()
|
⋮[29; 33) 'self': Self
|
||||||
[117; 118) 'x': u32
|
⋮[111; 202) '{ ...(S); }': ()
|
||||||
[126; 127) 'S': S
|
⋮[121; 122) 'x': u32
|
||||||
[126; 134) 'S.into()': u32
|
⋮[130; 131) 'S': S
|
||||||
[144; 145) 'y': u64
|
⋮[130; 138) 'S.into()': u32
|
||||||
[153; 154) 'S': S
|
⋮[148; 149) 'y': u64
|
||||||
[153; 161) 'S.into()': u64
|
⋮[157; 158) 'S': S
|
||||||
[171; 172) 'z': {unknown}
|
⋮[157; 165) 'S.into()': u64
|
||||||
[175; 192) 'Into::...::into': {unknown}
|
⋮[175; 176) 'z': {unknown}
|
||||||
[175; 195) 'Into::...nto(S)': {unknown}
|
⋮[179; 196) 'Into::...::into': {unknown}
|
||||||
[193; 194) 'S': S"###
|
⋮[179; 199) 'Into::...nto(S)': {unknown}
|
||||||
|
⋮[197; 198) 'S': S
|
||||||
|
"###
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2617,7 +2633,7 @@ fn method_resolution_where_clause_1() {
|
||||||
trait Clone {}
|
trait Clone {}
|
||||||
trait Trait { fn foo(self) -> u128; }
|
trait Trait { fn foo(self) -> u128; }
|
||||||
struct S;
|
struct S;
|
||||||
impl Clone for S {};
|
impl Clone for S {}
|
||||||
impl<T> Trait for T where T: Clone {}
|
impl<T> Trait for T where T: Clone {}
|
||||||
fn test() { S.foo()<|>; }
|
fn test() { S.foo()<|>; }
|
||||||
"#,
|
"#,
|
||||||
|
@ -2634,7 +2650,7 @@ trait Into<T> { fn into(self) -> T; }
|
||||||
trait From<T> { fn from(other: T) -> Self; }
|
trait From<T> { fn from(other: T) -> Self; }
|
||||||
struct S1;
|
struct S1;
|
||||||
struct S2;
|
struct S2;
|
||||||
impl From<S2> for S1 {};
|
impl From<S2> for S1 {}
|
||||||
impl<T, U> Into<U> for T where U: From<T> {}
|
impl<T, U> Into<U> for T where U: From<T> {}
|
||||||
fn test() { S2.into()<|>; }
|
fn test() { S2.into()<|>; }
|
||||||
"#,
|
"#,
|
||||||
|
@ -2651,7 +2667,7 @@ trait Into<T> { fn into(self) -> T; }
|
||||||
trait From<T> { fn from(other: T) -> Self; }
|
trait From<T> { fn from(other: T) -> Self; }
|
||||||
struct S1;
|
struct S1;
|
||||||
struct S2;
|
struct S2;
|
||||||
impl From<S2> for S1 {};
|
impl From<S2> for S1 {}
|
||||||
impl<T, U: From<T>> Into<U> for T {}
|
impl<T, U: From<T>> Into<U> for T {}
|
||||||
fn test() { S2.into()<|>; }
|
fn test() { S2.into()<|>; }
|
||||||
"#,
|
"#,
|
||||||
|
@ -2680,8 +2696,8 @@ fn method_resolution_slow() {
|
||||||
//- /main.rs
|
//- /main.rs
|
||||||
trait SendX {}
|
trait SendX {}
|
||||||
|
|
||||||
struct S1; impl SendX for S1;
|
struct S1; impl SendX for S1 {}
|
||||||
struct S2; impl SendX for S2;
|
struct S2; impl SendX for S2 {}
|
||||||
struct U1;
|
struct U1;
|
||||||
|
|
||||||
trait Trait { fn method(self); }
|
trait Trait { fn method(self); }
|
||||||
|
@ -2702,7 +2718,7 @@ fn test() { (S {}).method()<|>; }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
|
fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
|
||||||
let file = db.parse(pos.file_id);
|
let file = db.parse(pos.file_id).ok().unwrap();
|
||||||
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
|
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
|
||||||
let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
|
let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
|
||||||
let ty = analyzer.type_of(db, expr).unwrap();
|
let ty = analyzer.type_of(db, expr).unwrap();
|
||||||
|
@ -2716,7 +2732,7 @@ fn type_at(content: &str) -> String {
|
||||||
|
|
||||||
fn infer(content: &str) -> String {
|
fn infer(content: &str) -> String {
|
||||||
let (db, _, file_id) = MockDatabase::with_single_file(content);
|
let (db, _, file_id) = MockDatabase::with_single_file(content);
|
||||||
let source_file = db.parse(file_id);
|
let source_file = db.parse(file_id).ok().unwrap();
|
||||||
|
|
||||||
let mut acc = String::new();
|
let mut acc = String::new();
|
||||||
acc.push_str("\n");
|
acc.push_str("\n");
|
||||||
|
@ -2794,7 +2810,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
|
||||||
",
|
",
|
||||||
);
|
);
|
||||||
{
|
{
|
||||||
let file = db.parse(pos.file_id);
|
let file = db.parse(pos.file_id).ok().unwrap();
|
||||||
let node =
|
let node =
|
||||||
algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
|
algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
|
||||||
let events = db.log_executed(|| {
|
let events = db.log_executed(|| {
|
||||||
|
@ -2815,7 +2831,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
|
||||||
db.query_mut(ra_db::FileTextQuery).set(pos.file_id, Arc::new(new_text));
|
db.query_mut(ra_db::FileTextQuery).set(pos.file_id, Arc::new(new_text));
|
||||||
|
|
||||||
{
|
{
|
||||||
let file = db.parse(pos.file_id);
|
let file = db.parse(pos.file_id).ok().unwrap();
|
||||||
let node =
|
let node =
|
||||||
algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
|
algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent();
|
||||||
let events = db.log_executed(|| {
|
let events = db.log_executed(|| {
|
||||||
|
|
|
@ -10,7 +10,7 @@ use crate::{FilePosition, CallInfo, FunctionSignature, db::RootDatabase};
|
||||||
|
|
||||||
/// Computes parameter information for the given call expression.
|
/// Computes parameter information for the given call expression.
|
||||||
pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
|
pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
|
||||||
let file = db.parse(position.file_id);
|
let file = db.parse(position.file_id).tree;
|
||||||
let syntax = file.syntax();
|
let syntax = file.syntax();
|
||||||
|
|
||||||
// Find the calling expression and it's NameRef
|
// Find the calling expression and it's NameRef
|
||||||
|
|
|
@ -138,7 +138,7 @@ impl LibraryData {
|
||||||
files: Vec<(FileId, RelativePathBuf, Arc<String>)>,
|
files: Vec<(FileId, RelativePathBuf, Arc<String>)>,
|
||||||
) -> LibraryData {
|
) -> LibraryData {
|
||||||
let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| {
|
let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| {
|
||||||
let file = SourceFile::parse(text);
|
let file = SourceFile::parse(text).tree;
|
||||||
(*file_id, file)
|
(*file_id, file)
|
||||||
}));
|
}));
|
||||||
let mut root_change = RootChange::default();
|
let mut root_change = RootChange::default();
|
||||||
|
|
|
@ -51,8 +51,8 @@ pub use crate::completion::completion_item::{CompletionItem, CompletionItemKind,
|
||||||
/// identifier prefix/fuzzy match should be done higher in the stack, together
|
/// identifier prefix/fuzzy match should be done higher in the stack, together
|
||||||
/// with ordering of completions (currently this is done by the client).
|
/// with ordering of completions (currently this is done by the client).
|
||||||
pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> {
|
pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> {
|
||||||
let original_file = db.parse(position.file_id);
|
let original_parse = db.parse(position.file_id);
|
||||||
let ctx = CompletionContext::new(db, &original_file, position)?;
|
let ctx = CompletionContext::new(db, &original_parse, position)?;
|
||||||
|
|
||||||
let mut acc = Completions::default();
|
let mut acc = Completions::default();
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
use ra_text_edit::AtomTextEdit;
|
use ra_text_edit::AtomTextEdit;
|
||||||
use ra_syntax::{
|
use ra_syntax::{
|
||||||
AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, SyntaxToken,
|
AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, SyntaxToken, Parse,
|
||||||
ast,
|
ast,
|
||||||
algo::{find_token_at_offset, find_covering_element, find_node_at_offset},
|
algo::{find_token_at_offset, find_covering_element, find_node_at_offset},
|
||||||
SyntaxKind::*,
|
SyntaxKind::*,
|
||||||
|
@@ -43,11 +43,12 @@ pub(crate) struct CompletionContext<'a> {
 impl<'a> CompletionContext<'a> {
     pub(super) fn new(
         db: &'a db::RootDatabase,
-        original_file: &'a SourceFile,
+        original_parse: &'a Parse,
         position: FilePosition,
     ) -> Option<CompletionContext<'a>> {
         let module = source_binder::module_from_position(db, position);
-        let token = find_token_at_offset(original_file.syntax(), position.offset).left_biased()?;
+        let token =
+            find_token_at_offset(original_parse.tree.syntax(), position.offset).left_biased()?;
         let analyzer =
             hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset));
         let mut ctx = CompletionContext {
@ -69,7 +70,7 @@ impl<'a> CompletionContext<'a> {
|
||||||
dot_receiver: None,
|
dot_receiver: None,
|
||||||
is_call: false,
|
is_call: false,
|
||||||
};
|
};
|
||||||
ctx.fill(original_file, position.offset);
|
ctx.fill(&original_parse, position.offset);
|
||||||
Some(ctx)
|
Some(ctx)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -82,13 +83,13 @@ impl<'a> CompletionContext<'a> {
         }
     }
 
-    fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) {
+    fn fill(&mut self, original_parse: &'a Parse, offset: TextUnit) {
         // Insert a fake ident to get a valid parse tree. We will use this file
         // to determine context, though the original_file will be used for
         // actual completion.
         let file = {
             let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
-            original_file.reparse(&edit)
+            original_parse.reparse(&edit).tree
         };
 
         // First, let's try to complete a reference to some declaration.
@ -99,7 +100,7 @@ impl<'a> CompletionContext<'a> {
|
||||||
self.is_param = true;
|
self.is_param = true;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
self.classify_name_ref(original_file, name_ref);
|
self.classify_name_ref(&original_parse.tree, name_ref);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Otherwise, see if this is a declaration. We can use heuristics to
|
// Otherwise, see if this is a declaration. We can use heuristics to
|
||||||
|
|
|
@ -4,7 +4,7 @@ use itertools::Itertools;
|
||||||
use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}};
|
use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}};
|
||||||
use ra_db::SourceDatabase;
|
use ra_db::SourceDatabase;
|
||||||
use ra_syntax::{
|
use ra_syntax::{
|
||||||
T, Location, SourceFile, TextRange, SyntaxNode,
|
T, Location, TextRange, SyntaxNode,
|
||||||
ast::{self, AstNode, NamedFieldList, NamedField},
|
ast::{self, AstNode, NamedFieldList, NamedField},
|
||||||
};
|
};
|
||||||
use ra_assists::ast_editor::{AstEditor, AstBuilder};
|
use ra_assists::ast_editor::{AstEditor, AstBuilder};
|
||||||
|
@@ -21,10 +21,17 @@ pub enum Severity {
 
 pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> {
     let _p = profile("diagnostics");
-    let source_file = db.parse(file_id);
+    let parse = db.parse(file_id);
     let mut res = Vec::new();
 
-    syntax_errors(&mut res, &source_file);
+    res.extend(parse.errors.iter().map(|err| Diagnostic {
+        range: location_to_range(err.location()),
+        message: format!("Syntax Error: {}", err),
+        severity: Severity::Error,
+        fix: None,
+    }));
+
+    let source_file = parse.tree;
+
     for node in source_file.syntax().descendants() {
         check_unnecessary_braces_in_use_statement(&mut res, file_id, node);
@@ -51,8 +58,6 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
             })
         })
         .on::<hir::diagnostics::MissingFields, _>(|d| {
-            let file_id = d.file().original_file(db);
-            let source_file = db.parse(file_id);
             let syntax_node = d.syntax_node_ptr();
             let node = NamedFieldList::cast(syntax_node.to_node(source_file.syntax())).unwrap();
             let mut ast_editor = AstEditor::new(node);
@@ -77,21 +82,11 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
     drop(sink);
     res.into_inner()
 }
 
-fn syntax_errors(acc: &mut Vec<Diagnostic>, source_file: &SourceFile) {
-    fn location_to_range(location: Location) -> TextRange {
-        match location {
-            Location::Offset(offset) => TextRange::offset_len(offset, 1.into()),
-            Location::Range(range) => range,
-        }
-    }
-
-    acc.extend(source_file.errors().into_iter().map(|err| Diagnostic {
-        range: location_to_range(err.location()),
-        message: format!("Syntax Error: {}", err),
-        severity: Severity::Error,
-        fix: None,
-    }));
-}
+fn location_to_range(location: Location) -> TextRange {
+    match location {
+        Location::Offset(offset) => TextRange::offset_len(offset, 1.into()),
+        Location::Range(range) => range,
+    }
+}
 
 fn check_unnecessary_braces_in_use_statement(
|
@ -177,6 +172,7 @@ fn check_struct_shorthand_initialization(
|
||||||
mod tests {
|
mod tests {
|
||||||
use test_utils::assert_eq_text;
|
use test_utils::assert_eq_text;
|
||||||
use insta::assert_debug_snapshot_matches;
|
use insta::assert_debug_snapshot_matches;
|
||||||
|
use ra_syntax::SourceFile;
|
||||||
|
|
||||||
use crate::mock_analysis::single_file;
|
use crate::mock_analysis::single_file;
|
||||||
|
|
||||||
|
@ -185,7 +181,7 @@ mod tests {
|
||||||
type DiagnosticChecker = fn(&mut Vec<Diagnostic>, FileId, &SyntaxNode) -> Option<()>;
|
type DiagnosticChecker = fn(&mut Vec<Diagnostic>, FileId, &SyntaxNode) -> Option<()>;
|
||||||
|
|
||||||
fn check_not_applicable(code: &str, func: DiagnosticChecker) {
|
fn check_not_applicable(code: &str, func: DiagnosticChecker) {
|
||||||
let file = SourceFile::parse(code);
|
let file = SourceFile::parse(code).tree;
|
||||||
let mut diagnostics = Vec::new();
|
let mut diagnostics = Vec::new();
|
||||||
for node in file.syntax().descendants() {
|
for node in file.syntax().descendants() {
|
||||||
func(&mut diagnostics, FileId(0), node);
|
func(&mut diagnostics, FileId(0), node);
|
||||||
|
@ -194,7 +190,7 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_apply(before: &str, after: &str, func: DiagnosticChecker) {
|
fn check_apply(before: &str, after: &str, func: DiagnosticChecker) {
|
||||||
let file = SourceFile::parse(before);
|
let file = SourceFile::parse(before).tree;
|
||||||
let mut diagnostics = Vec::new();
|
let mut diagnostics = Vec::new();
|
||||||
for node in file.syntax().descendants() {
|
for node in file.syntax().descendants() {
|
||||||
func(&mut diagnostics, FileId(0), node);
|
func(&mut diagnostics, FileId(0), node);
|
||||||
|
|
|
@ -79,7 +79,7 @@ impl NavigationTarget {
|
||||||
file_id: FileId,
|
file_id: FileId,
|
||||||
pat: AstPtr<ast::Pat>,
|
pat: AstPtr<ast::Pat>,
|
||||||
) -> NavigationTarget {
|
) -> NavigationTarget {
|
||||||
let file = db.parse(file_id);
|
let file = db.parse(file_id).tree;
|
||||||
let (name, full_range) = match pat.to_node(file.syntax()).kind() {
|
let (name, full_range) = match pat.to_node(file.syntax()).kind() {
|
||||||
ast::PatKind::BindPat(pat) => return NavigationTarget::from_bind_pat(file_id, &pat),
|
ast::PatKind::BindPat(pat) => return NavigationTarget::from_bind_pat(file_id, &pat),
|
||||||
_ => ("_".into(), pat.syntax_node_ptr().range()),
|
_ => ("_".into(), pat.syntax_node_ptr().range()),
|
||||||
|
@ -290,7 +290,7 @@ impl NavigationTarget {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn node(&self, db: &RootDatabase) -> Option<TreeArc<SyntaxNode>> {
|
pub(crate) fn node(&self, db: &RootDatabase) -> Option<TreeArc<SyntaxNode>> {
|
||||||
let source_file = db.parse(self.file_id());
|
let source_file = db.parse(self.file_id()).tree;
|
||||||
let source_file = source_file.syntax();
|
let source_file = source_file.syntax();
|
||||||
let node = source_file
|
let node = source_file
|
||||||
.descendants()
|
.descendants()
|
||||||
|
|
|
@ -183,7 +183,9 @@ fn obsolete() {}
|
||||||
#[deprecated(note = "for awhile")]
|
#[deprecated(note = "for awhile")]
|
||||||
fn very_obsolete() {}
|
fn very_obsolete() {}
|
||||||
"#,
|
"#,
|
||||||
);
|
)
|
||||||
|
.ok()
|
||||||
|
.unwrap();
|
||||||
let structure = file_structure(&file);
|
let structure = file_structure(&file);
|
||||||
assert_debug_snapshot_matches!("file_structure", structure);
|
assert_debug_snapshot_matches!("file_structure", structure);
|
||||||
}
|
}
|
||||||
|
|
|
@ -11,7 +11,7 @@ use crate::{FileRange, db::RootDatabase};
|
||||||
|
|
||||||
// FIXME: restore macro support
|
// FIXME: restore macro support
|
||||||
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
|
pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
|
||||||
let source_file = db.parse(frange.file_id);
|
let source_file = db.parse(frange.file_id).tree;
|
||||||
try_extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range)
|
try_extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -212,7 +212,7 @@ mod tests {
|
||||||
|
|
||||||
fn do_check(before: &str, afters: &[&str]) {
|
fn do_check(before: &str, afters: &[&str]) {
|
||||||
let (cursor, before) = extract_offset(before);
|
let (cursor, before) = extract_offset(before);
|
||||||
let file = SourceFile::parse(&before);
|
let file = SourceFile::parse(&before).tree;
|
||||||
let mut range = TextRange::offset_len(cursor, 0.into());
|
let mut range = TextRange::offset_len(cursor, 0.into());
|
||||||
for &after in afters {
|
for &after in afters {
|
||||||
range = try_extend_selection(file.syntax(), range).unwrap();
|
range = try_extend_selection(file.syntax(), range).unwrap();
|
||||||
|
|
|
@ -191,7 +191,7 @@ mod tests {
|
||||||
|
|
||||||
fn do_check(text: &str, fold_kinds: &[FoldKind]) {
|
fn do_check(text: &str, fold_kinds: &[FoldKind]) {
|
||||||
let (ranges, text) = extract_ranges(text, "fold");
|
let (ranges, text) = extract_ranges(text, "fold");
|
||||||
let file = SourceFile::parse(&text);
|
let file = SourceFile::parse(&text).tree;
|
||||||
let folds = folding_ranges(&file);
|
let folds = folding_ranges(&file);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
|
|
@ -19,7 +19,7 @@ pub(crate) fn goto_definition(
|
||||||
db: &RootDatabase,
|
db: &RootDatabase,
|
||||||
position: FilePosition,
|
position: FilePosition,
|
||||||
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
|
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
|
||||||
let file = db.parse(position.file_id);
|
let file = db.parse(position.file_id).tree;
|
||||||
let syntax = file.syntax();
|
let syntax = file.syntax();
|
||||||
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
|
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) {
|
||||||
let navs = reference_definition(db, position.file_id, name_ref).to_vec();
|
let navs = reference_definition(db, position.file_id, name_ref).to_vec();
|
||||||
|
|
|
@ -10,7 +10,7 @@ pub(crate) fn goto_type_definition(
|
||||||
db: &RootDatabase,
|
db: &RootDatabase,
|
||||||
position: FilePosition,
|
position: FilePosition,
|
||||||
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
|
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
|
||||||
let file = db.parse(position.file_id);
|
let file = db.parse(position.file_id).tree;
|
||||||
|
|
||||||
let node = find_token_at_offset(file.syntax(), position.offset).find_map(|token| {
|
let node = find_token_at_offset(file.syntax(), position.offset).find_map(|token| {
|
||||||
token
|
token
|
||||||
|
|
|
@ -68,7 +68,7 @@ impl HoverResult {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> {
|
pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> {
|
||||||
let file = db.parse(position.file_id);
|
let file = db.parse(position.file_id).tree;
|
||||||
let mut res = HoverResult::new();
|
let mut res = HoverResult::new();
|
||||||
|
|
||||||
let mut range = None;
|
let mut range = None;
|
||||||
|
@ -120,7 +120,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
|
pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
|
||||||
let file = db.parse(frange.file_id);
|
let file = db.parse(frange.file_id).tree;
|
||||||
let syntax = file.syntax();
|
let syntax = file.syntax();
|
||||||
let leaf_node = find_covering_element(syntax, frange.range);
|
let leaf_node = find_covering_element(syntax, frange.range);
|
||||||
// if we picked identifier, expand to pattern/expression
|
// if we picked identifier, expand to pattern/expression
|
||||||
|
|
|
@ -11,7 +11,7 @@ pub(crate) fn goto_implementation(
|
||||||
db: &RootDatabase,
|
db: &RootDatabase,
|
||||||
position: FilePosition,
|
position: FilePosition,
|
||||||
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
|
) -> Option<RangeInfo<Vec<NavigationTarget>>> {
|
||||||
let file = db.parse(position.file_id);
|
let file = db.parse(position.file_id).tree;
|
||||||
let syntax = file.syntax();
|
let syntax = file.syntax();
|
||||||
|
|
||||||
let module = source_binder::module_from_position(db, position)?;
|
let module = source_binder::module_from_position(db, position)?;
|
||||||
|
|
|
@ -506,7 +506,7 @@ fn foo() {
|
||||||
|
|
||||||
fn check_join_lines_sel(before: &str, after: &str) {
|
fn check_join_lines_sel(before: &str, after: &str) {
|
||||||
let (sel, before) = extract_range(before);
|
let (sel, before) = extract_range(before);
|
||||||
let file = SourceFile::parse(&before);
|
let file = SourceFile::parse(&before).tree;
|
||||||
let result = join_lines(&file, sel);
|
let result = join_lines(&file, sel);
|
||||||
let actual = result.apply(&before);
|
let actual = result.apply(&before);
|
||||||
assert_eq_text!(after, &actual);
|
assert_eq_text!(after, &actual);
|
||||||
|
|
|
@@ -314,7 +314,7 @@ impl Analysis {
 
     /// Gets the syntax tree of the file.
     pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> {
-        self.db.parse(file_id).clone()
+        self.db.parse(file_id).tree
     }
 
     /// Gets the file's `LineIndex`: data structure to convert between absolute
@ -331,7 +331,7 @@ impl Analysis {
|
||||||
/// Returns position of the matching brace (all types of braces are
|
/// Returns position of the matching brace (all types of braces are
|
||||||
/// supported).
|
/// supported).
|
||||||
pub fn matching_brace(&self, position: FilePosition) -> Option<TextUnit> {
|
pub fn matching_brace(&self, position: FilePosition) -> Option<TextUnit> {
|
||||||
let file = self.db.parse(position.file_id);
|
let file = self.db.parse(position.file_id).tree;
|
||||||
matching_brace::matching_brace(&file, position.offset)
|
matching_brace::matching_brace(&file, position.offset)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -344,7 +344,7 @@ impl Analysis {
|
||||||
/// Returns an edit to remove all newlines in the range, cleaning up minor
|
/// Returns an edit to remove all newlines in the range, cleaning up minor
|
||||||
/// stuff like trailing commas.
|
/// stuff like trailing commas.
|
||||||
pub fn join_lines(&self, frange: FileRange) -> SourceChange {
|
pub fn join_lines(&self, frange: FileRange) -> SourceChange {
|
||||||
let file = self.db.parse(frange.file_id);
|
let file = self.db.parse(frange.file_id).tree;
|
||||||
let file_edit = SourceFileEdit {
|
let file_edit = SourceFileEdit {
|
||||||
file_id: frange.file_id,
|
file_id: frange.file_id,
|
||||||
edit: join_lines::join_lines(&file, frange.range),
|
edit: join_lines::join_lines(&file, frange.range),
|
||||||
|
@ -362,7 +362,7 @@ impl Analysis {
|
||||||
/// this works when adding `let =`.
|
/// this works when adding `let =`.
|
||||||
// FIXME: use a snippet completion instead of this hack here.
|
// FIXME: use a snippet completion instead of this hack here.
|
||||||
pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> {
|
pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> {
|
||||||
let file = self.db.parse(position.file_id);
|
let file = self.db.parse(position.file_id).tree;
|
||||||
let edit = typing::on_eq_typed(&file, position.offset)?;
|
let edit = typing::on_eq_typed(&file, position.offset)?;
|
||||||
Some(SourceChange::source_file_edit(
|
Some(SourceChange::source_file_edit(
|
||||||
"add semicolon",
|
"add semicolon",
|
||||||
|
@ -378,13 +378,13 @@ impl Analysis {
|
||||||
/// Returns a tree representation of symbols in the file. Useful to draw a
|
/// Returns a tree representation of symbols in the file. Useful to draw a
|
||||||
/// file outline.
|
/// file outline.
|
||||||
pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
|
pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> {
|
||||||
let file = self.db.parse(file_id);
|
let file = self.db.parse(file_id).tree;
|
||||||
file_structure(&file)
|
file_structure(&file)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the set of folding ranges.
|
/// Returns the set of folding ranges.
|
||||||
pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
|
pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> {
|
||||||
let file = self.db.parse(file_id);
|
let file = self.db.parse(file_id).tree;
|
||||||
folding_ranges::folding_ranges(&file)
|
folding_ranges::folding_ranges(&file)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -31,7 +31,7 @@ mod tests {
|
||||||
fn test_matching_brace() {
|
fn test_matching_brace() {
|
||||||
fn do_check(before: &str, after: &str) {
|
fn do_check(before: &str, after: &str) {
|
||||||
let (pos, before) = extract_offset(before);
|
let (pos, before) = extract_offset(before);
|
||||||
let file = SourceFile::parse(&before);
|
let file = SourceFile::parse(&before).tree;
|
||||||
let new_pos = match matching_brace(&file, pos) {
|
let new_pos = match matching_brace(&file, pos) {
|
||||||
None => pos,
|
None => pos,
|
||||||
Some(pos) => pos,
|
Some(pos) => pos,
|
||||||
|
|
|
@ -60,7 +60,7 @@ pub(crate) fn find_all_refs(
|
||||||
db: &RootDatabase,
|
db: &RootDatabase,
|
||||||
position: FilePosition,
|
position: FilePosition,
|
||||||
) -> Option<ReferenceSearchResult> {
|
) -> Option<ReferenceSearchResult> {
|
||||||
let file = db.parse(position.file_id);
|
let file = db.parse(position.file_id).tree;
|
||||||
let (binding, analyzer) = find_binding(db, &file, position)?;
|
let (binding, analyzer) = find_binding(db, &file, position)?;
|
||||||
let declaration = NavigationTarget::from_bind_pat(position.file_id, binding);
|
let declaration = NavigationTarget::from_bind_pat(position.file_id, binding);
|
||||||
|
|
||||||
|
@ -99,7 +99,7 @@ pub(crate) fn rename(
|
||||||
position: FilePosition,
|
position: FilePosition,
|
||||||
new_name: &str,
|
new_name: &str,
|
||||||
) -> Option<SourceChange> {
|
) -> Option<SourceChange> {
|
||||||
let source_file = db.parse(position.file_id);
|
let source_file = db.parse(position.file_id).tree;
|
||||||
let syntax = source_file.syntax();
|
let syntax = source_file.syntax();
|
||||||
|
|
||||||
if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) {
|
if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) {
|
||||||
|
|
|
@ -22,7 +22,7 @@ pub enum RunnableKind {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
|
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
|
||||||
let source_file = db.parse(file_id);
|
let source_file = db.parse(file_id).tree;
|
||||||
source_file.syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect()
|
source_file.syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -4,7 +4,7 @@ use std::{
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
|
|
||||||
use ra_syntax::{AstNode, TreeArc, SourceFile};
|
use ra_syntax::{AstNode, Parse};
|
||||||
use ra_db::{
|
use ra_db::{
|
||||||
ParseQuery, FileTextQuery, SourceRootId,
|
ParseQuery, FileTextQuery, SourceRootId,
|
||||||
salsa::{Database, debug::{DebugQueryTable, TableEntry}},
|
salsa::{Database, debug::{DebugQueryTable, TableEntry}},
|
||||||
|
@@ -72,17 +72,17 @@ impl fmt::Display for SyntaxTreeStats {
     }
 }
 
-impl FromIterator<TableEntry<FileId, TreeArc<SourceFile>>> for SyntaxTreeStats {
+impl FromIterator<TableEntry<FileId, Parse>> for SyntaxTreeStats {
     fn from_iter<T>(iter: T) -> SyntaxTreeStats
     where
-        T: IntoIterator<Item = TableEntry<FileId, TreeArc<SourceFile>>>,
+        T: IntoIterator<Item = TableEntry<FileId, Parse>>,
     {
         let mut res = SyntaxTreeStats::default();
         for entry in iter {
             res.total += 1;
             if let Some(value) = entry.value {
                 res.retained += 1;
-                res.retained_size += value.syntax().memory_size_of_subtree();
+                res.retained_size += value.tree.syntax().memory_size_of_subtree();
             }
         }
         res
|
@ -63,7 +63,7 @@ pub(crate) trait SymbolsDatabase: hir::db::HirDatabase {
|
||||||
|
|
||||||
fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> {
|
fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> {
|
||||||
db.check_canceled();
|
db.check_canceled();
|
||||||
let source_file = db.parse(file_id);
|
let source_file = db.parse(file_id).tree;
|
||||||
|
|
||||||
let symbols = source_file_to_file_symbols(&source_file, file_id);
|
let symbols = source_file_to_file_symbols(&source_file, file_id);
|
||||||
|
|
||||||
|
|
|
@ -32,7 +32,7 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
|
||||||
|
|
||||||
pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
|
pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
|
||||||
let _p = profile("highlight");
|
let _p = profile("highlight");
|
||||||
let source_file = db.parse(file_id);
|
let source_file = db.parse(file_id).tree;
|
||||||
|
|
||||||
fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 {
|
fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 {
|
||||||
fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
|
fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
|
||||||
|
@ -177,7 +177,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
|
pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
|
||||||
let source_file = db.parse(file_id);
|
let source_file = db.parse(file_id).tree;
|
||||||
|
|
||||||
fn rainbowify(seed: u64) -> String {
|
fn rainbowify(seed: u64) -> String {
|
||||||
use rand::prelude::*;
|
use rand::prelude::*;
|
||||||
|
|
|
@ -14,7 +14,7 @@ pub(crate) fn syntax_tree(
|
||||||
text_range: Option<TextRange>,
|
text_range: Option<TextRange>,
|
||||||
) -> String {
|
) -> String {
|
||||||
if let Some(text_range) = text_range {
|
if let Some(text_range) = text_range {
|
||||||
let file = db.parse(file_id);
|
let file = db.parse(file_id).tree;
|
||||||
let node = match algo::find_covering_element(file.syntax(), text_range) {
|
let node = match algo::find_covering_element(file.syntax(), text_range) {
|
||||||
SyntaxElement::Node(node) => node,
|
SyntaxElement::Node(node) => node,
|
||||||
SyntaxElement::Token(token) => {
|
SyntaxElement::Token(token) => {
|
||||||
|
@ -27,7 +27,7 @@ pub(crate) fn syntax_tree(
|
||||||
|
|
||||||
node.debug_dump()
|
node.debug_dump()
|
||||||
} else {
|
} else {
|
||||||
db.parse(file_id).syntax().debug_dump()
|
db.parse(file_id).tree.syntax().debug_dump()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -84,8 +84,8 @@ fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<Str
 
     // If the "file" parsed without errors,
     // return its syntax
-    if parsed.errors().is_empty() {
-        return Some(parsed.syntax().debug_dump());
+    if parsed.errors.is_empty() {
+        return Some(parsed.tree.syntax().debug_dump());
     }
 
     None
|
@ -9,7 +9,7 @@ pub fn check_action<F: Fn(&SourceFile, TextUnit) -> Option<TextEdit>>(
|
||||||
f: F,
|
f: F,
|
||||||
) {
|
) {
|
||||||
let (before_cursor_pos, before) = extract_offset(before);
|
let (before_cursor_pos, before) = extract_offset(before);
|
||||||
let file = SourceFile::parse(&before);
|
let file = SourceFile::parse(&before).ok().unwrap();
|
||||||
let result = f(&file, before_cursor_pos).expect("code action is not applicable");
|
let result = f(&file, before_cursor_pos).expect("code action is not applicable");
|
||||||
let actual = result.apply(&before);
|
let actual = result.apply(&before);
|
||||||
let actual_cursor_pos =
|
let actual_cursor_pos =
|
||||||
|
|
|
@ -10,7 +10,7 @@ use ra_db::{FilePosition, SourceDatabase};
|
||||||
use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
|
use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
|
||||||
|
|
||||||
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
|
pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
|
||||||
let file = db.parse(position.file_id);
|
let file = db.parse(position.file_id).tree;
|
||||||
let comment = find_token_at_offset(file.syntax(), position.offset)
|
let comment = find_token_at_offset(file.syntax(), position.offset)
|
||||||
.left_biased()
|
.left_biased()
|
||||||
.and_then(ast::Comment::cast)?;
|
.and_then(ast::Comment::cast)?;
|
||||||
|
@@ -85,7 +85,7 @@ pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
 }

 pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
-    let file = db.parse(position.file_id);
+    let file = db.parse(position.file_id).tree;
     assert_eq!(file.syntax().text().char_at(position.offset), Some('.'));

     let whitespace = find_token_at_offset(file.syntax(), position.offset)
@@ -138,7 +138,7 @@ mod tests {
         let mut edit = TextEditBuilder::default();
         edit.insert(offset, "=".to_string());
         let before = edit.finish().apply(&before);
-        let file = SourceFile::parse(&before);
+        let file = SourceFile::parse(&before).tree;
         if let Some(result) = on_eq_typed(&file, offset) {
             let actual = result.apply(&before);
             assert_eq_text!(after, &actual);
@@ -597,7 +597,7 @@ mod tests {
     }

     fn create_rules(macro_definition: &str) -> crate::MacroRules {
-        let source_file = ast::SourceFile::parse(macro_definition);
+        let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap();
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

@@ -609,7 +609,7 @@ mod tests {
         rules: &crate::MacroRules,
         invocation: &str,
     ) -> Result<tt::Subtree, ExpandError> {
-        let source_file = ast::SourceFile::parse(invocation);
+        let source_file = ast::SourceFile::parse(invocation).ok().unwrap();
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

@@ -175,7 +175,7 @@ mod tests {
     }

     fn create_rules(macro_definition: &str) -> Result<crate::MacroRules, ParseError> {
-        let source_file = ast::SourceFile::parse(macro_definition);
+        let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap();
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();

@@ -56,8 +56,9 @@ where
     if tree_sink.roots.len() != 1 {
         return Err(ExpandError::ConversionError);
     }
-    Ok(tree_sink.inner.finish())
+    //FIXME: would be cool to report errors
+    let (tree, _errors) = tree_sink.inner.finish();
+    Ok(tree)
 }

 /// Parses the token tree (result of macro expansion) to an expression
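The hunk above now discards the syntax errors produced while building the expanded tree (the `_errors` binding), hence the FIXME. A hypothetical sketch of how a caller could surface them instead, not part of this commit; the `eprintln!` reporting is illustrative only:

    let (tree, errors) = tree_sink.inner.finish();
    for e in errors.iter() {
        // Illustrative only: the committed code intentionally ignores these.
        eprintln!("error while expanding macro: {}", e);
    }
    Ok(tree)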
@@ -383,7 +384,7 @@ mod tests {
         }
         "#,
         );
-        let expansion = expand(&rules, "literals!(foo)");
+        let expansion = expand(&rules, "literals!(foo);");
         let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]);
         let mut tt_src = SubtreeTokenSource::new(&buffer);
         let mut tokens = vec![];
@@ -422,7 +423,7 @@ mod tests {
         }
         "#,
         );
-        let expansion = expand(&rules, "stmts!()");
+        let expansion = expand(&rules, "stmts!();");
         assert!(token_tree_to_expr(&expansion).is_err());
     }
 }
@ -29,11 +29,11 @@ macro_rules! impl_froms {
|
||||||
impl_froms!(TokenTree: Leaf, Subtree);
|
impl_froms!(TokenTree: Leaf, Subtree);
|
||||||
"#;
|
"#;
|
||||||
|
|
||||||
let source_file = ast::SourceFile::parse(macro_definition);
|
let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap();
|
||||||
let macro_definition =
|
let macro_definition =
|
||||||
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
|
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
|
||||||
|
|
||||||
let source_file = ast::SourceFile::parse(macro_invocation);
|
let source_file = ast::SourceFile::parse(macro_invocation).ok().unwrap();
|
||||||
let macro_invocation =
|
let macro_invocation =
|
||||||
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
|
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
|
||||||
|
|
||||||
|
@ -49,7 +49,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
|
pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
|
||||||
let source_file = ast::SourceFile::parse(macro_definition);
|
let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap();
|
||||||
let macro_definition =
|
let macro_definition =
|
||||||
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
|
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
|
||||||
|
|
||||||
|
@ -58,7 +58,7 @@ pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
|
pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
|
||||||
let source_file = ast::SourceFile::parse(invocation);
|
let source_file = ast::SourceFile::parse(invocation).ok().unwrap();
|
||||||
let macro_invocation =
|
let macro_invocation =
|
||||||
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
|
source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
|
||||||
|
|
||||||
|
@ -95,7 +95,7 @@ pub(crate) fn expand_to_expr(
|
||||||
pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {
|
pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {
|
||||||
// wrap the given text to a macro call
|
// wrap the given text to a macro call
|
||||||
let wrapped = format!("wrap_macro!( {} )", text);
|
let wrapped = format!("wrap_macro!( {} )", text);
|
||||||
let wrapped = ast::SourceFile::parse(&wrapped);
|
let wrapped = ast::SourceFile::parse(&wrapped).tree;
|
||||||
let wrapped = wrapped.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
|
let wrapped = wrapped.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
|
||||||
let mut wrapped = ast_to_token_tree(wrapped).unwrap().0;
|
let mut wrapped = ast_to_token_tree(wrapped).unwrap().0;
|
||||||
wrapped.delimiter = tt::Delimiter::None;
|
wrapped.delimiter = tt::Delimiter::None;
|
||||||
|
@ -294,7 +294,7 @@ fn test_match_group_pattern_with_multiple_defs() {
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
($ ($ i:ident),*) => ( struct Bar { $ (
|
($ ($ i:ident),*) => ( struct Bar { $ (
|
||||||
fn $ i {}
|
fn $ i {}
|
||||||
)*} );
|
)*} );
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
@ -314,7 +314,7 @@ fn test_match_group_pattern_with_multiple_statement() {
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
($ ($ i:ident),*) => ( fn baz { $ (
|
($ ($ i:ident),*) => ( fn baz { $ (
|
||||||
$ i ();
|
$ i ();
|
||||||
)*} );
|
)*} );
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
@ -329,7 +329,7 @@ fn test_match_group_pattern_with_multiple_statement_without_semi() {
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
($ ($ i:ident),*) => ( fn baz { $ (
|
($ ($ i:ident),*) => ( fn baz { $ (
|
||||||
$i()
|
$i()
|
||||||
);*} );
|
);*} );
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
@ -344,7 +344,7 @@ fn test_match_group_empty_fixed_token() {
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
($ ($ i:ident)* #abc) => ( fn baz { $ (
|
($ ($ i:ident)* #abc) => ( fn baz { $ (
|
||||||
$ i ();
|
$ i ();
|
||||||
)*} );
|
)*} );
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
@ -356,10 +356,10 @@ fn test_match_group_empty_fixed_token() {
|
||||||
fn test_match_group_in_subtree() {
|
fn test_match_group_in_subtree() {
|
||||||
let rules = create_rules(
|
let rules = create_rules(
|
||||||
r#"
|
r#"
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
(fn $name:ident {$($i:ident)*} ) => ( fn $name() { $ (
|
(fn $name:ident {$($i:ident)*} ) => ( fn $name() { $ (
|
||||||
$ i ();
|
$ i ();
|
||||||
)*} );
|
)*} );
|
||||||
}"#,
|
}"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -370,15 +370,15 @@ fn test_match_group_in_subtree() {
|
||||||
fn test_match_group_with_multichar_sep() {
|
fn test_match_group_with_multichar_sep() {
|
||||||
let rules = create_rules(
|
let rules = create_rules(
|
||||||
r#"
|
r#"
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
(fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} );
|
(fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} );
|
||||||
}"#,
|
}"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(
|
assert_expansion(
|
||||||
MacroKind::Items,
|
MacroKind::Items,
|
||||||
&rules,
|
&rules,
|
||||||
"foo! (fn baz {true true} )",
|
"foo! (fn baz {true true} );",
|
||||||
"fn baz () -> bool {true &&true}",
|
"fn baz () -> bool {true &&true}",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -387,24 +387,24 @@ fn test_match_group_with_multichar_sep() {
|
||||||
fn test_match_group_zero_match() {
|
fn test_match_group_zero_match() {
|
||||||
let rules = create_rules(
|
let rules = create_rules(
|
||||||
r#"
|
r#"
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
( $($i:ident)* ) => ();
|
( $($i:ident)* ) => ();
|
||||||
}"#,
|
}"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, "foo! ()", "");
|
assert_expansion(MacroKind::Items, &rules, "foo! ();", "");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_match_group_in_group() {
|
fn test_match_group_in_group() {
|
||||||
let rules = create_rules(
|
let rules = create_rules(
|
||||||
r#"
|
r#"
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
{ $( ( $($i:ident)* ) )* } => ( $( ( $($i)* ) )* );
|
{ $( ( $($i:ident)* ) )* } => ( $( ( $($i)* ) )* );
|
||||||
}"#,
|
}"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, "foo! ( (a b) )", "(a b)");
|
assert_expansion(MacroKind::Items, &rules, "foo! ( (a b) );", "(a b)");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -418,7 +418,7 @@ fn test_expand_to_item_list() {
|
||||||
}
|
}
|
||||||
",
|
",
|
||||||
);
|
);
|
||||||
let expansion = expand(&rules, "structs!(Foo, Bar)");
|
let expansion = expand(&rules, "structs!(Foo, Bar);");
|
||||||
let tree = token_tree_to_macro_items(&expansion);
|
let tree = token_tree_to_macro_items(&expansion);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tree.unwrap().syntax().debug_dump().trim(),
|
tree.unwrap().syntax().debug_dump().trim(),
|
||||||
|
@ -490,7 +490,7 @@ fn test_expand_literals_to_token_tree() {
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
let expansion = expand(&rules, "literals!(foo)");
|
let expansion = expand(&rules, "literals!(foo);");
|
||||||
let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees;
|
let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees;
|
||||||
|
|
||||||
// [let] [a] [=] ['c'] [;]
|
// [let] [a] [=] ['c'] [;]
|
||||||
|
@ -586,7 +586,7 @@ fn test_match_literal() {
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
assert_expansion(MacroKind::Items, &rules, "foo! ['(']", "fn foo () {}");
|
assert_expansion(MacroKind::Items, &rules, "foo! ['('];", "fn foo () {}");
|
||||||
}
|
}
|
||||||
|
|
||||||
// The following tests are port from intellij-rust directly
|
// The following tests are port from intellij-rust directly
|
||||||
|
@ -651,7 +651,7 @@ fn test_expr() {
|
||||||
r#"
|
r#"
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
($ i:expr) => {
|
($ i:expr) => {
|
||||||
fn bar() { $ i; }
|
fn bar() { $ i; }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
|
@ -671,7 +671,7 @@ fn test_expr_order() {
|
||||||
r#"
|
r#"
|
||||||
macro_rules! foo {
|
macro_rules! foo {
|
||||||
($ i:expr) => {
|
($ i:expr) => {
|
||||||
fn bar() { $ i * 2; }
|
fn bar() { $ i * 2; }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
|
@ -725,7 +725,7 @@ fn test_last_expr() {
|
||||||
assert_expansion(
|
assert_expansion(
|
||||||
MacroKind::Items,
|
MacroKind::Items,
|
||||||
&rules,
|
&rules,
|
||||||
"vec!(1,2,3)",
|
"vec!(1,2,3);",
|
||||||
"{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}",
|
"{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -896,13 +896,13 @@ fn test_meta_doc_comments() {
|
||||||
assert_expansion(
|
assert_expansion(
|
||||||
MacroKind::Items,
|
MacroKind::Items,
|
||||||
&rules,
|
&rules,
|
||||||
r#"foo! {
|
r#"foo! {
|
||||||
/// Single Line Doc 1
|
/// Single Line Doc 1
|
||||||
/**
|
/**
|
||||||
MultiLines Doc
|
MultiLines Doc
|
||||||
*/
|
*/
|
||||||
}"#,
|
}"#,
|
||||||
"# [doc = \" Single Line Doc 1\"] # [doc = \" \\\\n MultiLines Doc\\\\n \"] fn bar () {}",
|
"# [doc = \" Single Line Doc 1\"] # [doc = \"\\\\n MultiLines Doc\\\\n \"] fn bar () {}",
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -950,7 +950,7 @@ fn test_literal() {
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"foo!(u8 0)"#, r#"const VALUE : u8 = 0 ;"#);
|
assert_expansion(MacroKind::Items, &rules, r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -984,7 +984,7 @@ macro_rules! foo {
|
||||||
bar!($a);
|
bar!($a);
|
||||||
fn $b() -> u8 {$c}
|
fn $b() -> u8 {$c}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
assert_expansion(
|
assert_expansion(
|
||||||
|
@ -1017,12 +1017,12 @@ fn test_vec() {
|
||||||
assert_expansion(
|
assert_expansion(
|
||||||
MacroKind::Items,
|
MacroKind::Items,
|
||||||
&rules,
|
&rules,
|
||||||
r#"vec![1u32,2]"#,
|
r#"vec![1u32,2];"#,
|
||||||
r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#,
|
r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
expand_to_expr(&rules, r#"vec![1u32,2]"#).syntax().debug_dump().trim(),
|
expand_to_expr(&rules, r#"vec![1u32,2];"#).syntax().debug_dump().trim(),
|
||||||
r#"BLOCK_EXPR@[0; 45)
|
r#"BLOCK_EXPR@[0; 45)
|
||||||
BLOCK@[0; 45)
|
BLOCK@[0; 45)
|
||||||
L_CURLY@[0; 1) "{"
|
L_CURLY@[0; 1) "{"
|
||||||
|
@ -1119,7 +1119,7 @@ macro_rules! STRUCT {
|
||||||
// from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs
|
// from https://github.com/retep998/winapi-rs/blob/a7ef2bca086aae76cf6c4ce4c2552988ed9798ad/src/shared/d3d9caps.rs
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}"#,
|
assert_expansion(MacroKind::Items, &rules, r#"STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}"#,
|
||||||
"# [repr (C)] # [derive (Copy)] pub struct D3DVSHADERCAPS2_0 {pub Caps : u8 ,} impl Clone for D3DVSHADERCAPS2_0 {# [inline] fn clone (& self) -> D3DVSHADERCAPS2_0 {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DVSHADERCAPS2_0 {# [inline] fn default () -> D3DVSHADERCAPS2_0 {unsafe {$crate :: _core :: mem :: zeroed ()}}}");
|
"# [repr (C)] # [derive (Copy)] pub struct D3DVSHADERCAPS2_0 {pub Caps : u8 ,} impl Clone for D3DVSHADERCAPS2_0 {# [inline] fn clone (& self) -> D3DVSHADERCAPS2_0 {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DVSHADERCAPS2_0 {# [inline] fn default () -> D3DVSHADERCAPS2_0 {unsafe {$crate :: _core :: mem :: zeroed ()}}}");
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#,
|
assert_expansion(MacroKind::Items, &rules, r#"STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}"#,
|
||||||
"# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed ()}}}");
|
"# [repr (C)] # [derive (Copy)] # [cfg_attr (target_arch = \"x86\" , repr (packed))] pub struct D3DCONTENTPROTECTIONCAPS {pub Caps : u8 ,} impl Clone for D3DCONTENTPROTECTIONCAPS {# [inline] fn clone (& self) -> D3DCONTENTPROTECTIONCAPS {* self}} # [cfg (feature = \"impl-default\")] impl Default for D3DCONTENTPROTECTIONCAPS {# [inline] fn default () -> D3DCONTENTPROTECTIONCAPS {unsafe {$crate :: _core :: mem :: zeroed ()}}}");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1136,11 +1136,11 @@ macro_rules! int_base {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, r#" int_base!{Binary for isize as usize -> Binary}"#,
|
assert_expansion(MacroKind::Items, &rules, r#" int_base!{Binary for isize as usize -> Binary}"#,
|
||||||
"# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
|
"# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -1150,7 +1150,7 @@ fn test_generate_pattern_iterators() {
|
||||||
// from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs
|
// from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/str/mod.rs
|
||||||
let rules = create_rules(
|
let rules = create_rules(
|
||||||
r#"
|
r#"
|
||||||
macro_rules! generate_pattern_iterators {
|
macro_rules! generate_pattern_iterators {
|
||||||
{ double ended; with $(#[$common_stability_attribute:meta])*,
|
{ double ended; with $(#[$common_stability_attribute:meta])*,
|
||||||
$forward_iterator:ident,
|
$forward_iterator:ident,
|
||||||
$reverse_iterator:ident, $iterty:ty
|
$reverse_iterator:ident, $iterty:ty
|
||||||
|
@ -1161,7 +1161,7 @@ macro_rules! generate_pattern_iterators {
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str )"#,
|
assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#,
|
||||||
"fn foo () {}");
|
"fn foo () {}");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1170,7 +1170,7 @@ fn test_impl_fn_for_zst() {
|
||||||
// from https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs
|
// from https://github.com/rust-lang/rust/blob/5d20ff4d2718c820632b38c1e49d4de648a9810b/src/libcore/internal_macros.rs
|
||||||
let rules = create_rules(
|
let rules = create_rules(
|
||||||
r#"
|
r#"
|
||||||
macro_rules! impl_fn_for_zst {
|
macro_rules! impl_fn_for_zst {
|
||||||
{ $( $( #[$attr: meta] )*
|
{ $( $( #[$attr: meta] )*
|
||||||
struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
|
struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
|
||||||
|$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty
|
|$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty
|
||||||
|
@ -1208,27 +1208,26 @@ $body: block; )+
|
||||||
)+
|
)+
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"
|
assert_expansion(MacroKind::Items, &rules, r#"
|
||||||
impl_fn_for_zst ! {
|
impl_fn_for_zst ! {
|
||||||
# [ derive ( Clone ) ]
|
# [ derive ( Clone ) ]
|
||||||
struct CharEscapeDebugContinue impl Fn = | c : char | -> char :: EscapeDebug {
|
struct CharEscapeDebugContinue impl Fn = | c : char | -> char :: EscapeDebug {
|
||||||
c . escape_debug_ext ( false )
|
c . escape_debug_ext ( false )
|
||||||
} ;
|
} ;
|
||||||
|
|
||||||
# [ derive ( Clone ) ]
|
# [ derive ( Clone ) ]
|
||||||
struct CharEscapeUnicode impl Fn = | c : char | -> char :: EscapeUnicode {
|
struct CharEscapeUnicode impl Fn = | c : char | -> char :: EscapeUnicode {
|
||||||
c . escape_unicode ( )
|
c . escape_unicode ( )
|
||||||
} ;
|
} ;
|
||||||
# [ derive ( Clone ) ]
|
# [ derive ( Clone ) ]
|
||||||
struct CharEscapeDefault impl Fn = | c : char | -> char :: EscapeDefault {
|
struct CharEscapeDefault impl Fn = | c : char | -> char :: EscapeDefault {
|
||||||
c . escape_default ( )
|
c . escape_default ( )
|
||||||
} ;
|
} ;
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
"# [derive (Clone)] struct CharEscapeDebugContinue ; impl Fn < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDebug {{c . escape_debug_ext (false)}}} impl FnMut < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDebugContinue {type Output = char :: EscapeDebug ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeUnicode ; impl Fn < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeUnicode {{c . escape_unicode ()}}} impl FnMut < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeUnicode {type Output = char :: EscapeUnicode ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeDefault ; impl Fn < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDefault {{c . escape_default ()}}} impl FnMut < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDefault {type Output = char :: EscapeDefault ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (& self , (c ,))}}");
|
"# [derive (Clone)] struct CharEscapeDebugContinue ; impl Fn < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDebug {{c . escape_debug_ext (false)}}} impl FnMut < (char ,) > for CharEscapeDebugContinue {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDebugContinue {type Output = char :: EscapeDebug ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDebug {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeUnicode ; impl Fn < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeUnicode {{c . escape_unicode ()}}} impl FnMut < (char ,) > for CharEscapeUnicode {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeUnicode {type Output = char :: EscapeUnicode ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeUnicode {Fn :: call (& self , (c ,))}} # [derive (Clone)] struct CharEscapeDefault ; impl Fn < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call (& self , (c ,) : (char ,)) -> char :: EscapeDefault {{c . escape_default ()}}} impl FnMut < (char ,) > for CharEscapeDefault {# [inline] extern \"rust-call\" fn call_mut (& mut self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (&* self , (c ,))}} impl FnOnce < (char ,) > for CharEscapeDefault {type Output = char :: EscapeDefault ; # [inline] extern \"rust-call\" fn call_once (self , (c ,) : (char ,)) -> char :: EscapeDefault {Fn :: call (& self , (c ,))}}");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1263,7 +1262,7 @@ fn test_cfg_if_items() {
|
||||||
"#,
|
"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
|
assert_expansion(MacroKind::Items, &rules, r#"__cfg_if_items ! { ( rustdoc , ) ; ( ( ) ( # [ cfg ( any ( target_os = "redox" , unix ) ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as unix ; # [ cfg ( windows ) ] # [ stable ( feature = "rust1" , since = "1.0.0" ) ] pub use sys :: ext as windows ; # [ cfg ( any ( target_os = "linux" , target_os = "l4re" ) ) ] pub mod linux ; ) ) , }"#,
|
||||||
"__cfg_if_items ! {(rustdoc ,) ;}");
|
"__cfg_if_items ! {(rustdoc ,) ;}");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1294,23 +1293,23 @@ fn test_cfg_if_main() {
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"
|
assert_expansion(MacroKind::Items, &rules, r#"
|
||||||
cfg_if ! {
|
cfg_if ! {
|
||||||
if # [ cfg ( target_env = "msvc" ) ] {
|
if # [ cfg ( target_env = "msvc" ) ] {
|
||||||
// no extra unwinder support needed
|
// no extra unwinder support needed
|
||||||
} else if # [ cfg ( all ( target_arch = "wasm32" , not ( target_os = "emscripten" ) ) ) ] {
|
} else if # [ cfg ( all ( target_arch = "wasm32" , not ( target_os = "emscripten" ) ) ) ] {
|
||||||
// no unwinder on the system!
|
// no unwinder on the system!
|
||||||
} else {
|
} else {
|
||||||
mod libunwind ;
|
mod libunwind ;
|
||||||
pub use libunwind :: * ;
|
pub use libunwind :: * ;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
"#,
|
"#,
|
||||||
"__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}");
|
"__cfg_if_items ! {() ; ((target_env = \"msvc\") ()) , ((all (target_arch = \"wasm32\" , not (target_os = \"emscripten\"))) ()) , (() (mod libunwind ; pub use libunwind :: * ;)) ,}");
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"
|
assert_expansion(MacroKind::Items, &rules, r#"
|
||||||
cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , }
|
cfg_if ! { @ __apply cfg ( all ( not ( any ( not ( any ( target_os = "solaris" , target_os = "illumos" ) ) ) ) ) ) , }
|
||||||
"#,
|
"#,
|
||||||
""
|
""
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1329,16 +1328,16 @@ macro_rules! arbitrary {
|
||||||
$logic
|
$logic
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
}"#,
|
}"#,
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_expansion(MacroKind::Items, &rules, r#"arbitrary ! ( [ A : Arbitrary ]
|
assert_expansion(MacroKind::Items, &rules, r#"arbitrary ! ( [ A : Arbitrary ]
|
||||||
Vec < A > ,
|
Vec < A > ,
|
||||||
VecStrategy < A :: Strategy > ,
|
VecStrategy < A :: Strategy > ,
|
||||||
RangedParams1 < A :: Parameters > ;
|
RangedParams1 < A :: Parameters > ;
|
||||||
args => { let product_unpack ! [ range , a ] = args ; vec ( any_with :: < A > ( a ) , range ) }
|
args => { let product_unpack ! [ range , a ] = args ; vec ( any_with :: < A > ( a ) , range ) }
|
||||||
) ;"#,
|
) ;"#,
|
||||||
"impl <A : Arbitrary > $crate :: arbitrary :: Arbitrary for Vec < A > {type Parameters = RangedParams1 < A :: Parameters > ; type Strategy = VecStrategy < A :: Strategy > ; fn arbitrary_with (args : Self :: Parameters) -> Self :: Strategy {{let product_unpack ! [range , a] = args ; vec (any_with :: < A > (a) , range)}}}");
|
"impl <A : Arbitrary > $crate :: arbitrary :: Arbitrary for Vec < A > {type Parameters = RangedParams1 < A :: Parameters > ; type Strategy = VecStrategy < A :: Strategy > ; fn arbitrary_with (args : Self :: Parameters) -> Self :: Strategy {{let product_unpack ! [range , a] = args ; vec (any_with :: < A > (a) , range)}}}");
|
||||||
}
|
}
|
||||||
|
@ -1350,7 +1349,7 @@ fn test_old_ridl() {
|
||||||
let rules = create_rules(
|
let rules = create_rules(
|
||||||
r#"
|
r#"
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! RIDL {
|
macro_rules! RIDL {
|
||||||
(interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
|
(interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
|
||||||
{$(
|
{$(
|
||||||
fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty
|
fn $method:ident(&mut self $(,$p:ident : $t:ty)*) -> $rtr:ty
|
||||||
|
@ -1360,7 +1359,7 @@ macro_rules! RIDL {
|
||||||
$(pub unsafe fn $method(&mut self) -> $rtr {
|
$(pub unsafe fn $method(&mut self) -> $rtr {
|
||||||
((*self.lpVtbl).$method)(self $(,$p)*)
|
((*self.lpVtbl).$method)(self $(,$p)*)
|
||||||
})+
|
})+
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}"#,
|
}"#,
|
||||||
);
|
);
|
||||||
|
@ -1388,11 +1387,11 @@ macro_rules! quick_error {
|
||||||
quick_error!(ENUM_DEFINITION [enum $name $( #[$meta] )*]
|
quick_error!(ENUM_DEFINITION [enum $name $( #[$meta] )*]
|
||||||
body []
|
body []
|
||||||
queue [$(
|
queue [$(
|
||||||
$( #[$imeta] )*
|
$( #[$imeta] )*
|
||||||
=>
|
=>
|
||||||
$iitem: $imode [$( $ivar: $ityp ),*]
|
$iitem: $imode [$( $ivar: $ityp ),*]
|
||||||
)*]
|
)*]
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -1403,7 +1402,7 @@ macro_rules! quick_error {
|
||||||
&rules,
|
&rules,
|
||||||
r#"
|
r#"
|
||||||
quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [
|
quick_error ! (SORT [enum Wrapped # [derive (Debug)]] items [
|
||||||
=> One : UNIT [] {}
|
=> One : UNIT [] {}
|
||||||
=> Two : TUPLE [s :String] {display ("two: {}" , s) from ()}
|
=> Two : TUPLE [s :String] {display ("two: {}" , s) from ()}
|
||||||
] buf [] queue []) ;
|
] buf [] queue []) ;
|
||||||
"#,
|
"#,
|
||||||
|
|
|
@@ -80,7 +80,9 @@ fn test_doc_comment_none() {
         // non-doc
         mod foo {}
         "#,
-    );
+    )
+    .ok()
+    .unwrap();
     let module = file.syntax().descendants().find_map(Module::cast).unwrap();
     assert!(module.doc_comment_text().is_none());
 }
@@ -93,7 +95,9 @@ fn test_doc_comment_of_items() {
         // non-doc
         mod foo {}
         "#,
-    );
+    )
+    .ok()
+    .unwrap();
     let module = file.syntax().descendants().find_map(Module::cast).unwrap();
     assert_eq!("doc", module.doc_comment_text().unwrap());
 }
@@ -110,7 +114,9 @@ fn test_doc_comment_preserves_indents() {
         /// ```
         mod foo {}
         "#,
-    );
+    )
+    .ok()
+    .unwrap();
     let module = file.syntax().descendants().find_map(Module::cast).unwrap();
     assert_eq!("doc1\n```\nfn foo() {\n // ...\n}\n```", module.doc_comment_text().unwrap());
 }
@@ -133,7 +139,9 @@ where
         for<'a> F: Fn(&'a str)
         {}
         "#,
-    );
+    )
+    .ok()
+    .unwrap();
     let where_clause = file.syntax().descendants().find_map(WhereClause::cast).unwrap();

     let mut predicates = where_clause.predicates();
@@ -5,12 +5,11 @@ use std::str::{self, FromStr};
 fn check_file_invariants(file: &SourceFile) {
     let root = file.syntax();
     validation::validate_block_structure(root);
-    let _ = file.errors();
 }

 pub fn check_parser(text: &str) {
     let file = SourceFile::parse(text);
-    check_file_invariants(&file);
+    check_file_invariants(&file.tree);
 }

 #[derive(Debug, Clone)]
@@ -44,16 +43,18 @@ impl CheckReparse {
     }

     pub fn run(&self) {
-        let file = SourceFile::parse(&self.text);
-        let new_file = file.reparse(&self.edit);
-        check_file_invariants(&new_file);
-        assert_eq!(&new_file.syntax().text().to_string(), &self.edited_text);
+        let parse = SourceFile::parse(&self.text);
+        let new_parse = parse.reparse(&self.edit);
+        check_file_invariants(&new_parse.tree);
+        assert_eq!(&new_parse.tree.syntax().text().to_string(), &self.edited_text);
         let full_reparse = SourceFile::parse(&self.edited_text);
-        for (a, b) in new_file.syntax().descendants().zip(full_reparse.syntax().descendants()) {
+        for (a, b) in
+            new_parse.tree.syntax().descendants().zip(full_reparse.tree.syntax().descendants())
+        {
             if (a.kind(), a.range()) != (b.kind(), b.range()) {
-                eprint!("original:\n{}", file.syntax().debug_dump());
-                eprint!("reparsed:\n{}", new_file.syntax().debug_dump());
-                eprint!("full reparse:\n{}", full_reparse.syntax().debug_dump());
+                eprint!("original:\n{}", parse.tree.syntax().debug_dump());
+                eprint!("reparsed:\n{}", new_parse.tree.syntax().debug_dump());
+                eprint!("full reparse:\n{}", full_reparse.tree.syntax().debug_dump());
                 assert_eq!(
                     format!("{:?}", a),
                     format!("{:?}", b),
@@ -31,6 +31,12 @@ pub mod ast;
 #[doc(hidden)]
 pub mod fuzz;

+use std::{sync::Arc, fmt::Write};
+
+use ra_text_edit::AtomTextEdit;
+
+use crate::syntax_node::GreenNode;
+
 pub use rowan::{SmolStr, TextRange, TextUnit};
 pub use ra_parser::SyntaxKind;
 pub use ra_parser::T;
@@ -43,15 +49,60 @@ pub use crate::{
     parsing::{tokenize, classify_literal, Token},
 };

-use ra_text_edit::AtomTextEdit;
-use crate::syntax_node::GreenNode;
+/// `Parse` is the result of the parsing: a syntax tree and a collection of
+/// errors.
+///
+/// Note that we always produce a syntax tree, even for completely invalid
+/// files.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Parse {
+    pub tree: TreeArc<SourceFile>,
+    pub errors: Arc<Vec<SyntaxError>>,
+}
+
+impl Parse {
+    pub fn ok(self) -> Result<TreeArc<SourceFile>, Arc<Vec<SyntaxError>>> {
+        if self.errors.is_empty() {
+            Ok(self.tree)
+        } else {
+            Err(self.errors)
+        }
+    }
+
+    pub fn reparse(&self, edit: &AtomTextEdit) -> Parse {
+        self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
+    }
+
+    pub fn debug_dump(&self) -> String {
+        let mut buf = self.tree.syntax().debug_dump();
+        for err in self.errors.iter() {
+            writeln!(buf, "err: `{}`", err).unwrap();
+        }
+        buf
+    }
+
+    fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<Parse> {
+        // FIXME: validation errors are not handled here
+        parsing::incremental_reparse(self.tree.syntax(), edit, self.errors.to_vec()).map(
+            |(green_node, errors, _reparsed_range)| Parse {
+                tree: SourceFile::new(green_node),
+                errors: Arc::new(errors),
+            },
+        )
+    }
+
+    fn full_reparse(&self, edit: &AtomTextEdit) -> Parse {
+        let text = edit.apply(self.tree.syntax().text().to_string());
+        SourceFile::parse(&text)
+    }
+}

 /// `SourceFile` represents a parse tree for a single Rust file.
 pub use crate::ast::SourceFile;

 impl SourceFile {
-    fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SourceFile> {
-        let root = SyntaxNode::new(green, errors);
+    fn new(green: GreenNode) -> TreeArc<SourceFile> {
+        let root = SyntaxNode::new(green);
         if cfg!(debug_assertions) {
             validation::validate_block_structure(&root);
         }
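For orientation, a minimal usage sketch of the `Parse` pair introduced above; the input string and the printing are illustrative, not part of the diff:

    use ra_syntax::SourceFile;

    let parse = SourceFile::parse("fn foo() => {}");
    // A tree is always produced, even for invalid input; errors ride alongside it.
    println!("{}", parse.debug_dump());
    // `ok()` turns the pair into a Result for callers that only want valid trees.
    match parse.ok() {
        Ok(_tree) => println!("no syntax errors"),
        Err(errors) => eprintln!("{} syntax errors", errors.len()),
    }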
@@ -59,29 +110,11 @@ impl SourceFile {
         TreeArc::cast(root)
     }

-    pub fn parse(text: &str) -> TreeArc<SourceFile> {
-        let (green, errors) = parsing::parse_text(text);
-        SourceFile::new(green, errors)
-    }
-
-    pub fn reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> {
-        self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
-    }
-
-    pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreeArc<SourceFile>> {
-        parsing::incremental_reparse(self.syntax(), edit, self.errors())
-            .map(|(green_node, errors, _reparsed_range)| SourceFile::new(green_node, errors))
-    }
-
-    fn full_reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> {
-        let text = edit.apply(self.syntax().text().to_string());
-        SourceFile::parse(&text)
-    }
-
-    pub fn errors(&self) -> Vec<SyntaxError> {
-        let mut errors = self.syntax.root_data().to_vec();
-        errors.extend(validation::validate(self));
-        errors
+    pub fn parse(text: &str) -> Parse {
+        let (green, mut errors) = parsing::parse_text(text);
+        let tree = SourceFile::new(green);
+        errors.extend(validation::validate(&tree));
+        Parse { tree, errors: Arc::new(errors) }
     }
 }

@@ -98,14 +131,15 @@ fn api_walkthrough() {
 ";
     // `SourceFile` is the main entry point.
     //
-    // Note how `parse` does not return a `Result`: even completely invalid
-    // source code might be parsed.
-    let file = SourceFile::parse(source_code);
+    // The `parse` method returns a `Parse` -- a pair of syntax tree and a list
+    // of errors. That is, syntax tree is constructed even in presence of errors.
+    let parse = SourceFile::parse(source_code);
+    assert!(parse.errors.is_empty());

     // Due to the way ownership is set up, owned syntax Nodes always live behind
     // a `TreeArc` smart pointer. `TreeArc` is roughly an `std::sync::Arc` which
     // points to the whole file instead of an individual node.
-    let file: TreeArc<SourceFile> = file;
+    let file: TreeArc<SourceFile> = parse.tree;

     // `SourceFile` is the root of the syntax tree. We can iterate file's items:
     let mut func = None;
@@ -166,9 +166,11 @@ fn merge_errors(

 #[cfg(test)]
 mod tests {
+    use std::sync::Arc;
+
     use test_utils::{extract_range, assert_eq_text};

-    use crate::{SourceFile, AstNode};
+    use crate::{SourceFile, AstNode, Parse};
     use super::*;

     fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
@@ -181,14 +183,14 @@ mod tests {
             let f = SourceFile::parse(&before);
             let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
             let (green, new_errors, range) =
-                incremental_reparse(f.syntax(), &edit, f.errors()).unwrap();
+                incremental_reparse(f.tree.syntax(), &edit, f.errors.to_vec()).unwrap();
             assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
-            SourceFile::new(green, new_errors)
+            Parse { tree: SourceFile::new(green), errors: Arc::new(new_errors) }
         };

         assert_eq_text!(
-            &fully_reparsed.syntax().debug_dump(),
-            &incrementally_reparsed.syntax().debug_dump(),
+            &fully_reparsed.tree.syntax().debug_dump(),
+            &incrementally_reparsed.tree.syntax().debug_dump(),
         );
     }
@@ -76,7 +76,7 @@ impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
 fn test_local_syntax_ptr() {
     use crate::{ast, AstNode, SourceFile};

-    let file = SourceFile::parse("struct Foo { f: u32, }");
+    let file = SourceFile::parse("struct Foo { f: u32, }").ok().unwrap();
     let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap();
     let ptr = SyntaxNodePtr::new(field.syntax());
     let field_syntax = ptr.to_node(file.syntax());
@@ -9,7 +9,6 @@
 use std::{
     ops::RangeInclusive,
     fmt::{self, Write},
-    any::Any,
     borrow::Borrow,
     iter::successors,
 };
@@ -133,10 +132,8 @@ pub enum Direction {
 }

 impl SyntaxNode {
-    pub(crate) fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreeArc<SyntaxNode> {
-        let errors: Option<Box<Any + Send + Sync>> =
-            if errors.is_empty() { None } else { Some(Box::new(errors)) };
-        let ptr = TreeArc(rowan::SyntaxNode::new(green, errors));
+    pub(crate) fn new(green: GreenNode) -> TreeArc<SyntaxNode> {
+        let ptr = TreeArc(rowan::SyntaxNode::new(green, None));
         TreeArc::cast(ptr)
     }

|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn debug_dump(&self) -> String {
|
pub fn debug_dump(&self) -> String {
|
||||||
let mut errors: Vec<_> = match self.ancestors().find_map(SourceFile::cast) {
|
|
||||||
Some(file) => file.errors(),
|
|
||||||
None => self.root_data().to_vec(),
|
|
||||||
};
|
|
||||||
errors.sort_by_key(|e| e.offset());
|
|
||||||
let mut err_pos = 0;
|
|
||||||
let mut level = 0;
|
let mut level = 0;
|
||||||
let mut buf = String::new();
|
let mut buf = String::new();
|
||||||
macro_rules! indent {
|
|
||||||
() => {
|
|
||||||
for _ in 0..level {
|
|
||||||
buf.push_str(" ");
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
for event in self.preorder_with_tokens() {
|
for event in self.preorder_with_tokens() {
|
||||||
match event {
|
match event {
|
||||||
WalkEvent::Enter(element) => {
|
WalkEvent::Enter(element) => {
|
||||||
indent!();
|
for _ in 0..level {
|
||||||
|
buf.push_str(" ");
|
||||||
|
}
|
||||||
match element {
|
match element {
|
||||||
SyntaxElement::Node(node) => writeln!(buf, "{:?}", node).unwrap(),
|
SyntaxElement::Node(node) => writeln!(buf, "{:?}", node).unwrap(),
|
||||||
SyntaxElement::Token(token) => {
|
SyntaxElement::Token(token) => writeln!(buf, "{:?}", token).unwrap(),
|
||||||
writeln!(buf, "{:?}", token).unwrap();
|
|
||||||
let off = token.range().end();
|
|
||||||
while err_pos < errors.len() && errors[err_pos].offset() <= off {
|
|
||||||
indent!();
|
|
||||||
writeln!(buf, "err: `{}`", errors[err_pos]).unwrap();
|
|
||||||
err_pos += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
level += 1;
|
level += 1;
|
||||||
}
|
}
|
||||||
|
@@ -298,23 +276,10 @@ impl SyntaxNode {
         }

         assert_eq!(level, 0);
-        for err in errors[err_pos..].iter() {
-            writeln!(buf, "err: `{}`", err).unwrap();
-        }

         buf
     }

-    pub(crate) fn root_data(&self) -> &[SyntaxError] {
-        match self.0.root_data() {
-            None => &[],
-            Some(data) => {
-                let data: &Vec<SyntaxError> = std::any::Any::downcast_ref(data).unwrap();
-                data.as_slice()
-            }
-        }
-    }
-
     pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
         self.0.replace_with(replacement)
     }
@@ -386,7 +351,7 @@ impl SyntaxNode {
         let len = new_children.iter().map(|it| it.text_len()).sum::<TextUnit>();
         let new_node = GreenNode::new(rowan::SyntaxKind(self.kind() as u16), new_children);
         let new_file_node = self.replace_with(new_node);
-        let file = SourceFile::new(new_file_node, Vec::new());
+        let file = SourceFile::new(new_file_node);

         // FIXME: use a more elegant way to re-fetch the node (#1185), make
         // `range` private afterwards
|
||||||
(green, self.errors)
|
(green, self.errors)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn finish(self) -> TreeArc<SyntaxNode> {
|
pub fn finish(self) -> (TreeArc<SyntaxNode>, Vec<SyntaxError>) {
|
||||||
let (green, errors) = self.finish_raw();
|
let (green, errors) = self.finish_raw();
|
||||||
let node = SyntaxNode::new(green, errors);
|
let node = SyntaxNode::new(green);
|
||||||
if cfg!(debug_assertions) {
|
if cfg!(debug_assertions) {
|
||||||
crate::validation::validate_block_structure(&node);
|
crate::validation::validate_block_structure(&node);
|
||||||
}
|
}
|
||||||
node
|
(node, errors)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
|
pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
|
||||||
|
|
|
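With the change above, `SyntaxTreeBuilder::finish` hands the collected errors back to the caller instead of stashing them inside the tree. A sketch of the new call shape, assuming some already-populated `builder: SyntaxTreeBuilder`; the reporting is illustrative only:

    let (node, errors) = builder.finish();
    if !errors.is_empty() {
        // Callers decide what to do with errors now; the node itself no longer carries them.
        eprintln!("built a tree with {} syntax errors", errors.len());
    }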
@ -18,7 +18,6 @@ SOURCE_FILE@[0; 34)
|
||||||
PATH_SEGMENT@[18; 21)
|
PATH_SEGMENT@[18; 21)
|
||||||
NAME_REF@[18; 21)
|
NAME_REF@[18; 21)
|
||||||
IDENT@[18; 21) "u32"
|
IDENT@[18; 21) "u32"
|
||||||
err: `expected COMMA`
|
|
||||||
WHITESPACE@[21; 26) "\n "
|
WHITESPACE@[21; 26) "\n "
|
||||||
NAMED_FIELD_DEF@[26; 32)
|
NAMED_FIELD_DEF@[26; 32)
|
||||||
NAME@[26; 27)
|
NAME@[26; 27)
|
||||||
|
@ -32,3 +31,4 @@ SOURCE_FILE@[0; 34)
|
||||||
IDENT@[29; 32) "u32"
|
IDENT@[29; 32) "u32"
|
||||||
WHITESPACE@[32; 33) "\n"
|
WHITESPACE@[32; 33) "\n"
|
||||||
R_CURLY@[33; 34) "}"
|
R_CURLY@[33; 34) "}"
|
||||||
|
err: `expected COMMA`
|
||||||
|
|
|
@ -1,9 +1,7 @@
|
||||||
SOURCE_FILE@[0; 21)
|
SOURCE_FILE@[0; 21)
|
||||||
ERROR@[0; 2)
|
ERROR@[0; 2)
|
||||||
IF_KW@[0; 2) "if"
|
IF_KW@[0; 2) "if"
|
||||||
err: `expected an item`
|
|
||||||
WHITESPACE@[2; 3) " "
|
WHITESPACE@[2; 3) " "
|
||||||
err: `expected an item`
|
|
||||||
ERROR@[3; 8)
|
ERROR@[3; 8)
|
||||||
MATCH_KW@[3; 8) "match"
|
MATCH_KW@[3; 8) "match"
|
||||||
WHITESPACE@[8; 10) "\n\n"
|
WHITESPACE@[8; 10) "\n\n"
|
||||||
|
@ -16,3 +14,5 @@ SOURCE_FILE@[0; 21)
|
||||||
NAMED_FIELD_DEF_LIST@[19; 21)
|
NAMED_FIELD_DEF_LIST@[19; 21)
|
||||||
L_CURLY@[19; 20) "{"
|
L_CURLY@[19; 20) "{"
|
||||||
R_CURLY@[20; 21) "}"
|
R_CURLY@[20; 21) "}"
|
||||||
|
err: `expected an item`
|
||||||
|
err: `expected an item`
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
SOURCE_FILE@[0; 42)
|
SOURCE_FILE@[0; 42)
|
||||||
SHEBANG@[0; 20) "#!/use/bin/env rusti"
|
SHEBANG@[0; 20) "#!/use/bin/env rusti"
|
||||||
WHITESPACE@[20; 21) "\n"
|
WHITESPACE@[20; 21) "\n"
|
||||||
err: `expected an item`
|
|
||||||
ERROR@[21; 41)
|
ERROR@[21; 41)
|
||||||
SHEBANG@[21; 41) "#!/use/bin/env rusti"
|
SHEBANG@[21; 41) "#!/use/bin/env rusti"
|
||||||
WHITESPACE@[41; 42) "\n"
|
WHITESPACE@[41; 42) "\n"
|
||||||
|
err: `expected an item`
|
||||||
|
|
|
@ -33,7 +33,7 @@ SOURCE_FILE@[0; 40)
|
||||||
COMMA@[36; 37) ","
|
COMMA@[36; 37) ","
|
||||||
WHITESPACE@[37; 38) "\n"
|
WHITESPACE@[37; 38) "\n"
|
||||||
R_CURLY@[38; 39) "}"
|
R_CURLY@[38; 39) "}"
|
||||||
err: `expected item, found `;`
|
|
||||||
consider removing this semicolon`
|
|
||||||
ERROR@[39; 40)
|
ERROR@[39; 40)
|
||||||
SEMI@[39; 40) ";"
|
SEMI@[39; 40) ";"
|
||||||
|
err: `expected item, found `;`
|
||||||
|
consider removing this semicolon`
|
||||||
|
|
|
@ -9,8 +9,8 @@ SOURCE_FILE@[0; 12)
|
||||||
NAME_REF@[4; 7)
|
NAME_REF@[4; 7)
|
||||||
IDENT@[4; 7) "foo"
|
IDENT@[4; 7) "foo"
|
||||||
COLONCOLON@[7; 9) "::"
|
COLONCOLON@[7; 9) "::"
|
||||||
err: `expected identifier`
|
|
||||||
PATH_SEGMENT@[9; 11)
|
PATH_SEGMENT@[9; 11)
|
||||||
ERROR@[9; 11)
|
ERROR@[9; 11)
|
||||||
INT_NUMBER@[9; 11) "92"
|
INT_NUMBER@[9; 11) "92"
|
||||||
SEMI@[11; 12) ";"
|
SEMI@[11; 12) ";"
|
||||||
|
err: `expected identifier`
|
||||||
|
|
|
@ -49,7 +49,7 @@ SOURCE_FILE@[0; 54)
|
||||||
L_CURLY@[50; 51) "{"
|
L_CURLY@[50; 51) "{"
|
||||||
WHITESPACE@[51; 52) "\n"
|
WHITESPACE@[51; 52) "\n"
|
||||||
R_CURLY@[52; 53) "}"
|
R_CURLY@[52; 53) "}"
|
||||||
err: `expected R_PAREN`
|
|
||||||
err: `expected R_BRACK`
|
|
||||||
err: `expected an item`
|
|
||||||
WHITESPACE@[53; 54) "\n"
|
WHITESPACE@[53; 54) "\n"
|
||||||
|
err: `expected R_PAREN`
|
||||||
|
err: `expected R_BRACK`
|
||||||
|
err: `expected an item`
|
||||||
|
|
|
@ -23,25 +23,17 @@ SOURCE_FILE@[0; 74)
|
||||||
VISIBILITY@[27; 30)
|
VISIBILITY@[27; 30)
|
||||||
PUB_KW@[27; 30) "pub"
|
PUB_KW@[27; 30) "pub"
|
||||||
WHITESPACE@[30; 31) " "
|
WHITESPACE@[30; 31) " "
|
||||||
err: `expected field declaration`
|
|
||||||
ERROR@[31; 33)
|
ERROR@[31; 33)
|
||||||
INT_NUMBER@[31; 33) "92"
|
INT_NUMBER@[31; 33) "92"
|
||||||
err: `expected COMMA`
|
|
||||||
WHITESPACE@[33; 38) "\n "
|
WHITESPACE@[33; 38) "\n "
|
||||||
err: `expected field declaration`
|
|
||||||
ERROR@[38; 39)
|
ERROR@[38; 39)
|
||||||
PLUS@[38; 39) "+"
|
PLUS@[38; 39) "+"
|
||||||
err: `expected COMMA`
|
|
||||||
WHITESPACE@[39; 40) " "
|
WHITESPACE@[39; 40) " "
|
||||||
err: `expected field declaration`
|
|
||||||
ERROR@[40; 41)
|
ERROR@[40; 41)
|
||||||
MINUS@[40; 41) "-"
|
MINUS@[40; 41) "-"
|
||||||
err: `expected COMMA`
|
|
||||||
WHITESPACE@[41; 42) " "
|
WHITESPACE@[41; 42) " "
|
||||||
err: `expected field declaration`
|
|
||||||
ERROR@[42; 43)
|
ERROR@[42; 43)
|
||||||
STAR@[42; 43) "*"
|
STAR@[42; 43) "*"
|
||||||
err: `expected COMMA`
|
|
||||||
WHITESPACE@[43; 48) "\n "
|
WHITESPACE@[43; 48) "\n "
|
||||||
NAMED_FIELD_DEF@[48; 58)
|
NAMED_FIELD_DEF@[48; 58)
|
||||||
VISIBILITY@[48; 51)
|
VISIBILITY@[48; 51)
|
||||||
|
@ -72,3 +64,11 @@ SOURCE_FILE@[0; 74)
|
||||||
WHITESPACE@[71; 72) "\n"
|
WHITESPACE@[71; 72) "\n"
|
||||||
R_CURLY@[72; 73) "}"
|
R_CURLY@[72; 73) "}"
|
||||||
WHITESPACE@[73; 74) "\n"
|
WHITESPACE@[73; 74) "\n"
|
||||||
|
err: `expected field declaration`
|
||||||
|
err: `expected COMMA`
|
||||||
|
err: `expected field declaration`
|
||||||
|
err: `expected COMMA`
|
||||||
|
err: `expected field declaration`
|
||||||
|
err: `expected COMMA`
|
||||||
|
err: `expected field declaration`
|
||||||
|
err: `expected COMMA`
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
SOURCE_FILE@[0; 31)
|
SOURCE_FILE@[0; 31)
|
||||||
ERROR@[0; 1)
|
ERROR@[0; 1)
|
||||||
R_CURLY@[0; 1) "}"
|
R_CURLY@[0; 1) "}"
|
||||||
err: `unmatched `}``
|
|
||||||
WHITESPACE@[1; 3) "\n\n"
|
WHITESPACE@[1; 3) "\n\n"
|
||||||
STRUCT_DEF@[3; 12)
|
STRUCT_DEF@[3; 12)
|
||||||
STRUCT_KW@[3; 9) "struct"
|
STRUCT_KW@[3; 9) "struct"
|
||||||
|
@ -10,7 +9,6 @@ SOURCE_FILE@[0; 31)
|
||||||
IDENT@[10; 11) "S"
|
IDENT@[10; 11) "S"
|
||||||
SEMI@[11; 12) ";"
|
SEMI@[11; 12) ";"
|
||||||
WHITESPACE@[12; 14) "\n\n"
|
WHITESPACE@[12; 14) "\n\n"
|
||||||
err: `unmatched `}``
|
|
||||||
ERROR@[14; 15)
|
ERROR@[14; 15)
|
||||||
R_CURLY@[14; 15) "}"
|
R_CURLY@[14; 15) "}"
|
||||||
WHITESPACE@[15; 17) "\n\n"
|
WHITESPACE@[15; 17) "\n\n"
|
||||||
|
@ -26,7 +24,9 @@ SOURCE_FILE@[0; 31)
|
||||||
L_CURLY@[25; 26) "{"
|
L_CURLY@[25; 26) "{"
|
||||||
R_CURLY@[26; 27) "}"
|
R_CURLY@[26; 27) "}"
|
||||||
WHITESPACE@[27; 29) "\n\n"
|
WHITESPACE@[27; 29) "\n\n"
|
||||||
err: `unmatched `}``
|
|
||||||
ERROR@[29; 30)
|
ERROR@[29; 30)
|
||||||
R_CURLY@[29; 30) "}"
|
R_CURLY@[29; 30) "}"
|
||||||
WHITESPACE@[30; 31) "\n"
|
WHITESPACE@[30; 31) "\n"
|
||||||
|
err: `unmatched `}``
|
||||||
|
err: `unmatched `}``
|
||||||
|
err: `unmatched `}``
|
||||||
|
|
|
@ -18,13 +18,10 @@ SOURCE_FILE@[0; 95)
|
||||||
PATH_SEGMENT@[14; 17)
|
PATH_SEGMENT@[14; 17)
|
||||||
NAME_REF@[14; 17)
|
NAME_REF@[14; 17)
|
||||||
IDENT@[14; 17) "bar"
|
IDENT@[14; 17) "bar"
|
||||||
err: `expected EXCL`
|
|
||||||
TOKEN_TREE@[17; 19)
|
TOKEN_TREE@[17; 19)
|
||||||
L_PAREN@[17; 18) "("
|
L_PAREN@[17; 18) "("
|
||||||
R_PAREN@[18; 19) ")"
|
R_PAREN@[18; 19) ")"
|
||||||
err: `expected SEMI`
|
|
||||||
WHITESPACE@[19; 20) " "
|
WHITESPACE@[19; 20) " "
|
||||||
err: `expected an item`
|
|
||||||
ERROR@[20; 80)
|
ERROR@[20; 80)
|
||||||
L_CURLY@[20; 21) "{"
|
L_CURLY@[20; 21) "{"
|
||||||
WHITESPACE@[21; 26) "\n "
|
WHITESPACE@[21; 26) "\n "
|
||||||
|
@ -75,3 +72,6 @@ SOURCE_FILE@[0; 95)
|
||||||
WHITESPACE@[92; 93) "\n"
|
WHITESPACE@[92; 93) "\n"
|
||||||
R_CURLY@[93; 94) "}"
|
R_CURLY@[93; 94) "}"
|
||||||
WHITESPACE@[94; 95) "\n"
|
WHITESPACE@[94; 95) "\n"
|
||||||
|
err: `expected EXCL`
|
||||||
|
err: `expected SEMI`
|
||||||
|
err: `expected an item`
|
||||||
|
|
|
@@ -6,25 +6,17 @@ SOURCE_FILE@[0; 43)
 IDENT@[7; 8) "S"
 TYPE_PARAM_LIST@[8; 11)
 L_ANGLE@[8; 9) "<"
-err: `expected type parameter`
 ERROR@[9; 11)
 INT_NUMBER@[9; 11) "90"
-err: `expected COMMA`
-err: `expected R_ANGLE`
-err: `expected `;`, `{`, or `(``
 WHITESPACE@[11; 12) " "
-err: `expected an item`
 ERROR@[12; 13)
 PLUS@[12; 13) "+"
 WHITESPACE@[13; 14) " "
-err: `expected an item`
 ERROR@[14; 15)
 INT_NUMBER@[14; 15) "2"
-err: `expected an item`
 ERROR@[15; 16)
 R_ANGLE@[15; 16) ">"
 WHITESPACE@[16; 17) " "
-err: `expected an item`
 ERROR@[17; 31)
 L_CURLY@[17; 18) "{"
 WHITESPACE@[18; 23) "\n    "
@@ -34,12 +26,9 @@ SOURCE_FILE@[0; 43)
 PATH_SEGMENT@[23; 24)
 NAME_REF@[23; 24)
 IDENT@[23; 24) "f"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[24; 25)
 ERROR@[24; 25)
 COLON@[24; 25) ":"
-err: `expected SEMI`
 WHITESPACE@[25; 26) " "
 PATH_EXPR@[26; 29)
 PATH@[26; 29)
@@ -56,3 +45,14 @@ SOURCE_FILE@[0; 43)
 IDENT@[40; 41) "T"
 SEMI@[41; 42) ";"
 WHITESPACE@[42; 43) "\n"
+err: `expected type parameter`
+err: `expected COMMA`
+err: `expected R_ANGLE`
+err: `expected `;`, `{`, or `(``
+err: `expected an item`
+err: `expected an item`
+err: `expected an item`
+err: `expected an item`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
@@ -23,7 +23,6 @@ SOURCE_FILE@[0; 42)
 TUPLE_TYPE@[22; 24)
 L_PAREN@[22; 23) "("
 R_PAREN@[23; 24) ")"
-err: `expected `{``
 WHITESPACE@[24; 25) " "
 BLOCK_EXPR@[25; 38)
 UNSAFE_KW@[25; 31) "unsafe"
@@ -40,3 +39,4 @@ SOURCE_FILE@[0; 42)
 WHITESPACE@[39; 40) "\n"
 R_CURLY@[40; 41) "}"
 WHITESPACE@[41; 42) "\n"
+err: `expected `{``
@@ -2,7 +2,6 @@ SOURCE_FILE@[0; 19)
 ERROR@[0; 6)
 ABI@[0; 6)
 EXTERN_KW@[0; 6) "extern"
-err: `expected fn, trait or impl`
 WHITESPACE@[6; 7) " "
 STRUCT_DEF@[7; 18)
 STRUCT_KW@[7; 13) "struct"
@@ -11,3 +10,4 @@ SOURCE_FILE@[0; 19)
 IDENT@[14; 17) "Foo"
 SEMI@[17; 18) ";"
 WHITESPACE@[18; 19) "\n"
+err: `expected fn, trait or impl`
@@ -43,17 +43,9 @@ SOURCE_FILE@[0; 86)
 IDENT@[63; 66) "Box"
 TYPE_ARG_LIST@[66; 68)
 L_ANGLE@[66; 67) "<"
-err: `expected type`
 TYPE_ARG@[67; 68)
 ERROR@[67; 68)
 AT@[67; 68) "@"
-err: `expected COMMA`
-err: `expected R_ANGLE`
-err: `expected COMMA`
-err: `expected R_ANGLE`
-err: `expected COMMA`
-err: `expected R_ANGLE`
-err: `expected COMMA`
 WHITESPACE@[68; 69) " "
 POS_FIELD_DEF@[69; 72)
 PATH_TYPE@[69; 72)
@@ -61,29 +53,37 @@ SOURCE_FILE@[0; 86)
 PATH_SEGMENT@[69; 72)
 NAME_REF@[69; 72)
 IDENT@[69; 72) "Any"
-err: `expected COMMA`
-err: `expected a type`
-err: `expected R_PAREN`
-err: `expected SEMI`
-err: `expected an item`
 ERROR@[72; 72)
 ERROR@[72; 73)
 R_ANGLE@[72; 73) ">"
-err: `expected an item`
 ERROR@[73; 74)
 COMMA@[73; 74) ","
 WHITESPACE@[74; 79) "\n    "
-err: `expected an item`
 ERROR@[79; 80)
 R_ANGLE@[79; 80) ">"
-err: `expected an item`
 ERROR@[80; 81)
 R_ANGLE@[80; 81) ">"
 WHITESPACE@[81; 82) "\n"
-err: `expected an item`
 ERROR@[82; 83)
 R_PAREN@[82; 83) ")"
-err: `expected an item`
 ERROR@[83; 84)
 SEMI@[83; 84) ";"
 WHITESPACE@[84; 86) "\n\n"
+err: `expected type`
+err: `expected COMMA`
+err: `expected R_ANGLE`
+err: `expected COMMA`
+err: `expected R_ANGLE`
+err: `expected COMMA`
+err: `expected R_ANGLE`
+err: `expected COMMA`
+err: `expected COMMA`
+err: `expected a type`
+err: `expected R_PAREN`
+err: `expected SEMI`
+err: `expected an item`
+err: `expected an item`
+err: `expected an item`
+err: `expected an item`
+err: `expected an item`
+err: `expected an item`
@@ -23,9 +23,9 @@ SOURCE_FILE@[0; 23)
 PATH_SEGMENT@[18; 19)
 NAME_REF@[18; 19)
 IDENT@[18; 19) "T"
-err: `expected colon`
 WHITESPACE@[19; 20) " "
 BLOCK@[20; 22)
 L_CURLY@[20; 21) "{"
 R_CURLY@[21; 22) "}"
 WHITESPACE@[22; 23) "\n"
+err: `expected colon`
@@ -6,19 +6,19 @@ SOURCE_FILE@[0; 14)
 IDENT@[3; 6) "foo"
 PARAM_LIST@[6; 7)
 L_PAREN@[6; 7) "("
-err: `expected value parameter`
-err: `expected R_PAREN`
-err: `expected a block`
-err: `unmatched `}``
 ERROR@[7; 8)
 R_CURLY@[7; 8) "}"
-err: `expected an item`
 ERROR@[8; 9)
 R_PAREN@[8; 9) ")"
 WHITESPACE@[9; 10) " "
-err: `expected an item`
 ERROR@[10; 13)
 L_CURLY@[10; 11) "{"
 WHITESPACE@[11; 12) "\n"
 R_CURLY@[12; 13) "}"
 WHITESPACE@[13; 14) "\n"
+err: `expected value parameter`
+err: `expected R_PAREN`
+err: `expected a block`
+err: `unmatched `}``
+err: `expected an item`
+err: `expected an item`
@@ -29,7 +29,6 @@ SOURCE_FILE@[0; 56)
 INT_NUMBER@[31; 32) "2"
 WHITESPACE@[32; 37) "\n    "
 R_PAREN@[37; 38) ")"
-err: `expected SEMI`
 WHITESPACE@[38; 43) "\n    "
 EXPR_STMT@[43; 53)
 RETURN_EXPR@[43; 52)
@@ -41,3 +40,4 @@ SOURCE_FILE@[0; 56)
 WHITESPACE@[53; 54) "\n"
 R_CURLY@[54; 55) "}"
 WHITESPACE@[55; 56) "\n"
+err: `expected SEMI`
@@ -40,7 +40,7 @@ SOURCE_FILE@[0; 47)
 INT_NUMBER@[41; 42) "1"
 WHITESPACE@[42; 43) " "
 PLUS@[43; 44) "+"
-err: `expected expression`
 WHITESPACE@[44; 45) "\n"
 R_CURLY@[45; 46) "}"
 WHITESPACE@[46; 47) "\n"
+err: `expected expression`
@@ -21,9 +21,6 @@ SOURCE_FILE@[0; 183)
 PARAM@[33; 34)
 REF_PAT@[33; 34)
 AMP@[33; 34) "&"
-err: `expected pattern`
-err: `expected COLON`
-err: `expected type`
 R_PAREN@[34; 35) ")"
 WHITESPACE@[35; 36) " "
 RET_TYPE@[36; 46)
@@ -124,8 +121,11 @@ SOURCE_FILE@[0; 183)
 WHITESPACE@[169; 170) " "
 NAME@[170; 180)
 IDENT@[170; 180) "set_parent"
-err: `expected function arguments`
-err: `expected a block`
 WHITESPACE@[180; 181) "\n"
 R_CURLY@[181; 182) "}"
 WHITESPACE@[182; 183) "\n"
+err: `expected pattern`
+err: `expected COLON`
+err: `expected type`
+err: `expected function arguments`
+err: `expected a block`
@@ -19,8 +19,6 @@ SOURCE_FILE@[0; 139)
 IDENT@[19; 22) "foo"
 WHITESPACE@[22; 23) " "
 EQ@[23; 24) "="
-err: `expected expression`
-err: `expected SEMI`
 WHITESPACE@[24; 29) "\n    "
 LET_STMT@[29; 41)
 LET_KW@[29; 32) "let"
@@ -37,8 +35,6 @@ SOURCE_FILE@[0; 139)
 WHITESPACE@[41; 46) "\n    "
 LET_STMT@[46; 49)
 LET_KW@[46; 49) "let"
-err: `expected pattern`
-err: `expected SEMI`
 WHITESPACE@[49; 54) "\n    "
 LET_STMT@[54; 67)
 LET_KW@[54; 57) "let"
@@ -55,8 +51,6 @@ SOURCE_FILE@[0; 139)
 WHITESPACE@[67; 72) "\n    "
 LET_STMT@[72; 75)
 LET_KW@[72; 75) "let"
-err: `expected pattern`
-err: `expected SEMI`
 WHITESPACE@[75; 80) "\n    "
 EXPR_STMT@[80; 90)
 IF_EXPR@[80; 90)
@@ -72,8 +66,6 @@ SOURCE_FILE@[0; 139)
 WHITESPACE@[90; 95) "\n    "
 LET_STMT@[95; 98)
 LET_KW@[95; 98) "let"
-err: `expected pattern`
-err: `expected SEMI`
 WHITESPACE@[98; 103) "\n    "
 EXPR_STMT@[103; 116)
 WHILE_EXPR@[103; 116)
@@ -89,8 +81,6 @@ SOURCE_FILE@[0; 139)
 WHITESPACE@[116; 121) "\n    "
 LET_STMT@[121; 124)
 LET_KW@[121; 124) "let"
-err: `expected pattern`
-err: `expected SEMI`
 WHITESPACE@[124; 129) "\n    "
 LOOP_EXPR@[129; 136)
 LOOP_KW@[129; 133) "loop"
@@ -101,3 +91,13 @@ SOURCE_FILE@[0; 139)
 WHITESPACE@[136; 137) "\n"
 R_CURLY@[137; 138) "}"
 WHITESPACE@[138; 139) "\n"
+err: `expected expression`
+err: `expected SEMI`
+err: `expected pattern`
+err: `expected SEMI`
+err: `expected pattern`
+err: `expected SEMI`
+err: `expected pattern`
+err: `expected SEMI`
+err: `expected pattern`
+err: `expected SEMI`
@@ -1,9 +1,6 @@
 SOURCE_FILE@[0; 16)
 FN_DEF@[0; 2)
 FN_KW@[0; 2) "fn"
-err: `expected a name`
-err: `expected function arguments`
-err: `expected a block`
 WHITESPACE@[2; 4) "\n\n"
 FN_DEF@[4; 15)
 FN_KW@[4; 6) "fn"
@@ -18,3 +15,6 @@ SOURCE_FILE@[0; 16)
 L_CURLY@[13; 14) "{"
 R_CURLY@[14; 15) "}"
 WHITESPACE@[15; 16) "\n"
+err: `expected a name`
+err: `expected function arguments`
+err: `expected a block`
@@ -23,8 +23,6 @@ SOURCE_FILE@[0; 22)
 BIND_PAT@[15; 16)
 NAME@[15; 16)
 IDENT@[15; 16) "y"
-err: `expected COLON`
-err: `expected type`
 R_PAREN@[16; 17) ")"
 WHITESPACE@[17; 18) " "
 BLOCK@[18; 21)
@@ -32,3 +30,5 @@ SOURCE_FILE@[0; 22)
 WHITESPACE@[19; 20) "\n"
 R_CURLY@[20; 21) "}"
 WHITESPACE@[21; 22) "\n"
+err: `expected COLON`
+err: `expected type`
@@ -21,34 +21,23 @@ SOURCE_FILE@[0; 112)
 LITERAL@[13; 14)
 INT_NUMBER@[13; 14) "2"
 COMMA@[14; 15) ","
-err: `expected expression`
-err: `expected R_BRACK`
-err: `expected SEMI`
 WHITESPACE@[15; 16) " "
-err: `expected expression`
 EXPR_STMT@[16; 17)
 ERROR@[16; 17)
 AT@[16; 17) "@"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[17; 18)
 ERROR@[17; 18)
 COMMA@[17; 18) ","
-err: `expected SEMI`
 WHITESPACE@[18; 19) " "
 STRUCT_DEF@[19; 26)
 STRUCT_KW@[19; 25) "struct"
-err: `expected a name`
 ERROR@[25; 26)
 COMMA@[25; 26) ","
-err: `expected `;`, `{`, or `(``
 WHITESPACE@[26; 27) " "
 LET_STMT@[27; 31)
 LET_KW@[27; 30) "let"
-err: `expected pattern`
 ERROR@[30; 31)
 R_BRACK@[30; 31) "]"
-err: `expected SEMI`
 WHITESPACE@[31; 32) " "
 R_CURLY@[32; 33) "}"
 WHITESPACE@[33; 34) "\n"
@@ -80,35 +69,22 @@ SOURCE_FILE@[0; 112)
 LITERAL@[50; 51)
 INT_NUMBER@[50; 51) "2"
 COMMA@[51; 52) ","
-err: `expected expression`
-err: `expected SEMI`
 WHITESPACE@[52; 53) " "
-err: `expected expression`
 EXPR_STMT@[53; 54)
 ERROR@[53; 54)
 AT@[53; 54) "@"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[54; 55)
 ERROR@[54; 55)
 COMMA@[54; 55) ","
-err: `expected SEMI`
 WHITESPACE@[55; 56) " "
 IMPL_BLOCK@[56; 60)
 IMPL_KW@[56; 60) "impl"
-err: `expected type`
-err: `expected `{``
-err: `expected expression`
 EXPR_STMT@[60; 61)
 ERROR@[60; 61)
 COMMA@[60; 61) ","
-err: `expected SEMI`
 WHITESPACE@[61; 62) " "
 LET_STMT@[62; 65)
 LET_KW@[62; 65) "let"
-err: `expected pattern`
-err: `expected SEMI`
-err: `expected expression`
 ERROR@[65; 66)
 R_PAREN@[65; 66) ")"
 WHITESPACE@[66; 67) " "
@@ -145,45 +121,69 @@ SOURCE_FILE@[0; 112)
 LITERAL@[89; 90)
 INT_NUMBER@[89; 90) "2"
 COMMA@[90; 91) ","
-err: `expected expression`
-err: `expected SEMI`
 WHITESPACE@[91; 92) " "
-err: `expected expression`
 EXPR_STMT@[92; 93)
 ERROR@[92; 93)
 AT@[92; 93) "@"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[93; 94)
 ERROR@[93; 94)
 COMMA@[93; 94) ","
-err: `expected SEMI`
 WHITESPACE@[94; 95) " "
-err: `expected expression`
 EXPR_STMT@[95; 96)
 ERROR@[95; 96)
 R_BRACK@[95; 96) "]"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[96; 97)
 ERROR@[96; 97)
 COMMA@[96; 97) ","
-err: `expected SEMI`
 WHITESPACE@[97; 98) " "
 TRAIT_DEF@[98; 104)
 TRAIT_KW@[98; 103) "trait"
-err: `expected a name`
 ERROR@[103; 104)
 COMMA@[103; 104) ","
-err: `expected `{``
 WHITESPACE@[104; 105) " "
 LET_STMT@[105; 108)
 LET_KW@[105; 108) "let"
-err: `expected pattern`
-err: `expected SEMI`
-err: `expected expression`
 ERROR@[108; 109)
 R_PAREN@[108; 109) ")"
 WHITESPACE@[109; 110) " "
 R_CURLY@[110; 111) "}"
 WHITESPACE@[111; 112) "\n"
+err: `expected expression`
+err: `expected R_BRACK`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected a name`
+err: `expected `;`, `{`, or `(``
+err: `expected pattern`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected type`
+err: `expected `{``
+err: `expected expression`
+err: `expected SEMI`
+err: `expected pattern`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected a name`
+err: `expected `{``
+err: `expected pattern`
+err: `expected SEMI`
+err: `expected expression`
@@ -31,13 +31,13 @@ SOURCE_FILE@[0; 94)
 COMMA@[44; 45) ","
 WHITESPACE@[45; 46) " "
 FLOAT_NUMBER@[46; 49) "2.0"
-err: `unmatched `}``
 WHITESPACE@[49; 54) "\n    "
 R_CURLY@[54; 55) "}"
 WHITESPACE@[55; 56) " "
 COMMENT@[56; 91) "//~ ERROR incorrect c ..."
 WHITESPACE@[91; 92) "\n"
-err: `unmatched `}``
 ERROR@[92; 93)
 R_CURLY@[92; 93) "}"
 WHITESPACE@[93; 94) "\n"
+err: `unmatched `}``
+err: `unmatched `}``
@@ -103,8 +103,6 @@ SOURCE_FILE@[0; 240)
 NAME_REF@[83; 87)
 IDENT@[83; 87) "Copy"
 R_PAREN@[87; 88) ")"
-err: `expected COMMA`
-err: `expected R_ANGLE`
 WHITESPACE@[88; 89) " "
 PLUS@[89; 90) "+"
 WHITESPACE@[90; 91) " "
@@ -141,8 +139,6 @@ SOURCE_FILE@[0; 240)
 LIFETIME@[117; 119) "\'a"
 R_ANGLE@[119; 120) ">"
 R_PAREN@[120; 121) ")"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[121; 123)
 ERROR@[121; 122)
 R_ANGLE@[121; 122) ">"
@@ -165,54 +161,35 @@ SOURCE_FILE@[0; 240)
 TYPE_ARG@[139; 141)
 PAREN_TYPE@[139; 141)
 L_PAREN@[139; 140) "("
-err: `expected type`
 ERROR@[140; 141)
 QUESTION@[140; 141) "?"
-err: `expected R_PAREN`
-err: `expected COMMA`
-err: `expected R_ANGLE`
-err: `expected SEMI`
 EXPR_STMT@[141; 146)
 PATH_EXPR@[141; 146)
 PATH@[141; 146)
 PATH_SEGMENT@[141; 146)
 NAME_REF@[141; 146)
 IDENT@[141; 146) "Sized"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[146; 147)
 ERROR@[146; 147)
 R_PAREN@[146; 147) ")"
-err: `expected SEMI`
 WHITESPACE@[147; 148) " "
-err: `expected expression`
 EXPR_STMT@[148; 149)
 ERROR@[148; 149)
 PLUS@[148; 149) "+"
-err: `expected SEMI`
 WHITESPACE@[149; 150) " "
 EXPR_STMT@[150; 151)
 PAREN_EXPR@[150; 151)
 L_PAREN@[150; 151) "("
-err: `expected expression`
-err: `expected R_PAREN`
-err: `expected SEMI`
 EXPR_STMT@[151; 157)
 FOR_EXPR@[151; 157)
 FOR_KW@[151; 154) "for"
-err: `expected pattern`
 ERROR@[154; 155)
 L_ANGLE@[154; 155) "<"
-err: `expected IN_KW`
-err: `expected expression`
 ERROR@[155; 157)
 LIFETIME@[155; 157) "\'a"
-err: `expected a block`
-err: `expected expression`
 EXPR_STMT@[157; 158)
 ERROR@[157; 158)
 R_ANGLE@[157; 158) ">"
-err: `expected SEMI`
 WHITESPACE@[158; 159) " "
 EXPR_STMT@[159; 180)
 BIN_EXPR@[159; 180)
@@ -225,11 +202,9 @@ SOURCE_FILE@[0; 240)
 NAME_REF@[159; 164)
 IDENT@[159; 164) "Trait"
 L_ANGLE@[164; 165) "<"
-err: `expected expression`
 ERROR@[165; 167)
 LIFETIME@[165; 167) "\'a"
 R_ANGLE@[167; 168) ">"
-err: `expected expression`
 ERROR@[168; 169)
 R_PAREN@[168; 169) ")"
 WHITESPACE@[169; 170) " "
@@ -244,10 +219,8 @@ SOURCE_FILE@[0; 240)
 IDENT@[173; 177) "Copy"
 R_PAREN@[177; 178) ")"
 R_ANGLE@[178; 179) ">"
-err: `expected expression`
 ERROR@[179; 180)
 SEMI@[179; 180) ";"
-err: `expected SEMI`
 WHITESPACE@[180; 185) "\n    "
 LET_STMT@[185; 235)
 LET_KW@[185; 188) "let"
@@ -288,8 +261,6 @@ SOURCE_FILE@[0; 240)
 LIFETIME@[211; 213) "\'a"
 R_ANGLE@[213; 214) ">"
 R_PAREN@[214; 215) ")"
-err: `expected COMMA`
-err: `expected R_ANGLE`
 WHITESPACE@[215; 216) " "
 PLUS@[216; 217) "+"
 WHITESPACE@[217; 218) " "
@@ -313,8 +284,6 @@ SOURCE_FILE@[0; 240)
 NAME_REF@[229; 234)
 IDENT@[229; 234) "Sized"
 R_PAREN@[234; 235) ")"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[235; 237)
 ERROR@[235; 236)
 R_ANGLE@[235; 236) ">"
@@ -322,3 +291,34 @@ SOURCE_FILE@[0; 240)
 WHITESPACE@[237; 238) "\n"
 R_CURLY@[238; 239) "}"
 WHITESPACE@[239; 240) "\n"
+err: `expected COMMA`
+err: `expected R_ANGLE`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected type`
+err: `expected R_PAREN`
+err: `expected COMMA`
+err: `expected R_ANGLE`
+err: `expected SEMI`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected R_PAREN`
+err: `expected SEMI`
+err: `expected pattern`
+err: `expected IN_KW`
+err: `expected expression`
+err: `expected a block`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected expression`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected COMMA`
+err: `expected R_ANGLE`
+err: `expected SEMI`
+err: `expected expression`
@@ -50,14 +50,10 @@ SOURCE_FILE@[0; 575)
 NAME@[91; 94)
 IDENT@[91; 94) "abc"
 COLON@[94; 95) ":"
-err: `expected type`
-err: `expected COMMA`
 WHITESPACE@[95; 96) " "
-err: `expected field`
 ERROR@[96; 98)
 L_CURLY@[96; 97) "{"
 R_CURLY@[97; 98) "}"
-err: `expected field declaration`
 ERROR@[98; 99)
 COMMA@[98; 99) ","
 WHITESPACE@[99; 100) " "
@@ -159,17 +155,11 @@ SOURCE_FILE@[0; 575)
 PATH_SEGMENT@[368; 371)
 NAME_REF@[368; 371)
 IDENT@[368; 371) "i32"
-err: `expected COMMA`
 WHITESPACE@[371; 372) " "
-err: `expected a type`
-err: `expected R_PAREN`
-err: `expected COMMA`
-err: `expected enum variant`
 ERROR@[372; 372)
 ERROR@[372; 374)
 L_CURLY@[372; 373) "{"
 R_CURLY@[373; 374) "}"
-err: `expected enum variant`
 ERROR@[374; 375)
 R_PAREN@[374; 375) ")"
 WHITESPACE@[375; 376) " "
@@ -192,7 +182,6 @@ SOURCE_FILE@[0; 575)
 WHITESPACE@[505; 506) " "
 EQ@[506; 507) "="
 WHITESPACE@[507; 508) " "
-err: `expected expression`
 ERROR@[508; 509)
 UNDERSCORE@[508; 509) "_"
 SEMI@[509; 510) ";"
@@ -201,3 +190,14 @@ SOURCE_FILE@[0; 575)
 WHITESPACE@[572; 573) "\n"
 R_CURLY@[573; 574) "}"
 WHITESPACE@[574; 575) "\n"
+err: `expected type`
+err: `expected COMMA`
+err: `expected field`
+err: `expected field declaration`
+err: `expected COMMA`
+err: `expected a type`
+err: `expected R_PAREN`
+err: `expected COMMA`
+err: `expected enum variant`
+err: `expected enum variant`
+err: `expected expression`
@@ -16,8 +16,6 @@ SOURCE_FILE@[0; 38)
 NAME_REF@[8; 13)
 IDENT@[8; 13) "Clone"
 R_ANGLE@[13; 14) ">"
-err: `expected trait or type`
-err: `expected `{``
 WHITESPACE@[14; 15) "\n"
 IMPL_BLOCK@[15; 37)
 IMPL_KW@[15; 19) "impl"
@@ -47,3 +45,5 @@ SOURCE_FILE@[0; 38)
 L_CURLY@[35; 36) "{"
 R_CURLY@[36; 37) "}"
 WHITESPACE@[37; 38) "\n"
+err: `expected trait or type`
+err: `expected `{``
@@ -19,10 +19,10 @@ SOURCE_FILE@[0; 30)
 LIFETIME_PARAM@[23; 25)
 LIFETIME@[23; 25) "\'a"
 R_ANGLE@[25; 26) ">"
-err: `expected a path`
-err: `expected colon`
 WHITESPACE@[26; 27) "\n"
 BLOCK@[27; 29)
 L_CURLY@[27; 28) "{"
 R_CURLY@[28; 29) "}"
 WHITESPACE@[29; 30) "\n"
+err: `expected a path`
+err: `expected colon`
@@ -4,7 +4,6 @@ SOURCE_FILE@[0; 349)
 PATH_SEGMENT@[0; 5)
 NAME_REF@[0; 5)
 IDENT@[0; 5) "macro"
-err: `expected EXCL`
 WHITESPACE@[5; 6) " "
 NAME@[6; 21)
 IDENT@[6; 21) "parse_use_trees"
@@ -28,9 +27,7 @@ SOURCE_FILE@[0; 349)
 R_PAREN@[38; 39) ")"
 STAR@[39; 40) "*"
 R_PAREN@[40; 41) ")"
-err: `expected SEMI`
 WHITESPACE@[41; 42) " "
-err: `expected an item`
 ERROR@[42; 93)
 L_CURLY@[42; 43) "{"
 WHITESPACE@[43; 48) "\n    "
@@ -85,7 +82,6 @@ SOURCE_FILE@[0; 349)
 PATH_SEGMENT@[134; 139)
 NAME_REF@[134; 139)
 IDENT@[134; 139) "macro"
-err: `expected SEMI`
 WHITESPACE@[139; 140) " "
 EXPR_STMT@[140; 154)
 CALL_EXPR@[140; 154)
@@ -98,175 +94,112 @@ SOURCE_FILE@[0; 349)
 L_PAREN@[150; 151) "("
 ARRAY_EXPR@[151; 154)
 L_BRACK@[151; 152) "["
-err: `expected expression`
 ERROR@[152; 153)
 DOLLAR@[152; 153) "$"
-err: `expected COMMA`
 PAREN_EXPR@[153; 154)
 L_PAREN@[153; 154) "("
-err: `expected expression`
-err: `expected R_PAREN`
-err: `expected COMMA`
-err: `expected expression`
-err: `expected R_BRACK`
-err: `expected COMMA`
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[154; 155)
 ERROR@[154; 155)
 DOLLAR@[154; 155) "$"
-err: `expected SEMI`
 EXPR_STMT@[155; 160)
 PATH_EXPR@[155; 160)
 PATH@[155; 160)
 PATH_SEGMENT@[155; 160)
 NAME_REF@[155; 160)
 IDENT@[155; 160) "input"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[160; 161)
 ERROR@[160; 161)
 COLON@[160; 161) ":"
-err: `expected SEMI`
 EXPR_STMT@[161; 165)
 PATH_EXPR@[161; 165)
 PATH@[161; 165)
 PATH_SEGMENT@[161; 165)
 NAME_REF@[161; 165)
 IDENT@[161; 165) "expr"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[165; 166)
 ERROR@[165; 166)
 R_PAREN@[165; 166) ")"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[166; 167)
 ERROR@[166; 167)
 COMMA@[166; 167) ","
-err: `expected SEMI`
 EXPR_STMT@[167; 170)
 PREFIX_EXPR@[167; 170)
 STAR@[167; 168) "*"
 WHITESPACE@[168; 169) " "
-err: `expected expression`
 ERROR@[169; 170)
 DOLLAR@[169; 170) "$"
-err: `expected SEMI`
 EXPR_STMT@[170; 171)
 PAREN_EXPR@[170; 171)
 L_PAREN@[170; 171) "("
-err: `expected expression`
-err: `expected R_PAREN`
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[171; 172)
 ERROR@[171; 172)
 COMMA@[171; 172) ","
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[172; 173)
 ERROR@[172; 173)
 R_PAREN@[172; 173) ")"
-err: `expected SEMI`
 EXPR_STMT@[173; 175)
 PREFIX_EXPR@[173; 175)
 STAR@[173; 174) "*"
-err: `expected expression`
 ERROR@[174; 175)
 R_BRACK@[174; 175) "]"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[175; 176)
 ERROR@[175; 176)
 COMMA@[175; 176) ","
-err: `expected SEMI`
 WHITESPACE@[176; 177) " "
 EXPR_STMT@[177; 180)
 ARRAY_EXPR@[177; 180)
 L_BRACK@[177; 178) "["
-err: `expected expression`
 ERROR@[178; 179)
 DOLLAR@[178; 179) "$"
-err: `expected COMMA`
 PAREN_EXPR@[179; 180)
 L_PAREN@[179; 180) "("
-err: `expected expression`
-err: `expected R_PAREN`
-err: `expected COMMA`
-err: `expected expression`
-err: `expected R_BRACK`
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[180; 181)
 ERROR@[180; 181)
 DOLLAR@[180; 181) "$"
-err: `expected SEMI`
 EXPR_STMT@[181; 187)
 PATH_EXPR@[181; 187)
 PATH@[181; 187)
 PATH_SEGMENT@[181; 187)
 NAME_REF@[181; 187)
 IDENT@[181; 187) "output"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[187; 188)
 ERROR@[187; 188)
 COLON@[187; 188) ":"
-err: `expected SEMI`
 EXPR_STMT@[188; 192)
 PATH_EXPR@[188; 192)
 PATH@[188; 192)
 PATH_SEGMENT@[188; 192)
 NAME_REF@[188; 192)
 IDENT@[188; 192) "expr"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[192; 193)
 ERROR@[192; 193)
 R_PAREN@[192; 193) ")"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[193; 194)
 ERROR@[193; 194)
 COMMA@[193; 194) ","
-err: `expected SEMI`
 EXPR_STMT@[194; 197)
 PREFIX_EXPR@[194; 197)
 STAR@[194; 195) "*"
 WHITESPACE@[195; 196) " "
-err: `expected expression`
 ERROR@[196; 197)
 DOLLAR@[196; 197) "$"
-err: `expected SEMI`
 EXPR_STMT@[197; 198)
 PAREN_EXPR@[197; 198)
 L_PAREN@[197; 198) "("
-err: `expected expression`
-err: `expected R_PAREN`
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[198; 199)
 ERROR@[198; 199)
 COMMA@[198; 199) ","
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[199; 200)
 ERROR@[199; 200)
 R_PAREN@[199; 200) ")"
-err: `expected SEMI`
 EXPR_STMT@[200; 202)
 PREFIX_EXPR@[200; 202)
 STAR@[200; 201) "*"
-err: `expected expression`
 ERROR@[201; 202)
 R_BRACK@[201; 202) "]"
-err: `expected SEMI`
-err: `expected expression`
 EXPR_STMT@[202; 203)
 ERROR@[202; 203)
 R_PAREN@[202; 203) ")"
-err: `expected SEMI`
 WHITESPACE@[203; 204) " "
 BLOCK_EXPR@[204; 346)
 BLOCK@[204; 346)
@@ -323,3 +256,70 @@ SOURCE_FILE@[0; 349)
 WHITESPACE@[346; 347) "\n"
 R_CURLY@[347; 348) "}"
 WHITESPACE@[348; 349) "\n"
+err: `expected EXCL`
+err: `expected SEMI`
+err: `expected an item`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected COMMA`
+err: `expected expression`
+err: `expected R_PAREN`
+err: `expected COMMA`
+err: `expected expression`
+err: `expected R_BRACK`
+err: `expected COMMA`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected R_PAREN`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected COMMA`
+err: `expected expression`
+err: `expected R_PAREN`
+err: `expected COMMA`
+err: `expected expression`
+err: `expected R_BRACK`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected R_PAREN`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
+err: `expected expression`
+err: `expected SEMI`
@@ -29,7 +29,7 @@ SOURCE_FILE@[0; 24)
 NAME_REF@[19; 20)
 IDENT@[19; 20) "a"
 DOT@[20; 21) "."
-err: `expected field name or number`
 WHITESPACE@[21; 22) "\n"
 R_CURLY@[22; 23) "}"
 WHITESPACE@[23; 24) "\n"
+err: `expected field name or number`
@@ -21,7 +21,6 @@ SOURCE_FILE@[0; 112)
 WHITESPACE@[23; 24) " "
 LITERAL@[24; 27)
 CHAR@[24; 27) "\'c\'"
-err: `expected SEMI`
 EXPR_STMT@[27; 31)
 PATH_EXPR@[27; 30)
 PATH@[27; 30)
@@ -68,3 +67,4 @@ SOURCE_FILE@[0; 112)
 WHITESPACE@[109; 110) "\n"
 R_CURLY@[110; 111) "}"
 WHITESPACE@[111; 112) "\n"
+err: `expected SEMI`
@@ -24,7 +24,6 @@ SOURCE_FILE@[0; 350)
 BLOCK@[29; 128)
 L_CURLY@[29; 30) "{"
 WHITESPACE@[30; 39) "\n        "
-err: `A block in this position cannot accept inner attributes`
 ATTR@[39; 83)
 POUND@[39; 40) "#"
 EXCL@[40; 41) "!"
@@ -53,7 +52,6 @@ SOURCE_FILE@[0; 350)
 BLOCK@[142; 257)
 L_CURLY@[142; 143) "{"
 WHITESPACE@[143; 152) "\n        "
-err: `A block in this position cannot accept inner attributes`
 ATTR@[152; 171)
 POUND@[152; 153) "#"
 EXCL@[153; 154) "!"
@@ -66,7 +64,6 @@ SOURCE_FILE@[0; 350)
 R_PAREN@[169; 170) ")"
 R_BRACK@[170; 171) "]"
 WHITESPACE@[171; 180) "\n        "
-err: `A block in this position cannot accept inner attributes`
 ATTR@[180; 212)
 POUND@[180; 181) "#"
 EXCL@[181; 182) "!"
@@ -93,7 +90,6 @@ SOURCE_FILE@[0; 350)
 BLOCK@[273; 347)
 L_CURLY@[273; 274) "{"
 WHITESPACE@[274; 283) "\n        "
-err: `A block in this position cannot accept inner attributes`
 ATTR@[283; 302)
 POUND@[283; 284) "#"
 EXCL@[284; 285) "!"
@@ -112,3 +108,7 @@ SOURCE_FILE@[0; 350)
 WHITESPACE@[347; 348) "\n"
 R_CURLY@[348; 349) "}"
 WHITESPACE@[349; 350) "\n"
+err: `A block in this position cannot accept inner attributes`
+err: `A block in this position cannot accept inner attributes`
+err: `A block in this position cannot accept inner attributes`
+err: `A block in this position cannot accept inner attributes`
@@ -36,11 +36,8 @@ SOURCE_FILE@[0; 293)
 MATCH_ARM@[51; 78)
 ATTR@[51; 52)
 POUND@[51; 52) "#"
-err: `expected `[``
-err: `expected pattern`
 ERROR@[52; 53)
 EXCL@[52; 53) "!"
-err: `expected FAT_ARROW`
 ARRAY_EXPR@[53; 78)
 L_BRACK@[53; 54) "["
 CALL_EXPR@[54; 77)
@@ -55,7 +52,6 @@ SOURCE_FILE@[0; 293)
 STRING@[58; 76) "\"Not allowed here\""
 R_PAREN@[76; 77) ")"
 R_BRACK@[77; 78) "]"
-err: `expected COMMA`
 WHITESPACE@[78; 87) "\n        "
 MATCH_ARM@[87; 94)
 PLACEHOLDER_PAT@[87; 88)
@@ -106,11 +102,8 @@ SOURCE_FILE@[0; 293)
 MATCH_ARM@[160; 179)
 ATTR@[160; 161)
 POUND@[160; 161) "#"
-err: `expected `[``
-err: `expected pattern`
 ERROR@[161; 162)
 EXCL@[161; 162) "!"
-err: `expected FAT_ARROW`
 ARRAY_EXPR@[162; 179)
 L_BRACK@[162; 163) "["
 CALL_EXPR@[163; 178)
@@ -152,11 +145,8 @@ SOURCE_FILE@[0; 293)
 WHITESPACE@[222; 231) "\n        "
 ATTR@[231; 232)
 POUND@[231; 232) "#"
-err: `expected `[``
-err: `expected pattern`
 ERROR@[232; 233)
 EXCL@[232; 233) "!"
-err: `expected FAT_ARROW`
 ARRAY_EXPR@[233; 250)
 L_BRACK@[233; 234) "["
 CALL_EXPR@[234; 249)
@@ -171,7 +161,6 @@ SOURCE_FILE@[0; 293)
 STRING@[238; 248) "\"Nor here\""
 R_PAREN@[248; 249) ")"
 R_BRACK@[249; 250) "]"
-err: `expected COMMA`
 WHITESPACE@[250; 259) "\n        "
 MATCH_ARM@[259; 266)
 PLACEHOLDER_PAT@[259; 260)
@@ -199,3 +188,14 @@ SOURCE_FILE@[0; 293)
 WHITESPACE@[290; 291) "\n"
 R_CURLY@[291; 292) "}"
 WHITESPACE@[292; 293) "\n"
+err: `expected `[``
+err: `expected pattern`
+err: `expected FAT_ARROW`
+err: `expected COMMA`
+err: `expected `[``
+err: `expected pattern`
+err: `expected FAT_ARROW`
+err: `expected `[``
+err: `expected pattern`
+err: `expected FAT_ARROW`
+err: `expected COMMA`
@@ -54,11 +54,11 @@ SOURCE_FILE@[0; 89)
 IDENT@[74; 78) "test"
 R_PAREN@[78; 79) ")"
 R_BRACK@[79; 80) "]"
-err: `expected pattern`
-err: `expected FAT_ARROW`
-err: `expected expression`
 WHITESPACE@[80; 85) "\n    "
 R_CURLY@[85; 86) "}"
 WHITESPACE@[86; 87) "\n"
 R_CURLY@[87; 88) "}"
 WHITESPACE@[88; 89) "\n"
+err: `expected pattern`
+err: `expected FAT_ARROW`
+err: `expected expression`
@@ -12,16 +12,16 @@ SOURCE_FILE@[0; 18)
 TUPLE_TYPE@[10; 12)
 L_PAREN@[10; 11) "("
 R_PAREN@[11; 12) ")"
-err: `expected `;` or `]``
-err: `expected SEMI`
 WHITESPACE@[12; 13) " "
-err: `expected an item`
 ERROR@[13; 15)
 INT_NUMBER@[13; 15) "92"
-err: `expected an item`
 ERROR@[15; 16)
 R_BRACK@[15; 16) "]"
-err: `expected an item`
 ERROR@[16; 17)
 SEMI@[16; 17) ";"
 WHITESPACE@[17; 18) "\n"
+err: `expected `;` or `]``
+err: `expected SEMI`
+err: `expected an item`
+err: `expected an item`
+err: `expected an item`
@@ -16,13 +16,13 @@ SOURCE_FILE@[0; 30)
 LABEL@[16; 22)
 LIFETIME@[16; 21) "\'loop"
 COLON@[21; 22) ":"
-err: `expected a loop`
-err: `expected SEMI`
 WHITESPACE@[22; 23) " "
 IMPL_BLOCK@[23; 27)
 IMPL_KW@[23; 27) "impl"
-err: `expected type`
-err: `expected `{``
 WHITESPACE@[27; 28) "\n"
 R_CURLY@[28; 29) "}"
 WHITESPACE@[29; 30) "\n"
+err: `expected a loop`
+err: `expected SEMI`
+err: `expected type`
+err: `expected `{``
@@ -9,9 +9,9 @@ SOURCE_FILE@[0; 14)
 WHITESPACE@[8; 9) " "
 POINTER_TYPE@[9; 12)
 STAR@[9; 10) "*"
-err: `expected mut or const in raw pointer type (use `*mut T` or `*const T` as appropriate)`
 TUPLE_TYPE@[10; 12)
 L_PAREN@[10; 11) "("
 R_PAREN@[11; 12) ")"
 SEMI@[12; 13) ";"
 WHITESPACE@[13; 14) "\n"
+err: `expected mut or const in raw pointer type (use `*mut T` or `*const T` as appropriate)`
@@ -35,8 +35,6 @@ SOURCE_FILE@[0; 87)
 WHITESPACE@[33; 34) "\n"
 IMPL_BLOCK@[34; 38)
 IMPL_KW@[34; 38) "impl"
-err: `expected trait or type`
-err: `expected `{``
 WHITESPACE@[38; 39) " "
 IMPL_BLOCK@[39; 54)
 IMPL_KW@[39; 43) "impl"
@@ -61,8 +59,6 @@ SOURCE_FILE@[0; 87)
 IDENT@[60; 66) "Trait2"
 WHITESPACE@[66; 67) " "
 FOR_KW@[67; 70) "for"
-err: `expected trait or type`
-err: `expected `{``
 WHITESPACE@[70; 71) " "
 IMPL_BLOCK@[71; 86)
 IMPL_KW@[71; 75) "impl"
@@ -77,3 +73,7 @@ SOURCE_FILE@[0; 87)
 L_CURLY@[84; 85) "{"
 R_CURLY@[85; 86) "}"
 WHITESPACE@[86; 87) "\n"
+err: `expected trait or type`
+err: `expected `{``
+err: `expected trait or type`
+err: `expected `{``
@@ -8,16 +8,16 @@ SOURCE_FILE@[0; 20)
 EQ@[7; 8) "="
 WHITESPACE@[8; 9) " "
 UNSAFE_KW@[9; 15) "unsafe"
-err: `expected `fn``
-err: `expected SEMI`
 WHITESPACE@[15; 16) " "
-err: `expected an item`
 ERROR@[16; 17)
 L_PAREN@[16; 17) "("
-err: `expected an item`
 ERROR@[17; 18)
 R_PAREN@[17; 18) ")"
-err: `expected an item`
 ERROR@[18; 19)
 SEMI@[18; 19) ";"
 WHITESPACE@[19; 20) "\n"
+err: `expected `fn``
+err: `expected SEMI`
+err: `expected an item`
+err: `expected an item`
+err: `expected an item`
@@ -11,11 +11,9 @@ SOURCE_FILE@[0; 33)
 L_CURLY@[8; 9) "{"
 R_CURLY@[9; 10) "}"
 WHITESPACE@[10; 11) " "
-err: `expected an item`
 ERROR@[11; 17)
 UNSAFE_KW@[11; 17) "unsafe"
 WHITESPACE@[17; 18) " "
-err: `expected an item`
 ERROR@[18; 21)
 L_CURLY@[18; 19) "{"
 WHITESPACE@[19; 20) " "
@@ -33,3 +31,5 @@ SOURCE_FILE@[0; 33)
 L_CURLY@[30; 31) "{"
 R_CURLY@[31; 32) "}"
 WHITESPACE@[32; 33) "\n"
+err: `expected an item`
+err: `expected an item`
@@ -25,7 +25,7 @@ SOURCE_FILE@[0; 30)
 BLOCK@[25; 27)
 L_CURLY@[25; 26) "{"
 R_CURLY@[26; 27) "}"
-err: `expected SEMI`
 WHITESPACE@[27; 28) " "
 R_CURLY@[28; 29) "}"
 WHITESPACE@[29; 30) "\n"
+err: `expected SEMI`
@@ -14,7 +14,6 @@ SOURCE_FILE@[0; 21)
 ERROR@[11; 14)
 VISIBILITY@[11; 14)
 PUB_KW@[11; 14) "pub"
-err: `expected an item`
 WHITESPACE@[14; 15) " "
 EXPR_STMT@[15; 18)
 LITERAL@[15; 17)
@@ -23,3 +22,4 @@ SOURCE_FILE@[0; 21)
 WHITESPACE@[18; 19) " "
 R_CURLY@[19; 20) "}"
 WHITESPACE@[20; 21) "\n"
+err: `expected an item`
@@ -27,7 +27,6 @@ SOURCE_FILE@[0; 48)
 WHITESPACE@[22; 23) " "
 LITERAL@[23; 24)
 INT_NUMBER@[23; 24) "2"
-err: `attributes are not allowed on BIN_EXPR`
 SEMI@[24; 25) ";"
 WHITESPACE@[25; 29) "\n "
 EXPR_STMT@[29; 45)
@@ -48,8 +47,9 @@ SOURCE_FILE@[0; 48)
 BLOCK@[42; 44)
 L_CURLY@[42; 43) "{"
 R_CURLY@[43; 44) "}"
-err: `attributes are not allowed on IF_EXPR`
 SEMI@[44; 45) ";"
 WHITESPACE@[45; 46) "\n"
 R_CURLY@[46; 47) "}"
 WHITESPACE@[47; 48) "\n"
+err: `attributes are not allowed on BIN_EXPR`
+err: `attributes are not allowed on IF_EXPR`
@@ -19,7 +19,6 @@ SOURCE_FILE@[0; 47)
 NAME_REF@[15; 16)
 IDENT@[15; 16) "x"
 DOT@[16; 17) "."
-err: `Tuple (struct) field access is only allowed through decimal integers with no underscores or suffix`
 FLOAT_NUMBER@[17; 19) "0."
 SEMI@[19; 20) ";"
 WHITESPACE@[20; 25) "\n "
@@ -31,7 +30,6 @@ SOURCE_FILE@[0; 47)
 NAME_REF@[25; 26)
 IDENT@[25; 26) "x"
 DOT@[26; 27) "."
-err: `Tuple (struct) field access is only allowed through decimal integers with no underscores or suffix`
 INT_NUMBER@[27; 31) "1i32"
 SEMI@[31; 32) ";"
 WHITESPACE@[32; 37) "\n "
@@ -43,9 +41,11 @@ SOURCE_FILE@[0; 47)
 NAME_REF@[37; 38)
 IDENT@[37; 38) "x"
 DOT@[38; 39) "."
-err: `Tuple (struct) field access is only allowed through decimal integers with no underscores or suffix`
 INT_NUMBER@[39; 43) "0x01"
 SEMI@[43; 44) ";"
 WHITESPACE@[44; 45) "\n"
 R_CURLY@[45; 46) "}"
 WHITESPACE@[46; 47) "\n"
+err: `Tuple (struct) field access is only allowed through decimal integers with no underscores or suffix`
+err: `Tuple (struct) field access is only allowed through decimal integers with no underscores or suffix`
+err: `Tuple (struct) field access is only allowed through decimal integers with no underscores or suffix`
@@ -1,7 +1,6 @@
 SOURCE_FILE@[0; 50)
 ERROR@[0; 5)
 ASYNC_KW@[0; 5) "async"
-err: `expected fn, trait or impl`
 WHITESPACE@[5; 6) " "
 FN_DEF@[6; 24)
 UNSAFE_KW@[6; 12) "unsafe"
@@ -20,7 +19,6 @@ SOURCE_FILE@[0; 50)
 WHITESPACE@[24; 25) "\n"
 ERROR@[25; 31)
 UNSAFE_KW@[25; 31) "unsafe"
-err: `expected fn, trait or impl`
 WHITESPACE@[31; 32) " "
 FN_DEF@[32; 49)
 CONST_KW@[32; 37) "const"
@@ -37,3 +35,5 @@ SOURCE_FILE@[0; 50)
 L_CURLY@[47; 48) "{"
 R_CURLY@[48; 49) "}"
 WHITESPACE@[49; 50) "\n"
+err: `expected fn, trait or impl`
+err: `expected fn, trait or impl`
@@ -8,7 +8,7 @@ use std::{
 };

 use test_utils::{project_dir, dir_tests, read_text, collect_tests};
-use ra_syntax::{SourceFile, AstNode, fuzz};
+use ra_syntax::{SourceFile, fuzz};

 #[test]
 fn lexer_tests() {
@@ -21,26 +21,21 @@ fn lexer_tests() {
 #[test]
 fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
-        let file = SourceFile::parse(text);
-        let errors = file.errors();
+        let parse = SourceFile::parse(text);
+        let errors = parse.errors.as_slice();
         assert_eq!(
-            &*errors,
+            errors,
             &[] as &[ra_syntax::SyntaxError],
             "There should be no errors in the file {:?}",
-            path.display()
+            path.display(),
         );
-        file.syntax().debug_dump()
+        parse.debug_dump()
     });
     dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
-        let file = SourceFile::parse(text);
-        let errors = file.errors();
-        assert_ne!(
-            &*errors,
-            &[] as &[ra_syntax::SyntaxError],
-            "There should be errors in the file {:?}",
-            path.display()
-        );
-        file.syntax().debug_dump()
+        let parse = SourceFile::parse(text);
+        let errors = parse.errors.as_slice();
+        assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
+        parse.debug_dump()
     });
 }

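The updated `parser_tests` above no longer asks the tree for its errors: it reads `parse.errors.as_slice()` straight off the parse result and dumps via `parse.debug_dump()` instead of `file.syntax().debug_dump()`. Below is a minimal sketch of the shape this implies, assuming placeholder types in place of the real `TreeArc<SourceFile>` and `SyntaxError`; only the `tree` and `errors` field names and the `debug_dump` method come from the diff, everything else is illustrative.

// Placeholder stand-ins for the real ra_syntax types; only the shape matters here.
#[derive(Debug)]
struct SourceFile {
    dump: String, // stand-in for the tree's debug representation
}

#[derive(Debug, PartialEq)]
struct SyntaxError {
    msg: String,
}

// Sketch: the parse result keeps the tree and its errors side by side,
// rather than storing the errors inside the syntax tree.
struct Parse {
    tree: SourceFile,
    errors: Vec<SyntaxError>,
}

impl Parse {
    // Mirrors the snapshot change in the .txt hunks above: the tree dump comes
    // first, then one `err:` line per error is appended at the end.
    fn debug_dump(&self) -> String {
        let mut out = self.tree.dump.clone();
        for e in &self.errors {
            out.push_str(&format!("err: `{}`\n", e.msg));
        }
        out
    }
}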
@@ -83,9 +78,7 @@ fn self_hosting_parsing() {
    {
        count += 1;
        let text = read_text(entry.path());
-       let node = SourceFile::parse(&text);
-       let errors = node.errors();
-       assert_eq!(&*errors, &[], "There should be no errors in the file {:?}", entry);
+       SourceFile::parse(&text).ok().expect("There should be no errors in the file");
    }
    assert!(
        count > 30,
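The `self_hosting_parsing` hunk above swaps the manual `assert_eq!` on `node.errors()` for `SourceFile::parse(&text).ok().expect(...)`. Continuing the placeholder `Parse` sketch from above, a hedged guess at what such an `ok()` helper does; the real ra_syntax signature may differ.

impl Parse {
    // Sketch: collapse the (tree, errors) pair back into a Result, so callers that
    // only accept error-free input can write `.ok().unwrap()` or `.ok().expect(...)`.
    fn ok(self) -> Result<SourceFile, Vec<SyntaxError>> {
        if self.errors.is_empty() {
            Ok(self.tree)
        } else {
            Err(self.errors)
        }
    }
}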