Merge branch 'master' of github.com:rust-analyzer/rust-analyzer

commit 0b40876b99 — Benjamin Coenen, 2020-05-02 12:25:04 +02:00
255 changed files with 11248 additions and 11172 deletions

Cargo.lock (generated)

@@ -58,9 +58,9 @@ dependencies = [
 [[package]]
 name = "backtrace-sys"
-version = "0.1.36"
+version = "0.1.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78848718ee1255a2485d1309ad9cdecfc2e7d0362dd11c6829364c6b35ae1bc7"
+checksum = "18fbebbe1c9d1f383a9cc7e8ccdb471b91c8d024ee9c2ca5b5346121fe8b4399"
 dependencies = [
  "cc",
  "libc",
@@ -170,7 +170,7 @@ dependencies = [
  "chalk-ir",
  "chalk-macros",
  "chalk-rust-ir",
- "ena",
+ "ena 0.13.1",
  "itertools",
  "petgraph",
  "rustc-hash",
@@ -199,14 +199,15 @@ dependencies = [
 [[package]]
 name = "console"
-version = "0.10.0"
+version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6728a28023f207181b193262711102bfbaf47cc9d13bc71d0736607ef8efe88c"
+checksum = "2586208b33573b7f76ccfbe5adb076394c88deaf81b84d7213969805b0a952a7"
 dependencies = [
  "clicolors-control",
  "encode_unicode",
  "lazy_static",
  "libc",
+ "terminal_size",
  "termios",
  "winapi 0.3.8",
 ]
@@ -315,6 +316,15 @@ dependencies = [
  "log",
 ]
+
+[[package]]
+name = "ena"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"
+dependencies = [
+ "log",
+]
 [[package]]
 name = "encode_unicode"
 version = "0.3.6"
@@ -381,9 +391,9 @@ dependencies = [
 [[package]]
 name = "fst"
-version = "0.4.1"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4eaf9ea41cc964d742f7fc7861db75d2d6e83a3ce0d897d5c6f8b621f015ddc8"
+checksum = "81f9cac32c1741cdf6b66be7dcf0d9c7f25ccf12f8aa84c16cfa31f9f14513b3"
 [[package]]
 name = "fuchsia-zircon"
@@ -447,9 +457,9 @@ dependencies = [
 [[package]]
 name = "hermit-abi"
-version = "0.1.11"
+version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a0d737e0f947a1864e93d33fdef4af8445a00d1ed8dc0c8ddb73139ea6abf15"
+checksum = "61565ff7aaace3525556587bd2dc31d4a07071957be715e63ce7b1eccf51a8f4"
 dependencies = [
  "libc",
 ]
@@ -645,9 +655,9 @@ dependencies = [
 [[package]]
 name = "lsp-types"
-version = "0.74.0"
+version = "0.74.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "820f746e5716ab9a2d664794636188bd003023b72e55404ee27105dc22869922"
+checksum = "57c0e6a2b8837d27b29deb3f3e6dc1c6d2f57947677f9be1024e482ec5b59525"
 dependencies = [
  "base64",
  "bitflags",
@@ -696,9 +706,9 @@ dependencies = [
 [[package]]
 name = "mio"
-version = "0.6.21"
+version = "0.6.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f"
+checksum = "fce347092656428bc8eaf6201042cb551b8d67855af7374542a92a0fbfcac430"
 dependencies = [
  "cfg-if",
  "fuchsia-zircon",
@@ -739,9 +749,9 @@ dependencies = [
 [[package]]
 name = "net2"
-version = "0.2.33"
+version = "0.2.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88"
+checksum = "2ba7c918ac76704fb42afcbbb43891e72731f3dcca3bef2a19786297baf14af7"
 dependencies = [
  "cfg-if",
  "libc",
@@ -814,9 +824,9 @@ dependencies = [
 [[package]]
 name = "paste"
-version = "0.1.10"
+version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab4fb1930692d1b6a9cfabdde3d06ea0a7d186518e2f4d67660d8970e2fa647a"
+checksum = "a3c897744f63f34f7ae3a024d9162bb5001f4ad661dd24bea0dc9f075d2de1c6"
 dependencies = [
  "paste-impl",
  "proc-macro-hack",
@@ -824,9 +834,9 @@ dependencies = [
 [[package]]
 name = "paste-impl"
-version = "0.1.10"
+version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a62486e111e571b1e93b710b61e8f493c0013be39629b714cb166bdb06aa5a8a"
+checksum = "66fd6f92e3594f2dd7b3fc23e42d82e292f7bcda6d8e5dcd167072327234ab89"
 dependencies = [
  "proc-macro-hack",
  "proc-macro2",
@@ -885,9 +895,9 @@ dependencies = [
 [[package]]
 name = "quote"
-version = "1.0.3"
+version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bdc6c187c65bca4260c9011c9e3132efe4909da44726bad24cf7572ae338d7f"
+checksum = "4c1f4b0efa5fc5e8ceb705136bfee52cfdb6a4e3509f770b478cd6ed434232a7"
 dependencies = [
  "proc-macro2",
 ]
@@ -1023,7 +1033,7 @@ dependencies = [
  "chalk-ir",
  "chalk-rust-ir",
  "chalk-solve",
- "ena",
+ "ena 0.14.0",
  "insta",
  "itertools",
  "log",
@@ -1374,9 +1384,9 @@ dependencies = [
 [[package]]
 name = "rustc-ap-rustc_lexer"
-version = "652.0.0"
+version = "656.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3a6a43c4d0889218c5e2ae68ffea239f303fc05ab1078c73f74e63feb87f7889"
+checksum = "9cbba98ec46e96a4663197dfa8c0378752de2006e314e5400c0ca74929d6692f"
 dependencies = [
  "unicode-xid",
 ]
@@ -1486,18 +1496,18 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
 [[package]]
 name = "serde"
-version = "1.0.104"
+version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449"
+checksum = "36df6ac6412072f67cf767ebbde4133a5b2e88e76dc6187fa7104cd16f783399"
 dependencies = [
  "serde_derive",
 ]
 [[package]]
 name = "serde_derive"
-version = "1.0.104"
+version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64"
+checksum = "9e549e3abf4fb8621bd1609f11dfc9f5e50320802273b12f3811a67e6716ea6c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1506,9 +1516,9 @@ dependencies = [
 [[package]]
 name = "serde_json"
-version = "1.0.51"
+version = "1.0.52"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da07b57ee2623368351e9a0488bb0b261322a15a6e0ae53e243cbdc0f4208da9"
+checksum = "a7894c8ed05b7a3a279aeb79025fdec1d3158080b75b98a08faf2806bb799edd"
 dependencies = [
  "itoa",
  "ryu",
@@ -1606,6 +1616,16 @@ dependencies = [
  "winapi 0.3.8",
 ]
+
+[[package]]
+name = "terminal_size"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8038f95fc7a6f351163f4b964af631bd26c9e828f7db085f2a84aca56f70d13b"
+dependencies = [
+ "libc",
+ "winapi 0.3.8",
+]
 [[package]]
 name = "termios"
 version = "0.3.2"


@@ -85,6 +85,7 @@ impl<'a> SubstituteTypeParams<'a> {
 ast::TypeRef::PathType(path_type) => path_type.path()?,
 _ => return None,
 };
+// FIXME: use `hir::Path::from_src` instead.
 let path = hir::Path::from_ast(path)?;
 let resolution = self.source_scope.resolve_hir_path(&path)?;
 match resolution {
@@ -128,6 +129,7 @@ impl<'a> QualifyPaths<'a> {
 // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway
 return None;
 }
+// FIXME: use `hir::Path::from_src` instead.
 let hir_path = hir::Path::from_ast(p.clone());
 let resolution = self.source_scope.resolve_hir_path(&hir_path?)?;
 match resolution {


@@ -180,7 +180,9 @@ trait Trait<T> {
 }
 impl Trait<u32> for () {
-fn foo(&self) -> u32 { todo!() }
+fn foo(&self) -> u32 {
+todo!()
+}
 }
 "#####,


@@ -1,6 +1,10 @@
 use hir::HasSource;
 use ra_syntax::{
-ast::{self, edit, make, AstNode, NameOwner},
+ast::{
+self,
+edit::{self, IndentLevel},
+make, AstNode, NameOwner,
+},
 SmolStr,
 };
@@ -40,7 +44,9 @@ enum AddMissingImplMembersMode {
 // }
 //
 // impl Trait<u32> for () {
-//     fn foo(&self) -> u32 { todo!() }
+//     fn foo(&self) -> u32 {
+//         todo!()
+//     }
 //
 // }
 // ```
@@ -165,7 +171,9 @@ fn add_missing_impl_members_inner(
 fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
 if fn_def.body().is_none() {
-fn_def.with_body(make::block_from_expr(make::expr_todo()))
+let body = make::block_expr(None, Some(make::expr_todo()));
+let body = IndentLevel(1).increase_indent(body);
+fn_def.with_body(body)
 } else {
 fn_def
 }
@@ -181,7 +189,7 @@ mod tests {
 fn test_add_missing_impl_members() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 trait Foo {
 type Output;
@@ -197,8 +205,8 @@ struct S;
 impl Foo for S {
 fn bar(&self) {}
 <|>
-}",
-"
+}"#,
+r#"
 trait Foo {
 type Output;
@@ -215,10 +223,14 @@ impl Foo for S {
 fn bar(&self) {}
 <|>type Output;
 const CONST: usize = 42;
-fn foo(&self) { todo!() }
-fn baz(&self) { todo!() }
-}",
+fn foo(&self) {
+todo!()
+}
+fn baz(&self) {
+todo!()
+}
+}"#,
 );
 }
@@ -226,7 +238,7 @@ impl Foo for S {
 fn test_copied_overriden_members() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 trait Foo {
 fn foo(&self);
 fn bar(&self) -> bool { true }
@@ -238,8 +250,8 @@ struct S;
 impl Foo for S {
 fn bar(&self) {}
 <|>
-}",
-"
+}"#,
+r#"
 trait Foo {
 fn foo(&self);
 fn bar(&self) -> bool { true }
@@ -250,9 +262,11 @@ struct S;
 impl Foo for S {
 fn bar(&self) {}
-<|>fn foo(&self) { todo!() }
-}",
+<|>fn foo(&self) {
+todo!()
+}
+}"#,
 );
 }
@@ -260,16 +274,18 @@ impl Foo for S {
 fn test_empty_impl_def() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 trait Foo { fn foo(&self); }
 struct S;
-impl Foo for S { <|> }",
-"
+impl Foo for S { <|> }"#,
+r#"
 trait Foo { fn foo(&self); }
 struct S;
 impl Foo for S {
-<|>fn foo(&self) { todo!() }
-}",
+<|>fn foo(&self) {
+todo!()
+}
+}"#,
 );
 }
@@ -277,16 +293,18 @@ impl Foo for S {
 fn fill_in_type_params_1() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 trait Foo<T> { fn foo(&self, t: T) -> &T; }
 struct S;
-impl Foo<u32> for S { <|> }",
-"
+impl Foo<u32> for S { <|> }"#,
+r#"
 trait Foo<T> { fn foo(&self, t: T) -> &T; }
 struct S;
 impl Foo<u32> for S {
-<|>fn foo(&self, t: u32) -> &u32 { todo!() }
-}",
+<|>fn foo(&self, t: u32) -> &u32 {
+todo!()
+}
+}"#,
 );
 }
@@ -294,16 +312,18 @@ impl Foo<u32> for S {
 fn fill_in_type_params_2() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 trait Foo<T> { fn foo(&self, t: T) -> &T; }
 struct S;
-impl<U> Foo<U> for S { <|> }",
-"
+impl<U> Foo<U> for S { <|> }"#,
+r#"
 trait Foo<T> { fn foo(&self, t: T) -> &T; }
 struct S;
 impl<U> Foo<U> for S {
-<|>fn foo(&self, t: U) -> &U { todo!() }
-}",
+<|>fn foo(&self, t: U) -> &U {
+todo!()
+}
+}"#,
 );
 }
@@ -311,16 +331,18 @@ impl<U> Foo<U> for S {
 fn test_cursor_after_empty_impl_def() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 trait Foo { fn foo(&self); }
 struct S;
-impl Foo for S {}<|>",
-"
+impl Foo for S {}<|>"#,
+r#"
 trait Foo { fn foo(&self); }
 struct S;
 impl Foo for S {
-<|>fn foo(&self) { todo!() }
-}",
+<|>fn foo(&self) {
+todo!()
+}
+}"#,
 )
 }
@@ -328,22 +350,24 @@ impl Foo for S {
 fn test_qualify_path_1() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 mod foo {
 pub struct Bar;
 trait Foo { fn foo(&self, bar: Bar); }
 }
 struct S;
-impl foo::Foo for S { <|> }",
-"
+impl foo::Foo for S { <|> }"#,
+r#"
 mod foo {
 pub struct Bar;
 trait Foo { fn foo(&self, bar: Bar); }
 }
 struct S;
 impl foo::Foo for S {
-<|>fn foo(&self, bar: foo::Bar) { todo!() }
-}",
+<|>fn foo(&self, bar: foo::Bar) {
+todo!()
+}
+}"#,
 );
 }
@@ -351,22 +375,24 @@ impl foo::Foo for S {
 fn test_qualify_path_generic() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 mod foo {
 pub struct Bar<T>;
 trait Foo { fn foo(&self, bar: Bar<u32>); }
 }
 struct S;
-impl foo::Foo for S { <|> }",
-"
+impl foo::Foo for S { <|> }"#,
+r#"
 mod foo {
 pub struct Bar<T>;
 trait Foo { fn foo(&self, bar: Bar<u32>); }
 }
 struct S;
 impl foo::Foo for S {
-<|>fn foo(&self, bar: foo::Bar<u32>) { todo!() }
-}",
+<|>fn foo(&self, bar: foo::Bar<u32>) {
+todo!()
+}
+}"#,
 );
 }
@@ -374,22 +400,24 @@ impl foo::Foo for S {
 fn test_qualify_path_and_substitute_param() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 mod foo {
 pub struct Bar<T>;
 trait Foo<T> { fn foo(&self, bar: Bar<T>); }
 }
 struct S;
-impl foo::Foo<u32> for S { <|> }",
-"
+impl foo::Foo<u32> for S { <|> }"#,
+r#"
 mod foo {
 pub struct Bar<T>;
 trait Foo<T> { fn foo(&self, bar: Bar<T>); }
 }
 struct S;
 impl foo::Foo<u32> for S {
-<|>fn foo(&self, bar: foo::Bar<u32>) { todo!() }
-}",
+<|>fn foo(&self, bar: foo::Bar<u32>) {
+todo!()
+}
+}"#,
 );
 }
@@ -398,15 +426,15 @@ impl foo::Foo<u32> for S {
 // when substituting params, the substituted param should not be qualified!
 check_assist(
 add_missing_impl_members,
-"
+r#"
 mod foo {
 trait Foo<T> { fn foo(&self, bar: T); }
 pub struct Param;
 }
 struct Param;
 struct S;
-impl foo::Foo<Param> for S { <|> }",
-"
+impl foo::Foo<Param> for S { <|> }"#,
+r#"
 mod foo {
 trait Foo<T> { fn foo(&self, bar: T); }
 pub struct Param;
@@ -414,8 +442,10 @@ mod foo {
 struct Param;
 struct S;
 impl foo::Foo<Param> for S {
-<|>fn foo(&self, bar: Param) { todo!() }
-}",
+<|>fn foo(&self, bar: Param) {
+todo!()
+}
+}"#,
 );
 }
@@ -423,15 +453,15 @@ impl foo::Foo<Param> for S {
 fn test_qualify_path_associated_item() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 mod foo {
 pub struct Bar<T>;
 impl Bar<T> { type Assoc = u32; }
 trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
 }
 struct S;
-impl foo::Foo for S { <|> }",
-"
+impl foo::Foo for S { <|> }"#,
+r#"
 mod foo {
 pub struct Bar<T>;
 impl Bar<T> { type Assoc = u32; }
@@ -439,8 +469,10 @@ mod foo {
 }
 struct S;
 impl foo::Foo for S {
-<|>fn foo(&self, bar: foo::Bar<u32>::Assoc) { todo!() }
-}",
+<|>fn foo(&self, bar: foo::Bar<u32>::Assoc) {
+todo!()
+}
+}"#,
 );
 }
@@ -448,15 +480,15 @@ impl foo::Foo for S {
 fn test_qualify_path_nested() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 mod foo {
 pub struct Bar<T>;
 pub struct Baz;
 trait Foo { fn foo(&self, bar: Bar<Baz>); }
 }
 struct S;
-impl foo::Foo for S { <|> }",
-"
+impl foo::Foo for S { <|> }"#,
+r#"
 mod foo {
 pub struct Bar<T>;
 pub struct Baz;
@@ -464,8 +496,10 @@ mod foo {
 }
 struct S;
 impl foo::Foo for S {
-<|>fn foo(&self, bar: foo::Bar<foo::Baz>) { todo!() }
-}",
+<|>fn foo(&self, bar: foo::Bar<foo::Baz>) {
+todo!()
+}
+}"#,
 );
 }
@@ -473,22 +507,24 @@ impl foo::Foo for S {
 fn test_qualify_path_fn_trait_notation() {
 check_assist(
 add_missing_impl_members,
-"
+r#"
 mod foo {
 pub trait Fn<Args> { type Output; }
 trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
 }
 struct S;
-impl foo::Foo for S { <|> }",
-"
+impl foo::Foo for S { <|> }"#,
+r#"
 mod foo {
 pub trait Fn<Args> { type Output; }
 trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
 }
 struct S;
 impl foo::Foo for S {
-<|>fn foo(&self, bar: dyn Fn(u32) -> i32) { todo!() }
-}",
+<|>fn foo(&self, bar: dyn Fn(u32) -> i32) {
+todo!()
+}
+}"#,
 );
 }
@@ -496,10 +532,10 @@ impl foo::Foo for S {
 fn test_empty_trait() {
 check_assist_not_applicable(
 add_missing_impl_members,
-"
+r#"
 trait Foo;
 struct S;
-impl Foo for S { <|> }",
+impl Foo for S { <|> }"#,
 )
 }
@@ -507,13 +543,13 @@ impl Foo for S { <|> }",
 fn test_ignore_unnamed_trait_members_and_default_methods() {
 check_assist_not_applicable(
 add_missing_impl_members,
-"
+r#"
 trait Foo {
 fn (arg: u32);
 fn valid(some: u32) -> bool { false }
 }
 struct S;
-impl Foo for S { <|> }",
+impl Foo for S { <|> }"#,
 )
 }
@@ -544,7 +580,9 @@ trait Foo {
 struct S;
 impl Foo for S {
 <|>type Output;
-fn foo(&self) { todo!() }
+fn foo(&self) {
+todo!()
+}
 }"#,
 )
 }
@@ -553,7 +591,7 @@ impl Foo for S {
 fn test_default_methods() {
 check_assist(
 add_missing_default_members,
-"
+r#"
 trait Foo {
 type Output;
@@ -563,8 +601,8 @@ trait Foo {
 fn foo(some: u32) -> bool;
 }
 struct S;
-impl Foo for S { <|> }",
-"
+impl Foo for S { <|> }"#,
+r#"
 trait Foo {
 type Output;
@@ -576,7 +614,7 @@ trait Foo {
 struct S;
 impl Foo for S {
 <|>fn valid(some: u32) -> bool { false }
-}",
+}"#,
 )
 }
 }


@@ -2,7 +2,7 @@ use std::{iter::once, ops::RangeInclusive};
 use ra_syntax::{
 algo::replace_children,
-ast::{self, edit::IndentLevel, make, Block, Pat::TupleStructPat},
+ast::{self, edit::IndentLevel, make},
 AstNode,
 SyntaxKind::{FN_DEF, LOOP_EXPR, L_CURLY, R_CURLY, WHILE_EXPR, WHITESPACE},
 SyntaxNode,
@@ -47,7 +47,7 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx) -> Option<Assist> {
 // Check if there is an IfLet that we can handle.
 let if_let_pat = match cond.pat() {
 None => None, // No IfLet, supported.
-Some(TupleStructPat(pat)) if pat.args().count() == 1 => {
+Some(ast::Pat::TupleStructPat(pat)) if pat.args().count() == 1 => {
 let path = pat.path()?;
 match path.qualifier() {
 None => {
@@ -61,9 +61,9 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx) -> Option<Assist> {
 };
 let cond_expr = cond.expr()?;
-let then_block = if_expr.then_branch()?.block()?;
-let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::Block::cast)?;
+let then_block = if_expr.then_branch()?;
+let parent_block = if_expr.syntax().parent()?.ancestors().find_map(ast::BlockExpr::cast)?;
 if parent_block.expr()? != if_expr.clone().into() {
 return None;
@@ -80,7 +80,7 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx) -> Option<Assist> {
 return None;
 }
-let parent_container = parent_block.syntax().parent()?.parent()?;
+let parent_container = parent_block.syntax().parent()?;
 let early_expression: ast::Expr = match parent_container.kind() {
 WHILE_EXPR | LOOP_EXPR => make::expr_continue(),
@@ -144,13 +144,13 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx) -> Option<Assist> {
 }
 };
 edit.target(if_expr.syntax().text_range());
-edit.replace_ast(parent_block, ast::Block::cast(new_block).unwrap());
+edit.replace_ast(parent_block, ast::BlockExpr::cast(new_block).unwrap());
 edit.set_cursor(cursor_position);
 fn replace(
 new_expr: &SyntaxNode,
-then_block: &Block,
-parent_block: &Block,
+then_block: &ast::BlockExpr,
+parent_block: &ast::BlockExpr,
 if_expr: &ast::IfExpr,
 ) -> SyntaxNode {
 let then_block_items = IndentLevel::from(1).decrease_indent(then_block.clone());


@@ -89,6 +89,7 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
 | (ast::Expr::ParenExpr(_), _)
 | (ast::Expr::PathExpr(_), _)
 | (ast::Expr::BlockExpr(_), _)
+| (ast::Expr::EffectExpr(_), _)
 | (_, ast::Expr::CallExpr(_))
 | (_, ast::Expr::TupleExpr(_))
 | (_, ast::Expr::ArrayExpr(_))


@@ -111,7 +111,7 @@ fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
 /// expression like a lambda or match arm.
 fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
 expr.syntax().ancestors().find_map(|node| {
-if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) {
+if let Some(expr) = node.parent().and_then(ast::BlockExpr::cast).and_then(|it| it.expr()) {
 if expr.syntax() == &node {
 tested_by!(test_introduce_var_last_expr);
 return Some((node, false));


@@ -113,9 +113,9 @@ pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> {
 "Move condition to match guard",
 |edit| {
 edit.target(if_expr.syntax().text_range());
-let then_only_expr = then_block.block().and_then(|it| it.statements().next()).is_none();
-match &then_block.block().and_then(|it| it.expr()) {
+let then_only_expr = then_block.statements().next().is_none();
+match &then_block.expr() {
 Some(then_expr) if then_only_expr => {
 edit.replace(if_expr.syntax().text_range(), then_expr.syntax().text())
 }


@@ -27,7 +27,7 @@ pub(crate) fn replace_qualified_name_with_use(ctx: AssistCtx) -> Option<Assist>
 return None;
 }
-let hir_path = hir::Path::from_ast(path.clone())?;
+let hir_path = ctx.sema.lower_path(&path)?;
 let segments = collect_hir_path_segments(&hir_path)?;
 if segments.len() < 2 {
 return None;


@@ -42,7 +42,6 @@ pub fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr {
 }
 pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option<ast::Expr> {
-let block = block.block()?;
 let has_anything_else = |thing: &SyntaxNode| -> bool {
 let mut non_trivial_children =
 block.syntax().children_with_tokens().filter(|it| match it.kind() {


@@ -70,6 +70,7 @@ pub use hir_def::{
 type_ref::Mutability,
 };
 pub use hir_expand::{
-name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId, MacroFile, Origin,
+hygiene::Hygiene, name::Name, HirFileId, InFile, MacroCallId, MacroCallLoc, MacroDefId,
+MacroFile, Origin,
 };
 pub use hir_ty::{display::HirDisplay, CallableDef};


@@ -8,7 +8,7 @@ use hir_def::{
 resolver::{self, HasResolver, Resolver},
 AsMacroCall, TraitId,
 };
-use hir_expand::ExpansionInfo;
+use hir_expand::{hygiene::Hygiene, ExpansionInfo};
 use hir_ty::associated_type_shorthand_candidates;
 use itertools::Itertools;
 use ra_db::{FileId, FileRange};
@@ -246,6 +246,11 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
 self.analyze(path.syntax()).resolve_path(self.db, path)
 }
+pub fn lower_path(&self, path: &ast::Path) -> Option<Path> {
+let src = self.find_file(path.syntax().clone());
+Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into()))
+}
 pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
 self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
 }
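The new `Semantics::lower_path` wraps `Path::from_src` with the hygiene of the file the node comes from, so callers outside `hir` no longer need the hygiene-less `hir::Path::from_ast`. A minimal sketch of a call site, modelled on the `replace_qualified_name_with_use` change earlier in this diff (the surrounding assist context and variable names here are illustrative, not part of the commit):

    // Sketch: inside an assist handler, `ctx.sema` is the `Semantics` handle
    // the assists already carry, and `path` is an `ast::Path` under the cursor.
    // `lower_path` resolves hygiene from the node's file before lowering.
    let hir_path: Option<hir::Path> = ctx.sema.lower_path(&path);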


@@ -224,7 +224,8 @@ impl SourceAnalyzer {
 }
 }
 // This must be a normal source file rather than macro file.
-let hir_path = crate::Path::from_ast(path.clone())?;
+let hir_path =
+crate::Path::from_src(path.clone(), &Hygiene::new(db.upcast(), self.file_id))?;
 resolve_hir_path(db, &self.resolver, &hir_path)
 }


@@ -12,9 +12,15 @@ use ra_prof::profile;
 use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner, VisibilityOwner};
 use crate::{
-body::CfgExpander, db::DefDatabase, src::HasChildSource, src::HasSource, trace::Trace,
-type_ref::TypeRef, visibility::RawVisibility, EnumId, HasModule, LocalEnumVariantId,
-LocalFieldId, Lookup, ModuleId, StructId, UnionId, VariantId,
+body::{CfgExpander, LowerCtx},
+db::DefDatabase,
+src::HasChildSource,
+src::HasSource,
+trace::Trace,
+type_ref::TypeRef,
+visibility::RawVisibility,
+EnumId, HasModule, LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StructId, UnionId,
+VariantId,
 };
 /// Note that we use `StructData` for unions as well!
@@ -198,6 +204,8 @@ fn lower_struct(
 trace: &mut Trace<FieldData, Either<ast::TupleFieldDef, ast::RecordFieldDef>>,
 ast: &InFile<ast::StructKind>,
 ) -> StructKind {
+let ctx = LowerCtx::new(db, ast.file_id);
 match &ast.value {
 ast::StructKind::Tuple(fl) => {
 for (i, fd) in fl.fields().enumerate() {
@@ -210,7 +218,7 @@ fn lower_struct(
 || Either::Left(fd.clone()),
 || FieldData {
 name: Name::new_tuple_field(i),
-type_ref: TypeRef::from_ast_opt(fd.type_ref()),
+type_ref: TypeRef::from_ast_opt(&ctx, fd.type_ref()),
 visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
 },
 );
@@ -228,7 +236,7 @@ fn lower_struct(
 || Either::Right(fd.clone()),
 || FieldData {
 name: fd.name().map(|n| n.as_name()).unwrap_or_else(Name::missing),
-type_ref: TypeRef::from_ast_opt(fd.ascribed_type()),
+type_ref: TypeRef::from_ast_opt(&ctx, fd.ascribed_type()),
 visibility: RawVisibility::from_ast(db, ast.with_value(fd.visibility())),
 },
 );


@@ -15,6 +15,8 @@ use ra_prof::profile;
 use ra_syntax::{ast, AstNode, AstPtr};
 use rustc_hash::FxHashMap;
+pub(crate) use lower::LowerCtx;
 use crate::{
 attr::Attrs,
 db::DefDatabase,


@@ -3,8 +3,9 @@
 use either::Either;
 use hir_expand::{
+hygiene::Hygiene,
 name::{name, AsName, Name},
-MacroDefId, MacroDefKind,
+HirFileId, MacroDefId, MacroDefKind,
 };
 use ra_arena::Arena;
 use ra_syntax::{
@@ -26,7 +27,7 @@ use crate::{
 LogicOp, MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
 },
 item_scope::BuiltinShadowMode,
-path::GenericArgs,
+path::{GenericArgs, Path},
 type_ref::{Mutability, TypeRef},
 AdtId, ConstLoc, ContainerId, DefWithBodyId, EnumLoc, FunctionLoc, Intern, ModuleDefId,
 StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
@@ -35,6 +36,23 @@ use crate::{
 use super::{ExprSource, PatSource};
 use ast::AstChildren;
+pub(crate) struct LowerCtx {
+hygiene: Hygiene,
+}
+impl LowerCtx {
+pub fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self {
+LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) }
+}
+pub fn with_hygiene(hygiene: &Hygiene) -> Self {
+LowerCtx { hygiene: hygiene.clone() }
+}
+pub fn lower_path(&self, ast: ast::Path) -> Option<Path> {
+Path::from_src(ast, &self.hygiene)
+}
+}
 pub(super) fn lower(
 db: &dyn DefDatabase,
 def: DefWithBodyId,
@@ -42,10 +60,13 @@ pub(super) fn lower(
 params: Option<ast::ParamList>,
 body: Option<ast::Expr>,
 ) -> (Body, BodySourceMap) {
+let ctx = LowerCtx::new(db, expander.current_file_id.clone());
 ExprCollector {
 db,
 def,
 expander,
+ctx,
 source_map: BodySourceMap::default(),
 body: Body {
 exprs: Arena::default(),
@@ -62,7 +83,7 @@ struct ExprCollector<'a> {
 db: &'a dyn DefDatabase,
 def: DefWithBodyId,
 expander: Expander,
+ctx: LowerCtx,
 body: Body,
 source_map: BodySourceMap,
 }
@@ -182,6 +203,16 @@ impl ExprCollector<'_> {
 self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr)
 }
+ast::Expr::EffectExpr(e) => match e.effect() {
+ast::Effect::Try(_) => {
+let body = self.collect_block_opt(e.block_expr());
+self.alloc_expr(Expr::TryBlock { body }, syntax_ptr)
+}
+// FIXME: we need to record these effects somewhere...
+ast::Effect::Async(_) | ast::Effect::Label(_) | ast::Effect::Unsafe(_) => {
+self.collect_block_opt(e.block_expr())
+}
+},
 ast::Expr::BlockExpr(e) => self.collect_block(e),
 ast::Expr::LoopExpr(e) => {
 let body = self.collect_block_opt(e.loop_body());
@@ -237,7 +268,8 @@ impl ExprCollector<'_> {
 Vec::new()
 };
 let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
-let generic_args = e.type_arg_list().and_then(GenericArgs::from_ast);
+let generic_args =
+e.type_arg_list().and_then(|it| GenericArgs::from_ast(&self.ctx, it));
 self.alloc_expr(
 Expr::MethodCall { receiver, method_name, args, generic_args },
 syntax_ptr,
@@ -343,7 +375,7 @@ impl ExprCollector<'_> {
 }
 ast::Expr::CastExpr(e) => {
 let expr = self.collect_expr_opt(e.expr());
-let type_ref = TypeRef::from_ast_opt(e.type_ref());
+let type_ref = TypeRef::from_ast_opt(&self.ctx, e.type_ref());
 self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr)
 }
 ast::Expr::RefExpr(e) => {
@@ -365,12 +397,16 @@ impl ExprCollector<'_> {
 if let Some(pl) = e.param_list() {
 for param in pl.params() {
 let pat = self.collect_pat_opt(param.pat());
-let type_ref = param.ascribed_type().map(TypeRef::from_ast);
+let type_ref =
+param.ascribed_type().map(|it| TypeRef::from_ast(&self.ctx, it));
 args.push(pat);
 arg_types.push(type_ref);
 }
 }
-let ret_type = e.ret_type().and_then(|r| r.type_ref()).map(TypeRef::from_ast);
+let ret_type = e
+.ret_type()
+.and_then(|r| r.type_ref())
+.map(|it| TypeRef::from_ast(&self.ctx, it));
 let body = self.collect_expr_opt(e.body());
 self.alloc_expr(Expr::Lambda { args, arg_types, ret_type, body }, syntax_ptr)
 }
@@ -464,19 +500,15 @@ impl ExprCollector<'_> {
 }
 }
-fn collect_block(&mut self, expr: ast::BlockExpr) -> ExprId {
-let syntax_node_ptr = AstPtr::new(&expr.clone().into());
-let block = match expr.block() {
-Some(block) => block,
-None => return self.alloc_expr(Expr::Missing, syntax_node_ptr),
-};
+fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId {
+let syntax_node_ptr = AstPtr::new(&block.clone().into());
 self.collect_block_items(&block);
 let statements = block
 .statements()
 .map(|s| match s {
 ast::Stmt::LetStmt(stmt) => {
 let pat = self.collect_pat_opt(stmt.pat());
-let type_ref = stmt.ascribed_type().map(TypeRef::from_ast);
+let type_ref = stmt.ascribed_type().map(|it| TypeRef::from_ast(&self.ctx, it));
 let initializer = stmt.initializer().map(|e| self.collect_expr(e));
 Statement::Let { pat, type_ref, initializer }
 }
@@ -487,7 +519,7 @@ impl ExprCollector<'_> {
 self.alloc_expr(Expr::Block { statements, tail }, syntax_node_ptr)
 }
-fn collect_block_items(&mut self, block: &ast::Block) {
+fn collect_block_items(&mut self, block: &ast::BlockExpr) {
 let container = ContainerId::DefWithBodyId(self.def);
 for item in block.items() {
 let (def, name): (ModuleDefId, Option<ast::Name>) = match item {
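The new `LowerCtx` introduced above is essentially a carrier for `Hygiene`, threaded through every `TypeRef`, `GenericArgs`, and path lowering call so that paths coming out of macro expansions resolve correctly. A hedged sketch of the two construction paths this commit adds (names taken from the diff; the snippet is not a complete compiling example on its own):

    // From a known file id: used by `lower()`, `FunctionData`, `ImplData`, etc.
    let ctx = LowerCtx::new(db, file_id);
    // From an already-computed hygiene: used by `lower_path` in path/lower.rs.
    let ctx_from_hygiene = LowerCtx::with_hygiene(&hygiene);
    // Either way, path lowering now goes through the context and stays hygiene-aware:
    let lowered: Option<Path> = ctx.lower_path(ast_path);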


@@ -15,6 +15,7 @@ use ra_syntax::ast::{
 use crate::{
 attr::Attrs,
+body::LowerCtx,
 db::DefDatabase,
 path::{path, AssociatedTypeBinding, GenericArgs, Path},
 src::HasSource,
@@ -40,13 +41,14 @@ impl FunctionData {
 pub(crate) fn fn_data_query(db: &impl DefDatabase, func: FunctionId) -> Arc<FunctionData> {
 let loc = func.lookup(db);
 let src = loc.source(db);
+let ctx = LowerCtx::new(db, src.file_id);
 let name = src.value.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
 let mut params = Vec::new();
 let mut has_self_param = false;
 if let Some(param_list) = src.value.param_list() {
 if let Some(self_param) = param_list.self_param() {
 let self_type = if let Some(type_ref) = self_param.ascribed_type() {
-TypeRef::from_ast(type_ref)
+TypeRef::from_ast(&ctx, type_ref)
 } else {
 let self_type = TypeRef::Path(name![Self].into());
 match self_param.kind() {
@@ -63,14 +65,14 @@ impl FunctionData {
 has_self_param = true;
 }
 for param in param_list.params() {
-let type_ref = TypeRef::from_ast_opt(param.ascribed_type());
+let type_ref = TypeRef::from_ast_opt(&ctx, param.ascribed_type());
 params.push(type_ref);
 }
 }
 let attrs = Attrs::new(&src.value, &Hygiene::new(db.upcast(), src.file_id));
 let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) {
-TypeRef::from_ast(type_ref)
+TypeRef::from_ast(&ctx, type_ref)
 } else {
 TypeRef::unit()
 };
@@ -122,7 +124,8 @@ impl TypeAliasData {
 let loc = typ.lookup(db);
 let node = loc.source(db);
 let name = node.value.name().map_or_else(Name::missing, |n| n.as_name());
-let type_ref = node.value.type_ref().map(TypeRef::from_ast);
+let lower_ctx = LowerCtx::new(db, node.file_id);
+let type_ref = node.value.type_ref().map(|it| TypeRef::from_ast(&lower_ctx, it));
 let vis_default = RawVisibility::default_for_container(loc.container);
 let visibility = RawVisibility::from_ast_with_default(
 db,
@@ -130,7 +133,7 @@ impl TypeAliasData {
 node.as_ref().map(|n| n.visibility()),
 );
 let bounds = if let Some(bound_list) = node.value.type_bound_list() {
-bound_list.bounds().map(TypeBound::from_ast).collect()
+bound_list.bounds().map(|it| TypeBound::from_ast(&lower_ctx, it)).collect()
 } else {
 Vec::new()
 };
@@ -223,9 +226,10 @@ impl ImplData {
 let _p = profile("impl_data_query");
 let impl_loc = id.lookup(db);
 let src = impl_loc.source(db);
+let lower_ctx = LowerCtx::new(db, src.file_id);
-let target_trait = src.value.target_trait().map(TypeRef::from_ast);
-let target_type = TypeRef::from_ast_opt(src.value.target_type());
+let target_trait = src.value.target_trait().map(|it| TypeRef::from_ast(&lower_ctx, it));
+let target_type = TypeRef::from_ast_opt(&lower_ctx, src.value.target_type());
 let is_negative = src.value.excl_token().is_some();
 let module_id = impl_loc.container.module(db);
@@ -279,8 +283,9 @@ impl ConstData {
 vis_default: RawVisibility,
 node: InFile<N>,
 ) -> ConstData {
+let ctx = LowerCtx::new(db, node.file_id);
 let name = node.value.name().map(|n| n.as_name());
-let type_ref = TypeRef::from_ast_opt(node.value.ascribed_type());
+let type_ref = TypeRef::from_ast_opt(&ctx, node.value.ascribed_type());
 let visibility =
 RawVisibility::from_ast_with_default(db, vis_default, node.map(|n| n.visibility()));
 ConstData { name, type_ref, visibility }


@@ -101,6 +101,9 @@ pub enum Expr {
 Try {
 expr: ExprId,
 },
+TryBlock {
+body: ExprId,
+},
 Cast {
 expr: ExprId,
 type_ref: TypeRef,
@@ -236,6 +239,7 @@ impl Expr {
 f(*expr);
 }
 }
+Expr::TryBlock { body } => f(*body),
 Expr::Loop { body } => f(*body),
 Expr::While { condition, body } => {
 f(*condition);
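The new `Expr::TryBlock` variant models the nightly `try`-block syntax that `ast::Effect::Try` now lowers to (see the `collect_expr` change in body/lower.rs above). For reference, a source-level sketch of code that would produce this variant (nightly-only; requires `#![feature(try_blocks)]` at the crate root):

    fn parse_two(a: &str, b: &str) -> Result<i32, std::num::ParseIntError> {
        // The whole `try { ... }` block lowers to a single `Expr::TryBlock`,
        // with the block body as its `body` expression.
        try { a.parse::<i32>()? + b.parse::<i32>()? }
    }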


@@ -15,6 +15,7 @@ use ra_prof::profile;
 use ra_syntax::ast::{self, NameOwner, TypeBoundsOwner, TypeParamsOwner};
 use crate::{
+body::LowerCtx,
 child_by_source::ChildBySource,
 db::DefDatabase,
 dyn_map::DynMap,
@@ -80,11 +81,13 @@ impl GenericParams {
 fn new(db: &dyn DefDatabase, def: GenericDefId) -> (GenericParams, InFile<SourceMap>) {
 let mut generics = GenericParams { types: Arena::default(), where_predicates: Vec::new() };
 let mut sm = ArenaMap::default();
 // FIXME: add `: Sized` bound for everything except for `Self` in traits
 let file_id = match def {
 GenericDefId::FunctionId(it) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 // lower `impl Trait` in arguments
 let data = db.function_data(it);
 for param in &data.params {
@@ -94,21 +97,25 @@ }
 }
 GenericDefId::AdtId(AdtId::StructId(it)) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 GenericDefId::AdtId(AdtId::UnionId(it)) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 GenericDefId::AdtId(AdtId::EnumId(it)) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 GenericDefId::TraitId(it) => {
 let src = it.lookup(db).source(db);
+let lower_ctx = LowerCtx::new(db, src.file_id);
 // traits get the Self type as an implicit first type parameter
 let self_param_id = generics.types.alloc(TypeParamData {
@@ -120,14 +127,16 @@
 // add super traits as bounds on Self
 // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
 let self_param = TypeRef::Path(name![Self].into());
-generics.fill_bounds(&src.value, self_param);
-generics.fill(&mut sm, &src.value);
+generics.fill_bounds(&lower_ctx, &src.value, self_param);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 GenericDefId::TypeAliasId(it) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 // Note that we don't add `Self` here: in `impl`s, `Self` is not a
@@ -135,7 +144,9 @@
 // type, so this is handled by the resolver.
 GenericDefId::ImplId(it) => {
 let src = it.lookup(db).source(db);
-generics.fill(&mut sm, &src.value);
+let lower_ctx = LowerCtx::new(db, src.file_id);
+generics.fill(&lower_ctx, &mut sm, &src.value);
 src.file_id
 }
 // We won't be using this ID anyway
@@ -145,28 +156,38 @@ impl GenericParams {
 (generics, InFile::new(file_id, sm))
 }
-fn fill(&mut self, sm: &mut SourceMap, node: &dyn TypeParamsOwner) {
+fn fill(&mut self, lower_ctx: &LowerCtx, sm: &mut SourceMap, node: &dyn TypeParamsOwner) {
 if let Some(params) = node.type_param_list() {
-self.fill_params(sm, params)
+self.fill_params(lower_ctx, sm, params)
 }
 if let Some(where_clause) = node.where_clause() {
-self.fill_where_predicates(where_clause);
+self.fill_where_predicates(lower_ctx, where_clause);
 }
 }
-fn fill_bounds(&mut self, node: &dyn ast::TypeBoundsOwner, type_ref: TypeRef) {
+fn fill_bounds(
+&mut self,
+lower_ctx: &LowerCtx,
+node: &dyn ast::TypeBoundsOwner,
+type_ref: TypeRef,
+) {
 for bound in
 node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
 {
-self.add_where_predicate_from_bound(bound, type_ref.clone());
+self.add_where_predicate_from_bound(lower_ctx, bound, type_ref.clone());
 }
 }
-fn fill_params(&mut self, sm: &mut SourceMap, params: ast::TypeParamList) {
+fn fill_params(
+&mut self,
+lower_ctx: &LowerCtx,
+sm: &mut SourceMap,
+params: ast::TypeParamList,
+) {
 for type_param in params.type_params() {
 let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
 // FIXME: Use `Path::from_src`
-let default = type_param.default_type().map(TypeRef::from_ast);
+let default = type_param.default_type().map(|it| TypeRef::from_ast(lower_ctx, it));
 let param = TypeParamData {
 name: Some(name.clone()),
 default,
@@ -176,29 +197,34 @@ impl GenericParams {
 sm.insert(param_id, Either::Right(type_param.clone()));
 let type_ref = TypeRef::Path(name.into());
-self.fill_bounds(&type_param, type_ref);
+self.fill_bounds(&lower_ctx, &type_param, type_ref);
 }
 }
-fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) {
+fn fill_where_predicates(&mut self, lower_ctx: &LowerCtx, where_clause: ast::WhereClause) {
 for pred in where_clause.predicates() {
 let type_ref = match pred.type_ref() {
 Some(type_ref) => type_ref,
 None => continue,
 };
-let type_ref = TypeRef::from_ast(type_ref);
+let type_ref = TypeRef::from_ast(lower_ctx, type_ref);
 for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
-self.add_where_predicate_from_bound(bound, type_ref.clone());
+self.add_where_predicate_from_bound(lower_ctx, bound, type_ref.clone());
 }
 }
 }
-fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) {
+fn add_where_predicate_from_bound(
+&mut self,
+lower_ctx: &LowerCtx,
+bound: ast::TypeBound,
+type_ref: TypeRef,
+) {
 if bound.question_token().is_some() {
 // FIXME: remove this bound
 return;
 }
-let bound = TypeBound::from_ast(bound);
+let bound = TypeBound::from_ast(lower_ctx, bound);
 self.where_predicates
 .push(WherePredicate { target: WherePredicateTarget::TypeRef(type_ref), bound });
 }


@@ -7,6 +7,7 @@ use std::{
 sync::Arc,
 };
+use crate::body::LowerCtx;
 use hir_expand::{
 hygiene::Hygiene,
 name::{AsName, Name},
@@ -244,8 +245,8 @@ impl<'a> PathSegments<'a> {
 }
 impl GenericArgs {
-pub(crate) fn from_ast(node: ast::TypeArgList) -> Option<GenericArgs> {
-lower::lower_generic_args(node)
+pub(crate) fn from_ast(lower_ctx: &LowerCtx, node: ast::TypeArgList) -> Option<GenericArgs> {
+lower::lower_generic_args(lower_ctx, node)
 }
 pub(crate) fn empty() -> GenericArgs {


@ -13,6 +13,7 @@ use ra_syntax::ast::{self, AstNode, TypeAscriptionOwner, TypeBoundsOwner};
use super::AssociatedTypeBinding; use super::AssociatedTypeBinding;
use crate::{ use crate::{
body::LowerCtx,
path::{GenericArg, GenericArgs, ModPath, Path, PathKind}, path::{GenericArg, GenericArgs, ModPath, Path, PathKind},
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
}; };
@ -26,6 +27,7 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
let mut type_anchor = None; let mut type_anchor = None;
let mut segments = Vec::new(); let mut segments = Vec::new();
let mut generic_args = Vec::new(); let mut generic_args = Vec::new();
let ctx = LowerCtx::with_hygiene(hygiene);
loop { loop {
let segment = path.segment()?; let segment = path.segment()?;
@ -40,9 +42,10 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
Either::Left(name) => { Either::Left(name) => {
let args = segment let args = segment
.type_arg_list() .type_arg_list()
.and_then(lower_generic_args) .and_then(|it| lower_generic_args(&ctx, it))
.or_else(|| { .or_else(|| {
lower_generic_args_from_fn_path( lower_generic_args_from_fn_path(
&ctx,
segment.param_list(), segment.param_list(),
segment.ret_type(), segment.ret_type(),
) )
@ -60,7 +63,7 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
ast::PathSegmentKind::Type { type_ref, trait_ref } => { ast::PathSegmentKind::Type { type_ref, trait_ref } => {
assert!(path.qualifier().is_none()); // this can only occur at the first segment assert!(path.qualifier().is_none()); // this can only occur at the first segment
let self_type = TypeRef::from_ast(type_ref?); let self_type = TypeRef::from_ast(&ctx, type_ref?);
match trait_ref { match trait_ref {
// <T>::foo // <T>::foo
@ -128,10 +131,13 @@ pub(super) fn lower_path(mut path: ast::Path, hygiene: &Hygiene) -> Option<Path>
} }
} }
pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs> { pub(super) fn lower_generic_args(
lower_ctx: &LowerCtx,
node: ast::TypeArgList,
) -> Option<GenericArgs> {
let mut args = Vec::new(); let mut args = Vec::new();
for type_arg in node.type_args() { for type_arg in node.type_args() {
let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.type_ref());
args.push(GenericArg::Type(type_ref)); args.push(GenericArg::Type(type_ref));
} }
// lifetimes ignored for now // lifetimes ignored for now
@ -140,9 +146,9 @@ pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs>
let assoc_type_arg: ast::AssocTypeArg = assoc_type_arg; let assoc_type_arg: ast::AssocTypeArg = assoc_type_arg;
if let Some(name_ref) = assoc_type_arg.name_ref() { if let Some(name_ref) = assoc_type_arg.name_ref() {
let name = name_ref.as_name(); let name = name_ref.as_name();
let type_ref = assoc_type_arg.type_ref().map(TypeRef::from_ast); let type_ref = assoc_type_arg.type_ref().map(|it| TypeRef::from_ast(lower_ctx, it));
let bounds = if let Some(l) = assoc_type_arg.type_bound_list() { let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
l.bounds().map(TypeBound::from_ast).collect() l.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect()
} else { } else {
Vec::new() Vec::new()
}; };
@ -159,6 +165,7 @@ pub(super) fn lower_generic_args(node: ast::TypeArgList) -> Option<GenericArgs>
/// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y) /// Collect `GenericArgs` from the parts of a fn-like path, i.e. `Fn(X, Y)
/// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`). /// -> Z` (which desugars to `Fn<(X, Y), Output=Z>`).
fn lower_generic_args_from_fn_path( fn lower_generic_args_from_fn_path(
ctx: &LowerCtx,
params: Option<ast::ParamList>, params: Option<ast::ParamList>,
ret_type: Option<ast::RetType>, ret_type: Option<ast::RetType>,
) -> Option<GenericArgs> { ) -> Option<GenericArgs> {
@ -167,14 +174,14 @@ fn lower_generic_args_from_fn_path(
if let Some(params) = params { if let Some(params) = params {
let mut param_types = Vec::new(); let mut param_types = Vec::new();
for param in params.params() { for param in params.params() {
let type_ref = TypeRef::from_ast_opt(param.ascribed_type()); let type_ref = TypeRef::from_ast_opt(&ctx, param.ascribed_type());
param_types.push(type_ref); param_types.push(type_ref);
} }
let arg = GenericArg::Type(TypeRef::Tuple(param_types)); let arg = GenericArg::Type(TypeRef::Tuple(param_types));
args.push(arg); args.push(arg);
} }
if let Some(ret_type) = ret_type { if let Some(ret_type) = ret_type {
let type_ref = TypeRef::from_ast_opt(ret_type.type_ref()); let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.type_ref());
bindings.push(AssociatedTypeBinding { bindings.push(AssociatedTypeBinding {
name: name![Output], name: name![Output],
type_ref: Some(type_ref), type_ref: Some(type_ref),
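
Note: the doc comment above spells out the desugaring that lower_generic_args_from_fn_path implements — the parenthesized parameter list becomes a single tuple type argument and the return type becomes an `Output` associated-type binding. A minimal stand-alone sketch of the two equivalent spellings (plain Rust, not rust-analyzer code; the angle-bracket form is unstable in rustc and appears only in the comment):

// `Fn(i32, i32) -> i32` is sugar for `Fn<(i32, i32), Output = i32>` (unstable
// spelling): one tuple type argument plus an `Output` associated-type binding,
// which is exactly the GenericArgs value the lowering above builds.
fn apply<F: Fn(i32, i32) -> i32>(f: F) -> i32 {
    f(40, 2)
}

fn main() {
    assert_eq!(apply(|a, b| a + b), 42);
}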

View file

@ -3,7 +3,7 @@
use ra_syntax::ast::{self, TypeAscriptionOwner, TypeBoundsOwner}; use ra_syntax::ast::{self, TypeAscriptionOwner, TypeBoundsOwner};
use crate::path::Path; use crate::{body::LowerCtx, path::Path};
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Mutability { pub enum Mutability {
@ -64,30 +64,34 @@ pub enum TypeBound {
impl TypeRef { impl TypeRef {
/// Converts an `ast::TypeRef` to a `hir::TypeRef`. /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
pub(crate) fn from_ast(node: ast::TypeRef) -> Self { pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::TypeRef) -> Self {
match node { match node {
ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.type_ref()),
ast::TypeRef::TupleType(inner) => { ast::TypeRef::TupleType(inner) => {
TypeRef::Tuple(inner.fields().map(TypeRef::from_ast).collect()) TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect())
} }
ast::TypeRef::NeverType(..) => TypeRef::Never, ast::TypeRef::NeverType(..) => TypeRef::Never,
ast::TypeRef::PathType(inner) => { ast::TypeRef::PathType(inner) => {
// FIXME: Use `Path::from_src` // FIXME: Use `Path::from_src`
inner.path().and_then(Path::from_ast).map(TypeRef::Path).unwrap_or(TypeRef::Error) inner
.path()
.and_then(|it| ctx.lower_path(it))
.map(TypeRef::Path)
.unwrap_or(TypeRef::Error)
} }
ast::TypeRef::PointerType(inner) => { ast::TypeRef::PointerType(inner) => {
let inner_ty = TypeRef::from_ast_opt(inner.type_ref()); let inner_ty = TypeRef::from_ast_opt(&ctx, inner.type_ref());
let mutability = Mutability::from_mutable(inner.mut_token().is_some()); let mutability = Mutability::from_mutable(inner.mut_token().is_some());
TypeRef::RawPtr(Box::new(inner_ty), mutability) TypeRef::RawPtr(Box::new(inner_ty), mutability)
} }
ast::TypeRef::ArrayType(inner) => { ast::TypeRef::ArrayType(inner) => {
TypeRef::Array(Box::new(TypeRef::from_ast_opt(inner.type_ref()))) TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.type_ref())))
} }
ast::TypeRef::SliceType(inner) => { ast::TypeRef::SliceType(inner) => {
TypeRef::Slice(Box::new(TypeRef::from_ast_opt(inner.type_ref()))) TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.type_ref())))
} }
ast::TypeRef::ReferenceType(inner) => { ast::TypeRef::ReferenceType(inner) => {
let inner_ty = TypeRef::from_ast_opt(inner.type_ref()); let inner_ty = TypeRef::from_ast_opt(&ctx, inner.type_ref());
let mutability = Mutability::from_mutable(inner.mut_token().is_some()); let mutability = Mutability::from_mutable(inner.mut_token().is_some());
TypeRef::Reference(Box::new(inner_ty), mutability) TypeRef::Reference(Box::new(inner_ty), mutability)
} }
@ -96,10 +100,13 @@ impl TypeRef {
let ret_ty = inner let ret_ty = inner
.ret_type() .ret_type()
.and_then(|rt| rt.type_ref()) .and_then(|rt| rt.type_ref())
.map(TypeRef::from_ast) .map(|it| TypeRef::from_ast(ctx, it))
.unwrap_or_else(|| TypeRef::Tuple(Vec::new())); .unwrap_or_else(|| TypeRef::Tuple(Vec::new()));
let mut params = if let Some(pl) = inner.param_list() { let mut params = if let Some(pl) = inner.param_list() {
pl.params().map(|p| p.ascribed_type()).map(TypeRef::from_ast_opt).collect() pl.params()
.map(|p| p.ascribed_type())
.map(|it| TypeRef::from_ast_opt(&ctx, it))
.collect()
} else { } else {
Vec::new() Vec::new()
}; };
@ -107,19 +114,19 @@ impl TypeRef {
TypeRef::Fn(params) TypeRef::Fn(params)
} }
// `for` types are close enough to the inner type for our purposes for now... // `for` types are close enough to the inner type for our purposes for now...
ast::TypeRef::ForType(inner) => TypeRef::from_ast_opt(inner.type_ref()), ast::TypeRef::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.type_ref()),
ast::TypeRef::ImplTraitType(inner) => { ast::TypeRef::ImplTraitType(inner) => {
TypeRef::ImplTrait(type_bounds_from_ast(inner.type_bound_list())) TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
} }
ast::TypeRef::DynTraitType(inner) => { ast::TypeRef::DynTraitType(inner) => {
TypeRef::DynTrait(type_bounds_from_ast(inner.type_bound_list())) TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
} }
} }
} }
pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self { pub(crate) fn from_ast_opt(ctx: &LowerCtx, node: Option<ast::TypeRef>) -> Self {
if let Some(node) = node { if let Some(node) = node {
TypeRef::from_ast(node) TypeRef::from_ast(ctx, node)
} else { } else {
TypeRef::Error TypeRef::Error
} }
@ -180,24 +187,27 @@ impl TypeRef {
} }
} }
pub(crate) fn type_bounds_from_ast(type_bounds_opt: Option<ast::TypeBoundList>) -> Vec<TypeBound> { pub(crate) fn type_bounds_from_ast(
lower_ctx: &LowerCtx,
type_bounds_opt: Option<ast::TypeBoundList>,
) -> Vec<TypeBound> {
if let Some(type_bounds) = type_bounds_opt { if let Some(type_bounds) = type_bounds_opt {
type_bounds.bounds().map(TypeBound::from_ast).collect() type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect()
} else { } else {
vec![] vec![]
} }
} }
impl TypeBound { impl TypeBound {
pub(crate) fn from_ast(node: ast::TypeBound) -> Self { pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::TypeBound) -> Self {
match node.kind() { match node.kind() {
ast::TypeBoundKind::PathType(path_type) => { ast::TypeBoundKind::PathType(path_type) => {
let path = match path_type.path() { let path = match path_type.path() {
Some(p) => p, Some(p) => p,
None => return TypeBound::Error, None => return TypeBound::Error,
}; };
// FIXME: Use `Path::from_src`
let path = match Path::from_ast(path) { let path = match ctx.lower_path(path) {
Some(p) => p, Some(p) => p,
None => return TypeBound::Error, None => return TypeBound::Error,
}; };

View file

@ -330,7 +330,7 @@ fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
FragmentKind::Expr FragmentKind::Expr
} }
// FIXME: Expand to statements in appropriate positions; HIR lowering needs to handle that // FIXME: Expand to statements in appropriate positions; HIR lowering needs to handle that
EXPR_STMT | BLOCK => FragmentKind::Expr, EXPR_STMT | BLOCK_EXPR => FragmentKind::Expr,
ARG_LIST => FragmentKind::Expr, ARG_LIST => FragmentKind::Expr,
TRY_EXPR => FragmentKind::Expr, TRY_EXPR => FragmentKind::Expr,
TUPLE_EXPR => FragmentKind::Expr, TUPLE_EXPR => FragmentKind::Expr,
@ -342,7 +342,6 @@ fn to_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
CONDITION => FragmentKind::Expr, CONDITION => FragmentKind::Expr,
BREAK_EXPR => FragmentKind::Expr, BREAK_EXPR => FragmentKind::Expr,
RETURN_EXPR => FragmentKind::Expr, RETURN_EXPR => FragmentKind::Expr,
BLOCK_EXPR => FragmentKind::Expr,
MATCH_EXPR => FragmentKind::Expr, MATCH_EXPR => FragmentKind::Expr,
MATCH_ARM => FragmentKind::Expr, MATCH_ARM => FragmentKind::Expr,
MATCH_GUARD => FragmentKind::Expr, MATCH_GUARD => FragmentKind::Expr,
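
Note: the hunk above only reshuffles which syntax kinds map to FragmentKind::Expr. A small stand-alone sketch (hypothetical `answer!` macro) of the expression positions that mapping is about:

// The hypothetical macro below expands as an expression in each of these
// positions (expression statement, argument list, block tail), matching the
// FragmentKind::Expr cases listed in the hunk above.
macro_rules! answer {
    () => { 42 };
}

fn main() {
    answer!();                                   // used as a statement
    let from_arg = std::cmp::max(answer!(), 0);  // used inside an argument list
    let from_block = { answer!() };              // used as a block's tail expression
    assert_eq!(from_arg + from_block, 84);
}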

View file

@ -12,7 +12,7 @@ use crate::{
HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind, HirFileId, HirFileIdRepr, MacroCallId, MacroDefKind,
}; };
#[derive(Debug)] #[derive(Clone, Debug)]
pub struct Hygiene { pub struct Hygiene {
// This is what `$crate` expands to // This is what `$crate` expands to
def_crate: Option<CrateId>, def_crate: Option<CrateId>,

View file

@ -11,7 +11,7 @@ doctest = false
itertools = "0.9.0" itertools = "0.9.0"
arrayvec = "0.5.1" arrayvec = "0.5.1"
smallvec = "1.2.0" smallvec = "1.2.0"
ena = "0.13.1" ena = "0.14.0"
log = "0.4.8" log = "0.4.8"
rustc-hash = "1.1.0" rustc-hash = "1.1.0"

View file

@ -73,6 +73,11 @@ impl<'a> InferenceContext<'a> {
self.coerce_merge_branch(&then_ty, &else_ty) self.coerce_merge_branch(&then_ty, &else_ty)
} }
Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected), Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected),
Expr::TryBlock { body } => {
let _inner = self.infer_expr(*body, expected);
// FIXME should be std::result::Result<{inner}, _>
Ty::Unknown
}
Expr::Loop { body } => { Expr::Loop { body } => {
self.infer_expr(*body, &Expectation::has_type(Ty::unit())); self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
// FIXME handle break with value // FIXME handle break with value
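
Note: the new Expr::TryBlock arm infers the body but gives the block itself Ty::Unknown; per the FIXME it should eventually be `Result<{inner}, _>`. A nightly-only sketch of the construct and of the type the FIXME is pointing at (assumes the unstable `try_blocks` feature):

// Requires the nightly `try_blocks` feature; illustrates the `Result<{inner}, _>`
// type the FIXME says should eventually be inferred for the block.
#![feature(try_blocks)]

fn main() {
    let x: Result<i32, std::num::ParseIntError> = try {
        "12".parse::<i32>()? + 30
    };
    assert_eq!(x, Ok(42));
}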

View file

@ -338,6 +338,46 @@ pub fn baz() -> usize { 31usize }
assert_eq!("(i32, usize)", type_at_pos(&db, pos)); assert_eq!("(i32, usize)", type_at_pos(&db, pos));
} }
#[test]
fn infer_macro_with_dollar_crate_is_correct_in_trait_associate_type() {
let (db, pos) = TestDB::with_position(
r#"
//- /main.rs crate:main deps:foo
use foo::Trait;
fn test() {
let msg = foo::Message(foo::MessageRef);
let r = msg.deref();
r<|>;
}
//- /lib.rs crate:foo
pub struct MessageRef;
pub struct Message(MessageRef);
pub trait Trait {
type Target;
fn deref(&self) -> &Self::Target;
}
#[macro_export]
macro_rules! expand {
() => {
impl Trait for Message {
type Target = $crate::MessageRef;
fn deref(&self) -> &Self::Target {
&self.0
}
}
}
}
expand!();
"#,
);
assert_eq!("&MessageRef", type_at_pos(&db, pos));
}
#[test] #[test]
fn infer_type_value_non_legacy_macro_use_as() { fn infer_type_value_non_legacy_macro_use_as() {
assert_snapshot!( assert_snapshot!(

View file

@ -1755,3 +1755,35 @@ fn main() {
"### "###
); );
} }
#[test]
fn effects_smoke_test() {
assert_snapshot!(
infer(r#"
fn main() {
let x = unsafe { 92 };
let y = async { async { () }.await };
let z = try { () };
let t = 'a: { 92 };
}
"#),
@r###"
11..131 '{ ...2 }; }': ()
21..22 'x': i32
32..38 '{ 92 }': i32
34..36 '92': i32
48..49 'y': {unknown}
58..80 '{ asyn...wait }': {unknown}
60..78 'async ....await': {unknown}
66..72 '{ () }': ()
68..70 '()': ()
90..91 'z': {unknown}
94..104 'try { () }': {unknown}
98..104 '{ () }': ()
100..102 '()': ()
114..115 't': i32
122..128 '{ 92 }': i32
124..126 '92': i32
"###
)
}

View file

@ -344,7 +344,7 @@ impl<'a> CompletionContext<'a> {
stmt.syntax().text_range() == name_ref.syntax().text_range(), stmt.syntax().text_range() == name_ref.syntax().text_range(),
); );
} }
if let Some(block) = ast::Block::cast(node) { if let Some(block) = ast::BlockExpr::cast(node) {
return Some( return Some(
block.expr().map(|e| e.syntax().text_range()) block.expr().map(|e| e.syntax().text_range())
== Some(name_ref.syntax().text_range()), == Some(name_ref.syntax().text_range()),

View file

@ -33,8 +33,12 @@ impl ShortLabel for ast::EnumDef {
impl ShortLabel for ast::TraitDef { impl ShortLabel for ast::TraitDef {
fn short_label(&self) -> Option<String> { fn short_label(&self) -> Option<String> {
if self.unsafe_token().is_some() {
short_label_from_node(self, "unsafe trait ")
} else {
short_label_from_node(self, "trait ") short_label_from_node(self, "trait ")
} }
}
} }
impl ShortLabel for ast::Module { impl ShortLabel for ast::Module {

View file

@ -88,7 +88,7 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
| ITEM_LIST | ITEM_LIST
| EXTERN_ITEM_LIST | EXTERN_ITEM_LIST
| USE_TREE_LIST | USE_TREE_LIST
| BLOCK | BLOCK_EXPR
| MATCH_ARM_LIST | MATCH_ARM_LIST
| ENUM_VARIANT_LIST | ENUM_VARIANT_LIST
| TOKEN_TREE => Some(FoldKind::Block), | TOKEN_TREE => Some(FoldKind::Block),

View file

@ -869,4 +869,15 @@ fn func(foo: i32) { if true { <|>foo; }; }
&[r#"pub(crate) async unsafe extern "C" fn foo()"#], &[r#"pub(crate) async unsafe extern "C" fn foo()"#],
); );
} }
#[test]
fn test_hover_trait_show_qualifiers() {
check_hover_result(
"
//- /lib.rs
unsafe trait foo<|>() {}
",
&["unsafe trait foo"],
);
}
} }

View file

@ -129,8 +129,7 @@ fn has_comma_after(node: &SyntaxNode) -> bool {
} }
fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
let block = ast::Block::cast(token.parent())?; let block_expr = ast::BlockExpr::cast(token.parent())?;
let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
if !block_expr.is_standalone() { if !block_expr.is_standalone() {
return None; return None;
} }

View file

@ -120,7 +120,6 @@ SOURCE_FILE@0..11
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..11 BLOCK_EXPR@9..11
BLOCK@9..11
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
R_CURLY@10..11 "}" R_CURLY@10..11 "}"
"# "#
@ -153,7 +152,6 @@ SOURCE_FILE@0..60
R_PAREN@8..9 ")" R_PAREN@8..9 ")"
WHITESPACE@9..10 " " WHITESPACE@9..10 " "
BLOCK_EXPR@10..60 BLOCK_EXPR@10..60
BLOCK@10..60
L_CURLY@10..11 "{" L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n " WHITESPACE@11..16 "\n "
EXPR_STMT@16..58 EXPR_STMT@16..58
@ -196,7 +194,6 @@ FN_DEF@0..11
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..11 BLOCK_EXPR@9..11
BLOCK@9..11
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
R_CURLY@10..11 "}" R_CURLY@10..11 "}"
"# "#
@ -265,7 +262,6 @@ SOURCE_FILE@0..12
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..12 BLOCK_EXPR@9..12
BLOCK@9..12
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..11 "\n" WHITESPACE@10..11 "\n"
R_CURLY@11..12 "}" R_CURLY@11..12 "}"
@ -300,7 +296,6 @@ SOURCE_FILE@0..12
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..12 BLOCK_EXPR@9..12
BLOCK@9..12
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..11 "\n" WHITESPACE@10..11 "\n"
R_CURLY@11..12 "}" R_CURLY@11..12 "}"
@ -334,7 +329,6 @@ SOURCE_FILE@0..25
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..12 BLOCK_EXPR@9..12
BLOCK@9..12
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..11 "\n" WHITESPACE@10..11 "\n"
R_CURLY@11..12 "}" R_CURLY@11..12 "}"
@ -349,7 +343,6 @@ SOURCE_FILE@0..25
R_PAREN@20..21 ")" R_PAREN@20..21 ")"
WHITESPACE@21..22 " " WHITESPACE@21..22 " "
BLOCK_EXPR@22..25 BLOCK_EXPR@22..25
BLOCK@22..25
L_CURLY@22..23 "{" L_CURLY@22..23 "{"
WHITESPACE@23..24 "\n" WHITESPACE@23..24 "\n"
R_CURLY@24..25 "}" R_CURLY@24..25 "}"

View file

@ -266,7 +266,6 @@ fn test_expr_order() {
L_PAREN@5..6 "(" L_PAREN@5..6 "("
R_PAREN@6..7 ")" R_PAREN@6..7 ")"
BLOCK_EXPR@7..15 BLOCK_EXPR@7..15
BLOCK@7..15
L_CURLY@7..8 "{" L_CURLY@7..8 "{"
EXPR_STMT@8..14 EXPR_STMT@8..14
BIN_EXPR@8..13 BIN_EXPR@8..13
@ -1114,7 +1113,6 @@ fn test_vec() {
assert_eq!( assert_eq!(
format!("{:#?}", tree).trim(), format!("{:#?}", tree).trim(),
r#"BLOCK_EXPR@0..45 r#"BLOCK_EXPR@0..45
BLOCK@0..45
L_CURLY@0..1 "{" L_CURLY@0..1 "{"
LET_STMT@1..20 LET_STMT@1..20
LET_KW@1..4 "let" LET_KW@1..4 "let"

View file

@ -143,7 +143,7 @@ pub(crate) fn reparser(
parent: Option<SyntaxKind>, parent: Option<SyntaxKind>,
) -> Option<fn(&mut Parser)> { ) -> Option<fn(&mut Parser)> {
let res = match node { let res = match node {
BLOCK => expressions::naked_block, BLOCK_EXPR => expressions::block,
RECORD_FIELD_DEF_LIST => items::record_field_def_list, RECORD_FIELD_DEF_LIST => items::record_field_def_list,
RECORD_FIELD_LIST => items::record_field_list, RECORD_FIELD_LIST => items::record_field_list,
ENUM_VARIANT_LIST => items::enum_variant_list, ENUM_VARIANT_LIST => items::enum_variant_list,

View file

@ -59,16 +59,7 @@ pub(crate) fn block(p: &mut Parser) {
p.error("expected a block"); p.error("expected a block");
return; return;
} }
atom::block_expr(p, None); atom::block_expr(p);
}
pub(crate) fn naked_block(p: &mut Parser) {
assert!(p.at(T!['{']));
let m = p.start();
p.bump(T!['{']);
expr_block_contents(p);
p.expect(T!['}']);
m.complete(p, BLOCK);
} }
fn is_expr_stmt_attr_allowed(kind: SyntaxKind) -> bool { fn is_expr_stmt_attr_allowed(kind: SyntaxKind) -> bool {
@ -197,7 +188,7 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) {
} }
} }
pub(crate) fn expr_block_contents(p: &mut Parser) { pub(super) fn expr_block_contents(p: &mut Parser) {
// This is checked by a validator // This is checked by a validator
attributes::inner_attributes(p); attributes::inner_attributes(p);

View file

@ -84,7 +84,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
T![box] => box_expr(p, None), T![box] => box_expr(p, None),
T![for] => for_expr(p, None), T![for] => for_expr(p, None),
T![while] => while_expr(p, None), T![while] => while_expr(p, None),
T![try] => try_expr(p, None), T![try] => try_block_expr(p, None),
LIFETIME if la == T![:] => { LIFETIME if la == T![:] => {
let m = p.start(); let m = p.start();
label(p); label(p);
@ -92,7 +92,12 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
T![loop] => loop_expr(p, Some(m)), T![loop] => loop_expr(p, Some(m)),
T![for] => for_expr(p, Some(m)), T![for] => for_expr(p, Some(m)),
T![while] => while_expr(p, Some(m)), T![while] => while_expr(p, Some(m)),
T!['{'] => block_expr(p, Some(m)), // test labeled_block
// fn f() { 'label: {}; }
T!['{'] => {
block_expr(p);
m.complete(p, EFFECT_EXPR)
}
_ => { _ => {
// test_err misplaced_label_err // test_err misplaced_label_err
// fn main() { // fn main() {
@ -108,13 +113,17 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
let m = p.start(); let m = p.start();
p.bump(T![async]); p.bump(T![async]);
p.eat(T![move]); p.eat(T![move]);
block_expr(p, Some(m)) block_expr(p);
m.complete(p, EFFECT_EXPR)
} }
T![match] => match_expr(p), T![match] => match_expr(p),
// test unsafe_block
// fn f() { unsafe { } }
T![unsafe] if la == T!['{'] => { T![unsafe] if la == T!['{'] => {
let m = p.start(); let m = p.start();
p.bump(T![unsafe]); p.bump(T![unsafe]);
block_expr(p, Some(m)) block_expr(p);
m.complete(p, EFFECT_EXPR)
} }
T!['{'] => { T!['{'] => {
// test for_range_from // test for_range_from
@ -123,7 +132,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
// break; // break;
// } // }
// } // }
block_expr(p, None) block_expr(p)
} }
T![return] => return_expr(p), T![return] => return_expr(p),
T![continue] => continue_expr(p), T![continue] => continue_expr(p),
@ -134,7 +143,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar
} }
}; };
let blocklike = match done.kind() { let blocklike = match done.kind() {
IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR | TRY_EXPR => { IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR | EFFECT_EXPR => {
BlockLike::Block BlockLike::Block
} }
_ => BlockLike::NotBlock, _ => BlockLike::NotBlock,
@ -234,7 +243,7 @@ fn lambda_expr(p: &mut Parser) -> CompletedMarker {
if p.at(T!['{']) { if p.at(T!['{']) {
// test lambda_ret_block // test lambda_ret_block
// fn main() { || -> i32 { 92 }(); } // fn main() { || -> i32 { 92 }(); }
block_expr(p, None); block_expr(p);
} else { } else {
p.error("expected `{`"); p.error("expected `{`");
} }
@ -461,13 +470,13 @@ fn match_guard(p: &mut Parser) -> CompletedMarker {
// test block_expr // test block_expr
// fn foo() { // fn foo() {
// {}; // {};
// unsafe {};
// 'label: {};
// } // }
pub(super) fn block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { pub(super) fn block_expr(p: &mut Parser) -> CompletedMarker {
assert!(p.at(T!['{'])); assert!(p.at(T!['{']));
let m = m.unwrap_or_else(|| p.start()); let m = p.start();
naked_block(p); p.bump(T!['{']);
expr_block_contents(p);
p.expect(T!['}']);
m.complete(p, BLOCK_EXPR) m.complete(p, BLOCK_EXPR)
} }
@ -532,7 +541,7 @@ fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker {
// fn foo() { // fn foo() {
// let _ = try {}; // let _ = try {};
// } // }
fn try_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { fn try_block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
assert!(p.at(T![try])); assert!(p.at(T![try]));
let m = m.unwrap_or_else(|| p.start()); let m = m.unwrap_or_else(|| p.start());
// Special-case `try!` as macro. // Special-case `try!` as macro.
@ -552,8 +561,8 @@ fn try_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
} }
p.bump(T![try]); p.bump(T![try]);
block(p); block_expr(p);
m.complete(p, TRY_EXPR) m.complete(p, EFFECT_EXPR)
} }
// test box_expr // test box_expr
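
Note: after this refactor block_expr always completes a plain BLOCK_EXPR, and the async/unsafe/try/label prefixes complete an EFFECT_EXPR wrapping it. A sketch of the block shapes involved, mirroring the test comments above (labeled blocks were still feature-gated in rustc at the time, and `try` blocks remain nightly-only):

fn foo() {
    {};              // plain BLOCK_EXPR
    unsafe {};       // EFFECT_EXPR wrapping a BLOCK_EXPR
    'label: {};      // EFFECT_EXPR carrying a label
    // try {};       // EFFECT_EXPR with a `try` token (nightly-only)
}

fn main() {
    foo()
}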

View file

@ -191,6 +191,7 @@ pub enum SyntaxKind {
RECORD_LIT, RECORD_LIT,
RECORD_FIELD_LIST, RECORD_FIELD_LIST,
RECORD_FIELD, RECORD_FIELD,
EFFECT_EXPR,
BOX_EXPR, BOX_EXPR,
CALL_EXPR, CALL_EXPR,
INDEX_EXPR, INDEX_EXPR,
@ -203,7 +204,6 @@ pub enum SyntaxKind {
PREFIX_EXPR, PREFIX_EXPR,
RANGE_EXPR, RANGE_EXPR,
BIN_EXPR, BIN_EXPR,
BLOCK,
EXTERN_BLOCK, EXTERN_BLOCK,
EXTERN_ITEM_LIST, EXTERN_ITEM_LIST,
ENUM_VARIANT, ENUM_VARIANT,

View file

@ -21,4 +21,4 @@ test_utils = { path = "../test_utils" }
cargo_metadata = "0.9.1" cargo_metadata = "0.9.1"
difference = "2.0.0" difference = "2.0.0"
# used as proc macro test target # used as proc macro test target
serde_derive = "=1.0.104" serde_derive = "=1.0.106"

View file

@ -1,5 +1,10 @@
SUBTREE $ SUBTREE $
PUNCH # [alone] 4294967295 PUNCH # [alone] 4294967295
SUBTREE [] 4294967295
IDENT doc 4294967295
SUBTREE () 4294967295
IDENT hidden 4294967295
PUNCH # [alone] 4294967295
SUBTREE [] 4294967295 SUBTREE [] 4294967295
IDENT allow 4294967295 IDENT allow 4294967295
SUBTREE () 4294967295 SUBTREE () 4294967295

View file

@ -10,7 +10,7 @@ fn test_derive_serialize_proc_macro() {
assert_expand( assert_expand(
"serde_derive", "serde_derive",
"Serialize", "Serialize",
"1.0.104", "1.0.106",
r##"struct Foo {}"##, r##"struct Foo {}"##,
include_str!("fixtures/test_serialize_proc_macro.txt"), include_str!("fixtures/test_serialize_proc_macro.txt"),
); );
@ -21,7 +21,7 @@ fn test_derive_serialize_proc_macro_failed() {
assert_expand( assert_expand(
"serde_derive", "serde_derive",
"Serialize", "Serialize",
"1.0.104", "1.0.106",
r##" r##"
struct {} struct {}
"##, "##,
@ -37,7 +37,7 @@ SUBTREE $
#[test] #[test]
fn test_derive_proc_macro_list() { fn test_derive_proc_macro_list() {
let res = list("serde_derive", "1.0.104").join("\n"); let res = list("serde_derive", "1.0.106").join("\n");
assert_eq_text!( assert_eq_text!(
&res, &res,

View file

@ -18,7 +18,7 @@ ra_db = { path = "../ra_db" }
ra_cfg = { path = "../ra_cfg" } ra_cfg = { path = "../ra_cfg" }
ra_proc_macro = { path = "../ra_proc_macro" } ra_proc_macro = { path = "../ra_proc_macro" }
serde = { version = "1.0.104", features = ["derive"] } serde = { version = "1.0.106", features = ["derive"] }
serde_json = "1.0.48" serde_json = "1.0.48"
anyhow = "1.0.26" anyhow = "1.0.26"

View file

@ -13,7 +13,7 @@ doctest = false
[dependencies] [dependencies]
itertools = "0.9.0" itertools = "0.9.0"
rowan = "0.10.0" rowan = "0.10.0"
rustc_lexer = { version = "652.0.0", package = "rustc-ap-rustc_lexer" } rustc_lexer = { version = "656.0.0", package = "rustc-ap-rustc_lexer" }
rustc-hash = "1.1.0" rustc-hash = "1.1.0"
arrayvec = "0.5.1" arrayvec = "0.5.1"
once_cell = "1.3.1" once_cell = "1.3.1"
@ -27,7 +27,7 @@ ra_parser = { path = "../ra_parser" }
# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
# to reduce number of compilations # to reduce number of compilations
smol_str = { version = "0.1.15", features = ["serde"] } smol_str = { version = "0.1.15", features = ["serde"] }
serde = { version = "1.0.104", features = ["derive"] } serde = { version = "1.0.106", features = ["derive"] }
[dev-dependencies] [dev-dependencies]
test_utils = { path = "../test_utils" } test_utils = { path = "../test_utils" }

View file

@ -16,9 +16,7 @@ use crate::{
}; };
pub use self::{ pub use self::{
expr_extensions::{ expr_extensions::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp},
ArrayExprKind, BinOp, BlockModifier, ElseBranch, LiteralKind, PrefixOp, RangeOp,
},
extensions::{ extensions::{
AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents, AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents,
StructKind, TypeBoundKind, VisibilityKind, StructKind, TypeBoundKind, VisibilityKind,

View file

@ -28,7 +28,7 @@ impl ast::BinExpr {
impl ast::FnDef { impl ast::FnDef {
#[must_use] #[must_use]
pub fn with_body(&self, body: ast::Block) -> ast::FnDef { pub fn with_body(&self, body: ast::BlockExpr) -> ast::FnDef {
let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() { let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() {
old_body.syntax().clone().into() old_body.syntax().clone().into()

View file

@ -16,7 +16,7 @@ impl ast::Expr {
| ast::Expr::WhileExpr(_) | ast::Expr::WhileExpr(_)
| ast::Expr::BlockExpr(_) | ast::Expr::BlockExpr(_)
| ast::Expr::MatchExpr(_) | ast::Expr::MatchExpr(_)
| ast::Expr::TryExpr(_) => true, | ast::Expr::EffectExpr(_) => true,
_ => false, _ => false,
} }
} }
@ -359,22 +359,34 @@ impl ast::Literal {
} }
} }
pub enum BlockModifier { #[derive(Debug, Clone, PartialEq, Eq)]
pub enum Effect {
Async(SyntaxToken), Async(SyntaxToken),
Unsafe(SyntaxToken), Unsafe(SyntaxToken),
Try(SyntaxToken),
// Very much not an effect, but we stuff it into this node anyway
Label(ast::Label),
}
impl ast::EffectExpr {
pub fn effect(&self) -> Effect {
if let Some(token) = self.async_token() {
return Effect::Async(token);
}
if let Some(token) = self.unsafe_token() {
return Effect::Unsafe(token);
}
if let Some(token) = self.try_token() {
return Effect::Try(token);
}
if let Some(label) = self.label() {
return Effect::Label(label);
}
unreachable!("ast::EffectExpr without Effect")
}
} }
impl ast::BlockExpr { impl ast::BlockExpr {
pub fn modifier(&self) -> Option<BlockModifier> {
if let Some(token) = self.async_token() {
return Some(BlockModifier::Async(token));
}
if let Some(token) = self.unsafe_token() {
return Some(BlockModifier::Unsafe(token));
}
None
}
/// false if the block is an intrinsic part of the syntax and can't be /// false if the block is an intrinsic part of the syntax and can't be
/// replaced with an arbitrary expression. /// replaced with an arbitrary expression.
/// ///
@ -383,15 +395,12 @@ impl ast::BlockExpr {
/// const FOO: () = { stand_alone }; /// const FOO: () = { stand_alone };
/// ``` /// ```
pub fn is_standalone(&self) -> bool { pub fn is_standalone(&self) -> bool {
if self.modifier().is_some() {
return false;
}
let parent = match self.syntax().parent() { let parent = match self.syntax().parent() {
Some(it) => it, Some(it) => it,
None => return true, None => return true,
}; };
match parent.kind() { match parent.kind() {
FN_DEF | IF_EXPR | WHILE_EXPR | LOOP_EXPR => false, FN_DEF | IF_EXPR | WHILE_EXPR | LOOP_EXPR | EFFECT_EXPR => false,
_ => true, _ => true,
} }
} }
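
Note: a sketch of how a consumer could use the new EffectExpr::effect accessor. It relies only on API visible in this diff plus `SourceFile::parse`, and assumes `ra_syntax` is available as a dependency; it is illustrative, not part of the change:

use ra_syntax::{ast::{self, Effect}, AstNode, SourceFile};

fn main() {
    // Parse a tiny file and grab the first EffectExpr in it.
    let file = SourceFile::parse("fn f() { unsafe { } }").tree();
    let effect_expr = file
        .syntax()
        .descendants()
        .find_map(ast::EffectExpr::cast)
        .expect("the unsafe block should parse to an EffectExpr");
    match effect_expr.effect() {
        Effect::Unsafe(_) => println!("unsafe block"),
        Effect::Async(_) | Effect::Try(_) | Effect::Label(_) => unreachable!(),
    }
}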

View file

@ -407,7 +407,7 @@ impl ast::Visibility {
} else if self.super_token().is_some() { } else if self.super_token().is_some() {
VisibilityKind::PubSuper VisibilityKind::PubSuper
} else if self.self_token().is_some() { } else if self.self_token().is_some() {
VisibilityKind::PubSuper VisibilityKind::PubSelf
} else { } else {
VisibilityKind::Pub VisibilityKind::Pub
} }

View file

@ -475,6 +475,19 @@ impl LoopExpr {
pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) } pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct EffectExpr {
pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for EffectExpr {}
impl EffectExpr {
pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ForExpr { pub struct ForExpr {
pub(crate) syntax: SyntaxNode, pub(crate) syntax: SyntaxNode,
@ -541,11 +554,12 @@ pub struct BlockExpr {
pub(crate) syntax: SyntaxNode, pub(crate) syntax: SyntaxNode,
} }
impl ast::AttrsOwner for BlockExpr {} impl ast::AttrsOwner for BlockExpr {}
impl ast::ModuleItemOwner for BlockExpr {}
impl BlockExpr { impl BlockExpr {
pub fn label(&self) -> Option<Label> { support::child(&self.syntax) } pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
pub fn block(&self) -> Option<Block> { support::child(&self.syntax) } pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -617,8 +631,8 @@ pub struct TryExpr {
} }
impl ast::AttrsOwner for TryExpr {} impl ast::AttrsOwner for TryExpr {}
impl TryExpr { impl TryExpr {
pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -1111,19 +1125,6 @@ impl Condition {
pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) } pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Block {
pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for Block {}
impl ast::ModuleItemOwner for Block {}
impl Block {
pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ParamList { pub struct ParamList {
pub(crate) syntax: SyntaxNode, pub(crate) syntax: SyntaxNode,
@ -1241,6 +1242,8 @@ pub struct PathSegment {
impl PathSegment { impl PathSegment {
pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) } pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) } pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) } pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
pub fn type_arg_list(&self) -> Option<TypeArgList> { support::child(&self.syntax) } pub fn type_arg_list(&self) -> Option<TypeArgList> { support::child(&self.syntax) }
@ -1465,6 +1468,7 @@ pub enum Expr {
FieldExpr(FieldExpr), FieldExpr(FieldExpr),
AwaitExpr(AwaitExpr), AwaitExpr(AwaitExpr),
TryExpr(TryExpr), TryExpr(TryExpr),
EffectExpr(EffectExpr),
CastExpr(CastExpr), CastExpr(CastExpr),
RefExpr(RefExpr), RefExpr(RefExpr),
PrefixExpr(PrefixExpr), PrefixExpr(PrefixExpr),
@ -1947,6 +1951,17 @@ impl AstNode for LoopExpr {
} }
fn syntax(&self) -> &SyntaxNode { &self.syntax } fn syntax(&self) -> &SyntaxNode { &self.syntax }
} }
impl AstNode for EffectExpr {
fn can_cast(kind: SyntaxKind) -> bool { kind == EFFECT_EXPR }
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for ForExpr { impl AstNode for ForExpr {
fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR } fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
fn cast(syntax: SyntaxNode) -> Option<Self> { fn cast(syntax: SyntaxNode) -> Option<Self> {
@ -2629,17 +2644,6 @@ impl AstNode for Condition {
} }
fn syntax(&self) -> &SyntaxNode { &self.syntax } fn syntax(&self) -> &SyntaxNode { &self.syntax }
} }
impl AstNode for Block {
fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK }
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for ParamList { impl AstNode for ParamList {
fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST } fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST }
fn cast(syntax: SyntaxNode) -> Option<Self> { fn cast(syntax: SyntaxNode) -> Option<Self> {
@ -3288,6 +3292,9 @@ impl From<AwaitExpr> for Expr {
impl From<TryExpr> for Expr { impl From<TryExpr> for Expr {
fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) }
} }
impl From<EffectExpr> for Expr {
fn from(node: EffectExpr) -> Expr { Expr::EffectExpr(node) }
}
impl From<CastExpr> for Expr { impl From<CastExpr> for Expr {
fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) } fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) }
} }
@ -3318,8 +3325,10 @@ impl AstNode for Expr {
TUPLE_EXPR | ARRAY_EXPR | PAREN_EXPR | PATH_EXPR | LAMBDA_EXPR | IF_EXPR TUPLE_EXPR | ARRAY_EXPR | PAREN_EXPR | PATH_EXPR | LAMBDA_EXPR | IF_EXPR
| LOOP_EXPR | FOR_EXPR | WHILE_EXPR | CONTINUE_EXPR | BREAK_EXPR | LABEL | LOOP_EXPR | FOR_EXPR | WHILE_EXPR | CONTINUE_EXPR | BREAK_EXPR | LABEL
| BLOCK_EXPR | RETURN_EXPR | MATCH_EXPR | RECORD_LIT | CALL_EXPR | INDEX_EXPR | BLOCK_EXPR | RETURN_EXPR | MATCH_EXPR | RECORD_LIT | CALL_EXPR | INDEX_EXPR
| METHOD_CALL_EXPR | FIELD_EXPR | AWAIT_EXPR | TRY_EXPR | CAST_EXPR | REF_EXPR | METHOD_CALL_EXPR | FIELD_EXPR | AWAIT_EXPR | TRY_EXPR | EFFECT_EXPR | CAST_EXPR
| PREFIX_EXPR | RANGE_EXPR | BIN_EXPR | LITERAL | MACRO_CALL | BOX_EXPR => true, | REF_EXPR | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR | LITERAL | MACRO_CALL | BOX_EXPR => {
true
}
_ => false, _ => false,
} }
} }
@ -3347,6 +3356,7 @@ impl AstNode for Expr {
FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }), FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }),
AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }), AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }),
TRY_EXPR => Expr::TryExpr(TryExpr { syntax }), TRY_EXPR => Expr::TryExpr(TryExpr { syntax }),
EFFECT_EXPR => Expr::EffectExpr(EffectExpr { syntax }),
CAST_EXPR => Expr::CastExpr(CastExpr { syntax }), CAST_EXPR => Expr::CastExpr(CastExpr { syntax }),
REF_EXPR => Expr::RefExpr(RefExpr { syntax }), REF_EXPR => Expr::RefExpr(RefExpr { syntax }),
PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }), PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }),
@ -3383,6 +3393,7 @@ impl AstNode for Expr {
Expr::FieldExpr(it) => &it.syntax, Expr::FieldExpr(it) => &it.syntax,
Expr::AwaitExpr(it) => &it.syntax, Expr::AwaitExpr(it) => &it.syntax,
Expr::TryExpr(it) => &it.syntax, Expr::TryExpr(it) => &it.syntax,
Expr::EffectExpr(it) => &it.syntax,
Expr::CastExpr(it) => &it.syntax, Expr::CastExpr(it) => &it.syntax,
Expr::RefExpr(it) => &it.syntax, Expr::RefExpr(it) => &it.syntax,
Expr::PrefixExpr(it) => &it.syntax, Expr::PrefixExpr(it) => &it.syntax,
@ -3863,6 +3874,11 @@ impl std::fmt::Display for LoopExpr {
std::fmt::Display::fmt(self.syntax(), f) std::fmt::Display::fmt(self.syntax(), f)
} }
} }
impl std::fmt::Display for EffectExpr {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ForExpr { impl std::fmt::Display for ForExpr {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f) std::fmt::Display::fmt(self.syntax(), f)
@ -4173,11 +4189,6 @@ impl std::fmt::Display for Condition {
std::fmt::Display::fmt(self.syntax(), f) std::fmt::Display::fmt(self.syntax(), f)
} }
} }
impl std::fmt::Display for Block {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for ParamList { impl std::fmt::Display for ParamList {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f) std::fmt::Display::fmt(self.syntax(), f)

View file

@ -82,14 +82,6 @@ pub fn block_expr(
ast_from_text(&format!("fn f() {}", buf)) ast_from_text(&format!("fn f() {}", buf))
} }
pub fn block_from_expr(e: ast::Expr) -> ast::Block {
return from_text(&format!("{{ {} }}", e));
fn from_text(text: &str) -> ast::Block {
ast_from_text(&format!("fn f() {}", text))
}
}
pub fn expr_unit() -> ast::Expr { pub fn expr_unit() -> ast::Expr {
expr_from_text("()") expr_from_text("()")
} }

View file

@ -237,8 +237,7 @@ fn api_walkthrough() {
// Let's get the `1 + 1` expression! // Let's get the `1 + 1` expression!
let body: ast::BlockExpr = func.body().unwrap(); let body: ast::BlockExpr = func.body().unwrap();
let block = body.block().unwrap(); let expr: ast::Expr = body.expr().unwrap();
let expr: ast::Expr = block.expr().unwrap();
// Enums are used to group related ast nodes together, and can be used for // Enums are used to group related ast nodes together, and can be used for
// matching. However, because there are no public fields, it's possible to // matching. However, because there are no public fields, it's possible to
@ -274,8 +273,8 @@ fn api_walkthrough() {
assert_eq!(text.to_string(), "1 + 1"); assert_eq!(text.to_string(), "1 + 1");
// There's a bunch of traversal methods on `SyntaxNode`: // There's a bunch of traversal methods on `SyntaxNode`:
assert_eq!(expr_syntax.parent().as_ref(), Some(block.syntax())); assert_eq!(expr_syntax.parent().as_ref(), Some(body.syntax()));
assert_eq!(block.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{'])); assert_eq!(body.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
assert_eq!( assert_eq!(
expr_syntax.next_sibling_or_token().map(|it| it.kind()), expr_syntax.next_sibling_or_token().map(|it| it.kind()),
Some(SyntaxKind::WHITESPACE) Some(SyntaxKind::WHITESPACE)

View file

@ -180,7 +180,7 @@ fn rustc_token_kind_to_syntax_kind(
return (syntax_kind, None); return (syntax_kind, None);
fn match_literal_kind(kind: &rustc_lexer::LiteralKind) -> (SyntaxKind, Option<&'static str>) { fn match_literal_kind(kind: &rustc_lexer::LiteralKind) -> (SyntaxKind, Option<&'static str>) {
use rustc_lexer::LiteralKind as LK; use rustc_lexer::{LexRawStrError, LiteralKind as LK};
#[rustfmt::skip] #[rustfmt::skip]
let syntax_kind = match *kind { let syntax_kind = match *kind {
@ -215,21 +215,28 @@ fn rustc_token_kind_to_syntax_kind(
return (BYTE_STRING, Some("Missing trailing `\"` symbol to terminate the byte string literal")) return (BYTE_STRING, Some("Missing trailing `\"` symbol to terminate the byte string literal"))
} }
LK::RawStr { started: true, terminated: true, .. } => RAW_STRING, LK::RawStr(str) => match str.validate() {
LK::RawStr { started: true, terminated: false, .. } => { Ok(_) => RAW_STRING,
Err(LexRawStrError::InvalidStarter) => return (RAW_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw string literal")),
Err(LexRawStrError::NoTerminator { expected, found, .. }) => if expected == found {
return (RAW_STRING, Some("Missing trailing `\"` to terminate the raw string literal"))
} else {
return (RAW_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw string literal")) return (RAW_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw string literal"))
}
LK::RawStr { started: false, .. } => {
return (RAW_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw string literal"))
}
LK::RawByteStr { started: true, terminated: true, .. } => RAW_BYTE_STRING, },
LK::RawByteStr { started: true, terminated: false, .. } => { Err(LexRawStrError::TooManyDelimiters { .. }) => return (RAW_STRING, Some("Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols")),
},
LK::RawByteStr(str) => match str.validate() {
Ok(_) => RAW_BYTE_STRING,
Err(LexRawStrError::InvalidStarter) => return (RAW_BYTE_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw byte string literal")),
Err(LexRawStrError::NoTerminator { expected, found, .. }) => if expected == found {
return (RAW_BYTE_STRING, Some("Missing trailing `\"` to terminate the raw byte string literal"))
} else {
return (RAW_BYTE_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw byte string literal")) return (RAW_BYTE_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw byte string literal"))
}
LK::RawByteStr { started: false, .. } => { },
return (RAW_BYTE_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw byte string literal")) Err(LexRawStrError::TooManyDelimiters { .. }) => return (RAW_BYTE_STRING, Some("Too many `#` symbols: raw byte strings may be delimited by up to 65535 `#` symbols")),
} },
}; };
(syntax_kind, None) (syntax_kind, None)
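
Note: the rewritten match gives each way a raw (byte) string can fail to lex its own message. A sketch of the literal shapes behind the new diagnostics (the broken ones are described in comments because they would not lex at all):

fn main() {
    let ok = r#"a raw string with "quotes" inside"#; // lexes as RAW_STRING, no error
    // r## followed by anything but `"`  -> "Missing `\"` symbol after `#` symbols to begin ..."
    // r#"never closed                   -> "Missing trailing `\"` with `#` symbols to terminate ..."
    // more than 65535 `#` delimiters    -> "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols"
    println!("{}", ok);
}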

View file

@ -96,7 +96,7 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
ast::RecordField(it) => validate_numeric_name(it.name_ref(), &mut errors), ast::RecordField(it) => validate_numeric_name(it.name_ref(), &mut errors),
ast::Visibility(it) => validate_visibility(it, &mut errors), ast::Visibility(it) => validate_visibility(it, &mut errors),
ast::RangeExpr(it) => validate_range_expr(it, &mut errors), ast::RangeExpr(it) => validate_range_expr(it, &mut errors),
ast::PathSegment(it) => validate_crate_keyword_in_path_segment(it, &mut errors), ast::PathSegment(it) => validate_path_keywords(it, &mut errors),
_ => (), _ => (),
} }
} }
@ -224,46 +224,49 @@ fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) {
} }
} }
fn validate_crate_keyword_in_path_segment( fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxError>) {
segment: ast::PathSegment, use ast::PathSegmentKind;
errors: &mut Vec<SyntaxError>,
) {
const ERR_MSG: &str = "The `crate` keyword is only allowed as the first segment of a path";
let crate_token = match segment.crate_token() { let path = segment.parent_path();
None => return, let is_path_start = segment.coloncolon_token().is_none() && path.qualifier().is_none();
Some(it) => it,
};
// Disallow both ::crate and foo::crate if let Some(token) = segment.self_token() {
let mut path = segment.parent_path(); if !is_path_start {
if segment.coloncolon_token().is_some() || path.qualifier().is_some() { errors.push(SyntaxError::new(
errors.push(SyntaxError::new(ERR_MSG, crate_token.text_range())); "The `self` keyword is only allowed as the first segment of a path",
token.text_range(),
));
}
} else if let Some(token) = segment.crate_token() {
if !is_path_start || use_prefix(path).is_some() {
errors.push(SyntaxError::new(
"The `crate` keyword is only allowed as the first segment of a path",
token.text_range(),
));
}
} else if let Some(token) = segment.super_token() {
if !all_supers(&path) {
errors.push(SyntaxError::new(
"The `super` keyword may only be preceded by other `super`s",
token.text_range(),
));
return; return;
} }
// For expressions and types, validation is complete, but we still have let mut curr_path = path;
// to handle invalid UseItems like this: while let Some(prefix) = use_prefix(curr_path) {
// if !all_supers(&prefix) {
// use foo:{crate::bar::baz}; errors.push(SyntaxError::new(
// "The `super` keyword may only be preceded by other `super`s",
// To handle this we must inspect the parent `UseItem`s and `UseTree`s token.text_range(),
// but right now we're looking deep inside the nested `Path` nodes because ));
// `Path`s are left-associative: return;
// }
// ((crate)::bar)::baz) curr_path = prefix;
// ^ current value of path }
//
// So we need to climb to the top
while let Some(parent) = path.parent_path() {
path = parent;
} }
// Now that we've found the whole path we need to see if there's a prefix fn use_prefix(mut path: ast::Path) -> Option<ast::Path> {
// somewhere in the UseTree hierarchy. This check is arbitrarily deep
// because rust allows arbitrary nesting like so:
//
// use {foo::{{{{crate::bar::baz}}}}};
for node in path.syntax().ancestors().skip(1) { for node in path.syntax().ancestors().skip(1) {
match_ast! { match_ast! {
match node { match node {
@ -271,12 +274,32 @@ fn validate_crate_keyword_in_path_segment(
// Even a top-level path exists within a `UseTree` so we must explicitly // Even a top-level path exists within a `UseTree` so we must explicitly
// allow our path but disallow anything else // allow our path but disallow anything else
if tree_path != path { if tree_path != path {
errors.push(SyntaxError::new(ERR_MSG, crate_token.text_range())); return Some(tree_path);
} }
}, },
ast::UseTreeList(_it) => continue, ast::UseTreeList(_it) => continue,
_ => return, ast::Path(parent) => path = parent,
_ => return None,
} }
}; };
} }
return None;
}
fn all_supers(path: &ast::Path) -> bool {
let segment = match path.segment() {
Some(it) => it,
None => return false,
};
if segment.kind() != Some(PathSegmentKind::SuperKw) {
return false;
}
if let Some(ref subpath) = path.qualifier() {
return all_supers(subpath);
}
return true;
}
} }
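
Note: validate_path_keywords now rejects misplaced `self` and `super` segments in addition to `crate`. A stand-alone sketch of accepted and rejected paths; each commented-out line corresponds to one of the new error messages:

#![allow(unused_imports)]

mod a {
    pub mod b {
        use super::super::a;        // OK: a path may start with a chain of `super`s
        // use a::b::self;          // "The `self` keyword is only allowed as the first segment of a path"
        // use crate::a::crate::b;  // "The `crate` keyword is only allowed as the first segment of a path"
        // use crate::a::super::b;  // "The `super` keyword may only be preceded by other `super`s"
    }
}

fn main() {}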

View file

@ -6,19 +6,17 @@ use crate::{
SyntaxKind::*, SyntaxKind::*,
}; };
pub(crate) fn validate_block_expr(expr: ast::BlockExpr, errors: &mut Vec<SyntaxError>) { pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
if let Some(parent) = expr.syntax().parent() { if let Some(parent) = block.syntax().parent() {
match parent.kind() { match parent.kind() {
FN_DEF | EXPR_STMT | BLOCK => return, FN_DEF | EXPR_STMT | BLOCK_EXPR => return,
_ => {} _ => {}
} }
} }
if let Some(block) = expr.block() {
errors.extend(block.attrs().map(|attr| { errors.extend(block.attrs().map(|attr| {
SyntaxError::new( SyntaxError::new(
"A block in this position cannot accept inner attributes", "A block in this position cannot accept inner attributes",
attr.syntax().text_range(), attr.syntax().text_range(),
) )
})) }))
}
} }
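
Note: the updated validator still accepts inner attributes when the block's parent is a function definition, an expression statement, or another block, and reports them elsewhere. A small sketch (the rejected form is commented out; the quoted message is rust-analyzer's):

#![allow(dead_code)]

fn body_block() {
    #![allow(unused)]   // accepted: the block's parent is the function definition
}

fn value_block() {
    // let _x = { #![allow(unused)] 1 };  // reported: "A block in this position
    //                                     // cannot accept inner attributes"
}

fn main() {}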

View file

@ -28,7 +28,6 @@ SOURCE_FILE@0..54
R_PAREN@26..27 ")" R_PAREN@26..27 ")"
WHITESPACE@27..28 " " WHITESPACE@27..28 " "
BLOCK_EXPR@28..31 BLOCK_EXPR@28..31
BLOCK@28..31
L_CURLY@28..29 "{" L_CURLY@28..29 "{"
WHITESPACE@29..30 "\n" WHITESPACE@29..30 "\n"
R_CURLY@30..31 "}" R_CURLY@30..31 "}"

View file

@ -21,7 +21,6 @@ SOURCE_FILE@0..31
L_PAREN@23..24 "(" L_PAREN@23..24 "("
R_PAREN@24..25 ")" R_PAREN@24..25 ")"
BLOCK_EXPR@25..27 BLOCK_EXPR@25..27
BLOCK@25..27
L_CURLY@25..26 "{" L_CURLY@25..26 "{"
R_CURLY@26..27 "}" R_CURLY@26..27 "}"
WHITESPACE@27..29 "\n\n" WHITESPACE@27..29 "\n\n"

View file

@ -9,7 +9,6 @@ SOURCE_FILE@0..95
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..12 BLOCK_EXPR@9..12
BLOCK@9..12
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..11 "\n" WHITESPACE@10..11 "\n"
R_CURLY@11..12 "}" R_CURLY@11..12 "}"
@ -34,7 +33,6 @@ SOURCE_FILE@0..95
TRUE_KW@29..33 "true" TRUE_KW@29..33 "true"
WHITESPACE@33..34 " " WHITESPACE@33..34 " "
BLOCK_EXPR@34..51 BLOCK_EXPR@34..51
BLOCK@34..51
L_CURLY@34..35 "{" L_CURLY@34..35 "{"
WHITESPACE@35..44 "\n " WHITESPACE@35..44 "\n "
LITERAL@44..45 LITERAL@44..45
@ -45,7 +43,6 @@ SOURCE_FILE@0..95
ELSE_KW@52..56 "else" ELSE_KW@52..56 "else"
WHITESPACE@56..57 " " WHITESPACE@56..57 " "
BLOCK_EXPR@57..78 BLOCK_EXPR@57..78
BLOCK@57..78
L_CURLY@57..58 "{" L_CURLY@57..58 "{"
WHITESPACE@58..67 "\n " WHITESPACE@58..67 "\n "
BIN_EXPR@67..72 BIN_EXPR@67..72
@ -71,7 +68,6 @@ SOURCE_FILE@0..95
R_PAREN@89..90 ")" R_PAREN@89..90 ")"
WHITESPACE@90..91 " " WHITESPACE@90..91 " "
BLOCK_EXPR@91..94 BLOCK_EXPR@91..94
BLOCK@91..94
L_CURLY@91..92 "{" L_CURLY@91..92 "{"
WHITESPACE@92..93 "\n" WHITESPACE@92..93 "\n"
R_CURLY@93..94 "}" R_CURLY@93..94 "}"

View file

@ -9,7 +9,6 @@ SOURCE_FILE@0..42
R_PAREN@8..9 ")" R_PAREN@8..9 ")"
WHITESPACE@9..10 " " WHITESPACE@9..10 " "
BLOCK_EXPR@10..41 BLOCK_EXPR@10..41
BLOCK@10..41
L_CURLY@10..11 "{" L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n " WHITESPACE@11..16 "\n "
EXPR_STMT@16..24 EXPR_STMT@16..24
@ -26,10 +25,10 @@ SOURCE_FILE@0..42
R_PAREN@23..24 ")" R_PAREN@23..24 ")"
WHITESPACE@24..25 " " WHITESPACE@24..25 " "
EXPR_STMT@25..39 EXPR_STMT@25..39
BLOCK_EXPR@25..38 EFFECT_EXPR@25..38
UNSAFE_KW@25..31 "unsafe" UNSAFE_KW@25..31 "unsafe"
WHITESPACE@31..32 " " WHITESPACE@31..32 " "
BLOCK@32..38 BLOCK_EXPR@32..38
L_CURLY@32..33 "{" L_CURLY@32..33 "{"
WHITESPACE@33..34 " " WHITESPACE@33..34 " "
TUPLE_EXPR@34..36 TUPLE_EXPR@34..36

View file

@ -25,7 +25,6 @@ SOURCE_FILE@0..23
IDENT@18..19 "T" IDENT@18..19 "T"
WHITESPACE@19..20 " " WHITESPACE@19..20 " "
BLOCK_EXPR@20..22 BLOCK_EXPR@20..22
BLOCK@20..22
L_CURLY@20..21 "{" L_CURLY@20..21 "{"
R_CURLY@21..22 "}" R_CURLY@21..22 "}"
WHITESPACE@22..23 "\n" WHITESPACE@22..23 "\n"

View file

@ -9,7 +9,6 @@ SOURCE_FILE@0..56
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..55 BLOCK_EXPR@9..55
BLOCK@9..55
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
EXPR_STMT@15..38 EXPR_STMT@15..38

View file

@ -20,7 +20,6 @@ SOURCE_FILE@0..47
R_PAREN@15..16 ")" R_PAREN@15..16 ")"
WHITESPACE@16..17 " " WHITESPACE@16..17 " "
BLOCK_EXPR@17..46 BLOCK_EXPR@17..46
BLOCK@17..46
L_CURLY@17..18 "{" L_CURLY@17..18 "{"
WHITESPACE@18..23 "\n " WHITESPACE@18..23 "\n "
LET_STMT@23..36 LET_STMT@23..36

View file

@ -33,7 +33,6 @@ SOURCE_FILE@0..183
IDENT@39..46 "ScopeId" IDENT@39..46 "ScopeId"
WHITESPACE@46..47 " " WHITESPACE@46..47 " "
BLOCK_EXPR@47..161 BLOCK_EXPR@47..161
BLOCK@47..161
L_CURLY@47..48 "{" L_CURLY@47..48 "{"
WHITESPACE@48..57 "\n " WHITESPACE@48..57 "\n "
LET_STMT@57..85 LET_STMT@57..85

View file

@ -9,7 +9,6 @@ SOURCE_FILE@0..139
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..138 BLOCK_EXPR@9..138
BLOCK@9..138
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
LET_STMT@15..24 LET_STMT@15..24
@ -62,7 +61,6 @@ SOURCE_FILE@0..139
TRUE_KW@83..87 "true" TRUE_KW@83..87 "true"
WHITESPACE@87..88 " " WHITESPACE@87..88 " "
BLOCK_EXPR@88..90 BLOCK_EXPR@88..90
BLOCK@88..90
L_CURLY@88..89 "{" L_CURLY@88..89 "{"
R_CURLY@89..90 "}" R_CURLY@89..90 "}"
WHITESPACE@90..95 "\n " WHITESPACE@90..95 "\n "
@ -78,7 +76,6 @@ SOURCE_FILE@0..139
TRUE_KW@109..113 "true" TRUE_KW@109..113 "true"
WHITESPACE@113..114 " " WHITESPACE@113..114 " "
BLOCK_EXPR@114..116 BLOCK_EXPR@114..116
BLOCK@114..116
L_CURLY@114..115 "{" L_CURLY@114..115 "{"
R_CURLY@115..116 "}" R_CURLY@115..116 "}"
WHITESPACE@116..121 "\n " WHITESPACE@116..121 "\n "
@ -89,7 +86,6 @@ SOURCE_FILE@0..139
LOOP_KW@129..133 "loop" LOOP_KW@129..133 "loop"
WHITESPACE@133..134 " " WHITESPACE@133..134 " "
BLOCK_EXPR@134..136 BLOCK_EXPR@134..136
BLOCK@134..136
L_CURLY@134..135 "{" L_CURLY@134..135 "{"
R_CURLY@135..136 "}" R_CURLY@135..136 "}"
WHITESPACE@136..137 "\n" WHITESPACE@136..137 "\n"

View file

@ -12,7 +12,6 @@ SOURCE_FILE@0..16
R_PAREN@11..12 ")" R_PAREN@11..12 ")"
WHITESPACE@12..13 " " WHITESPACE@12..13 " "
BLOCK_EXPR@13..15 BLOCK_EXPR@13..15
BLOCK@13..15
L_CURLY@13..14 "{" L_CURLY@13..14 "{"
R_CURLY@14..15 "}" R_CURLY@14..15 "}"
WHITESPACE@15..16 "\n" WHITESPACE@15..16 "\n"

View file

@ -26,7 +26,6 @@ SOURCE_FILE@0..22
R_PAREN@16..17 ")" R_PAREN@16..17 ")"
WHITESPACE@17..18 " " WHITESPACE@17..18 " "
BLOCK_EXPR@18..21 BLOCK_EXPR@18..21
BLOCK@18..21
L_CURLY@18..19 "{" L_CURLY@18..19 "{"
WHITESPACE@19..20 "\n" WHITESPACE@19..20 "\n"
R_CURLY@20..21 "}" R_CURLY@20..21 "}"

View file

@ -9,7 +9,6 @@ SOURCE_FILE@0..112
R_PAREN@5..6 ")" R_PAREN@5..6 ")"
WHITESPACE@6..7 " " WHITESPACE@6..7 " "
BLOCK_EXPR@7..33 BLOCK_EXPR@7..33
BLOCK@7..33
L_CURLY@7..8 "{" L_CURLY@7..8 "{"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
EXPR_STMT@9..17 EXPR_STMT@9..17
@ -51,7 +50,6 @@ SOURCE_FILE@0..112
R_PAREN@39..40 ")" R_PAREN@39..40 ")"
WHITESPACE@40..41 " " WHITESPACE@40..41 " "
BLOCK_EXPR@41..68 BLOCK_EXPR@41..68
BLOCK@41..68
L_CURLY@41..42 "{" L_CURLY@41..42 "{"
WHITESPACE@42..43 " " WHITESPACE@42..43 " "
EXPR_STMT@43..54 EXPR_STMT@43..54
@ -100,7 +98,6 @@ SOURCE_FILE@0..112
R_PAREN@74..75 ")" R_PAREN@74..75 ")"
WHITESPACE@75..76 " " WHITESPACE@75..76 " "
BLOCK_EXPR@76..111 BLOCK_EXPR@76..111
BLOCK@76..111
L_CURLY@76..77 "{" L_CURLY@76..77 "{"
WHITESPACE@77..78 " " WHITESPACE@77..78 " "
EXPR_STMT@78..93 EXPR_STMT@78..93

View file

@ -9,7 +9,6 @@ SOURCE_FILE@0..94
R_PAREN@8..9 ")" R_PAREN@8..9 ")"
WHITESPACE@9..10 " " WHITESPACE@9..10 " "
BLOCK_EXPR@10..55 BLOCK_EXPR@10..55
BLOCK@10..55
L_CURLY@10..11 "{" L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n " WHITESPACE@11..16 "\n "
MACRO_CALL@16..49 MACRO_CALL@16..49

View file

@ -62,7 +62,6 @@ SOURCE_FILE@0..240
R_PAREN@49..50 ")" R_PAREN@49..50 ")"
WHITESPACE@50..51 " " WHITESPACE@50..51 " "
BLOCK_EXPR@51..53 BLOCK_EXPR@51..53
BLOCK@51..53
L_CURLY@51..52 "{" L_CURLY@51..52 "{"
R_CURLY@52..53 "}" R_CURLY@52..53 "}"
WHITESPACE@53..55 "\n\n" WHITESPACE@53..55 "\n\n"
@ -76,7 +75,6 @@ SOURCE_FILE@0..240
R_PAREN@63..64 ")" R_PAREN@63..64 ")"
WHITESPACE@64..65 " " WHITESPACE@64..65 " "
BLOCK_EXPR@65..239 BLOCK_EXPR@65..239
BLOCK@65..239
L_CURLY@65..66 "{" L_CURLY@65..66 "{"
WHITESPACE@66..71 "\n " WHITESPACE@66..71 "\n "
LET_STMT@71..121 LET_STMT@71..121

View file

@ -9,7 +9,6 @@ SOURCE_FILE@0..575
R_PAREN@8..9 ")" R_PAREN@8..9 ")"
WHITESPACE@9..10 " " WHITESPACE@9..10 " "
BLOCK_EXPR@10..574 BLOCK_EXPR@10..574
BLOCK@10..574
L_CURLY@10..11 "{" L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n " WHITESPACE@11..16 "\n "
ENUM_DEF@16..152 ENUM_DEF@16..152
@ -131,7 +130,6 @@ SOURCE_FILE@0..575
WHITESPACE@300..306 "\n\n " WHITESPACE@300..306 "\n\n "
EXPR_STMT@306..459 EXPR_STMT@306..459
BLOCK_EXPR@306..459 BLOCK_EXPR@306..459
BLOCK@306..459
L_CURLY@306..307 "{" L_CURLY@306..307 "{"
WHITESPACE@307..316 "\n " WHITESPACE@307..316 "\n "
ENUM_DEF@316..453 ENUM_DEF@316..453

View file

@ -21,7 +21,6 @@ SOURCE_FILE@0..30
R_ANGLE@25..26 ">" R_ANGLE@25..26 ">"
WHITESPACE@26..27 "\n" WHITESPACE@26..27 "\n"
BLOCK_EXPR@27..29 BLOCK_EXPR@27..29
BLOCK@27..29
L_CURLY@27..28 "{" L_CURLY@27..28 "{"
R_CURLY@28..29 "}" R_CURLY@28..29 "}"
WHITESPACE@29..30 "\n" WHITESPACE@29..30 "\n"

View file

@ -20,7 +20,6 @@ SOURCE_FILE@0..24
R_PAREN@11..12 ")" R_PAREN@11..12 ")"
WHITESPACE@12..13 " " WHITESPACE@12..13 " "
BLOCK_EXPR@13..23 BLOCK_EXPR@13..23
BLOCK@13..23
L_CURLY@13..14 "{" L_CURLY@13..14 "{"
WHITESPACE@14..19 "\n " WHITESPACE@14..19 "\n "
FIELD_EXPR@19..21 FIELD_EXPR@19..21

View file

@ -9,7 +9,6 @@ SOURCE_FILE@0..350
R_PAREN@9..10 ")" R_PAREN@9..10 ")"
WHITESPACE@10..11 " " WHITESPACE@10..11 " "
BLOCK_EXPR@11..349 BLOCK_EXPR@11..349
BLOCK@11..349
L_CURLY@11..12 "{" L_CURLY@11..12 "{"
WHITESPACE@12..17 "\n " WHITESPACE@12..17 "\n "
LET_STMT@17..129 LET_STMT@17..129
@ -22,7 +21,6 @@ SOURCE_FILE@0..350
EQ@27..28 "=" EQ@27..28 "="
WHITESPACE@28..29 " " WHITESPACE@28..29 " "
BLOCK_EXPR@29..128 BLOCK_EXPR@29..128
BLOCK@29..128
L_CURLY@29..30 "{" L_CURLY@29..30 "{"
WHITESPACE@30..39 "\n " WHITESPACE@30..39 "\n "
ATTR@39..83 ATTR@39..83
@ -53,7 +51,6 @@ SOURCE_FILE@0..350
TRUE_KW@137..141 "true" TRUE_KW@137..141 "true"
WHITESPACE@141..142 " " WHITESPACE@141..142 " "
BLOCK_EXPR@142..257 BLOCK_EXPR@142..257
BLOCK@142..257
L_CURLY@142..143 "{" L_CURLY@142..143 "{"
WHITESPACE@143..152 "\n " WHITESPACE@143..152 "\n "
ATTR@152..171 ATTR@152..171
@ -96,7 +93,6 @@ SOURCE_FILE@0..350
TRUE_KW@268..272 "true" TRUE_KW@268..272 "true"
WHITESPACE@272..273 " " WHITESPACE@272..273 " "
BLOCK_EXPR@273..347 BLOCK_EXPR@273..347
BLOCK@273..347
L_CURLY@273..274 "{" L_CURLY@273..274 "{"
WHITESPACE@274..283 "\n " WHITESPACE@274..283 "\n "
ATTR@283..302 ATTR@283..302


@ -9,7 +9,6 @@ SOURCE_FILE@0..293
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..292 BLOCK_EXPR@9..292
BLOCK@9..292
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
EXPR_STMT@15..101 EXPR_STMT@15..101


@ -9,7 +9,6 @@ SOURCE_FILE@0..89
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..88 BLOCK_EXPR@9..88
BLOCK@9..88
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
MATCH_EXPR@15..86 MATCH_EXPR@15..86


@ -9,7 +9,6 @@ SOURCE_FILE@0..91
R_PAREN@8..9 ")" R_PAREN@8..9 ")"
WHITESPACE@9..10 " " WHITESPACE@9..10 " "
BLOCK_EXPR@10..89 BLOCK_EXPR@10..89
BLOCK@10..89
L_CURLY@10..11 "{" L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n " WHITESPACE@11..16 "\n "
LET_STMT@16..27 LET_STMT@16..27


@ -44,7 +44,6 @@ SOURCE_FILE@0..48
R_PAREN@43..44 ")" R_PAREN@43..44 ")"
WHITESPACE@44..45 " " WHITESPACE@44..45 " "
BLOCK_EXPR@45..47 BLOCK_EXPR@45..47
BLOCK@45..47
L_CURLY@45..46 "{" L_CURLY@45..46 "{"
R_CURLY@46..47 "}" R_CURLY@46..47 "}"
WHITESPACE@47..48 "\n" WHITESPACE@47..48 "\n"


@ -27,7 +27,6 @@ SOURCE_FILE@0..118
R_PAREN@27..28 ")" R_PAREN@27..28 ")"
WHITESPACE@28..29 " " WHITESPACE@28..29 " "
BLOCK_EXPR@29..31 BLOCK_EXPR@29..31
BLOCK@29..31
L_CURLY@29..30 "{" L_CURLY@29..30 "{"
R_CURLY@30..31 "}" R_CURLY@30..31 "}"
WHITESPACE@31..36 "\n " WHITESPACE@31..36 "\n "
@ -44,7 +43,6 @@ SOURCE_FILE@0..118
R_PAREN@47..48 ")" R_PAREN@47..48 ")"
WHITESPACE@48..49 " " WHITESPACE@48..49 " "
BLOCK_EXPR@49..51 BLOCK_EXPR@49..51
BLOCK@49..51
L_CURLY@49..50 "{" L_CURLY@49..50 "{"
R_CURLY@50..51 "}" R_CURLY@50..51 "}"
WHITESPACE@51..56 "\n " WHITESPACE@51..56 "\n "


@ -9,7 +9,6 @@ SOURCE_FILE@0..33
R_PAREN@8..9 ")" R_PAREN@8..9 ")"
WHITESPACE@9..10 " " WHITESPACE@9..10 " "
BLOCK_EXPR@10..32 BLOCK_EXPR@10..32
BLOCK@10..32
L_CURLY@10..11 "{" L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n " WHITESPACE@11..16 "\n "
EXPR_STMT@16..21 EXPR_STMT@16..21


@ -18,7 +18,6 @@ SOURCE_FILE@0..83
IDENT@12..15 "i32" IDENT@12..15 "i32"
WHITESPACE@15..16 " " WHITESPACE@15..16 " "
BLOCK_EXPR@16..82 BLOCK_EXPR@16..82
BLOCK@16..82
L_CURLY@16..17 "{" L_CURLY@16..17 "{"
WHITESPACE@17..22 "\n " WHITESPACE@17..22 "\n "
EXPR_STMT@22..80 EXPR_STMT@22..80


@ -0,0 +1,70 @@
SOURCE_FILE@0..67
USE_ITEM@0..12
USE_KW@0..3 "use"
WHITESPACE@3..4 " "
USE_TREE@4..11
PATH@4..11
PATH_SEGMENT@4..11
COLON2@4..6 "::"
SUPER_KW@6..11 "super"
SEMICOLON@11..12 ";"
WHITESPACE@12..13 "\n"
USE_ITEM@13..26
USE_KW@13..16 "use"
WHITESPACE@16..17 " "
USE_TREE@17..25
PATH@17..25
PATH@17..18
PATH_SEGMENT@17..18
NAME_REF@17..18
IDENT@17..18 "a"
COLON2@18..20 "::"
PATH_SEGMENT@20..25
SUPER_KW@20..25 "super"
SEMICOLON@25..26 ";"
WHITESPACE@26..27 "\n"
USE_ITEM@27..47
USE_KW@27..30 "use"
WHITESPACE@30..31 " "
USE_TREE@31..46
PATH@31..46
PATH@31..39
PATH@31..36
PATH_SEGMENT@31..36
SUPER_KW@31..36 "super"
COLON2@36..38 "::"
PATH_SEGMENT@38..39
NAME_REF@38..39
IDENT@38..39 "a"
COLON2@39..41 "::"
PATH_SEGMENT@41..46
SUPER_KW@41..46 "super"
SEMICOLON@46..47 ";"
WHITESPACE@47..48 "\n"
USE_ITEM@48..66
USE_KW@48..51 "use"
WHITESPACE@51..52 " "
USE_TREE@52..65
PATH@52..53
PATH_SEGMENT@52..53
NAME_REF@52..53
IDENT@52..53 "a"
COLON2@53..55 "::"
USE_TREE_LIST@55..65
L_CURLY@55..56 "{"
USE_TREE@56..64
PATH@56..64
PATH@56..61
PATH_SEGMENT@56..61
SUPER_KW@56..61 "super"
COLON2@61..63 "::"
PATH_SEGMENT@63..64
NAME_REF@63..64
IDENT@63..64 "b"
R_CURLY@64..65 "}"
SEMICOLON@65..66 ";"
WHITESPACE@66..67 "\n"
error 6..11: The `super` keyword may only be preceded by other `super`s
error 20..25: The `super` keyword may only be preceded by other `super`s
error 41..46: The `super` keyword may only be preceded by other `super`s
error 56..61: The `super` keyword may only be preceded by other `super`s


@ -0,0 +1,4 @@
use ::super;
use a::super;
use super::a::super;
use a::{super::b};


@ -0,0 +1,27 @@
SOURCE_FILE@0..25
USE_ITEM@0..11
USE_KW@0..3 "use"
WHITESPACE@3..4 " "
USE_TREE@4..10
PATH@4..10
PATH_SEGMENT@4..10
COLON2@4..6 "::"
SELF_KW@6..10 "self"
SEMICOLON@10..11 ";"
WHITESPACE@11..12 "\n"
USE_ITEM@12..24
USE_KW@12..15 "use"
WHITESPACE@15..16 " "
USE_TREE@16..23
PATH@16..23
PATH@16..17
PATH_SEGMENT@16..17
NAME_REF@16..17
IDENT@16..17 "a"
COLON2@17..19 "::"
PATH_SEGMENT@19..23
SELF_KW@19..23 "self"
SEMICOLON@23..24 ";"
WHITESPACE@24..25 "\n"
error 6..10: The `self` keyword is only allowed as the first segment of a path
error 19..23: The `self` keyword is only allowed as the first segment of a path


@ -0,0 +1,2 @@
use ::self;
use a::self;


@ -9,7 +9,6 @@ SOURCE_FILE@0..30
R_PAREN@8..9 ")" R_PAREN@8..9 ")"
WHITESPACE@9..10 " " WHITESPACE@9..10 " "
BLOCK_EXPR@10..29 BLOCK_EXPR@10..29
BLOCK@10..29
L_CURLY@10..11 "{" L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n " WHITESPACE@11..16 "\n "
EXPR_STMT@16..22 EXPR_STMT@16..22


@ -8,7 +8,6 @@ SOURCE_FILE@0..33
L_PAREN@6..7 "(" L_PAREN@6..7 "("
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
BLOCK_EXPR@8..10 BLOCK_EXPR@8..10
BLOCK@8..10
L_CURLY@8..9 "{" L_CURLY@8..9 "{"
R_CURLY@9..10 "}" R_CURLY@9..10 "}"
WHITESPACE@10..11 " " WHITESPACE@10..11 " "
@ -29,7 +28,6 @@ SOURCE_FILE@0..33
L_PAREN@28..29 "(" L_PAREN@28..29 "("
R_PAREN@29..30 ")" R_PAREN@29..30 ")"
BLOCK_EXPR@30..32 BLOCK_EXPR@30..32
BLOCK@30..32
L_CURLY@30..31 "{" L_CURLY@30..31 "{"
R_CURLY@31..32 "}" R_CURLY@31..32 "}"
WHITESPACE@32..33 "\n" WHITESPACE@32..33 "\n"


@ -9,7 +9,6 @@ SOURCE_FILE@0..30
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..29 BLOCK_EXPR@9..29
BLOCK@9..29
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..11 " " WHITESPACE@10..11 " "
LET_STMT@11..27 LET_STMT@11..27
@ -20,10 +19,10 @@ SOURCE_FILE@0..30
WHITESPACE@16..17 " " WHITESPACE@16..17 " "
EQ@17..18 "=" EQ@17..18 "="
WHITESPACE@18..19 " " WHITESPACE@18..19 " "
BLOCK_EXPR@19..27 EFFECT_EXPR@19..27
ASYNC_KW@19..24 "async" ASYNC_KW@19..24 "async"
WHITESPACE@24..25 " " WHITESPACE@24..25 " "
BLOCK@25..27 BLOCK_EXPR@25..27
L_CURLY@25..26 "{" L_CURLY@25..26 "{"
R_CURLY@26..27 "}" R_CURLY@26..27 "}"
WHITESPACE@27..28 " " WHITESPACE@27..28 " "


@ -9,7 +9,6 @@ SOURCE_FILE@0..21
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..20 BLOCK_EXPR@9..20
BLOCK@9..20
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..11 " " WHITESPACE@10..11 " "
ERROR@11..14 ERROR@11..14


@ -9,7 +9,6 @@ SOURCE_FILE@0..48
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..47 BLOCK_EXPR@9..47
BLOCK@9..47
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..14 "\n " WHITESPACE@10..14 "\n "
EXPR_STMT@14..25 EXPR_STMT@14..25
@ -50,7 +49,6 @@ SOURCE_FILE@0..48
TRUE_KW@37..41 "true" TRUE_KW@37..41 "true"
WHITESPACE@41..42 " " WHITESPACE@41..42 " "
BLOCK_EXPR@42..44 BLOCK_EXPR@42..44
BLOCK@42..44
L_CURLY@42..43 "{" L_CURLY@42..43 "{"
R_CURLY@43..44 "}" R_CURLY@43..44 "}"
SEMICOLON@44..45 ";" SEMICOLON@44..45 ";"


@ -9,7 +9,6 @@ SOURCE_FILE@0..47
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..46 BLOCK_EXPR@9..46
BLOCK@9..46
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
EXPR_STMT@15..20 EXPR_STMT@15..20


@ -14,7 +14,6 @@ SOURCE_FILE@0..50
R_PAREN@20..21 ")" R_PAREN@20..21 ")"
WHITESPACE@21..22 " " WHITESPACE@21..22 " "
BLOCK_EXPR@22..24 BLOCK_EXPR@22..24
BLOCK@22..24
L_CURLY@22..23 "{" L_CURLY@22..23 "{"
R_CURLY@23..24 "}" R_CURLY@23..24 "}"
WHITESPACE@24..25 "\n" WHITESPACE@24..25 "\n"
@ -33,7 +32,6 @@ SOURCE_FILE@0..50
R_PAREN@45..46 ")" R_PAREN@45..46 ")"
WHITESPACE@46..47 " " WHITESPACE@46..47 " "
BLOCK_EXPR@47..49 BLOCK_EXPR@47..49
BLOCK@47..49
L_CURLY@47..48 "{" L_CURLY@47..48 "{"
R_CURLY@48..49 "}" R_CURLY@48..49 "}"
WHITESPACE@49..50 "\n" WHITESPACE@49..50 "\n"


@ -45,7 +45,6 @@ SOURCE_FILE@0..62
R_PAREN@55..56 ")" R_PAREN@55..56 ")"
WHITESPACE@56..57 " " WHITESPACE@56..57 " "
BLOCK_EXPR@57..59 BLOCK_EXPR@57..59
BLOCK@57..59
L_CURLY@57..58 "{" L_CURLY@57..58 "{"
R_CURLY@58..59 "}" R_CURLY@58..59 "}"
WHITESPACE@59..60 "\n" WHITESPACE@59..60 "\n"


@ -9,7 +9,6 @@ SOURCE_FILE@0..45
R_PAREN@8..9 ")" R_PAREN@8..9 ")"
WHITESPACE@9..10 " " WHITESPACE@9..10 " "
BLOCK_EXPR@10..44 BLOCK_EXPR@10..44
BLOCK@10..44
L_CURLY@10..11 "{" L_CURLY@10..11 "{"
WHITESPACE@11..16 "\n " WHITESPACE@11..16 "\n "
RECORD_LIT@16..42 RECORD_LIT@16..42


@ -51,7 +51,6 @@ SOURCE_FILE@0..83
R_PAREN@57..58 ")" R_PAREN@57..58 ")"
WHITESPACE@58..59 " " WHITESPACE@58..59 " "
BLOCK_EXPR@59..61 BLOCK_EXPR@59..61
BLOCK@59..61
L_CURLY@59..60 "{" L_CURLY@59..60 "{"
R_CURLY@60..61 "}" R_CURLY@60..61 "}"
WHITESPACE@61..66 "\n " WHITESPACE@61..66 "\n "


@ -55,7 +55,6 @@ SOURCE_FILE@0..49
R_PAREN@43..44 ")" R_PAREN@43..44 ")"
WHITESPACE@44..45 "\n" WHITESPACE@44..45 "\n"
BLOCK_EXPR@45..48 BLOCK_EXPR@45..48
BLOCK@45..48
L_CURLY@45..46 "{" L_CURLY@45..46 "{"
WHITESPACE@46..47 " " WHITESPACE@46..47 " "
R_CURLY@47..48 "}" R_CURLY@47..48 "}"


@ -32,7 +32,6 @@ SOURCE_FILE@0..28
L_PAREN@23..24 "(" L_PAREN@23..24 "("
R_PAREN@24..25 ")" R_PAREN@24..25 ")"
BLOCK_EXPR@25..27 BLOCK_EXPR@25..27
BLOCK@25..27
L_CURLY@25..26 "{" L_CURLY@25..26 "{"
R_CURLY@26..27 "}" R_CURLY@26..27 "}"
WHITESPACE@27..28 "\n" WHITESPACE@27..28 "\n"


@ -23,7 +23,6 @@ SOURCE_FILE@0..128
R_PAREN@22..23 ")" R_PAREN@22..23 ")"
WHITESPACE@23..24 " " WHITESPACE@23..24 " "
BLOCK_EXPR@24..26 BLOCK_EXPR@24..26
BLOCK@24..26
L_CURLY@24..25 "{" L_CURLY@24..25 "{"
R_CURLY@25..26 "}" R_CURLY@25..26 "}"
WHITESPACE@26..31 "\n " WHITESPACE@26..31 "\n "
@ -41,7 +40,6 @@ SOURCE_FILE@0..128
R_PAREN@42..43 ")" R_PAREN@42..43 ")"
WHITESPACE@43..44 " " WHITESPACE@43..44 " "
BLOCK_EXPR@44..46 BLOCK_EXPR@44..46
BLOCK@44..46
L_CURLY@44..45 "{" L_CURLY@44..45 "{"
R_CURLY@45..46 "}" R_CURLY@45..46 "}"
WHITESPACE@46..51 "\n " WHITESPACE@46..51 "\n "
@ -61,7 +59,6 @@ SOURCE_FILE@0..128
R_PAREN@65..66 ")" R_PAREN@65..66 ")"
WHITESPACE@66..67 " " WHITESPACE@66..67 " "
BLOCK_EXPR@67..69 BLOCK_EXPR@67..69
BLOCK@67..69
L_CURLY@67..68 "{" L_CURLY@67..68 "{"
R_CURLY@68..69 "}" R_CURLY@68..69 "}"
WHITESPACE@69..74 "\n " WHITESPACE@69..74 "\n "
@ -95,7 +92,6 @@ SOURCE_FILE@0..128
R_PAREN@99..100 ")" R_PAREN@99..100 ")"
WHITESPACE@100..101 " " WHITESPACE@100..101 " "
BLOCK_EXPR@101..103 BLOCK_EXPR@101..103
BLOCK@101..103
L_CURLY@101..102 "{" L_CURLY@101..102 "{"
R_CURLY@102..103 "}" R_CURLY@102..103 "}"
WHITESPACE@103..108 "\n " WHITESPACE@103..108 "\n "
@ -113,7 +109,6 @@ SOURCE_FILE@0..128
R_PAREN@121..122 ")" R_PAREN@121..122 ")"
WHITESPACE@122..123 " " WHITESPACE@122..123 " "
BLOCK_EXPR@123..125 BLOCK_EXPR@123..125
BLOCK@123..125
L_CURLY@123..124 "{" L_CURLY@123..124 "{"
R_CURLY@124..125 "}" R_CURLY@124..125 "}"
WHITESPACE@125..126 "\n" WHITESPACE@125..126 "\n"


@ -9,7 +9,6 @@ SOURCE_FILE@0..103
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..102 BLOCK_EXPR@9..102
BLOCK@9..102
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
LET_STMT@15..33 LET_STMT@15..33


@ -9,7 +9,6 @@ SOURCE_FILE@0..26
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..25 BLOCK_EXPR@9..25
BLOCK@9..25
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
EXPR_STMT@15..23 EXPR_STMT@15..23
@ -17,7 +16,6 @@ SOURCE_FILE@0..26
LOOP_KW@15..19 "loop" LOOP_KW@15..19 "loop"
WHITESPACE@19..20 " " WHITESPACE@19..20 " "
BLOCK_EXPR@20..22 BLOCK_EXPR@20..22
BLOCK@20..22
L_CURLY@20..21 "{" L_CURLY@20..21 "{"
R_CURLY@21..22 "}" R_CURLY@21..22 "}"
SEMICOLON@22..23 ";" SEMICOLON@22..23 ";"


@ -9,7 +9,6 @@ SOURCE_FILE@0..48
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..47 BLOCK_EXPR@9..47
BLOCK@9..47
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
EXPR_STMT@15..21 EXPR_STMT@15..21


@ -9,14 +9,12 @@ SOURCE_FILE@0..69
R_PAREN@7..8 ")" R_PAREN@7..8 ")"
WHITESPACE@8..9 " " WHITESPACE@8..9 " "
BLOCK_EXPR@9..68 BLOCK_EXPR@9..68
BLOCK@9..68
L_CURLY@9..10 "{" L_CURLY@9..10 "{"
WHITESPACE@10..15 "\n " WHITESPACE@10..15 "\n "
LOOP_EXPR@15..66 LOOP_EXPR@15..66
LOOP_KW@15..19 "loop" LOOP_KW@15..19 "loop"
WHITESPACE@19..20 " " WHITESPACE@19..20 " "
BLOCK_EXPR@20..66 BLOCK_EXPR@20..66
BLOCK@20..66
L_CURLY@20..21 "{" L_CURLY@20..21 "{"
WHITESPACE@21..30 "\n " WHITESPACE@21..30 "\n "
EXPR_STMT@30..39 EXPR_STMT@30..39

Some files were not shown because too many files have changed in this diff.