diff --git a/Cargo.lock b/Cargo.lock
index 0f9ac5e8c0..880b34a6fd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -759,9 +759,9 @@ dependencies = [
 
 [[package]]
 name = "itertools"
-version = "0.10.1"
+version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
 dependencies = [
  "either",
 ]
@@ -810,9 +810,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "libc"
-version = "0.2.108"
+version = "0.2.109"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8521a1b57e76b1ec69af7599e75e38e7b7fad6610f037db8c79b127201b5d119"
+checksum = "f98a04dce437184842841303488f70d0188c5f51437d2a834dc097eafa909a01"
 
 [[package]]
 name = "libloading"
@@ -1128,9 +1128,9 @@ checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.32"
+version = "1.0.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba508cc11742c0dc5c1659771673afbab7a0efab23aa17e854cbab0837ed0b43"
+checksum = "fb37d2df5df740e582f28f8560cf425f52bb267d872fe58358eadb554909f07a"
 dependencies = [
  "unicode-xid",
 ]
@@ -1227,9 +1227,9 @@ dependencies = [
 
 [[package]]
 name = "pulldown-cmark-to-cmark"
-version = "7.1.0"
+version = "7.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d85e607de0249c2b5041e38de8684b00b62a64edee60bfcd85c153031a9d658"
+checksum = "8bc14bc6b9f5881f240f9766414707ab24f972bcc3388589fe15b22fb15dc60d"
 dependencies = [
  "pulldown-cmark",
 ]
diff --git a/crates/ide_assists/src/handlers/extract_function.rs b/crates/ide_assists/src/handlers/extract_function.rs
index e2845bc58b..63b0a91ce5 100644
--- a/crates/ide_assists/src/handlers/extract_function.rs
+++ b/crates/ide_assists/src/handlers/extract_function.rs
@@ -480,7 +480,7 @@ impl FunctionBody {
             .statements()
             .map(|stmt| stmt.syntax().text_range())
             .filter(|&stmt| selected.intersect(stmt).filter(|it| !it.is_empty()).is_some())
-            .fold1(|acc, stmt| acc.cover(stmt));
+            .reduce(|acc, stmt| acc.cover(stmt));
         if let Some(tail_range) = parent
             .tail_expr()
             .map(|it| it.syntax().text_range())
diff --git a/crates/ide_assists/src/handlers/unmerge_use.rs b/crates/ide_assists/src/handlers/unmerge_use.rs
index 71a816e19f..7d73632c79 100644
--- a/crates/ide_assists/src/handlers/unmerge_use.rs
+++ b/crates/ide_assists/src/handlers/unmerge_use.rs
@@ -1,4 +1,3 @@
-use itertools::Itertools;
 use syntax::{
     ast::{self, make, HasVisibility},
     ted::{self, Position},
@@ -70,7 +69,7 @@ fn resolve_full_path(tree: &ast::UseTree) -> Option<ast::Path> {
         .filter_map(ast::UseTree::cast)
         .filter_map(|t| t.path());
 
-    let final_path = paths.fold1(|prev, next| make::path_concat(next, prev))?;
+    let final_path = paths.reduce(|prev, next| make::path_concat(next, prev))?;
     if final_path.segment().map_or(false, |it| it.self_token().is_some()) {
         final_path.qualifier()
     } else {
diff --git a/crates/ide_completion/src/completions/attribute.rs b/crates/ide_completion/src/completions/attribute.rs
index f09015efb6..d92e311915 100644
--- a/crates/ide_completion/src/completions/attribute.rs
+++ b/crates/ide_completion/src/completions/attribute.rs
@@ -30,16 +30,12 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext
         None => None,
     };
     match (name_ref, attribute.token_tree()) {
-        (Some(path), Some(token_tree)) => match path.text().as_str() {
-            "repr" => repr::complete_repr(acc, ctx, token_tree),
-            "derive" => {
-                derive::complete_derive(acc, ctx, &parse_tt_as_comma_sep_paths(token_tree)?)
-            }
-            "feature" => {
-                lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(token_tree)?, FEATURES)
-            }
+        (Some(path), Some(tt)) if tt.l_paren_token().is_some() => match path.text().as_str() {
+            "repr" => repr::complete_repr(acc, ctx, tt),
+            "derive" => derive::complete_derive(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?),
+            "feature" => lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?, FEATURES),
             "allow" | "warn" | "deny" | "forbid" => {
-                let existing_lints = parse_tt_as_comma_sep_paths(token_tree)?;
+                let existing_lints = parse_tt_as_comma_sep_paths(tt)?;
                 lint::complete_lint(acc, ctx, &existing_lints, DEFAULT_LINTS);
                 lint::complete_lint(acc, ctx, &existing_lints, CLIPPY_LINTS);
                 lint::complete_lint(acc, ctx, &existing_lints, RUSTDOC_LINTS);
@@ -49,8 +45,8 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext
             }
             _ => (),
         },
-        (None, Some(_)) => (),
-        _ => complete_new_attribute(acc, ctx, attribute),
+        (_, Some(_)) => (),
+        (_, None) => complete_new_attribute(acc, ctx, attribute),
     }
     Some(())
 }
diff --git a/crates/ide_completion/src/tests/attribute.rs b/crates/ide_completion/src/tests/attribute.rs
index c3dce61e7d..c90d4966f3 100644
--- a/crates/ide_completion/src/tests/attribute.rs
+++ b/crates/ide_completion/src/tests/attribute.rs
@@ -831,6 +831,20 @@ mod lint {
             r#"#[allow(rustdoc::bare_urls)] struct Test;"#,
         );
     }
+
+    #[test]
+    fn lint_unclosed() {
+        check_edit(
+            "deprecated",
+            r#"#[allow(dep$0 struct Test;"#,
+            r#"#[allow(deprecated struct Test;"#,
+        );
+        check_edit(
+            "bare_urls",
+            r#"#[allow(rustdoc::$0 struct Test;"#,
+            r#"#[allow(rustdoc::bare_urls struct Test;"#,
+        );
+    }
 }
 
 mod repr {
diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs
index 1b9cb7ff51..7e78b7136c 100644
--- a/crates/ide_db/src/helpers.rs
+++ b/crates/ide_db/src/helpers.rs
@@ -305,20 +305,24 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
     }
 }
 
-/// Parses the input token tree as comma separated paths.
+/// Parses the input token tree as comma separated plain paths.
 pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
-    let r_paren = input.r_paren_token()?;
-    let tokens = input
-        .syntax()
-        .children_with_tokens()
-        .skip(1)
-        .take_while(|it| it.as_token() != Some(&r_paren));
+    let r_paren = input.r_paren_token();
+    let tokens =
+        input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
+            // seeing a keyword means the attribute is unclosed so stop parsing here
+            Some(tok) if tok.kind().is_keyword() => None,
+            // don't include the right token tree parenthesis if it exists
+            tok @ Some(_) if tok == r_paren => None,
+            // only nodes that we can find are other TokenTrees, those are unexpected in this parse though
+            None => None,
+            Some(tok) => Some(tok),
+        });
     let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
-    Some(
-        input_expressions
-            .into_iter()
-            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-            .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
-            .collect::<Vec<ast::Path>>(),
-    )
+    let paths = input_expressions
+        .into_iter()
+        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
+        .collect();
+    Some(paths)
 }