Merge branch 'master' into kdelorey/complete-trait-impl

This commit is contained in:
Kevin DeLorey 2020-02-09 10:25:47 -06:00
commit a957c473fd
363 changed files with 7379 additions and 4576 deletions

View file

@ -1,4 +1,4 @@
name: CI-Release name: release
on: on:
push: push:
branches: branches:
@ -132,7 +132,9 @@ jobs:
- name: Create Release - name: Create Release
id: create_release id: create_release
uses: actions/create-release@v1 # uses: actions/create-release@v1
# https://github.com/actions/create-release/pull/32
uses: fleskesvor/create-release@1a72e235c178bf2ae6c51a8ae36febc24568c5fe
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with: with:

1472
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -10,5 +10,24 @@ debug = 0
incremental = true incremental = true
debug = 0 # set this to 1 or 2 to get more useful backtraces in debugger debug = 0 # set this to 1 or 2 to get more useful backtraces in debugger
# ideally, we would use `build-override` here, but some crates are also
# needed at run-time and we end up compiling them twice
[profile.release.package.proc-macro2]
opt-level = 0
[profile.release.package.quote]
opt-level = 0
[profile.release.package.syn]
opt-level = 0
[profile.release.package.serde_derive]
opt-level = 0
[profile.release.package.chalk-derive]
opt-level = 0
[profile.release.package.chalk-macros]
opt-level = 0
[profile.release.package.salsa-macros]
opt-level = 0
[profile.release.package.xtask]
opt-level = 0
[patch.'crates-io'] [patch.'crates-io']
# rowan = { path = "../rowan" } # rowan = { path = "../rowan" }

View file

@ -2,14 +2,13 @@
<img src="https://user-images.githubusercontent.com/1711539/72443316-5a79f280-37ae-11ea-858f-035209ece2dd.png" alt="rust-analyzer logo"> <img src="https://user-images.githubusercontent.com/1711539/72443316-5a79f280-37ae-11ea-858f-035209ece2dd.png" alt="rust-analyzer logo">
</p> </p>
Rust Analyzer is an **experimental** modular compiler frontend for the Rust rust-analyzer is an **experimental** modular compiler frontend for the Rust
language. It is a part of a larger rls-2.0 effort to create excellent IDE language. It is a part of a larger rls-2.0 effort to create excellent IDE
support for Rust. If you want to get involved, check the rls-2.0 working group support for Rust. If you want to get involved, check the rls-2.0 working group:
in the compiler-team repository:
https://github.com/rust-lang/compiler-team/tree/master/content/working-groups/rls-2.0 https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Fwg-rls-2.2E0
Work on the Rust Analyzer is sponsored by Work on rust-analyzer is sponsored by
[<img src="https://user-images.githubusercontent.com/1711539/58105231-cf306900-7bee-11e9-83d8-9f1102e59d29.png" alt="Ferrous Systems" width="300">](https://ferrous-systems.com/) [<img src="https://user-images.githubusercontent.com/1711539/58105231-cf306900-7bee-11e9-83d8-9f1102e59d29.png" alt="Ferrous Systems" width="300">](https://ferrous-systems.com/)
- [Mozilla](https://www.mozilla.org/en-US/) - [Mozilla](https://www.mozilla.org/en-US/)
@ -17,17 +16,17 @@ Work on the Rust Analyzer is sponsored by
## Language Server Quick Start ## Language Server Quick Start
Rust Analyzer is a work-in-progress, so you'll have to build it from source, and rust-analyzer is a work-in-progress, so you might encounter critical bugs. That
you might encounter critical bugs. That said, it is complete enough to provide a said, it is complete enough to provide a useful IDE experience and some people
useful IDE experience and some people use it as a daily driver. use it as a daily driver.
To build rust-analyzer, you need: To build rust-analyzer, you need:
* latest stable rust for language server itself * latest stable Rust for the language server itself
* latest stable npm and VS Code for VS Code extension * latest stable npm and VS Code for VS Code extension
To quickly install rust-analyzer with VS Code extension with standard setup To quickly install the rust-analyzer language server and VS Code extension with
(`code` and `cargo` in `$PATH`, etc), use this: standard setup (`code` and `cargo` in `$PATH`, etc), use this:
``` ```
# clone the repo # clone the repo
@ -45,8 +44,8 @@ cannot start, see [./docs/user](./docs/user).
## Documentation ## Documentation
If you want to **contribute** to rust-analyzer or just curious about how things work If you want to **contribute** to rust-analyzer or are just curious about how
under the hood, check the [./docs/dev](./docs/dev) folder. things work under the hood, check the [./docs/dev](./docs/dev) folder.
If you want to **use** rust-analyzer's language server with your editor of If you want to **use** rust-analyzer's language server with your editor of
choice, check [./docs/user](./docs/user) folder. It also contains some tips & tricks to help choice, check [./docs/user](./docs/user) folder. It also contains some tips & tricks to help

View file

@ -1,6 +1,6 @@
status = [ status = [
"Rust (ubuntu-latest)", "Rust (ubuntu-latest)",
# "Rust (windows-latest)", "Rust (windows-latest)",
"Rust (macos-latest)", "Rust (macos-latest)",
"TypeScript" "TypeScript"
] ]

View file

@ -16,6 +16,8 @@ either = "1.5"
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" } ra_text_edit = { path = "../ra_text_edit" }
ra_fmt = { path = "../ra_fmt" } ra_fmt = { path = "../ra_fmt" }
ra_prof = { path = "../ra_prof" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }
ra_ide_db = { path = "../ra_ide_db" }
hir = { path = "../ra_hir", package = "ra_hir" } hir = { path = "../ra_hir", package = "ra_hir" }
test_utils = { path = "../test_utils" } test_utils = { path = "../test_utils" }

View file

@ -1,8 +1,8 @@
//! This module defines `AssistCtx` -- the API surface that is exposed to assists. //! This module defines `AssistCtx` -- the API surface that is exposed to assists.
use either::Either; use hir::{InFile, SourceAnalyzer, SourceBinder};
use hir::{db::HirDatabase, InFile, SourceAnalyzer, SourceBinder}; use ra_db::{FileRange, SourceDatabase};
use ra_db::FileRange;
use ra_fmt::{leading_indent, reindent}; use ra_fmt::{leading_indent, reindent};
use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
algo::{self, find_covering_element, find_node_at_offset}, algo::{self, find_covering_element, find_node_at_offset},
AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextUnit, AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextUnit,
@ -10,14 +10,40 @@ use ra_syntax::{
}; };
use ra_text_edit::TextEditBuilder; use ra_text_edit::TextEditBuilder;
use crate::{AssistAction, AssistId, AssistLabel, ResolvedAssist}; use crate::{AssistAction, AssistId, AssistLabel, GroupLabel, ResolvedAssist};
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub(crate) enum Assist { pub(crate) struct Assist(pub(crate) Vec<AssistInfo>);
Unresolved { label: AssistLabel },
Resolved { assist: ResolvedAssist }, #[derive(Clone, Debug)]
pub(crate) struct AssistInfo {
pub(crate) label: AssistLabel,
pub(crate) group_label: Option<GroupLabel>,
pub(crate) action: Option<AssistAction>,
} }
impl AssistInfo {
fn new(label: AssistLabel) -> AssistInfo {
AssistInfo { label, group_label: None, action: None }
}
fn resolved(self, action: AssistAction) -> AssistInfo {
AssistInfo { action: Some(action), ..self }
}
fn with_group(self, group_label: GroupLabel) -> AssistInfo {
AssistInfo { group_label: Some(group_label), ..self }
}
pub(crate) fn into_resolved(self) -> Option<ResolvedAssist> {
let label = self.label;
let group_label = self.group_label;
self.action.map(|action| ResolvedAssist { label, group_label, action })
}
}
pub(crate) type AssistHandler = fn(AssistCtx) -> Option<Assist>;
/// `AssistCtx` allows to apply an assist or check if it could be applied. /// `AssistCtx` allows to apply an assist or check if it could be applied.
/// ///
/// Assists use a somewhat over-engineered approach, given the current needs. The /// Assists use a somewhat over-engineered approach, given the current needs. The
@ -49,14 +75,14 @@ pub(crate) enum Assist {
/// moment, because the LSP API is pretty awkward in this place, and it's much /// moment, because the LSP API is pretty awkward in this place, and it's much
/// easier to just compute the edit eagerly :-) /// easier to just compute the edit eagerly :-)
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct AssistCtx<'a, DB> { pub(crate) struct AssistCtx<'a> {
pub(crate) db: &'a DB, pub(crate) db: &'a RootDatabase,
pub(crate) frange: FileRange, pub(crate) frange: FileRange,
source_file: SourceFile, source_file: SourceFile,
should_compute_edit: bool, should_compute_edit: bool,
} }
impl<'a, DB> Clone for AssistCtx<'a, DB> { impl Clone for AssistCtx<'_> {
fn clone(&self) -> Self { fn clone(&self) -> Self {
AssistCtx { AssistCtx {
db: self.db, db: self.db,
@ -67,15 +93,10 @@ impl<'a, DB> Clone for AssistCtx<'a, DB> {
} }
} }
impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { impl<'a> AssistCtx<'a> {
pub(crate) fn with_ctx<F, T>(db: &DB, frange: FileRange, should_compute_edit: bool, f: F) -> T pub fn new(db: &RootDatabase, frange: FileRange, should_compute_edit: bool) -> AssistCtx {
where
F: FnOnce(AssistCtx<DB>) -> T,
{
let parse = db.parse(frange.file_id); let parse = db.parse(frange.file_id);
AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit }
let ctx = AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit };
f(ctx)
} }
pub(crate) fn add_assist( pub(crate) fn add_assist(
@ -84,48 +105,23 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
label: impl Into<String>, label: impl Into<String>,
f: impl FnOnce(&mut ActionBuilder), f: impl FnOnce(&mut ActionBuilder),
) -> Option<Assist> { ) -> Option<Assist> {
let label = AssistLabel { label: label.into(), id }; let label = AssistLabel::new(label.into(), id);
assert!(label.label.chars().nth(0).unwrap().is_uppercase());
let assist = if self.should_compute_edit { let mut info = AssistInfo::new(label);
if self.should_compute_edit {
let action = { let action = {
let mut edit = ActionBuilder::default(); let mut edit = ActionBuilder::default();
f(&mut edit); f(&mut edit);
edit.build() edit.build()
}; };
Assist::Resolved { assist: ResolvedAssist { label, action_data: Either::Left(action) } } info = info.resolved(action)
} else {
Assist::Unresolved { label }
}; };
Some(assist) Some(Assist(vec![info]))
} }
#[allow(dead_code)] // will be used for auto import assist with multiple actions pub(crate) fn add_assist_group(self, group_name: impl Into<String>) -> AssistGroup<'a> {
pub(crate) fn add_assist_group( AssistGroup { ctx: self, group_name: group_name.into(), assists: Vec::new() }
self,
id: AssistId,
label: impl Into<String>,
f: impl FnOnce() -> Vec<ActionBuilder>,
) -> Option<Assist> {
let label = AssistLabel { label: label.into(), id };
let assist = if self.should_compute_edit {
let actions = f();
assert!(!actions.is_empty(), "Assist cannot have no");
Assist::Resolved {
assist: ResolvedAssist {
label,
action_data: Either::Right(
actions.into_iter().map(ActionBuilder::build).collect(),
),
},
}
} else {
Assist::Unresolved { label }
};
Some(assist)
} }
pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> { pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
@ -142,7 +138,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
pub(crate) fn covering_element(&self) -> SyntaxElement { pub(crate) fn covering_element(&self) -> SyntaxElement {
find_covering_element(self.source_file.syntax(), self.frange.range) find_covering_element(self.source_file.syntax(), self.frange.range)
} }
pub(crate) fn source_binder(&self) -> SourceBinder<'a, DB> { pub(crate) fn source_binder(&self) -> SourceBinder<'a, RootDatabase> {
SourceBinder::new(self.db) SourceBinder::new(self.db)
} }
pub(crate) fn source_analyzer( pub(crate) fn source_analyzer(
@ -159,21 +155,48 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
} }
} }
pub(crate) struct AssistGroup<'a> {
ctx: AssistCtx<'a>,
group_name: String,
assists: Vec<AssistInfo>,
}
impl<'a> AssistGroup<'a> {
pub(crate) fn add_assist(
&mut self,
id: AssistId,
label: impl Into<String>,
f: impl FnOnce(&mut ActionBuilder),
) {
let label = AssistLabel::new(label.into(), id);
let mut info = AssistInfo::new(label).with_group(GroupLabel(self.group_name.clone()));
if self.ctx.should_compute_edit {
let action = {
let mut edit = ActionBuilder::default();
f(&mut edit);
edit.build()
};
info = info.resolved(action)
};
self.assists.push(info)
}
pub(crate) fn finish(self) -> Option<Assist> {
assert!(!self.assists.is_empty());
Some(Assist(self.assists))
}
}
#[derive(Default)] #[derive(Default)]
pub(crate) struct ActionBuilder { pub(crate) struct ActionBuilder {
edit: TextEditBuilder, edit: TextEditBuilder,
cursor_position: Option<TextUnit>, cursor_position: Option<TextUnit>,
target: Option<TextRange>, target: Option<TextRange>,
label: Option<String>,
} }
impl ActionBuilder { impl ActionBuilder {
#[allow(dead_code)]
/// Adds a custom label to the action, if it needs to be different from the assist label
pub(crate) fn label(&mut self, label: impl Into<String>) {
self.label = Some(label.into())
}
/// Replaces specified `range` of text with a given string. /// Replaces specified `range` of text with a given string.
pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) { pub(crate) fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
self.edit.replace(range, replace_with.into()) self.edit.replace(range, replace_with.into())
@ -232,7 +255,6 @@ impl ActionBuilder {
edit: self.edit.finish(), edit: self.edit.finish(),
cursor_position: self.cursor_position, cursor_position: self.cursor_position,
target: self.target, target: self.target,
label: self.label,
} }
} }
} }

View file

@ -5,24 +5,24 @@
mod generated; mod generated;
use ra_db::{fixture::WithFixture, FileRange}; use ra_db::FileRange;
use test_utils::{assert_eq_text, extract_range_or_offset}; use test_utils::{assert_eq_text, extract_range_or_offset};
use crate::test_db::TestDB; use crate::resolved_assists;
fn check(assist_id: &str, before: &str, after: &str) { fn check(assist_id: &str, before: &str, after: &str) {
let (selection, before) = extract_range_or_offset(before); let (selection, before) = extract_range_or_offset(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = crate::helpers::with_single_file(&before);
let frange = FileRange { file_id, range: selection.into() }; let frange = FileRange { file_id, range: selection.into() };
let assist = crate::assists(&db, frange) let assist = resolved_assists(&db, frange)
.into_iter() .into_iter()
.find(|assist| assist.label.id.0 == assist_id) .find(|assist| assist.label.id.0 == assist_id)
.unwrap_or_else(|| { .unwrap_or_else(|| {
panic!( panic!(
"\n\nAssist is not applicable: {}\nAvailable assists: {}", "\n\nAssist is not applicable: {}\nAvailable assists: {}",
assist_id, assist_id,
crate::assists(&db, frange) resolved_assists(&db, frange)
.into_iter() .into_iter()
.map(|assist| assist.label.id.0) .map(|assist| assist.label.id.0)
.collect::<Vec<_>>() .collect::<Vec<_>>()
@ -30,6 +30,6 @@ fn check(assist_id: &str, before: &str, after: &str) {
) )
}); });
let actual = assist.get_first_action().edit.apply(&before); let actual = assist.action.edit.apply(&before);
assert_eq_text!(after, &actual); assert_eq_text!(after, &actual);
} }

View file

@ -1,4 +1,4 @@
//! Generated file, do not edit by hand, see `crate/ra_tools/src/codegen` //! Generated file, do not edit by hand, see `xtask/src/codegen`
use super::check; use super::check;
@ -160,21 +160,6 @@ impl Trait<u32> for () {
) )
} }
#[test]
fn doctest_add_import() {
check(
"add_import",
r#####"
fn process(map: std::collections::<|>HashMap<String, String>) {}
"#####,
r#####"
use std::collections::HashMap;
fn process(map: HashMap<String, String>) {}
"#####,
)
}
#[test] #[test]
fn doctest_add_new() { fn doctest_add_new() {
check( check(
@ -214,6 +199,27 @@ fn main() {
) )
} }
#[test]
fn doctest_auto_import() {
check(
"auto_import",
r#####"
fn main() {
let map = HashMap<|>::new();
}
pub mod std { pub mod collections { pub struct HashMap { } } }
"#####,
r#####"
use std::collections::HashMap;
fn main() {
let map = HashMap::new();
}
pub mod std { pub mod collections { pub struct HashMap { } } }
"#####,
)
}
#[test] #[test]
fn doctest_change_visibility() { fn doctest_change_visibility() {
check( check(
@ -570,6 +576,21 @@ fn handle(action: Action) {
) )
} }
#[test]
fn doctest_replace_qualified_name_with_use() {
check(
"replace_qualified_name_with_use",
r#####"
fn process(map: std::collections::<|>HashMap<String, String>) {}
"#####,
r#####"
use std::collections::HashMap;
fn process(map: HashMap<String, String>) {}
"#####,
)
}
#[test] #[test]
fn doctest_split_import() { fn doctest_split_import() {
check( check(

View file

@ -1,7 +1,7 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
use hir::db::HirDatabase;
use join_to_string::join; use join_to_string::join;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
@ -29,7 +29,7 @@ const DERIVE_TRAIT: &str = "derive";
// //
// } // }
// ``` // ```
pub(crate) fn add_custom_impl(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
let input = ctx.find_node_at_offset::<ast::AttrInput>()?; let input = ctx.find_node_at_offset::<ast::AttrInput>()?;
let attr = input.syntax().parent().and_then(ast::Attr::cast)?; let attr = input.syntax().parent().and_then(ast::Attr::cast)?;

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, AttrsOwner}, ast::{self, AstNode, AttrsOwner},
SyntaxKind::{COMMENT, WHITESPACE}, SyntaxKind::{COMMENT, WHITESPACE},
@ -25,7 +24,7 @@ use crate::{Assist, AssistCtx, AssistId};
// y: u32, // y: u32,
// } // }
// ``` // ```
pub(crate) fn add_derive(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_derive(ctx: AssistCtx) -> Option<Assist> {
let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?; let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?;
let node_start = derive_insertion_offset(&nominal)?; let node_start = derive_insertion_offset(&nominal)?;
ctx.add_assist(AssistId("add_derive"), "Add `#[derive]`", |edit| { ctx.add_assist(AssistId("add_derive"), "Add `#[derive]`", |edit| {

View file

@ -1,7 +1,7 @@
use hir::{db::HirDatabase, HirDisplay}; use hir::HirDisplay;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, LetStmt, NameOwner}, ast::{self, AstNode, LetStmt, NameOwner, TypeAscriptionOwner},
TextRange, T, TextRange,
}; };
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
@ -21,7 +21,7 @@ use crate::{Assist, AssistCtx, AssistId};
// let x: i32 = 92; // let x: i32 = 92;
// } // }
// ``` // ```
pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_explicit_type(ctx: AssistCtx) -> Option<Assist> {
let stmt = ctx.find_node_at_offset::<LetStmt>()?; let stmt = ctx.find_node_at_offset::<LetStmt>()?;
let expr = stmt.initializer()?; let expr = stmt.initializer()?;
let pat = stmt.pat()?; let pat = stmt.pat()?;
@ -34,18 +34,22 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi
// The binding must have a name // The binding must have a name
let name = pat.name()?; let name = pat.name()?;
let name_range = name.syntax().text_range(); let name_range = name.syntax().text_range();
// Assist should only be applicable if cursor is between 'let' and '='
let stmt_range = stmt.syntax().text_range(); let stmt_range = stmt.syntax().text_range();
let eq_range = stmt.eq_token()?.text_range(); let eq_range = stmt.eq_token()?.text_range();
// Assist should only be applicable if cursor is between 'let' and '='
let let_range = TextRange::from_to(stmt_range.start(), eq_range.start()); let let_range = TextRange::from_to(stmt_range.start(), eq_range.start());
let cursor_in_range = ctx.frange.range.is_subrange(&let_range); let cursor_in_range = ctx.frange.range.is_subrange(&let_range);
if !cursor_in_range { if !cursor_in_range {
return None; return None;
} }
// Assist not applicable if the type has already been specified // Assist not applicable if the type has already been specified
if stmt.syntax().children_with_tokens().any(|child| child.kind() == T![:]) { // and it has no placeholders
let ascribed_ty = stmt.ascribed_type();
if let Some(ref ty) = ascribed_ty {
if ty.syntax().descendants().find_map(ast::PlaceholderType::cast).is_none() {
return None; return None;
} }
}
// Infer type // Infer type
let db = ctx.db; let db = ctx.db;
let analyzer = ctx.source_analyzer(stmt.syntax(), None); let analyzer = ctx.source_analyzer(stmt.syntax(), None);
@ -60,7 +64,11 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi
format!("Insert explicit type '{}'", ty.display(db)), format!("Insert explicit type '{}'", ty.display(db)),
|edit| { |edit| {
edit.target(pat_range); edit.target(pat_range);
if let Some(ascribed_ty) = ascribed_ty {
edit.replace(ascribed_ty.syntax().text_range(), format!("{}", ty.display(db)));
} else {
edit.insert(name_range.end(), format!(": {}", ty.display(db))); edit.insert(name_range.end(), format!(": {}", ty.display(db)));
}
}, },
) )
} }
@ -85,6 +93,40 @@ mod tests {
); );
} }
#[test]
fn add_explicit_type_works_for_underscore() {
check_assist(
add_explicit_type,
"fn f() { let a<|>: _ = 1; }",
"fn f() { let a<|>: i32 = 1; }",
);
}
#[test]
fn add_explicit_type_works_for_nested_underscore() {
check_assist(
add_explicit_type,
r#"
enum Option<T> {
Some(T),
None
}
fn f() {
let a<|>: Option<_> = Option::Some(1);
}"#,
r#"
enum Option<T> {
Some(T),
None
}
fn f() {
let a<|>: Option<i32> = Option::Some(1);
}"#,
);
}
#[test] #[test]
fn add_explicit_type_works_for_macro_call() { fn add_explicit_type_works_for_macro_call() {
check_assist( check_assist(

View file

@ -1,5 +1,5 @@
use format_buf::format; use format_buf::format;
use hir::db::HirDatabase;
use join_to_string::join; use join_to_string::join;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, NameOwner, TypeParamsOwner}, ast::{self, AstNode, NameOwner, TypeParamsOwner},
@ -27,7 +27,7 @@ use crate::{Assist, AssistCtx, AssistId};
// //
// } // }
// ``` // ```
pub(crate) fn add_impl(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_impl(ctx: AssistCtx) -> Option<Assist> {
let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?; let nominal = ctx.find_node_at_offset::<ast::NominalDef>()?;
let name = nominal.name()?; let name = nominal.name()?;
ctx.add_assist(AssistId("add_impl"), format!("Implement {}", name.text().as_str()), |edit| { ctx.add_assist(AssistId("add_impl"), format!("Implement {}", name.text().as_str()), |edit| {

View file

@ -43,7 +43,7 @@ enum AddMissingImplMembersMode {
// //
// } // }
// ``` // ```
pub(crate) fn add_missing_impl_members(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_missing_impl_members(ctx: AssistCtx) -> Option<Assist> {
add_missing_impl_members_inner( add_missing_impl_members_inner(
ctx, ctx,
AddMissingImplMembersMode::NoDefaultMethods, AddMissingImplMembersMode::NoDefaultMethods,
@ -84,7 +84,7 @@ pub(crate) fn add_missing_impl_members(ctx: AssistCtx<impl HirDatabase>) -> Opti
// //
// } // }
// ``` // ```
pub(crate) fn add_missing_default_members(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_missing_default_members(ctx: AssistCtx) -> Option<Assist> {
add_missing_impl_members_inner( add_missing_impl_members_inner(
ctx, ctx,
AddMissingImplMembersMode::DefaultMethodsOnly, AddMissingImplMembersMode::DefaultMethodsOnly,
@ -94,11 +94,12 @@ pub(crate) fn add_missing_default_members(ctx: AssistCtx<impl HirDatabase>) -> O
} }
fn add_missing_impl_members_inner( fn add_missing_impl_members_inner(
ctx: AssistCtx<impl HirDatabase>, ctx: AssistCtx,
mode: AddMissingImplMembersMode, mode: AddMissingImplMembersMode,
assist_id: &'static str, assist_id: &'static str,
label: &'static str, label: &'static str,
) -> Option<Assist> { ) -> Option<Assist> {
let _p = ra_prof::profile("add_missing_impl_members_inner");
let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?; let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?;
let impl_item_list = impl_node.item_list()?; let impl_item_list = impl_node.item_list()?;

View file

@ -1,5 +1,5 @@
use format_buf::format; use format_buf::format;
use hir::{db::HirDatabase, InFile}; use hir::{Adt, InFile};
use join_to_string::join; use join_to_string::join;
use ra_syntax::{ use ra_syntax::{
ast::{ ast::{
@ -31,7 +31,7 @@ use crate::{Assist, AssistCtx, AssistId};
// } // }
// //
// ``` // ```
pub(crate) fn add_new(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_new(ctx: AssistCtx) -> Option<Assist> {
let strukt = ctx.find_node_at_offset::<ast::StructDef>()?; let strukt = ctx.find_node_at_offset::<ast::StructDef>()?;
// We want to only apply this to non-union structs with named fields // We want to only apply this to non-union structs with named fields
@ -128,26 +128,29 @@ fn generate_impl_text(strukt: &ast::StructDef, code: &str) -> String {
// //
// FIXME: change the new fn checking to a more semantic approach when that's more // FIXME: change the new fn checking to a more semantic approach when that's more
// viable (e.g. we process proc macros, etc) // viable (e.g. we process proc macros, etc)
fn find_struct_impl( fn find_struct_impl(ctx: &AssistCtx, strukt: &ast::StructDef) -> Option<Option<ast::ImplBlock>> {
ctx: &AssistCtx<impl HirDatabase>,
strukt: &ast::StructDef,
) -> Option<Option<ast::ImplBlock>> {
let db = ctx.db; let db = ctx.db;
let module = strukt.syntax().ancestors().find(|node| { let module = strukt.syntax().ancestors().find(|node| {
ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind()) ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind())
})?; })?;
let mut sb = ctx.source_binder(); let mut sb = ctx.source_binder();
let struct_ty = { let struct_def = {
let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() }; let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
sb.to_def(src)?.ty(db) sb.to_def(src)?
}; };
let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| { let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| {
let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() }; let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
let blk = sb.to_def(src)?; let blk = sb.to_def(src)?;
let same_ty = blk.target_ty(db) == struct_ty; // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
// (we currently use the wrong type parameter)
// also we wouldn't want to use e.g. `impl S<u32>`
let same_ty = match blk.target_ty(db).as_adt() {
Some(def) => def == Adt::Struct(struct_def),
None => false,
};
let not_trait_impl = blk.target_trait(db).is_none(); let not_trait_impl = blk.target_trait(db).is_none();
if !(same_ty && not_trait_impl) { if !(same_ty && not_trait_impl) {

View file

@ -1,8 +1,6 @@
use super::invert_if::invert_boolean_expression;
use hir::db::HirDatabase;
use ra_syntax::ast::{self, AstNode}; use ra_syntax::ast::{self, AstNode};
use crate::{Assist, AssistCtx, AssistId}; use crate::{utils::invert_boolean_expression, Assist, AssistCtx, AssistId};
// Assist: apply_demorgan // Assist: apply_demorgan
// //
@ -23,7 +21,7 @@ use crate::{Assist, AssistCtx, AssistId};
// if !(x == 4 && y) {} // if !(x == 4 && y) {}
// } // }
// ``` // ```
pub(crate) fn apply_demorgan(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn apply_demorgan(ctx: AssistCtx) -> Option<Assist> {
let expr = ctx.find_node_at_offset::<ast::BinExpr>()?; let expr = ctx.find_node_at_offset::<ast::BinExpr>()?;
let op = expr.op_kind()?; let op = expr.op_kind()?;
let op_range = expr.op_token()?.text_range(); let op_range = expr.op_token()?.text_range();
@ -32,12 +30,14 @@ pub(crate) fn apply_demorgan(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist>
if !cursor_in_range { if !cursor_in_range {
return None; return None;
} }
let lhs = expr.lhs()?; let lhs = expr.lhs()?;
let lhs_range = lhs.syntax().text_range(); let lhs_range = lhs.syntax().text_range();
let not_lhs = invert_boolean_expression(lhs);
let rhs = expr.rhs()?; let rhs = expr.rhs()?;
let rhs_range = rhs.syntax().text_range(); let rhs_range = rhs.syntax().text_range();
let not_lhs = invert_boolean_expression(&lhs)?; let not_rhs = invert_boolean_expression(rhs);
let not_rhs = invert_boolean_expression(&rhs)?;
ctx.add_assist(AssistId("apply_demorgan"), "Apply De Morgan's law", |edit| { ctx.add_assist(AssistId("apply_demorgan"), "Apply De Morgan's law", |edit| {
edit.target(op_range); edit.target(op_range);
@ -78,12 +78,12 @@ mod tests {
} }
#[test] #[test]
fn demorgan_doesnt_apply_with_cursor_not_on_op() { fn demorgan_general_case() {
check_assist_not_applicable(apply_demorgan, "fn f() { <|> !x || !x }") check_assist(apply_demorgan, "fn f() { x ||<|> x }", "fn f() { !(!x &&<|> !x) }")
} }
#[test] #[test]
fn demorgan_doesnt_apply_when_operands_arent_negated_already() { fn demorgan_doesnt_apply_with_cursor_not_on_op() {
check_assist_not_applicable(apply_demorgan, "fn f() { x ||<|> x }") check_assist_not_applicable(apply_demorgan, "fn f() { <|> !x || !x }")
} }
} }

View file

@ -0,0 +1,293 @@
use ra_ide_db::imports_locator::ImportsLocator;
use ra_syntax::ast::{self, AstNode};
use crate::{
assist_ctx::{Assist, AssistCtx},
insert_use_statement, AssistId,
};
use std::collections::BTreeSet;
// Assist: auto_import
//
// If the name is unresolved, provides all possible imports for it.
//
// ```
// fn main() {
// let map = HashMap<|>::new();
// }
// # pub mod std { pub mod collections { pub struct HashMap { } } }
// ```
// ->
// ```
// use std::collections::HashMap;
//
// fn main() {
// let map = HashMap::new();
// }
// # pub mod std { pub mod collections { pub struct HashMap { } } }
// ```
pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
let path_under_caret: ast::Path = ctx.find_node_at_offset()?;
if path_under_caret.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
return None;
}
let module = path_under_caret.syntax().ancestors().find_map(ast::Module::cast);
let position = match module.and_then(|it| it.item_list()) {
Some(item_list) => item_list.syntax().clone(),
None => {
let current_file =
path_under_caret.syntax().ancestors().find_map(ast::SourceFile::cast)?;
current_file.syntax().clone()
}
};
let source_analyzer = ctx.source_analyzer(&position, None);
let module_with_name_to_import = source_analyzer.module()?;
let name_ref_to_import =
path_under_caret.syntax().descendants().find_map(ast::NameRef::cast)?;
if source_analyzer
.resolve_path(ctx.db, &name_ref_to_import.syntax().ancestors().find_map(ast::Path::cast)?)
.is_some()
{
return None;
}
let name_to_import = name_ref_to_import.syntax().to_string();
let proposed_imports = ImportsLocator::new(ctx.db)
.find_imports(&name_to_import)
.into_iter()
.filter_map(|module_def| module_with_name_to_import.find_use_path(ctx.db, module_def))
.filter(|use_path| !use_path.segments.is_empty())
.take(20)
.collect::<BTreeSet<_>>();
if proposed_imports.is_empty() {
return None;
}
let mut group = ctx.add_assist_group(format!("Import {}", name_to_import));
for import in proposed_imports {
group.add_assist(AssistId("auto_import"), format!("Import `{}`", &import), |edit| {
edit.target(path_under_caret.syntax().text_range());
insert_use_statement(
&position,
path_under_caret.syntax(),
&import,
edit.text_edit_builder(),
);
});
}
group.finish()
}
#[cfg(test)]
// Unit tests for the `auto_import` assist: the applicable cases (plain
// structs, functions, multiple candidate modules) and the cases where
// the assist must stay silent. `<|>` in the fixtures marks the cursor.
mod tests {
use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
use super::*;
// A single unresolved name with exactly one candidate module: the
// `use` statement is inserted at the top of the file.
#[test]
fn applicable_when_found_an_import() {
check_assist(
auto_import,
r"
<|>PubStruct
pub mod PubMod {
pub struct PubStruct;
}
",
r"
<|>use PubMod::PubStruct;
PubStruct
pub mod PubMod {
pub struct PubStruct;
}
",
);
}
// A new import from an already-used module is merged into the existing
// `use` tree rather than added as a separate statement.
#[test]
fn auto_imports_are_merged() {
check_assist(
auto_import,
r"
use PubMod::PubStruct1;
struct Test {
test: Pub<|>Struct2<u8>,
}
pub mod PubMod {
pub struct PubStruct1;
pub struct PubStruct2<T> {
_t: T,
}
}
",
r"
use PubMod::{PubStruct2, PubStruct1};
struct Test {
test: Pub<|>Struct2<u8>,
}
pub mod PubMod {
pub struct PubStruct1;
pub struct PubStruct2<T> {
_t: T,
}
}
",
);
}
// Several modules export the same name; `check_assist` applies the
// first proposed assist, here the `PubMod1` candidate.
#[test]
fn applicable_when_found_multiple_imports() {
check_assist(
auto_import,
r"
PubSt<|>ruct
pub mod PubMod1 {
pub struct PubStruct;
}
pub mod PubMod2 {
pub struct PubStruct;
}
pub mod PubMod3 {
pub struct PubStruct;
}
",
r"
use PubMod1::PubStruct;
PubSt<|>ruct
pub mod PubMod1 {
pub struct PubStruct;
}
pub mod PubMod2 {
pub struct PubStruct;
}
pub mod PubMod3 {
pub struct PubStruct;
}
",
);
}
// The name already resolves via an existing import, so the assist
// must not trigger.
#[test]
fn not_applicable_for_already_imported_types() {
check_assist_not_applicable(
auto_import,
r"
use PubMod::PubStruct;
PubStruct<|>
pub mod PubMod {
pub struct PubStruct;
}
",
);
}
// Private items are not reachable, so no import can be proposed.
#[test]
fn not_applicable_for_types_with_private_paths() {
check_assist_not_applicable(
auto_import,
r"
PrivateStruct<|>
pub mod PubMod {
struct PrivateStruct;
}
",
);
}
// No item with the given name exists anywhere: nothing to import.
#[test]
fn not_applicable_when_no_imports_found() {
check_assist_not_applicable(
auto_import,
"
PubStruct<|>",
);
}
// The cursor is inside a `use` item; the assist must stay silent there.
#[test]
fn not_applicable_in_import_statements() {
check_assist_not_applicable(
auto_import,
r"
use PubStruct<|>;
pub mod PubMod {
pub struct PubStruct;
}",
);
}
// The assist works for free functions, not only for types.
#[test]
fn function_import() {
check_assist(
auto_import,
r"
test_function<|>
pub mod PubMod {
pub fn test_function() {};
}
",
r"
use PubMod::test_function;
test_function<|>
pub mod PubMod {
pub fn test_function() {};
}
",
);
}
// The assist's reported target range covers exactly the unresolved
// path under the cursor.
#[test]
fn auto_import_target() {
check_assist_target(
auto_import,
r"
struct AssistInfo {
group_label: Option<<|>GroupLabel>,
}
mod m { pub struct GroupLabel; }
",
"GroupLabel",
)
}
// The path's first segment already resolves through an import, so the
// whole path is considered resolvable and no assist is offered.
#[test]
fn not_applicable_when_path_start_is_imported() {
check_assist_not_applicable(
auto_import,
r"
pub mod mod1 {
pub mod mod2 {
pub mod mod3 {
pub struct TestStruct;
}
}
}
use mod1::mod2;
fn main() {
mod2::mod3::TestStruct<|>
}
",
);
}
}

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, NameOwner, VisibilityOwner}, ast::{self, NameOwner, VisibilityOwner},
AstNode, AstNode,
@ -22,14 +21,14 @@ use crate::{Assist, AssistCtx, AssistId};
// ``` // ```
// pub(crate) fn frobnicate() {} // pub(crate) fn frobnicate() {}
// ``` // ```
pub(crate) fn change_visibility(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn change_visibility(ctx: AssistCtx) -> Option<Assist> {
if let Some(vis) = ctx.find_node_at_offset::<ast::Visibility>() { if let Some(vis) = ctx.find_node_at_offset::<ast::Visibility>() {
return change_vis(ctx, vis); return change_vis(ctx, vis);
} }
add_vis(ctx) add_vis(ctx)
} }
fn add_vis(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { fn add_vis(ctx: AssistCtx) -> Option<Assist> {
let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() { let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() {
T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true, T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true,
_ => false, _ => false,
@ -75,7 +74,7 @@ fn vis_offset(node: &SyntaxNode) -> TextUnit {
.unwrap_or_else(|| node.text_range().start()) .unwrap_or_else(|| node.text_range().start())
} }
fn change_vis(ctx: AssistCtx<impl HirDatabase>, vis: ast::Visibility) -> Option<Assist> { fn change_vis(ctx: AssistCtx, vis: ast::Visibility) -> Option<Assist> {
if vis.syntax().text() == "pub" { if vis.syntax().text() == "pub" {
return ctx.add_assist( return ctx.add_assist(
AssistId("change_visibility"), AssistId("change_visibility"),

View file

@ -1,6 +1,5 @@
use std::{iter::once, ops::RangeInclusive}; use std::{iter::once, ops::RangeInclusive};
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
algo::replace_children, algo::replace_children,
ast::{self, edit::IndentLevel, make, Block, Pat::TupleStructPat}, ast::{self, edit::IndentLevel, make, Block, Pat::TupleStructPat},
@ -11,6 +10,7 @@ use ra_syntax::{
use crate::{ use crate::{
assist_ctx::{Assist, AssistCtx}, assist_ctx::{Assist, AssistCtx},
utils::invert_boolean_expression,
AssistId, AssistId,
}; };
@ -36,7 +36,7 @@ use crate::{
// bar(); // bar();
// } // }
// ``` // ```
pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn convert_to_guarded_return(ctx: AssistCtx) -> Option<Assist> {
let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
if if_expr.else_branch().is_some() { if if_expr.else_branch().is_some() {
return None; return None;
@ -100,9 +100,13 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx<impl HirDatabase>) -> Opt
let new_block = match if_let_pat { let new_block = match if_let_pat {
None => { None => {
// If. // If.
let early_expression = &(early_expression.syntax().to_string() + ";"); let new_expr = {
let new_expr = if_indent_level let then_branch =
.increase_indent(make::if_expression(&cond_expr, early_expression)); make::block_expr(once(make::expr_stmt(early_expression).into()), None);
let cond = invert_boolean_expression(cond_expr);
let e = make::expr_if(cond, then_branch);
if_indent_level.increase_indent(e)
};
replace(new_expr.syntax(), &then_block, &parent_block, &if_expr) replace(new_expr.syntax(), &then_block, &parent_block, &if_expr)
} }
Some((path, bound_ident)) => { Some((path, bound_ident)) => {

View file

@ -31,7 +31,7 @@ use crate::{Assist, AssistCtx, AssistId};
// } // }
// } // }
// ``` // ```
pub(crate) fn fill_match_arms(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> {
let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?; let match_expr = ctx.find_node_at_offset::<ast::MatchExpr>()?;
let match_arm_list = match_expr.match_arm_list()?; let match_arm_list = match_expr.match_arm_list()?;

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::ast::{AstNode, BinExpr, BinOp}; use ra_syntax::ast::{AstNode, BinExpr, BinOp};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
@ -18,7 +17,7 @@ use crate::{Assist, AssistCtx, AssistId};
// let _ = 2 + 90; // let _ = 2 + 90;
// } // }
// ``` // ```
pub(crate) fn flip_binexpr(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn flip_binexpr(ctx: AssistCtx) -> Option<Assist> {
let expr = ctx.find_node_at_offset::<BinExpr>()?; let expr = ctx.find_node_at_offset::<BinExpr>()?;
let lhs = expr.lhs()?.syntax().clone(); let lhs = expr.lhs()?.syntax().clone();
let rhs = expr.rhs()?.syntax().clone(); let rhs = expr.rhs()?.syntax().clone();

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{algo::non_trivia_sibling, Direction, T}; use ra_syntax::{algo::non_trivia_sibling, Direction, T};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
@ -18,7 +17,7 @@ use crate::{Assist, AssistCtx, AssistId};
// ((3, 4), (1, 2)); // ((3, 4), (1, 2));
// } // }
// ``` // ```
pub(crate) fn flip_comma(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn flip_comma(ctx: AssistCtx) -> Option<Assist> {
let comma = ctx.find_token_at_offset(T![,])?; let comma = ctx.find_token_at_offset(T![,])?;
let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
algo::non_trivia_sibling, algo::non_trivia_sibling,
ast::{self, AstNode}, ast::{self, AstNode},
@ -18,7 +17,7 @@ use crate::{Assist, AssistCtx, AssistId};
// ``` // ```
// fn foo<T: Copy + Clone>() { } // fn foo<T: Copy + Clone>() { }
// ``` // ```
pub(crate) fn flip_trait_bound(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn flip_trait_bound(ctx: AssistCtx) -> Option<Assist> {
// We want to replicate the behavior of `flip_binexpr` by only suggesting // We want to replicate the behavior of `flip_binexpr` by only suggesting
// the assist when the cursor is on a `+` // the assist when the cursor is on a `+`
let plus = ctx.find_token_at_offset(T![+])?; let plus = ctx.find_token_at_offset(T![+])?;

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode, AstToken}, ast::{self, AstNode, AstToken},
TextRange, TextRange,
@ -23,7 +22,7 @@ use crate::{Assist, AssistCtx, AssistId};
// (1 + 2) * 4; // (1 + 2) * 4;
// } // }
// ``` // ```
pub(crate) fn inline_local_variable(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
let let_stmt = ctx.find_node_at_offset::<ast::LetStmt>()?; let let_stmt = ctx.find_node_at_offset::<ast::LetStmt>()?;
let bind_pat = match let_stmt.pat()? { let bind_pat = match let_stmt.pat()? {
ast::Pat::BindPat(pat) => pat, ast::Pat::BindPat(pat) => pat,
@ -47,6 +46,9 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx<impl HirDatabase>) -> Option<
}; };
let analyzer = ctx.source_analyzer(bind_pat.syntax(), None); let analyzer = ctx.source_analyzer(bind_pat.syntax(), None);
let refs = analyzer.find_all_refs(&bind_pat); let refs = analyzer.find_all_refs(&bind_pat);
if refs.is_empty() {
return None;
};
let mut wrap_in_parens = vec![true; refs.len()]; let mut wrap_in_parens = vec![true; refs.len()];
@ -645,4 +647,16 @@ fn foo() {
}", }",
); );
} }
#[test]
fn test_not_applicable_if_variable_unused() {
check_assist_not_applicable(
inline_local_variable,
"
fn foo() {
let <|>a = 0;
}
",
)
}
} }

View file

@ -1,5 +1,4 @@
use format_buf::format; use format_buf::format;
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
SyntaxKind::{ SyntaxKind::{
@ -28,7 +27,7 @@ use crate::{Assist, AssistCtx, AssistId};
// var_name * 4; // var_name * 4;
// } // }
// ``` // ```
pub(crate) fn introduce_variable(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn introduce_variable(ctx: AssistCtx) -> Option<Assist> {
if ctx.frange.range.is_empty() { if ctx.frange.range.is_empty() {
return None; return None;
} }

View file

@ -1,8 +1,7 @@
use hir::db::HirDatabase;
use ra_syntax::ast::{self, AstNode}; use ra_syntax::ast::{self, AstNode};
use ra_syntax::T; use ra_syntax::T;
use crate::{Assist, AssistCtx, AssistId}; use crate::{utils::invert_boolean_expression, Assist, AssistCtx, AssistId};
// Assist: invert_if // Assist: invert_if
// //
@ -23,7 +22,7 @@ use crate::{Assist, AssistCtx, AssistId};
// } // }
// ``` // ```
pub(crate) fn invert_if(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn invert_if(ctx: AssistCtx) -> Option<Assist> {
let if_keyword = ctx.find_token_at_offset(T![if])?; let if_keyword = ctx.find_token_at_offset(T![if])?;
let expr = ast::IfExpr::cast(if_keyword.parent())?; let expr = ast::IfExpr::cast(if_keyword.parent())?;
let if_range = if_keyword.text_range(); let if_range = if_keyword.text_range();
@ -36,8 +35,8 @@ pub(crate) fn invert_if(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
let then_node = expr.then_branch()?.syntax().clone(); let then_node = expr.then_branch()?.syntax().clone();
if let ast::ElseBranch::Block(else_block) = expr.else_branch()? { if let ast::ElseBranch::Block(else_block) = expr.else_branch()? {
let flip_cond = invert_boolean_expression(&cond)?;
let cond_range = cond.syntax().text_range(); let cond_range = cond.syntax().text_range();
let flip_cond = invert_boolean_expression(cond);
let else_node = else_block.syntax(); let else_node = else_block.syntax();
let else_range = else_node.text_range(); let else_range = else_node.text_range();
let then_range = then_node.text_range(); let then_range = then_node.text_range();
@ -52,20 +51,6 @@ pub(crate) fn invert_if(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
None None
} }
pub(crate) fn invert_boolean_expression(expr: &ast::Expr) -> Option<ast::Expr> {
match expr {
ast::Expr::BinExpr(bin) => match bin.op_kind()? {
ast::BinOp::NegatedEqualityTest => bin.replace_op(T![==]).map(|it| it.into()),
_ => None,
},
ast::Expr::PrefixExpr(pe) => match pe.op_kind()? {
ast::PrefixOp::Not => pe.expr(),
_ => None,
},
_ => None,
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -91,12 +76,16 @@ mod tests {
} }
#[test] #[test]
fn invert_if_doesnt_apply_with_cursor_not_on_if() { fn invert_if_general_case() {
check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }") check_assist(
invert_if,
"fn f() { i<|>f cond { 3 * 2 } else { 1 } }",
"fn f() { i<|>f !cond { 1 } else { 3 * 2 } }",
)
} }
#[test] #[test]
fn invert_if_doesnt_apply_without_negated() { fn invert_if_doesnt_apply_with_cursor_not_on_if() {
check_assist_not_applicable(invert_if, "fn f() { i<|>f cond { 3 * 2 } else { 1 } }") check_assist_not_applicable(invert_if, "fn f() { if !<|>cond { 3 * 2 } else { 1 } }")
} }
} }

View file

@ -1,6 +1,11 @@
use crate::{Assist, AssistCtx, AssistId, TextRange, TextUnit}; use std::iter::successors;
use hir::db::HirDatabase;
use ra_syntax::ast::{AstNode, MatchArm}; use ra_syntax::{
ast::{self, AstNode},
Direction, TextUnit,
};
use crate::{Assist, AssistCtx, AssistId, TextRange};
// Assist: merge_match_arms // Assist: merge_match_arms
// //
@ -26,61 +31,79 @@ use ra_syntax::ast::{AstNode, MatchArm};
// } // }
// } // }
// ``` // ```
pub(crate) fn merge_match_arms(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn merge_match_arms(ctx: AssistCtx) -> Option<Assist> {
let current_arm = ctx.find_node_at_offset::<MatchArm>()?; let current_arm = ctx.find_node_at_offset::<ast::MatchArm>()?;
// We check if the following match arm matches this one. We could, but don't,
// compare to the previous match arm as well.
let next = current_arm.syntax().next_sibling();
let next_arm = MatchArm::cast(next?)?;
// Don't try to handle arms with guards for now - can add support for this later // Don't try to handle arms with guards for now - can add support for this later
if current_arm.guard().is_some() || next_arm.guard().is_some() { if current_arm.guard().is_some() {
return None; return None;
} }
let current_expr = current_arm.expr()?; let current_expr = current_arm.expr()?;
let next_expr = next_arm.expr()?; let current_text_range = current_arm.syntax().text_range();
// Check for match arm equality by comparing lengths and then string contents enum CursorPos {
if current_expr.syntax().text_range().len() != next_expr.syntax().text_range().len() { InExpr(TextUnit),
InPat(TextUnit),
}
let cursor_pos = ctx.frange.range.start();
let cursor_pos = if current_expr.syntax().text_range().contains(cursor_pos) {
CursorPos::InExpr(current_text_range.end() - cursor_pos)
} else {
CursorPos::InPat(cursor_pos)
};
// We check if the following match arms match this one. We could, but don't,
// compare to the previous match arm as well.
let arms_to_merge = successors(Some(current_arm), next_arm)
.take_while(|arm| {
if arm.guard().is_some() {
return false;
}
match arm.expr() {
Some(expr) => expr.syntax().text() == current_expr.syntax().text(),
None => false,
}
})
.collect::<Vec<_>>();
if arms_to_merge.len() <= 1 {
return None; return None;
} }
if current_expr.syntax().text() != next_expr.syntax().text() {
return None;
}
let cursor_to_end = current_arm.syntax().text_range().end() - ctx.frange.range.start();
ctx.add_assist(AssistId("merge_match_arms"), "Merge match arms", |edit| { ctx.add_assist(AssistId("merge_match_arms"), "Merge match arms", |edit| {
fn contains_placeholder(a: &MatchArm) -> bool { let pats = if arms_to_merge.iter().any(contains_placeholder) {
"_".into()
} else {
arms_to_merge
.iter()
.flat_map(ast::MatchArm::pats)
.map(|x| x.syntax().to_string())
.collect::<Vec<String>>()
.join(" | ")
};
let arm = format!("{} => {}", pats, current_expr.syntax().text());
let start = arms_to_merge.first().unwrap().syntax().text_range().start();
let end = arms_to_merge.last().unwrap().syntax().text_range().end();
edit.target(current_text_range);
edit.set_cursor(match cursor_pos {
CursorPos::InExpr(back_offset) => start + TextUnit::from_usize(arm.len()) - back_offset,
CursorPos::InPat(offset) => offset,
});
edit.replace(TextRange::from_to(start, end), arm);
})
}
fn contains_placeholder(a: &ast::MatchArm) -> bool {
a.pats().any(|x| match x { a.pats().any(|x| match x {
ra_syntax::ast::Pat::PlaceholderPat(..) => true, ra_syntax::ast::Pat::PlaceholderPat(..) => true,
_ => false, _ => false,
}) })
} }
let pats = if contains_placeholder(&current_arm) || contains_placeholder(&next_arm) { fn next_arm(arm: &ast::MatchArm) -> Option<ast::MatchArm> {
"_".into() arm.syntax().siblings(Direction::Next).skip(1).find_map(ast::MatchArm::cast)
} else {
let ps: Vec<String> = current_arm
.pats()
.map(|x| x.syntax().to_string())
.chain(next_arm.pats().map(|x| x.syntax().to_string()))
.collect();
ps.join(" | ")
};
let arm = format!("{} => {}", pats, current_expr.syntax().text());
let offset = TextUnit::from_usize(arm.len()) - cursor_to_end;
let start = current_arm.syntax().text_range().start();
let end = next_arm.syntax().text_range().end();
edit.target(current_arm.syntax().text_range());
edit.replace(TextRange::from_to(start, end), arm);
edit.set_cursor(start + offset);
})
} }
#[cfg(test)] #[cfg(test)]
@ -184,6 +207,37 @@ mod tests {
); );
} }
#[test]
fn merges_all_subsequent_arms() {
check_assist(
merge_match_arms,
r#"
enum X { A, B, C, D, E }
fn main() {
match X::A {
X::A<|> => 92,
X::B => 92,
X::C => 92,
X::D => 62,
_ => panic!(),
}
}
"#,
r#"
enum X { A, B, C, D, E }
fn main() {
match X::A {
X::A<|> | X::B | X::C => 92,
X::D => 62,
_ => panic!(),
}
}
"#,
)
}
#[test] #[test]
fn merge_match_arms_rejects_guards() { fn merge_match_arms_rejects_guards() {
check_assist_not_applicable( check_assist_not_applicable(

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, edit, make, AstNode, NameOwner, TypeBoundsOwner}, ast::{self, edit, make, AstNode, NameOwner, TypeBoundsOwner},
SyntaxElement, SyntaxElement,
@ -22,7 +21,7 @@ use crate::{Assist, AssistCtx, AssistId};
// f(x) // f(x)
// } // }
// ``` // ```
pub(crate) fn move_bounds_to_where_clause(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn move_bounds_to_where_clause(ctx: AssistCtx) -> Option<Assist> {
let type_param_list = ctx.find_node_at_offset::<ast::TypeParamList>()?; let type_param_list = ctx.find_node_at_offset::<ast::TypeParamList>()?;
let mut type_params = type_param_list.type_params(); let mut type_params = type_param_list.type_params();

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast, ast,
ast::{AstNode, AstToken, IfExpr, MatchArm}, ast::{AstNode, AstToken, IfExpr, MatchArm},
@ -32,7 +31,7 @@ use crate::{Assist, AssistCtx, AssistId};
// } // }
// } // }
// ``` // ```
pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx) -> Option<Assist> {
let match_arm = ctx.find_node_at_offset::<MatchArm>()?; let match_arm = ctx.find_node_at_offset::<MatchArm>()?;
let guard = match_arm.guard()?; let guard = match_arm.guard()?;
let space_before_guard = guard.syntax().prev_sibling_or_token(); let space_before_guard = guard.syntax().prev_sibling_or_token();
@ -89,7 +88,7 @@ pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx<impl HirDatabase>) -> Option
// } // }
// } // }
// ``` // ```
pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> {
let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?; let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?;
let last_match_pat = match_arm.pats().last()?; let last_match_pat = match_arm.pats().last()?;

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast, AstToken, ast, AstToken,
SyntaxKind::{RAW_STRING, STRING}, SyntaxKind::{RAW_STRING, STRING},
@ -22,7 +21,7 @@ use crate::{Assist, AssistCtx, AssistId};
// r#"Hello, World!"#; // r#"Hello, World!"#;
// } // }
// ``` // ```
pub(crate) fn make_raw_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn make_raw_string(ctx: AssistCtx) -> Option<Assist> {
let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?;
let value = token.value()?; let value = token.value()?;
ctx.add_assist(AssistId("make_raw_string"), "Rewrite as raw string", |edit| { ctx.add_assist(AssistId("make_raw_string"), "Rewrite as raw string", |edit| {
@ -51,7 +50,7 @@ pub(crate) fn make_raw_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
// "Hello, \"World!\""; // "Hello, \"World!\"";
// } // }
// ``` // ```
pub(crate) fn make_usual_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn make_usual_string(ctx: AssistCtx) -> Option<Assist> {
let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?;
let value = token.value()?; let value = token.value()?;
ctx.add_assist(AssistId("make_usual_string"), "Rewrite as regular string", |edit| { ctx.add_assist(AssistId("make_usual_string"), "Rewrite as regular string", |edit| {
@ -77,7 +76,7 @@ pub(crate) fn make_usual_string(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi
// r##"Hello, World!"##; // r##"Hello, World!"##;
// } // }
// ``` // ```
pub(crate) fn add_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn add_hash(ctx: AssistCtx) -> Option<Assist> {
let token = ctx.find_token_at_offset(RAW_STRING)?; let token = ctx.find_token_at_offset(RAW_STRING)?;
ctx.add_assist(AssistId("add_hash"), "Add # to raw string", |edit| { ctx.add_assist(AssistId("add_hash"), "Add # to raw string", |edit| {
edit.target(token.text_range()); edit.target(token.text_range());
@ -101,7 +100,7 @@ pub(crate) fn add_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
// r"Hello, World!"; // r"Hello, World!";
// } // }
// ``` // ```
pub(crate) fn remove_hash(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn remove_hash(ctx: AssistCtx) -> Option<Assist> {
let token = ctx.find_token_at_offset(RAW_STRING)?; let token = ctx.find_token_at_offset(RAW_STRING)?;
let text = token.text().as_str(); let text = token.text().as_str();
if text.starts_with("r\"") { if text.starts_with("r\"") {

View file

@ -1,4 +1,3 @@
use hir::db::HirDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, AstNode}, ast::{self, AstNode},
TextUnit, T, TextUnit, T,
@ -21,7 +20,7 @@ use crate::{Assist, AssistCtx, AssistId};
// 92; // 92;
// } // }
// ``` // ```
pub(crate) fn remove_dbg(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn remove_dbg(ctx: AssistCtx) -> Option<Assist> {
let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?; let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?;
if !is_valid_macrocall(&macro_call, "dbg")? { if !is_valid_macrocall(&macro_call, "dbg")? {

View file

@ -1,9 +1,11 @@
use format_buf::format; use ra_fmt::unwrap_trivial_block;
use hir::db::HirDatabase; use ra_syntax::{
use ra_fmt::extract_trivial_expression; ast::{self, make},
use ra_syntax::{ast, AstNode}; AstNode,
};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
use ast::edit::IndentLevel;
// Assist: replace_if_let_with_match // Assist: replace_if_let_with_match
// //
@ -31,7 +33,7 @@ use crate::{Assist, AssistCtx, AssistId};
// } // }
// } // }
// ``` // ```
pub(crate) fn replace_if_let_with_match(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> {
let if_expr: ast::IfExpr = ctx.find_node_at_offset()?; let if_expr: ast::IfExpr = ctx.find_node_at_offset()?;
let cond = if_expr.condition()?; let cond = if_expr.condition()?;
let pat = cond.pat()?; let pat = cond.pat()?;
@ -43,34 +45,26 @@ pub(crate) fn replace_if_let_with_match(ctx: AssistCtx<impl HirDatabase>) -> Opt
}; };
ctx.add_assist(AssistId("replace_if_let_with_match"), "Replace with match", |edit| { ctx.add_assist(AssistId("replace_if_let_with_match"), "Replace with match", |edit| {
let match_expr = build_match_expr(expr, pat, then_block, else_block); let match_expr = {
let then_arm = {
let then_expr = unwrap_trivial_block(then_block);
make::match_arm(vec![pat], then_expr)
};
let else_arm = {
let else_expr = unwrap_trivial_block(else_block);
make::match_arm(vec![make::placeholder_pat().into()], else_expr)
};
make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm]))
};
let match_expr = IndentLevel::from_node(if_expr.syntax()).increase_indent(match_expr);
edit.target(if_expr.syntax().text_range()); edit.target(if_expr.syntax().text_range());
edit.replace_node_and_indent(if_expr.syntax(), match_expr); edit.set_cursor(if_expr.syntax().text_range().start());
edit.set_cursor(if_expr.syntax().text_range().start()) edit.replace_ast::<ast::Expr>(if_expr.into(), match_expr.into());
}) })
} }
fn build_match_expr(
expr: ast::Expr,
pat1: ast::Pat,
arm1: ast::BlockExpr,
arm2: ast::BlockExpr,
) -> String {
let mut buf = String::new();
format!(buf, "match {} {{\n", expr.syntax().text());
format!(buf, " {} => {}\n", pat1.syntax().text(), format_arm(&arm1));
format!(buf, " _ => {}\n", format_arm(&arm2));
buf.push_str("}");
buf
}
fn format_arm(block: &ast::BlockExpr) -> String {
match extract_trivial_expression(block) {
Some(e) if !e.syntax().text().contains_char('\n') => format!("{},", e.syntax().text()),
_ => block.syntax().text().to_string(),
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -1,4 +1,4 @@
use hir::{self, db::HirDatabase}; use hir::{self, ModPath};
use ra_syntax::{ use ra_syntax::{
ast::{self, NameOwner}, ast::{self, NameOwner},
AstNode, Direction, SmolStr, AstNode, Direction, SmolStr,
@ -12,18 +12,18 @@ use crate::{
AssistId, AssistId,
}; };
/// This function produces sequence of text edits into edit /// Creates and inserts a use statement for the given path to import.
/// to import the target path in the most appropriate scope given /// The use statement is inserted in the scope most appropriate to the
/// the cursor position /// the cursor position given, additionally merged with the existing use imports.
pub fn auto_import_text_edit( pub fn insert_use_statement(
// Ideally the position of the cursor, used to // Ideally the position of the cursor, used to
position: &SyntaxNode, position: &SyntaxNode,
// The statement to use as anchor (last resort) // The statement to use as anchor (last resort)
anchor: &SyntaxNode, anchor: &SyntaxNode,
// The path to import as a sequence of strings path_to_import: &ModPath,
target: &[SmolStr],
edit: &mut TextEditBuilder, edit: &mut TextEditBuilder,
) { ) {
let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>();
let container = position.ancestors().find_map(|n| { let container = position.ancestors().find_map(|n| {
if let Some(module) = ast::Module::cast(n.clone()) { if let Some(module) = ast::Module::cast(n.clone()) {
return module.item_list().map(|it| it.syntax().clone()); return module.item_list().map(|it| it.syntax().clone());
@ -32,14 +32,14 @@ pub fn auto_import_text_edit(
}); });
if let Some(container) = container { if let Some(container) = container {
let action = best_action_for_target(container, anchor.clone(), target); let action = best_action_for_target(container, anchor.clone(), &target);
make_assist(&action, target, edit); make_assist(&action, &target, edit);
} }
} }
// Assist: add_import // Assist: replace_qualified_name_with_use
// //
// Adds a use statement for a given fully-qualified path. // Adds a use statement for a given fully-qualified name.
// //
// ``` // ```
// fn process(map: std::collections::<|>HashMap<String, String>) {} // fn process(map: std::collections::<|>HashMap<String, String>) {}
@ -50,7 +50,7 @@ pub fn auto_import_text_edit(
// //
// fn process(map: HashMap<String, String>) {} // fn process(map: HashMap<String, String>) {}
// ``` // ```
pub(crate) fn add_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn replace_qualified_name_with_use(ctx: AssistCtx) -> Option<Assist> {
let path: ast::Path = ctx.find_node_at_offset()?; let path: ast::Path = ctx.find_node_at_offset()?;
// We don't want to mess with use statements // We don't want to mess with use statements
if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { if path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() {
@ -72,9 +72,13 @@ pub(crate) fn add_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
} }
}; };
ctx.add_assist(AssistId("add_import"), format!("Import {}", fmt_segments(&segments)), |edit| { ctx.add_assist(
apply_auto_import(&position, &path, &segments, edit.text_edit_builder()); AssistId("replace_qualified_name_with_use"),
}) "Replace qualified path with use",
|edit| {
replace_with_use(&position, &path, &segments, edit.text_edit_builder());
},
)
} }
fn collect_path_segments_raw( fn collect_path_segments_raw(
@ -107,12 +111,6 @@ fn collect_path_segments_raw(
Some(segments.len() - oldlen) Some(segments.len() - oldlen)
} }
fn fmt_segments(segments: &[SmolStr]) -> String {
let mut buf = String::new();
fmt_segments_raw(segments, &mut buf);
buf
}
fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) { fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
let mut iter = segments.iter(); let mut iter = segments.iter();
if let Some(s) = iter.next() { if let Some(s) = iter.next() {
@ -558,7 +556,7 @@ fn make_assist_add_nested_import(
} }
} }
fn apply_auto_import( fn replace_with_use(
container: &SyntaxNode, container: &SyntaxNode,
path: &ast::Path, path: &ast::Path,
target: &[SmolStr], target: &[SmolStr],
@ -603,9 +601,9 @@ mod tests {
use super::*; use super::*;
#[test] #[test]
fn test_auto_import_add_use_no_anchor() { fn test_replace_add_use_no_anchor() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
std::fmt::Debug<|> std::fmt::Debug<|>
", ",
@ -617,9 +615,9 @@ Debug<|>
); );
} }
#[test] #[test]
fn test_auto_import_add_use_no_anchor_with_item_below() { fn test_replace_add_use_no_anchor_with_item_below() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
std::fmt::Debug<|> std::fmt::Debug<|>
@ -638,9 +636,9 @@ fn main() {
} }
#[test] #[test]
fn test_auto_import_add_use_no_anchor_with_item_above() { fn test_replace_add_use_no_anchor_with_item_above() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
fn main() { fn main() {
} }
@ -659,9 +657,9 @@ Debug<|>
} }
#[test] #[test]
fn test_auto_import_add_use_no_anchor_2seg() { fn test_replace_add_use_no_anchor_2seg() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
std::fmt<|>::Debug std::fmt<|>::Debug
", ",
@ -674,9 +672,9 @@ fmt<|>::Debug
} }
#[test] #[test]
fn test_auto_import_add_use() { fn test_replace_add_use() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use stdx; use stdx;
@ -694,9 +692,9 @@ impl Debug<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_file_use_other_anchor() { fn test_replace_file_use_other_anchor() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
impl std::fmt::Debug<|> for Foo { impl std::fmt::Debug<|> for Foo {
} }
@ -711,9 +709,9 @@ impl Debug<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_add_use_other_anchor_indent() { fn test_replace_add_use_other_anchor_indent() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
impl std::fmt::Debug<|> for Foo { impl std::fmt::Debug<|> for Foo {
} }
@ -728,9 +726,9 @@ impl Debug<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_split_different() { fn test_replace_split_different() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt; use std::fmt;
@ -747,9 +745,9 @@ impl io<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_split_self_for_use() { fn test_replace_split_self_for_use() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt; use std::fmt;
@ -766,9 +764,9 @@ impl Debug<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_split_self_for_target() { fn test_replace_split_self_for_target() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt::Debug; use std::fmt::Debug;
@ -785,9 +783,9 @@ impl fmt<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_add_to_nested_self_nested() { fn test_replace_add_to_nested_self_nested() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt::{Debug, nested::{Display}}; use std::fmt::{Debug, nested::{Display}};
@ -804,9 +802,9 @@ impl nested<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_add_to_nested_self_already_included() { fn test_replace_add_to_nested_self_already_included() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt::{Debug, nested::{self, Display}}; use std::fmt::{Debug, nested::{self, Display}};
@ -823,9 +821,9 @@ impl nested<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_add_to_nested_nested() { fn test_replace_add_to_nested_nested() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt::{Debug, nested::{Display}}; use std::fmt::{Debug, nested::{Display}};
@ -842,9 +840,9 @@ impl Debug<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_split_common_target_longer() { fn test_replace_split_common_target_longer() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt::Debug; use std::fmt::Debug;
@ -861,9 +859,9 @@ impl Display<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_split_common_use_longer() { fn test_replace_split_common_use_longer() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt::nested::Debug; use std::fmt::nested::Debug;
@ -880,9 +878,9 @@ impl Display<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_use_nested_import() { fn test_replace_use_nested_import() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use crate::{ use crate::{
ty::{Substs, Ty}, ty::{Substs, Ty},
@ -903,9 +901,9 @@ fn foo() { lower<|>::trait_env() }
} }
#[test] #[test]
fn test_auto_import_alias() { fn test_replace_alias() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
use std::fmt as foo; use std::fmt as foo;
@ -922,9 +920,9 @@ impl Debug<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_not_applicable_one_segment() { fn test_replace_not_applicable_one_segment() {
check_assist_not_applicable( check_assist_not_applicable(
add_import, replace_qualified_name_with_use,
" "
impl foo<|> for Foo { impl foo<|> for Foo {
} }
@ -933,9 +931,9 @@ impl foo<|> for Foo {
} }
#[test] #[test]
fn test_auto_import_not_applicable_in_use() { fn test_replace_not_applicable_in_use() {
check_assist_not_applicable( check_assist_not_applicable(
add_import, replace_qualified_name_with_use,
" "
use std::fmt<|>; use std::fmt<|>;
", ",
@ -943,9 +941,9 @@ use std::fmt<|>;
} }
#[test] #[test]
fn test_auto_import_add_use_no_anchor_in_mod_mod() { fn test_replace_add_use_no_anchor_in_mod_mod() {
check_assist( check_assist(
add_import, replace_qualified_name_with_use,
" "
mod foo { mod foo {
mod bar { mod bar {

View file

@ -1,6 +1,5 @@
use std::iter::successors; use std::iter::successors;
use hir::db::HirDatabase;
use ra_syntax::{ast, AstNode, TextUnit, T}; use ra_syntax::{ast, AstNode, TextUnit, T};
use crate::{Assist, AssistCtx, AssistId}; use crate::{Assist, AssistCtx, AssistId};
@ -16,7 +15,7 @@ use crate::{Assist, AssistCtx, AssistId};
// ``` // ```
// use std::{collections::HashMap}; // use std::{collections::HashMap};
// ``` // ```
pub(crate) fn split_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { pub(crate) fn split_import(ctx: AssistCtx) -> Option<Assist> {
let colon_colon = ctx.find_token_at_offset(T![::])?; let colon_colon = ctx.find_token_at_offset(T![::])?;
let path = ast::Path::cast(colon_colon.parent())?; let path = ast::Path::cast(colon_colon.parent())?;
let top_path = successors(Some(path), |it| it.parent_path()).last()?; let top_path = successors(Some(path), |it| it.parent_path()).last()?;

View file

@ -9,18 +9,16 @@ mod assist_ctx;
mod marks; mod marks;
#[cfg(test)] #[cfg(test)]
mod doc_tests; mod doc_tests;
#[cfg(test)] mod utils;
mod test_db;
pub mod ast_transform; pub mod ast_transform;
use either::Either;
use hir::db::HirDatabase;
use ra_db::FileRange; use ra_db::FileRange;
use ra_ide_db::RootDatabase;
use ra_syntax::{TextRange, TextUnit}; use ra_syntax::{TextRange, TextUnit};
use ra_text_edit::TextEdit; use ra_text_edit::TextEdit;
pub(crate) use crate::assist_ctx::{Assist, AssistCtx}; pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler};
pub use crate::assists::add_import::auto_import_text_edit; pub use crate::handlers::replace_qualified_name_with_use::insert_use_statement;
/// Unique identifier of the assist, should not be shown to the user /// Unique identifier of the assist, should not be shown to the user
/// directly. /// directly.
@ -34,81 +32,64 @@ pub struct AssistLabel {
pub id: AssistId, pub id: AssistId,
} }
#[derive(Clone, Debug)]
pub struct GroupLabel(pub String);
impl AssistLabel {
pub(crate) fn new(label: String, id: AssistId) -> AssistLabel {
// FIXME: make fields private, so that this invariant can't be broken
assert!(label.chars().nth(0).unwrap().is_uppercase());
AssistLabel { label: label.into(), id }
}
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct AssistAction { pub struct AssistAction {
pub label: Option<String>,
pub edit: TextEdit, pub edit: TextEdit,
pub cursor_position: Option<TextUnit>, pub cursor_position: Option<TextUnit>,
// FIXME: This belongs to `AssistLabel`
pub target: Option<TextRange>, pub target: Option<TextRange>,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct ResolvedAssist { pub struct ResolvedAssist {
pub label: AssistLabel, pub label: AssistLabel,
pub action_data: Either<AssistAction, Vec<AssistAction>>, pub group_label: Option<GroupLabel>,
} pub action: AssistAction,
impl ResolvedAssist {
pub fn get_first_action(&self) -> AssistAction {
match &self.action_data {
Either::Left(action) => action.clone(),
Either::Right(actions) => actions[0].clone(),
}
}
} }
/// Return all the assists applicable at the given position. /// Return all the assists applicable at the given position.
/// ///
/// Assists are returned in the "unresolved" state, that is only labels are /// Assists are returned in the "unresolved" state, that is only labels are
/// returned, without actual edits. /// returned, without actual edits.
pub fn applicable_assists<H>(db: &H, range: FileRange) -> Vec<AssistLabel> pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabel> {
where let ctx = AssistCtx::new(db, range, false);
H: HirDatabase + 'static, handlers::all()
{
AssistCtx::with_ctx(db, range, false, |ctx| {
assists::all()
.iter() .iter()
.filter_map(|f| f(ctx.clone())) .filter_map(|f| f(ctx.clone()))
.map(|a| match a { .flat_map(|it| it.0)
Assist::Unresolved { label } => label, .map(|a| a.label)
Assist::Resolved { .. } => unreachable!(),
})
.collect() .collect()
})
} }
/// Return all the assists applicable at the given position. /// Return all the assists applicable at the given position.
/// ///
/// Assists are returned in the "resolved" state, that is with edit fully /// Assists are returned in the "resolved" state, that is with edit fully
/// computed. /// computed.
pub fn assists<H>(db: &H, range: FileRange) -> Vec<ResolvedAssist> pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> {
where let ctx = AssistCtx::new(db, range, true);
H: HirDatabase + 'static, let mut a = handlers::all()
{
use std::cmp::Ordering;
AssistCtx::with_ctx(db, range, true, |ctx| {
let mut a = assists::all()
.iter() .iter()
.filter_map(|f| f(ctx.clone())) .filter_map(|f| f(ctx.clone()))
.map(|a| match a { .flat_map(|it| it.0)
Assist::Resolved { assist } => assist, .map(|it| it.into_resolved().unwrap())
Assist::Unresolved { .. } => unreachable!(),
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
a.sort_by(|a, b| match (a.get_first_action().target, b.get_first_action().target) { a.sort_by_key(|it| it.action.target.map_or(TextUnit::from(!0u32), |it| it.len()));
(Some(a), Some(b)) => a.len().cmp(&b.len()),
(Some(_), None) => Ordering::Less,
(None, Some(_)) => Ordering::Greater,
(None, None) => Ordering::Equal,
});
a a
})
} }
mod assists { mod handlers {
use crate::{Assist, AssistCtx}; use crate::AssistHandler;
use hir::db::HirDatabase;
mod add_derive; mod add_derive;
mod add_explicit_type; mod add_explicit_type;
@ -116,6 +97,7 @@ mod assists {
mod add_custom_impl; mod add_custom_impl;
mod add_new; mod add_new;
mod apply_demorgan; mod apply_demorgan;
mod auto_import;
mod invert_if; mod invert_if;
mod flip_comma; mod flip_comma;
mod flip_binexpr; mod flip_binexpr;
@ -129,13 +111,13 @@ mod assists {
mod replace_if_let_with_match; mod replace_if_let_with_match;
mod split_import; mod split_import;
mod remove_dbg; mod remove_dbg;
pub(crate) mod add_import; pub(crate) mod replace_qualified_name_with_use;
mod add_missing_impl_members; mod add_missing_impl_members;
mod move_guard; mod move_guard;
mod move_bounds; mod move_bounds;
mod early_return; mod early_return;
pub(crate) fn all<DB: HirDatabase>() -> &'static [fn(AssistCtx<DB>) -> Option<Assist>] { pub(crate) fn all() -> &'static [AssistHandler] {
&[ &[
add_derive::add_derive, add_derive::add_derive,
add_explicit_type::add_explicit_type, add_explicit_type::add_explicit_type,
@ -154,7 +136,7 @@ mod assists {
replace_if_let_with_match::replace_if_let_with_match, replace_if_let_with_match::replace_if_let_with_match,
split_import::split_import, split_import::split_import,
remove_dbg::remove_dbg, remove_dbg::remove_dbg,
add_import::add_import, replace_qualified_name_with_use::replace_qualified_name_with_use,
add_missing_impl_members::add_missing_impl_members, add_missing_impl_members::add_missing_impl_members,
add_missing_impl_members::add_missing_default_members, add_missing_impl_members::add_missing_default_members,
inline_local_variable::inline_local_variable, inline_local_variable::inline_local_variable,
@ -166,33 +148,39 @@ mod assists {
raw_string::make_usual_string, raw_string::make_usual_string,
raw_string::remove_hash, raw_string::remove_hash,
early_return::convert_to_guarded_return, early_return::convert_to_guarded_return,
auto_import::auto_import,
] ]
} }
} }
#[cfg(test)] #[cfg(test)]
mod helpers { mod helpers {
use ra_db::{fixture::WithFixture, FileRange}; use std::sync::Arc;
use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt};
use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase};
use ra_syntax::TextRange; use ra_syntax::TextRange;
use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range};
use crate::{test_db::TestDB, Assist, AssistCtx}; use crate::{AssistCtx, AssistHandler};
pub(crate) fn check_assist( pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
assist: fn(AssistCtx<TestDB>) -> Option<Assist>, let (mut db, file_id) = RootDatabase::with_single_file(text);
before: &str, // FIXME: ideally, this should be done by the above `RootDatabase::with_single_file`,
after: &str, // but it looks like this might need specialization? :(
) { let local_roots = vec![db.file_source_root(file_id)];
db.set_local_roots(Arc::new(local_roots));
(db, file_id)
}
pub(crate) fn check_assist(assist: AssistHandler, before: &str, after: &str) {
let (before_cursor_pos, before) = extract_offset(before); let (before_cursor_pos, before) = extract_offset(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = with_single_file(&before);
let frange = let frange =
FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
let assist = let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
let action = match assist { let action = assist.0[0].action.clone().unwrap();
Assist::Unresolved { .. } => unreachable!(),
Assist::Resolved { assist } => assist.get_first_action(),
};
let actual = action.edit.apply(&before); let actual = action.edit.apply(&before);
let actual_cursor_pos = match action.cursor_position { let actual_cursor_pos = match action.cursor_position {
@ -206,20 +194,13 @@ mod helpers {
assert_eq_text!(after, &actual); assert_eq_text!(after, &actual);
} }
pub(crate) fn check_assist_range( pub(crate) fn check_assist_range(assist: AssistHandler, before: &str, after: &str) {
assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
before: &str,
after: &str,
) {
let (range, before) = extract_range(before); let (range, before) = extract_range(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = with_single_file(&before);
let frange = FileRange { file_id, range }; let frange = FileRange { file_id, range };
let assist = let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
let action = match assist { let action = assist.0[0].action.clone().unwrap();
Assist::Unresolved { .. } => unreachable!(),
Assist::Resolved { assist } => assist.get_first_action(),
};
let mut actual = action.edit.apply(&before); let mut actual = action.edit.apply(&before);
if let Some(pos) = action.cursor_position { if let Some(pos) = action.cursor_position {
@ -228,85 +209,65 @@ mod helpers {
assert_eq_text!(after, &actual); assert_eq_text!(after, &actual);
} }
pub(crate) fn check_assist_target( pub(crate) fn check_assist_target(assist: AssistHandler, before: &str, target: &str) {
assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
before: &str,
target: &str,
) {
let (before_cursor_pos, before) = extract_offset(before); let (before_cursor_pos, before) = extract_offset(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = with_single_file(&before);
let frange = let frange =
FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
let assist = let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
let action = match assist { let action = assist.0[0].action.clone().unwrap();
Assist::Unresolved { .. } => unreachable!(),
Assist::Resolved { assist } => assist.get_first_action(),
};
let range = action.target.expect("expected target on action"); let range = action.target.expect("expected target on action");
assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target); assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
} }
pub(crate) fn check_assist_range_target( pub(crate) fn check_assist_range_target(assist: AssistHandler, before: &str, target: &str) {
assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
before: &str,
target: &str,
) {
let (range, before) = extract_range(before); let (range, before) = extract_range(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = with_single_file(&before);
let frange = FileRange { file_id, range }; let frange = FileRange { file_id, range };
let assist = let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable"); assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
let action = match assist { let action = assist.0[0].action.clone().unwrap();
Assist::Unresolved { .. } => unreachable!(),
Assist::Resolved { assist } => assist.get_first_action(),
};
let range = action.target.expect("expected target on action"); let range = action.target.expect("expected target on action");
assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target); assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
} }
pub(crate) fn check_assist_not_applicable( pub(crate) fn check_assist_not_applicable(assist: AssistHandler, before: &str) {
assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
before: &str,
) {
let (before_cursor_pos, before) = extract_offset(before); let (before_cursor_pos, before) = extract_offset(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = with_single_file(&before);
let frange = let frange =
FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
let assist = AssistCtx::with_ctx(&db, frange, true, assist); let assist = assist(AssistCtx::new(&db, frange, true));
assert!(assist.is_none()); assert!(assist.is_none());
} }
pub(crate) fn check_assist_range_not_applicable( pub(crate) fn check_assist_range_not_applicable(assist: AssistHandler, before: &str) {
assist: fn(AssistCtx<TestDB>) -> Option<Assist>,
before: &str,
) {
let (range, before) = extract_range(before); let (range, before) = extract_range(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = with_single_file(&before);
let frange = FileRange { file_id, range }; let frange = FileRange { file_id, range };
let assist = AssistCtx::with_ctx(&db, frange, true, assist); let assist = assist(AssistCtx::new(&db, frange, true));
assert!(assist.is_none()); assert!(assist.is_none());
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use ra_db::{fixture::WithFixture, FileRange}; use ra_db::FileRange;
use ra_syntax::TextRange; use ra_syntax::TextRange;
use test_utils::{extract_offset, extract_range}; use test_utils::{extract_offset, extract_range};
use crate::test_db::TestDB; use crate::{helpers, resolved_assists};
#[test] #[test]
fn assist_order_field_struct() { fn assist_order_field_struct() {
let before = "struct Foo { <|>bar: u32 }"; let before = "struct Foo { <|>bar: u32 }";
let (before_cursor_pos, before) = extract_offset(before); let (before_cursor_pos, before) = extract_offset(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = helpers::with_single_file(&before);
let frange = let frange =
FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) }; FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
let assists = super::assists(&db, frange); let assists = resolved_assists(&db, frange);
let mut assists = assists.iter(); let mut assists = assists.iter();
assert_eq!( assert_eq!(
@ -327,9 +288,9 @@ mod tests {
} }
}"; }";
let (range, before) = extract_range(before); let (range, before) = extract_range(before);
let (db, file_id) = TestDB::with_single_file(&before); let (db, file_id) = helpers::with_single_file(&before);
let frange = FileRange { file_id, range }; let frange = FileRange { file_id, range };
let assists = super::assists(&db, frange); let assists = resolved_assists(&db, frange);
let mut assists = assists.iter(); let mut assists = assists.iter();
assert_eq!(assists.next().expect("expected assist").label.label, "Extract into variable"); assert_eq!(assists.next().expect("expected assist").label.label, "Extract into variable");

View file

@ -1,45 +0,0 @@
//! Database used for testing `ra_assists`.
use std::sync::Arc;
use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath};
#[salsa::database(
ra_db::SourceDatabaseExtStorage,
ra_db::SourceDatabaseStorage,
hir::db::InternDatabaseStorage,
hir::db::AstDatabaseStorage,
hir::db::DefDatabaseStorage,
hir::db::HirDatabaseStorage
)]
#[derive(Debug, Default)]
pub struct TestDB {
runtime: salsa::Runtime<TestDB>,
}
impl salsa::Database for TestDB {
fn salsa_runtime(&self) -> &salsa::Runtime<Self> {
&self.runtime
}
fn salsa_runtime_mut(&mut self) -> &mut salsa::Runtime<Self> {
&mut self.runtime
}
}
impl std::panic::RefUnwindSafe for TestDB {}
impl FileLoader for TestDB {
fn file_text(&self, file_id: FileId) -> Arc<String> {
FileLoaderDelegate(self).file_text(file_id)
}
fn resolve_relative_path(
&self,
anchor: FileId,
relative_path: &RelativePath,
) -> Option<FileId> {
FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path)
}
fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
FileLoaderDelegate(self).relevant_crates(file_id)
}
}

View file

@ -0,0 +1,27 @@
//! Assorted functions shared by several assists.
use ra_syntax::{
ast::{self, make},
T,
};
pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr {
if let Some(expr) = invert_special_case(&expr) {
return expr;
}
make::expr_prefix(T![!], expr)
}
fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
match expr {
ast::Expr::BinExpr(bin) => match bin.op_kind()? {
ast::BinOp::NegatedEqualityTest => bin.replace_op(T![==]).map(|it| it.into()),
ast::BinOp::EqualityTest => bin.replace_op(T![!=]).map(|it| it.into()),
_ => None,
},
ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::PrefixOp::Not => pe.expr(),
// FIXME:
// ast::Expr::Literal(true | false )
_ => None,
}
}

View file

@ -11,7 +11,8 @@ log = "0.4.3"
cargo_metadata = "0.9.1" cargo_metadata = "0.9.1"
jod-thread = "0.1.0" jod-thread = "0.1.0"
parking_lot = "0.10.0" parking_lot = "0.10.0"
serde_json = "1.0.45"
[dev-dependencies] [dev-dependencies]
insta = "0.12.0" insta = "0.13.0"
serde_json = "1.0" serde_json = "1.0"

View file

@ -1,12 +1,11 @@
//! This module provides the functionality needed to convert diagnostics from //! This module provides the functionality needed to convert diagnostics from
//! `cargo check` json format to the LSP diagnostic format. //! `cargo check` json format to the LSP diagnostic format.
use cargo_metadata::diagnostic::{ use cargo_metadata::diagnostic::{
Applicability, Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion,
DiagnosticSpanMacroExpansion,
}; };
use lsp_types::{ use lsp_types::{
Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, Location, CodeAction, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag,
NumberOrString, Position, Range, Url, Location, NumberOrString, Position, Range, TextEdit, Url, WorkspaceEdit,
}; };
use std::{ use std::{
fmt::Write, fmt::Write,
@ -117,38 +116,9 @@ fn is_deprecated(rd: &RustDiagnostic) -> bool {
} }
} }
#[derive(Debug)]
pub struct SuggestedFix {
pub title: String,
pub location: Location,
pub replacement: String,
pub applicability: Applicability,
pub diagnostics: Vec<Diagnostic>,
}
impl std::cmp::PartialEq<SuggestedFix> for SuggestedFix {
fn eq(&self, other: &SuggestedFix) -> bool {
if self.title == other.title
&& self.location == other.location
&& self.replacement == other.replacement
{
// Applicability doesn't impl PartialEq...
match (&self.applicability, &other.applicability) {
(Applicability::MachineApplicable, Applicability::MachineApplicable) => true,
(Applicability::HasPlaceholders, Applicability::HasPlaceholders) => true,
(Applicability::MaybeIncorrect, Applicability::MaybeIncorrect) => true,
(Applicability::Unspecified, Applicability::Unspecified) => true,
_ => false,
}
} else {
false
}
}
}
enum MappedRustChildDiagnostic { enum MappedRustChildDiagnostic {
Related(DiagnosticRelatedInformation), Related(DiagnosticRelatedInformation),
SuggestedFix(SuggestedFix), SuggestedFix(CodeAction),
MessageLine(String), MessageLine(String),
} }
@ -176,12 +146,20 @@ fn map_rust_child_diagnostic(
rd.message.clone() rd.message.clone()
}; };
MappedRustChildDiagnostic::SuggestedFix(SuggestedFix { let edit = {
let edits = vec![TextEdit::new(location.range, suggested_replacement.clone())];
let mut edit_map = std::collections::HashMap::new();
edit_map.insert(location.uri, edits);
WorkspaceEdit::new(edit_map)
};
MappedRustChildDiagnostic::SuggestedFix(CodeAction {
title, title,
location, kind: Some("quickfix".to_string()),
replacement: suggested_replacement.clone(), diagnostics: None,
applicability: span.suggestion_applicability.clone().unwrap_or(Applicability::Unknown), edit: Some(edit),
diagnostics: vec![], command: None,
is_preferred: None,
}) })
} else { } else {
MappedRustChildDiagnostic::Related(DiagnosticRelatedInformation { MappedRustChildDiagnostic::Related(DiagnosticRelatedInformation {
@ -195,7 +173,7 @@ fn map_rust_child_diagnostic(
pub(crate) struct MappedRustDiagnostic { pub(crate) struct MappedRustDiagnostic {
pub location: Location, pub location: Location,
pub diagnostic: Diagnostic, pub diagnostic: Diagnostic,
pub suggested_fixes: Vec<SuggestedFix>, pub fixes: Vec<CodeAction>,
} }
/// Converts a Rust root diagnostic to LSP form /// Converts a Rust root diagnostic to LSP form
@ -250,15 +228,13 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
} }
} }
let mut suggested_fixes = vec![]; let mut fixes = vec![];
let mut message = rd.message.clone(); let mut message = rd.message.clone();
for child in &rd.children { for child in &rd.children {
let child = map_rust_child_diagnostic(&child, workspace_root); let child = map_rust_child_diagnostic(&child, workspace_root);
match child { match child {
MappedRustChildDiagnostic::Related(related) => related_information.push(related), MappedRustChildDiagnostic::Related(related) => related_information.push(related),
MappedRustChildDiagnostic::SuggestedFix(suggested_fix) => { MappedRustChildDiagnostic::SuggestedFix(code_action) => fixes.push(code_action.into()),
suggested_fixes.push(suggested_fix)
}
MappedRustChildDiagnostic::MessageLine(message_line) => { MappedRustChildDiagnostic::MessageLine(message_line) => {
write!(&mut message, "\n{}", message_line).unwrap(); write!(&mut message, "\n{}", message_line).unwrap();
@ -295,7 +271,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
tags: if !tags.is_empty() { Some(tags) } else { None }, tags: if !tags.is_empty() { Some(tags) } else { None },
}; };
Some(MappedRustDiagnostic { location, diagnostic, suggested_fixes }) Some(MappedRustDiagnostic { location, diagnostic, fixes })
} }
/// Returns a `Url` object from a given path, will lowercase drive letters if present. /// Returns a `Url` object from a given path, will lowercase drive letters if present.

View file

@ -61,11 +61,19 @@ MappedRustDiagnostic {
), ),
tags: None, tags: None,
}, },
suggested_fixes: [ fixes: [
SuggestedFix { CodeAction {
title: "consider passing by value instead: \'self\'", title: "consider passing by value instead: \'self\'",
location: Location { kind: Some(
uri: "file:///test/compiler/mir/tagset.rs", "quickfix",
),
diagnostics: None,
edit: Some(
WorkspaceEdit {
changes: Some(
{
"file:///test/compiler/mir/tagset.rs": [
TextEdit {
range: Range { range: Range {
start: Position { start: Position {
line: 41, line: 41,
@ -76,10 +84,16 @@ MappedRustDiagnostic {
character: 28, character: 28,
}, },
}, },
new_text: "self",
}, },
replacement: "self", ],
applicability: Unspecified, },
diagnostics: [], ),
document_changes: None,
},
),
command: None,
is_preferred: None,
}, },
], ],
} }

View file

@ -42,5 +42,5 @@ MappedRustDiagnostic {
related_information: None, related_information: None,
tags: None, tags: None,
}, },
suggested_fixes: [], fixes: [],
} }

View file

@ -57,5 +57,5 @@ MappedRustDiagnostic {
), ),
tags: None, tags: None,
}, },
suggested_fixes: [], fixes: [],
} }

View file

@ -42,5 +42,5 @@ MappedRustDiagnostic {
related_information: None, related_information: None,
tags: None, tags: None,
}, },
suggested_fixes: [], fixes: [],
} }

View file

@ -42,5 +42,5 @@ MappedRustDiagnostic {
related_information: None, related_information: None,
tags: None, tags: None,
}, },
suggested_fixes: [], fixes: [],
} }

View file

@ -46,11 +46,19 @@ MappedRustDiagnostic {
], ],
), ),
}, },
suggested_fixes: [ fixes: [
SuggestedFix { CodeAction {
title: "consider prefixing with an underscore: \'_foo\'", title: "consider prefixing with an underscore: \'_foo\'",
location: Location { kind: Some(
uri: "file:///test/driver/subcommand/repl.rs", "quickfix",
),
diagnostics: None,
edit: Some(
WorkspaceEdit {
changes: Some(
{
"file:///test/driver/subcommand/repl.rs": [
TextEdit {
range: Range { range: Range {
start: Position { start: Position {
line: 290, line: 290,
@ -61,10 +69,16 @@ MappedRustDiagnostic {
character: 11, character: 11,
}, },
}, },
new_text: "_foo",
}, },
replacement: "_foo", ],
applicability: MachineApplicable, },
diagnostics: [], ),
document_changes: None,
},
),
command: None,
is_preferred: None,
}, },
], ],
} }

View file

@ -4,22 +4,20 @@
use cargo_metadata::Message; use cargo_metadata::Message;
use crossbeam_channel::{never, select, unbounded, Receiver, RecvError, Sender}; use crossbeam_channel::{never, select, unbounded, Receiver, RecvError, Sender};
use lsp_types::{ use lsp_types::{
Diagnostic, Url, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressEnd, CodeAction, CodeActionOrCommand, Diagnostic, Url, WorkDoneProgress, WorkDoneProgressBegin,
WorkDoneProgressReport, WorkDoneProgressEnd, WorkDoneProgressReport,
}; };
use parking_lot::RwLock;
use std::{ use std::{
collections::HashMap, io::{BufRead, BufReader},
path::PathBuf, path::PathBuf,
process::{Command, Stdio}, process::{Command, Stdio},
sync::Arc,
thread::JoinHandle, thread::JoinHandle,
time::Instant, time::Instant,
}; };
mod conv; mod conv;
use crate::conv::{map_rust_diagnostic_to_lsp, MappedRustDiagnostic, SuggestedFix}; use crate::conv::{map_rust_diagnostic_to_lsp, MappedRustDiagnostic};
pub use crate::conv::url_from_path_with_drive_lowercasing; pub use crate::conv::url_from_path_with_drive_lowercasing;
@ -38,7 +36,6 @@ pub struct CheckOptions {
#[derive(Debug)] #[derive(Debug)]
pub struct CheckWatcher { pub struct CheckWatcher {
pub task_recv: Receiver<CheckTask>, pub task_recv: Receiver<CheckTask>,
pub state: Arc<RwLock<CheckState>>,
cmd_send: Option<Sender<CheckCommand>>, cmd_send: Option<Sender<CheckCommand>>,
handle: Option<JoinHandle<()>>, handle: Option<JoinHandle<()>>,
} }
@ -46,7 +43,6 @@ pub struct CheckWatcher {
impl CheckWatcher { impl CheckWatcher {
pub fn new(options: &CheckOptions, workspace_root: PathBuf) -> CheckWatcher { pub fn new(options: &CheckOptions, workspace_root: PathBuf) -> CheckWatcher {
let options = options.clone(); let options = options.clone();
let state = Arc::new(RwLock::new(CheckState::new()));
let (task_send, task_recv) = unbounded::<CheckTask>(); let (task_send, task_recv) = unbounded::<CheckTask>();
let (cmd_send, cmd_recv) = unbounded::<CheckCommand>(); let (cmd_send, cmd_recv) = unbounded::<CheckCommand>();
@ -54,13 +50,12 @@ impl CheckWatcher {
let mut check = CheckWatcherThread::new(options, workspace_root); let mut check = CheckWatcherThread::new(options, workspace_root);
check.run(&task_send, &cmd_recv); check.run(&task_send, &cmd_recv);
}); });
CheckWatcher { task_recv, cmd_send: Some(cmd_send), handle: Some(handle), state } CheckWatcher { task_recv, cmd_send: Some(cmd_send), handle: Some(handle) }
} }
/// Returns a CheckWatcher that doesn't actually do anything /// Returns a CheckWatcher that doesn't actually do anything
pub fn dummy() -> CheckWatcher { pub fn dummy() -> CheckWatcher {
let state = Arc::new(RwLock::new(CheckState::new())); CheckWatcher { task_recv: never(), cmd_send: None, handle: None }
CheckWatcher { task_recv: never(), cmd_send: None, handle: None, state }
} }
/// Schedule a re-start of the cargo check worker. /// Schedule a re-start of the cargo check worker.
@ -87,84 +82,13 @@ impl std::ops::Drop for CheckWatcher {
} }
} }
#[derive(Debug)]
pub struct CheckState {
diagnostic_collection: HashMap<Url, Vec<Diagnostic>>,
suggested_fix_collection: HashMap<Url, Vec<SuggestedFix>>,
}
impl CheckState {
fn new() -> CheckState {
CheckState {
diagnostic_collection: HashMap::new(),
suggested_fix_collection: HashMap::new(),
}
}
/// Clear the cached diagnostics, and schedule updating diagnostics by the
/// server, to clear stale results.
pub fn clear(&mut self) -> Vec<Url> {
let cleared_files: Vec<Url> = self.diagnostic_collection.keys().cloned().collect();
self.diagnostic_collection.clear();
self.suggested_fix_collection.clear();
cleared_files
}
pub fn diagnostics_for(&self, uri: &Url) -> Option<&[Diagnostic]> {
self.diagnostic_collection.get(uri).map(|d| d.as_slice())
}
pub fn fixes_for(&self, uri: &Url) -> Option<&[SuggestedFix]> {
self.suggested_fix_collection.get(uri).map(|d| d.as_slice())
}
pub fn add_diagnostic_with_fixes(&mut self, file_uri: Url, diagnostic: DiagnosticWithFixes) {
for fix in diagnostic.suggested_fixes {
self.add_suggested_fix_for_diagnostic(fix, &diagnostic.diagnostic);
}
self.add_diagnostic(file_uri, diagnostic.diagnostic);
}
fn add_diagnostic(&mut self, file_uri: Url, diagnostic: Diagnostic) {
let diagnostics = self.diagnostic_collection.entry(file_uri).or_default();
// If we're building multiple targets it's possible we've already seen this diagnostic
let is_duplicate = diagnostics.iter().any(|d| are_diagnostics_equal(d, &diagnostic));
if is_duplicate {
return;
}
diagnostics.push(diagnostic);
}
fn add_suggested_fix_for_diagnostic(
&mut self,
mut suggested_fix: SuggestedFix,
diagnostic: &Diagnostic,
) {
let file_uri = suggested_fix.location.uri.clone();
let file_suggestions = self.suggested_fix_collection.entry(file_uri).or_default();
let existing_suggestion: Option<&mut SuggestedFix> =
file_suggestions.iter_mut().find(|s| s == &&suggested_fix);
if let Some(existing_suggestion) = existing_suggestion {
// The existing suggestion also applies to this new diagnostic
existing_suggestion.diagnostics.push(diagnostic.clone());
} else {
// We haven't seen this suggestion before
suggested_fix.diagnostics.push(diagnostic.clone());
file_suggestions.push(suggested_fix);
}
}
}
#[derive(Debug)] #[derive(Debug)]
pub enum CheckTask { pub enum CheckTask {
/// Request a clearing of all cached diagnostics from the check watcher /// Request a clearing of all cached diagnostics from the check watcher
ClearDiagnostics, ClearDiagnostics,
/// Request adding a diagnostic with fixes included to a file /// Request adding a diagnostic with fixes included to a file
AddDiagnostic(Url, DiagnosticWithFixes), AddDiagnostic { url: Url, diagnostic: Diagnostic, fixes: Vec<CodeActionOrCommand> },
/// Request check progress notification to client /// Request check progress notification to client
Status(WorkDoneProgress), Status(WorkDoneProgress),
@ -216,8 +140,10 @@ impl CheckWatcherThread {
self.last_update_req.take(); self.last_update_req.take();
task_send.send(CheckTask::ClearDiagnostics).unwrap(); task_send.send(CheckTask::ClearDiagnostics).unwrap();
// By replacing the watcher, we drop the previous one which // Replace with a dummy watcher first so we drop the original and wait for completion
// causes it to shut down automatically. std::mem::replace(&mut self.watcher, WatchThread::dummy());
// Then create the actual new watcher
self.watcher = WatchThread::new(&self.options, &self.workspace_root); self.watcher = WatchThread::new(&self.options, &self.workspace_root);
} }
} }
@ -277,10 +203,17 @@ impl CheckWatcherThread {
None => return, None => return,
}; };
let MappedRustDiagnostic { location, diagnostic, suggested_fixes } = map_result; let MappedRustDiagnostic { location, diagnostic, fixes } = map_result;
let fixes = fixes
.into_iter()
.map(|fix| {
CodeAction { diagnostics: Some(vec![diagnostic.clone()]), ..fix }.into()
})
.collect();
let diagnostic = DiagnosticWithFixes { diagnostic, suggested_fixes }; task_send
task_send.send(CheckTask::AddDiagnostic(location.uri, diagnostic)).unwrap(); .send(CheckTask::AddDiagnostic { url: location.uri, diagnostic, fixes })
.unwrap();
} }
CheckEvent::Msg(Message::BuildScriptExecuted(_msg)) => {} CheckEvent::Msg(Message::BuildScriptExecuted(_msg)) => {}
@ -292,7 +225,7 @@ impl CheckWatcherThread {
#[derive(Debug)] #[derive(Debug)]
pub struct DiagnosticWithFixes { pub struct DiagnosticWithFixes {
diagnostic: Diagnostic, diagnostic: Diagnostic,
suggested_fixes: Vec<SuggestedFix>, fixes: Vec<CodeAction>,
} }
/// WatchThread exists to wrap around the communication needed to be able to /// WatchThread exists to wrap around the communication needed to be able to
@ -341,6 +274,7 @@ impl WatchThread {
.args(&args) .args(&args)
.stdout(Stdio::piped()) .stdout(Stdio::piped())
.stderr(Stdio::null()) .stderr(Stdio::null())
.stdin(Stdio::null())
.spawn() .spawn()
.expect("couldn't launch cargo"); .expect("couldn't launch cargo");
@ -348,15 +282,45 @@ impl WatchThread {
// which will break out of the loop, and continue the shutdown // which will break out of the loop, and continue the shutdown
let _ = message_send.send(CheckEvent::Begin); let _ = message_send.send(CheckEvent::Begin);
for message in cargo_metadata::parse_messages(command.stdout.take().unwrap()) { // We manually read a line at a time, instead of using serde's
let message = match message { // stream deserializers, because the deserializer cannot recover
Ok(message) => message, // from an error, resulting in it getting stuck, because we try to
// be resillient against failures.
//
// Because cargo only outputs one JSON object per line, we can
// simply skip a line if it doesn't parse, which just ignores any
// erroneus output.
let stdout = BufReader::new(command.stdout.take().unwrap());
for line in stdout.lines() {
let line = match line {
Ok(line) => line,
Err(err) => { Err(err) => {
log::error!("Invalid json from cargo check, ignoring: {}", err); log::error!("Couldn't read line from cargo: {}", err);
continue; continue;
} }
}; };
let message = serde_json::from_str::<cargo_metadata::Message>(&line);
let message = match message {
Ok(message) => message,
Err(err) => {
log::error!(
"Invalid json from cargo check, ignoring ({}): {:?} ",
err,
line
);
continue;
}
};
// Skip certain kinds of messages to only spend time on what's useful
match &message {
Message::CompilerArtifact(artifact) if artifact.fresh => continue,
Message::BuildScriptExecuted(_) => continue,
Message::Unknown => continue,
_ => {}
}
match message_send.send(CheckEvent::Msg(message)) { match message_send.send(CheckEvent::Msg(message)) {
Ok(()) => {} Ok(()) => {}
Err(_err) => { Err(_err) => {
@ -396,10 +360,3 @@ impl std::ops::Drop for WatchThread {
} }
} }
} }
fn are_diagnostics_equal(left: &Diagnostic, right: &Diagnostic) -> bool {
left.source == right.source
&& left.severity == right.severity
&& left.range == right.range
&& left.message == right.message
}

View file

@ -7,7 +7,7 @@ publish = false
[dependencies] [dependencies]
pico-args = "0.3.0" pico-args = "0.3.0"
env_logger = { version = "0.7.1", default-features = false, features = ["humantime"] } env_logger = { version = "0.7.1", default-features = false }
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_ide = { path = "../ra_ide" } ra_ide = { path = "../ra_ide" }

View file

@ -171,12 +171,12 @@ pub fn run(
println!( println!(
"Expressions of unknown type: {} ({}%)", "Expressions of unknown type: {} ({}%)",
num_exprs_unknown, num_exprs_unknown,
if num_exprs > 0 { (num_exprs_unknown * 100 / num_exprs) } else { 100 } if num_exprs > 0 { num_exprs_unknown * 100 / num_exprs } else { 100 }
); );
println!( println!(
"Expressions of partially unknown type: {} ({}%)", "Expressions of partially unknown type: {} ({}%)",
num_exprs_partially_unknown, num_exprs_partially_unknown,
if num_exprs > 0 { (num_exprs_partially_unknown * 100 / num_exprs) } else { 100 } if num_exprs > 0 { num_exprs_partially_unknown * 100 / num_exprs } else { 100 }
); );
println!("Type mismatches: {}", num_type_mismatches); println!("Type mismatches: {}", num_type_mismatches);
println!("Inference: {:?}, {}", inference_time.elapsed(), ra_prof::memory_usage()); println!("Inference: {:?}, {}", inference_time.elapsed(), ra_prof::memory_usage());

View file

@ -8,8 +8,8 @@ use rustc_hash::FxHashMap;
use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER};
use crate::{ use crate::{
CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf, SourceDatabaseExt, input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf,
SourceRoot, SourceRootId, SourceDatabaseExt, SourceRoot, SourceRootId,
}; };
pub const WORKSPACE: SourceRootId = SourceRootId(0); pub const WORKSPACE: SourceRootId = SourceRootId(0);
@ -139,7 +139,7 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
for (from, to) in crate_deps { for (from, to) in crate_deps {
let from_id = crates[&from]; let from_id = crates[&from];
let to_id = crates[&to]; let to_id = crates[&to];
crate_graph.add_dep(from_id, to.into(), to_id).unwrap(); crate_graph.add_dep(from_id, CrateName::new(&to).unwrap(), to_id).unwrap();
} }
} }

View file

@ -83,6 +83,26 @@ pub struct CrateGraph {
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CrateId(pub u32); pub struct CrateId(pub u32);
pub struct CrateName(SmolStr);
impl CrateName {
/// Crates a crate name, checking for dashes in the string provided.
/// Dashes are not allowed in the crate names,
/// hence the input string is returned as `Err` for those cases.
pub fn new(name: &str) -> Result<CrateName, &str> {
if name.contains('-') {
Err(name)
} else {
Ok(Self(SmolStr::new(name)))
}
}
/// Crates a crate name, unconditionally replacing the dashes with underscores.
pub fn normalize_dashes(name: &str) -> CrateName {
Self(SmolStr::new(name.replace('-', "_")))
}
}
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
struct CrateData { struct CrateData {
file_id: FileId, file_id: FileId,
@ -131,13 +151,13 @@ impl CrateGraph {
pub fn add_dep( pub fn add_dep(
&mut self, &mut self,
from: CrateId, from: CrateId,
name: SmolStr, name: CrateName,
to: CrateId, to: CrateId,
) -> Result<(), CyclicDependenciesError> { ) -> Result<(), CyclicDependenciesError> {
if self.dfs_find(from, to, &mut FxHashSet::default()) { if self.dfs_find(from, to, &mut FxHashSet::default()) {
return Err(CyclicDependenciesError); return Err(CyclicDependenciesError);
} }
self.arena.get_mut(&from).unwrap().add_dep(name, to); self.arena.get_mut(&from).unwrap().add_dep(name.0, to);
Ok(()) Ok(())
} }
@ -268,7 +288,7 @@ pub struct CyclicDependenciesError;
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{CfgOptions, CrateGraph, Edition::Edition2018, Env, FileId, SmolStr}; use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
#[test] #[test]
fn it_should_panic_because_of_cycle_dependencies() { fn it_should_panic_because_of_cycle_dependencies() {
@ -279,9 +299,9 @@ mod tests {
graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default());
let crate3 = let crate3 =
graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default()); graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default());
assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok()); assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok()); assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok());
assert!(graph.add_dep(crate3, SmolStr::new("crate1"), crate1).is_err()); assert!(graph.add_dep(crate3, CrateName::new("crate1").unwrap(), crate1).is_err());
} }
#[test] #[test]
@ -293,7 +313,23 @@ mod tests {
graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default());
let crate3 = let crate3 =
graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default()); graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default());
assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok()); assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok()); assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok());
}
#[test]
fn dashes_are_normalized() {
let mut graph = CrateGraph::default();
let crate1 =
graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default(), Env::default());
let crate2 =
graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default());
assert!(graph
.add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
.is_ok());
assert_eq!(
graph.dependencies(crate1).collect::<Vec<_>>(),
vec![&Dependency { crate_id: crate2, name: "crate_name_with_dashes".into() }]
);
} }
} }

View file

@ -10,7 +10,9 @@ use ra_syntax::{ast, Parse, SourceFile, TextRange, TextUnit};
pub use crate::{ pub use crate::{
cancellation::Canceled, cancellation::Canceled,
input::{CrateGraph, CrateId, Dependency, Edition, Env, FileId, SourceRoot, SourceRootId}, input::{
CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, SourceRoot, SourceRootId,
},
}; };
pub use relative_path::{RelativePath, RelativePathBuf}; pub use relative_path::{RelativePath, RelativePathBuf};
pub use salsa; pub use salsa;

View file

@ -35,8 +35,14 @@ fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
successors(token.prev_token(), |token| token.prev_token()) successors(token.prev_token(), |token| token.prev_token())
} }
pub fn extract_trivial_expression(expr: &ast::BlockExpr) -> Option<ast::Expr> { pub fn unwrap_trivial_block(block: ast::BlockExpr) -> ast::Expr {
let block = expr.block()?; extract_trivial_expression(&block)
.filter(|expr| !expr.syntax().text().contains_char('\n'))
.unwrap_or_else(|| block.into())
}
pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option<ast::Expr> {
let block = block.block()?;
let expr = block.expr()?; let expr = block.expr()?;
let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { let non_trivial_children = block.syntax().children().filter(|it| match it.kind() {
WHITESPACE | T!['{'] | T!['}'] => false, WHITESPACE | T!['{'] | T!['}'] => false,

View file

@ -10,9 +10,9 @@ use hir_def::{
per_ns::PerNs, per_ns::PerNs,
resolver::HasResolver, resolver::HasResolver,
type_ref::{Mutability, TypeRef}, type_ref::{Mutability, TypeRef},
AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, HasModule, ImplId, LocalEnumVariantId, AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, ImplId,
LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId, LocalEnumVariantId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId,
TypeParamId, UnionId, TraitId, TypeAliasId, TypeParamId, UnionId,
}; };
use hir_expand::{ use hir_expand::{
diagnostics::DiagnosticSink, diagnostics::DiagnosticSink,
@ -21,7 +21,7 @@ use hir_expand::{
}; };
use hir_ty::{ use hir_ty::{
autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy, autoderef, display::HirFormatter, expr::ExprValidator, method_resolution, ApplicationTy,
Canonical, InEnvironment, TraitEnvironment, Ty, TyDefId, TypeCtor, TypeWalk, Canonical, InEnvironment, Substs, TraitEnvironment, Ty, TyDefId, TypeCtor,
}; };
use ra_db::{CrateId, Edition, FileId}; use ra_db::{CrateId, Edition, FileId};
use ra_prof::profile; use ra_prof::profile;
@ -119,7 +119,7 @@ impl_froms!(
BuiltinType BuiltinType
); );
pub use hir_def::{attr::Attrs, visibility::Visibility, AssocItemId}; pub use hir_def::{attr::Attrs, item_scope::ItemInNs, visibility::Visibility, AssocItemId};
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
impl Module { impl Module {
@ -238,11 +238,16 @@ impl Module {
item: ModuleDef, item: ModuleDef,
) -> Option<hir_def::path::ModPath> { ) -> Option<hir_def::path::ModPath> {
// FIXME expose namespace choice // FIXME expose namespace choice
hir_def::find_path::find_path( hir_def::find_path::find_path(db, determine_item_namespace(item), self.into())
db, }
hir_def::item_scope::ItemInNs::Types(item.into()), }
self.into(),
) fn determine_item_namespace(module_def: ModuleDef) -> ItemInNs {
match module_def {
ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => {
ItemInNs::Values(module_def.into())
}
_ => ItemInNs::Types(module_def.into()),
} }
} }
@ -265,7 +270,13 @@ impl StructField {
pub fn ty(&self, db: &impl HirDatabase) -> Type { pub fn ty(&self, db: &impl HirDatabase) -> Type {
let var_id = self.parent.into(); let var_id = self.parent.into();
let ty = db.field_types(var_id)[self.id].clone(); let generic_def_id: GenericDefId = match self.parent {
VariantDef::Struct(it) => it.id.into(),
VariantDef::Union(it) => it.id.into(),
VariantDef::EnumVariant(it) => it.parent.id.into(),
};
let substs = Substs::type_params(db, generic_def_id);
let ty = db.field_types(var_id)[self.id].clone().subst(&substs);
Type::new(db, self.parent.module(db).id.krate.into(), var_id, ty) Type::new(db, self.parent.module(db).id.krate.into(), var_id, ty)
} }
@ -750,7 +761,7 @@ pub struct TypeParam {
impl TypeParam { impl TypeParam {
pub fn name(self, db: &impl HirDatabase) -> Name { pub fn name(self, db: &impl HirDatabase) -> Name {
let params = db.generic_params(self.id.parent); let params = db.generic_params(self.id.parent);
params.types[self.id.local_id].name.clone() params.types[self.id.local_id].name.clone().unwrap_or_else(Name::missing)
} }
pub fn module(self, db: &impl HirDatabase) -> Module { pub fn module(self, db: &impl HirDatabase) -> Module {
@ -784,8 +795,9 @@ impl ImplBlock {
pub fn target_ty(&self, db: &impl HirDatabase) -> Type { pub fn target_ty(&self, db: &impl HirDatabase) -> Type {
let impl_data = db.impl_data(self.id); let impl_data = db.impl_data(self.id);
let resolver = self.id.resolver(db); let resolver = self.id.resolver(db);
let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
let environment = TraitEnvironment::lower(db, &resolver); let environment = TraitEnvironment::lower(db, &resolver);
let ty = Ty::from_hir(db, &resolver, &impl_data.target_type); let ty = Ty::from_hir(&ctx, &impl_data.target_type);
Type { Type {
krate: self.id.lookup(db).container.module(db).krate, krate: self.id.lookup(db).container.module(db).krate,
ty: InEnvironment { value: ty, environment }, ty: InEnvironment { value: ty, environment },
@ -846,9 +858,10 @@ impl Type {
fn from_def( fn from_def(
db: &impl HirDatabase, db: &impl HirDatabase,
krate: CrateId, krate: CrateId,
def: impl HasResolver + Into<TyDefId>, def: impl HasResolver + Into<TyDefId> + Into<GenericDefId>,
) -> Type { ) -> Type {
let ty = db.ty(def.into()); let substs = Substs::type_params(db, def);
let ty = db.ty(def.into()).subst(&substs);
Type::new(db, krate, def, ty) Type::new(db, krate, def, ty)
} }
@ -945,7 +958,7 @@ impl Type {
match a_ty.ctor { match a_ty.ctor {
TypeCtor::Tuple { .. } => { TypeCtor::Tuple { .. } => {
for ty in a_ty.parameters.iter() { for ty in a_ty.parameters.iter() {
let ty = ty.clone().subst(&a_ty.parameters); let ty = ty.clone();
res.push(self.derived(ty)); res.push(self.derived(ty));
} }
} }

View file

@ -1,20 +1,24 @@
//! FIXME: write short doc here //! FIXME: write short doc here
pub use hir_def::db::{ pub use hir_def::db::{
BodyQuery, BodyWithSourceMapQuery, ComputeCrateDefMapQuery, ConstDataQuery, AttrsQuery, BodyQuery, BodyWithSourceMapQuery, ComputeCrateDefMapQuery, ConstDataQuery,
CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery,
ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternConstQuery,
InternDatabaseStorage, LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery, InternImplQuery,
StructDataQuery, TraitDataQuery, TypeAliasDataQuery, InternStaticQuery, InternStructQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery,
LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, StructDataQuery,
TraitDataQuery, TypeAliasDataQuery, UnionDataQuery,
}; };
pub use hir_expand::db::{ pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery, AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternMacroQuery, MacroArgQuery, MacroDefQuery,
ParseMacroQuery, MacroExpandQuery, ParseMacroQuery,
}; };
pub use hir_ty::db::{ pub use hir_ty::db::{
AssociatedTyDataQuery, CallableItemSignatureQuery, DoInferQuery, FieldTypesQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, DoInferQuery,
GenericDefaultsQuery, GenericPredicatesQuery, HirDatabase, HirDatabaseStorage, ImplDatumQuery, FieldTypesQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery,
ImplsForTraitQuery, ImplsInCrateQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, HirDatabase, HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery,
ImplsForTraitQuery, ImplsInCrateQuery, InternAssocTyValueQuery, InternChalkImplQuery,
InternTypeCtorQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, TraitSolverQuery,
TyQuery, ValueTyQuery, TyQuery, ValueTyQuery,
}; };

View file

@ -178,6 +178,10 @@ impl SourceAnalyzer {
} }
} }
fn trait_env(&self, db: &impl HirDatabase) -> Arc<TraitEnvironment> {
TraitEnvironment::lower(db, &self.resolver)
}
pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> { pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) { let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) {
self.body_source_map.as_ref()?.node_expr(expr.as_ref())? self.body_source_map.as_ref()?.node_expr(expr.as_ref())?
@ -186,14 +190,14 @@ impl SourceAnalyzer {
}; };
let ty = self.infer.as_ref()?[expr_id].clone(); let ty = self.infer.as_ref()?[expr_id].clone();
let environment = TraitEnvironment::lower(db, &self.resolver); let environment = self.trait_env(db);
Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
} }
pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> { pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> {
let pat_id = self.pat_id(pat)?; let pat_id = self.pat_id(pat)?;
let ty = self.infer.as_ref()?[pat_id].clone(); let ty = self.infer.as_ref()?[pat_id].clone();
let environment = TraitEnvironment::lower(db, &self.resolver); let environment = self.trait_env(db);
Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } }) Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
} }

View file

@ -26,4 +26,4 @@ ra_cfg = { path = "../ra_cfg" }
tt = { path = "../ra_tt", package = "ra_tt" } tt = { path = "../ra_tt", package = "ra_tt" }
[dev-dependencies] [dev-dependencies]
insta = "0.12.0" insta = "0.13.0"

View file

@ -7,10 +7,39 @@ use crate::{
visibility::Visibility, visibility::Visibility,
CrateId, ModuleDefId, ModuleId, CrateId, ModuleDefId, ModuleId,
}; };
use hir_expand::name::Name; use hir_expand::name::{known, Name};
use test_utils::tested_by;
const MAX_PATH_LEN: usize = 15; const MAX_PATH_LEN: usize = 15;
impl ModPath {
fn starts_with_std(&self) -> bool {
self.segments.first().filter(|&first_segment| first_segment == &known::std).is_some()
}
// When std library is present, paths starting with `std::`
// should be preferred over paths starting with `core::` and `alloc::`
fn should_start_with_std(&self) -> bool {
self.segments
.first()
.filter(|&first_segment| {
first_segment == &known::alloc || first_segment == &known::core
})
.is_some()
}
fn len(&self) -> usize {
self.segments.len()
+ match self.kind {
PathKind::Plain => 0,
PathKind::Super(i) => i as usize,
PathKind::Crate => 1,
PathKind::Abs => 0,
PathKind::DollarCrate(_) => 1,
}
}
}
// FIXME: handle local items // FIXME: handle local items
/// Find a path that can be used to refer to a certain item. This can depend on /// Find a path that can be used to refer to a certain item. This can depend on
@ -112,22 +141,26 @@ fn find_path_inner(
Some(path) => path, Some(path) => path,
}; };
path.segments.push(name); path.segments.push(name);
if path_len(&path) < best_path_len {
best_path_len = path_len(&path); let new_path =
best_path = Some(path); if let Some(best_path) = best_path { select_best_path(best_path, path) } else { path };
} best_path_len = new_path.len();
best_path = Some(new_path);
} }
best_path best_path
} }
fn path_len(path: &ModPath) -> usize { fn select_best_path(old_path: ModPath, new_path: ModPath) -> ModPath {
path.segments.len() if old_path.starts_with_std() && new_path.should_start_with_std() {
+ match path.kind { tested_by!(prefer_std_paths);
PathKind::Plain => 0, old_path
PathKind::Super(i) => i as usize, } else if new_path.starts_with_std() && old_path.should_start_with_std() {
PathKind::Crate => 1, tested_by!(prefer_std_paths);
PathKind::Abs => 0, new_path
PathKind::DollarCrate(_) => 1, } else if new_path.len() < old_path.len() {
new_path
} else {
old_path
} }
} }
@ -201,6 +234,7 @@ mod tests {
use hir_expand::hygiene::Hygiene; use hir_expand::hygiene::Hygiene;
use ra_db::fixture::WithFixture; use ra_db::fixture::WithFixture;
use ra_syntax::ast::AstNode; use ra_syntax::ast::AstNode;
use test_utils::covers;
/// `code` needs to contain a cursor marker; checks that `find_path` for the /// `code` needs to contain a cursor marker; checks that `find_path` for the
/// item the `path` refers to returns that same path when called from the /// item the `path` refers to returns that same path when called from the
@ -452,4 +486,41 @@ mod tests {
"#; "#;
check_found_path(code, "crate::foo::S"); check_found_path(code, "crate::foo::S");
} }
#[test]
fn prefer_std_paths_over_alloc() {
covers!(prefer_std_paths);
let code = r#"
//- /main.rs crate:main deps:alloc,std
<|>
//- /std.rs crate:std deps:alloc
pub mod sync {
pub use alloc::sync::Arc;
}
//- /zzz.rs crate:alloc
pub mod sync {
pub struct Arc;
}
"#;
check_found_path(code, "std::sync::Arc");
}
#[test]
fn prefer_shorter_paths_if_not_alloc() {
let code = r#"
//- /main.rs crate:main deps:megaalloc,std
<|>
//- /std.rs crate:std deps:megaalloc
pub mod sync {
pub use megaalloc::sync::Arc;
}
//- /zzz.rs crate:megaalloc
pub struct Arc;
"#;
check_found_path(code, "megaalloc::Arc");
}
} }

View file

@ -27,8 +27,16 @@ use crate::{
/// Data about a generic parameter (to a function, struct, impl, ...). /// Data about a generic parameter (to a function, struct, impl, ...).
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct TypeParamData { pub struct TypeParamData {
pub name: Name, pub name: Option<Name>,
pub default: Option<TypeRef>, pub default: Option<TypeRef>,
pub provenance: TypeParamProvenance,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum TypeParamProvenance {
TypeParamList,
TraitSelf,
ArgumentImplTrait,
} }
/// Data about the generic parameters of a function, struct, impl, etc. /// Data about the generic parameters of a function, struct, impl, etc.
@ -45,10 +53,17 @@ pub struct GenericParams {
/// associated type bindings like `Iterator<Item = u32>`. /// associated type bindings like `Iterator<Item = u32>`.
#[derive(Clone, PartialEq, Eq, Debug)] #[derive(Clone, PartialEq, Eq, Debug)]
pub struct WherePredicate { pub struct WherePredicate {
pub type_ref: TypeRef, pub target: WherePredicateTarget,
pub bound: TypeBound, pub bound: TypeBound,
} }
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum WherePredicateTarget {
TypeRef(TypeRef),
/// For desugared where predicates that can directly refer to a type param.
TypeParam(LocalTypeParamId),
}
type SourceMap = ArenaMap<LocalTypeParamId, Either<ast::TraitDef, ast::TypeParam>>; type SourceMap = ArenaMap<LocalTypeParamId, Either<ast::TraitDef, ast::TypeParam>>;
impl GenericParams { impl GenericParams {
@ -68,6 +83,11 @@ impl GenericParams {
GenericDefId::FunctionId(it) => { GenericDefId::FunctionId(it) => {
let src = it.lookup(db).source(db); let src = it.lookup(db).source(db);
generics.fill(&mut sm, &src.value); generics.fill(&mut sm, &src.value);
// lower `impl Trait` in arguments
let data = db.function_data(it);
for param in &data.params {
generics.fill_implicit_impl_trait_args(param);
}
src.file_id src.file_id
} }
GenericDefId::AdtId(AdtId::StructId(it)) => { GenericDefId::AdtId(AdtId::StructId(it)) => {
@ -89,8 +109,11 @@ impl GenericParams {
let src = it.lookup(db).source(db); let src = it.lookup(db).source(db);
// traits get the Self type as an implicit first type parameter // traits get the Self type as an implicit first type parameter
let self_param_id = let self_param_id = generics.types.alloc(TypeParamData {
generics.types.alloc(TypeParamData { name: name![Self], default: None }); name: Some(name![Self]),
default: None,
provenance: TypeParamProvenance::TraitSelf,
});
sm.insert(self_param_id, Either::Left(src.value.clone())); sm.insert(self_param_id, Either::Left(src.value.clone()));
// add super traits as bounds on Self // add super traits as bounds on Self
// i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
@ -142,7 +165,11 @@ impl GenericParams {
let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
// FIXME: Use `Path::from_src` // FIXME: Use `Path::from_src`
let default = type_param.default_type().map(TypeRef::from_ast); let default = type_param.default_type().map(TypeRef::from_ast);
let param = TypeParamData { name: name.clone(), default }; let param = TypeParamData {
name: Some(name.clone()),
default,
provenance: TypeParamProvenance::TypeParamList,
};
let param_id = self.types.alloc(param); let param_id = self.types.alloc(param);
sm.insert(param_id, Either::Right(type_param.clone())); sm.insert(param_id, Either::Right(type_param.clone()));
@ -170,11 +197,43 @@ impl GenericParams {
return; return;
} }
let bound = TypeBound::from_ast(bound); let bound = TypeBound::from_ast(bound);
self.where_predicates.push(WherePredicate { type_ref, bound }); self.where_predicates
.push(WherePredicate { target: WherePredicateTarget::TypeRef(type_ref), bound });
}
fn fill_implicit_impl_trait_args(&mut self, type_ref: &TypeRef) {
type_ref.walk(&mut |type_ref| {
if let TypeRef::ImplTrait(bounds) = type_ref {
let param = TypeParamData {
name: None,
default: None,
provenance: TypeParamProvenance::ArgumentImplTrait,
};
let param_id = self.types.alloc(param);
for bound in bounds {
self.where_predicates.push(WherePredicate {
target: WherePredicateTarget::TypeParam(param_id),
bound: bound.clone(),
});
}
}
});
} }
pub fn find_by_name(&self, name: &Name) -> Option<LocalTypeParamId> { pub fn find_by_name(&self, name: &Name) -> Option<LocalTypeParamId> {
self.types.iter().find_map(|(id, p)| if &p.name == name { Some(id) } else { None }) self.types
.iter()
.find_map(|(id, p)| if p.name.as_ref() == Some(name) { Some(id) } else { None })
}
pub fn find_trait_self_param(&self) -> Option<LocalTypeParamId> {
self.types.iter().find_map(|(id, p)| {
if p.provenance == TypeParamProvenance::TraitSelf {
Some(id)
} else {
None
}
})
} }
} }

View file

@ -13,4 +13,5 @@ test_utils::marks!(
macro_dollar_crate_self macro_dollar_crate_self
macro_dollar_crate_other macro_dollar_crate_other
infer_resolve_while_let infer_resolve_while_let
prefer_std_paths
); );

View file

@ -229,6 +229,46 @@ impl CrateDefMap {
self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow); self.resolve_path_fp_with_macro(db, ResolveMode::Other, original_module, path, shadow);
(res.resolved_def, res.segment_index) (res.resolved_def, res.segment_index)
} }
// FIXME: this can use some more human-readable format (ideally, an IR
// even), as this should be a great debugging aid.
pub fn dump(&self) -> String {
let mut buf = String::new();
go(&mut buf, self, "\ncrate", self.root);
return buf.trim().to_string();
fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) {
*buf += path;
*buf += "\n";
let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect();
entries.sort_by_key(|(name, _)| name.clone());
for (name, def) in entries {
*buf += &format!("{}:", name);
if def.types.is_some() {
*buf += " t";
}
if def.values.is_some() {
*buf += " v";
}
if def.macros.is_some() {
*buf += " m";
}
if def.is_none() {
*buf += " _";
}
*buf += "\n";
}
for (name, child) in map.modules[module].children.iter() {
let path = path.to_string() + &format!("::{}", name);
go(buf, map, &path, *child);
}
}
}
} }
impl ModuleData { impl ModuleData {

View file

@ -22,7 +22,7 @@ use crate::{
diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint,
raw, BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode, raw, BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode,
}, },
path::{ModPath, PathKind}, path::{ImportAlias, ModPath, PathKind},
per_ns::PerNs, per_ns::PerNs,
visibility::Visibility, visibility::Visibility,
AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern, AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern,
@ -438,7 +438,11 @@ where
} else { } else {
match import.path.segments.last() { match import.path.segments.last() {
Some(last_segment) => { Some(last_segment) => {
let name = import.alias.clone().unwrap_or_else(|| last_segment.clone()); let name = match &import.alias {
Some(ImportAlias::Alias(name)) => name.clone(),
Some(ImportAlias::Underscore) => last_segment.clone(), // FIXME rust-analyzer#2736
None => last_segment.clone(),
};
log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658

View file

@ -22,8 +22,11 @@ use ra_syntax::{
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{
attr::Attrs, db::DefDatabase, path::ModPath, visibility::RawVisibility, FileAstId, HirFileId, attr::Attrs,
InFile, db::DefDatabase,
path::{ImportAlias, ModPath},
visibility::RawVisibility,
FileAstId, HirFileId, InFile,
}; };
/// `RawItems` is a set of top-level items in a file (except for impls). /// `RawItems` is a set of top-level items in a file (except for impls).
@ -145,7 +148,7 @@ impl_arena_id!(Import);
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct ImportData { pub struct ImportData {
pub(super) path: ModPath, pub(super) path: ModPath,
pub(super) alias: Option<Name>, pub(super) alias: Option<ImportAlias>,
pub(super) is_glob: bool, pub(super) is_glob: bool,
pub(super) is_prelude: bool, pub(super) is_prelude: bool,
pub(super) is_extern_crate: bool, pub(super) is_extern_crate: bool,
@ -353,7 +356,11 @@ impl RawItemsCollector {
let path = ModPath::from_name_ref(&name_ref); let path = ModPath::from_name_ref(&name_ref);
let visibility = let visibility =
RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene); RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene);
let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); let alias = extern_crate.alias().map(|a| {
a.name()
.map(|it| it.as_name())
.map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a))
});
let attrs = self.parse_attrs(&extern_crate); let attrs = self.parse_attrs(&extern_crate);
// FIXME: cfg_attr // FIXME: cfg_attr
let is_macro_use = extern_crate.has_atom_attr("macro_use"); let is_macro_use = extern_crate.has_atom_attr("macro_use");

View file

@ -10,11 +10,10 @@ use insta::assert_snapshot;
use ra_db::{fixture::WithFixture, SourceDatabase}; use ra_db::{fixture::WithFixture, SourceDatabase};
use test_utils::covers; use test_utils::covers;
use crate::{db::DefDatabase, nameres::*, test_db::TestDB, LocalModuleId}; use crate::{db::DefDatabase, nameres::*, test_db::TestDB};
fn def_map(fixture: &str) -> String { fn def_map(fixture: &str) -> String {
let dm = compute_crate_def_map(fixture); compute_crate_def_map(fixture).dump()
render_crate_def_map(&dm)
} }
fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> { fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> {
@ -23,44 +22,6 @@ fn compute_crate_def_map(fixture: &str) -> Arc<CrateDefMap> {
db.crate_def_map(krate) db.crate_def_map(krate)
} }
fn render_crate_def_map(map: &CrateDefMap) -> String {
let mut buf = String::new();
go(&mut buf, map, "\ncrate", map.root);
return buf.trim().to_string();
fn go(buf: &mut String, map: &CrateDefMap, path: &str, module: LocalModuleId) {
*buf += path;
*buf += "\n";
let mut entries: Vec<_> = map.modules[module].scope.resolutions().collect();
entries.sort_by_key(|(name, _)| name.clone());
for (name, def) in entries {
*buf += &format!("{}:", name);
if def.types.is_some() {
*buf += " t";
}
if def.values.is_some() {
*buf += " v";
}
if def.macros.is_some() {
*buf += " m";
}
if def.is_none() {
*buf += " _";
}
*buf += "\n";
}
for (name, child) in map.modules[module].children.iter() {
let path = path.to_string() + &format!("::{}", name);
go(buf, map, &path, *child);
}
}
}
#[test] #[test]
fn crate_def_map_smoke_test() { fn crate_def_map_smoke_test() {
let map = def_map( let map = def_map(

View file

@ -16,13 +16,13 @@ use ra_syntax::ast;
use crate::{type_ref::TypeRef, InFile}; use crate::{type_ref::TypeRef, InFile};
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ModPath { pub struct ModPath {
pub kind: PathKind, pub kind: PathKind,
pub segments: Vec<Name>, pub segments: Vec<Name>,
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum PathKind { pub enum PathKind {
Plain, Plain,
/// `self::` is `Super(0)` /// `self::` is `Super(0)`
@ -34,6 +34,14 @@ pub enum PathKind {
DollarCrate(CrateId), DollarCrate(CrateId),
} }
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ImportAlias {
/// Unnamed alias, as in `use Foo as _;`
Underscore,
/// Named alias
Alias(Name),
}
impl ModPath { impl ModPath {
pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> { pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> {
lower::lower_path(path, hygiene).map(|it| it.mod_path) lower::lower_path(path, hygiene).map(|it| it.mod_path)
@ -57,7 +65,7 @@ impl ModPath {
pub(crate) fn expand_use_item( pub(crate) fn expand_use_item(
item_src: InFile<ast::UseItem>, item_src: InFile<ast::UseItem>,
hygiene: &Hygiene, hygiene: &Hygiene,
mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option<Name>), mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option<ImportAlias>),
) { ) {
if let Some(tree) = item_src.value.use_tree() { if let Some(tree) = item_src.value.use_tree() {
lower::lower_use_tree(None, tree, hygiene, &mut cb); lower::lower_use_tree(None, tree, hygiene, &mut cb);

View file

@ -4,20 +4,17 @@
use std::iter; use std::iter;
use either::Either; use either::Either;
use hir_expand::{ use hir_expand::{hygiene::Hygiene, name::AsName};
hygiene::Hygiene,
name::{AsName, Name},
};
use ra_syntax::ast::{self, NameOwner}; use ra_syntax::ast::{self, NameOwner};
use test_utils::tested_by; use test_utils::tested_by;
use crate::path::{ModPath, PathKind}; use crate::path::{ImportAlias, ModPath, PathKind};
pub(crate) fn lower_use_tree( pub(crate) fn lower_use_tree(
prefix: Option<ModPath>, prefix: Option<ModPath>,
tree: ast::UseTree, tree: ast::UseTree,
hygiene: &Hygiene, hygiene: &Hygiene,
cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option<Name>), cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option<ImportAlias>),
) { ) {
if let Some(use_tree_list) = tree.use_tree_list() { if let Some(use_tree_list) = tree.use_tree_list() {
let prefix = match tree.path() { let prefix = match tree.path() {
@ -34,7 +31,11 @@ pub(crate) fn lower_use_tree(
lower_use_tree(prefix.clone(), child_tree, hygiene, cb); lower_use_tree(prefix.clone(), child_tree, hygiene, cb);
} }
} else { } else {
let alias = tree.alias().and_then(|a| a.name()).map(|a| a.as_name()); let alias = tree.alias().map(|a| {
a.name()
.map(|it| it.as_name())
.map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a))
});
let is_glob = tree.has_star(); let is_glob = tree.has_star();
if let Some(ast_path) = tree.path() { if let Some(ast_path) = tree.path() {
// Handle self in a path. // Handle self in a path.

View file

@ -490,12 +490,14 @@ impl Scope {
} }
Scope::GenericParams { params, def } => { Scope::GenericParams { params, def } => {
for (local_id, param) in params.types.iter() { for (local_id, param) in params.types.iter() {
if let Some(name) = &param.name {
f( f(
param.name.clone(), name.clone(),
ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }), ScopeDef::GenericParam(TypeParamId { local_id, parent: *def }),
) )
} }
} }
}
Scope::ImplBlockScope(i) => { Scope::ImplBlockScope(i) => {
f(name![Self], ScopeDef::ImplSelfType((*i).into())); f(name![Self], ScopeDef::ImplSelfType((*i).into()));
} }

View file

@ -124,6 +124,48 @@ impl TypeRef {
pub(crate) fn unit() -> TypeRef { pub(crate) fn unit() -> TypeRef {
TypeRef::Tuple(Vec::new()) TypeRef::Tuple(Vec::new())
} }
pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) {
go(self, f);
fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) {
f(type_ref);
match type_ref {
TypeRef::Fn(types) | TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)),
TypeRef::RawPtr(type_ref, _)
| TypeRef::Reference(type_ref, _)
| TypeRef::Array(type_ref)
| TypeRef::Slice(type_ref) => go(&type_ref, f),
TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
for bound in bounds {
match bound {
TypeBound::Path(path) => go_path(path, f),
TypeBound::Error => (),
}
}
}
TypeRef::Path(path) => go_path(path, f),
TypeRef::Never | TypeRef::Placeholder | TypeRef::Error => {}
};
}
fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef)) {
if let Some(type_ref) = path.type_anchor() {
go(type_ref, f);
}
for segment in path.segments().iter() {
if let Some(args_and_bindings) = segment.args_and_bindings {
for arg in &args_and_bindings.args {
let crate::path::GenericArg::Type(type_ref) = arg;
go(type_ref, f);
}
for (_, type_ref) in &args_and_bindings.bindings {
go(type_ref, f);
}
}
}
}
}
} }
pub(crate) fn type_bounds_from_ast(type_bounds_opt: Option<ast::TypeBoundList>) -> Vec<TypeBound> { pub(crate) fn type_bounds_from_ast(type_bounds_opt: Option<ast::TypeBoundList>) -> Vec<TypeBound> {

View file

@ -141,6 +141,8 @@ pub mod known {
macro_rules, macro_rules,
// Components of known path (value or mod name) // Components of known path (value or mod name)
std, std,
core,
alloc,
iter, iter,
ops, ops,
future, future,

View file

@ -21,11 +21,11 @@ ra_prof = { path = "../ra_prof" }
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
test_utils = { path = "../test_utils" } test_utils = { path = "../test_utils" }
chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" }
chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" }
chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "ff65b5ac9860f3c36bd892c865ab23d5ff0bbae5" } chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f302a1f571b3ca418f7c5aa639a144a34f75" }
lalrpop-intern = "0.15.1" lalrpop-intern = "0.15.1"
[dev-dependencies] [dev-dependencies]
insta = "0.12.0" insta = "0.13.0"

View file

@ -3,17 +3,18 @@
use std::sync::Arc; use std::sync::Arc;
use hir_def::{ use hir_def::{
db::DefDatabase, DefWithBodyId, GenericDefId, ImplId, LocalStructFieldId, TraitId, VariantId, db::DefDatabase, DefWithBodyId, GenericDefId, ImplId, LocalStructFieldId, TraitId, TypeParamId,
VariantId,
}; };
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
use ra_db::{salsa, CrateId}; use ra_db::{impl_intern_key, salsa, CrateId};
use ra_prof::profile; use ra_prof::profile;
use crate::{ use crate::{
method_resolution::CrateImplBlocks, method_resolution::CrateImplBlocks,
traits::{chalk, AssocTyValue, Impl}, traits::{chalk, AssocTyValue, Impl},
CallableDef, FnSig, GenericPredicate, InferenceResult, Substs, TraitRef, Ty, TyDefId, TypeCtor, Binders, CallableDef, GenericPredicate, InferenceResult, PolyFnSig, Substs, TraitRef, Ty,
ValueTyDefId, TyDefId, TypeCtor, ValueTyDefId,
}; };
#[salsa::query_group(HirDatabaseStorage)] #[salsa::query_group(HirDatabaseStorage)]
@ -27,34 +28,33 @@ pub trait HirDatabase: DefDatabase {
#[salsa::invoke(crate::lower::ty_query)] #[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)] #[salsa::cycle(crate::lower::ty_recover)]
fn ty(&self, def: TyDefId) -> Ty; fn ty(&self, def: TyDefId) -> Binders<Ty>;
#[salsa::invoke(crate::lower::value_ty_query)] #[salsa::invoke(crate::lower::value_ty_query)]
fn value_ty(&self, def: ValueTyDefId) -> Ty; fn value_ty(&self, def: ValueTyDefId) -> Binders<Ty>;
#[salsa::invoke(crate::lower::impl_self_ty_query)] #[salsa::invoke(crate::lower::impl_self_ty_query)]
#[salsa::cycle(crate::lower::impl_self_ty_recover)] #[salsa::cycle(crate::lower::impl_self_ty_recover)]
fn impl_self_ty(&self, def: ImplId) -> Ty; fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
#[salsa::invoke(crate::lower::impl_trait_query)] #[salsa::invoke(crate::lower::impl_trait_query)]
fn impl_trait(&self, def: ImplId) -> Option<TraitRef>; fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
#[salsa::invoke(crate::lower::field_types_query)] #[salsa::invoke(crate::lower::field_types_query)]
fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Ty>>; fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalStructFieldId, Binders<Ty>>>;
#[salsa::invoke(crate::callable_item_sig)] #[salsa::invoke(crate::callable_item_sig)]
fn callable_item_signature(&self, def: CallableDef) -> FnSig; fn callable_item_signature(&self, def: CallableDef) -> PolyFnSig;
#[salsa::invoke(crate::lower::generic_predicates_for_param_query)] #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
#[salsa::cycle(crate::lower::generic_predicates_for_param_recover)] #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
fn generic_predicates_for_param( fn generic_predicates_for_param(
&self, &self,
def: GenericDefId, param_id: TypeParamId,
param_idx: u32, ) -> Arc<[Binders<GenericPredicate>]>;
) -> Arc<[GenericPredicate]>;
#[salsa::invoke(crate::lower::generic_predicates_query)] #[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> Arc<[GenericPredicate]>; fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<GenericPredicate>]>;
#[salsa::invoke(crate::lower::generic_defaults_query)] #[salsa::invoke(crate::lower::generic_defaults_query)]
fn generic_defaults(&self, def: GenericDefId) -> Substs; fn generic_defaults(&self, def: GenericDefId) -> Substs;
@ -77,6 +77,8 @@ pub trait HirDatabase: DefDatabase {
#[salsa::interned] #[salsa::interned]
fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::TypeCtorId; fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::TypeCtorId;
#[salsa::interned] #[salsa::interned]
fn intern_type_param_id(&self, param_id: TypeParamId) -> GlobalTypeParamId;
#[salsa::interned]
fn intern_chalk_impl(&self, impl_: Impl) -> crate::traits::GlobalImplId; fn intern_chalk_impl(&self, impl_: Impl) -> crate::traits::GlobalImplId;
#[salsa::interned] #[salsa::interned]
fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> crate::traits::AssocTyValueId; fn intern_assoc_ty_value(&self, assoc_ty_value: AssocTyValue) -> crate::traits::AssocTyValueId;
@ -117,3 +119,7 @@ fn infer(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
fn hir_database_is_object_safe() { fn hir_database_is_object_safe() {
fn _assert_object_safe(_: &dyn HirDatabase) {} fn _assert_object_safe(_: &dyn HirDatabase) {}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct GlobalTypeParamId(salsa::InternId);
impl_intern_key!(GlobalTypeParamId);

View file

@ -34,7 +34,6 @@ use hir_expand::{diagnostics::DiagnosticSink, name::name};
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::SmolStr; use ra_syntax::SmolStr;
use test_utils::tested_by;
use super::{ use super::{
primitive::{FloatTy, IntTy}, primitive::{FloatTy, IntTy},
@ -42,7 +41,9 @@ use super::{
ApplicationTy, GenericPredicate, InEnvironment, ProjectionTy, Substs, TraitEnvironment, ApplicationTy, GenericPredicate, InEnvironment, ProjectionTy, Substs, TraitEnvironment,
TraitRef, Ty, TypeCtor, TypeWalk, Uncertain, TraitRef, Ty, TypeCtor, TypeWalk, Uncertain,
}; };
use crate::{db::HirDatabase, infer::diagnostics::InferenceDiagnostic}; use crate::{
db::HirDatabase, infer::diagnostics::InferenceDiagnostic, lower::ImplTraitLoweringMode,
};
pub(crate) use unify::unify; pub(crate) use unify::unify;
@ -271,38 +272,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.result.diagnostics.push(diagnostic); self.result.diagnostics.push(diagnostic);
} }
fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { fn make_ty_with_mode(
let ty = Ty::from_hir( &mut self,
self.db, type_ref: &TypeRef,
impl_trait_mode: ImplTraitLoweringMode,
) -> Ty {
// FIXME use right resolver for block // FIXME use right resolver for block
&self.resolver, let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
type_ref, .with_impl_trait_mode(impl_trait_mode);
); let ty = Ty::from_hir(&ctx, type_ref);
let ty = self.insert_type_vars(ty); let ty = self.insert_type_vars(ty);
self.normalize_associated_types_in(ty) self.normalize_associated_types_in(ty)
} }
/// Replaces `impl Trait` in `ty` by type variables and obligations for fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
/// those variables. This is done for function arguments when calling a self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed)
/// function, and for return types when inside the function body, i.e. in
/// the cases where the `impl Trait` is 'transparent'. In other cases, `impl
/// Trait` is represented by `Ty::Opaque`.
fn insert_vars_for_impl_trait(&mut self, ty: Ty) -> Ty {
ty.fold(&mut |ty| match ty {
Ty::Opaque(preds) => {
tested_by!(insert_vars_for_impl_trait);
let var = self.table.new_type_var();
let var_subst = Substs::builder(1).push(var.clone()).build();
self.obligations.extend(
preds
.iter()
.map(|pred| pred.clone().subst_bound_vars(&var_subst))
.filter_map(Obligation::from_predicate),
);
var
}
_ => ty,
})
} }
/// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
@ -446,19 +430,20 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
None => return (Ty::Unknown, None), None => return (Ty::Unknown, None),
}; };
let resolver = &self.resolver; let resolver = &self.resolver;
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
// FIXME: this should resolve assoc items as well, see this example: // FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
match resolver.resolve_path_in_type_ns_fully(self.db, path.mod_path()) { match resolver.resolve_path_in_type_ns_fully(self.db, path.mod_path()) {
Some(TypeNs::AdtId(AdtId::StructId(strukt))) => { Some(TypeNs::AdtId(AdtId::StructId(strukt))) => {
let substs = Ty::substs_from_path(self.db, resolver, path, strukt.into()); let substs = Ty::substs_from_path(&ctx, path, strukt.into());
let ty = self.db.ty(strukt.into()); let ty = self.db.ty(strukt.into());
let ty = self.insert_type_vars(ty.apply_substs(substs)); let ty = self.insert_type_vars(ty.subst(&substs));
(ty, Some(strukt.into())) (ty, Some(strukt.into()))
} }
Some(TypeNs::EnumVariantId(var)) => { Some(TypeNs::EnumVariantId(var)) => {
let substs = Ty::substs_from_path(self.db, resolver, path, var.into()); let substs = Ty::substs_from_path(&ctx, path, var.into());
let ty = self.db.ty(var.parent.into()); let ty = self.db.ty(var.parent.into());
let ty = self.insert_type_vars(ty.apply_substs(substs)); let ty = self.insert_type_vars(ty.subst(&substs));
(ty, Some(var.into())) (ty, Some(var.into()))
} }
Some(_) | None => (Ty::Unknown, None), Some(_) | None => (Ty::Unknown, None),
@ -471,13 +456,18 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn collect_fn(&mut self, data: &FunctionData) { fn collect_fn(&mut self, data: &FunctionData) {
let body = Arc::clone(&self.body); // avoid borrow checker problem let body = Arc::clone(&self.body); // avoid borrow checker problem
for (type_ref, pat) in data.params.iter().zip(body.params.iter()) { let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver)
let ty = self.make_ty(type_ref); .with_impl_trait_mode(ImplTraitLoweringMode::Param);
let param_tys =
data.params.iter().map(|type_ref| Ty::from_hir(&ctx, type_ref)).collect::<Vec<_>>();
for (ty, pat) in param_tys.into_iter().zip(body.params.iter()) {
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
self.infer_pat(*pat, &ty, BindingMode::default()); self.infer_pat(*pat, &ty, BindingMode::default());
} }
let return_ty = self.make_ty(&data.ret_type); let return_ty = self.make_ty_with_mode(&data.ret_type, ImplTraitLoweringMode::Disallowed); // FIXME implement RPIT
self.return_ty = self.insert_vars_for_impl_trait(return_ty); self.return_ty = return_ty;
} }
fn infer_body(&mut self) { fn infer_body(&mut self) {

View file

@ -57,8 +57,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let trait_ref = db.impl_trait(impl_id)?; let trait_ref = db.impl_trait(impl_id)?;
// `CoerseUnsized` has one generic parameter for the target type. // `CoerseUnsized` has one generic parameter for the target type.
let cur_from_ty = trait_ref.substs.0.get(0)?; let cur_from_ty = trait_ref.value.substs.0.get(0)?;
let cur_to_ty = trait_ref.substs.0.get(1)?; let cur_to_ty = trait_ref.value.substs.0.get(1)?;
match (&cur_from_ty, cur_to_ty) { match (&cur_from_ty, cur_to_ty) {
(ty_app!(ctor1, st1), ty_app!(ctor2, st2)) => { (ty_app!(ctor1, st1), ty_app!(ctor2, st2)) => {
@ -66,9 +66,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// This works for smart-pointer-like coercion, which covers all impls from std. // This works for smart-pointer-like coercion, which covers all impls from std.
st1.iter().zip(st2.iter()).enumerate().find_map(|(i, (ty1, ty2))| { st1.iter().zip(st2.iter()).enumerate().find_map(|(i, (ty1, ty2))| {
match (ty1, ty2) { match (ty1, ty2) {
(Ty::Param { idx: p1, .. }, Ty::Param { idx: p2, .. }) (Ty::Bound(idx1), Ty::Bound(idx2)) if idx1 != idx2 => {
if p1 != p2 =>
{
Some(((*ctor1, *ctor2), i)) Some(((*ctor1, *ctor2), i))
} }
_ => None, _ => None,
@ -256,8 +254,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let unsize_generic_index = { let unsize_generic_index = {
let mut index = None; let mut index = None;
let mut multiple_param = false; let mut multiple_param = false;
field_tys[last_field_id].walk(&mut |ty| match ty { field_tys[last_field_id].value.walk(&mut |ty| match ty {
&Ty::Param { idx, .. } => { &Ty::Bound(idx) => {
if index.is_none() { if index.is_none() {
index = Some(idx); index = Some(idx);
} else if Some(idx) != index { } else if Some(idx) != index {
@ -276,10 +274,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// Check other fields do not involve it. // Check other fields do not involve it.
let mut multiple_used = false; let mut multiple_used = false;
fields.for_each(|(field_id, _data)| { fields.for_each(|(field_id, _data)| {
field_tys[field_id].walk(&mut |ty| match ty { field_tys[field_id].value.walk(&mut |ty| match ty {
&Ty::Param { idx, .. } if idx == unsize_generic_index => { &Ty::Bound(idx) if idx == unsize_generic_index => multiple_used = true,
multiple_used = true
}
_ => {} _ => {}
}) })
}); });

View file

@ -10,7 +10,7 @@ use hir_def::{
resolver::resolver_for_expr, resolver::resolver_for_expr,
AdtId, AssocContainerId, Lookup, StructFieldId, AdtId, AssocContainerId, Lookup, StructFieldId,
}; };
use hir_expand::name::{name, Name}; use hir_expand::name::Name;
use ra_syntax::ast::RangeOp; use ra_syntax::ast::RangeOp;
use crate::{ use crate::{
@ -19,8 +19,8 @@ use crate::{
method_resolution, op, method_resolution, op,
traits::InEnvironment, traits::InEnvironment,
utils::{generics, variant_data, Generics}, utils::{generics, variant_data, Generics},
ApplicationTy, CallableDef, InferTy, IntTy, Mutability, Obligation, Substs, TraitRef, Ty, ApplicationTy, Binders, CallableDef, InferTy, IntTy, Mutability, Obligation, Substs, TraitRef,
TypeCtor, TypeWalk, Uncertain, Ty, TypeCtor, Uncertain,
}; };
use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch}; use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch};
@ -236,8 +236,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.result.record_field_resolutions.insert(field.expr, field_def); self.result.record_field_resolutions.insert(field.expr, field_def);
} }
let field_ty = field_def let field_ty = field_def
.map_or(Ty::Unknown, |it| field_types[it.local_id].clone()) .map_or(Ty::Unknown, |it| field_types[it.local_id].clone().subst(&substs));
.subst(&substs);
self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
} }
if let Some(expr) = spread { if let Some(expr) = spread {
@ -588,10 +587,10 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
self.write_method_resolution(tgt_expr, func); self.write_method_resolution(tgt_expr, func);
(ty, self.db.value_ty(func.into()), Some(generics(self.db, func.into()))) (ty, self.db.value_ty(func.into()), Some(generics(self.db, func.into())))
} }
None => (receiver_ty, Ty::Unknown, None), None => (receiver_ty, Binders::new(0, Ty::Unknown), None),
}; };
let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty); let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty);
let method_ty = method_ty.apply_substs(substs); let method_ty = method_ty.subst(&substs);
let method_ty = self.insert_type_vars(method_ty); let method_ty = self.insert_type_vars(method_ty);
self.register_obligations_for_call(&method_ty); self.register_obligations_for_call(&method_ty);
let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) { let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) {
@ -635,7 +634,6 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
continue; continue;
} }
let param_ty = self.insert_vars_for_impl_trait(param_ty);
let param_ty = self.normalize_associated_types_in(param_ty); let param_ty = self.normalize_associated_types_in(param_ty);
self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone())); self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone()));
} }
@ -648,13 +646,15 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
generic_args: Option<&GenericArgs>, generic_args: Option<&GenericArgs>,
receiver_ty: &Ty, receiver_ty: &Ty,
) -> Substs { ) -> Substs {
let (total_len, _parent_len, child_len) = let (parent_params, self_params, type_params, impl_trait_params) =
def_generics.as_ref().map_or((0, 0, 0), |g| g.len_split()); def_generics.as_ref().map_or((0, 0, 0, 0), |g| g.provenance_split());
assert_eq!(self_params, 0); // method shouldn't have another Self param
let total_len = parent_params + type_params + impl_trait_params;
let mut substs = Vec::with_capacity(total_len); let mut substs = Vec::with_capacity(total_len);
// Parent arguments are unknown, except for the receiver type // Parent arguments are unknown, except for the receiver type
if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) { if let Some(parent_generics) = def_generics.as_ref().map(|p| p.iter_parent()) {
for (_id, param) in parent_generics { for (_id, param) in parent_generics {
if param.name == name![Self] { if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf {
substs.push(receiver_ty.clone()); substs.push(receiver_ty.clone());
} else { } else {
substs.push(Ty::Unknown); substs.push(Ty::Unknown);
@ -664,7 +664,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// handle provided type arguments // handle provided type arguments
if let Some(generic_args) = generic_args { if let Some(generic_args) = generic_args {
// if args are provided, it should be all of them, but we can't rely on that // if args are provided, it should be all of them, but we can't rely on that
for arg in generic_args.args.iter().take(child_len) { for arg in generic_args.args.iter().take(type_params) {
match arg { match arg {
GenericArg::Type(type_ref) => { GenericArg::Type(type_ref) => {
let ty = self.make_ty(type_ref); let ty = self.make_ty(type_ref);

View file

@ -12,7 +12,7 @@ use hir_expand::name::Name;
use test_utils::tested_by; use test_utils::tested_by;
use super::{BindingMode, InferenceContext}; use super::{BindingMode, InferenceContext};
use crate::{db::HirDatabase, utils::variant_data, Substs, Ty, TypeCtor, TypeWalk}; use crate::{db::HirDatabase, utils::variant_data, Substs, Ty, TypeCtor};
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a, D: HirDatabase> InferenceContext<'a, D> {
fn infer_tuple_struct_pat( fn infer_tuple_struct_pat(
@ -34,8 +34,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let expected_ty = var_data let expected_ty = var_data
.as_ref() .as_ref()
.and_then(|d| d.field(&Name::new_tuple_field(i))) .and_then(|d| d.field(&Name::new_tuple_field(i)))
.map_or(Ty::Unknown, |field| field_tys[field].clone()) .map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
.subst(&substs);
let expected_ty = self.normalize_associated_types_in(expected_ty); let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat, &expected_ty, default_bm); self.infer_pat(subpat, &expected_ty, default_bm);
} }
@ -65,7 +64,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
for subpat in subpats { for subpat in subpats {
let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name)); let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
let expected_ty = let expected_ty =
matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone()).subst(&substs); matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
let expected_ty = self.normalize_associated_types_in(expected_ty); let expected_ty = self.normalize_associated_types_in(expected_ty);
self.infer_pat(subpat.pat, &expected_ty, default_bm); self.infer_pat(subpat.pat, &expected_ty, default_bm);
} }

View file

@ -9,9 +9,9 @@ use hir_def::{
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use crate::{db::HirDatabase, method_resolution, Substs, Ty, TypeWalk, ValueTyDefId}; use crate::{db::HirDatabase, method_resolution, Substs, Ty, ValueTyDefId};
use super::{ExprOrPatId, InferenceContext, TraitEnvironment, TraitRef}; use super::{ExprOrPatId, InferenceContext, TraitRef};
impl<'a, D: HirDatabase> InferenceContext<'a, D> { impl<'a, D: HirDatabase> InferenceContext<'a, D> {
pub(super) fn infer_path( pub(super) fn infer_path(
@ -39,7 +39,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
let ty = self.make_ty(type_ref); let ty = self.make_ty(type_ref);
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1); let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
let ty = Ty::from_type_relative_path(self.db, resolver, ty, remaining_segments_for_ty); let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
let ty = Ty::from_type_relative_path(&ctx, ty, remaining_segments_for_ty);
self.resolve_ty_assoc_item( self.resolve_ty_assoc_item(
ty, ty,
&path.segments().last().expect("path had at least one segment").name, &path.segments().last().expect("path had at least one segment").name,
@ -69,12 +70,16 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
ValueNs::EnumVariantId(it) => it.into(), ValueNs::EnumVariantId(it) => it.into(),
}; };
let mut ty = self.db.value_ty(typable); let ty = self.db.value_ty(typable);
if let Some(self_subst) = self_subst { // self_subst is just for the parent
ty = ty.subst(&self_subst); let parent_substs = self_subst.unwrap_or_else(Substs::empty);
} let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let substs = Ty::substs_from_path(self.db, &self.resolver, path, typable); let substs = Ty::substs_from_path(&ctx, path, typable);
let ty = ty.subst(&substs); let full_substs = Substs::builder(substs.len())
.use_parent_substs(&parent_substs)
.fill(substs.0[parent_substs.len()..].iter().cloned())
.build();
let ty = ty.subst(&full_substs);
Some(ty) Some(ty)
} }
@ -98,13 +103,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
(TypeNs::TraitId(trait_), true) => { (TypeNs::TraitId(trait_), true) => {
let segment = let segment =
remaining_segments.last().expect("there should be at least one segment here"); remaining_segments.last().expect("there should be at least one segment here");
let trait_ref = TraitRef::from_resolved_path( let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
self.db, let trait_ref =
&self.resolver, TraitRef::from_resolved_path(&ctx, trait_.into(), resolved_segment, None);
trait_.into(),
resolved_segment,
None,
);
self.resolve_trait_assoc_item(trait_ref, segment, id) self.resolve_trait_assoc_item(trait_ref, segment, id)
} }
(def, _) => { (def, _) => {
@ -114,9 +115,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
// as Iterator>::Item::default`) // as Iterator>::Item::default`)
let remaining_segments_for_ty = let remaining_segments_for_ty =
remaining_segments.take(remaining_segments.len() - 1); remaining_segments.take(remaining_segments.len() - 1);
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let ty = Ty::from_partly_resolved_hir_path( let ty = Ty::from_partly_resolved_hir_path(
self.db, &ctx,
&self.resolver,
def, def,
resolved_segment, resolved_segment,
remaining_segments_for_ty, remaining_segments_for_ty,
@ -173,13 +174,9 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
AssocItemId::ConstId(c) => ValueNs::ConstId(c), AssocItemId::ConstId(c) => ValueNs::ConstId(c),
AssocItemId::TypeAliasId(_) => unreachable!(), AssocItemId::TypeAliasId(_) => unreachable!(),
}; };
let substs = Substs::build_for_def(self.db, item)
.use_parent_substs(&trait_ref.substs)
.fill_with_params()
.build();
self.write_assoc_resolution(id, item); self.write_assoc_resolution(id, item);
Some((def, Some(substs))) Some((def, Some(trait_ref.substs)))
} }
fn resolve_ty_assoc_item( fn resolve_ty_assoc_item(
@ -193,14 +190,13 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
} }
let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone()); let canonical_ty = self.canonicalizer().canonicalize_ty(ty.clone());
let env = TraitEnvironment::lower(self.db, &self.resolver);
let krate = self.resolver.krate()?; let krate = self.resolver.krate()?;
let traits_in_scope = self.resolver.traits_in_scope(self.db); let traits_in_scope = self.resolver.traits_in_scope(self.db);
method_resolution::iterate_method_candidates( method_resolution::iterate_method_candidates(
&canonical_ty.value, &canonical_ty.value,
self.db, self.db,
env, self.trait_env.clone(),
krate, krate,
&traits_in_scope, &traits_in_scope,
Some(name), Some(name),
@ -219,12 +215,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
.fill(iter::repeat_with(|| self.table.new_type_var())) .fill(iter::repeat_with(|| self.table.new_type_var()))
.build(); .build();
let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs); let impl_self_ty = self.db.impl_self_ty(impl_id).subst(&impl_substs);
let substs = Substs::build_for_def(self.db, item)
.use_parent_substs(&impl_substs)
.fill_with_params()
.build();
self.unify(&impl_self_ty, &ty); self.unify(&impl_self_ty, &ty);
Some(substs) Some(impl_substs)
} }
AssocContainerId::TraitId(trait_) => { AssocContainerId::TraitId(trait_) => {
// we're picking this method // we're picking this method
@ -232,15 +224,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
.push(ty.clone()) .push(ty.clone())
.fill(std::iter::repeat_with(|| self.table.new_type_var())) .fill(std::iter::repeat_with(|| self.table.new_type_var()))
.build(); .build();
let substs = Substs::build_for_def(self.db, item)
.use_parent_substs(&trait_substs)
.fill_with_params()
.build();
self.obligations.push(super::Obligation::Trait(TraitRef { self.obligations.push(super::Obligation::Trait(TraitRef {
trait_, trait_,
substs: trait_substs, substs: trait_substs.clone(),
})); }));
Some(substs) Some(trait_substs)
} }
AssocContainerId::ContainerId(_) => None, AssocContainerId::ContainerId(_) => None,
}; };

View file

@ -44,8 +44,8 @@ use std::sync::Arc;
use std::{fmt, iter, mem}; use std::{fmt, iter, mem};
use hir_def::{ use hir_def::{
expr::ExprId, type_ref::Mutability, AdtId, AssocContainerId, DefWithBodyId, GenericDefId, expr::ExprId, generics::TypeParamProvenance, type_ref::Mutability, AdtId, AssocContainerId,
HasModule, Lookup, TraitId, TypeAliasId, DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId, TypeParamId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use ra_db::{impl_intern_key, salsa, CrateId}; use ra_db::{impl_intern_key, salsa, CrateId};
@ -60,7 +60,9 @@ use display::{HirDisplay, HirFormatter};
pub use autoderef::autoderef; pub use autoderef::autoderef;
pub use infer::{do_infer_query, InferTy, InferenceResult}; pub use infer::{do_infer_query, InferTy, InferenceResult};
pub use lower::CallableDef; pub use lower::CallableDef;
pub use lower::{callable_item_sig, TyDefId, ValueTyDefId}; pub use lower::{
callable_item_sig, ImplTraitLoweringMode, TyDefId, TyLoweringContext, ValueTyDefId,
};
pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment};
/// A type constructor or type name: this might be something like the primitive /// A type constructor or type name: this might be something like the primitive
@ -285,22 +287,20 @@ pub enum Ty {
/// trait and all its parameters are fully known. /// trait and all its parameters are fully known.
Projection(ProjectionTy), Projection(ProjectionTy),
/// A type parameter; for example, `T` in `fn f<T>(x: T) {} /// A placeholder for a type parameter; for example, `T` in `fn f<T>(x: T)
Param { /// {}` when we're type-checking the body of that function. In this
/// The index of the parameter (starting with parameters from the /// situation, we know this stands for *some* type, but don't know the exact
/// surrounding impl, then the current function). /// type.
idx: u32, Param(TypeParamId),
/// The name of the parameter, for displaying.
// FIXME get rid of this
name: Name,
},
/// A bound type variable. Used during trait resolution to represent Chalk /// A bound type variable. This is used in various places: when representing
/// variables, and in `Dyn` and `Opaque` bounds to represent the `Self` type. /// some polymorphic type like the type of function `fn f<T>`, the type
/// parameters get turned into variables; during trait resolution, inference
/// variables get turned into bound variables and back; and in `Dyn` the
/// `Self` type is represented with a bound variable as well.
Bound(u32), Bound(u32),
/// A type variable used during type checking. Not to be confused with a /// A type variable used during type checking.
/// type parameter.
Infer(InferTy), Infer(InferTy),
/// A trait object (`dyn Trait` or bare `Trait` in pre-2018 Rust). /// A trait object (`dyn Trait` or bare `Trait` in pre-2018 Rust).
@ -364,15 +364,19 @@ impl Substs {
} }
/// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
pub(crate) fn identity(generic_params: &Generics) -> Substs { pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs {
Substs( Substs(generic_params.iter().map(|(id, _)| Ty::Param(id)).collect())
generic_params.iter().map(|(idx, p)| Ty::Param { idx, name: p.name.clone() }).collect(), }
)
/// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
pub fn type_params(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> Substs {
let params = generics(db, def.into());
Substs::type_params_for_generics(&params)
} }
/// Return Substs that replace each parameter by a bound variable. /// Return Substs that replace each parameter by a bound variable.
pub(crate) fn bound_vars(generic_params: &Generics) -> Substs { pub(crate) fn bound_vars(generic_params: &Generics) -> Substs {
Substs(generic_params.iter().map(|(idx, _p)| Ty::Bound(idx)).collect()) Substs(generic_params.iter().enumerate().map(|(idx, _)| Ty::Bound(idx as u32)).collect())
} }
pub fn build_for_def(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder { pub fn build_for_def(db: &impl HirDatabase, def: impl Into<GenericDefId>) -> SubstsBuilder {
@ -420,11 +424,6 @@ impl SubstsBuilder {
self.fill((starting_from..).map(Ty::Bound)) self.fill((starting_from..).map(Ty::Bound))
} }
pub fn fill_with_params(self) -> Self {
let start = self.vec.len() as u32;
self.fill((start..).map(|idx| Ty::Param { idx, name: Name::missing() }))
}
pub fn fill_with_unknown(self) -> Self { pub fn fill_with_unknown(self) -> Self {
self.fill(iter::repeat(Ty::Unknown)) self.fill(iter::repeat(Ty::Unknown))
} }
@ -451,6 +450,32 @@ impl Deref for Substs {
} }
} }
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Binders<T> {
pub num_binders: usize,
pub value: T,
}
impl<T> Binders<T> {
pub fn new(num_binders: usize, value: T) -> Self {
Self { num_binders, value }
}
}
impl<T: TypeWalk> Binders<T> {
/// Substitutes all variables.
pub fn subst(self, subst: &Substs) -> T {
assert_eq!(subst.len(), self.num_binders);
self.value.subst_bound_vars(subst)
}
/// Substitutes just a prefix of the variables (shifting the rest).
pub fn subst_prefix(self, subst: &Substs) -> Binders<T> {
assert!(subst.len() < self.num_binders);
Binders::new(self.num_binders - subst.len(), self.value.subst_bound_vars(subst))
}
}
/// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait. /// A trait with type parameters. This includes the `Self`, so this represents a concrete type implementing the trait.
/// Name to be bikeshedded: TraitBound? TraitImplements? /// Name to be bikeshedded: TraitBound? TraitImplements?
#[derive(Clone, PartialEq, Eq, Debug, Hash)] #[derive(Clone, PartialEq, Eq, Debug, Hash)]
@ -551,6 +576,9 @@ pub struct FnSig {
params_and_return: Arc<[Ty]>, params_and_return: Arc<[Ty]>,
} }
/// A polymorphic function signature.
pub type PolyFnSig = Binders<FnSig>;
impl FnSig { impl FnSig {
pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty) -> FnSig { pub fn from_params_and_return(mut params: Vec<Ty>, ret: Ty) -> FnSig {
params.push(ret); params.push(ret);
@ -730,22 +758,7 @@ pub trait TypeWalk {
self self
} }
/// Replaces type parameters in this type using the given `Substs`. (So e.g. /// Substitutes `Ty::Bound` vars with the given substitution.
/// if `self` is `&[T]`, where type parameter T has index 0, and the
/// `Substs` contain `u32` at index 0, we'll have `&[u32]` afterwards.)
fn subst(self, substs: &Substs) -> Self
where
Self: Sized,
{
self.fold(&mut |ty| match ty {
Ty::Param { idx, name } => {
substs.get(idx as usize).cloned().unwrap_or(Ty::Param { idx, name })
}
ty => ty,
})
}
/// Substitutes `Ty::Bound` vars (as opposed to type parameters).
fn subst_bound_vars(mut self, substs: &Substs) -> Self fn subst_bound_vars(mut self, substs: &Substs) -> Self
where where
Self: Sized, Self: Sized,
@ -755,6 +768,9 @@ pub trait TypeWalk {
&mut Ty::Bound(idx) => { &mut Ty::Bound(idx) => {
if idx as usize >= binders && (idx as usize - binders) < substs.len() { if idx as usize >= binders && (idx as usize - binders) < substs.len() {
*ty = substs.0[idx as usize - binders].clone(); *ty = substs.0[idx as usize - binders].clone();
} else if idx as usize >= binders + substs.len() {
// shift free binders
*ty = Ty::Bound(idx - substs.len() as u32);
} }
} }
_ => {} _ => {}
@ -847,7 +863,7 @@ impl HirDisplay for ApplicationTy {
} }
TypeCtor::Array => { TypeCtor::Array => {
let t = self.parameters.as_single(); let t = self.parameters.as_single();
write!(f, "[{};_]", t.display(f.db))?; write!(f, "[{}; _]", t.display(f.db))?;
} }
TypeCtor::RawPtr(m) => { TypeCtor::RawPtr(m) => {
let t = self.parameters.as_single(); let t = self.parameters.as_single();
@ -880,7 +896,7 @@ impl HirDisplay for ApplicationTy {
write!(f, ") -> {}", sig.ret().display(f.db))?; write!(f, ") -> {}", sig.ret().display(f.db))?;
} }
TypeCtor::FnDef(def) => { TypeCtor::FnDef(def) => {
let sig = f.db.callable_item_signature(def); let sig = f.db.callable_item_signature(def).subst(&self.parameters);
let name = match def { let name = match def {
CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(), CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(),
CallableDef::StructId(s) => f.db.struct_data(s).name.clone(), CallableDef::StructId(s) => f.db.struct_data(s).name.clone(),
@ -896,10 +912,17 @@ impl HirDisplay for ApplicationTy {
} }
} }
if self.parameters.len() > 0 { if self.parameters.len() > 0 {
let generics = generics(f.db, def.into());
let (parent_params, self_param, type_params, _impl_trait_params) =
generics.provenance_split();
let total_len = parent_params + self_param + type_params;
// We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
if total_len > 0 {
write!(f, "<")?; write!(f, "<")?;
f.write_joined(&*self.parameters.0, ", ")?; f.write_joined(&self.parameters.0[..total_len], ", ")?;
write!(f, ">")?; write!(f, ">")?;
} }
}
write!(f, "(")?; write!(f, "(")?;
f.write_joined(sig.params(), ", ")?; f.write_joined(sig.params(), ", ")?;
write!(f, ") -> {}", sig.ret().display(f.db))?; write!(f, ") -> {}", sig.ret().display(f.db))?;
@ -1009,7 +1032,24 @@ impl HirDisplay for Ty {
match self { match self {
Ty::Apply(a_ty) => a_ty.hir_fmt(f)?, Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
Ty::Projection(p_ty) => p_ty.hir_fmt(f)?, Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
Ty::Param { name, .. } => write!(f, "{}", name)?, Ty::Param(id) => {
let generics = generics(f.db, id.parent);
let param_data = &generics.params.types[id.local_id];
match param_data.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
}
TypeParamProvenance::ArgumentImplTrait => {
write!(f, "impl ")?;
let bounds = f.db.generic_predicates_for_param(*id);
let substs = Substs::type_params_for_generics(&generics);
write_bounds_like_dyn_trait(
&bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
f,
)?;
}
}
}
Ty::Bound(idx) => write!(f, "?{}", idx)?, Ty::Bound(idx) => write!(f, "?{}", idx)?,
Ty::Dyn(predicates) | Ty::Opaque(predicates) => { Ty::Dyn(predicates) | Ty::Opaque(predicates) => {
match self { match self {
@ -1017,6 +1057,19 @@ impl HirDisplay for Ty {
Ty::Opaque(_) => write!(f, "impl ")?, Ty::Opaque(_) => write!(f, "impl ")?,
_ => unreachable!(), _ => unreachable!(),
}; };
write_bounds_like_dyn_trait(&predicates, f)?;
}
Ty::Unknown => write!(f, "{{unknown}}")?,
Ty::Infer(..) => write!(f, "_")?,
}
Ok(())
}
}
fn write_bounds_like_dyn_trait(
predicates: &[GenericPredicate],
f: &mut HirFormatter<impl HirDatabase>,
) -> fmt::Result {
// Note: This code is written to produce nice results (i.e. // Note: This code is written to produce nice results (i.e.
// corresponding to surface Rust) for types that can occur in // corresponding to surface Rust) for types that can occur in
// actual Rust. It will have weird results if the predicates // actual Rust. It will have weird results if the predicates
@ -1055,9 +1108,7 @@ impl HirDisplay for Ty {
angle_open = true; angle_open = true;
} }
let name = let name =
f.db.type_alias_data(projection_pred.projection_ty.associated_ty) f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name.clone();
.name
.clone();
write!(f, "{} = ", name)?; write!(f, "{} = ", name)?;
projection_pred.ty.hir_fmt(f)?; projection_pred.ty.hir_fmt(f)?;
} }
@ -1077,12 +1128,7 @@ impl HirDisplay for Ty {
if angle_open { if angle_open {
write!(f, ">")?; write!(f, ">")?;
} }
}
Ty::Unknown => write!(f, "{{unknown}}")?,
Ty::Infer(..) => write!(f, "_")?,
}
Ok(()) Ok(())
}
} }
impl TraitRef { impl TraitRef {

View file

@ -10,12 +10,13 @@ use std::sync::Arc;
use hir_def::{ use hir_def::{
builtin_type::BuiltinType, builtin_type::BuiltinType,
generics::WherePredicate, generics::{TypeParamProvenance, WherePredicate, WherePredicateTarget},
path::{GenericArg, Path, PathSegment, PathSegments}, path::{GenericArg, Path, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
type_ref::{TypeBound, TypeRef}, type_ref::{TypeBound, TypeRef},
AdtId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, AdtId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId,
LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, UnionId, VariantId, LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId,
VariantId,
}; };
use ra_arena::map::ArenaMap; use ra_arena::map::ArenaMap;
use ra_db::CrateId; use ra_db::CrateId;
@ -27,64 +28,159 @@ use crate::{
all_super_traits, associated_type_by_name_including_super_traits, generics, make_mut_slice, all_super_traits, associated_type_by_name_including_super_traits, generics, make_mut_slice,
variant_data, variant_data,
}, },
FnSig, GenericPredicate, ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Binders, FnSig, GenericPredicate, PolyFnSig, ProjectionPredicate, ProjectionTy, Substs,
Ty, TypeCtor, TypeWalk, TraitEnvironment, TraitRef, Ty, TypeCtor,
}; };
#[derive(Debug)]
pub struct TyLoweringContext<'a, DB: HirDatabase> {
pub db: &'a DB,
pub resolver: &'a Resolver,
/// Note: Conceptually, it's thinkable that we could be in a location where
/// some type params should be represented as placeholders, and others
/// should be converted to variables. I think in practice, this isn't
/// possible currently, so this should be fine for now.
pub type_param_mode: TypeParamLoweringMode,
pub impl_trait_mode: ImplTraitLoweringMode,
pub impl_trait_counter: std::cell::Cell<u16>,
}
impl<'a, DB: HirDatabase> TyLoweringContext<'a, DB> {
pub fn new(db: &'a DB, resolver: &'a Resolver) -> Self {
let impl_trait_counter = std::cell::Cell::new(0);
let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
let type_param_mode = TypeParamLoweringMode::Placeholder;
Self { db, resolver, impl_trait_mode, impl_trait_counter, type_param_mode }
}
pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
Self { impl_trait_mode, ..self }
}
pub fn with_type_param_mode(self, type_param_mode: TypeParamLoweringMode) -> Self {
Self { type_param_mode, ..self }
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ImplTraitLoweringMode {
/// `impl Trait` gets lowered into an opaque type that doesn't unify with
/// anything except itself. This is used in places where values flow 'out',
/// i.e. for arguments of the function we're currently checking, and return
/// types of functions we're calling.
Opaque,
/// `impl Trait` gets lowered into a type variable. Used for argument
/// position impl Trait when inside the respective function, since it allows
/// us to support that without Chalk.
Param,
/// `impl Trait` gets lowered into a variable that can unify with some
/// type. This is used in places where values flow 'in', i.e. for arguments
/// of functions we're calling, and the return type of the function we're
/// currently checking.
Variable,
/// `impl Trait` is disallowed and will be an error.
Disallowed,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum TypeParamLoweringMode {
Placeholder,
Variable,
}
impl Ty { impl Ty {
pub fn from_hir(db: &impl HirDatabase, resolver: &Resolver, type_ref: &TypeRef) -> Self { pub fn from_hir(ctx: &TyLoweringContext<'_, impl HirDatabase>, type_ref: &TypeRef) -> Self {
match type_ref { match type_ref {
TypeRef::Never => Ty::simple(TypeCtor::Never), TypeRef::Never => Ty::simple(TypeCtor::Never),
TypeRef::Tuple(inner) => { TypeRef::Tuple(inner) => {
let inner_tys: Arc<[Ty]> = let inner_tys: Arc<[Ty]> = inner.iter().map(|tr| Ty::from_hir(ctx, tr)).collect();
inner.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect();
Ty::apply( Ty::apply(
TypeCtor::Tuple { cardinality: inner_tys.len() as u16 }, TypeCtor::Tuple { cardinality: inner_tys.len() as u16 },
Substs(inner_tys), Substs(inner_tys),
) )
} }
TypeRef::Path(path) => Ty::from_hir_path(db, resolver, path), TypeRef::Path(path) => Ty::from_hir_path(ctx, path),
TypeRef::RawPtr(inner, mutability) => { TypeRef::RawPtr(inner, mutability) => {
let inner_ty = Ty::from_hir(db, resolver, inner); let inner_ty = Ty::from_hir(ctx, inner);
Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty) Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty)
} }
TypeRef::Array(inner) => { TypeRef::Array(inner) => {
let inner_ty = Ty::from_hir(db, resolver, inner); let inner_ty = Ty::from_hir(ctx, inner);
Ty::apply_one(TypeCtor::Array, inner_ty) Ty::apply_one(TypeCtor::Array, inner_ty)
} }
TypeRef::Slice(inner) => { TypeRef::Slice(inner) => {
let inner_ty = Ty::from_hir(db, resolver, inner); let inner_ty = Ty::from_hir(ctx, inner);
Ty::apply_one(TypeCtor::Slice, inner_ty) Ty::apply_one(TypeCtor::Slice, inner_ty)
} }
TypeRef::Reference(inner, mutability) => { TypeRef::Reference(inner, mutability) => {
let inner_ty = Ty::from_hir(db, resolver, inner); let inner_ty = Ty::from_hir(ctx, inner);
Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty) Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty)
} }
TypeRef::Placeholder => Ty::Unknown, TypeRef::Placeholder => Ty::Unknown,
TypeRef::Fn(params) => { TypeRef::Fn(params) => {
let sig = Substs(params.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect()); let sig = Substs(params.iter().map(|tr| Ty::from_hir(ctx, tr)).collect());
Ty::apply(TypeCtor::FnPtr { num_args: sig.len() as u16 - 1 }, sig) Ty::apply(TypeCtor::FnPtr { num_args: sig.len() as u16 - 1 }, sig)
} }
TypeRef::DynTrait(bounds) => { TypeRef::DynTrait(bounds) => {
let self_ty = Ty::Bound(0); let self_ty = Ty::Bound(0);
let predicates = bounds let predicates = bounds
.iter() .iter()
.flat_map(|b| { .flat_map(|b| GenericPredicate::from_type_bound(ctx, b, self_ty.clone()))
GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone())
})
.collect(); .collect();
Ty::Dyn(predicates) Ty::Dyn(predicates)
} }
TypeRef::ImplTrait(bounds) => { TypeRef::ImplTrait(bounds) => {
match ctx.impl_trait_mode {
ImplTraitLoweringMode::Opaque => {
let self_ty = Ty::Bound(0); let self_ty = Ty::Bound(0);
let predicates = bounds let predicates = bounds
.iter() .iter()
.flat_map(|b| { .flat_map(|b| {
GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) GenericPredicate::from_type_bound(ctx, b, self_ty.clone())
}) })
.collect(); .collect();
Ty::Opaque(predicates) Ty::Opaque(predicates)
} }
ImplTraitLoweringMode::Param => {
let idx = ctx.impl_trait_counter.get();
ctx.impl_trait_counter.set(idx + 1);
if let Some(def) = ctx.resolver.generic_def() {
let generics = generics(ctx.db, def);
let param = generics
.iter()
.filter(|(_, data)| {
data.provenance == TypeParamProvenance::ArgumentImplTrait
})
.nth(idx as usize)
.map_or(Ty::Unknown, |(id, _)| Ty::Param(id));
param
} else {
Ty::Unknown
}
}
ImplTraitLoweringMode::Variable => {
let idx = ctx.impl_trait_counter.get();
ctx.impl_trait_counter.set(idx + 1);
let (parent_params, self_params, list_params, _impl_trait_params) =
if let Some(def) = ctx.resolver.generic_def() {
let generics = generics(ctx.db, def);
generics.provenance_split()
} else {
(0, 0, 0, 0)
};
Ty::Bound(
idx as u32
+ parent_params as u32
+ self_params as u32
+ list_params as u32,
)
}
ImplTraitLoweringMode::Disallowed => {
// FIXME: report error
Ty::Unknown
}
}
}
TypeRef::Error => Ty::Unknown, TypeRef::Error => Ty::Unknown,
} }
} }
@ -93,10 +189,9 @@ impl Ty {
/// lower the self types of the predicates since that could lead to cycles. /// lower the self types of the predicates since that could lead to cycles.
/// So we just check here if the `type_ref` resolves to a generic param, and which. /// So we just check here if the `type_ref` resolves to a generic param, and which.
fn from_hir_only_param( fn from_hir_only_param(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
type_ref: &TypeRef, type_ref: &TypeRef,
) -> Option<u32> { ) -> Option<TypeParamId> {
let path = match type_ref { let path = match type_ref {
TypeRef::Path(path) => path, TypeRef::Path(path) => path,
_ => return None, _ => return None,
@ -107,29 +202,26 @@ impl Ty {
if path.segments().len() > 1 { if path.segments().len() > 1 {
return None; return None;
} }
let resolution = match resolver.resolve_path_in_type_ns(db, path.mod_path()) { let resolution = match ctx.resolver.resolve_path_in_type_ns(ctx.db, path.mod_path()) {
Some((it, None)) => it, Some((it, None)) => it,
_ => return None, _ => return None,
}; };
if let TypeNs::GenericParam(param_id) = resolution { if let TypeNs::GenericParam(param_id) = resolution {
let generics = generics(db, resolver.generic_def().expect("generics in scope")); Some(param_id)
let idx = generics.param_idx(param_id);
Some(idx)
} else { } else {
None None
} }
} }
pub(crate) fn from_type_relative_path( pub(crate) fn from_type_relative_path(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
ty: Ty, ty: Ty,
remaining_segments: PathSegments<'_>, remaining_segments: PathSegments<'_>,
) -> Ty { ) -> Ty {
if remaining_segments.len() == 1 { if remaining_segments.len() == 1 {
// resolve unselected assoc types // resolve unselected assoc types
let segment = remaining_segments.first().unwrap(); let segment = remaining_segments.first().unwrap();
Ty::select_associated_type(db, resolver, ty, segment) Ty::select_associated_type(ctx, ty, segment)
} else if remaining_segments.len() > 1 { } else if remaining_segments.len() > 1 {
// FIXME report error (ambiguous associated type) // FIXME report error (ambiguous associated type)
Ty::Unknown Ty::Unknown
@ -139,20 +231,18 @@ impl Ty {
} }
pub(crate) fn from_partly_resolved_hir_path( pub(crate) fn from_partly_resolved_hir_path(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
resolution: TypeNs, resolution: TypeNs,
resolved_segment: PathSegment<'_>, resolved_segment: PathSegment<'_>,
remaining_segments: PathSegments<'_>, remaining_segments: PathSegments<'_>,
) -> Ty { ) -> Ty {
let ty = match resolution { let ty = match resolution {
TypeNs::TraitId(trait_) => { TypeNs::TraitId(trait_) => {
let trait_ref = let trait_ref = TraitRef::from_resolved_path(ctx, trait_, resolved_segment, None);
TraitRef::from_resolved_path(db, resolver, trait_, resolved_segment, None);
return if remaining_segments.len() == 1 { return if remaining_segments.len() == 1 {
let segment = remaining_segments.first().unwrap(); let segment = remaining_segments.first().unwrap();
let associated_ty = associated_type_by_name_including_super_traits( let associated_ty = associated_type_by_name_including_super_traits(
db, ctx.db,
trait_ref.trait_, trait_ref.trait_,
&segment.name, &segment.name,
); );
@ -177,37 +267,55 @@ impl Ty {
}; };
} }
TypeNs::GenericParam(param_id) => { TypeNs::GenericParam(param_id) => {
let generics = generics(db, resolver.generic_def().expect("generics in scope")); let generics =
let idx = generics.param_idx(param_id); generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope"));
// FIXME: maybe return name in resolution? match ctx.type_param_mode {
let name = generics.param_name(param_id); TypeParamLoweringMode::Placeholder => Ty::Param(param_id),
Ty::Param { idx, name } TypeParamLoweringMode::Variable => {
let idx = generics.param_idx(param_id).expect("matching generics");
Ty::Bound(idx)
}
}
}
TypeNs::SelfType(impl_id) => {
let generics = generics(ctx.db, impl_id.into());
let substs = match ctx.type_param_mode {
TypeParamLoweringMode::Placeholder => {
Substs::type_params_for_generics(&generics)
}
TypeParamLoweringMode::Variable => Substs::bound_vars(&generics),
};
ctx.db.impl_self_ty(impl_id).subst(&substs)
}
TypeNs::AdtSelfType(adt) => {
let generics = generics(ctx.db, adt.into());
let substs = match ctx.type_param_mode {
TypeParamLoweringMode::Placeholder => {
Substs::type_params_for_generics(&generics)
}
TypeParamLoweringMode::Variable => Substs::bound_vars(&generics),
};
ctx.db.ty(adt.into()).subst(&substs)
} }
TypeNs::SelfType(impl_id) => db.impl_self_ty(impl_id).clone(),
TypeNs::AdtSelfType(adt) => db.ty(adt.into()),
TypeNs::AdtId(it) => Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into()), TypeNs::AdtId(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()),
TypeNs::BuiltinType(it) => { TypeNs::BuiltinType(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()),
Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into()) TypeNs::TypeAliasId(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()),
}
TypeNs::TypeAliasId(it) => {
Ty::from_hir_path_inner(db, resolver, resolved_segment, it.into())
}
// FIXME: report error // FIXME: report error
TypeNs::EnumVariantId(_) => return Ty::Unknown, TypeNs::EnumVariantId(_) => return Ty::Unknown,
}; };
Ty::from_type_relative_path(db, resolver, ty, remaining_segments) Ty::from_type_relative_path(ctx, ty, remaining_segments)
} }
pub(crate) fn from_hir_path(db: &impl HirDatabase, resolver: &Resolver, path: &Path) -> Ty { pub(crate) fn from_hir_path(ctx: &TyLoweringContext<'_, impl HirDatabase>, path: &Path) -> Ty {
// Resolve the path (in type namespace) // Resolve the path (in type namespace)
if let Some(type_ref) = path.type_anchor() { if let Some(type_ref) = path.type_anchor() {
let ty = Ty::from_hir(db, resolver, &type_ref); let ty = Ty::from_hir(ctx, &type_ref);
return Ty::from_type_relative_path(db, resolver, ty, path.segments()); return Ty::from_type_relative_path(ctx, ty, path.segments());
} }
let (resolution, remaining_index) = let (resolution, remaining_index) =
match resolver.resolve_path_in_type_ns(db, path.mod_path()) { match ctx.resolver.resolve_path_in_type_ns(ctx.db, path.mod_path()) {
Some(it) => it, Some(it) => it,
None => return Ty::Unknown, None => return Ty::Unknown,
}; };
@ -218,39 +326,44 @@ impl Ty {
), ),
Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)), Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
}; };
Ty::from_partly_resolved_hir_path( Ty::from_partly_resolved_hir_path(ctx, resolution, resolved_segment, remaining_segments)
db,
resolver,
resolution,
resolved_segment,
remaining_segments,
)
} }
fn select_associated_type( fn select_associated_type(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
self_ty: Ty, self_ty: Ty,
segment: PathSegment<'_>, segment: PathSegment<'_>,
) -> Ty { ) -> Ty {
let param_idx = match self_ty { let def = match ctx.resolver.generic_def() {
Ty::Param { idx, .. } => idx,
_ => return Ty::Unknown, // Error: Ambiguous associated type
};
let def = match resolver.generic_def() {
Some(def) => def, Some(def) => def,
None => return Ty::Unknown, // this can't actually happen None => return Ty::Unknown, // this can't actually happen
}; };
let predicates = db.generic_predicates_for_param(def.into(), param_idx); let param_id = match self_ty {
let traits_from_env = predicates.iter().filter_map(|pred| match pred { Ty::Param(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id,
GenericPredicate::Implemented(tr) if tr.self_ty() == &self_ty => Some(tr.trait_), Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => {
let generics = generics(ctx.db, def);
let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) {
id
} else {
return Ty::Unknown;
};
param_id
}
_ => return Ty::Unknown, // Error: Ambiguous associated type
};
let predicates = ctx.db.generic_predicates_for_param(param_id);
let traits_from_env = predicates.iter().filter_map(|pred| match &pred.value {
GenericPredicate::Implemented(tr) => Some(tr.trait_),
_ => None, _ => None,
}); });
let traits = traits_from_env.flat_map(|t| all_super_traits(db, t)); let traits = traits_from_env.flat_map(|t| all_super_traits(ctx.db, t));
for t in traits { for t in traits {
if let Some(associated_ty) = db.trait_data(t).associated_type_by_name(&segment.name) { if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name)
let substs = {
Substs::build_for_def(db, t).push(self_ty.clone()).fill_with_unknown().build(); let substs = Substs::build_for_def(ctx.db, t)
.push(self_ty.clone())
.fill_with_unknown()
.build();
// FIXME handle type parameters on the segment // FIXME handle type parameters on the segment
return Ty::Projection(ProjectionTy { associated_ty, parameters: substs }); return Ty::Projection(ProjectionTy { associated_ty, parameters: substs });
} }
@ -259,8 +372,7 @@ impl Ty {
} }
fn from_hir_path_inner( fn from_hir_path_inner(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
segment: PathSegment<'_>, segment: PathSegment<'_>,
typable: TyDefId, typable: TyDefId,
) -> Ty { ) -> Ty {
@ -269,15 +381,14 @@ impl Ty {
TyDefId::AdtId(it) => Some(it.into()), TyDefId::AdtId(it) => Some(it.into()),
TyDefId::TypeAliasId(it) => Some(it.into()), TyDefId::TypeAliasId(it) => Some(it.into()),
}; };
let substs = substs_from_path_segment(db, resolver, segment, generic_def, false); let substs = substs_from_path_segment(ctx, segment, generic_def, false);
db.ty(typable).subst(&substs) ctx.db.ty(typable).subst(&substs)
} }
/// Collect generic arguments from a path into a `Substs`. See also /// Collect generic arguments from a path into a `Substs`. See also
/// `create_substs_for_ast_path` and `def_to_ty` in rustc. /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
pub(super) fn substs_from_path( pub(super) fn substs_from_path(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
path: &Path, path: &Path,
// Note that we don't call `db.value_type(resolved)` here, // Note that we don't call `db.value_type(resolved)` here,
// `ValueTyDefId` is just a convenient way to pass generics and // `ValueTyDefId` is just a convenient way to pass generics and
@ -305,52 +416,49 @@ impl Ty {
(segment, Some(var.parent.into())) (segment, Some(var.parent.into()))
} }
}; };
substs_from_path_segment(db, resolver, segment, generic_def, false) substs_from_path_segment(ctx, segment, generic_def, false)
} }
} }
pub(super) fn substs_from_path_segment( pub(super) fn substs_from_path_segment(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
segment: PathSegment<'_>, segment: PathSegment<'_>,
def_generic: Option<GenericDefId>, def_generic: Option<GenericDefId>,
add_self_param: bool, _add_self_param: bool,
) -> Substs { ) -> Substs {
let mut substs = Vec::new(); let mut substs = Vec::new();
let def_generics = def_generic.map(|def| generics(db, def.into())); let def_generics = def_generic.map(|def| generics(ctx.db, def.into()));
let (total_len, parent_len, child_len) = def_generics.map_or((0, 0, 0), |g| g.len_split()); let (parent_params, self_params, type_params, impl_trait_params) =
substs.extend(iter::repeat(Ty::Unknown).take(parent_len)); def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split());
if add_self_param { substs.extend(iter::repeat(Ty::Unknown).take(parent_params));
// FIXME this add_self_param argument is kind of a hack: Traits have the
// Self type as an implicit first type parameter, but it can't be
// actually provided in the type arguments
// (well, actually sometimes it can, in the form of type-relative paths: `<Foo as Default>::default()`)
substs.push(Ty::Unknown);
}
if let Some(generic_args) = &segment.args_and_bindings { if let Some(generic_args) = &segment.args_and_bindings {
if !generic_args.has_self_type {
substs.extend(iter::repeat(Ty::Unknown).take(self_params));
}
let expected_num =
if generic_args.has_self_type { self_params + type_params } else { type_params };
let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
// if args are provided, it should be all of them, but we can't rely on that // if args are provided, it should be all of them, but we can't rely on that
let self_param_correction = if add_self_param { 1 } else { 0 }; for arg in generic_args.args.iter().skip(skip).take(expected_num) {
let child_len = child_len - self_param_correction;
for arg in generic_args.args.iter().take(child_len) {
match arg { match arg {
GenericArg::Type(type_ref) => { GenericArg::Type(type_ref) => {
let ty = Ty::from_hir(db, resolver, type_ref); let ty = Ty::from_hir(ctx, type_ref);
substs.push(ty); substs.push(ty);
} }
} }
} }
} }
let total_len = parent_params + self_params + type_params + impl_trait_params;
// add placeholders for args that were not provided // add placeholders for args that were not provided
let supplied_params = substs.len(); for _ in substs.len()..total_len {
for _ in supplied_params..total_len {
substs.push(Ty::Unknown); substs.push(Ty::Unknown);
} }
assert_eq!(substs.len(), total_len); assert_eq!(substs.len(), total_len);
// handle defaults // handle defaults
if let Some(def_generic) = def_generic { if let Some(def_generic) = def_generic {
let default_substs = db.generic_defaults(def_generic.into()); let default_substs = ctx.db.generic_defaults(def_generic.into());
assert_eq!(substs.len(), default_substs.len()); assert_eq!(substs.len(), default_substs.len());
for (i, default_ty) in default_substs.iter().enumerate() { for (i, default_ty) in default_substs.iter().enumerate() {
@ -365,27 +473,25 @@ pub(super) fn substs_from_path_segment(
impl TraitRef { impl TraitRef {
fn from_path( fn from_path(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
path: &Path, path: &Path,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Option<Self> { ) -> Option<Self> {
let resolved = match resolver.resolve_path_in_type_ns_fully(db, path.mod_path())? { let resolved = match ctx.resolver.resolve_path_in_type_ns_fully(ctx.db, path.mod_path())? {
TypeNs::TraitId(tr) => tr, TypeNs::TraitId(tr) => tr,
_ => return None, _ => return None,
}; };
let segment = path.segments().last().expect("path should have at least one segment"); let segment = path.segments().last().expect("path should have at least one segment");
Some(TraitRef::from_resolved_path(db, resolver, resolved.into(), segment, explicit_self_ty)) Some(TraitRef::from_resolved_path(ctx, resolved.into(), segment, explicit_self_ty))
} }
pub(crate) fn from_resolved_path( pub(crate) fn from_resolved_path(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
resolved: TraitId, resolved: TraitId,
segment: PathSegment<'_>, segment: PathSegment<'_>,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Self { ) -> Self {
let mut substs = TraitRef::substs_from_path(db, resolver, segment, resolved); let mut substs = TraitRef::substs_from_path(ctx, segment, resolved);
if let Some(self_ty) = explicit_self_ty { if let Some(self_ty) = explicit_self_ty {
make_mut_slice(&mut substs.0)[0] = self_ty; make_mut_slice(&mut substs.0)[0] = self_ty;
} }
@ -393,8 +499,7 @@ impl TraitRef {
} }
fn from_hir( fn from_hir(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
type_ref: &TypeRef, type_ref: &TypeRef,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Option<Self> { ) -> Option<Self> {
@ -402,28 +507,26 @@ impl TraitRef {
TypeRef::Path(path) => path, TypeRef::Path(path) => path,
_ => return None, _ => return None,
}; };
TraitRef::from_path(db, resolver, path, explicit_self_ty) TraitRef::from_path(ctx, path, explicit_self_ty)
} }
fn substs_from_path( fn substs_from_path(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
segment: PathSegment<'_>, segment: PathSegment<'_>,
resolved: TraitId, resolved: TraitId,
) -> Substs { ) -> Substs {
let has_self_param = let has_self_param =
segment.args_and_bindings.as_ref().map(|a| a.has_self_type).unwrap_or(false); segment.args_and_bindings.as_ref().map(|a| a.has_self_type).unwrap_or(false);
substs_from_path_segment(db, resolver, segment, Some(resolved.into()), !has_self_param) substs_from_path_segment(ctx, segment, Some(resolved.into()), !has_self_param)
} }
pub(crate) fn from_type_bound( pub(crate) fn from_type_bound(
db: &impl HirDatabase, ctx: &TyLoweringContext<'_, impl HirDatabase>,
resolver: &Resolver,
bound: &TypeBound, bound: &TypeBound,
self_ty: Ty, self_ty: Ty,
) -> Option<TraitRef> { ) -> Option<TraitRef> {
match bound { match bound {
TypeBound::Path(path) => TraitRef::from_path(db, resolver, path, Some(self_ty)), TypeBound::Path(path) => TraitRef::from_path(ctx, path, Some(self_ty)),
TypeBound::Error => None, TypeBound::Error => None,
} }
} }
@ -431,33 +534,44 @@ impl TraitRef {
impl GenericPredicate { impl GenericPredicate {
pub(crate) fn from_where_predicate<'a>( pub(crate) fn from_where_predicate<'a>(
db: &'a impl HirDatabase, ctx: &'a TyLoweringContext<'a, impl HirDatabase>,
resolver: &'a Resolver,
where_predicate: &'a WherePredicate, where_predicate: &'a WherePredicate,
) -> impl Iterator<Item = GenericPredicate> + 'a { ) -> impl Iterator<Item = GenericPredicate> + 'a {
let self_ty = Ty::from_hir(db, resolver, &where_predicate.type_ref); let self_ty = match &where_predicate.target {
GenericPredicate::from_type_bound(db, resolver, &where_predicate.bound, self_ty) WherePredicateTarget::TypeRef(type_ref) => Ty::from_hir(ctx, type_ref),
WherePredicateTarget::TypeParam(param_id) => {
let generic_def = ctx.resolver.generic_def().expect("generics in scope");
let generics = generics(ctx.db, generic_def);
let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id };
match ctx.type_param_mode {
TypeParamLoweringMode::Placeholder => Ty::Param(param_id),
TypeParamLoweringMode::Variable => {
let idx = generics.param_idx(param_id).expect("matching generics");
Ty::Bound(idx)
}
}
}
};
GenericPredicate::from_type_bound(ctx, &where_predicate.bound, self_ty)
} }
pub(crate) fn from_type_bound<'a>( pub(crate) fn from_type_bound<'a>(
db: &'a impl HirDatabase, ctx: &'a TyLoweringContext<'a, impl HirDatabase>,
resolver: &'a Resolver,
bound: &'a TypeBound, bound: &'a TypeBound,
self_ty: Ty, self_ty: Ty,
) -> impl Iterator<Item = GenericPredicate> + 'a { ) -> impl Iterator<Item = GenericPredicate> + 'a {
let trait_ref = TraitRef::from_type_bound(db, &resolver, bound, self_ty); let trait_ref = TraitRef::from_type_bound(ctx, bound, self_ty);
iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented)) iter::once(trait_ref.clone().map_or(GenericPredicate::Error, GenericPredicate::Implemented))
.chain( .chain(
trait_ref.into_iter().flat_map(move |tr| { trait_ref
assoc_type_bindings_from_type_bound(db, resolver, bound, tr) .into_iter()
}), .flat_map(move |tr| assoc_type_bindings_from_type_bound(ctx, bound, tr)),
) )
} }
} }
fn assoc_type_bindings_from_type_bound<'a>( fn assoc_type_bindings_from_type_bound<'a>(
db: &'a impl HirDatabase, ctx: &'a TyLoweringContext<'a, impl HirDatabase>,
resolver: &'a Resolver,
bound: &'a TypeBound, bound: &'a TypeBound,
trait_ref: TraitRef, trait_ref: TraitRef,
) -> impl Iterator<Item = GenericPredicate> + 'a { ) -> impl Iterator<Item = GenericPredicate> + 'a {
@ -471,21 +585,21 @@ fn assoc_type_bindings_from_type_bound<'a>(
.flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) .flat_map(|args_and_bindings| args_and_bindings.bindings.iter())
.map(move |(name, type_ref)| { .map(move |(name, type_ref)| {
let associated_ty = let associated_ty =
associated_type_by_name_including_super_traits(db, trait_ref.trait_, &name); associated_type_by_name_including_super_traits(ctx.db, trait_ref.trait_, &name);
let associated_ty = match associated_ty { let associated_ty = match associated_ty {
None => return GenericPredicate::Error, None => return GenericPredicate::Error,
Some(t) => t, Some(t) => t,
}; };
let projection_ty = let projection_ty =
ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() };
let ty = Ty::from_hir(db, resolver, type_ref); let ty = Ty::from_hir(ctx, type_ref);
let projection_predicate = ProjectionPredicate { projection_ty, ty }; let projection_predicate = ProjectionPredicate { projection_ty, ty };
GenericPredicate::Projection(projection_predicate) GenericPredicate::Projection(projection_predicate)
}) })
} }
/// Build the signature of a callable item (function, struct or enum variant). /// Build the signature of a callable item (function, struct or enum variant).
pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSig { pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> PolyFnSig {
match def { match def {
CallableDef::FunctionId(f) => fn_sig_for_fn(db, f), CallableDef::FunctionId(f) => fn_sig_for_fn(db, f),
CallableDef::StructId(s) => fn_sig_for_struct_constructor(db, s), CallableDef::StructId(s) => fn_sig_for_struct_constructor(db, s),
@ -497,16 +611,19 @@ pub fn callable_item_sig(db: &impl HirDatabase, def: CallableDef) -> FnSig {
pub(crate) fn field_types_query( pub(crate) fn field_types_query(
db: &impl HirDatabase, db: &impl HirDatabase,
variant_id: VariantId, variant_id: VariantId,
) -> Arc<ArenaMap<LocalStructFieldId, Ty>> { ) -> Arc<ArenaMap<LocalStructFieldId, Binders<Ty>>> {
let var_data = variant_data(db, variant_id); let var_data = variant_data(db, variant_id);
let resolver = match variant_id { let (resolver, def): (_, GenericDefId) = match variant_id {
VariantId::StructId(it) => it.resolver(db), VariantId::StructId(it) => (it.resolver(db), it.into()),
VariantId::UnionId(it) => it.resolver(db), VariantId::UnionId(it) => (it.resolver(db), it.into()),
VariantId::EnumVariantId(it) => it.parent.resolver(db), VariantId::EnumVariantId(it) => (it.parent.resolver(db), it.parent.into()),
}; };
let generics = generics(db, def);
let mut res = ArenaMap::default(); let mut res = ArenaMap::default();
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
for (field_id, field_data) in var_data.fields().iter() { for (field_id, field_data) in var_data.fields().iter() {
res.insert(field_id, Ty::from_hir(db, &resolver, &field_data.type_ref)) res.insert(field_id, Binders::new(generics.len(), Ty::from_hir(&ctx, &field_data.type_ref)))
} }
Arc::new(res) Arc::new(res)
} }
@ -521,32 +638,43 @@ pub(crate) fn field_types_query(
/// these are fine: `T: Foo<U::Item>, U: Foo<()>`. /// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
pub(crate) fn generic_predicates_for_param_query( pub(crate) fn generic_predicates_for_param_query(
db: &impl HirDatabase, db: &impl HirDatabase,
def: GenericDefId, param_id: TypeParamId,
param_idx: u32, ) -> Arc<[Binders<GenericPredicate>]> {
) -> Arc<[GenericPredicate]> { let resolver = param_id.parent.resolver(db);
let resolver = def.resolver(db); let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let generics = generics(db, param_id.parent);
resolver resolver
.where_predicates_in_scope() .where_predicates_in_scope()
// we have to filter out all other predicates *first*, before attempting to lower them // we have to filter out all other predicates *first*, before attempting to lower them
.filter(|pred| Ty::from_hir_only_param(db, &resolver, &pred.type_ref) == Some(param_idx)) .filter(|pred| match &pred.target {
.flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) WherePredicateTarget::TypeRef(type_ref) => {
Ty::from_hir_only_param(&ctx, type_ref) == Some(param_id)
}
WherePredicateTarget::TypeParam(local_id) => *local_id == param_id.local_id,
})
.flat_map(|pred| {
GenericPredicate::from_where_predicate(&ctx, pred)
.map(|p| Binders::new(generics.len(), p))
})
.collect() .collect()
} }
pub(crate) fn generic_predicates_for_param_recover( pub(crate) fn generic_predicates_for_param_recover(
_db: &impl HirDatabase, _db: &impl HirDatabase,
_cycle: &[String], _cycle: &[String],
_def: &GenericDefId, _param_id: &TypeParamId,
_param_idx: &u32, ) -> Arc<[Binders<GenericPredicate>]> {
) -> Arc<[GenericPredicate]> {
Arc::new([]) Arc::new([])
} }
impl TraitEnvironment { impl TraitEnvironment {
pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> { pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
let ctx = TyLoweringContext::new(db, &resolver)
.with_type_param_mode(TypeParamLoweringMode::Placeholder);
let predicates = resolver let predicates = resolver
.where_predicates_in_scope() .where_predicates_in_scope()
.flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
Arc::new(TraitEnvironment { predicates }) Arc::new(TraitEnvironment { predicates })
@ -557,57 +685,74 @@ impl TraitEnvironment {
pub(crate) fn generic_predicates_query( pub(crate) fn generic_predicates_query(
db: &impl HirDatabase, db: &impl HirDatabase,
def: GenericDefId, def: GenericDefId,
) -> Arc<[GenericPredicate]> { ) -> Arc<[Binders<GenericPredicate>]> {
let resolver = def.resolver(db); let resolver = def.resolver(db);
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let generics = generics(db, def);
resolver resolver
.where_predicates_in_scope() .where_predicates_in_scope()
.flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) .flat_map(|pred| {
GenericPredicate::from_where_predicate(&ctx, pred)
.map(|p| Binders::new(generics.len(), p))
})
.collect() .collect()
} }
/// Resolve the default type params from generics /// Resolve the default type params from generics
pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs { pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs {
let resolver = def.resolver(db); let resolver = def.resolver(db);
let ctx = TyLoweringContext::new(db, &resolver);
let generic_params = generics(db, def.into()); let generic_params = generics(db, def.into());
let defaults = generic_params let defaults = generic_params
.iter() .iter()
.map(|(_idx, p)| p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(db, &resolver, t))) .map(|(_idx, p)| p.default.as_ref().map_or(Ty::Unknown, |t| Ty::from_hir(&ctx, t)))
.collect(); .collect();
Substs(defaults) Substs(defaults)
} }
fn fn_sig_for_fn(db: &impl HirDatabase, def: FunctionId) -> FnSig { fn fn_sig_for_fn(db: &impl HirDatabase, def: FunctionId) -> PolyFnSig {
let data = db.function_data(def); let data = db.function_data(def);
let resolver = def.resolver(db); let resolver = def.resolver(db);
let params = data.params.iter().map(|tr| Ty::from_hir(db, &resolver, tr)).collect::<Vec<_>>(); let ctx_params = TyLoweringContext::new(db, &resolver)
let ret = Ty::from_hir(db, &resolver, &data.ret_type); .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
FnSig::from_params_and_return(params, ret) .with_type_param_mode(TypeParamLoweringMode::Variable);
let params = data.params.iter().map(|tr| Ty::from_hir(&ctx_params, tr)).collect::<Vec<_>>();
let ctx_ret = ctx_params.with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
let ret = Ty::from_hir(&ctx_ret, &data.ret_type);
let generics = generics(db, def.into());
let num_binders = generics.len();
Binders::new(num_binders, FnSig::from_params_and_return(params, ret))
} }
/// Build the declared type of a function. This should not need to look at the /// Build the declared type of a function. This should not need to look at the
/// function body. /// function body.
fn type_for_fn(db: &impl HirDatabase, def: FunctionId) -> Ty { fn type_for_fn(db: &impl HirDatabase, def: FunctionId) -> Binders<Ty> {
let generics = generics(db, def.into()); let generics = generics(db, def.into());
let substs = Substs::identity(&generics); let substs = Substs::bound_vars(&generics);
Ty::apply(TypeCtor::FnDef(def.into()), substs) Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
} }
/// Build the declared type of a const. /// Build the declared type of a const.
fn type_for_const(db: &impl HirDatabase, def: ConstId) -> Ty { fn type_for_const(db: &impl HirDatabase, def: ConstId) -> Binders<Ty> {
let data = db.const_data(def); let data = db.const_data(def);
let generics = generics(db, def.into());
let resolver = def.resolver(db); let resolver = def.resolver(db);
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
Ty::from_hir(db, &resolver, &data.type_ref) Binders::new(generics.len(), Ty::from_hir(&ctx, &data.type_ref))
} }
/// Build the declared type of a static. /// Build the declared type of a static.
fn type_for_static(db: &impl HirDatabase, def: StaticId) -> Ty { fn type_for_static(db: &impl HirDatabase, def: StaticId) -> Binders<Ty> {
let data = db.static_data(def); let data = db.static_data(def);
let resolver = def.resolver(db); let resolver = def.resolver(db);
let ctx = TyLoweringContext::new(db, &resolver);
Ty::from_hir(db, &resolver, &data.type_ref) Binders::new(0, Ty::from_hir(&ctx, &data.type_ref))
} }
/// Build the declared type of a static. /// Build the declared type of a static.
@ -621,68 +766,69 @@ fn type_for_builtin(def: BuiltinType) -> Ty {
}) })
} }
fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> FnSig { fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFnSig {
let struct_data = db.struct_data(def.into()); let struct_data = db.struct_data(def.into());
let fields = struct_data.variant_data.fields(); let fields = struct_data.variant_data.fields();
let resolver = def.resolver(db); let resolver = def.resolver(db);
let params = fields let ctx =
.iter() TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
.map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) let params =
.collect::<Vec<_>>(); fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::<Vec<_>>();
let ret = type_for_adt(db, def.into()); let ret = type_for_adt(db, def.into());
FnSig::from_params_and_return(params, ret) Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value))
} }
/// Build the type of a tuple struct constructor. /// Build the type of a tuple struct constructor.
fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Ty { fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Binders<Ty> {
let struct_data = db.struct_data(def.into()); let struct_data = db.struct_data(def.into());
if struct_data.variant_data.is_unit() { if struct_data.variant_data.is_unit() {
return type_for_adt(db, def.into()); // Unit struct return type_for_adt(db, def.into()); // Unit struct
} }
let generics = generics(db, def.into()); let generics = generics(db, def.into());
let substs = Substs::identity(&generics); let substs = Substs::bound_vars(&generics);
Ty::apply(TypeCtor::FnDef(def.into()), substs) Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs))
} }
fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> FnSig { fn fn_sig_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> PolyFnSig {
let enum_data = db.enum_data(def.parent); let enum_data = db.enum_data(def.parent);
let var_data = &enum_data.variants[def.local_id]; let var_data = &enum_data.variants[def.local_id];
let fields = var_data.variant_data.fields(); let fields = var_data.variant_data.fields();
let resolver = def.parent.resolver(db); let resolver = def.parent.resolver(db);
let params = fields let ctx =
.iter() TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
.map(|(_, field)| Ty::from_hir(db, &resolver, &field.type_ref)) let params =
.collect::<Vec<_>>(); fields.iter().map(|(_, field)| Ty::from_hir(&ctx, &field.type_ref)).collect::<Vec<_>>();
let generics = generics(db, def.parent.into()); let ret = type_for_adt(db, def.parent.into());
let substs = Substs::identity(&generics); Binders::new(ret.num_binders, FnSig::from_params_and_return(params, ret.value))
let ret = type_for_adt(db, def.parent.into()).subst(&substs);
FnSig::from_params_and_return(params, ret)
} }
/// Build the type of a tuple enum variant constructor. /// Build the type of a tuple enum variant constructor.
fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> Ty { fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) -> Binders<Ty> {
let enum_data = db.enum_data(def.parent); let enum_data = db.enum_data(def.parent);
let var_data = &enum_data.variants[def.local_id].variant_data; let var_data = &enum_data.variants[def.local_id].variant_data;
if var_data.is_unit() { if var_data.is_unit() {
return type_for_adt(db, def.parent.into()); // Unit variant return type_for_adt(db, def.parent.into()); // Unit variant
} }
let generics = generics(db, def.parent.into()); let generics = generics(db, def.parent.into());
let substs = Substs::identity(&generics); let substs = Substs::bound_vars(&generics);
Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs) Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs))
} }
fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Ty { fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Binders<Ty> {
let generics = generics(db, adt.into()); let generics = generics(db, adt.into());
Ty::apply(TypeCtor::Adt(adt), Substs::identity(&generics)) let substs = Substs::bound_vars(&generics);
Binders::new(substs.len(), Ty::apply(TypeCtor::Adt(adt), substs))
} }
fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Ty { fn type_for_type_alias(db: &impl HirDatabase, t: TypeAliasId) -> Binders<Ty> {
let generics = generics(db, t.into()); let generics = generics(db, t.into());
let resolver = t.resolver(db); let resolver = t.resolver(db);
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let type_ref = &db.type_alias_data(t).type_ref; let type_ref = &db.type_alias_data(t).type_ref;
let substs = Substs::identity(&generics); let substs = Substs::bound_vars(&generics);
let inner = Ty::from_hir(db, &resolver, type_ref.as_ref().unwrap_or(&TypeRef::Error)); let inner = Ty::from_hir(&ctx, type_ref.as_ref().unwrap_or(&TypeRef::Error));
inner.subst(&substs) Binders::new(substs.len(), inner)
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@ -736,19 +882,24 @@ impl_froms!(ValueTyDefId: FunctionId, StructId, EnumVariantId, ConstId, StaticId
/// `struct Foo(usize)`, we have two types: The type of the struct itself, and /// `struct Foo(usize)`, we have two types: The type of the struct itself, and
/// the constructor function `(usize) -> Foo` which lives in the values /// the constructor function `(usize) -> Foo` which lives in the values
/// namespace. /// namespace.
pub(crate) fn ty_query(db: &impl HirDatabase, def: TyDefId) -> Ty { pub(crate) fn ty_query(db: &impl HirDatabase, def: TyDefId) -> Binders<Ty> {
match def { match def {
TyDefId::BuiltinType(it) => type_for_builtin(it), TyDefId::BuiltinType(it) => Binders::new(0, type_for_builtin(it)),
TyDefId::AdtId(it) => type_for_adt(db, it), TyDefId::AdtId(it) => type_for_adt(db, it),
TyDefId::TypeAliasId(it) => type_for_type_alias(db, it), TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
} }
} }
pub(crate) fn ty_recover(_db: &impl HirDatabase, _cycle: &[String], _def: &TyDefId) -> Ty { pub(crate) fn ty_recover(db: &impl HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
Ty::Unknown let num_binders = match *def {
TyDefId::BuiltinType(_) => 0,
TyDefId::AdtId(it) => generics(db, it.into()).len(),
TyDefId::TypeAliasId(it) => generics(db, it.into()).len(),
};
Binders::new(num_binders, Ty::Unknown)
} }
pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Ty { pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
match def { match def {
ValueTyDefId::FunctionId(it) => type_for_fn(db, it), ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it), ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
@ -758,24 +909,36 @@ pub(crate) fn value_ty_query(db: &impl HirDatabase, def: ValueTyDefId) -> Ty {
} }
} }
pub(crate) fn impl_self_ty_query(db: &impl HirDatabase, impl_id: ImplId) -> Ty { pub(crate) fn impl_self_ty_query(db: &impl HirDatabase, impl_id: ImplId) -> Binders<Ty> {
let impl_data = db.impl_data(impl_id); let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db); let resolver = impl_id.resolver(db);
Ty::from_hir(db, &resolver, &impl_data.target_type) let generics = generics(db, impl_id.into());
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
Binders::new(generics.len(), Ty::from_hir(&ctx, &impl_data.target_type))
} }
pub(crate) fn impl_self_ty_recover( pub(crate) fn impl_self_ty_recover(
_db: &impl HirDatabase, db: &impl HirDatabase,
_cycle: &[String], _cycle: &[String],
_impl_id: &ImplId, impl_id: &ImplId,
) -> Ty { ) -> Binders<Ty> {
Ty::Unknown let generics = generics(db, (*impl_id).into());
Binders::new(generics.len(), Ty::Unknown)
} }
pub(crate) fn impl_trait_query(db: &impl HirDatabase, impl_id: ImplId) -> Option<TraitRef> { pub(crate) fn impl_trait_query(
db: &impl HirDatabase,
impl_id: ImplId,
) -> Option<Binders<TraitRef>> {
let impl_data = db.impl_data(impl_id); let impl_data = db.impl_data(impl_id);
let resolver = impl_id.resolver(db); let resolver = impl_id.resolver(db);
let ctx =
TyLoweringContext::new(db, &resolver).with_type_param_mode(TypeParamLoweringMode::Variable);
let self_ty = db.impl_self_ty(impl_id); let self_ty = db.impl_self_ty(impl_id);
let target_trait = impl_data.target_trait.as_ref()?; let target_trait = impl_data.target_trait.as_ref()?;
TraitRef::from_hir(db, &resolver, target_trait, Some(self_ty.clone())) Some(Binders::new(
self_ty.num_binders,
TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value.clone()))?,
))
} }

View file

@ -6,5 +6,4 @@ test_utils::marks!(
type_var_resolves_to_int_var type_var_resolves_to_int_var
match_ergonomics_ref match_ergonomics_ref
coerce_merge_fail_fallback coerce_merge_fail_fallback
insert_vars_for_impl_trait
); );

View file

@ -61,11 +61,11 @@ impl CrateImplBlocks {
for impl_id in module_data.scope.impls() { for impl_id in module_data.scope.impls() {
match db.impl_trait(impl_id) { match db.impl_trait(impl_id) {
Some(tr) => { Some(tr) => {
res.impls_by_trait.entry(tr.trait_).or_default().push(impl_id); res.impls_by_trait.entry(tr.value.trait_).or_default().push(impl_id);
} }
None => { None => {
let self_ty = db.impl_self_ty(impl_id); let self_ty = db.impl_self_ty(impl_id);
if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty) { if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty.value) {
res.impls.entry(self_ty_fp).or_default().push(impl_id); res.impls.entry(self_ty_fp).or_default().push(impl_id);
} }
} }
@ -496,7 +496,7 @@ fn transform_receiver_ty(
AssocContainerId::ContainerId(_) => unreachable!(), AssocContainerId::ContainerId(_) => unreachable!(),
}; };
let sig = db.callable_item_signature(function_id.into()); let sig = db.callable_item_signature(function_id.into());
Some(sig.params()[0].clone().subst(&substs)) Some(sig.value.params()[0].clone().subst_bound_vars(&substs))
} }
pub fn implements_trait( pub fn implements_trait(

View file

@ -71,42 +71,42 @@ fn test2() {
[82; 93) '{ loop {} }': T [82; 93) '{ loop {} }': T
[84; 91) 'loop {}': ! [84; 91) 'loop {}': !
[89; 91) '{}': () [89; 91) '{}': ()
[122; 133) '{ loop {} }': *mut [T;_] [122; 133) '{ loop {} }': *mut [T; _]
[124; 131) 'loop {}': ! [124; 131) 'loop {}': !
[129; 131) '{}': () [129; 131) '{}': ()
[160; 173) '{ gen() }': *mut [U] [160; 173) '{ gen() }': *mut [U]
[166; 169) 'gen': fn gen<U>() -> *mut [T;_] [166; 169) 'gen': fn gen<U>() -> *mut [U; _]
[166; 171) 'gen()': *mut [U;_] [166; 171) 'gen()': *mut [U; _]
[186; 420) '{ ...rr); }': () [186; 420) '{ ...rr); }': ()
[196; 199) 'arr': &[u8;_] [196; 199) 'arr': &[u8; _]
[212; 216) '&[1]': &[u8;_] [212; 216) '&[1]': &[u8; _]
[213; 216) '[1]': [u8;_] [213; 216) '[1]': [u8; _]
[214; 215) '1': u8 [214; 215) '1': u8
[227; 228) 'a': &[u8] [227; 228) 'a': &[u8]
[237; 240) 'arr': &[u8;_] [237; 240) 'arr': &[u8; _]
[250; 251) 'b': u8 [250; 251) 'b': u8
[254; 255) 'f': fn f<u8>(&[T]) -> T [254; 255) 'f': fn f<u8>(&[u8]) -> u8
[254; 260) 'f(arr)': u8 [254; 260) 'f(arr)': u8
[256; 259) 'arr': &[u8;_] [256; 259) 'arr': &[u8; _]
[270; 271) 'c': &[u8] [270; 271) 'c': &[u8]
[280; 287) '{ arr }': &[u8] [280; 287) '{ arr }': &[u8]
[282; 285) 'arr': &[u8;_] [282; 285) 'arr': &[u8; _]
[297; 298) 'd': u8 [297; 298) 'd': u8
[301; 302) 'g': fn g<u8>(S<&[T]>) -> T [301; 302) 'g': fn g<u8>(S<&[u8]>) -> u8
[301; 316) 'g(S { a: arr })': u8 [301; 316) 'g(S { a: arr })': u8
[303; 315) 'S { a: arr }': S<&[u8]> [303; 315) 'S { a: arr }': S<&[u8]>
[310; 313) 'arr': &[u8;_] [310; 313) 'arr': &[u8; _]
[326; 327) 'e': [&[u8];_] [326; 327) 'e': [&[u8]; _]
[341; 346) '[arr]': [&[u8];_] [341; 346) '[arr]': [&[u8]; _]
[342; 345) 'arr': &[u8;_] [342; 345) 'arr': &[u8; _]
[356; 357) 'f': [&[u8];_] [356; 357) 'f': [&[u8]; _]
[371; 379) '[arr; 2]': [&[u8];_] [371; 379) '[arr; 2]': [&[u8]; _]
[372; 375) 'arr': &[u8;_] [372; 375) 'arr': &[u8; _]
[377; 378) '2': usize [377; 378) '2': usize
[389; 390) 'g': (&[u8], &[u8]) [389; 390) 'g': (&[u8], &[u8])
[407; 417) '(arr, arr)': (&[u8], &[u8]) [407; 417) '(arr, arr)': (&[u8], &[u8])
[408; 411) 'arr': &[u8;_] [408; 411) 'arr': &[u8; _]
[413; 416) 'arr': &[u8;_] [413; 416) 'arr': &[u8; _]
"### "###
); );
} }
@ -122,8 +122,8 @@ fn test() {
@r###" @r###"
[11; 40) '{ ...[1]; }': () [11; 40) '{ ...[1]; }': ()
[21; 22) 'x': &[i32] [21; 22) 'x': &[i32]
[33; 37) '&[1]': &[i32;_] [33; 37) '&[1]': &[i32; _]
[34; 37) '[1]': [i32;_] [34; 37) '[1]': [i32; _]
[35; 36) '1': i32 [35; 36) '1': i32
"###); "###);
} }
@ -159,22 +159,22 @@ fn test(a: A<[u8; 2]>, b: B<[u8; 2]>, c: C<[u8; 2]>) {
[334; 335) 'x': C<[T]> [334; 335) 'x': C<[T]>
[355; 360) '{ x }': C<[T]> [355; 360) '{ x }': C<[T]>
[357; 358) 'x': C<[T]> [357; 358) 'x': C<[T]>
[370; 371) 'a': A<[u8;_]> [370; 371) 'a': A<[u8; _]>
[385; 386) 'b': B<[u8;_]> [385; 386) 'b': B<[u8; _]>
[400; 401) 'c': C<[u8;_]> [400; 401) 'c': C<[u8; _]>
[415; 481) '{ ...(c); }': () [415; 481) '{ ...(c); }': ()
[425; 426) 'd': A<[{unknown}]> [425; 426) 'd': A<[{unknown}]>
[429; 433) 'foo1': fn foo1<{unknown}>(A<[T]>) -> A<[T]> [429; 433) 'foo1': fn foo1<{unknown}>(A<[{unknown}]>) -> A<[{unknown}]>
[429; 436) 'foo1(a)': A<[{unknown}]> [429; 436) 'foo1(a)': A<[{unknown}]>
[434; 435) 'a': A<[u8;_]> [434; 435) 'a': A<[u8; _]>
[446; 447) 'e': B<[u8]> [446; 447) 'e': B<[u8]>
[450; 454) 'foo2': fn foo2<u8>(B<[T]>) -> B<[T]> [450; 454) 'foo2': fn foo2<u8>(B<[u8]>) -> B<[u8]>
[450; 457) 'foo2(b)': B<[u8]> [450; 457) 'foo2(b)': B<[u8]>
[455; 456) 'b': B<[u8;_]> [455; 456) 'b': B<[u8; _]>
[467; 468) 'f': C<[u8]> [467; 468) 'f': C<[u8]>
[471; 475) 'foo3': fn foo3<u8>(C<[T]>) -> C<[T]> [471; 475) 'foo3': fn foo3<u8>(C<[u8]>) -> C<[u8]>
[471; 478) 'foo3(c)': C<[u8]> [471; 478) 'foo3(c)': C<[u8]>
[476; 477) 'c': C<[u8;_]> [476; 477) 'c': C<[u8; _]>
"### "###
); );
} }
@ -202,14 +202,14 @@ fn test() {
[64; 123) 'if tru... }': &[i32] [64; 123) 'if tru... }': &[i32]
[67; 71) 'true': bool [67; 71) 'true': bool
[72; 97) '{ ... }': &[i32] [72; 97) '{ ... }': &[i32]
[82; 85) 'foo': fn foo<i32>(&[T]) -> &[T] [82; 85) 'foo': fn foo<i32>(&[i32]) -> &[i32]
[82; 91) 'foo(&[1])': &[i32] [82; 91) 'foo(&[1])': &[i32]
[86; 90) '&[1]': &[i32;_] [86; 90) '&[1]': &[i32; _]
[87; 90) '[1]': [i32;_] [87; 90) '[1]': [i32; _]
[88; 89) '1': i32 [88; 89) '1': i32
[103; 123) '{ ... }': &[i32;_] [103; 123) '{ ... }': &[i32; _]
[113; 117) '&[1]': &[i32;_] [113; 117) '&[1]': &[i32; _]
[114; 117) '[1]': [i32;_] [114; 117) '[1]': [i32; _]
[115; 116) '1': i32 [115; 116) '1': i32
"### "###
); );
@ -237,15 +237,15 @@ fn test() {
[60; 61) 'x': &[i32] [60; 61) 'x': &[i32]
[64; 123) 'if tru... }': &[i32] [64; 123) 'if tru... }': &[i32]
[67; 71) 'true': bool [67; 71) 'true': bool
[72; 92) '{ ... }': &[i32;_] [72; 92) '{ ... }': &[i32; _]
[82; 86) '&[1]': &[i32;_] [82; 86) '&[1]': &[i32; _]
[83; 86) '[1]': [i32;_] [83; 86) '[1]': [i32; _]
[84; 85) '1': i32 [84; 85) '1': i32
[98; 123) '{ ... }': &[i32] [98; 123) '{ ... }': &[i32]
[108; 111) 'foo': fn foo<i32>(&[T]) -> &[T] [108; 111) 'foo': fn foo<i32>(&[i32]) -> &[i32]
[108; 117) 'foo(&[1])': &[i32] [108; 117) 'foo(&[1])': &[i32]
[112; 116) '&[1]': &[i32;_] [112; 116) '&[1]': &[i32; _]
[113; 116) '[1]': [i32;_] [113; 116) '[1]': [i32; _]
[114; 115) '1': i32 [114; 115) '1': i32
"### "###
); );
@ -275,18 +275,18 @@ fn test(i: i32) {
[70; 147) 'match ... }': &[i32] [70; 147) 'match ... }': &[i32]
[76; 77) 'i': i32 [76; 77) 'i': i32
[88; 89) '2': i32 [88; 89) '2': i32
[93; 96) 'foo': fn foo<i32>(&[T]) -> &[T] [93; 96) 'foo': fn foo<i32>(&[i32]) -> &[i32]
[93; 102) 'foo(&[2])': &[i32] [93; 102) 'foo(&[2])': &[i32]
[97; 101) '&[2]': &[i32;_] [97; 101) '&[2]': &[i32; _]
[98; 101) '[2]': [i32;_] [98; 101) '[2]': [i32; _]
[99; 100) '2': i32 [99; 100) '2': i32
[112; 113) '1': i32 [112; 113) '1': i32
[117; 121) '&[1]': &[i32;_] [117; 121) '&[1]': &[i32; _]
[118; 121) '[1]': [i32;_] [118; 121) '[1]': [i32; _]
[119; 120) '1': i32 [119; 120) '1': i32
[131; 132) '_': i32 [131; 132) '_': i32
[136; 140) '&[3]': &[i32;_] [136; 140) '&[3]': &[i32; _]
[137; 140) '[3]': [i32;_] [137; 140) '[3]': [i32; _]
[138; 139) '3': i32 [138; 139) '3': i32
"### "###
); );
@ -316,18 +316,18 @@ fn test(i: i32) {
[70; 147) 'match ... }': &[i32] [70; 147) 'match ... }': &[i32]
[76; 77) 'i': i32 [76; 77) 'i': i32
[88; 89) '1': i32 [88; 89) '1': i32
[93; 97) '&[1]': &[i32;_] [93; 97) '&[1]': &[i32; _]
[94; 97) '[1]': [i32;_] [94; 97) '[1]': [i32; _]
[95; 96) '1': i32 [95; 96) '1': i32
[107; 108) '2': i32 [107; 108) '2': i32
[112; 115) 'foo': fn foo<i32>(&[T]) -> &[T] [112; 115) 'foo': fn foo<i32>(&[i32]) -> &[i32]
[112; 121) 'foo(&[2])': &[i32] [112; 121) 'foo(&[2])': &[i32]
[116; 120) '&[2]': &[i32;_] [116; 120) '&[2]': &[i32; _]
[117; 120) '[2]': [i32;_] [117; 120) '[2]': [i32; _]
[118; 119) '2': i32 [118; 119) '2': i32
[131; 132) '_': i32 [131; 132) '_': i32
[136; 140) '&[3]': &[i32;_] [136; 140) '&[3]': &[i32; _]
[137; 140) '[3]': [i32;_] [137; 140) '[3]': [i32; _]
[138; 139) '3': i32 [138; 139) '3': i32
"### "###
); );
@ -438,16 +438,16 @@ fn test() {
[43; 45) '*x': T [43; 45) '*x': T
[44; 45) 'x': &T [44; 45) 'x': &T
[58; 127) '{ ...oo); }': () [58; 127) '{ ...oo); }': ()
[64; 73) 'takes_ref': fn takes_ref<Foo>(&T) -> T [64; 73) 'takes_ref': fn takes_ref<Foo>(&Foo) -> Foo
[64; 79) 'takes_ref(&Foo)': Foo [64; 79) 'takes_ref(&Foo)': Foo
[74; 78) '&Foo': &Foo [74; 78) '&Foo': &Foo
[75; 78) 'Foo': Foo [75; 78) 'Foo': Foo
[85; 94) 'takes_ref': fn takes_ref<&Foo>(&T) -> T [85; 94) 'takes_ref': fn takes_ref<&Foo>(&&Foo) -> &Foo
[85; 101) 'takes_...&&Foo)': &Foo [85; 101) 'takes_...&&Foo)': &Foo
[95; 100) '&&Foo': &&Foo [95; 100) '&&Foo': &&Foo
[96; 100) '&Foo': &Foo [96; 100) '&Foo': &Foo
[97; 100) 'Foo': Foo [97; 100) 'Foo': Foo
[107; 116) 'takes_ref': fn takes_ref<&&Foo>(&T) -> T [107; 116) 'takes_ref': fn takes_ref<&&Foo>(&&&Foo) -> &&Foo
[107; 124) 'takes_...&&Foo)': &&Foo [107; 124) 'takes_...&&Foo)': &&Foo
[117; 123) '&&&Foo': &&&Foo [117; 123) '&&&Foo': &&&Foo
[118; 123) '&&Foo': &&Foo [118; 123) '&&Foo': &&Foo

View file

@ -27,7 +27,7 @@ fn test() {
[66; 73) 'loop {}': ! [66; 73) 'loop {}': !
[71; 73) '{}': () [71; 73) '{}': ()
[133; 160) '{ ...o"); }': () [133; 160) '{ ...o"); }': ()
[139; 149) '<[_]>::foo': fn foo<u8>(&[T]) -> T [139; 149) '<[_]>::foo': fn foo<u8>(&[u8]) -> u8
[139; 157) '<[_]>:..."foo")': u8 [139; 157) '<[_]>:..."foo")': u8
[150; 156) 'b"foo"': &[u8] [150; 156) 'b"foo"': &[u8]
"### "###
@ -175,7 +175,7 @@ fn test() {
[98; 101) 'val': T [98; 101) 'val': T
[123; 155) '{ ...32); }': () [123; 155) '{ ...32); }': ()
[133; 134) 'a': Gen<u32> [133; 134) 'a': Gen<u32>
[137; 146) 'Gen::make': fn make<u32>(T) -> Gen<T> [137; 146) 'Gen::make': fn make<u32>(u32) -> Gen<u32>
[137; 152) 'Gen::make(0u32)': Gen<u32> [137; 152) 'Gen::make(0u32)': Gen<u32>
[147; 151) '0u32': u32 [147; 151) '0u32': u32
"### "###
@ -206,7 +206,7 @@ fn test() {
[95; 98) '{ }': () [95; 98) '{ }': ()
[118; 146) '{ ...e(); }': () [118; 146) '{ ...e(); }': ()
[128; 129) 'a': Gen<u32> [128; 129) 'a': Gen<u32>
[132; 141) 'Gen::make': fn make<u32>() -> Gen<T> [132; 141) 'Gen::make': fn make<u32>() -> Gen<u32>
[132; 143) 'Gen::make()': Gen<u32> [132; 143) 'Gen::make()': Gen<u32>
"### "###
); );
@ -260,7 +260,7 @@ fn test() {
[91; 94) '{ }': () [91; 94) '{ }': ()
[114; 149) '{ ...e(); }': () [114; 149) '{ ...e(); }': ()
[124; 125) 'a': Gen<u32> [124; 125) 'a': Gen<u32>
[128; 144) 'Gen::<...::make': fn make<u32>() -> Gen<T> [128; 144) 'Gen::<...::make': fn make<u32>() -> Gen<u32>
[128; 146) 'Gen::<...make()': Gen<u32> [128; 146) 'Gen::<...make()': Gen<u32>
"### "###
); );
@ -291,7 +291,7 @@ fn test() {
[117; 120) '{ }': () [117; 120) '{ }': ()
[140; 180) '{ ...e(); }': () [140; 180) '{ ...e(); }': ()
[150; 151) 'a': Gen<u32, u64> [150; 151) 'a': Gen<u32, u64>
[154; 175) 'Gen::<...::make': fn make<u64>() -> Gen<u32, T> [154; 175) 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64>
[154; 177) 'Gen::<...make()': Gen<u32, u64> [154; 177) 'Gen::<...make()': Gen<u32, u64>
"### "###
); );
@ -475,7 +475,7 @@ fn test() {
@r###" @r###"
[33; 37) 'self': &Self [33; 37) 'self': &Self
[102; 127) '{ ...d(); }': () [102; 127) '{ ...d(); }': ()
[108; 109) 'S': S<u32>(T) -> S<T> [108; 109) 'S': S<u32>(u32) -> S<u32>
[108; 115) 'S(1u32)': S<u32> [108; 115) 'S(1u32)': S<u32>
[108; 124) 'S(1u32...thod()': u32 [108; 124) 'S(1u32...thod()': u32
[110; 114) '1u32': u32 [110; 114) '1u32': u32
@ -501,13 +501,13 @@ fn test() {
@r###" @r###"
[87; 193) '{ ...t(); }': () [87; 193) '{ ...t(); }': ()
[97; 99) 's1': S [97; 99) 's1': S
[105; 121) 'Defaul...efault': fn default<S>() -> Self [105; 121) 'Defaul...efault': fn default<S>() -> S
[105; 123) 'Defaul...ault()': S [105; 123) 'Defaul...ault()': S
[133; 135) 's2': S [133; 135) 's2': S
[138; 148) 'S::default': fn default<S>() -> Self [138; 148) 'S::default': fn default<S>() -> S
[138; 150) 'S::default()': S [138; 150) 'S::default()': S
[160; 162) 's3': S [160; 162) 's3': S
[165; 188) '<S as ...efault': fn default<S>() -> Self [165; 188) '<S as ...efault': fn default<S>() -> S
[165; 190) '<S as ...ault()': S [165; 190) '<S as ...ault()': S
"### "###
); );
@ -533,13 +533,13 @@ fn test() {
@r###" @r###"
[127; 211) '{ ...e(); }': () [127; 211) '{ ...e(); }': ()
[137; 138) 'a': u32 [137; 138) 'a': u32
[141; 148) 'S::make': fn make<S, u32>() -> T [141; 148) 'S::make': fn make<S, u32>() -> u32
[141; 150) 'S::make()': u32 [141; 150) 'S::make()': u32
[160; 161) 'b': u64 [160; 161) 'b': u64
[164; 178) 'G::<u64>::make': fn make<G<u64>, u64>() -> T [164; 178) 'G::<u64>::make': fn make<G<u64>, u64>() -> u64
[164; 180) 'G::<u6...make()': u64 [164; 180) 'G::<u6...make()': u64
[190; 191) 'c': f64 [190; 191) 'c': f64
[199; 206) 'G::make': fn make<G<f64>, f64>() -> T [199; 206) 'G::make': fn make<G<f64>, f64>() -> f64
[199; 208) 'G::make()': f64 [199; 208) 'G::make()': f64
"### "###
); );
@ -567,19 +567,19 @@ fn test() {
@r###" @r###"
[135; 313) '{ ...e(); }': () [135; 313) '{ ...e(); }': ()
[145; 146) 'a': (u32, i64) [145; 146) 'a': (u32, i64)
[149; 163) 'S::make::<i64>': fn make<S, u32, i64>() -> (T, U) [149; 163) 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64)
[149; 165) 'S::mak...i64>()': (u32, i64) [149; 165) 'S::mak...i64>()': (u32, i64)
[175; 176) 'b': (u32, i64) [175; 176) 'b': (u32, i64)
[189; 196) 'S::make': fn make<S, u32, i64>() -> (T, U) [189; 196) 'S::make': fn make<S, u32, i64>() -> (u32, i64)
[189; 198) 'S::make()': (u32, i64) [189; 198) 'S::make()': (u32, i64)
[208; 209) 'c': (u32, i64) [208; 209) 'c': (u32, i64)
[212; 233) 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (T, U) [212; 233) 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
[212; 235) 'G::<u3...i64>()': (u32, i64) [212; 235) 'G::<u3...i64>()': (u32, i64)
[245; 246) 'd': (u32, i64) [245; 246) 'd': (u32, i64)
[259; 273) 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (T, U) [259; 273) 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
[259; 275) 'G::mak...i64>()': (u32, i64) [259; 275) 'G::mak...i64>()': (u32, i64)
[285; 286) 'e': (u32, i64) [285; 286) 'e': (u32, i64)
[301; 308) 'G::make': fn make<G<u32>, u32, i64>() -> (T, U) [301; 308) 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64)
[301; 310) 'G::make()': (u32, i64) [301; 310) 'G::make()': (u32, i64)
"### "###
); );
@ -601,7 +601,7 @@ fn test() {
@r###" @r###"
[101; 127) '{ ...e(); }': () [101; 127) '{ ...e(); }': ()
[111; 112) 'a': (S<i32>, i64) [111; 112) 'a': (S<i32>, i64)
[115; 122) 'S::make': fn make<S<i32>, i64>() -> (Self, T) [115; 122) 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
[115; 124) 'S::make()': (S<i32>, i64) [115; 124) 'S::make()': (S<i32>, i64)
"### "###
); );
@ -625,10 +625,10 @@ fn test() {
@r###" @r###"
[131; 203) '{ ...e(); }': () [131; 203) '{ ...e(); }': ()
[141; 142) 'a': (S<u64>, i64) [141; 142) 'a': (S<u64>, i64)
[158; 165) 'S::make': fn make<S<u64>, i64>() -> (Self, T) [158; 165) 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
[158; 167) 'S::make()': (S<u64>, i64) [158; 167) 'S::make()': (S<u64>, i64)
[177; 178) 'b': (S<u32>, i32) [177; 178) 'b': (S<u32>, i32)
[191; 198) 'S::make': fn make<S<u32>, i32>() -> (Self, T) [191; 198) 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
[191; 200) 'S::make()': (S<u32>, i32) [191; 200) 'S::make()': (S<u32>, i32)
"### "###
); );
@ -651,10 +651,10 @@ fn test() {
@r###" @r###"
[107; 211) '{ ...>(); }': () [107; 211) '{ ...>(); }': ()
[117; 118) 'a': (S<u64>, i64, u8) [117; 118) 'a': (S<u64>, i64, u8)
[121; 150) '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (Self, T, U) [121; 150) '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
[121; 152) '<S as ...<u8>()': (S<u64>, i64, u8) [121; 152) '<S as ...<u8>()': (S<u64>, i64, u8)
[162; 163) 'b': (S<u64>, i64, u8) [162; 163) 'b': (S<u64>, i64, u8)
[182; 206) 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (Self, T, U) [182; 206) 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
[182; 208) 'Trait:...<u8>()': (S<u64>, i64, u8) [182; 208) 'Trait:...<u8>()': (S<u64>, i64, u8)
"### "###
); );
@ -697,7 +697,7 @@ fn test<U, T: Trait<U>>(t: T) {
[71; 72) 't': T [71; 72) 't': T
[77; 96) '{ ...d(); }': () [77; 96) '{ ...d(); }': ()
[83; 84) 't': T [83; 84) 't': T
[83; 93) 't.method()': [missing name] [83; 93) 't.method()': U
"### "###
); );
} }
@ -728,7 +728,7 @@ fn test() {
[157; 158) 'S': S [157; 158) 'S': S
[157; 165) 'S.into()': u64 [157; 165) 'S.into()': u64
[175; 176) 'z': u64 [175; 176) 'z': u64
[179; 196) 'Into::...::into': fn into<S, u64>(Self) -> T [179; 196) 'Into::...::into': fn into<S, u64>(S) -> u64
[179; 199) 'Into::...nto(S)': u64 [179; 199) 'Into::...nto(S)': u64
[197; 198) 'S': S [197; 198) 'S': S
"### "###

View file

@ -96,13 +96,13 @@ fn test() {
[38; 42) 'A(n)': A<i32> [38; 42) 'A(n)': A<i32>
[40; 41) 'n': &i32 [40; 41) 'n': &i32
[45; 50) '&A(1)': &A<i32> [45; 50) '&A(1)': &A<i32>
[46; 47) 'A': A<i32>(T) -> A<T> [46; 47) 'A': A<i32>(i32) -> A<i32>
[46; 50) 'A(1)': A<i32> [46; 50) 'A(1)': A<i32>
[48; 49) '1': i32 [48; 49) '1': i32
[60; 64) 'A(n)': A<i32> [60; 64) 'A(n)': A<i32>
[62; 63) 'n': &mut i32 [62; 63) 'n': &mut i32
[67; 76) '&mut A(1)': &mut A<i32> [67; 76) '&mut A(1)': &mut A<i32>
[72; 73) 'A': A<i32>(T) -> A<T> [72; 73) 'A': A<i32>(i32) -> A<i32>
[72; 76) 'A(1)': A<i32> [72; 76) 'A(1)': A<i32>
[74; 75) '1': i32 [74; 75) '1': i32
"### "###

View file

@ -102,7 +102,7 @@ fn test() {
[11; 48) '{ ...&y]; }': () [11; 48) '{ ...&y]; }': ()
[21; 22) 'y': &{unknown} [21; 22) 'y': &{unknown}
[25; 32) 'unknown': &{unknown} [25; 32) 'unknown': &{unknown}
[38; 45) '[y, &y]': [&&{unknown};_] [38; 45) '[y, &y]': [&&{unknown}; _]
[39; 40) 'y': &{unknown} [39; 40) 'y': &{unknown}
[42; 44) '&y': &&{unknown} [42; 44) '&y': &&{unknown}
[43; 44) 'y': &{unknown} [43; 44) 'y': &{unknown}
@ -128,7 +128,7 @@ fn test() {
[25; 32) 'unknown': &&{unknown} [25; 32) 'unknown': &&{unknown}
[42; 43) 'y': &&{unknown} [42; 43) 'y': &&{unknown}
[46; 53) 'unknown': &&{unknown} [46; 53) 'unknown': &&{unknown}
[59; 77) '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown});_] [59; 77) '[(x, y..., &x)]': [(&&&{unknown}, &&&{unknown}); _]
[60; 66) '(x, y)': (&&&{unknown}, &&&{unknown}) [60; 66) '(x, y)': (&&&{unknown}, &&&{unknown})
[61; 62) 'x': &&{unknown} [61; 62) 'x': &&{unknown}
[64; 65) 'y': &&{unknown} [64; 65) 'y': &&{unknown}
@ -180,8 +180,8 @@ fn test_line_buffer() {
"#), "#),
@r###" @r###"
[23; 53) '{ ...n']; }': () [23; 53) '{ ...n']; }': ()
[29; 50) '&[0, b...b'\n']': &[u8;_] [29; 50) '&[0, b...b'\n']': &[u8; _]
[30; 50) '[0, b'...b'\n']': [u8;_] [30; 50) '[0, b'...b'\n']': [u8; _]
[31; 32) '0': u8 [31; 32) '0': u8
[34; 39) 'b'\n'': u8 [34; 39) 'b'\n'': u8
[41; 42) '1': u8 [41; 42) '1': u8
@ -346,7 +346,7 @@ pub fn main_loop() {
@r###" @r###"
[144; 146) '{}': () [144; 146) '{}': ()
[169; 198) '{ ...t(); }': () [169; 198) '{ ...t(); }': ()
[175; 193) 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<T, H> [175; 193) 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
[175; 195) 'FxHash...ault()': HashSet<{unknown}, FxHasher> [175; 195) 'FxHash...ault()': HashSet<{unknown}, FxHasher>
"### "###
); );

View file

@ -28,7 +28,7 @@ mod boxed {
"#, "#,
); );
assert_eq!("(Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32;_]>)", type_at_pos(&db, pos)); assert_eq!("(Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; _]>)", type_at_pos(&db, pos));
} }
#[test] #[test]
@ -754,15 +754,15 @@ fn test() {
[289; 295) 'self.0': T [289; 295) 'self.0': T
[315; 353) '{ ...))); }': () [315; 353) '{ ...))); }': ()
[325; 326) 't': &i32 [325; 326) 't': &i32
[329; 335) 'A::foo': fn foo<i32>(&A<T>) -> &T [329; 335) 'A::foo': fn foo<i32>(&A<i32>) -> &i32
[329; 350) 'A::foo...42))))': &i32 [329; 350) 'A::foo...42))))': &i32
[336; 349) '&&B(B(A(42)))': &&B<B<A<i32>>> [336; 349) '&&B(B(A(42)))': &&B<B<A<i32>>>
[337; 349) '&B(B(A(42)))': &B<B<A<i32>>> [337; 349) '&B(B(A(42)))': &B<B<A<i32>>>
[338; 339) 'B': B<B<A<i32>>>(T) -> B<T> [338; 339) 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
[338; 349) 'B(B(A(42)))': B<B<A<i32>>> [338; 349) 'B(B(A(42)))': B<B<A<i32>>>
[340; 341) 'B': B<A<i32>>(T) -> B<T> [340; 341) 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
[340; 348) 'B(A(42))': B<A<i32>> [340; 348) 'B(A(42))': B<A<i32>>
[342; 343) 'A': A<i32>(T) -> A<T> [342; 343) 'A': A<i32>(i32) -> A<i32>
[342; 347) 'A(42)': A<i32> [342; 347) 'A(42)': A<i32>
[344; 346) '42': i32 [344; 346) '42': i32
"### "###
@ -817,16 +817,16 @@ fn test(a: A<i32>) {
[326; 327) 'a': A<i32> [326; 327) 'a': A<i32>
[337; 383) '{ ...))); }': () [337; 383) '{ ...))); }': ()
[347; 348) 't': &i32 [347; 348) 't': &i32
[351; 352) 'A': A<i32>(*mut T) -> A<T> [351; 352) 'A': A<i32>(*mut i32) -> A<i32>
[351; 365) 'A(0 as *mut _)': A<i32> [351; 365) 'A(0 as *mut _)': A<i32>
[351; 380) 'A(0 as...B(a)))': &i32 [351; 380) 'A(0 as...B(a)))': &i32
[353; 354) '0': i32 [353; 354) '0': i32
[353; 364) '0 as *mut _': *mut i32 [353; 364) '0 as *mut _': *mut i32
[370; 379) '&&B(B(a))': &&B<B<A<i32>>> [370; 379) '&&B(B(a))': &&B<B<A<i32>>>
[371; 379) '&B(B(a))': &B<B<A<i32>>> [371; 379) '&B(B(a))': &B<B<A<i32>>>
[372; 373) 'B': B<B<A<i32>>>(T) -> B<T> [372; 373) 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
[372; 379) 'B(B(a))': B<B<A<i32>>> [372; 379) 'B(B(a))': B<B<A<i32>>>
[374; 375) 'B': B<A<i32>>(T) -> B<T> [374; 375) 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
[374; 378) 'B(a)': B<A<i32>> [374; 378) 'B(a)': B<A<i32>>
[376; 377) 'a': A<i32> [376; 377) 'a': A<i32>
"### "###
@ -1061,55 +1061,55 @@ fn test(x: &str, y: isize) {
[9; 10) 'x': &str [9; 10) 'x': &str
[18; 19) 'y': isize [18; 19) 'y': isize
[28; 293) '{ ... []; }': () [28; 293) '{ ... []; }': ()
[38; 39) 'a': [&str;_] [38; 39) 'a': [&str; _]
[42; 45) '[x]': [&str;_] [42; 45) '[x]': [&str; _]
[43; 44) 'x': &str [43; 44) 'x': &str
[55; 56) 'b': [[&str;_];_] [55; 56) 'b': [[&str; _]; _]
[59; 65) '[a, a]': [[&str;_];_] [59; 65) '[a, a]': [[&str; _]; _]
[60; 61) 'a': [&str;_] [60; 61) 'a': [&str; _]
[63; 64) 'a': [&str;_] [63; 64) 'a': [&str; _]
[75; 76) 'c': [[[&str;_];_];_] [75; 76) 'c': [[[&str; _]; _]; _]
[79; 85) '[b, b]': [[[&str;_];_];_] [79; 85) '[b, b]': [[[&str; _]; _]; _]
[80; 81) 'b': [[&str;_];_] [80; 81) 'b': [[&str; _]; _]
[83; 84) 'b': [[&str;_];_] [83; 84) 'b': [[&str; _]; _]
[96; 97) 'd': [isize;_] [96; 97) 'd': [isize; _]
[100; 112) '[y, 1, 2, 3]': [isize;_] [100; 112) '[y, 1, 2, 3]': [isize; _]
[101; 102) 'y': isize [101; 102) 'y': isize
[104; 105) '1': isize [104; 105) '1': isize
[107; 108) '2': isize [107; 108) '2': isize
[110; 111) '3': isize [110; 111) '3': isize
[122; 123) 'd': [isize;_] [122; 123) 'd': [isize; _]
[126; 138) '[1, y, 2, 3]': [isize;_] [126; 138) '[1, y, 2, 3]': [isize; _]
[127; 128) '1': isize [127; 128) '1': isize
[130; 131) 'y': isize [130; 131) 'y': isize
[133; 134) '2': isize [133; 134) '2': isize
[136; 137) '3': isize [136; 137) '3': isize
[148; 149) 'e': [isize;_] [148; 149) 'e': [isize; _]
[152; 155) '[y]': [isize;_] [152; 155) '[y]': [isize; _]
[153; 154) 'y': isize [153; 154) 'y': isize
[165; 166) 'f': [[isize;_];_] [165; 166) 'f': [[isize; _]; _]
[169; 175) '[d, d]': [[isize;_];_] [169; 175) '[d, d]': [[isize; _]; _]
[170; 171) 'd': [isize;_] [170; 171) 'd': [isize; _]
[173; 174) 'd': [isize;_] [173; 174) 'd': [isize; _]
[185; 186) 'g': [[isize;_];_] [185; 186) 'g': [[isize; _]; _]
[189; 195) '[e, e]': [[isize;_];_] [189; 195) '[e, e]': [[isize; _]; _]
[190; 191) 'e': [isize;_] [190; 191) 'e': [isize; _]
[193; 194) 'e': [isize;_] [193; 194) 'e': [isize; _]
[206; 207) 'h': [i32;_] [206; 207) 'h': [i32; _]
[210; 216) '[1, 2]': [i32;_] [210; 216) '[1, 2]': [i32; _]
[211; 212) '1': i32 [211; 212) '1': i32
[214; 215) '2': i32 [214; 215) '2': i32
[226; 227) 'i': [&str;_] [226; 227) 'i': [&str; _]
[230; 240) '["a", "b"]': [&str;_] [230; 240) '["a", "b"]': [&str; _]
[231; 234) '"a"': &str [231; 234) '"a"': &str
[236; 239) '"b"': &str [236; 239) '"b"': &str
[251; 252) 'b': [[&str;_];_] [251; 252) 'b': [[&str; _]; _]
[255; 265) '[a, ["b"]]': [[&str;_];_] [255; 265) '[a, ["b"]]': [[&str; _]; _]
[256; 257) 'a': [&str;_] [256; 257) 'a': [&str; _]
[259; 264) '["b"]': [&str;_] [259; 264) '["b"]': [&str; _]
[260; 263) '"b"': &str [260; 263) '"b"': &str
[275; 276) 'x': [u8;_] [275; 276) 'x': [u8; _]
[288; 290) '[]': [u8;_] [288; 290) '[]': [u8; _]
"### "###
); );
} }
@ -1169,16 +1169,16 @@ fn test() {
"#), "#),
@r###" @r###"
[76; 184) '{ ...one; }': () [76; 184) '{ ...one; }': ()
[82; 83) 'A': A<i32>(T) -> A<T> [82; 83) 'A': A<i32>(i32) -> A<i32>
[82; 87) 'A(42)': A<i32> [82; 87) 'A(42)': A<i32>
[84; 86) '42': i32 [84; 86) '42': i32
[93; 94) 'A': A<u128>(T) -> A<T> [93; 94) 'A': A<u128>(u128) -> A<u128>
[93; 102) 'A(42u128)': A<u128> [93; 102) 'A(42u128)': A<u128>
[95; 101) '42u128': u128 [95; 101) '42u128': u128
[108; 112) 'Some': Some<&str>(T) -> Option<T> [108; 112) 'Some': Some<&str>(&str) -> Option<&str>
[108; 117) 'Some("x")': Option<&str> [108; 117) 'Some("x")': Option<&str>
[113; 116) '"x"': &str [113; 116) '"x"': &str
[123; 135) 'Option::Some': Some<&str>(T) -> Option<T> [123; 135) 'Option::Some': Some<&str>(&str) -> Option<&str>
[123; 140) 'Option...e("x")': Option<&str> [123; 140) 'Option...e("x")': Option<&str>
[136; 139) '"x"': &str [136; 139) '"x"': &str
[146; 150) 'None': Option<{unknown}> [146; 150) 'None': Option<{unknown}>
@ -1205,14 +1205,14 @@ fn test() {
[21; 26) '{ t }': T [21; 26) '{ t }': T
[23; 24) 't': T [23; 24) 't': T
[38; 98) '{ ...(1); }': () [38; 98) '{ ...(1); }': ()
[44; 46) 'id': fn id<u32>(T) -> T [44; 46) 'id': fn id<u32>(u32) -> u32
[44; 52) 'id(1u32)': u32 [44; 52) 'id(1u32)': u32
[47; 51) '1u32': u32 [47; 51) '1u32': u32
[58; 68) 'id::<i128>': fn id<i128>(T) -> T [58; 68) 'id::<i128>': fn id<i128>(i128) -> i128
[58; 71) 'id::<i128>(1)': i128 [58; 71) 'id::<i128>(1)': i128
[69; 70) '1': i128 [69; 70) '1': i128
[81; 82) 'x': u64 [81; 82) 'x': u64
[90; 92) 'id': fn id<u64>(T) -> T [90; 92) 'id': fn id<u64>(u64) -> u64
[90; 95) 'id(1)': u64 [90; 95) 'id(1)': u64
[93; 94) '1': u64 [93; 94) '1': u64
"### "###
@ -1220,7 +1220,7 @@ fn test() {
} }
#[test] #[test]
fn infer_impl_generics() { fn infer_impl_generics_basic() {
assert_snapshot!( assert_snapshot!(
infer(r#" infer(r#"
struct A<T1, T2> { struct A<T1, T2> {
@ -1349,16 +1349,16 @@ fn test() -> i128 {
[146; 147) 'x': i128 [146; 147) 'x': i128
[150; 151) '1': i128 [150; 151) '1': i128
[162; 163) 'y': i128 [162; 163) 'y': i128
[166; 168) 'id': fn id<i128>(T) -> T [166; 168) 'id': fn id<i128>(i128) -> i128
[166; 171) 'id(x)': i128 [166; 171) 'id(x)': i128
[169; 170) 'x': i128 [169; 170) 'x': i128
[182; 183) 'a': A<i128> [182; 183) 'a': A<i128>
[186; 200) 'A { x: id(y) }': A<i128> [186; 200) 'A { x: id(y) }': A<i128>
[193; 195) 'id': fn id<i128>(T) -> T [193; 195) 'id': fn id<i128>(i128) -> i128
[193; 198) 'id(y)': i128 [193; 198) 'id(y)': i128
[196; 197) 'y': i128 [196; 197) 'y': i128
[211; 212) 'z': i128 [211; 212) 'z': i128
[215; 217) 'id': fn id<i128>(T) -> T [215; 217) 'id': fn id<i128>(i128) -> i128
[215; 222) 'id(a.x)': i128 [215; 222) 'id(a.x)': i128
[218; 219) 'a': A<i128> [218; 219) 'a': A<i128>
[218; 221) 'a.x': i128 [218; 221) 'a.x': i128
@ -1502,14 +1502,14 @@ fn test() {
[78; 158) '{ ...(1); }': () [78; 158) '{ ...(1); }': ()
[88; 89) 'y': u32 [88; 89) 'y': u32
[92; 97) '10u32': u32 [92; 97) '10u32': u32
[103; 105) 'id': fn id<u32>(T) -> T [103; 105) 'id': fn id<u32>(u32) -> u32
[103; 108) 'id(y)': u32 [103; 108) 'id(y)': u32
[106; 107) 'y': u32 [106; 107) 'y': u32
[118; 119) 'x': bool [118; 119) 'x': bool
[128; 133) 'clone': fn clone<bool>(&T) -> T [128; 133) 'clone': fn clone<bool>(&bool) -> bool
[128; 136) 'clone(z)': bool [128; 136) 'clone(z)': bool
[134; 135) 'z': &bool [134; 135) 'z': &bool
[142; 152) 'id::<i128>': fn id<i128>(T) -> T [142; 152) 'id::<i128>': fn id<i128>(i128) -> i128
[142; 155) 'id::<i128>(1)': i128 [142; 155) 'id::<i128>(1)': i128
[153; 154) '1': i128 [153; 154) '1': i128
"### "###

View file

@ -1,7 +1,6 @@
use insta::assert_snapshot; use insta::assert_snapshot;
use ra_db::fixture::WithFixture; use ra_db::fixture::WithFixture;
use test_utils::covers;
use super::{infer, infer_with_mismatches, type_at, type_at_pos}; use super::{infer, infer_with_mismatches, type_at, type_at_pos};
use crate::test_db::TestDB; use crate::test_db::TestDB;
@ -261,10 +260,10 @@ fn test() {
[92; 94) '{}': () [92; 94) '{}': ()
[105; 144) '{ ...(s); }': () [105; 144) '{ ...(s); }': ()
[115; 116) 's': S<u32> [115; 116) 's': S<u32>
[119; 120) 'S': S<u32>(T) -> S<T> [119; 120) 'S': S<u32>(u32) -> S<u32>
[119; 129) 'S(unknown)': S<u32> [119; 129) 'S(unknown)': S<u32>
[121; 128) 'unknown': u32 [121; 128) 'unknown': u32
[135; 138) 'foo': fn foo<S<u32>>(T) -> () [135; 138) 'foo': fn foo<S<u32>>(S<u32>) -> ()
[135; 141) 'foo(s)': () [135; 141) 'foo(s)': ()
[139; 140) 's': S<u32> [139; 140) 's': S<u32>
"### "###
@ -289,11 +288,11 @@ fn test() {
[98; 100) '{}': () [98; 100) '{}': ()
[111; 163) '{ ...(s); }': () [111; 163) '{ ...(s); }': ()
[121; 122) 's': S<u32> [121; 122) 's': S<u32>
[125; 126) 'S': S<u32>(T) -> S<T> [125; 126) 'S': S<u32>(u32) -> S<u32>
[125; 135) 'S(unknown)': S<u32> [125; 135) 'S(unknown)': S<u32>
[127; 134) 'unknown': u32 [127; 134) 'unknown': u32
[145; 146) 'x': u32 [145; 146) 'x': u32
[154; 157) 'foo': fn foo<u32, S<u32>>(T) -> U [154; 157) 'foo': fn foo<u32, S<u32>>(S<u32>) -> u32
[154; 160) 'foo(s)': u32 [154; 160) 'foo(s)': u32
[158; 159) 's': S<u32> [158; 159) 's': S<u32>
"### "###
@ -358,15 +357,15 @@ fn test() {
[221; 223) '{}': () [221; 223) '{}': ()
[234; 300) '{ ...(S); }': () [234; 300) '{ ...(S); }': ()
[244; 245) 'x': u32 [244; 245) 'x': u32
[248; 252) 'foo1': fn foo1<S>(T) -> <T as Iterable>::Item [248; 252) 'foo1': fn foo1<S>(S) -> <S as Iterable>::Item
[248; 255) 'foo1(S)': u32 [248; 255) 'foo1(S)': u32
[253; 254) 'S': S [253; 254) 'S': S
[265; 266) 'y': u32 [265; 266) 'y': u32
[269; 273) 'foo2': fn foo2<S>(T) -> <T as Iterable>::Item [269; 273) 'foo2': fn foo2<S>(S) -> <S as Iterable>::Item
[269; 276) 'foo2(S)': u32 [269; 276) 'foo2(S)': u32
[274; 275) 'S': S [274; 275) 'S': S
[286; 287) 'z': u32 [286; 287) 'z': u32
[290; 294) 'foo3': fn foo3<S>(T) -> <T as Iterable>::Item [290; 294) 'foo3': fn foo3<S>(S) -> <S as Iterable>::Item
[290; 297) 'foo3(S)': u32 [290; 297) 'foo3(S)': u32
[295; 296) 'S': S [295; 296) 'S': S
"### "###
@ -479,7 +478,7 @@ fn indexing_arrays() {
@r###" @r###"
[10; 26) '{ &mut...[2]; }': () [10; 26) '{ &mut...[2]; }': ()
[12; 23) '&mut [9][2]': &mut {unknown} [12; 23) '&mut [9][2]': &mut {unknown}
[17; 20) '[9]': [i32;_] [17; 20) '[9]': [i32; _]
[17; 23) '[9][2]': {unknown} [17; 23) '[9][2]': {unknown}
[18; 19) '9': i32 [18; 19) '9': i32
[21; 22) '2': i32 [21; 22) '2': i32
@ -822,8 +821,7 @@ fn test<T: ApplyL>() {
"#, "#,
); );
// inside the generic function, the associated type gets normalized to a placeholder `ApplL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types]. // inside the generic function, the associated type gets normalized to a placeholder `ApplL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types].
// FIXME: fix type parameter names going missing when going through Chalk assert_eq!(t, "ApplyL::Out<T>");
assert_eq!(t, "ApplyL::Out<[missing name]>");
} }
#[test] #[test]
@ -850,6 +848,198 @@ fn test<T: ApplyL>(t: T) {
} }
#[test] #[test]
fn argument_impl_trait() {
assert_snapshot!(
infer_with_mismatches(r#"
trait Trait<T> {
fn foo(&self) -> T;
fn foo2(&self) -> i64;
}
fn bar(x: impl Trait<u16>) {}
struct S<T>(T);
impl<T> Trait<T> for S<T> {}
fn test(x: impl Trait<u64>, y: &impl Trait<u32>) {
x;
y;
let z = S(1);
bar(z);
x.foo();
y.foo();
z.foo();
x.foo2();
y.foo2();
z.foo2();
}
"#, true),
@r###"
[30; 34) 'self': &Self
[55; 59) 'self': &Self
[78; 79) 'x': impl Trait<u16>
[98; 100) '{}': ()
[155; 156) 'x': impl Trait<u64>
[175; 176) 'y': &impl Trait<u32>
[196; 324) '{ ...2(); }': ()
[202; 203) 'x': impl Trait<u64>
[209; 210) 'y': &impl Trait<u32>
[220; 221) 'z': S<u16>
[224; 225) 'S': S<u16>(u16) -> S<u16>
[224; 228) 'S(1)': S<u16>
[226; 227) '1': u16
[234; 237) 'bar': fn bar(S<u16>) -> ()
[234; 240) 'bar(z)': ()
[238; 239) 'z': S<u16>
[246; 247) 'x': impl Trait<u64>
[246; 253) 'x.foo()': u64
[259; 260) 'y': &impl Trait<u32>
[259; 266) 'y.foo()': u32
[272; 273) 'z': S<u16>
[272; 279) 'z.foo()': u16
[285; 286) 'x': impl Trait<u64>
[285; 293) 'x.foo2()': i64
[299; 300) 'y': &impl Trait<u32>
[299; 307) 'y.foo2()': i64
[313; 314) 'z': S<u16>
[313; 321) 'z.foo2()': i64
"###
);
}
#[test]
fn argument_impl_trait_type_args_1() {
assert_snapshot!(
infer_with_mismatches(r#"
trait Trait {}
trait Foo {
// this function has an implicit Self param, an explicit type param,
// and an implicit impl Trait param!
fn bar<T>(x: impl Trait) -> T { loop {} }
}
fn foo<T>(x: impl Trait) -> T { loop {} }
struct S;
impl Trait for S {}
struct F;
impl Foo for F {}
fn test() {
Foo::bar(S);
<F as Foo>::bar(S);
F::bar(S);
Foo::bar::<u32>(S);
<F as Foo>::bar::<u32>(S);
foo(S);
foo::<u32>(S);
foo::<u32, i32>(S); // we should ignore the extraneous i32
}
"#, true),
@r###"
[156; 157) 'x': impl Trait
[176; 187) '{ loop {} }': T
[178; 185) 'loop {}': !
[183; 185) '{}': ()
[200; 201) 'x': impl Trait
[220; 231) '{ loop {} }': T
[222; 229) 'loop {}': !
[227; 229) '{}': ()
[301; 510) '{ ... i32 }': ()
[307; 315) 'Foo::bar': fn bar<{unknown}, {unknown}>(S) -> {unknown}
[307; 318) 'Foo::bar(S)': {unknown}
[316; 317) 'S': S
[324; 339) '<F as Foo>::bar': fn bar<F, {unknown}>(S) -> {unknown}
[324; 342) '<F as ...bar(S)': {unknown}
[340; 341) 'S': S
[348; 354) 'F::bar': fn bar<F, {unknown}>(S) -> {unknown}
[348; 357) 'F::bar(S)': {unknown}
[355; 356) 'S': S
[363; 378) 'Foo::bar::<u32>': fn bar<{unknown}, u32>(S) -> u32
[363; 381) 'Foo::b...32>(S)': u32
[379; 380) 'S': S
[387; 409) '<F as ...:<u32>': fn bar<F, u32>(S) -> u32
[387; 412) '<F as ...32>(S)': u32
[410; 411) 'S': S
[419; 422) 'foo': fn foo<{unknown}>(S) -> {unknown}
[419; 425) 'foo(S)': {unknown}
[423; 424) 'S': S
[431; 441) 'foo::<u32>': fn foo<u32>(S) -> u32
[431; 444) 'foo::<u32>(S)': u32
[442; 443) 'S': S
[450; 465) 'foo::<u32, i32>': fn foo<u32>(S) -> u32
[450; 468) 'foo::<...32>(S)': u32
[466; 467) 'S': S
"###
);
}
#[test]
fn argument_impl_trait_type_args_2() {
assert_snapshot!(
infer_with_mismatches(r#"
trait Trait {}
struct S;
impl Trait for S {}
struct F<T>;
impl<T> F<T> {
fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
}
fn test() {
F.foo(S);
F::<u32>.foo(S);
F::<u32>.foo::<i32>(S);
F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
}
"#, true),
@r###"
[88; 92) 'self': F<T>
[94; 95) 'x': impl Trait
[119; 130) '{ loop {} }': (T, U)
[121; 128) 'loop {}': !
[126; 128) '{}': ()
[144; 284) '{ ...ored }': ()
[150; 151) 'F': F<{unknown}>
[150; 158) 'F.foo(S)': ({unknown}, {unknown})
[156; 157) 'S': S
[164; 172) 'F::<u32>': F<u32>
[164; 179) 'F::<u32>.foo(S)': (u32, {unknown})
[177; 178) 'S': S
[185; 193) 'F::<u32>': F<u32>
[185; 207) 'F::<u3...32>(S)': (u32, i32)
[205; 206) 'S': S
[213; 221) 'F::<u32>': F<u32>
[213; 240) 'F::<u3...32>(S)': (u32, i32)
[238; 239) 'S': S
"###
);
}
#[test]
fn argument_impl_trait_to_fn_pointer() {
assert_snapshot!(
infer_with_mismatches(r#"
trait Trait {}
fn foo(x: impl Trait) { loop {} }
struct S;
impl Trait for S {}
fn test() {
let f: fn(S) -> () = foo;
}
"#, true),
@r###"
[23; 24) 'x': impl Trait
[38; 49) '{ loop {} }': ()
[40; 47) 'loop {}': !
[45; 47) '{}': ()
[91; 124) '{ ...foo; }': ()
[101; 102) 'f': fn(S) -> ()
[118; 121) 'foo': fn foo(S) -> ()
"###
);
}
#[test]
#[ignore]
fn impl_trait() { fn impl_trait() {
assert_snapshot!( assert_snapshot!(
infer(r#" infer(r#"
@ -993,34 +1183,23 @@ fn weird_bounds() {
assert_snapshot!( assert_snapshot!(
infer(r#" infer(r#"
trait Trait {} trait Trait {}
fn test() { fn test(a: impl Trait + 'lifetime, b: impl 'lifetime, c: impl (Trait), d: impl ('lifetime), e: impl ?Sized, f: impl Trait + ?Sized) {
let a: impl Trait + 'lifetime = foo;
let b: impl 'lifetime = foo;
let b: impl (Trait) = foo;
let b: impl ('lifetime) = foo;
let d: impl ?Sized = foo;
let e: impl Trait + ?Sized = foo;
} }
"#), "#),
@r###" @r###"
[26; 237) '{ ...foo; }': () [24; 25) 'a': impl Trait + {error}
[36; 37) 'a': impl Trait + {error} [51; 52) 'b': impl {error}
[64; 67) 'foo': impl Trait + {error} [70; 71) 'c': impl Trait
[77; 78) 'b': impl {error} [87; 88) 'd': impl {error}
[97; 100) 'foo': impl {error} [108; 109) 'e': impl {error}
[110; 111) 'b': impl Trait [124; 125) 'f': impl Trait + {error}
[128; 131) 'foo': impl Trait [148; 151) '{ }': ()
[141; 142) 'b': impl {error}
[163; 166) 'foo': impl {error}
[176; 177) 'd': impl {error}
[193; 196) 'foo': impl {error}
[206; 207) 'e': impl Trait + {error}
[231; 234) 'foo': impl Trait + {error}
"### "###
); );
} }
#[test] #[test]
#[ignore]
fn error_bound_chalk() { fn error_bound_chalk() {
let t = type_at( let t = type_at(
r#" r#"
@ -1076,26 +1255,26 @@ fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
[296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type [296; 299) 'get': fn get<T>(T) -> <T as Trait>::Type
[296; 302) 'get(x)': {unknown} [296; 302) 'get(x)': {unknown}
[300; 301) 'x': T [300; 301) 'x': T
[308; 312) 'get2': fn get2<{unknown}, T>(T) -> U [308; 312) 'get2': fn get2<{unknown}, T>(T) -> {unknown}
[308; 315) 'get2(x)': {unknown} [308; 315) 'get2(x)': {unknown}
[313; 314) 'x': T [313; 314) 'x': T
[321; 324) 'get': fn get<impl Trait<Type = i64>>(T) -> <T as Trait>::Type [321; 324) 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
[321; 327) 'get(y)': {unknown} [321; 327) 'get(y)': {unknown}
[325; 326) 'y': impl Trait<Type = i64> [325; 326) 'y': impl Trait<Type = i64>
[333; 337) 'get2': fn get2<{unknown}, impl Trait<Type = i64>>(T) -> U [333; 337) 'get2': fn get2<{unknown}, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> {unknown}
[333; 340) 'get2(y)': {unknown} [333; 340) 'get2(y)': {unknown}
[338; 339) 'y': impl Trait<Type = i64> [338; 339) 'y': impl Trait<Type = i64>
[346; 349) 'get': fn get<S<u64>>(T) -> <T as Trait>::Type [346; 349) 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
[346; 357) 'get(set(S))': u64 [346; 357) 'get(set(S))': u64
[350; 353) 'set': fn set<S<u64>>(T) -> T [350; 353) 'set': fn set<S<u64>>(S<u64>) -> S<u64>
[350; 356) 'set(S)': S<u64> [350; 356) 'set(S)': S<u64>
[354; 355) 'S': S<u64> [354; 355) 'S': S<u64>
[363; 367) 'get2': fn get2<u64, S<u64>>(T) -> U [363; 367) 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
[363; 375) 'get2(set(S))': u64 [363; 375) 'get2(set(S))': u64
[368; 371) 'set': fn set<S<u64>>(T) -> T [368; 371) 'set': fn set<S<u64>>(S<u64>) -> S<u64>
[368; 374) 'set(S)': S<u64> [368; 374) 'set(S)': S<u64>
[372; 373) 'S': S<u64> [372; 373) 'S': S<u64>
[381; 385) 'get2': fn get2<str, S<str>>(T) -> U [381; 385) 'get2': fn get2<str, S<str>>(S<str>) -> str
[381; 395) 'get2(S::<str>)': str [381; 395) 'get2(S::<str>)': str
[386; 394) 'S::<str>': S<str> [386; 394) 'S::<str>': S<str>
"### "###
@ -1222,6 +1401,32 @@ fn test<T: Trait1, U: Trait2>(x: T, y: U) {
); );
} }
#[test]
fn super_trait_impl_trait_method_resolution() {
assert_snapshot!(
infer(r#"
mod foo {
trait SuperTrait {
fn foo(&self) -> u32 {}
}
}
trait Trait1: foo::SuperTrait {}
fn test(x: &impl Trait1) {
x.foo();
}
"#),
@r###"
[50; 54) 'self': &Self
[63; 65) '{}': ()
[116; 117) 'x': &impl Trait1
[133; 149) '{ ...o(); }': ()
[139; 140) 'x': &impl Trait1
[139; 146) 'x.foo()': u32
"###
);
}
#[test] #[test]
fn super_trait_cycle() { fn super_trait_cycle() {
// This just needs to not crash // This just needs to not crash
@ -1268,9 +1473,9 @@ fn test() {
[157; 160) '{t}': T [157; 160) '{t}': T
[158; 159) 't': T [158; 159) 't': T
[259; 280) '{ ...S)); }': () [259; 280) '{ ...S)); }': ()
[265; 269) 'get2': fn get2<u64, S<u64>>(T) -> U [265; 269) 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
[265; 277) 'get2(set(S))': u64 [265; 277) 'get2(set(S))': u64
[270; 273) 'set': fn set<S<u64>>(T) -> T [270; 273) 'set': fn set<S<u64>>(S<u64>) -> S<u64>
[270; 276) 'set(S)': S<u64> [270; 276) 'set(S)': S<u64>
[274; 275) 'S': S<u64> [274; 275) 'S': S<u64>
"### "###
@ -1332,7 +1537,7 @@ fn test() {
[173; 175) '{}': () [173; 175) '{}': ()
[189; 308) '{ ... 1); }': () [189; 308) '{ ... 1); }': ()
[199; 200) 'x': Option<u32> [199; 200) 'x': Option<u32>
[203; 215) 'Option::Some': Some<u32>(T) -> Option<T> [203; 215) 'Option::Some': Some<u32>(u32) -> Option<u32>
[203; 221) 'Option...(1u32)': Option<u32> [203; 221) 'Option...(1u32)': Option<u32>
[216; 220) '1u32': u32 [216; 220) '1u32': u32
[227; 228) 'x': Option<u32> [227; 228) 'x': Option<u32>
@ -1442,7 +1647,7 @@ fn test() {
[340; 342) '{}': () [340; 342) '{}': ()
[356; 515) '{ ... S); }': () [356; 515) '{ ... S); }': ()
[366; 368) 'x1': u64 [366; 368) 'x1': u64
[371; 375) 'foo1': fn foo1<S, u64, |S| -> u64>(T, F) -> U [371; 375) 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64
[371; 394) 'foo1(S...hod())': u64 [371; 394) 'foo1(S...hod())': u64
[376; 377) 'S': S [376; 377) 'S': S
[379; 393) '|s| s.method()': |S| -> u64 [379; 393) '|s| s.method()': |S| -> u64
@ -1450,7 +1655,7 @@ fn test() {
[383; 384) 's': S [383; 384) 's': S
[383; 393) 's.method()': u64 [383; 393) 's.method()': u64
[404; 406) 'x2': u64 [404; 406) 'x2': u64
[409; 413) 'foo2': fn foo2<S, u64, |S| -> u64>(F, T) -> U [409; 413) 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64
[409; 432) 'foo2(|...(), S)': u64 [409; 432) 'foo2(|...(), S)': u64
[414; 428) '|s| s.method()': |S| -> u64 [414; 428) '|s| s.method()': |S| -> u64
[415; 416) 's': S [415; 416) 's': S
@ -1603,7 +1808,6 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
#[test] #[test]
fn unify_impl_trait() { fn unify_impl_trait() {
covers!(insert_vars_for_impl_trait);
assert_snapshot!( assert_snapshot!(
infer_with_mismatches(r#" infer_with_mismatches(r#"
trait Trait<T> {} trait Trait<T> {}
@ -1635,26 +1839,26 @@ fn test() -> impl Trait<i32> {
[172; 183) '{ loop {} }': T [172; 183) '{ loop {} }': T
[174; 181) 'loop {}': ! [174; 181) 'loop {}': !
[179; 181) '{}': () [179; 181) '{}': ()
[214; 310) '{ ...t()) }': S<i32> [214; 310) '{ ...t()) }': S<{unknown}>
[224; 226) 's1': S<u32> [224; 226) 's1': S<u32>
[229; 230) 'S': S<u32>(T) -> S<T> [229; 230) 'S': S<u32>(u32) -> S<u32>
[229; 241) 'S(default())': S<u32> [229; 241) 'S(default())': S<u32>
[231; 238) 'default': fn default<u32>() -> T [231; 238) 'default': fn default<u32>() -> u32
[231; 240) 'default()': u32 [231; 240) 'default()': u32
[247; 250) 'foo': fn foo(impl Trait<u32>) -> () [247; 250) 'foo': fn foo(S<u32>) -> ()
[247; 254) 'foo(s1)': () [247; 254) 'foo(s1)': ()
[251; 253) 's1': S<u32> [251; 253) 's1': S<u32>
[264; 265) 'x': i32 [264; 265) 'x': i32
[273; 276) 'bar': fn bar<i32>(impl Trait<T>) -> T [273; 276) 'bar': fn bar<i32>(S<i32>) -> i32
[273; 290) 'bar(S(...lt()))': i32 [273; 290) 'bar(S(...lt()))': i32
[277; 278) 'S': S<i32>(T) -> S<T> [277; 278) 'S': S<i32>(i32) -> S<i32>
[277; 289) 'S(default())': S<i32> [277; 289) 'S(default())': S<i32>
[279; 286) 'default': fn default<i32>() -> T [279; 286) 'default': fn default<i32>() -> i32
[279; 288) 'default()': i32 [279; 288) 'default()': i32
[296; 297) 'S': S<i32>(T) -> S<T> [296; 297) 'S': S<{unknown}>({unknown}) -> S<{unknown}>
[296; 308) 'S(default())': S<i32> [296; 308) 'S(default())': S<{unknown}>
[298; 305) 'default': fn default<i32>() -> T [298; 305) 'default': fn default<{unknown}>() -> {unknown}
[298; 307) 'default()': i32 [298; 307) 'default()': {unknown}
"### "###
); );
} }

View file

@ -50,10 +50,19 @@ impl TraitSolver {
Err(_) => ra_db::Canceled::throw(), Err(_) => ra_db::Canceled::throw(),
}; };
let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
let solution = panic::catch_unwind({ let solution = panic::catch_unwind({
let solver = panic::AssertUnwindSafe(&mut solver); let solver = panic::AssertUnwindSafe(&mut solver);
let context = panic::AssertUnwindSafe(&context); let context = panic::AssertUnwindSafe(&context);
move || solver.0.solve(context.0, goal) move || {
solver.0.solve_limited(context.0, goal, || {
context.0.db.check_canceled();
let remaining = fuel.get();
fuel.set(remaining - 1);
remaining > 0
})
}
}); });
let solution = match solution { let solution = match solution {
@ -78,7 +87,9 @@ impl TraitSolver {
/// This controls the maximum size of types Chalk considers. If we set this too /// This controls the maximum size of types Chalk considers. If we set this too
/// high, we can run into slow edge cases; if we set it too low, Chalk won't /// high, we can run into slow edge cases; if we set it too low, Chalk won't
/// find some solutions. /// find some solutions.
const CHALK_SOLVER_MAX_SIZE: usize = 4; const CHALK_SOLVER_MAX_SIZE: usize = 10;
/// This controls how much 'time' we give the Chalk solver before giving up.
const CHALK_SOLVER_FUEL: i32 = 100;
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
struct ChalkContext<'a, DB> { struct ChalkContext<'a, DB> {
@ -97,7 +108,8 @@ pub(crate) fn trait_solver_query(
} }
fn create_chalk_solver() -> chalk_solve::Solver<TypeFamily> { fn create_chalk_solver() -> chalk_solve::Solver<TypeFamily> {
let solver_choice = chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE }; let solver_choice =
chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE, expected_answers: None };
solver_choice.into_solver() solver_choice.into_solver()
} }
@ -232,7 +244,6 @@ fn solution_from_chalk(
let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<TypeFamily>>| { let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<TypeFamily>>| {
let value = subst let value = subst
.value .value
.parameters
.into_iter() .into_iter()
.map(|p| { .map(|p| {
let ty = match p.ty() { let ty = match p.ty() {

View file

@ -3,7 +3,7 @@ use std::{fmt, sync::Arc};
use log::debug; use log::debug;
use chalk_ir::{cast::Cast, Parameter, PlaceholderIndex, TypeName, UniverseIndex}; use chalk_ir::{cast::Cast, GoalData, Parameter, PlaceholderIndex, TypeName, UniverseIndex};
use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId}; use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId};
use ra_db::{ use ra_db::{
@ -14,7 +14,7 @@ use ra_db::{
use super::{builtin, AssocTyValue, Canonical, ChalkContext, Impl, Obligation}; use super::{builtin, AssocTyValue, Canonical, ChalkContext, Impl, Obligation};
use crate::{ use crate::{
db::HirDatabase, display::HirDisplay, utils::generics, ApplicationTy, GenericPredicate, db::HirDatabase, display::HirDisplay, utils::generics, ApplicationTy, GenericPredicate,
ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
}; };
#[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)] #[derive(Debug, Copy, Clone, Hash, PartialOrd, Ord, PartialEq, Eq)]
@ -24,6 +24,8 @@ impl chalk_ir::family::TypeFamily for TypeFamily {
type InternedType = Box<chalk_ir::TyData<Self>>; type InternedType = Box<chalk_ir::TyData<Self>>;
type InternedLifetime = chalk_ir::LifetimeData<Self>; type InternedLifetime = chalk_ir::LifetimeData<Self>;
type InternedParameter = chalk_ir::ParameterData<Self>; type InternedParameter = chalk_ir::ParameterData<Self>;
type InternedGoal = Arc<GoalData<Self>>;
type InternedSubstitution = Vec<Parameter<Self>>;
type DefId = InternId; type DefId = InternId;
// FIXME: implement these // FIXME: implement these
@ -48,8 +50,8 @@ impl chalk_ir::family::TypeFamily for TypeFamily {
None None
} }
fn debug_projection( fn debug_alias(
_projection: &chalk_ir::ProjectionTy<Self>, _projection: &chalk_ir::AliasTy<Self>,
_fmt: &mut fmt::Formatter<'_>, _fmt: &mut fmt::Formatter<'_>,
) -> Option<fmt::Result> { ) -> Option<fmt::Result> {
None None
@ -78,6 +80,24 @@ impl chalk_ir::family::TypeFamily for TypeFamily {
fn parameter_data(parameter: &chalk_ir::ParameterData<Self>) -> &chalk_ir::ParameterData<Self> { fn parameter_data(parameter: &chalk_ir::ParameterData<Self>) -> &chalk_ir::ParameterData<Self> {
parameter parameter
} }
fn intern_goal(goal: GoalData<Self>) -> Arc<GoalData<Self>> {
Arc::new(goal)
}
fn goal_data(goal: &Arc<GoalData<Self>>) -> &GoalData<Self> {
goal
}
fn intern_substitution<E>(
data: impl IntoIterator<Item = Result<Parameter<Self>, E>>,
) -> Result<Vec<Parameter<Self>>, E> {
data.into_iter().collect()
}
fn substitution_data(substitution: &Vec<Parameter<Self>>) -> &[Parameter<Self>] {
substitution
}
} }
impl chalk_ir::family::HasTypeFamily for TypeFamily { impl chalk_ir::family::HasTypeFamily for TypeFamily {
@ -114,16 +134,20 @@ impl ToChalk for Ty {
match self { match self {
Ty::Apply(apply_ty) => { Ty::Apply(apply_ty) => {
let name = apply_ty.ctor.to_chalk(db); let name = apply_ty.ctor.to_chalk(db);
let parameters = apply_ty.parameters.to_chalk(db); let substitution = apply_ty.parameters.to_chalk(db);
chalk_ir::ApplicationTy { name, parameters }.cast().intern() chalk_ir::ApplicationTy { name, substitution }.cast().intern()
} }
Ty::Projection(proj_ty) => { Ty::Projection(proj_ty) => {
let associated_ty_id = proj_ty.associated_ty.to_chalk(db); let associated_ty_id = proj_ty.associated_ty.to_chalk(db);
let parameters = proj_ty.parameters.to_chalk(db); let substitution = proj_ty.parameters.to_chalk(db);
chalk_ir::ProjectionTy { associated_ty_id, parameters }.cast().intern() chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern()
}
Ty::Param(id) => {
let interned_id = db.intern_type_param_id(id);
PlaceholderIndex {
ui: UniverseIndex::ROOT,
idx: interned_id.as_intern_id().as_usize(),
} }
Ty::Param { idx, .. } => {
PlaceholderIndex { ui: UniverseIndex::ROOT, idx: idx as usize }
.to_ty::<TypeFamily>() .to_ty::<TypeFamily>()
} }
Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx as usize).intern(), Ty::Bound(idx) => chalk_ir::TyData::BoundVar(idx as usize).intern(),
@ -135,23 +159,13 @@ impl ToChalk for Ty {
.cloned() .cloned()
.map(|p| p.to_chalk(db)) .map(|p| p.to_chalk(db))
.collect(); .collect();
let bounded_ty = chalk_ir::BoundedTy { bounds: make_binders(where_clauses, 1) }; let bounded_ty = chalk_ir::DynTy { bounds: make_binders(where_clauses, 1) };
chalk_ir::TyData::Dyn(bounded_ty).intern() chalk_ir::TyData::Dyn(bounded_ty).intern()
} }
Ty::Opaque(predicates) => { Ty::Opaque(_) | Ty::Unknown => {
let where_clauses = predicates let substitution = chalk_ir::Substitution::empty();
.iter()
.filter(|p| !p.is_error())
.cloned()
.map(|p| p.to_chalk(db))
.collect();
let bounded_ty = chalk_ir::BoundedTy { bounds: make_binders(where_clauses, 1) };
chalk_ir::TyData::Opaque(bounded_ty).intern()
}
Ty::Unknown => {
let parameters = Vec::new();
let name = TypeName::Error; let name = TypeName::Error;
chalk_ir::ApplicationTy { name, parameters }.cast().intern() chalk_ir::ApplicationTy { name, substitution }.cast().intern()
} }
} }
} }
@ -161,20 +175,23 @@ impl ToChalk for Ty {
TypeName::Error => Ty::Unknown, TypeName::Error => Ty::Unknown,
_ => { _ => {
let ctor = from_chalk(db, apply_ty.name); let ctor = from_chalk(db, apply_ty.name);
let parameters = from_chalk(db, apply_ty.parameters); let parameters = from_chalk(db, apply_ty.substitution);
Ty::Apply(ApplicationTy { ctor, parameters }) Ty::Apply(ApplicationTy { ctor, parameters })
} }
}, },
chalk_ir::TyData::Placeholder(idx) => { chalk_ir::TyData::Placeholder(idx) => {
assert_eq!(idx.ui, UniverseIndex::ROOT); assert_eq!(idx.ui, UniverseIndex::ROOT);
Ty::Param { idx: idx.idx as u32, name: crate::Name::missing() } let interned_id = crate::db::GlobalTypeParamId::from_intern_id(
crate::salsa::InternId::from(idx.idx),
);
Ty::Param(db.lookup_intern_type_param_id(interned_id))
} }
chalk_ir::TyData::Projection(proj) => { chalk_ir::TyData::Alias(proj) => {
let associated_ty = from_chalk(db, proj.associated_ty_id); let associated_ty = from_chalk(db, proj.associated_ty_id);
let parameters = from_chalk(db, proj.parameters); let parameters = from_chalk(db, proj.substitution);
Ty::Projection(ProjectionTy { associated_ty, parameters }) Ty::Projection(ProjectionTy { associated_ty, parameters })
} }
chalk_ir::TyData::ForAll(_) => unimplemented!(), chalk_ir::TyData::Function(_) => unimplemented!(),
chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx as u32), chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx as u32),
chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown,
chalk_ir::TyData::Dyn(where_clauses) => { chalk_ir::TyData::Dyn(where_clauses) => {
@ -183,27 +200,18 @@ impl ToChalk for Ty {
where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect(); where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect();
Ty::Dyn(predicates) Ty::Dyn(predicates)
} }
chalk_ir::TyData::Opaque(where_clauses) => {
assert_eq!(where_clauses.bounds.binders.len(), 1);
let predicates =
where_clauses.bounds.value.into_iter().map(|c| from_chalk(db, c)).collect();
Ty::Opaque(predicates)
}
} }
} }
} }
impl ToChalk for Substs { impl ToChalk for Substs {
type Chalk = Vec<chalk_ir::Parameter<TypeFamily>>; type Chalk = chalk_ir::Substitution<TypeFamily>;
fn to_chalk(self, db: &impl HirDatabase) -> Vec<Parameter<TypeFamily>> { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Substitution<TypeFamily> {
self.iter().map(|ty| ty.clone().to_chalk(db).cast()).collect() chalk_ir::Substitution::from(self.iter().map(|ty| ty.clone().to_chalk(db)))
} }
fn from_chalk( fn from_chalk(db: &impl HirDatabase, parameters: chalk_ir::Substitution<TypeFamily>) -> Substs {
db: &impl HirDatabase,
parameters: Vec<chalk_ir::Parameter<TypeFamily>>,
) -> Substs {
let tys = parameters let tys = parameters
.into_iter() .into_iter()
.map(|p| match p.ty() { .map(|p| match p.ty() {
@ -220,13 +228,13 @@ impl ToChalk for TraitRef {
fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<TypeFamily> { fn to_chalk(self: TraitRef, db: &impl HirDatabase) -> chalk_ir::TraitRef<TypeFamily> {
let trait_id = self.trait_.to_chalk(db); let trait_id = self.trait_.to_chalk(db);
let parameters = self.substs.to_chalk(db); let substitution = self.substs.to_chalk(db);
chalk_ir::TraitRef { trait_id, parameters } chalk_ir::TraitRef { trait_id, substitution }
} }
fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<TypeFamily>) -> Self { fn from_chalk(db: &impl HirDatabase, trait_ref: chalk_ir::TraitRef<TypeFamily>) -> Self {
let trait_ = from_chalk(db, trait_ref.trait_id); let trait_ = from_chalk(db, trait_ref.trait_id);
let substs = from_chalk(db, trait_ref.parameters); let substs = from_chalk(db, trait_ref.substitution);
TraitRef { trait_, substs } TraitRef { trait_, substs }
} }
} }
@ -317,8 +325,8 @@ impl ToChalk for GenericPredicate {
make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0) make_binders(chalk_ir::WhereClause::Implemented(trait_ref.to_chalk(db)), 0)
} }
GenericPredicate::Projection(projection_pred) => make_binders( GenericPredicate::Projection(projection_pred) => make_binders(
chalk_ir::WhereClause::ProjectionEq(chalk_ir::ProjectionEq { chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq {
projection: projection_pred.projection_ty.to_chalk(db), alias: projection_pred.projection_ty.to_chalk(db),
ty: projection_pred.ty.to_chalk(db), ty: projection_pred.ty.to_chalk(db),
}), }),
0, 0,
@ -335,8 +343,8 @@ impl ToChalk for GenericPredicate {
chalk_ir::WhereClause::Implemented(tr) => { chalk_ir::WhereClause::Implemented(tr) => {
GenericPredicate::Implemented(from_chalk(db, tr)) GenericPredicate::Implemented(from_chalk(db, tr))
} }
chalk_ir::WhereClause::ProjectionEq(projection_eq) => { chalk_ir::WhereClause::AliasEq(projection_eq) => {
let projection_ty = from_chalk(db, projection_eq.projection); let projection_ty = from_chalk(db, projection_eq.alias);
let ty = from_chalk(db, projection_eq.ty); let ty = from_chalk(db, projection_eq.ty);
GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty }) GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty })
} }
@ -345,22 +353,22 @@ impl ToChalk for GenericPredicate {
} }
impl ToChalk for ProjectionTy { impl ToChalk for ProjectionTy {
type Chalk = chalk_ir::ProjectionTy<TypeFamily>; type Chalk = chalk_ir::AliasTy<TypeFamily>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::ProjectionTy<TypeFamily> { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::AliasTy<TypeFamily> {
chalk_ir::ProjectionTy { chalk_ir::AliasTy {
associated_ty_id: self.associated_ty.to_chalk(db), associated_ty_id: self.associated_ty.to_chalk(db),
parameters: self.parameters.to_chalk(db), substitution: self.parameters.to_chalk(db),
} }
} }
fn from_chalk( fn from_chalk(
db: &impl HirDatabase, db: &impl HirDatabase,
projection_ty: chalk_ir::ProjectionTy<TypeFamily>, projection_ty: chalk_ir::AliasTy<TypeFamily>,
) -> ProjectionTy { ) -> ProjectionTy {
ProjectionTy { ProjectionTy {
associated_ty: from_chalk(db, projection_ty.associated_ty_id), associated_ty: from_chalk(db, projection_ty.associated_ty_id),
parameters: from_chalk(db, projection_ty.parameters), parameters: from_chalk(db, projection_ty.substitution),
} }
} }
} }
@ -369,10 +377,7 @@ impl ToChalk for super::ProjectionPredicate {
type Chalk = chalk_ir::Normalize<TypeFamily>; type Chalk = chalk_ir::Normalize<TypeFamily>;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<TypeFamily> { fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Normalize<TypeFamily> {
chalk_ir::Normalize { chalk_ir::Normalize { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) }
projection: self.projection_ty.to_chalk(db),
ty: self.ty.to_chalk(db),
}
} }
fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<TypeFamily>) -> Self { fn from_chalk(_db: &impl HirDatabase, _normalize: chalk_ir::Normalize<TypeFamily>) -> Self {
@ -522,7 +527,7 @@ fn convert_where_clauses(
let generic_predicates = db.generic_predicates(def); let generic_predicates = db.generic_predicates(def);
let mut result = Vec::with_capacity(generic_predicates.len()); let mut result = Vec::with_capacity(generic_predicates.len());
for pred in generic_predicates.iter() { for pred in generic_predicates.iter() {
if pred.is_error() { if pred.value.is_error() {
// skip errored predicates completely // skip errored predicates completely
continue; continue;
} }
@ -711,12 +716,12 @@ fn impl_block_datum(
let trait_ref = db let trait_ref = db
.impl_trait(impl_id) .impl_trait(impl_id)
// ImplIds for impls where the trait ref can't be resolved should never reach Chalk // ImplIds for impls where the trait ref can't be resolved should never reach Chalk
.expect("invalid impl passed to Chalk"); .expect("invalid impl passed to Chalk")
.value;
let impl_data = db.impl_data(impl_id); let impl_data = db.impl_data(impl_id);
let generic_params = generics(db, impl_id.into()); let generic_params = generics(db, impl_id.into());
let bound_vars = Substs::bound_vars(&generic_params); let bound_vars = Substs::bound_vars(&generic_params);
let trait_ref = trait_ref.subst(&bound_vars);
let trait_ = trait_ref.trait_; let trait_ = trait_ref.trait_;
let impl_type = if impl_id.lookup(db).container.module(db).krate == krate { let impl_type = if impl_id.lookup(db).container.module(db).krate == krate {
chalk_rust_ir::ImplType::Local chalk_rust_ir::ImplType::Local
@ -791,20 +796,18 @@ fn type_alias_associated_ty_value(
_ => panic!("assoc ty value should be in impl"), _ => panic!("assoc ty value should be in impl"),
}; };
let trait_ref = db.impl_trait(impl_id).expect("assoc ty value should not exist"); // we don't return any assoc ty values if the impl'd trait can't be resolved let trait_ref = db.impl_trait(impl_id).expect("assoc ty value should not exist").value; // we don't return any assoc ty values if the impl'd trait can't be resolved
let assoc_ty = db let assoc_ty = db
.trait_data(trait_ref.trait_) .trait_data(trait_ref.trait_)
.associated_type_by_name(&type_alias_data.name) .associated_type_by_name(&type_alias_data.name)
.expect("assoc ty value should not exist"); // validated when building the impl data as well .expect("assoc ty value should not exist"); // validated when building the impl data as well
let generic_params = generics(db, impl_id.into()); let ty = db.ty(type_alias.into());
let bound_vars = Substs::bound_vars(&generic_params); let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) };
let ty = db.ty(type_alias.into()).subst(&bound_vars);
let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.to_chalk(db) };
let value = chalk_rust_ir::AssociatedTyValue { let value = chalk_rust_ir::AssociatedTyValue {
impl_id: Impl::ImplBlock(impl_id.into()).to_chalk(db), impl_id: Impl::ImplBlock(impl_id.into()).to_chalk(db),
associated_ty_id: assoc_ty.to_chalk(db), associated_ty_id: assoc_ty.to_chalk(db),
value: make_binders(value_bound, bound_vars.len()), value: make_binders(value_bound, ty.num_binders),
}; };
Arc::new(value) Arc::new(value)
} }

View file

@ -2,10 +2,11 @@
//! query, but can't be computed directly from `*Data` (ie, which need a `db`). //! query, but can't be computed directly from `*Data` (ie, which need a `db`).
use std::sync::Arc; use std::sync::Arc;
use hir_def::generics::WherePredicateTarget;
use hir_def::{ use hir_def::{
adt::VariantData, adt::VariantData,
db::DefDatabase, db::DefDatabase,
generics::{GenericParams, TypeParamData}, generics::{GenericParams, TypeParamData, TypeParamProvenance},
path::Path, path::Path,
resolver::{HasResolver, TypeNs}, resolver::{HasResolver, TypeNs},
type_ref::TypeRef, type_ref::TypeRef,
@ -19,11 +20,18 @@ fn direct_super_traits(db: &impl DefDatabase, trait_: TraitId) -> Vec<TraitId> {
// lifetime problems, but since there usually shouldn't be more than a // lifetime problems, but since there usually shouldn't be more than a
// few direct traits this should be fine (we could even use some kind of // few direct traits this should be fine (we could even use some kind of
// SmallVec if performance is a concern) // SmallVec if performance is a concern)
db.generic_params(trait_.into()) let generic_params = db.generic_params(trait_.into());
let trait_self = generic_params.find_trait_self_param();
generic_params
.where_predicates .where_predicates
.iter() .iter()
.filter_map(|pred| match &pred.type_ref { .filter_map(|pred| match &pred.target {
TypeRef::Path(p) if p == &Path::from(name![Self]) => pred.bound.as_path(), WherePredicateTarget::TypeRef(TypeRef::Path(p)) if p == &Path::from(name![Self]) => {
pred.bound.as_path()
}
WherePredicateTarget::TypeParam(local_id) if Some(*local_id) == trait_self => {
pred.bound.as_path()
}
_ => None, _ => None,
}) })
.filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) { .filter_map(|path| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
@ -95,41 +103,77 @@ pub(crate) struct Generics {
} }
impl Generics { impl Generics {
pub(crate) fn iter<'a>(&'a self) -> impl Iterator<Item = (u32, &'a TypeParamData)> + 'a { pub(crate) fn iter<'a>(
&'a self,
) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a {
self.parent_generics self.parent_generics
.as_ref() .as_ref()
.into_iter() .into_iter()
.flat_map(|it| it.params.types.iter()) .flat_map(|it| {
.chain(self.params.types.iter()) it.params
.enumerate() .types
.map(|(i, (_local_id, p))| (i as u32, p)) .iter()
.map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p))
})
.chain(
self.params
.types
.iter()
.map(move |(local_id, p)| (TypeParamId { parent: self.def, local_id }, p)),
)
} }
pub(crate) fn iter_parent<'a>(&'a self) -> impl Iterator<Item = (u32, &'a TypeParamData)> + 'a { pub(crate) fn iter_parent<'a>(
self.parent_generics &'a self,
.as_ref() ) -> impl Iterator<Item = (TypeParamId, &'a TypeParamData)> + 'a {
.into_iter() self.parent_generics.as_ref().into_iter().flat_map(|it| {
.flat_map(|it| it.params.types.iter()) it.params
.enumerate() .types
.map(|(i, (_local_id, p))| (i as u32, p)) .iter()
.map(move |(local_id, p)| (TypeParamId { parent: it.def, local_id }, p))
})
} }
pub(crate) fn len(&self) -> usize { pub(crate) fn len(&self) -> usize {
self.len_split().0 self.len_split().0
} }
/// (total, parents, child) /// (total, parents, child)
pub(crate) fn len_split(&self) -> (usize, usize, usize) { pub(crate) fn len_split(&self) -> (usize, usize, usize) {
let parent = self.parent_generics.as_ref().map_or(0, |p| p.len()); let parent = self.parent_generics.as_ref().map_or(0, |p| p.len());
let child = self.params.types.len(); let child = self.params.types.len();
(parent + child, parent, child) (parent + child, parent, child)
} }
pub(crate) fn param_idx(&self, param: TypeParamId) -> u32 {
self.find_param(param).0 /// (parent total, self param, type param list, impl trait)
pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize) {
let parent = self.parent_generics.as_ref().map_or(0, |p| p.len());
let self_params = self
.params
.types
.iter()
.filter(|(_, p)| p.provenance == TypeParamProvenance::TraitSelf)
.count();
let list_params = self
.params
.types
.iter()
.filter(|(_, p)| p.provenance == TypeParamProvenance::TypeParamList)
.count();
let impl_trait_params = self
.params
.types
.iter()
.filter(|(_, p)| p.provenance == TypeParamProvenance::ArgumentImplTrait)
.count();
(parent, self_params, list_params, impl_trait_params)
} }
pub(crate) fn param_name(&self, param: TypeParamId) -> Name {
self.find_param(param).1.name.clone() pub(crate) fn param_idx(&self, param: TypeParamId) -> Option<u32> {
Some(self.find_param(param)?.0)
} }
fn find_param(&self, param: TypeParamId) -> (u32, &TypeParamData) {
fn find_param(&self, param: TypeParamId) -> Option<(u32, &TypeParamData)> {
if param.parent == self.def { if param.parent == self.def {
let (idx, (_local_id, data)) = self let (idx, (_local_id, data)) = self
.params .params
@ -139,9 +183,10 @@ impl Generics {
.find(|(_, (idx, _))| *idx == param.local_id) .find(|(_, (idx, _))| *idx == param.local_id)
.unwrap(); .unwrap();
let (_total, parent_len, _child) = self.len_split(); let (_total, parent_len, _child) = self.len_split();
return ((parent_len + idx) as u32, data); Some(((parent_len + idx) as u32, data))
} else {
self.parent_generics.as_ref().and_then(|g| g.find_param(param))
} }
self.parent_generics.as_ref().unwrap().find_param(param)
} }
} }

View file

@ -28,6 +28,7 @@ once_cell = "1.2.0"
ra_syntax = { path = "../ra_syntax" } ra_syntax = { path = "../ra_syntax" }
ra_text_edit = { path = "../ra_text_edit" } ra_text_edit = { path = "../ra_text_edit" }
ra_db = { path = "../ra_db" } ra_db = { path = "../ra_db" }
ra_ide_db = { path = "../ra_ide_db" }
ra_cfg = { path = "../ra_cfg" } ra_cfg = { path = "../ra_cfg" }
ra_fmt = { path = "../ra_fmt" } ra_fmt = { path = "../ra_fmt" }
ra_prof = { path = "../ra_prof" } ra_prof = { path = "../ra_prof" }
@ -39,7 +40,7 @@ ra_assists = { path = "../ra_assists" }
hir = { path = "../ra_hir", package = "ra_hir" } hir = { path = "../ra_hir", package = "ra_hir" }
[dev-dependencies] [dev-dependencies]
insta = "0.12.0" insta = "0.13.0"
[dev-dependencies.proptest] [dev-dependencies.proptest]
version = "0.9.0" version = "0.9.0"

View file

@ -1,22 +1,23 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use ra_assists::{resolved_assists, AssistAction, AssistLabel};
use ra_db::{FilePosition, FileRange}; use ra_db::{FilePosition, FileRange};
use ra_ide_db::RootDatabase;
use crate::{db::RootDatabase, FileId, SourceChange, SourceFileEdit}; use crate::{FileId, SourceChange, SourceFileEdit};
use either::Either;
pub use ra_assists::AssistId; pub use ra_assists::AssistId;
use ra_assists::{AssistAction, AssistLabel};
#[derive(Debug)] #[derive(Debug)]
pub struct Assist { pub struct Assist {
pub id: AssistId, pub id: AssistId,
pub label: String, pub label: String,
pub change_data: Either<SourceChange, Vec<SourceChange>>, pub group_label: Option<String>,
pub source_change: SourceChange,
} }
pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> { pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> {
ra_assists::assists(db, frange) resolved_assists(db, frange)
.into_iter() .into_iter()
.map(|assist| { .map(|assist| {
let file_id = frange.file_id; let file_id = frange.file_id;
@ -24,17 +25,8 @@ pub(crate) fn assists(db: &RootDatabase, frange: FileRange) -> Vec<Assist> {
Assist { Assist {
id: assist_label.id, id: assist_label.id,
label: assist_label.label.clone(), label: assist_label.label.clone(),
change_data: match assist.action_data { group_label: assist.group_label.map(|it| it.0),
Either::Left(action) => { source_change: action_to_edit(assist.action, file_id, assist_label),
Either::Left(action_to_edit(action, file_id, assist_label))
}
Either::Right(actions) => Either::Right(
actions
.into_iter()
.map(|action| action_to_edit(action, file_id, assist_label))
.collect(),
),
},
} }
}) })
.collect() .collect()
@ -46,9 +38,6 @@ fn action_to_edit(
assist_label: &AssistLabel, assist_label: &AssistLabel,
) -> SourceChange { ) -> SourceChange {
let file_edit = SourceFileEdit { file_id, edit: action.edit }; let file_edit = SourceFileEdit { file_id, edit: action.edit };
SourceChange::source_file_edit( SourceChange::source_file_edit(assist_label.label.clone(), file_edit)
action.label.unwrap_or_else(|| assist_label.label.clone()),
file_edit,
)
.with_cursor_opt(action.cursor_position.map(|offset| FilePosition { offset, file_id })) .with_cursor_opt(action.cursor_position.map(|offset| FilePosition { offset, file_id }))
} }

View file

@ -3,6 +3,7 @@
use indexmap::IndexMap; use indexmap::IndexMap;
use hir::db::AstDatabase; use hir::db::AstDatabase;
use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, DocCommentsOwner}, ast::{self, DocCommentsOwner},
match_ast, AstNode, TextRange, match_ast, AstNode, TextRange,
@ -10,7 +11,6 @@ use ra_syntax::{
use crate::{ use crate::{
call_info::FnCallNode, call_info::FnCallNode,
db::RootDatabase,
display::{ShortLabel, ToNav}, display::{ShortLabel, ToNav},
expand::descend_into_macros, expand::descend_into_macros,
goto_definition, references, FilePosition, NavigationTarget, RangeInfo, goto_definition, references, FilePosition, NavigationTarget, RangeInfo,

View file

@ -1,15 +1,13 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use hir::db::AstDatabase; use hir::db::AstDatabase;
use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, ArgListOwner}, ast::{self, ArgListOwner},
match_ast, AstNode, SyntaxNode, match_ast, AstNode, SyntaxNode,
}; };
use test_utils::tested_by; use test_utils::tested_by;
use crate::{ use crate::{expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature};
db::RootDatabase, expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature,
};
/// Computes parameter information for the given call expression. /// Computes parameter information for the given call expression.
pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {

View file

@ -18,6 +18,7 @@ mod complete_macro_in_item_position;
mod complete_trait_impl; mod complete_trait_impl;
use ra_db::SourceDatabase; use ra_db::SourceDatabase;
use ra_ide_db::RootDatabase;
#[cfg(test)] #[cfg(test)]
use crate::completion::completion_item::do_completion; use crate::completion::completion_item::do_completion;
@ -26,7 +27,7 @@ use crate::{
completion_context::CompletionContext, completion_context::CompletionContext,
completion_item::{CompletionKind, Completions}, completion_item::{CompletionKind, Completions},
}, },
db, FilePosition, FilePosition,
}; };
pub use crate::completion::completion_item::{ pub use crate::completion::completion_item::{
@ -55,7 +56,7 @@ pub use crate::completion::completion_item::{
/// `foo` *should* be present among the completion variants. Filtering by /// `foo` *should* be present among the completion variants. Filtering by
/// identifier prefix/fuzzy match should be done higher in the stack, together /// identifier prefix/fuzzy match should be done higher in the stack, together
/// with ordering of completions (currently this is done by the client). /// with ordering of completions (currently this is done by the client).
pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> { pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> {
let original_parse = db.parse(position.file_id); let original_parse = db.parse(position.file_id);
let ctx = CompletionContext::new(db, &original_parse, position)?; let ctx = CompletionContext::new(db, &original_parse, position)?;

View file

@ -1,11 +1,6 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use ra_assists::auto_import_text_edit; use crate::completion::{CompletionContext, Completions};
use ra_syntax::{ast, AstNode, SmolStr};
use ra_text_edit::TextEditBuilder;
use rustc_hash::FxHashMap;
use crate::completion::{CompletionContext, CompletionItem, CompletionKind, Completions};
pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
if !ctx.is_trivial_path { if !ctx.is_trivial_path {
@ -15,120 +10,14 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
ctx.analyzer.process_all_names(ctx.db, &mut |name, res| { ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
acc.add_resolution(ctx, name.to_string(), &res) acc.add_resolution(ctx, name.to_string(), &res)
}); });
// auto-import
// We fetch ident from the original file, because we need to pre-filter auto-imports
if ast::NameRef::cast(ctx.token.parent()).is_some() {
let import_resolver = ImportResolver::new();
let import_names = import_resolver.all_names(ctx.token.text());
import_names.into_iter().for_each(|(name, path)| {
let edit = {
let mut builder = TextEditBuilder::default();
builder.replace(ctx.source_range(), name.to_string());
auto_import_text_edit(
&ctx.token.parent(),
&ctx.token.parent(),
&path,
&mut builder,
);
builder.finish()
};
// Hack: copied this check form conv.rs beacause auto import can produce edits
// that invalidate assert in conv_with.
if edit
.as_atoms()
.iter()
.filter(|atom| !ctx.source_range().is_subrange(&atom.delete))
.all(|atom| ctx.source_range().intersection(&atom.delete).is_none())
{
CompletionItem::new(
CompletionKind::Reference,
ctx.source_range(),
build_import_label(&name, &path),
)
.text_edit(edit)
.add_to(acc);
}
});
}
}
fn build_import_label(name: &str, path: &[SmolStr]) -> String {
let mut buf = String::with_capacity(64);
buf.push_str(name);
buf.push_str(" (");
fmt_import_path(path, &mut buf);
buf.push_str(")");
buf
}
fn fmt_import_path(path: &[SmolStr], buf: &mut String) {
let mut segments = path.iter();
if let Some(s) = segments.next() {
buf.push_str(&s);
}
for s in segments {
buf.push_str("::");
buf.push_str(&s);
}
}
#[derive(Debug, Clone, Default)]
pub(crate) struct ImportResolver {
// todo: use fst crate or something like that
dummy_names: Vec<(SmolStr, Vec<SmolStr>)>,
}
impl ImportResolver {
pub(crate) fn new() -> Self {
let dummy_names = vec![
(SmolStr::new("fmt"), vec![SmolStr::new("std"), SmolStr::new("fmt")]),
(SmolStr::new("io"), vec![SmolStr::new("std"), SmolStr::new("io")]),
(SmolStr::new("iter"), vec![SmolStr::new("std"), SmolStr::new("iter")]),
(SmolStr::new("hash"), vec![SmolStr::new("std"), SmolStr::new("hash")]),
(
SmolStr::new("Debug"),
vec![SmolStr::new("std"), SmolStr::new("fmt"), SmolStr::new("Debug")],
),
(
SmolStr::new("Display"),
vec![SmolStr::new("std"), SmolStr::new("fmt"), SmolStr::new("Display")],
),
(
SmolStr::new("Hash"),
vec![SmolStr::new("std"), SmolStr::new("hash"), SmolStr::new("Hash")],
),
(
SmolStr::new("Hasher"),
vec![SmolStr::new("std"), SmolStr::new("hash"), SmolStr::new("Hasher")],
),
(
SmolStr::new("Iterator"),
vec![SmolStr::new("std"), SmolStr::new("iter"), SmolStr::new("Iterator")],
),
];
ImportResolver { dummy_names }
}
// Returns a map of importable items filtered by name.
// The map associates item name with its full path.
// todo: should return Resolutions
pub(crate) fn all_names(&self, name: &str) -> FxHashMap<SmolStr, Vec<SmolStr>> {
if name.len() > 1 {
self.dummy_names.iter().filter(|(n, _)| n.contains(name)).cloned().collect()
} else {
FxHashMap::default()
}
}
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::completion::{do_completion, CompletionItem, CompletionKind};
use insta::assert_debug_snapshot; use insta::assert_debug_snapshot;
use crate::completion::{do_completion, CompletionItem, CompletionKind};
fn do_reference_completion(code: &str) -> Vec<CompletionItem> { fn do_reference_completion(code: &str) -> Vec<CompletionItem> {
do_completion(code, CompletionKind::Reference) do_completion(code, CompletionKind::Reference)
} }

View file

@ -2,7 +2,7 @@ use crate::completion::{CompletionContext, Completions, CompletionItem, Completi
use ra_syntax::ast::{self, NameOwner, AstNode}; use ra_syntax::ast::{self, NameOwner, AstNode};
use hir::{self, db::HirDatabase}; use hir::{self, db::HirDatabase, Docs};
pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) { pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) {

View file

@ -1,5 +1,6 @@
//! FIXME: write short doc here //! FIXME: write short doc here
use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
algo::{find_covering_element, find_node_at_offset}, algo::{find_covering_element, find_node_at_offset},
ast, AstNode, Parse, SourceFile, ast, AstNode, Parse, SourceFile,
@ -8,13 +9,13 @@ use ra_syntax::{
}; };
use ra_text_edit::AtomTextEdit; use ra_text_edit::AtomTextEdit;
use crate::{db, FilePosition}; use crate::FilePosition;
/// `CompletionContext` is created early during completion to figure out, where /// `CompletionContext` is created early during completion to figure out, where
/// exactly is the cursor, syntax-wise. /// exactly is the cursor, syntax-wise.
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct CompletionContext<'a> { pub(crate) struct CompletionContext<'a> {
pub(super) db: &'a db::RootDatabase, pub(super) db: &'a RootDatabase,
pub(super) analyzer: hir::SourceAnalyzer, pub(super) analyzer: hir::SourceAnalyzer,
pub(super) offset: TextUnit, pub(super) offset: TextUnit,
pub(super) token: SyntaxToken, pub(super) token: SyntaxToken,
@ -49,7 +50,7 @@ pub(crate) struct CompletionContext<'a> {
impl<'a> CompletionContext<'a> { impl<'a> CompletionContext<'a> {
pub(super) fn new( pub(super) fn new(
db: &'a db::RootDatabase, db: &'a RootDatabase,
original_parse: &'a Parse<ast::SourceFile>, original_parse: &'a Parse<ast::SourceFile>,
position: FilePosition, position: FilePosition,
) -> Option<CompletionContext<'a>> { ) -> Option<CompletionContext<'a>> {

View file

@ -5,6 +5,7 @@ use std::cell::RefCell;
use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink};
use itertools::Itertools; use itertools::Itertools;
use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt};
use ra_ide_db::RootDatabase;
use ra_prof::profile; use ra_prof::profile;
use ra_syntax::{ use ra_syntax::{
algo, algo,
@ -13,7 +14,7 @@ use ra_syntax::{
}; };
use ra_text_edit::{TextEdit, TextEditBuilder}; use ra_text_edit::{TextEdit, TextEditBuilder};
use crate::{db::RootDatabase, Diagnostic, FileId, FileSystemEdit, SourceChange, SourceFileEdit}; use crate::{Diagnostic, FileId, FileSystemEdit, SourceChange, SourceFileEdit};
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub enum Severity { pub enum Severity {

View file

@ -4,13 +4,11 @@ use std::fmt::{self, Display};
use hir::{Docs, Documentation, HasSource, HirDisplay}; use hir::{Docs, Documentation, HasSource, HirDisplay};
use join_to_string::join; use join_to_string::join;
use ra_ide_db::RootDatabase;
use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner}; use ra_syntax::ast::{self, AstNode, NameOwner, VisibilityOwner};
use std::convert::From; use std::convert::From;
use crate::{ use crate::display::{generic_parameters, where_predicates};
db,
display::{generic_parameters, where_predicates},
};
#[derive(Debug)] #[derive(Debug)]
pub enum CallableKind { pub enum CallableKind {
@ -48,13 +46,13 @@ impl FunctionSignature {
self self
} }
pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self { pub(crate) fn from_hir(db: &RootDatabase, function: hir::Function) -> Self {
let doc = function.docs(db); let doc = function.docs(db);
let ast_node = function.source(db).value; let ast_node = function.source(db).value;
FunctionSignature::from(&ast_node).with_doc_opt(doc) FunctionSignature::from(&ast_node).with_doc_opt(doc)
} }
pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> { pub(crate) fn from_struct(db: &RootDatabase, st: hir::Struct) -> Option<Self> {
let node: ast::StructDef = st.source(db).value; let node: ast::StructDef = st.source(db).value;
match node.kind() { match node.kind() {
ast::StructKind::Record(_) => return None, ast::StructKind::Record(_) => return None,
@ -86,10 +84,7 @@ impl FunctionSignature {
) )
} }
pub(crate) fn from_enum_variant( pub(crate) fn from_enum_variant(db: &RootDatabase, variant: hir::EnumVariant) -> Option<Self> {
db: &db::RootDatabase,
variant: hir::EnumVariant,
) -> Option<Self> {
let node: ast::EnumVariant = variant.source(db).value; let node: ast::EnumVariant = variant.source(db).value;
match node.kind() { match node.kind() {
ast::StructKind::Record(_) | ast::StructKind::Unit => return None, ast::StructKind::Record(_) | ast::StructKind::Unit => return None,
@ -126,7 +121,7 @@ impl FunctionSignature {
) )
} }
pub(crate) fn from_macro(db: &db::RootDatabase, macro_def: hir::MacroDef) -> Option<Self> { pub(crate) fn from_macro(db: &RootDatabase, macro_def: hir::MacroDef) -> Option<Self> {
let node: ast::MacroCall = macro_def.source(db).value; let node: ast::MacroCall = macro_def.source(db).value;
let params = vec![]; let params = vec![];

View file

@ -3,6 +3,7 @@
use either::Either; use either::Either;
use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource}; use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource};
use ra_db::{FileId, SourceDatabase}; use ra_db::{FileId, SourceDatabase};
use ra_ide_db::RootDatabase;
use ra_syntax::{ use ra_syntax::{
ast::{self, DocCommentsOwner, NameOwner}, ast::{self, DocCommentsOwner, NameOwner},
match_ast, AstNode, SmolStr, match_ast, AstNode, SmolStr,
@ -10,7 +11,7 @@ use ra_syntax::{
TextRange, TextRange,
}; };
use crate::{db::RootDatabase, expand::original_range, FileSymbol}; use crate::{expand::original_range, FileSymbol};
use super::short_label::ShortLabel; use super::short_label::ShortLabel;

Some files were not shown because too many files have changed in this diff Show more