Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-27 04:19:13 +00:00

Commit 5374ebbf36 ("Simplify"), parent bfb187aacd
12 changed files with 62 additions and 53 deletions
@@ -1,4 +1,6 @@
 //! Things to wrap other things in file ids.
+use std::borrow::Borrow;
+
 use either::Either;
 use span::{
     AstIdNode, ErasedFileAstId, FileAstId, FileId, FileRange, HirFileId, HirFileIdRepr,
@@ -76,6 +78,13 @@ impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
     pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
         self.with_value(&self.value)
     }
+
+    pub fn borrow<U>(&self) -> InFileWrapper<FileKind, &U>
+    where
+        T: Borrow<U>,
+    {
+        self.with_value(self.value.borrow())
+    }
 }
 
 impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
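The new `InFileWrapper::borrow` mirrors `as_ref`, but goes through the standard `Borrow` trait, so a wrapper around an owned value can hand out a wrapper around a borrowed view of it. A minimal sketch of the same shape outside rust-analyzer (the `Wrapper` type is hypothetical, for illustration only):

    use std::borrow::Borrow;

    // Hypothetical stand-in for InFileWrapper: copyable metadata plus a payload.
    struct Wrapper<K: Copy, T> {
        kind: K,
        value: T,
    }

    impl<K: Copy, T> Wrapper<K, T> {
        // Same shape as the new InFileWrapper::borrow: keep the metadata,
        // borrow the payload. With T = String and U = str this turns a
        // Wrapper<_, String> into a Wrapper<_, &str>.
        fn borrow<U: ?Sized>(&self) -> Wrapper<K, &U>
        where
            T: Borrow<U>,
        {
            Wrapper { kind: self.kind, value: self.value.borrow() }
        }
    }

    fn main() {
        let owned: Wrapper<u32, String> = Wrapper { kind: 7, value: "hello".to_owned() };
        let borrowed: Wrapper<u32, &str> = owned.borrow::<str>();
        println!("{} {}", borrowed.kind, borrowed.value);
    }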
@@ -156,8 +165,13 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
     }
 
 // region:specific impls
+impl<SN: Borrow<SyntaxNode>> InRealFile<SN> {
+    pub fn file_range(&self) -> FileRange {
+        FileRange { file_id: self.file_id, range: self.value.borrow().text_range() }
+    }
+}
 
-impl InFile<&SyntaxNode> {
+impl<SN: Borrow<SyntaxNode>> InFile<SN> {
     pub fn parent_ancestors_with_macros(
         self,
         db: &dyn db::ExpandDatabase,
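The payoff of the `Borrow` bound shows here: `impl InFile<&SyntaxNode>` becomes `impl<SN: Borrow<SyntaxNode>> InFile<SN>`, so one impl block now serves both the owned and the borrowed wrapper, since `SyntaxNode` implements `Borrow<SyntaxNode>` via the blanket `impl<T> Borrow<T> for T` and `&SyntaxNode` via the reference impl. A toy illustration with hypothetical names:

    use std::borrow::Borrow;

    struct Node(String); // hypothetical syntax-node stand-in

    struct Holder<N>(N);

    // One impl block serves Holder<Node> and Holder<&Node> alike, because
    // both Node and &Node implement Borrow<Node>.
    impl<N: Borrow<Node>> Holder<N> {
        fn name(&self) -> &str {
            &self.0.borrow().0
        }
    }

    fn main() {
        let node = Node("fn main".to_owned());
        let by_ref = Holder(&node);
        println!("{}", by_ref.name()); // via &Node
        let owned = Holder(node); // by_ref is no longer used, so the move is fine
        println!("{}", owned.name()); // via Node
    }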
@@ -172,7 +186,7 @@ impl InFile<&SyntaxNode> {
             .map(|node| node.parent())
             .transpose(),
         };
-        std::iter::successors(succ(&self.cloned()), succ)
+        std::iter::successors(succ(&self.borrow().cloned()), succ)
     }
 
     pub fn ancestors_with_macros(
@@ -189,7 +203,15 @@ impl InFile<&SyntaxNode> {
             .map(|node| node.parent())
             .transpose(),
         };
-        std::iter::successors(Some(self.cloned()), succ)
+        std::iter::successors(Some(self.borrow().cloned()), succ)
+    }
+
+    pub fn kind(&self) -> parser::SyntaxKind {
+        self.value.borrow().kind()
+    }
+
+    pub fn text_range(&self) -> TextRange {
+        self.value.borrow().text_range()
     }
 
     /// Falls back to the macro call range if the node cannot be mapped up fully.
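Both ancestor walks are built on `std::iter::successors`, which starts from an optional seed and applies the step function until it returns `None`. A self-contained sketch of that shape over a toy parent chain (types illustrative, not rust-analyzer's):

    #[derive(Clone)]
    struct Node {
        name: &'static str,
        parent: Option<Box<Node>>,
    }

    fn main() {
        let root = Node { name: "root", parent: None };
        let child = Node { name: "child", parent: Some(Box::new(root)) };
        let leaf = Node { name: "leaf", parent: Some(Box::new(child)) };

        // Seed with the starting node, step to the parent each time;
        // iteration stops at the first None, like ancestors_with_macros.
        let ancestors = std::iter::successors(Some(leaf), |n| n.parent.as_deref().cloned());
        let names: Vec<_> = ancestors.map(|n| n.name).collect();
        assert_eq!(names, ["leaf", "child", "root"]);
    }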
@@ -197,7 +219,7 @@ impl InFile<&SyntaxNode> {
     /// For attributes and derives, this will point back to the attribute only.
     /// For the entire item use [`InFile::original_file_range_full`].
     pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange {
-        self.map(SyntaxNode::text_range).original_node_file_range_rooted(db)
+        self.borrow().map(SyntaxNode::text_range).original_node_file_range_rooted(db)
     }
 
     /// Falls back to the macro call range if the node cannot be mapped up fully.
@@ -205,15 +227,7 @@ impl InFile<&SyntaxNode> {
         self,
         db: &dyn db::ExpandDatabase,
     ) -> FileRange {
-        self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
-    }
-
-    /// Attempts to map the syntax node back up its macro calls.
-    pub fn original_file_range_opt(
-        self,
-        db: &dyn db::ExpandDatabase,
-    ) -> Option<(FileRange, SyntaxContextId)> {
-        self.map(SyntaxNode::text_range).original_node_file_range_opt(db)
+        self.borrow().map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db)
     }
 
     pub fn original_syntax_node_rooted(
@@ -224,16 +238,19 @@ impl InFile<&SyntaxNode> {
         // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
         let file_id = match self.file_id.repr() {
            HirFileIdRepr::FileId(file_id) => {
-                return Some(InRealFile { file_id, value: self.value.clone() })
+                return Some(InRealFile { file_id, value: self.value.borrow().clone() })
             }
             HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m,
             _ => return None,
         };
 
-        let FileRange { file_id, range } =
-            map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?;
+        let FileRange { file_id, range } = map_node_range_up_rooted(
+            db,
+            &db.expansion_span_map(file_id),
+            self.value.borrow().text_range(),
+        )?;
 
-        let kind = self.value.kind();
+        let kind = self.kind();
         let value = db
             .parse(file_id)
             .syntax_node()
@@ -245,6 +262,16 @@ impl InFile<&SyntaxNode> {
     }
 }
 
+impl InFile<&SyntaxNode> {
+    /// Attempts to map the syntax node back up its macro calls.
+    pub fn original_file_range_opt(
+        self,
+        db: &dyn db::ExpandDatabase,
+    ) -> Option<(FileRange, SyntaxContextId)> {
+        self.borrow().map(SyntaxNode::text_range).original_node_file_range_opt(db)
+    }
+}
+
 impl InMacroFile<SyntaxToken> {
     pub fn upmap_once(
         self,
@@ -96,10 +96,10 @@ impl SourceAnalyzer {
             None => scope_for(db, &scopes, &source_map, node),
             Some(offset) => {
                 debug_assert!(
-                    node.value.text_range().contains_inclusive(offset),
+                    node.text_range().contains_inclusive(offset),
                     "{:?} not in {:?}",
                     offset,
-                    node.value.text_range()
+                    node.text_range()
                 );
                 scope_for_offset(db, &scopes, &source_map, node.file_id, offset)
             }
@@ -966,9 +966,7 @@ fn scope_for(
     node: InFile<&SyntaxNode>,
 ) -> Option<ScopeId> {
     node.ancestors_with_macros(db.upcast())
-        .take_while(|it| {
-            !ast::Item::can_cast(it.value.kind()) || ast::MacroCall::can_cast(it.value.kind())
-        })
+        .take_while(|it| !ast::Item::can_cast(it.kind()) || ast::MacroCall::can_cast(it.kind()))
         .filter_map(|it| it.map(ast::Expr::cast).transpose())
         .filter_map(|it| source_map.node_expr(it.as_ref()))
         .find_map(|it| scopes.scope_for(it))
@@ -996,8 +994,8 @@ fn scope_for_offset(
                 Some(it.file_id.macro_file()?.call_node(db.upcast()))
             })
             .find(|it| it.file_id == from_file)
-            .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
-        Some((source.value.text_range(), scope))
+            .filter(|it| it.kind() == SyntaxKind::MACRO_CALL)?;
+        Some((source.text_range(), scope))
         })
         .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
         // find containing scope
@@ -598,9 +598,8 @@ pub(super) fn famous_types<'a, DB: HirDatabase>(
         Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" },
     ]
     .into_iter()
-    .map(|exprs| {
+    .inspect(|exprs| {
         lookup.insert(exprs.ty(db), std::iter::once(exprs.clone()));
-        exprs
     })
     .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal))
 }
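`Iterator::inspect` is the right tool when a `map` closure only performs a side effect and returns its argument unchanged: it hands the closure a shared reference and yields every item as-is. A self-contained sketch:

    fn main() {
        let mut seen = Vec::new();

        // Before: .map(|x| { seen.push(*x); x }) — inspect expresses the
        // side effect directly and passes every item through unchanged.
        let total: i32 = [1, 2, 3]
            .into_iter()
            .inspect(|x| seen.push(*x))
            .filter(|x| x % 2 == 1)
            .sum();

        assert_eq!(total, 4);
        assert_eq!(seen, [1, 2, 3]); // inspect ran before the filter dropped 2
    }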
@@ -7,11 +7,7 @@ pub fn is_rust_fence(s: &str) -> bool {
     let mut seen_rust_tags = false;
     let mut seen_other_tags = false;
 
-    let tokens = s
-        .trim()
-        .split(|c| c == ',' || c == ' ' || c == '\t')
-        .map(str::trim)
-        .filter(|t| !t.is_empty());
+    let tokens = s.trim().split([',', ' ', '\t']).map(str::trim).filter(|t| !t.is_empty());
 
     for token in tokens {
         match token {
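`str::split` accepts any `Pattern` implementation, and since Rust 1.71 that includes `[char; N]` arrays, which match any one of their elements — a terser equivalent of the closure form. A small demonstration:

    fn main() {
        let s = " rust, ignore\tshould_panic ";

        // A char array is a Pattern matching any one of its elements, so this
        // is equivalent to .split(|c| c == ',' || c == ' ' || c == '\t').
        let tokens: Vec<&str> =
            s.trim().split([',', ' ', '\t']).map(str::trim).filter(|t| !t.is_empty()).collect();

        assert_eq!(tokens, ["rust", "ignore", "should_panic"]);
    }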
@@ -28,10 +28,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
             let function = id;
             (
                 format!("`fn {redundant_assoc_item_name}`"),
-                function
-                    .source(db)
-                    .map(|it| it.syntax().value.text_range())
-                    .unwrap_or(default_range),
+                function.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
                 format!("\n {};", function.display(db)),
             )
         }
@@ -39,10 +36,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
             let constant = id;
             (
                 format!("`const {redundant_assoc_item_name}`"),
-                constant
-                    .source(db)
-                    .map(|it| it.syntax().value.text_range())
-                    .unwrap_or(default_range),
+                constant.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
                 format!("\n {};", constant.display(db)),
             )
         }
@@ -50,10 +44,7 @@ pub(crate) fn trait_impl_redundant_assoc_item(
             let type_alias = id;
             (
                 format!("`type {redundant_assoc_item_name}`"),
-                type_alias
-                    .source(db)
-                    .map(|it| it.syntax().value.text_range())
-                    .unwrap_or(default_range),
+                type_alias.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range),
                 format!("\n type {};", type_alias.name(ctx.sema.db).to_smol_str()),
             )
         }
@@ -651,7 +651,7 @@ impl flags::AnalysisStats {
                 if let Some(src) = source {
                     let original_file = src.file_id.original_file(db);
                     let path = vfs.file_path(original_file);
-                    let syntax_range = src.value.text_range();
+                    let syntax_range = src.text_range();
                     format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
                 } else {
                     format!("processing: {}", full_name())
@@ -945,7 +945,7 @@ impl flags::AnalysisStats {
                 if let Some(src) = source {
                     let original_file = src.file_id.original_file(db);
                     let path = vfs.file_path(original_file);
-                    let syntax_range = src.value.text_range();
+                    let syntax_range = src.text_range();
                     format!("processing: {} ({} {:?})", full_name(), path, syntax_range)
                 } else {
                     format!("processing: {}", full_name())
@@ -3367,7 +3367,7 @@ mod tests {
         for idx in url_offsets {
             let link = &schema[idx..];
             // matching on whitespace to ignore normal links
-            if let Some(link_end) = link.find(|c| c == ' ' || c == '[') {
+            if let Some(link_end) = link.find([' ', '[']) {
                 if link.chars().nth(link_end) == Some('[') {
                     if let Some(link_text_end) = link.find(']') {
                         let link_text = link[link_end..(link_text_end + 1)].to_string();
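`str::find` takes the same `Pattern` forms as `split`: a `char`, a string slice, a char array or slice, or a `FnMut(char) -> bool` closure. The three calls below are interchangeable ways to locate the first space or bracket (sketch):

    fn main() {
        let link = "https://example.com [label]";

        // All three find the first ' ' or '[' — the array form is the tersest.
        let a = link.find(|c| c == ' ' || c == '[');
        let b = link.find([' ', '[']);
        let c = link.find(&[' ', '['][..]); // a char slice is also a Pattern
        assert_eq!(a, b);
        assert_eq!(b, c);
        assert_eq!(a, Some(19));
    }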
@@ -388,9 +388,8 @@ impl Server {
     }
     fn recv(&self) -> Result<Option<Message>, Timeout> {
         let msg = recv_timeout(&self.client.receiver)?;
-        let msg = msg.map(|msg| {
+        let msg = msg.inspect(|msg| {
             self.messages.borrow_mut().push(msg.clone());
-            msg
         });
         Ok(msg)
     }
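Unlike the iterator case above, this one relies on `Option::inspect` (stable since Rust 1.76): the closure sees a shared reference to the contained value and the option passes through unchanged, which removes the trailing `msg` the old `map` closure had to return. A minimal sketch:

    fn main() {
        let mut log: Vec<String> = Vec::new();

        let msg: Option<&str> = Some("hello");
        // Before: msg.map(|m| { log.push(m.to_string()); m })
        let msg = msg.inspect(|m| log.push(m.to_string()));

        assert_eq!(msg, Some("hello"));
        assert_eq!(log, ["hello"]);
    }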
@@ -155,7 +155,7 @@ Zlib OR Apache-2.0 OR MIT
 
     let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
     let mut licenses = meta
-        .split(|c| c == ',' || c == '{' || c == '}')
+        .split([',', '{', '}'])
         .filter(|it| it.contains(r#""license""#))
         .map(|it| it.trim())
         .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
@@ -305,7 +305,7 @@ fn extract_line_annotations(mut line: &str) -> Vec<LineAnnotation> {
     }
     let range = TextRange::at(offset, len.try_into().unwrap());
     let line_no_caret = &line[len..];
-    let end_marker = line_no_caret.find(|c| c == '$');
+    let end_marker = line_no_caret.find('$');
     let next = line_no_caret.find(marker).map_or(line.len(), |it| it + len);
 
     let cond = |end_marker| {
@@ -63,7 +63,7 @@ fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
     if diagnostic.chars().any(|c| c.is_ascii_uppercase()) {
         return Err("Diagnostic names can't contain uppercase symbols".into());
     }
-    if diagnostic.chars().any(|c| !c.is_ascii()) {
+    if !diagnostic.is_ascii() {
         return Err("Diagnostic can't contain non-ASCII symbols".into());
     }
 
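`str::is_ascii` checks the whole string byte-wise, so it says the same thing as `chars().any(|c| !c.is_ascii())` without decoding each `char` first. The two forms agree on every input:

    fn main() {
        for s in ["unused_variables", "naïve", "непроверенный"] {
            // Byte-wise check versus per-char decoding: same answer.
            assert_eq!(s.is_ascii(), !s.chars().any(|c| !c.is_ascii()));
        }
        assert!("unused_variables".is_ascii());
        assert!(!"naïve".is_ascii());
    }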
@@ -119,12 +119,11 @@ impl flags::RustcPull {
         // Fetch given rustc commit.
         cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git")
             .run()
-            .map_err(|e| {
+            .inspect_err(|_| {
                 // Try to un-do the previous `git commit`, to leave the repo in the state we found it it.
                 cmd!(sh, "git reset --hard HEAD^")
                     .run()
                     .expect("FAILED to clean up again after failed `git fetch`, sorry for that");
-                e
             })
             .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?;
 
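`Result::inspect_err` (stable since Rust 1.76, like `Option::inspect`) runs a closure on a shared reference to the error and passes the `Result` through, so the cleanup hook no longer needs `map_err`'s "return `e` at the end" dance. A minimal sketch with an ordinary fallible call standing in for the `git fetch`:

    use std::fs;

    fn main() {
        // Before: .map_err(|e| { eprintln!("..."); e })
        let contents = fs::read_to_string("Cargo.toml")
            .inspect_err(|err| eprintln!("cleanup/logging hook: {err}"))
            .unwrap_or_default();
        println!("{} bytes", contents.len());
    }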