Mirror of https://github.com/rust-lang/rust-analyzer.git (synced 2025-09-28 12:54:58 +00:00)

Rename ra_ssr -> ssr

parent bb5c189b7d  commit ae3abd6e57
18 changed files with 95 additions and 110 deletions
29 crates/ssr/src/errors.rs Normal file
@@ -0,0 +1,29 @@
//! Code relating to errors produced by SSR.

/// Constructs an SsrError taking arguments like the format macro.
macro_rules! _error {
    ($fmt:expr) => {$crate::SsrError::new(format!($fmt))};
    ($fmt:expr, $($arg:tt)+) => {$crate::SsrError::new(format!($fmt, $($arg)+))}
}
pub(crate) use _error as error;

/// Returns from the current function with an error, supplied by arguments as for format!
macro_rules! _bail {
    ($($tokens:tt)*) => {return Err(crate::errors::error!($($tokens)*))}
}
pub(crate) use _bail as bail;

#[derive(Debug, PartialEq)]
pub struct SsrError(pub(crate) String);

impl std::fmt::Display for SsrError {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "Parse error: {}", self.0)
    }
}

impl SsrError {
    pub(crate) fn new(message: impl Into<String>) -> SsrError {
        SsrError(message.into())
    }
}
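For orientation, here is how these two crate-internal macros fit together; a minimal sketch (the validation function and its message are illustrative, not part of this commit):

```rust
use crate::errors::{bail, SsrError};

// `bail!` forwards its arguments to `error!`, which builds an `SsrError`
// via `format!`-style formatting, then returns early with `Err(...)`.
fn check_name(name: &str) -> Result<(), SsrError> {
    if name.is_empty() {
        bail!("invalid name `{}`: must not be empty", name);
    }
    Ok(())
}
```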
338 crates/ssr/src/lib.rs Normal file
@@ -0,0 +1,338 @@
//! Structural Search Replace
//!
//! Allows searching the AST for code that matches one or more patterns and then replacing that code
//! based on a template.

// Feature: Structural Search and Replace
//
// Search and replace with named wildcards that will match any expression, type, path, pattern or item.
// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
//
// All paths in both the search pattern and the replacement template must resolve in the context
// in which this command is invoked. Paths in the search pattern will then match the code if they
// resolve to the same item, even if they're written differently. For example if we invoke the
// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
// to `foo::Bar` will match.
//
// Paths in the replacement template will be rendered appropriately for the context in which the
// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
// code in the `foo` module, we'll insert just `Bar`.
//
// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`.
//
// The scope of the search / replace will be restricted to the current selection if any, otherwise
// it will apply to the whole workspace.
//
// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
//
// Supported constraints:
//
// |===
// | Constraint    | Restricts placeholder
//
// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
// | not(a)        | Negates the constraint `a`
// |===
//
// Available via the command `rust-analyzer.ssr`.
//
// ```rust
// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
//
// // BEFORE
// String::from(foo(y + 5, z))
//
// // AFTER
// String::from((y + 5).foo(z))
// ```
//
// |===
// | Editor  | Action Name
//
// | VS Code | **Rust Analyzer: Structural Search Replace**
// |===
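// Illustrative sketch (editorial addition, not in the diff): combining the
// placeholder and constraint syntax documented above, a hypothetical rule that
// only rewrites literal arguments would be written as:
//
//     foo(${a:kind(literal)}) ==>> bar($a)
//
// matching `foo(42)` but not `foo(x)`.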
mod matching;
mod nester;
mod parsing;
mod replacing;
mod resolving;
mod search;
#[macro_use]
mod errors;
#[cfg(test)]
mod tests;

use crate::errors::bail;
pub use crate::errors::SsrError;
pub use crate::matching::Match;
use crate::matching::MatchFailureReason;
use base_db::{FileId, FilePosition, FileRange};
use hir::Semantics;
use ide_db::source_change::SourceFileEdit;
use resolving::ResolvedRule;
use rustc_hash::FxHashMap;
use syntax::{ast, AstNode, SyntaxNode, TextRange};

// A structured search replace rule. Created by calling `parse` on a str.
#[derive(Debug)]
pub struct SsrRule {
    /// A structured pattern that we're searching for.
    pattern: parsing::RawPattern,
    /// What we'll replace it with.
    template: parsing::RawPattern,
    parsed_rules: Vec<parsing::ParsedRule>,
}

#[derive(Debug)]
pub struct SsrPattern {
    raw: parsing::RawPattern,
    parsed_rules: Vec<parsing::ParsedRule>,
}

#[derive(Debug, Default)]
pub struct SsrMatches {
    pub matches: Vec<Match>,
}

/// Searches a crate for pattern matches and possibly replaces them with something else.
pub struct MatchFinder<'db> {
    /// Our source of information about the user's code.
    sema: Semantics<'db, ide_db::RootDatabase>,
    rules: Vec<ResolvedRule>,
    resolution_scope: resolving::ResolutionScope<'db>,
    restrict_ranges: Vec<FileRange>,
}

impl<'db> MatchFinder<'db> {
    /// Constructs a new instance where names will be looked up as if they appeared at
    /// `lookup_context`.
    pub fn in_context(
        db: &'db ide_db::RootDatabase,
        lookup_context: FilePosition,
        mut restrict_ranges: Vec<FileRange>,
    ) -> MatchFinder<'db> {
        restrict_ranges.retain(|range| !range.range.is_empty());
        let sema = Semantics::new(db);
        let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
        MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges }
    }

    /// Constructs an instance using the start of the first file in `db` as the lookup context.
    pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
        use base_db::SourceDatabaseExt;
        use ide_db::symbol_index::SymbolsDatabase;
        if let Some(first_file_id) = db
            .local_roots()
            .iter()
            .next()
            .and_then(|root| db.source_root(root.clone()).iter().next())
        {
            Ok(MatchFinder::in_context(
                db,
                FilePosition { file_id: first_file_id, offset: 0.into() },
                vec![],
            ))
        } else {
            bail!("No files to search");
        }
    }

    /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
    /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
    /// match to it.
    pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
        for parsed_rule in rule.parsed_rules {
            self.rules.push(ResolvedRule::new(
                parsed_rule,
                &self.resolution_scope,
                self.rules.len(),
            )?);
        }
        Ok(())
    }

    /// Finds matches for all added rules and returns edits for all found matches.
    pub fn edits(&self) -> Vec<SourceFileEdit> {
        use base_db::SourceDatabaseExt;
        let mut matches_by_file = FxHashMap::default();
        for m in self.matches().matches {
            matches_by_file
                .entry(m.range.file_id)
                .or_insert_with(|| SsrMatches::default())
                .matches
                .push(m);
        }
        let mut edits = vec![];
        for (file_id, matches) in matches_by_file {
            let edit =
                replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id), &self.rules);
            edits.push(SourceFileEdit { file_id, edit });
        }
        edits
    }

    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
    /// intend to do replacement, use `add_rule` instead.
    pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
        for parsed_rule in pattern.parsed_rules {
            self.rules.push(ResolvedRule::new(
                parsed_rule,
                &self.resolution_scope,
                self.rules.len(),
            )?);
        }
        Ok(())
    }

    /// Returns matches for all added rules.
    pub fn matches(&self) -> SsrMatches {
        let mut matches = Vec::new();
        let mut usage_cache = search::UsageCache::default();
        for rule in &self.rules {
            self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
        }
        nester::nest_and_remove_collisions(matches, &self.sema)
    }

    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
    /// them, while recording reasons why they don't match. This API is useful for command
    /// line-based debugging where providing a range is difficult.
    pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
        use base_db::SourceDatabaseExt;
        let file = self.sema.parse(file_id);
        let mut res = Vec::new();
        let file_text = self.sema.db.file_text(file_id);
        let mut remaining_text = file_text.as_str();
        let mut base = 0;
        let len = snippet.len() as u32;
        while let Some(offset) = remaining_text.find(snippet) {
            let start = base + offset as u32;
            let end = start + len;
            self.output_debug_for_nodes_at_range(
                file.syntax(),
                FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
                &None,
                &mut res,
            );
            remaining_text = &remaining_text[offset + snippet.len()..];
            base = end;
        }
        res
    }

    fn output_debug_for_nodes_at_range(
        &self,
        node: &SyntaxNode,
        range: FileRange,
        restrict_range: &Option<FileRange>,
        out: &mut Vec<MatchDebugInfo>,
    ) {
        for node in node.children() {
            let node_range = self.sema.original_range(&node);
            if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
            {
                continue;
            }
            if node_range.range == range.range {
                for rule in &self.rules {
                    // For now we ignore rules that have a different kind than our node, otherwise
                    // we get lots of noise. If at some point we add support for restricting rules
                    // to a particular kind of thing (e.g. only match type references), then we can
                    // relax this. We special-case expressions, since function calls can match
                    // method calls.
                    if rule.pattern.node.kind() != node.kind()
                        && !(ast::Expr::can_cast(rule.pattern.node.kind())
                            && ast::Expr::can_cast(node.kind()))
                    {
                        continue;
                    }
                    out.push(MatchDebugInfo {
                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
                            .map_err(|e| MatchFailureReason {
                                reason: e.reason.unwrap_or_else(|| {
                                    "Match failed, but no reason was given".to_owned()
                                }),
                            }),
                        pattern: rule.pattern.node.clone(),
                        node: node.clone(),
                    });
                }
            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
                if let Some(expanded) = self.sema.expand(&macro_call) {
                    if let Some(tt) = macro_call.token_tree() {
                        self.output_debug_for_nodes_at_range(
                            &expanded,
                            range,
                            &Some(self.sema.original_range(tt.syntax())),
                            out,
                        );
                    }
                }
            }
            self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
        }
    }
}

pub struct MatchDebugInfo {
    node: SyntaxNode,
    /// Our search pattern parsed as an expression or item, etc
    pattern: SyntaxNode,
    matched: Result<Match, MatchFailureReason>,
}

impl std::fmt::Debug for MatchDebugInfo {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.matched {
            Ok(_) => writeln!(f, "Node matched")?,
            Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?,
        }
        writeln!(
            f,
            "============ AST ===========\n\
            {:#?}",
            self.node
        )?;
        writeln!(f, "========= PATTERN ==========")?;
        writeln!(f, "{:#?}", self.pattern)?;
        writeln!(f, "============================")?;
        Ok(())
    }
}

impl SsrMatches {
    /// Returns `self` with any nested matches removed and made into top-level matches.
    pub fn flattened(self) -> SsrMatches {
        let mut out = SsrMatches::default();
        self.flatten_into(&mut out);
        out
    }

    fn flatten_into(self, out: &mut SsrMatches) {
        for mut m in self.matches {
            for p in m.placeholder_values.values_mut() {
                std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out);
            }
            out.matches.push(m);
        }
    }
}

impl Match {
    pub fn matched_text(&self) -> String {
        self.matched_node.text().to_string()
    }
}

impl std::error::Error for SsrError {}

#[cfg(test)]
impl MatchDebugInfo {
    pub(crate) fn match_failure_reason(&self) -> Option<&str> {
        self.matched.as_ref().err().map(|r| r.reason.as_str())
    }
}
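Taken together, the public API above is driven roughly as in the crate's own `parse_match_replace` test in matching.rs below; a condensed sketch, assuming a `db` and `position` obtained from the test fixture helpers (`apply_to_file` is a hypothetical stand-in for whatever the caller does with each edit):

```rust
// Parse a rule, resolve it in context, then collect per-file edits.
let rule: SsrRule = "foo($x) ==>> bar($x)".parse()?;
let mut match_finder = MatchFinder::in_context(&db, position, vec![]);
match_finder.add_rule(rule)?;
for source_file_edit in match_finder.edits() {
    // Each SourceFileEdit pairs a file_id with a text edit for that file.
    apply_to_file(source_file_edit);
}
```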
777 crates/ssr/src/matching.rs Normal file
@@ -0,0 +1,777 @@
//! This module is responsible for matching a search pattern against a node in the AST. In the
//! process of matching, placeholder values are recorded.

use crate::{
    parsing::{Constraint, NodeKind, Placeholder},
    resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
    SsrMatches,
};
use base_db::FileRange;
use hir::Semantics;
use rustc_hash::FxHashMap;
use std::{cell::Cell, iter::Peekable};
use syntax::ast::{AstNode, AstToken};
use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
use test_utils::mark;

// Creates a match error. If we're currently attempting to match some code that we thought we were
// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
macro_rules! match_error {
    ($e:expr) => {{
        MatchFailed {
            reason: if recording_match_fail_reasons() {
                Some(format!("{}", $e))
            } else {
                None
            }
        }
    }};
    ($fmt:expr, $($arg:tt)+) => {{
        MatchFailed {
            reason: if recording_match_fail_reasons() {
                Some(format!($fmt, $($arg)+))
            } else {
                None
            }
        }
    }};
}

// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons.
macro_rules! fail_match {
    ($($args:tt)*) => {return Err(match_error!($($args)*))};
}

/// Information about a match that was found.
#[derive(Debug)]
pub struct Match {
    pub(crate) range: FileRange,
    pub(crate) matched_node: SyntaxNode,
    pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
    pub(crate) ignored_comments: Vec<ast::Comment>,
    pub(crate) rule_index: usize,
    /// The depth of matched_node.
    pub(crate) depth: usize,
    // Each path in the template rendered for the module in which the match was found.
    pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
}

/// Represents a `$var` in an SSR query.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub(crate) struct Var(pub String);

/// Information about a placeholder bound in a match.
#[derive(Debug)]
pub(crate) struct PlaceholderMatch {
    /// The node that the placeholder matched to. If set, then we'll search for further matches
    /// within this node. It isn't set when we match tokens within a macro call's token tree.
    pub(crate) node: Option<SyntaxNode>,
    pub(crate) range: FileRange,
    /// More matches, found within `node`.
    pub(crate) inner_matches: SsrMatches,
}

#[derive(Debug)]
pub(crate) struct MatchFailureReason {
    pub(crate) reason: String,
}

/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this.
#[derive(Clone)]
pub(crate) struct MatchFailed {
    /// The reason why we failed to match. Only present when `debug_active` was true in the
    /// call to `get_match`.
    pub(crate) reason: Option<String>,
}

/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
/// the match, if it does. Since we only do matching in this module and searching is done by the
/// parent module, we don't populate nested matches.
pub(crate) fn get_match(
    debug_active: bool,
    rule: &ResolvedRule,
    code: &SyntaxNode,
    restrict_range: &Option<FileRange>,
    sema: &Semantics<ide_db::RootDatabase>,
) -> Result<Match, MatchFailed> {
    record_match_fails_reasons_scope(debug_active, || {
        Matcher::try_match(rule, code, restrict_range, sema)
    })
}

/// Checks if our search pattern matches a particular node of the AST.
struct Matcher<'db, 'sema> {
    sema: &'sema Semantics<'db, ide_db::RootDatabase>,
    /// If any placeholders come from anywhere outside of this range, then the match will be
    /// rejected.
    restrict_range: Option<FileRange>,
    rule: &'sema ResolvedRule,
}

/// Which phase of matching we're currently performing. We do two phases because most attempted
/// matches will fail and it means we can defer more expensive checks to the second phase.
enum Phase<'a> {
    /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded.
    First,
    /// On the second phase, we construct the `Match`. Things like what placeholders bind to are
    /// recorded.
    Second(&'a mut Match),
}
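// Illustrative sketch (editorial addition, not in the diff): the two-phase split
// above is a check-then-record pattern; the same traversal runs twice,
// parameterized by the phase, so the cheap rejection pass and the recording pass
// cannot drift apart. A standalone miniature of the idea:
//
//     enum MiniPhase<'a> {
//         First,                       // cheap checks, nothing recorded
//         Second(&'a mut Vec<String>), // same walk, but bindings get written out
//     }
//
//     fn visit(items: &[&str], phase: &mut MiniPhase) -> Result<(), ()> {
//         for item in items {
//             if item.is_empty() {
//                 return Err(()); // shared early-out, identical in both phases
//             }
//             if let MiniPhase::Second(out) = phase {
//                 out.push(item.to_string()); // only phase two records bindings
//             }
//         }
//         Ok(())
//     }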
impl<'db, 'sema> Matcher<'db, 'sema> {
    fn try_match(
        rule: &ResolvedRule,
        code: &SyntaxNode,
        restrict_range: &Option<FileRange>,
        sema: &'sema Semantics<'db, ide_db::RootDatabase>,
    ) -> Result<Match, MatchFailed> {
        let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
        // First pass at matching, where we check that node types and idents match.
        match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
        match_state.validate_range(&sema.original_range(code))?;
        let mut the_match = Match {
            range: sema.original_range(code),
            matched_node: code.clone(),
            placeholder_values: FxHashMap::default(),
            ignored_comments: Vec::new(),
            rule_index: rule.index,
            depth: 0,
            rendered_template_paths: FxHashMap::default(),
        };
        // Second matching pass, where we record placeholder matches, ignored comments and maybe do
        // any other more expensive checks that we didn't want to do on the first pass.
        match_state.attempt_match_node(
            &mut Phase::Second(&mut the_match),
            &rule.pattern.node,
            code,
        )?;
        the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
        if let Some(template) = &rule.template {
            the_match.render_template_paths(template, sema)?;
        }
        Ok(the_match)
    }

    /// Checks that `range` is within the permitted range if any. This is applicable when we're
    /// processing a macro expansion and we want to fail the match if we're working with a node that
    /// didn't originate from the token tree of the macro call.
    fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> {
        if let Some(restrict_range) = &self.restrict_range {
            if restrict_range.file_id != range.file_id
                || !restrict_range.range.contains_range(range.range)
            {
                fail_match!("Node originated from a macro");
            }
        }
        Ok(())
    }

    fn attempt_match_node(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        // Handle placeholders.
        if let Some(placeholder) = self.get_placeholder(&SyntaxElement::Node(pattern.clone())) {
            for constraint in &placeholder.constraints {
                self.check_constraint(constraint, code)?;
            }
            if let Phase::Second(matches_out) = phase {
                let original_range = self.sema.original_range(code);
                // We validated the range for the node when we started the match, so the placeholder
                // probably can't fail range validation, but just to be safe...
                self.validate_range(&original_range)?;
                matches_out.placeholder_values.insert(
                    Var(placeholder.ident.to_string()),
                    PlaceholderMatch::new(code, original_range),
                );
            }
            return Ok(());
        }
        // We allow a UFCS call to match a method call, provided they resolve to the same function.
        if let Some(pattern_ufcs) = self.rule.pattern.ufcs_function_calls.get(pattern) {
            if let Some(code) = ast::MethodCallExpr::cast(code.clone()) {
                return self.attempt_match_ufcs_to_method_call(phase, pattern_ufcs, &code);
            }
            if let Some(code) = ast::CallExpr::cast(code.clone()) {
                return self.attempt_match_ufcs_to_ufcs(phase, pattern_ufcs, &code);
            }
        }
        if pattern.kind() != code.kind() {
            fail_match!(
                "Pattern had `{}` ({:?}), code had `{}` ({:?})",
                pattern.text(),
                pattern.kind(),
                code.text(),
                code.kind()
            );
        }
        // Some kinds of nodes have special handling. For everything else, we fall back to default
        // matching.
        match code.kind() {
            SyntaxKind::RECORD_EXPR_FIELD_LIST => {
                self.attempt_match_record_field_list(phase, pattern, code)
            }
            SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
            SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
            _ => self.attempt_match_node_children(phase, pattern, code),
        }
    }

    fn attempt_match_node_children(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        self.attempt_match_sequences(
            phase,
            PatternIterator::new(pattern),
            code.children_with_tokens(),
        )
    }

    fn attempt_match_sequences(
        &self,
        phase: &mut Phase,
        pattern_it: PatternIterator,
        mut code_it: SyntaxElementChildren,
    ) -> Result<(), MatchFailed> {
        let mut pattern_it = pattern_it.peekable();
        loop {
            match phase.next_non_trivial(&mut code_it) {
                None => {
                    if let Some(p) = pattern_it.next() {
                        fail_match!("Part of the pattern was unmatched: {:?}", p);
                    }
                    return Ok(());
                }
                Some(SyntaxElement::Token(c)) => {
                    self.attempt_match_token(phase, &mut pattern_it, &c)?;
                }
                Some(SyntaxElement::Node(c)) => match pattern_it.next() {
                    Some(SyntaxElement::Node(p)) => {
                        self.attempt_match_node(phase, &p, &c)?;
                    }
                    Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()),
                    None => fail_match!("Pattern reached end, code has {}", c.text()),
                },
            }
        }
    }

    fn attempt_match_token(
        &self,
        phase: &mut Phase,
        pattern: &mut Peekable<PatternIterator>,
        code: &syntax::SyntaxToken,
    ) -> Result<(), MatchFailed> {
        phase.record_ignored_comments(code);
        // Ignore whitespace and comments.
        if code.kind().is_trivia() {
            return Ok(());
        }
        if let Some(SyntaxElement::Token(p)) = pattern.peek() {
            // If the code has a comma and the pattern is about to close something, then accept the
            // comma without advancing the pattern. i.e. ignore trailing commas.
            if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) {
                return Ok(());
            }
            // Conversely, if the pattern has a comma and the code doesn't, skip that part of the
            // pattern and continue to match the code.
            if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) {
                pattern.next();
            }
        }
        // Consume an element from the pattern and make sure it matches.
        match pattern.next() {
            Some(SyntaxElement::Token(p)) => {
                if p.kind() != code.kind() || p.text() != code.text() {
                    fail_match!(
                        "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})",
                        p.text(),
                        p.kind(),
                        code.text(),
                        code.kind()
                    )
                }
            }
            Some(SyntaxElement::Node(p)) => {
                // Not sure if this is actually reachable.
                fail_match!(
                    "Pattern wanted {:?}, but code had token '{}' ({:?})",
                    p,
                    code.text(),
                    code.kind()
                );
            }
            None => {
                fail_match!("Pattern exhausted, while code remains: `{}`", code.text());
            }
        }
        Ok(())
    }

    fn check_constraint(
        &self,
        constraint: &Constraint,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        match constraint {
            Constraint::Kind(kind) => {
                kind.matches(code)?;
            }
            Constraint::Not(sub) => {
                if self.check_constraint(&*sub, code).is_ok() {
                    fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
                }
            }
        }
        Ok(())
    }

    /// Paths are matched based on whether they refer to the same thing, even if they're written
    /// differently.
    fn attempt_match_path(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
            let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
            let code_path = ast::Path::cast(code.clone()).unwrap();
            if let (Some(pattern_segment), Some(code_segment)) =
                (pattern_path.segment(), code_path.segment())
            {
                // Match everything within the segment except for the name-ref, which is handled
                // separately via comparing what the path resolves to below.
                self.attempt_match_opt(
                    phase,
                    pattern_segment.generic_arg_list(),
                    code_segment.generic_arg_list(),
                )?;
                self.attempt_match_opt(
                    phase,
                    pattern_segment.param_list(),
                    code_segment.param_list(),
                )?;
            }
            if matches!(phase, Phase::Second(_)) {
                let resolution = self
                    .sema
                    .resolve_path(&code_path)
                    .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
                if pattern_resolved.resolution != resolution {
                    fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
                }
            }
        } else {
            return self.attempt_match_node_children(phase, pattern, code);
        }
        Ok(())
    }

    fn attempt_match_opt<T: AstNode>(
        &self,
        phase: &mut Phase,
        pattern: Option<T>,
        code: Option<T>,
    ) -> Result<(), MatchFailed> {
        match (pattern, code) {
            (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
            (None, None) => Ok(()),
            (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
            (None, Some(c)) => {
                fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
            }
        }
    }

    /// We want to allow the records to match in any order, so we have special matching logic for
    /// them.
    fn attempt_match_record_field_list(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &SyntaxNode,
    ) -> Result<(), MatchFailed> {
        // Build a map keyed by field name.
        let mut fields_by_name = FxHashMap::default();
        for child in code.children() {
            if let Some(record) = ast::RecordExprField::cast(child.clone()) {
                if let Some(name) = record.field_name() {
                    fields_by_name.insert(name.text().clone(), child.clone());
                }
            }
        }
        for p in pattern.children_with_tokens() {
            if let SyntaxElement::Node(p) = p {
                if let Some(name_element) = p.first_child_or_token() {
                    if self.get_placeholder(&name_element).is_some() {
                        // If the pattern is using placeholders for field names then order
                        // independence doesn't make sense. Fall back to regular ordered
                        // matching.
                        return self.attempt_match_node_children(phase, pattern, code);
                    }
                    if let Some(ident) = only_ident(name_element) {
                        let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
                            match_error!(
                                "Placeholder has record field '{}', but code doesn't",
                                ident
                            )
                        })?;
                        self.attempt_match_node(phase, &p, &code_record)?;
                    }
                }
            }
        }
        if let Some(unmatched_fields) = fields_by_name.keys().next() {
            fail_match!(
                "{} field(s) of a record literal failed to match, starting with {}",
                fields_by_name.len(),
                unmatched_fields
            );
        }
        Ok(())
    }

    /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token
    /// tree it can match a sequence of tokens. Note that this code will only be used when the
    /// pattern matches the macro invocation. For matches within the macro call, we'll already have
    /// expanded the macro.
    fn attempt_match_token_tree(
        &self,
        phase: &mut Phase,
        pattern: &SyntaxNode,
        code: &syntax::SyntaxNode,
    ) -> Result<(), MatchFailed> {
        let mut pattern = PatternIterator::new(pattern).peekable();
        let mut children = code.children_with_tokens();
        while let Some(child) = children.next() {
            if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
                pattern.next();
                let next_pattern_token = pattern
                    .peek()
                    .and_then(|p| match p {
                        SyntaxElement::Token(t) => Some(t.clone()),
                        SyntaxElement::Node(n) => n.first_token(),
                    })
                    .map(|p| p.text().to_string());
                let first_matched_token = child.clone();
                let mut last_matched_token = child;
                // Read code tokens until we reach one equal to the next token from our pattern
                // or we reach the end of the token tree.
                while let Some(next) = children.next() {
                    match &next {
                        SyntaxElement::Token(t) => {
                            if Some(t.to_string()) == next_pattern_token {
                                pattern.next();
                                break;
                            }
                        }
                        SyntaxElement::Node(n) => {
                            if let Some(first_token) = n.first_token() {
                                if Some(first_token.to_string()) == next_pattern_token {
                                    if let Some(SyntaxElement::Node(p)) = pattern.next() {
                                        // We have a subtree that starts with the next token in our pattern.
                                        self.attempt_match_token_tree(phase, &p, &n)?;
                                        break;
                                    }
                                }
                            }
                        }
                    };
                    last_matched_token = next;
                }
                if let Phase::Second(match_out) = phase {
                    match_out.placeholder_values.insert(
                        Var(placeholder.ident.to_string()),
                        PlaceholderMatch::from_range(FileRange {
                            file_id: self.sema.original_range(code).file_id,
                            range: first_matched_token
                                .text_range()
                                .cover(last_matched_token.text_range()),
                        }),
                    );
                }
                continue;
            }
            // Match literal (non-placeholder) tokens.
            match child {
                SyntaxElement::Token(token) => {
                    self.attempt_match_token(phase, &mut pattern, &token)?;
                }
                SyntaxElement::Node(node) => match pattern.next() {
                    Some(SyntaxElement::Node(p)) => {
                        self.attempt_match_token_tree(phase, &p, &node)?;
                    }
                    Some(SyntaxElement::Token(p)) => fail_match!(
                        "Pattern has token '{}', code has subtree '{}'",
                        p.text(),
                        node.text()
                    ),
                    None => fail_match!("Pattern has nothing, code has '{}'", node.text()),
                },
            }
        }
        if let Some(p) = pattern.next() {
            fail_match!("Reached end of token tree in code, but pattern still has {:?}", p);
        }
        Ok(())
    }

    fn attempt_match_ufcs_to_method_call(
        &self,
        phase: &mut Phase,
        pattern_ufcs: &UfcsCallInfo,
        code: &ast::MethodCallExpr,
    ) -> Result<(), MatchFailed> {
        use ast::ArgListOwner;
        let code_resolved_function = self
            .sema
            .resolve_method_call(code)
            .ok_or_else(|| match_error!("Failed to resolve method call"))?;
        if pattern_ufcs.function != code_resolved_function {
            fail_match!("Method call resolved to a different function");
        }
        if code_resolved_function.has_self_param(self.sema.db) {
            if let (Some(pattern_type), Some(expr)) = (&pattern_ufcs.qualifier_type, &code.expr()) {
                self.check_expr_type(pattern_type, expr)?;
            }
        }
        // Check arguments.
        let mut pattern_args = pattern_ufcs
            .call_expr
            .arg_list()
            .ok_or_else(|| match_error!("Pattern function call has no args"))?
            .args();
        self.attempt_match_opt(phase, pattern_args.next(), code.expr())?;
        let mut code_args =
            code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
        loop {
            match (pattern_args.next(), code_args.next()) {
                (None, None) => return Ok(()),
                (p, c) => self.attempt_match_opt(phase, p, c)?,
            }
        }
    }

    fn attempt_match_ufcs_to_ufcs(
        &self,
        phase: &mut Phase,
        pattern_ufcs: &UfcsCallInfo,
        code: &ast::CallExpr,
    ) -> Result<(), MatchFailed> {
        use ast::ArgListOwner;
        // Check that the first argument is the expected type.
        if let (Some(pattern_type), Some(expr)) = (
            &pattern_ufcs.qualifier_type,
            &code.arg_list().and_then(|code_args| code_args.args().next()),
        ) {
            self.check_expr_type(pattern_type, expr)?;
        }
        self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax())
    }

    fn check_expr_type(
        &self,
        pattern_type: &hir::Type,
        expr: &ast::Expr,
    ) -> Result<(), MatchFailed> {
        use hir::HirDisplay;
        let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| {
            match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
        })?;
        if !code_type
            .autoderef(self.sema.db)
            .any(|deref_code_type| *pattern_type == deref_code_type)
        {
            fail_match!(
                "Pattern type `{}` didn't match code type `{}`",
                pattern_type.display(self.sema.db),
                code_type.display(self.sema.db)
            );
        }
        Ok(())
    }

    fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
        only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
    }
}

impl Match {
    fn render_template_paths(
        &mut self,
        template: &ResolvedPattern,
        sema: &Semantics<ide_db::RootDatabase>,
    ) -> Result<(), MatchFailed> {
        let module = sema
            .scope(&self.matched_node)
            .module()
            .ok_or_else(|| match_error!("Matched node isn't in a module"))?;
        for (path, resolved_path) in &template.resolved_paths {
            if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
                let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
                    match_error!("Failed to render template path `{}` at match location")
                })?;
                self.rendered_template_paths.insert(path.clone(), mod_path);
            }
        }
        Ok(())
    }
}

impl Phase<'_> {
    fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> {
        loop {
            let c = code_it.next();
            if let Some(SyntaxElement::Token(t)) = &c {
                self.record_ignored_comments(t);
                if t.kind().is_trivia() {
                    continue;
                }
            }
            return c;
        }
    }

    fn record_ignored_comments(&mut self, token: &SyntaxToken) {
        if token.kind() == SyntaxKind::COMMENT {
            if let Phase::Second(match_out) = self {
                if let Some(comment) = ast::Comment::cast(token.clone()) {
                    match_out.ignored_comments.push(comment);
                }
            }
        }
    }
}

fn is_closing_token(kind: SyntaxKind) -> bool {
    kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK
}

pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T
where
    F: Fn() -> T,
{
    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active));
    let res = f();
    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false));
    res
}

// For performance reasons, we don't want to record the reason why every match fails, only the bit
// of code that the user indicated they thought would match. We use a thread local to indicate when
// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
// of code that can make the decision to not match.
thread_local! {
    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
}

fn recording_match_fail_reasons() -> bool {
    RECORDING_MATCH_FAIL_REASONS.with(|c| c.get())
}

impl PlaceholderMatch {
    fn new(node: &SyntaxNode, range: FileRange) -> Self {
        Self { node: Some(node.clone()), range, inner_matches: SsrMatches::default() }
    }

    fn from_range(range: FileRange) -> Self {
        Self { node: None, range, inner_matches: SsrMatches::default() }
    }
}

impl NodeKind {
    fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
        let ok = match self {
            Self::Literal => {
                mark::hit!(literal_constraint);
                ast::Literal::can_cast(node.kind())
            }
        };
        if !ok {
            fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
        }
        Ok(())
    }
}

// If `node` contains nothing but an ident then return it, otherwise return None.
fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
    match element {
        SyntaxElement::Token(t) => {
            if t.kind() == SyntaxKind::IDENT {
                return Some(t);
            }
        }
        SyntaxElement::Node(n) => {
            let mut children = n.children_with_tokens();
            if let (Some(only_child), None) = (children.next(), children.next()) {
                return only_ident(only_child);
            }
        }
    }
    None
}

struct PatternIterator {
    iter: SyntaxElementChildren,
}

impl Iterator for PatternIterator {
    type Item = SyntaxElement;

    fn next(&mut self) -> Option<SyntaxElement> {
        while let Some(element) = self.iter.next() {
            if !element.kind().is_trivia() {
                return Some(element);
            }
        }
        None
    }
}

impl PatternIterator {
    fn new(parent: &SyntaxNode) -> Self {
        Self { iter: parent.children_with_tokens() }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{MatchFinder, SsrRule};

    #[test]
    fn parse_match_replace() {
        let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
        let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";

        let (db, position, selections) = crate::tests::single_file(input);
        let mut match_finder = MatchFinder::in_context(&db, position, selections);
        match_finder.add_rule(rule).unwrap();
        let matches = match_finder.matches();
        assert_eq!(matches.matches.len(), 1);
        assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
        assert_eq!(matches.matches[0].placeholder_values.len(), 1);
        assert_eq!(
            matches.matches[0].placeholder_values[&Var("x".to_string())]
                .node
                .as_ref()
                .unwrap()
                .text(),
            "1+2"
        );

        let edits = match_finder.edits();
        assert_eq!(edits.len(), 1);
        let edit = &edits[0];
        let mut after = input.to_string();
        edit.edit.apply(&mut after);
        assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
    }
}
94 crates/ssr/src/nester.rs Normal file
@@ -0,0 +1,94 @@
//! Converts a flat collection of matches into a nested form suitable for replacement. When there
//! are multiple matches for a node, or matches that overlap, priority is given to the earlier rule.
//! Nested matches are only permitted if the inner match is contained entirely within a placeholder
//! of an outer match.
//!
//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`,
//! then we'll get 3 matches; however, only the outermost and innermost matches can be accepted. The
//! middle match would take the second `foo` from the outer match.

use crate::{Match, SsrMatches};
use rustc_hash::FxHashMap;
use syntax::SyntaxNode;

pub(crate) fn nest_and_remove_collisions(
    mut matches: Vec<Match>,
    sema: &hir::Semantics<ide_db::RootDatabase>,
) -> SsrMatches {
    // We sort the matches by depth then by rule index. Sorting by depth means that by the time we
    // see a match, any parent matches or conflicting matches will have already been seen. Sorting
    // by rule_index means that if there are two matches for the same node, the rule added first
    // will take precedence.
    matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index)));
    let mut collector = MatchCollector::default();
    for m in matches {
        collector.add_match(m, sema);
    }
    collector.into()
}

#[derive(Default)]
struct MatchCollector {
    matches_by_node: FxHashMap<SyntaxNode, Match>,
}

impl MatchCollector {
    /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
    /// it is entirely within a placeholder of an existing match, then it is added as a child
    /// match of the existing match.
    fn add_match(&mut self, m: Match, sema: &hir::Semantics<ide_db::RootDatabase>) {
        let matched_node = m.matched_node.clone();
        if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
            try_add_sub_match(m, existing, sema);
            return;
        }
        for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
            if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
                try_add_sub_match(m, existing, sema);
                return;
            }
        }
        self.matches_by_node.insert(matched_node, m);
    }
}

/// Attempts to add `m` as a sub-match of `existing`.
fn try_add_sub_match(m: Match, existing: &mut Match, sema: &hir::Semantics<ide_db::RootDatabase>) {
    for p in existing.placeholder_values.values_mut() {
        // Note, no need to check if p.range.file_id is equal to m.range.file_id, since we
        // already know we're within `existing`.
        if p.range.range.contains_range(m.range.range) {
            // Convert the inner matches in `p` into a temporary MatchCollector. When
            // we're done, we then convert it back into an SsrMatches. If we expected
            // lots of inner matches, it might be worthwhile keeping a MatchCollector
            // around for each placeholder match. However we expect most placeholders
            // will have 0 and a few will have 1. More than that should hopefully be
            // exceptional.
            let mut collector = MatchCollector::default();
            for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) {
                collector.matches_by_node.insert(m.matched_node.clone(), m);
            }
            collector.add_match(m, sema);
            p.inner_matches = collector.into();
            break;
        }
    }
}

impl From<MatchCollector> for SsrMatches {
    fn from(mut match_collector: MatchCollector) -> Self {
        let mut matches = SsrMatches::default();
        for (_, m) in match_collector.matches_by_node.drain() {
            matches.matches.push(m);
        }
        matches.matches.sort_by(|a, b| {
            // Order matches by file_id then by start range. This should be sufficient since ranges
            // shouldn't be overlapping.
            a.range
                .file_id
                .cmp(&b.range.file_id)
                .then_with(|| a.range.range.start().cmp(&b.range.range.start()))
        });
        matches
    }
}
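Working through the module doc's example makes the sort order's purpose visible (an illustration of the logic above, not code from this commit): for pattern `foo(foo($a))` against `foo(foo(foo(foo(42))))`, the three candidates arrive outermost-first once sorted by depth.

```rust
// 1. foo(foo(foo(foo(42))))  -- outermost, seen first: inserted as a top-level match
// 2.     foo(foo(foo(42)))   -- overlaps match 1 outside its placeholder: discarded
// 3.         foo(foo(42))    -- lies entirely inside `$a` of match 1: kept as an inner match
```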
389 crates/ssr/src/parsing.rs Normal file
@@ -0,0 +1,389 @@
//! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`.
//! We first split everything before and after the separator `==>>`. Next, both the search pattern
//! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for
//! placeholders, which start with `$`. For replacement templates, this is the final form. For
//! search patterns, we go further and parse the pattern as each kind of thing that we can match.
//! e.g. expressions, type references etc.

use crate::errors::bail;
use crate::{SsrError, SsrPattern, SsrRule};
use rustc_hash::{FxHashMap, FxHashSet};
use std::str::FromStr;
use syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
use test_utils::mark;

#[derive(Debug)]
pub(crate) struct ParsedRule {
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    pub(crate) pattern: SyntaxNode,
    pub(crate) template: Option<SyntaxNode>,
}

#[derive(Debug)]
pub(crate) struct RawPattern {
    tokens: Vec<PatternElement>,
}

// Part of a search or replace pattern.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum PatternElement {
    Token(Token),
    Placeholder(Placeholder),
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Placeholder {
    /// The name of this placeholder. e.g. for "$a", this would be "a"
    pub(crate) ident: SmolStr,
    /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
    stand_in_name: String,
    pub(crate) constraints: Vec<Constraint>,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Constraint {
    Kind(NodeKind),
    Not(Box<Constraint>),
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum NodeKind {
    Literal,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Token {
    kind: SyntaxKind,
    pub(crate) text: SmolStr,
}

impl ParsedRule {
    fn new(
        pattern: &RawPattern,
        template: Option<&RawPattern>,
    ) -> Result<Vec<ParsedRule>, SsrError> {
        let raw_pattern = pattern.as_rust_code();
        let raw_template = template.map(|t| t.as_rust_code());
        let raw_template = raw_template.as_ref().map(|s| s.as_str());
        let mut builder = RuleBuilder {
            placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
            rules: Vec::new(),
        };
        builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse));
        builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse));
        builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse));
        builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
        builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
        builder.build()
    }
}

struct RuleBuilder {
    placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
    rules: Vec<ParsedRule>,
}

impl RuleBuilder {
    fn try_add<T: AstNode>(&mut self, pattern: Result<T, ()>, template: Option<Result<T, ()>>) {
        match (pattern, template) {
            (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
                pattern: pattern.syntax().clone(),
                template: Some(template.syntax().clone()),
            }),
            (Ok(pattern), None) => self.rules.push(ParsedRule {
                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
                pattern: pattern.syntax().clone(),
                template: None,
            }),
            _ => {}
        }
    }

    fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> {
        if self.rules.is_empty() {
            bail!("Not a valid Rust expression, type, item, path or pattern");
        }
        // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a
        // mix leads to strange semantics, since the path-based rules only match things where the
        // path refers to semantically the same thing, whereas the non-path-based rules could match
        // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the
        // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a
        // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in
        // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd
        // have to use the slow-scan search mechanism.
        if self.rules.iter().any(|rule| contains_path(&rule.pattern)) {
            let old_len = self.rules.len();
            self.rules.retain(|rule| contains_path(&rule.pattern));
            if self.rules.len() < old_len {
                mark::hit!(pattern_is_a_single_segment_path);
            }
        }
        Ok(self.rules)
    }
}

/// Returns whether there are any paths in `node`.
fn contains_path(node: &SyntaxNode) -> bool {
    node.kind() == SyntaxKind::PATH
        || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
}

impl FromStr for SsrRule {
    type Err = SsrError;

    fn from_str(query: &str) -> Result<SsrRule, SsrError> {
        let mut it = query.split("==>>");
        let pattern = it.next().expect("at least empty string").trim();
        let template = it
            .next()
            .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
            .trim()
            .to_string();
        if it.next().is_some() {
            return Err(SsrError("More than one delimiter found".into()));
        }
        let raw_pattern = pattern.parse()?;
        let raw_template = template.parse()?;
        let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
        let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
        validate_rule(&rule)?;
        Ok(rule)
    }
}
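// Illustrative sketch (editorial addition, not a quoted test): the delimiter
// handling above implies the following behavior for `str::parse::<SsrRule>()`:
//
//     assert!("foo($a) ==>> bar($a)".parse::<SsrRule>().is_ok());
//     assert!("foo($a)".parse::<SsrRule>().is_err());         // no `==>>` delimiter
//     assert!("a ==>> b ==>> c".parse::<SsrRule>().is_err()); // more than one delimiter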
impl FromStr for RawPattern {
    type Err = SsrError;

    fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
        Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
    }
}

impl RawPattern {
    /// Returns this search pattern as Rust source code that we can feed to the Rust parser.
    fn as_rust_code(&self) -> String {
        let mut res = String::new();
        for t in &self.tokens {
            res.push_str(match t {
                PatternElement::Token(token) => token.text.as_str(),
                PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(),
            });
        }
        res
    }

    pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
        let mut res = FxHashMap::default();
        for t in &self.tokens {
            if let PatternElement::Placeholder(placeholder) = t {
                res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone());
            }
        }
        res
    }
}

impl FromStr for SsrPattern {
    type Err = SsrError;

    fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
        let raw_pattern = pattern_str.parse()?;
        let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
        Ok(SsrPattern { raw: raw_pattern, parsed_rules })
    }
}
/// Returns `pattern_str`, parsed as a search or replace pattern.
|
||||
fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
|
||||
let mut res = Vec::new();
|
||||
let mut placeholder_names = FxHashSet::default();
|
||||
let mut tokens = tokenize(pattern_str)?.into_iter();
|
||||
while let Some(token) = tokens.next() {
|
||||
if token.kind == T![$] {
|
||||
let placeholder = parse_placeholder(&mut tokens)?;
|
||||
if !placeholder_names.insert(placeholder.ident.clone()) {
|
||||
bail!("Name `{}` repeats more than once", placeholder.ident);
|
||||
}
|
||||
res.push(PatternElement::Placeholder(placeholder));
|
||||
} else {
|
||||
res.push(PatternElement::Token(token));
|
||||
}
|
||||
}
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
/// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search
|
||||
/// pattern didn't define.
|
||||
fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
|
||||
let mut defined_placeholders = FxHashSet::default();
|
||||
for p in &rule.pattern.tokens {
|
||||
if let PatternElement::Placeholder(placeholder) = p {
|
||||
defined_placeholders.insert(&placeholder.ident);
|
||||
}
|
||||
}
|
||||
let mut undefined = Vec::new();
|
||||
for p in &rule.template.tokens {
|
||||
if let PatternElement::Placeholder(placeholder) = p {
|
||||
if !defined_placeholders.contains(&placeholder.ident) {
|
||||
undefined.push(format!("${}", placeholder.ident));
|
||||
}
|
||||
if !placeholder.constraints.is_empty() {
|
||||
bail!("Replacement placeholders cannot have constraints");
|
||||
}
|
||||
}
|
||||
}
|
||||
if !undefined.is_empty() {
|
||||
bail!("Replacement contains undefined placeholders: {}", undefined.join(", "));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
|
||||
let mut start = 0;
|
||||
let (raw_tokens, errors) = syntax::tokenize(source);
|
||||
if let Some(first_error) = errors.first() {
|
||||
bail!("Failed to parse pattern: {}", first_error);
|
||||
}
|
||||
let mut tokens: Vec<Token> = Vec::new();
|
||||
for raw_token in raw_tokens {
|
||||
let token_len = usize::from(raw_token.len);
|
||||
tokens.push(Token {
|
||||
kind: raw_token.kind,
|
||||
text: SmolStr::new(&source[start..start + token_len]),
|
||||
});
|
||||
start += token_len;
|
||||
}
|
||||
Ok(tokens)
|
||||
}

fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
    let mut name = None;
    let mut constraints = Vec::new();
    if let Some(token) = tokens.next() {
        match token.kind {
            SyntaxKind::IDENT => {
                name = Some(token.text);
            }
            T!['{'] => {
                let token =
                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
                if token.kind == SyntaxKind::IDENT {
                    name = Some(token.text);
                }
                loop {
                    let token = tokens
                        .next()
                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
                    match token.kind {
                        T![:] => {
                            constraints.push(parse_constraint(tokens)?);
                        }
                        T!['}'] => break,
                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
                    }
                }
            }
            _ => {
                bail!("Placeholders should either be $name or ${{name:constraints}}");
            }
        }
    }
    let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
    Ok(Placeholder::new(name, constraints))
}
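
// Editorial example (illustrative, not part of the original diff): the short
// form `$x` and the braced form `${x:kind(literal)}` both produce a Placeholder
// named "x"; only the braced form can attach `:`-separated constraints.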

fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
    let constraint_type = tokens
        .next()
        .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
        .text
        .to_string();
    match constraint_type.as_str() {
        "kind" => {
            expect_token(tokens, "(")?;
            let t = tokens.next().ok_or_else(|| {
                SsrError::new("Unexpected end of constraint while looking for kind")
            })?;
            if t.kind != SyntaxKind::IDENT {
                bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
            }
            expect_token(tokens, ")")?;
            Ok(Constraint::Kind(NodeKind::from(&t.text)?))
        }
        "not" => {
            expect_token(tokens, "(")?;
            let sub = parse_constraint(tokens)?;
            expect_token(tokens, ")")?;
            Ok(Constraint::Not(Box::new(sub)))
        }
        x => bail!("Unsupported constraint type '{}'", x),
    }
}
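
// Editorial example (illustrative, not part of the original diff): since `not`
// recurses into parse_constraint, constraints nest, so `${a:not(kind(literal))}`
// restricts `$a` to nodes that are not literals. As NodeKind::from below shows,
// `literal` is currently the only supported node kind.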

fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
    if let Some(t) = tokens.next() {
        if t.text == expected {
            return Ok(());
        }
        bail!("Expected {} found {}", expected, t.text);
    }
    bail!("Expected {} found end of stream", expected);
}

impl NodeKind {
    fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
        Ok(match name.as_str() {
            "literal" => NodeKind::Literal,
            _ => bail!("Unknown node kind '{}'", name),
        })
    }
}

impl Placeholder {
    fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
        Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name }
    }
}
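
// Editorial note (illustrative, not part of the original diff): the stand-in
// name is an ordinary identifier, so a pattern containing `$a` can be handled
// as regular Rust source in which `$a` appears as `__placeholder_a`.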

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parser_happy_case() {
        fn token(kind: SyntaxKind, text: &str) -> PatternElement {
            PatternElement::Token(Token { kind, text: SmolStr::new(text) })
        }
        fn placeholder(name: &str) -> PatternElement {
            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
        }
        let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
        assert_eq!(
            result.pattern.tokens,
            vec![
                token(SyntaxKind::IDENT, "foo"),
                token(T!['('], "("),
                placeholder("a"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("b"),
                token(T![')'], ")"),
            ]
        );
        assert_eq!(
            result.template.tokens,
            vec![
                token(SyntaxKind::IDENT, "bar"),
                token(T!['('], "("),
                placeholder("b"),
                token(T![,], ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("a"),
                token(T![')'], ")"),
            ]
        );
    }
}

194
crates/ssr/src/replacing.rs
Normal file
@ -0,0 +1,194 @@
//! Code for applying replacement templates for matches that have previously been found.

use crate::matching::Var;
use crate::{resolving::ResolvedRule, Match, SsrMatches};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::ast::{self, AstToken};
use syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize};
use text_edit::TextEdit;

/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
/// template. Placeholders in the template will have been substituted with whatever they matched to
/// in the original code.
pub(crate) fn matches_to_edit(
    matches: &SsrMatches,
    file_src: &str,
    rules: &[ResolvedRule],
) -> TextEdit {
    matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
}

fn matches_to_edit_at_offset(
    matches: &SsrMatches,
    file_src: &str,
    relative_start: TextSize,
    rules: &[ResolvedRule],
) -> TextEdit {
    let mut edit_builder = TextEdit::builder();
    for m in &matches.matches {
        edit_builder.replace(
            m.range.range.checked_sub(relative_start).unwrap(),
            render_replace(m, file_src, rules),
        );
    }
    edit_builder.finish()
}

struct ReplacementRenderer<'a> {
    match_info: &'a Match,
    file_src: &'a str,
    rules: &'a [ResolvedRule],
    rule: &'a ResolvedRule,
    out: String,
    // Map from a range within `out` to a token in `template` that represents a placeholder. This is
    // used to validate that the generated source code doesn't split any placeholder expansions (see
    // below).
    placeholder_tokens_by_range: FxHashMap<TextRange, SyntaxToken>,
    // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out`
    // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1
    // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis.
    placeholder_tokens_requiring_parenthesis: FxHashSet<SyntaxToken>,
}

fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
    let rule = &rules[match_info.rule_index];
    let template = rule
        .template
        .as_ref()
        .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
    let mut renderer = ReplacementRenderer {
        match_info,
        file_src,
        rules,
        rule,
        out: String::new(),
        placeholder_tokens_requiring_parenthesis: FxHashSet::default(),
        placeholder_tokens_by_range: FxHashMap::default(),
    };
    renderer.render_node(&template.node);
    renderer.maybe_rerender_with_extra_parenthesis(&template.node);
    for comment in &match_info.ignored_comments {
        renderer.out.push_str(&comment.syntax().to_string());
    }
    renderer.out
}

impl ReplacementRenderer<'_> {
    fn render_node_children(&mut self, node: &SyntaxNode) {
        for node_or_token in node.children_with_tokens() {
            self.render_node_or_token(&node_or_token);
        }
    }

    fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
        match node_or_token {
            SyntaxElement::Token(token) => {
                self.render_token(&token);
            }
            SyntaxElement::Node(child_node) => {
                self.render_node(&child_node);
            }
        }
    }

    fn render_node(&mut self, node: &SyntaxNode) {
        use syntax::ast::AstNode;
        if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
            self.out.push_str(&mod_path.to_string());
            // Emit everything except for the segment's name-ref, since we already effectively
            // emitted that as part of `mod_path`.
            if let Some(path) = ast::Path::cast(node.clone()) {
                if let Some(segment) = path.segment() {
                    for node_or_token in segment.syntax().children_with_tokens() {
                        if node_or_token.kind() != SyntaxKind::NAME_REF {
                            self.render_node_or_token(&node_or_token);
                        }
                    }
                }
            }
        } else {
            self.render_node_children(&node);
        }
    }

    fn render_token(&mut self, token: &SyntaxToken) {
        if let Some(placeholder) = self.rule.get_placeholder(&token) {
            if let Some(placeholder_value) =
                self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string()))
            {
                let range = &placeholder_value.range.range;
                let mut matched_text =
                    self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
                let edit = matches_to_edit_at_offset(
                    &placeholder_value.inner_matches,
                    self.file_src,
                    range.start(),
                    self.rules,
                );
                let needs_parenthesis =
                    self.placeholder_tokens_requiring_parenthesis.contains(token);
                edit.apply(&mut matched_text);
                if needs_parenthesis {
                    self.out.push('(');
                }
                self.placeholder_tokens_by_range.insert(
                    TextRange::new(
                        TextSize::of(&self.out),
                        TextSize::of(&self.out) + TextSize::of(&matched_text),
                    ),
                    token.clone(),
                );
                self.out.push_str(&matched_text);
                if needs_parenthesis {
                    self.out.push(')');
                }
            } else {
                // We validated that all placeholder references were valid before we
                // started, so this shouldn't happen.
                panic!(
                    "Internal error: replacement referenced unknown placeholder {}",
                    placeholder.ident
                );
            }
        } else {
            self.out.push_str(token.text().as_str());
        }
    }

    // Checks if the resulting code, when parsed, doesn't split any placeholders due to a different
    // order of operations between the search pattern and the replacement template. If any do, then
    // we rerender the template and wrap the problematic placeholders with parenthesis.
    fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) {
        if let Some(node) = parse_as_kind(&self.out, template.kind()) {
            self.remove_node_ranges(node);
            if self.placeholder_tokens_by_range.is_empty() {
                return;
            }
            self.placeholder_tokens_requiring_parenthesis =
                self.placeholder_tokens_by_range.values().cloned().collect();
            self.out.clear();
            self.render_node(template);
        }
    }

    fn remove_node_ranges(&mut self, node: SyntaxNode) {
        self.placeholder_tokens_by_range.remove(&node.text_range());
        for child in node.children() {
            self.remove_node_ranges(child);
        }
    }
}

fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> {
    use syntax::ast::AstNode;
    if ast::Expr::can_cast(kind) {
        if let Ok(expr) = ast::Expr::parse(code) {
            return Some(expr.syntax().clone());
        }
    } else if ast::Item::can_cast(kind) {
        if let Ok(item) = ast::Item::parse(code) {
            return Some(item.syntax().clone());
        }
    }
    None
}
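
// Editorial example (illustrative, not part of the original diff): with the
// template `$a.to_string()` and `$a` matching `1 + 2`, the first render yields
// `1 + 2.to_string()`. Re-parsing that output shows the placeholder's text no
// longer maps to a single node, so the template is rendered again as
// `(1 + 2).to_string()`.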

299
crates/ssr/src/resolving.rs
Normal file
@ -0,0 +1,299 @@
//! This module is responsible for resolving paths within rules.

use crate::errors::error;
use crate::{parsing, SsrError};
use base_db::FilePosition;
use parsing::Placeholder;
use rustc_hash::FxHashMap;
use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
use test_utils::mark;

pub(crate) struct ResolutionScope<'db> {
    scope: hir::SemanticsScope<'db>,
    hygiene: hir::Hygiene,
    node: SyntaxNode,
}

pub(crate) struct ResolvedRule {
    pub(crate) pattern: ResolvedPattern,
    pub(crate) template: Option<ResolvedPattern>,
    pub(crate) index: usize,
}

pub(crate) struct ResolvedPattern {
    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
    pub(crate) node: SyntaxNode,
    // Paths in `node` that we've resolved.
    pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
    pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
    pub(crate) contains_self: bool,
}

pub(crate) struct ResolvedPath {
    pub(crate) resolution: hir::PathResolution,
    /// The depth of the ast::Path that was resolved within the pattern.
    pub(crate) depth: u32,
}

pub(crate) struct UfcsCallInfo {
    pub(crate) call_expr: ast::CallExpr,
    pub(crate) function: hir::Function,
    pub(crate) qualifier_type: Option<hir::Type>,
}

impl ResolvedRule {
    pub(crate) fn new(
        rule: parsing::ParsedRule,
        resolution_scope: &ResolutionScope,
        index: usize,
    ) -> Result<ResolvedRule, SsrError> {
        let resolver =
            Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
        let resolved_template = if let Some(template) = rule.template {
            Some(resolver.resolve_pattern_tree(template)?)
        } else {
            None
        };
        Ok(ResolvedRule {
            pattern: resolver.resolve_pattern_tree(rule.pattern)?,
            template: resolved_template,
            index,
        })
    }

    pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
        if token.kind() != SyntaxKind::IDENT {
            return None;
        }
        self.pattern.placeholders_by_stand_in.get(token.text())
    }
}

struct Resolver<'a, 'db> {
    resolution_scope: &'a ResolutionScope<'db>,
    placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
}

impl Resolver<'_, '_> {
    fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
        use syntax::ast::AstNode;
        use syntax::{SyntaxElement, T};
        let mut resolved_paths = FxHashMap::default();
        self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
        let ufcs_function_calls = resolved_paths
            .iter()
            .filter_map(|(path_node, resolved)| {
                if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
                    if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) {
                        if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) =
                            resolved.resolution
                        {
                            let qualifier_type = self.resolution_scope.qualifier_type(path_node);
                            return Some((
                                grandparent,
                                UfcsCallInfo { call_expr, function, qualifier_type },
                            ));
                        }
                    }
                }
                None
            })
            .collect();
        let contains_self =
            pattern.descendants_with_tokens().any(|node_or_token| match node_or_token {
                SyntaxElement::Token(t) => t.kind() == T![self],
                _ => false,
            });
        Ok(ResolvedPattern {
            node: pattern,
            resolved_paths,
            placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
            ufcs_function_calls,
            contains_self,
        })
    }

    fn resolve(
        &self,
        node: SyntaxNode,
        depth: u32,
        resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
    ) -> Result<(), SsrError> {
        use syntax::ast::AstNode;
        if let Some(path) = ast::Path::cast(node.clone()) {
            if is_self(&path) {
                // Self cannot be resolved like other paths.
                return Ok(());
            }
            // Check if this is an appropriate place in the path to resolve. If the path is
            // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
            // a placeholder, e.g. `a::$b::c`, then we want to resolve `a`.
            if !path_contains_type_arguments(path.qualifier())
                && !self.path_contains_placeholder(&path)
            {
                let resolution = self
                    .resolution_scope
                    .resolve_path(&path)
                    .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
                if self.ok_to_use_path_resolution(&resolution) {
                    resolved_paths.insert(node, ResolvedPath { resolution, depth });
                    return Ok(());
                }
            }
        }
        for node in node.children() {
            self.resolve(node, depth + 1, resolved_paths)?;
        }
        Ok(())
    }

    /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
    /// arguments.
    fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
        if let Some(segment) = path.segment() {
            if let Some(name_ref) = segment.name_ref() {
                if self.placeholders_by_stand_in.contains_key(name_ref.text()) {
                    return true;
                }
            }
        }
        if let Some(qualifier) = path.qualifier() {
            return self.path_contains_placeholder(&qualifier);
        }
        false
    }

    fn ok_to_use_path_resolution(&self, resolution: &hir::PathResolution) -> bool {
        match resolution {
            hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) => {
                if function.has_self_param(self.resolution_scope.scope.db) {
                    // If we don't use this path resolution, then we won't be able to match method
                    // calls. e.g. `Foo::bar($s)` should match `x.bar()`.
                    true
                } else {
                    mark::hit!(replace_associated_trait_default_function_call);
                    false
                }
            }
            hir::PathResolution::AssocItem(_) => {
                // Not a function. Could be a constant or an associated type.
                mark::hit!(replace_associated_trait_constant);
                false
            }
            _ => true,
        }
    }
}

impl<'db> ResolutionScope<'db> {
    pub(crate) fn new(
        sema: &hir::Semantics<'db, ide_db::RootDatabase>,
        resolve_context: FilePosition,
    ) -> ResolutionScope<'db> {
        use syntax::ast::AstNode;
        let file = sema.parse(resolve_context.file_id);
        // Find a node at the requested position, falling back to the whole file.
        let node = file
            .syntax()
            .token_at_offset(resolve_context.offset)
            .left_biased()
            .map(|token| token.parent())
            .unwrap_or_else(|| file.syntax().clone());
        let node = pick_node_for_resolution(node);
        let scope = sema.scope(&node);
        ResolutionScope {
            scope,
            hygiene: hir::Hygiene::new(sema.db, resolve_context.file_id.into()),
            node,
        }
    }

    /// Returns the function in which SSR was invoked, if any.
    pub(crate) fn current_function(&self) -> Option<SyntaxNode> {
        self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN).map(|node| node.clone())
    }

    fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
        let hir_path = hir::Path::from_src(path.clone(), &self.hygiene)?;
        // First try resolving the whole path. This will work for things like
        // `std::collections::HashMap`, but will fail for things like
        // `std::collections::HashMap::new`.
        if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) {
            return Some(resolution);
        }
        // Resolution failed, so try resolving just the qualifier (e.g. `std::collections::HashMap`);
        // if that succeeds, then iterate through the candidates on the resolved type with the
        // provided name.
        let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?;
        if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
            adt.ty(self.scope.db).iterate_path_candidates(
                self.scope.db,
                self.scope.module()?.krate(),
                &self.scope.traits_in_scope(),
                Some(hir_path.segments().last()?.name),
                |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)),
            )
        } else {
            None
        }
    }

    fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
        use syntax::ast::AstNode;
        if let Some(path) = ast::Path::cast(path.clone()) {
            if let Some(qualifier) = path.qualifier() {
                if let Some(resolved_qualifier) = self.resolve_path(&qualifier) {
                    if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
                        return Some(adt.ty(self.scope.db));
                    }
                }
            }
        }
        None
    }
}

fn is_self(path: &ast::Path) -> bool {
    path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false)
}

/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on
/// a statement node, then we can't resolve local variables that were defined in the current scope
/// (only in parent scopes). So we find another node, ideally a child of the statement where local
/// variable resolution is permitted.
fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
    match node.kind() {
        SyntaxKind::EXPR_STMT => {
            if let Some(n) = node.first_child() {
                mark::hit!(cursor_after_semicolon);
                return n;
            }
        }
        SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => {
            if let Some(next) = node.next_sibling() {
                return pick_node_for_resolution(next);
            }
        }
        SyntaxKind::NAME => {
            if let Some(parent) = node.parent() {
                return pick_node_for_resolution(parent);
            }
        }
        _ => {}
    }
    node
}

/// Returns whether `path` or any of its qualifiers contains type arguments.
fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
    if let Some(path) = path {
        if let Some(segment) = path.segment() {
            if segment.generic_arg_list().is_some() {
                mark::hit!(type_arguments_within_path);
                return true;
            }
        }
        return path_contains_type_arguments(path.qualifier());
    }
    false
}
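
// Editorial example (illustrative, not part of the original diff): these two
// helpers determine where resolve() stops. Given `a::B::<i32>::c`, the generic
// arguments in the qualifier mean the whole path is skipped and recursion
// resolves `a::B` instead; given `a::$b::c`, the placeholder means only `a` is
// resolved.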

282
crates/ssr/src/search.rs
Normal file
@ -0,0 +1,282 @@
//! Searching for matches.

use crate::{
    matching,
    resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
    Match, MatchFinder,
};
use base_db::{FileId, FileRange};
use ide_db::{
    defs::Definition,
    search::{Reference, SearchScope},
};
use rustc_hash::FxHashSet;
use syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
use test_utils::mark;

/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
/// same path. e.g. the pattern `foo::Bar` can parse as a path, an expression, a type and a pattern.
/// In each, the usages of `foo::Bar` are the same and we'd like to avoid finding them more than
/// once.
#[derive(Default)]
pub(crate) struct UsageCache {
    usages: Vec<(Definition, Vec<Reference>)>,
}

impl<'db> MatchFinder<'db> {
    /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
    /// replacement impossible, so further processing is required in order to properly nest matches
    /// and remove overlapping matches. This is done in the `nesting` module.
    pub(crate) fn find_matches_for_rule(
        &self,
        rule: &ResolvedRule,
        usage_cache: &mut UsageCache,
        matches_out: &mut Vec<Match>,
    ) {
        if rule.pattern.contains_self {
            // If the pattern contains `self` we restrict the scope of the search to just the
            // current method. No other method can reference the same `self`. This makes the
            // behavior of `self` consistent with other variables.
            if let Some(current_function) = self.resolution_scope.current_function() {
                self.slow_scan_node(&current_function, rule, &None, matches_out);
            }
            return;
        }
        if pick_path_for_usages(&rule.pattern).is_none() {
            self.slow_scan(rule, matches_out);
            return;
        }
        self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
    }

    fn find_matches_for_pattern_tree(
        &self,
        rule: &ResolvedRule,
        pattern: &ResolvedPattern,
        usage_cache: &mut UsageCache,
        matches_out: &mut Vec<Match>,
    ) {
        if let Some(resolved_path) = pick_path_for_usages(pattern) {
            let definition: Definition = resolved_path.resolution.clone().into();
            for reference in self.find_usages(usage_cache, definition) {
                if let Some(node_to_match) = self.find_node_to_match(resolved_path, reference) {
                    if !is_search_permitted_ancestors(&node_to_match) {
                        mark::hit!(use_declaration_with_braces);
                        continue;
                    }
                    self.try_add_match(rule, &node_to_match, &None, matches_out);
                }
            }
        }
    }

    fn find_node_to_match(
        &self,
        resolved_path: &ResolvedPath,
        reference: &Reference,
    ) -> Option<SyntaxNode> {
        let file = self.sema.parse(reference.file_range.file_id);
        let depth = resolved_path.depth as usize;
        let offset = reference.file_range.range.start();
        if let Some(path) =
            self.sema.find_node_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
        {
            self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next()
        } else if let Some(path) =
            self.sema.find_node_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
        {
            // If the pattern contained a path and we found a reference to that path that wasn't
            // itself a path, but was a method call, then we need to adjust how far up to try
            // matching by how deep the path was within a CallExpr. The structure would have been
            // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
            // path was part of a CallExpr because if it wasn't then all that will happen is we'll
            // fail to match, which is the desired behavior.
            const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
            if depth < PATH_DEPTH_IN_CALL_EXPR {
                return None;
            }
            self.sema
                .ancestors_with_macros(path.syntax().clone())
                .skip(depth - PATH_DEPTH_IN_CALL_EXPR)
                .next()
        } else {
            None
        }
    }

    fn find_usages<'a>(
        &self,
        usage_cache: &'a mut UsageCache,
        definition: Definition,
    ) -> &'a [Reference] {
        // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
        // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a
        // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
        // lookups in the case of a cache hit.
        if usage_cache.find(&definition).is_none() {
            let usages = definition.find_usages(&self.sema, Some(self.search_scope()));
            usage_cache.usages.push((definition, usages));
            return &usage_cache.usages.last().unwrap().1;
        }
        usage_cache.find(&definition).unwrap()
    }

    /// Returns the scope within which we want to search. We don't want an unrestricted search
    /// scope, since we don't want to find references in external dependencies.
    fn search_scope(&self) -> SearchScope {
        // FIXME: We should ideally have a test that checks that we edit local roots and not library
        // roots. This probably would require some changes to fixtures, since currently everything
        // seems to get put into a single source root.
        let mut files = Vec::new();
        self.search_files_do(|file_id| {
            files.push(file_id);
        });
        SearchScope::files(&files)
    }

    fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
        self.search_files_do(|file_id| {
            let file = self.sema.parse(file_id);
            let code = file.syntax();
            self.slow_scan_node(code, rule, &None, matches_out);
        })
    }

    fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
        if self.restrict_ranges.is_empty() {
            // Unrestricted search.
            use base_db::SourceDatabaseExt;
            use ide_db::symbol_index::SymbolsDatabase;
            for &root in self.sema.db.local_roots().iter() {
                let sr = self.sema.db.source_root(root);
                for file_id in sr.iter() {
                    callback(file_id);
                }
            }
        } else {
            // Search is restricted, deduplicate file IDs (generally only one).
            let mut files = FxHashSet::default();
            for range in &self.restrict_ranges {
                if files.insert(range.file_id) {
                    callback(range.file_id);
                }
            }
        }
    }

    fn slow_scan_node(
        &self,
        code: &SyntaxNode,
        rule: &ResolvedRule,
        restrict_range: &Option<FileRange>,
        matches_out: &mut Vec<Match>,
    ) {
        if !is_search_permitted(code) {
            return;
        }
        self.try_add_match(rule, &code, restrict_range, matches_out);
        // If we've got a macro call, we already tried matching it pre-expansion, which is the only
        // way to match the whole macro; now try expanding it and matching the expansion.
        if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
            if let Some(expanded) = self.sema.expand(&macro_call) {
                if let Some(tt) = macro_call.token_tree() {
                    // When matching within a macro expansion, we only want to allow matches of
                    // nodes that originated entirely from within the token tree of the macro call.
                    // i.e. we don't want to match something that came from the macro itself.
                    self.slow_scan_node(
                        &expanded,
                        rule,
                        &Some(self.sema.original_range(tt.syntax())),
                        matches_out,
                    );
                }
            }
        }
        for child in code.children() {
            self.slow_scan_node(&child, rule, restrict_range, matches_out);
        }
    }
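
    // Editorial example (illustrative, not part of the original diff): for
    // `foo!(bar(1))` in the searched code, the macro call is first matched
    // unexpanded, so a pattern of `foo!(bar($a))` can match it whole; the
    // expansion is then scanned with the restriction range set to the call's
    // token tree, so a pattern of `bar($a)` can also match inside it.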

    fn try_add_match(
        &self,
        rule: &ResolvedRule,
        code: &SyntaxNode,
        restrict_range: &Option<FileRange>,
        matches_out: &mut Vec<Match>,
    ) {
        if !self.within_range_restrictions(code) {
            mark::hit!(replace_nonpath_within_selection);
            return;
        }
        if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
            matches_out.push(m);
        }
    }

    /// Returns whether `code` is within one of our range restrictions if we have any. No range
    /// restrictions is considered unrestricted and always returns true.
    fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
        if self.restrict_ranges.is_empty() {
            // There is no range restriction.
            return true;
        }
        let node_range = self.sema.original_range(code);
        for range in &self.restrict_ranges {
            if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
                return true;
            }
        }
        false
    }
}

/// Returns whether we support matching within `node` and all of its ancestors.
fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool {
    if let Some(parent) = node.parent() {
        if !is_search_permitted_ancestors(&parent) {
            return false;
        }
    }
    is_search_permitted(node)
}

/// Returns whether we support matching within this kind of node.
fn is_search_permitted(node: &SyntaxNode) -> bool {
    // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar`
    // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
    // However we'll then replace just the part we matched, `bar`. We probably need to instead
    // remove `bar` and insert a new use declaration.
    node.kind() != SyntaxKind::USE
}

impl UsageCache {
    fn find(&mut self, definition: &Definition) -> Option<&[Reference]> {
        // We expect a very small number of cache entries (generally 1), so a linear scan should be
        // fast enough and avoids the need to implement Hash for Definition.
        for (d, refs) in &self.usages {
            if d == definition {
                return Some(refs);
            }
        }
        None
    }
}

/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
/// something that we can find references to. We then somewhat arbitrarily pick the longest path,
/// since a longer path is hopefully rarer, making its usages faster to find.
fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
    // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
    // private to the current module, then we definitely would want to pick them over say a path
    // from std. Possibly we should go further than this and intersect the search scopes for all
    // resolved paths then search only in that scope.
    pattern
        .resolved_paths
        .iter()
        .filter(|(_, p)| {
            !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)))
        })
        .map(|(node, resolved)| (node.text().len(), resolved))
        .max_by(|(a, _), (b, _)| a.cmp(b))
        .map(|(_, resolved)| resolved)
}
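
// Editorial example (illustrative, not part of the original diff): if a pattern
// resolved both `Bar` and `foo::Bar::baz`, the longer node text of the latter
// wins and its usages are the ones searched. If the only resolved path is a
// builtin type such as `i32`, this returns None and find_matches_for_rule falls
// back to the slow scan.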

1174
crates/ssr/src/tests.rs
Normal file
File diff suppressed because it is too large