mirror of https://github.com/rust-lang/rust-analyzer.git
synced 2025-08-04 10:50:15 +00:00

remove a couple of clones

This commit is contained in:
parent 6c9cf51c55
commit 573c47c9ba

32 changed files with 71 additions and 90 deletions
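Every hunk below follows the same basic pattern: a value's final use was written as `value.clone()`, and the clone is dropped so the value is moved instead. A minimal sketch of that pattern, with hypothetical names (not code from this commit):

    struct Config {
        name: String,
    }

    fn build_config(name: String) -> Config {
        // Before: `Config { name: name.clone() }` copied the string even though
        // `name` is never used again afterwards.
        // After: the last use simply moves the value; no extra allocation.
        Config { name }
    }

    fn main() {
        let cfg = build_config(String::from("rust-analyzer"));
        println!("{}", cfg.name);
    }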
@@ -270,7 +270,7 @@ impl GenericParamsCollector {
         let self_ = Name::new_symbol_root(sym::Self_);
         let idx = self.type_or_consts.alloc(
             TypeParamData {
-                name: Some(self_.clone()),
+                name: Some(self_),
                 default: None,
                 provenance: TypeParamProvenance::TraitSelf,
             }
@@ -320,7 +320,7 @@ impl Attr {
     ) -> impl IntoIterator<Item = Self> {
         let is_cfg_attr = self.path.as_ident().is_some_and(|name| *name == sym::cfg_attr);
         if !is_cfg_attr {
-            return smallvec![self.clone()];
+            return smallvec![self];
         }

         let subtree = match self.token_tree_value() {
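Here the method already consumes `self` (it returns `impl IntoIterator<Item = Self>`), so the early return can hand back `self` itself rather than a copy. A hedged sketch of the same shape, with made-up types:

    #[derive(Clone)]
    struct Attr {
        ident: String,
    }

    impl Attr {
        // Takes `self` by value, so returning `self` is a move, not a clone.
        fn expand(self) -> Vec<Attr> {
            if self.ident != "cfg_attr" {
                // Before: `return vec![self.clone()];` -- redundant, `self` is owned here.
                return vec![self];
            }
            // ... real expansion logic elided ...
            Vec::new()
        }
    }

    fn main() {
        let attrs = Attr { ident: "derive".to_string() }.expand();
        println!("{}", attrs.len());
    }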
@@ -515,7 +515,7 @@ fn receiver_is_dispatchable(
         trait_id: to_chalk_trait_id(trait_),
         substitution: Substitution::from_iter(
             Interner,
-            std::iter::once(unsized_self_ty.clone().cast(Interner))
+            std::iter::once(unsized_self_ty.cast(Interner))
                 .chain(placeholder_subst.iter(Interner).skip(1).cloned()),
         ),
     });
@@ -127,7 +127,7 @@ impl InferenceContext<'_> {
         let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
         let prev_closure = mem::replace(&mut self.current_closure, id);
         let prev_ret_ty = mem::replace(&mut self.return_ty, body_ret_ty.clone());
-        let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty.clone()));
+        let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(body_ret_ty));
         let prev_resume_yield_tys = mem::replace(&mut self.resume_yield_tys, resume_yield_tys);

         self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
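Note that only the final use of `body_ret_ty` loses its `.clone()`; the use on the line above keeps one because the value must still be alive for the later call. A small sketch of that rule, with hypothetical names:

    fn register(first: String, second: String) {
        println!("{first} {second}");
    }

    fn main() {
        let ty = String::from("i32");
        // Earlier use: the value is needed again below, so it must be cloned.
        let earlier = ty.clone();
        // Last use: ownership can be transferred, so no clone is required.
        let later = ty;
        register(earlier, later);
    }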
@@ -827,9 +827,9 @@ impl InferenceContext<'_> {
             }
             let assoc = self.resolve_ops_index_output();
             self.resolve_associated_type_with_params(
-                self_ty.clone(),
+                self_ty,
                 assoc,
-                &[index_ty.clone().cast(Interner)],
+                &[index_ty.cast(Interner)],
             )
         } else {
             self.err_ty()
@@ -435,7 +435,7 @@ impl InferenceContext<'_> {
         decl: Option<DeclContext>,
     ) -> Ty {
         let (expectation_type, expectation_lt) = match expected.as_reference() {
-            Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime.clone()),
+            Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime),
             None => {
                 let inner_ty = self.table.new_type_var();
                 let inner_lt = self.table.new_lifetime_var();
@@ -597,7 +597,7 @@ impl InferenceContext<'_> {
         let size = consteval::usize_const(self.db, Some(len as u128), self.owner.krate(self.db));

         let elem_ty = self.table.new_type_var();
-        let array_ty = TyKind::Array(elem_ty.clone(), size).intern(Interner);
+        let array_ty = TyKind::Array(elem_ty, size).intern(Interner);
         Some(array_ty)
     }

@@ -1029,7 +1029,7 @@ impl<'a> InferenceTable<'a> {
         };
         let sized_pred = WhereClause::Implemented(TraitRef {
             trait_id: to_chalk_trait_id(sized),
-            substitution: Substitution::from1(Interner, ty.clone()),
+            substitution: Substitution::from1(Interner, ty),
         });
         let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(sized_pred)).intern(Interner);
         matches!(self.try_obligation(goal), Some(Solution::Unique(_)))
@@ -3685,24 +3685,16 @@ impl GenericDef {
         }

         let source_map = match def {
-            GenericDefId::AdtId(AdtId::EnumId(it)) => {
-                db.enum_signature_with_source_map(it).1.clone()
-            }
-            GenericDefId::AdtId(AdtId::StructId(it)) => {
-                db.struct_signature_with_source_map(it).1.clone()
-            }
-            GenericDefId::AdtId(AdtId::UnionId(it)) => {
-                db.union_signature_with_source_map(it).1.clone()
-            }
+            GenericDefId::AdtId(AdtId::EnumId(it)) => db.enum_signature_with_source_map(it).1,
+            GenericDefId::AdtId(AdtId::StructId(it)) => db.struct_signature_with_source_map(it).1,
+            GenericDefId::AdtId(AdtId::UnionId(it)) => db.union_signature_with_source_map(it).1,
             GenericDefId::ConstId(_) => return,
-            GenericDefId::FunctionId(it) => db.function_signature_with_source_map(it).1.clone(),
-            GenericDefId::ImplId(it) => db.impl_signature_with_source_map(it).1.clone(),
+            GenericDefId::FunctionId(it) => db.function_signature_with_source_map(it).1,
+            GenericDefId::ImplId(it) => db.impl_signature_with_source_map(it).1,
             GenericDefId::StaticId(_) => return,
-            GenericDefId::TraitAliasId(it) => {
-                db.trait_alias_signature_with_source_map(it).1.clone()
-            }
-            GenericDefId::TraitId(it) => db.trait_signature_with_source_map(it).1.clone(),
-            GenericDefId::TypeAliasId(it) => db.type_alias_signature_with_source_map(it).1.clone(),
+            GenericDefId::TraitAliasId(it) => db.trait_alias_signature_with_source_map(it).1,
+            GenericDefId::TraitId(it) => db.trait_signature_with_source_map(it).1,
+            GenericDefId::TypeAliasId(it) => db.type_alias_signature_with_source_map(it).1,
         };

         expr_store_diagnostics(db, acc, &source_map);
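The match arms shrink to one-liners once the `.clone()` on the returned `.1` is gone: each query call already yields an owned tuple, so its second field can be moved out directly. A hedged sketch of that shape (hypothetical query and types, not the real rust-analyzer API):

    use std::sync::Arc;

    struct Signature;
    struct SourceMap;

    // Stand-in for a query that returns an owned pair.
    fn signature_with_source_map() -> (Arc<Signature>, Arc<SourceMap>) {
        (Arc::new(Signature), Arc::new(SourceMap))
    }

    fn source_map_only() -> Arc<SourceMap> {
        // Before: `signature_with_source_map().1.clone()` cloned a field of a
        // temporary we already own. After: just move the field out of the tuple.
        signature_with_source_map().1
    }

    fn main() {
        let _map = source_map_only();
    }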
@@ -3802,7 +3794,7 @@ impl GenericSubstitution {
             container_params
                 .chain(self_params)
                 .filter_map(|(ty, name)| {
-                    Some((name?.symbol().clone(), Type { ty: ty.clone(), env: self.env.clone() }))
+                    Some((name?.symbol().clone(), Type { ty, env: self.env.clone() }))
                 })
                 .collect()
         }
@@ -926,7 +926,7 @@ impl<'db> SemanticsImpl<'db> {
         token: InRealFile<SyntaxToken>,
         mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
     ) -> Option<T> {
-        self.descend_into_macros_impl(token.clone(), &mut cb)
+        self.descend_into_macros_impl(token, &mut cb)
     }

     /// Descends the token into expansions, returning the tokens that matches the input
@@ -958,17 +958,13 @@ impl<'db> SemanticsImpl<'db> {
         let text = token.text();
         let kind = token.kind();
         if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
-            self.descend_into_macros_breakable(
-                token.clone(),
-                |InFile { value, file_id: _ }, _ctx| {
-                    let mapped_kind = value.kind();
-                    let any_ident_match =
-                        || kind.is_any_identifier() && value.kind().is_any_identifier();
-                    let matches =
-                        (kind == mapped_kind || any_ident_match()) && text == value.text();
-                    if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
-                },
-            )
+            self.descend_into_macros_breakable(token, |InFile { value, file_id: _ }, _ctx| {
+                let mapped_kind = value.kind();
+                let any_ident_match =
+                    || kind.is_any_identifier() && value.kind().is_any_identifier();
+                let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
+                if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
+            })
         } else {
             None
         }
@@ -559,7 +559,7 @@ impl SourceToDefCtx<'_, '_> {
             let item = match ast::Item::cast(value.clone()) {
                 Some(it) => it,
                 None => {
-                    let variant = ast::Variant::cast(value.clone())?;
+                    let variant = ast::Variant::cast(value)?;
                     return this
                         .enum_variant_to_def(InFile::new(file_id, &variant))
                         .map(Into::into);
@@ -1431,7 +1431,7 @@ impl SourceAnalyzer {
     }

     fn ty_of_expr(&self, expr: ast::Expr) -> Option<&Ty> {
-        self.infer()?.type_of_expr_or_pat(self.expr_id(expr.clone())?)
+        self.infer()?.type_of_expr_or_pat(self.expr_id(expr)?)
     }
 }

@@ -196,7 +196,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
     // Wrap all tails in `Some(...)`
     let none_path = mapless_make.expr_path(mapless_make.ident_path("None"));
     let some_path = mapless_make.expr_path(mapless_make.ident_path("Some"));
-    for_each_tail_expr(&ast::Expr::BlockExpr(closure_body.clone()), &mut |e| {
+    for_each_tail_expr(&ast::Expr::BlockExpr(closure_body), &mut |e| {
         let e = match e {
             ast::Expr::BreakExpr(e) => e.expr(),
             ast::Expr::ReturnExpr(e) => e.expr(),
@@ -80,7 +80,7 @@ pub(crate) fn convert_from_to_tryfrom(acc: &mut Assists, ctx: &AssistContext<'_>
     let from_fn_name = builder.make_mut(from_fn_name);
     let tail_expr = builder.make_mut(tail_expr);
     let return_exprs = return_exprs.map(|r| builder.make_mut(r)).collect_vec();
-    let associated_items = builder.make_mut(associated_items).clone();
+    let associated_items = builder.make_mut(associated_items);

     ted::replace(
         trait_ty.syntax(),
@@ -392,14 +392,14 @@ impl FunctionBuilder {
             // Focus the return type if there is one
             match ret_type {
                 Some(ret_type) => {
-                    edit.add_placeholder_snippet(cap, ret_type.clone());
+                    edit.add_placeholder_snippet(cap, ret_type);
                 }
                 None => {
-                    edit.add_placeholder_snippet(cap, tail_expr.clone());
+                    edit.add_placeholder_snippet(cap, tail_expr);
                 }
             }
         } else {
-            edit.add_placeholder_snippet(cap, tail_expr.clone());
+            edit.add_placeholder_snippet(cap, tail_expr);
         }
     }

@@ -116,7 +116,7 @@ pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) -
         (Some(attr), Some(ident))
             if attr.simple_name().map(|v| v.eq("derive")).unwrap_or_default() =>
         {
-            Some(attempt_get_derive(attr.clone(), ident))
+            Some(attempt_get_derive(attr, ident))
         }

         (Some(attr), _) => Some(WrapUnwrapOption::WrapAttr(attr)),
@@ -128,7 +128,7 @@ pub(crate) fn wrap_unwrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>) -
         NodeOrToken::Node(node) => ast::Attr::cast(node).map(WrapUnwrapOption::WrapAttr),
         NodeOrToken::Token(ident) if ident.kind() == syntax::T![ident] => {
             let attr = ident.parent_ancestors().find_map(ast::Attr::cast)?;
-            Some(attempt_get_derive(attr.clone(), ident))
+            Some(attempt_get_derive(attr, ident))
         }
         _ => None,
     }
@@ -233,7 +233,7 @@ fn wrap_cfg_attr(acc: &mut Assists, ctx: &AssistContext<'_>, attr: ast::Attr) ->
     if let Some(meta) = attr.meta() {
         if let (Some(eq), Some(expr)) = (meta.eq_token(), meta.expr()) {
             raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));
-            raw_tokens.push(NodeOrToken::Token(eq.clone()));
+            raw_tokens.push(NodeOrToken::Token(eq));
             raw_tokens.push(NodeOrToken::Token(make::tokens::whitespace(" ")));

             expr.syntax().descendants_with_tokens().for_each(|it| {
@@ -387,11 +387,7 @@ fn expand(

     match (
         sema.expand_macro_call(&actual_macro_call),
-        sema.speculative_expand_macro_call(
-            &actual_macro_call,
-            &speculative_args,
-            fake_ident_token.clone(),
-        ),
+        sema.speculative_expand_macro_call(&actual_macro_call, &speculative_args, fake_ident_token),
     ) {
         // successful expansions
         (Some(actual_expansion), Some((fake_expansion, fake_mapped_tokens))) => {
@@ -86,7 +86,7 @@ pub fn items_with_name_in_module<T>(
     let local_query = match name {
         NameToImport::Prefix(exact_name, case_sensitive)
         | NameToImport::Exact(exact_name, case_sensitive) => {
-            let mut local_query = symbol_index::Query::new(exact_name.clone());
+            let mut local_query = symbol_index::Query::new(exact_name);
             local_query.assoc_search_mode(assoc_item_search);
             if prefix {
                 local_query.prefix();
@@ -99,7 +99,7 @@ pub fn items_with_name_in_module<T>(
             local_query
         }
         NameToImport::Fuzzy(fuzzy_search_string, case_sensitive) => {
-            let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone());
+            let mut local_query = symbol_index::Query::new(fuzzy_search_string);
             local_query.fuzzy();
             local_query.assoc_search_mode(assoc_item_search);

@@ -195,7 +195,7 @@ fn remove_unnecessary_wrapper(
     let db = ctx.sema.db;
     let root = db.parse_or_expand(expr_ptr.file_id);
     let expr = expr_ptr.value.to_node(&root);
-    let expr = ctx.sema.original_ast_node(expr.clone())?;
+    let expr = ctx.sema.original_ast_node(expr)?;

     let Expr::CallExpr(call_expr) = expr else {
         return None;
@@ -163,28 +163,28 @@ impl Symbol {

     pub fn integer(i: usize) -> Self {
         match i {
-            0 => symbols::INTEGER_0.clone(),
-            1 => symbols::INTEGER_1.clone(),
-            2 => symbols::INTEGER_2.clone(),
-            3 => symbols::INTEGER_3.clone(),
-            4 => symbols::INTEGER_4.clone(),
-            5 => symbols::INTEGER_5.clone(),
-            6 => symbols::INTEGER_6.clone(),
-            7 => symbols::INTEGER_7.clone(),
-            8 => symbols::INTEGER_8.clone(),
-            9 => symbols::INTEGER_9.clone(),
-            10 => symbols::INTEGER_10.clone(),
-            11 => symbols::INTEGER_11.clone(),
-            12 => symbols::INTEGER_12.clone(),
-            13 => symbols::INTEGER_13.clone(),
-            14 => symbols::INTEGER_14.clone(),
-            15 => symbols::INTEGER_15.clone(),
+            0 => symbols::INTEGER_0,
+            1 => symbols::INTEGER_1,
+            2 => symbols::INTEGER_2,
+            3 => symbols::INTEGER_3,
+            4 => symbols::INTEGER_4,
+            5 => symbols::INTEGER_5,
+            6 => symbols::INTEGER_6,
+            7 => symbols::INTEGER_7,
+            8 => symbols::INTEGER_8,
+            9 => symbols::INTEGER_9,
+            10 => symbols::INTEGER_10,
+            11 => symbols::INTEGER_11,
+            12 => symbols::INTEGER_12,
+            13 => symbols::INTEGER_13,
+            14 => symbols::INTEGER_14,
+            15 => symbols::INTEGER_15,
             i => Symbol::intern(&format!("{i}")),
         }
     }

     pub fn empty() -> Self {
-        symbols::__empty.clone()
+        symbols::__empty
     }

     #[inline]
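Dropping the `.clone()` on `symbols::INTEGER_0` and friends only compiles if each arm can produce an owned value, for example if the names are `const` items (every mention is inlined as a fresh instance) or if the type is `Copy`; which of these holds for rust-analyzer's `symbols` module is not visible in this hunk. A self-contained sketch of the `const` case, with hypothetical constants:

    #[derive(Clone, Debug)]
    struct Symbol(&'static str);

    mod symbols {
        use super::Symbol;
        // `const` items are inlined at each use site, so `symbols::ZERO` below
        // already yields an owned value and needs no `.clone()`.
        pub const ZERO: Symbol = Symbol("0");
        pub const ONE: Symbol = Symbol("1");
    }

    fn small_integer(i: usize) -> Option<Symbol> {
        match i {
            0 => Some(symbols::ZERO),
            1 => Some(symbols::ONE),
            _ => None,
        }
    }

    fn main() {
        println!("{:?}", small_integer(1));
    }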
@@ -292,7 +292,7 @@ impl ProjectFolders {
         };

         let file_set_roots = vec![VfsPath::from(ratoml_path.to_owned())];
-        let entry = vfs::loader::Entry::Files(vec![ratoml_path.to_owned()]);
+        let entry = vfs::loader::Entry::Files(vec![ratoml_path]);

         res.watch.push(res.load.len());
         res.load.push(entry);
@@ -25,7 +25,7 @@ pub(crate) fn inject_cargo_package_env(env: &mut Env, package: &PackageData) {
     env.set("CARGO_PKG_VERSION_PATCH", package.version.patch.to_string());
     env.set("CARGO_PKG_VERSION_PRE", package.version.pre.to_string());

-    env.set("CARGO_PKG_AUTHORS", package.authors.join(":").clone());
+    env.set("CARGO_PKG_AUTHORS", package.authors.join(":"));

     env.set("CARGO_PKG_NAME", package.name.clone());
     env.set("CARGO_PKG_DESCRIPTION", package.description.as_deref().unwrap_or_default());
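This one is slightly different: `join(":")` already builds a brand-new `String`, so the `.clone()` duplicated a temporary that nothing else could ever observe. A minimal sketch:

    fn main() {
        let authors = vec!["alice".to_string(), "bob".to_string()];

        // Before: `authors.join(":").clone()` allocated the joined string and then
        // immediately copied it; a clone of a temporary can never be shared.
        // After: use the freshly built `String` directly.
        let joined = authors.join(":");

        println!("{joined}");
    }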
@@ -1370,7 +1370,7 @@ fn detached_file_to_crate_graph(
         Edition::CURRENT,
         display_name.clone(),
         None,
-        cfg_options.clone(),
+        cfg_options,
         None,
         Env::default(),
         CrateOrigin::Local {
@@ -210,7 +210,7 @@ pub(crate) fn query_group_impl(
         .into_iter()
         .filter(|fn_arg| matches!(fn_arg, FnArg::Typed(_)))
         .map(|fn_arg| match fn_arg {
-            FnArg::Typed(pat_type) => pat_type.clone(),
+            FnArg::Typed(pat_type) => pat_type,
             FnArg::Receiver(_) => unreachable!("this should have been filtered out"),
         })
         .collect::<Vec<syn::PatType>>();
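Because the chain starts with `.into_iter()`, the closure receives each `FnArg` by value, so the match arm can move the inner `PatType` out instead of cloning it. A self-contained sketch of moving out of a matched enum in an iterator chain (a made-up enum, not syn's types):

    #[derive(Debug)]
    enum Arg {
        Receiver,
        Typed(String),
    }

    fn typed_args(args: Vec<Arg>) -> Vec<String> {
        args.into_iter() // yields owned `Arg` values
            .filter(|arg| matches!(arg, Arg::Typed(_)))
            .map(|arg| match arg {
                // The arm owns `arg`, so the inner value can be moved out directly.
                Arg::Typed(ty) => ty,
                Arg::Receiver => unreachable!("filtered out above"),
            })
            .collect()
    }

    fn main() {
        let args = vec![Arg::Receiver, Arg::Typed("u32".into())];
        println!("{:?}", typed_args(args));
    }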
@@ -30,7 +30,7 @@ impl flags::UnresolvedReferences {
         let root =
             vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize();
         let config = crate::config::Config::new(
-            root.clone(),
+            root,
             lsp_types::ClientCapabilities::default(),
             vec![],
             None,
@@ -1183,7 +1183,7 @@ impl ConfigChange {
         source_root_map: Arc<FxHashMap<SourceRootId, SourceRootId>>,
     ) {
         assert!(self.source_map_change.is_none());
-        self.source_map_change = Some(source_root_map.clone());
+        self.source_map_change = Some(source_root_map);
     }
 }

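Even when the clone is cheap, as with the `Arc` here (cloning only bumps a reference count), the last use can still hand over the value it already owns. A hedged sketch with a hypothetical setter, not the real rust-analyzer types:

    use std::collections::HashMap;
    use std::sync::Arc;

    #[derive(Default)]
    struct ConfigChange {
        source_map_change: Option<Arc<HashMap<u32, u32>>>,
    }

    impl ConfigChange {
        fn set_source_root_map(&mut self, source_root_map: Arc<HashMap<u32, u32>>) {
            assert!(self.source_map_change.is_none());
            // The parameter is owned and not used again, so it is moved into the
            // field; `source_root_map.clone()` would bump the Arc count only to
            // drop the original immediately afterwards.
            self.source_map_change = Some(source_root_map);
        }
    }

    fn main() {
        let mut change = ConfigChange::default();
        change.set_source_root_map(Arc::new(HashMap::new()));
        assert!(change.source_map_change.is_some());
    }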
@@ -511,7 +511,7 @@ impl GlobalState {

         self.fetch_workspaces_queue.request_op(
             format!("workspace vfs file change: {path}"),
-            FetchWorkspaceRequest { path: Some(path.to_owned()), force_crate_graph_reload },
+            FetchWorkspaceRequest { path: Some(path), force_crate_graph_reload },
         );
     }
 }
@@ -309,7 +309,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
     let task = move || -> std::result::Result<(), Cancelled> {
         if invocation_strategy_once {
             let saved_file = vfs_path.as_path().map(|p| p.to_owned());
-            world.flycheck[0].restart_workspace(saved_file.clone());
+            world.flycheck[0].restart_workspace(saved_file);
         }

         let target = TargetSpec::for_file(&world, file_id)?.and_then(|it| {
@@ -2210,7 +2210,7 @@ fn runnable_action_links(
         let label = update_test.label();
         if let Some(r) = to_proto::make_update_runnable(&r, update_test) {
             let update_command = to_proto::command::run_single(&r, label.unwrap().as_str());
-            group.commands.push(to_command_link(update_command, r.label.clone()));
+            group.commands.push(to_command_link(update_command, r.label));
         }
     }

@@ -82,11 +82,8 @@ impl RatomlTest {
         }

         Url::parse(
-            format!(
-                "file://{}",
-                path.into_string().to_owned().replace("C:\\", "/c:/").replace('\\', "/")
-            )
-            .as_str(),
+            format!("file://{}", path.into_string().replace("C:\\", "/c:/").replace('\\', "/"))
+                .as_str(),
         )
         .unwrap()
     }
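Here the redundancy was a `.to_owned()` rather than a `.clone()`: `into_string()` already returns an owned `String`, so the extra `.to_owned()` copied the buffer before `replace` read it. A minimal sketch using the standard library's `PathBuf` (the real code uses a different path type):

    fn file_url(path: std::path::PathBuf) -> String {
        // `into_os_string().into_string()` already yields an owned `String`;
        // tacking `.to_owned()` onto it would copy the buffer for no benefit.
        let path = path.into_os_string().into_string().unwrap_or_default();
        format!("file://{}", path.replace('\\', "/"))
    }

    fn main() {
        println!("{}", file_url(std::path::PathBuf::from("C:\\tmp\\project")));
    }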
@@ -202,7 +202,7 @@ impl Project<'_> {
         }

         let mut config = Config::new(
-            tmp_dir_path.clone(),
+            tmp_dir_path,
             lsp_types::ClientCapabilities {
                 workspace: Some(lsp_types::WorkspaceClientCapabilities {
                     did_change_watched_files: Some(
@@ -391,7 +391,7 @@ fn report_intersecting_changes(
 fn to_owning_node(element: &SyntaxElement) -> SyntaxNode {
     match element {
         SyntaxElement::Node(node) => node.clone(),
-        SyntaxElement::Token(token) => token.parent().unwrap().clone(),
+        SyntaxElement::Token(token) => token.parent().unwrap(),
     }
 }

@@ -353,7 +353,7 @@ impl ChangeFixture {
             )]),
             CrateOrigin::Local { repo: None, name: None },
             true,
-            proc_macro_cwd.clone(),
+            proc_macro_cwd,
             crate_ws_data,
         );
         proc_macros.insert(proc_macros_crate, Ok(proc_macro));