Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-27 04:19:13 +00:00.
Commit: remove redundant clones
This commit is contained in:
parent f595e60b6d
commit 476e10e961
23 changed files with 34 additions and 34 deletions
|
@ -1610,7 +1610,7 @@ impl ExprCollector<'_> {
|
||||||
|name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
|
|name| self.alloc_expr_desugared(Expr::Path(Path::from(name))),
|
||||||
|name, span| {
|
|name, span| {
|
||||||
if let Some(span) = span {
|
if let Some(span) = span {
|
||||||
mappings.push((span, name.clone()))
|
mappings.push((span, name))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
|
|
|
@ -1397,7 +1397,7 @@ impl DefCollector<'_> {
|
||||||
always!(krate == loc.def.krate);
|
always!(krate == loc.def.krate);
|
||||||
DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate)
|
DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate)
|
||||||
}
|
}
|
||||||
_ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()),
|
_ => DefDiagnostic::macro_error(module_id, loc.kind, err.to_string()),
|
||||||
};
|
};
|
||||||
|
|
||||||
self.def_map.diagnostics.push(diag);
|
self.def_map.diagnostics.push(diag);
|
||||||
|
|
|
@ -609,7 +609,7 @@ fn render_const_scalar(
|
||||||
}
|
}
|
||||||
hir_def::AdtId::EnumId(e) => {
|
hir_def::AdtId::EnumId(e) => {
|
||||||
let Some((var_id, var_layout)) =
|
let Some((var_id, var_layout)) =
|
||||||
detect_variant_from_bytes(&layout, f.db, trait_env.clone(), b, e)
|
detect_variant_from_bytes(&layout, f.db, trait_env, b, e)
|
||||||
else {
|
else {
|
||||||
return f.write_str("<failed-to-detect-variant>");
|
return f.write_str("<failed-to-detect-variant>");
|
||||||
};
|
};
|
||||||
|
|
|
@ -738,7 +738,7 @@ impl<'a> InferenceContext<'a> {
|
||||||
result.tuple_field_access_types = tuple_field_accesses_rev
|
result.tuple_field_access_types = tuple_field_accesses_rev
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst.clone())))
|
.map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst)))
|
||||||
.collect();
|
.collect();
|
||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
|
@ -130,7 +130,7 @@ impl HirPlace {
|
||||||
ctx.owner.module(ctx.db.upcast()).krate(),
|
ctx.owner.module(ctx.db.upcast()).krate(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
ty.clone()
|
ty
|
||||||
}
|
}
|
||||||
|
|
||||||
fn capture_kind_of_truncated_place(
|
fn capture_kind_of_truncated_place(
|
||||||
|
@ -245,7 +245,7 @@ pub(crate) struct CapturedItemWithoutTy {
|
||||||
|
|
||||||
impl CapturedItemWithoutTy {
|
impl CapturedItemWithoutTy {
|
||||||
fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
|
fn with_ty(self, ctx: &mut InferenceContext<'_>) -> CapturedItem {
|
||||||
let ty = self.place.ty(ctx).clone();
|
let ty = self.place.ty(ctx);
|
||||||
let ty = match &self.kind {
|
let ty = match &self.kind {
|
||||||
CaptureKind::ByValue => ty,
|
CaptureKind::ByValue => ty,
|
||||||
CaptureKind::ByRef(bk) => {
|
CaptureKind::ByRef(bk) => {
|
||||||
|
@ -396,7 +396,7 @@ impl InferenceContext<'_> {
|
||||||
|
|
||||||
fn consume_place(&mut self, place: HirPlace, span: MirSpan) {
|
fn consume_place(&mut self, place: HirPlace, span: MirSpan) {
|
||||||
if self.is_upvar(&place) {
|
if self.is_upvar(&place) {
|
||||||
let ty = place.ty(self).clone();
|
let ty = place.ty(self);
|
||||||
let kind = if self.is_ty_copy(ty) {
|
let kind = if self.is_ty_copy(ty) {
|
||||||
CaptureKind::ByRef(BorrowKind::Shared)
|
CaptureKind::ByRef(BorrowKind::Shared)
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -978,7 +978,7 @@ impl InferenceContext<'_> {
|
||||||
.push(callee_ty.clone())
|
.push(callee_ty.clone())
|
||||||
.push(TyBuilder::tuple_with(params.iter().cloned()))
|
.push(TyBuilder::tuple_with(params.iter().cloned()))
|
||||||
.build();
|
.build();
|
||||||
self.write_method_resolution(tgt_expr, func, subst.clone());
|
self.write_method_resolution(tgt_expr, func, subst);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -336,7 +336,7 @@ impl InferenceContext<'_> {
|
||||||
&Pat::Lit(expr) => {
|
&Pat::Lit(expr) => {
|
||||||
// Don't emit type mismatches again, the expression lowering already did that.
|
// Don't emit type mismatches again, the expression lowering already did that.
|
||||||
let ty = self.infer_lit_pat(expr, &expected);
|
let ty = self.infer_lit_pat(expr, &expected);
|
||||||
self.write_pat_ty(pat, ty.clone());
|
self.write_pat_ty(pat, ty);
|
||||||
return self.pat_ty_after_adjustment(pat);
|
return self.pat_ty_after_adjustment(pat);
|
||||||
}
|
}
|
||||||
Pat::Box { inner } => match self.resolve_boxed_box() {
|
Pat::Box { inner } => match self.resolve_boxed_box() {
|
||||||
|
|
|
@ -164,7 +164,7 @@ fn layout_of_simd_ty(
|
||||||
};
|
};
|
||||||
|
|
||||||
// Compute the ABI of the element type:
|
// Compute the ABI of the element type:
|
||||||
let e_ly = db.layout_of_ty(e_ty, env.clone())?;
|
let e_ly = db.layout_of_ty(e_ty, env)?;
|
||||||
let Abi::Scalar(e_abi) = e_ly.abi else {
|
let Abi::Scalar(e_abi) = e_ly.abi else {
|
||||||
return Err(LayoutError::Unknown);
|
return Err(LayoutError::Unknown);
|
||||||
};
|
};
|
||||||
|
@ -204,17 +204,17 @@ pub fn layout_of_ty_query(
|
||||||
};
|
};
|
||||||
let cx = LayoutCx { target: &target };
|
let cx = LayoutCx { target: &target };
|
||||||
let dl = &*cx.current_data_layout();
|
let dl = &*cx.current_data_layout();
|
||||||
let ty = normalize(db, trait_env.clone(), ty.clone());
|
let ty = normalize(db, trait_env.clone(), ty);
|
||||||
let result = match ty.kind(Interner) {
|
let result = match ty.kind(Interner) {
|
||||||
TyKind::Adt(AdtId(def), subst) => {
|
TyKind::Adt(AdtId(def), subst) => {
|
||||||
if let hir_def::AdtId::StructId(s) = def {
|
if let hir_def::AdtId::StructId(s) = def {
|
||||||
let data = db.struct_data(*s);
|
let data = db.struct_data(*s);
|
||||||
let repr = data.repr.unwrap_or_default();
|
let repr = data.repr.unwrap_or_default();
|
||||||
if repr.simd() {
|
if repr.simd() {
|
||||||
return layout_of_simd_ty(db, *s, subst, trait_env.clone(), &target);
|
return layout_of_simd_ty(db, *s, subst, trait_env, &target);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
return db.layout_of_adt(*def, subst.clone(), trait_env.clone());
|
return db.layout_of_adt(*def, subst.clone(), trait_env);
|
||||||
}
|
}
|
||||||
TyKind::Scalar(s) => match s {
|
TyKind::Scalar(s) => match s {
|
||||||
chalk_ir::Scalar::Bool => Layout::scalar(
|
chalk_ir::Scalar::Bool => Layout::scalar(
|
||||||
|
@ -280,7 +280,7 @@ pub fn layout_of_ty_query(
|
||||||
}
|
}
|
||||||
TyKind::Array(element, count) => {
|
TyKind::Array(element, count) => {
|
||||||
let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
|
let count = try_const_usize(db, &count).ok_or(LayoutError::HasErrorConst)? as u64;
|
||||||
let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
|
let element = db.layout_of_ty(element.clone(), trait_env)?;
|
||||||
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
|
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
|
||||||
|
|
||||||
let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
|
let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) {
|
||||||
|
@ -303,7 +303,7 @@ pub fn layout_of_ty_query(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
TyKind::Slice(element) => {
|
TyKind::Slice(element) => {
|
||||||
let element = db.layout_of_ty(element.clone(), trait_env.clone())?;
|
let element = db.layout_of_ty(element.clone(), trait_env)?;
|
||||||
Layout {
|
Layout {
|
||||||
variants: Variants::Single { index: struct_variant_idx() },
|
variants: Variants::Single { index: struct_variant_idx() },
|
||||||
fields: FieldsShape::Array { stride: element.size, count: 0 },
|
fields: FieldsShape::Array { stride: element.size, count: 0 },
|
||||||
|
@ -345,7 +345,7 @@ pub fn layout_of_ty_query(
|
||||||
}))
|
}))
|
||||||
.intern(Interner);
|
.intern(Interner);
|
||||||
}
|
}
|
||||||
unsized_part = normalize(db, trait_env.clone(), unsized_part);
|
unsized_part = normalize(db, trait_env, unsized_part);
|
||||||
let metadata = match unsized_part.kind(Interner) {
|
let metadata = match unsized_part.kind(Interner) {
|
||||||
TyKind::Slice(_) | TyKind::Str => {
|
TyKind::Slice(_) | TyKind::Str => {
|
||||||
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
|
scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false))
|
||||||
|
@ -384,7 +384,7 @@ pub fn layout_of_ty_query(
|
||||||
match impl_trait_id {
|
match impl_trait_id {
|
||||||
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
|
crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
|
||||||
let infer = db.infer(func.into());
|
let infer = db.infer(func.into());
|
||||||
return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env.clone());
|
return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env);
|
||||||
}
|
}
|
||||||
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
|
crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
|
||||||
return Err(LayoutError::NotImplemented)
|
return Err(LayoutError::NotImplemented)
|
||||||
|
|
|
@ -1350,7 +1350,7 @@ pub(crate) fn resolve_indexing_op(
|
||||||
ty: Canonical<Ty>,
|
ty: Canonical<Ty>,
|
||||||
index_trait: TraitId,
|
index_trait: TraitId,
|
||||||
) -> Option<ReceiverAdjustments> {
|
) -> Option<ReceiverAdjustments> {
|
||||||
let mut table = InferenceTable::new(db, env.clone());
|
let mut table = InferenceTable::new(db, env);
|
||||||
let ty = table.instantiate_canonical(ty);
|
let ty = table.instantiate_canonical(ty);
|
||||||
let deref_chain = autoderef_method_receiver(&mut table, ty);
|
let deref_chain = autoderef_method_receiver(&mut table, ty);
|
||||||
for (ty, adj) in deref_chain {
|
for (ty, adj) in deref_chain {
|
||||||
|
|
|
@ -1533,7 +1533,7 @@ impl Evaluator<'_> {
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
TyKind::Dyn(_) => {
|
TyKind::Dyn(_) => {
|
||||||
let vtable = self.vtable_map.id(current_ty.clone());
|
let vtable = self.vtable_map.id(current_ty);
|
||||||
let mut r = Vec::with_capacity(16);
|
let mut r = Vec::with_capacity(16);
|
||||||
let addr = addr.get(self)?;
|
let addr = addr.get(self)?;
|
||||||
r.extend(addr.iter().copied());
|
r.extend(addr.iter().copied());
|
||||||
|
|
|
@ -1093,7 +1093,7 @@ impl Field {
|
||||||
|
|
||||||
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
|
pub fn layout(&self, db: &dyn HirDatabase) -> Result<Layout, LayoutError> {
|
||||||
db.layout_of_ty(
|
db.layout_of_ty(
|
||||||
self.ty(db).ty.clone(),
|
self.ty(db).ty,
|
||||||
db.trait_environment(match hir_def::VariantId::from(self.parent) {
|
db.trait_environment(match hir_def::VariantId::from(self.parent) {
|
||||||
hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id),
|
hir_def::VariantId::EnumVariantId(id) => GenericDefId::EnumVariantId(id),
|
||||||
hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
|
hir_def::VariantId::StructId(id) => GenericDefId::AdtId(id.into()),
|
||||||
|
|
|
@ -428,7 +428,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||||
if let Some(original_string) = ast::String::cast(original_token.clone()) {
|
if let Some(original_string) = ast::String::cast(original_token.clone()) {
|
||||||
if let Some(quote) = original_string.open_quote_text_range() {
|
if let Some(quote) = original_string.open_quote_text_range() {
|
||||||
return self
|
return self
|
||||||
.descend_into_macros(DescendPreference::SameText, original_token.clone())
|
.descend_into_macros(DescendPreference::SameText, original_token)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.find_map(|token| {
|
.find_map(|token| {
|
||||||
self.resolve_offset_in_format_args(
|
self.resolve_offset_in_format_args(
|
||||||
|
|
|
@ -301,7 +301,7 @@ fn replace_usages(
|
||||||
|
|
||||||
// add imports across modules where needed
|
// add imports across modules where needed
|
||||||
if let Some((import_scope, path)) = import_data {
|
if let Some((import_scope, path)) = import_data {
|
||||||
let scope = match import_scope.clone() {
|
let scope = match import_scope {
|
||||||
ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
|
ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
|
||||||
ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
|
ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
|
||||||
ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
|
ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
|
||||||
|
@ -329,7 +329,7 @@ fn augment_references_with_imports(
|
||||||
references
|
references
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|FileReference { range, name, .. }| {
|
.filter_map(|FileReference { range, name, .. }| {
|
||||||
let name = name.clone().into_name_like()?;
|
let name = name.into_name_like()?;
|
||||||
ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module()))
|
ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module()))
|
||||||
})
|
})
|
||||||
.map(|(range, name, ref_module)| {
|
.map(|(range, name, ref_module)| {
|
||||||
|
|
|
@ -190,7 +190,7 @@ fn augment_references_with_imports(
|
||||||
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
|
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module()))
|
||||||
})
|
})
|
||||||
.map(|(name, ref_module)| {
|
.map(|(name, ref_module)| {
|
||||||
let new_name = edit.make_mut(name.clone());
|
let new_name = edit.make_mut(name);
|
||||||
|
|
||||||
// if the referenced module is not the same as the target one and has not been seen before, add an import
|
// if the referenced module is not the same as the target one and has not been seen before, add an import
|
||||||
let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
|
let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
|
||||||
|
|
|
@ -147,7 +147,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
|
||||||
None => {
|
None => {
|
||||||
let name = &strukt_name.to_string();
|
let name = &strukt_name.to_string();
|
||||||
let params = strukt.generic_param_list();
|
let params = strukt.generic_param_list();
|
||||||
let ty_params = params.clone();
|
let ty_params = params;
|
||||||
let where_clause = strukt.where_clause();
|
let where_clause = strukt.where_clause();
|
||||||
|
|
||||||
let impl_def = make::impl_(
|
let impl_def = make::impl_(
|
||||||
|
|
|
@ -432,7 +432,7 @@ fn get_fn_target(
|
||||||
}
|
}
|
||||||
None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
|
None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
|
||||||
};
|
};
|
||||||
Some((target.clone(), file))
|
Some((target, file))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_method_target(
|
fn get_method_target(
|
||||||
|
|
|
@ -47,7 +47,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>
|
||||||
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
|
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
|
||||||
|
|
||||||
let trait_ = impl_def.trait_()?;
|
let trait_ = impl_def.trait_()?;
|
||||||
if let ast::Type::PathType(trait_path) = trait_.clone() {
|
if let ast::Type::PathType(trait_path) = trait_ {
|
||||||
let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
|
let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?;
|
||||||
let scope = ctx.sema.scope(trait_path.syntax())?;
|
let scope = ctx.sema.scope(trait_path.syntax())?;
|
||||||
if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
|
if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? {
|
||||||
|
|
|
@ -55,7 +55,7 @@ pub fn items_with_name<'a>(
|
||||||
local_query.fuzzy();
|
local_query.fuzzy();
|
||||||
local_query.assoc_search_mode(assoc_item_search);
|
local_query.assoc_search_mode(assoc_item_search);
|
||||||
|
|
||||||
let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
|
let mut external_query = import_map::Query::new(fuzzy_search_string)
|
||||||
.fuzzy()
|
.fuzzy()
|
||||||
.assoc_search_mode(assoc_item_search);
|
.assoc_search_mode(assoc_item_search);
|
||||||
|
|
||||||
|
|
|
@ -159,7 +159,7 @@ impl<'a> PathTransform<'a> {
|
||||||
.for_each(|(k, v)| match (k.split(db), v) {
|
.for_each(|(k, v)| match (k.split(db), v) {
|
||||||
(Either::Right(k), Some(TypeOrConst::Either(v))) => {
|
(Either::Right(k), Some(TypeOrConst::Either(v))) => {
|
||||||
if let Some(ty) = v.ty() {
|
if let Some(ty) = v.ty() {
|
||||||
type_substs.insert(k, ty.clone());
|
type_substs.insert(k, ty);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
(Either::Right(k), None) => {
|
(Either::Right(k), None) => {
|
||||||
|
|
|
@ -539,7 +539,7 @@ impl<'a> FindUsages<'a> {
|
||||||
tree.token_at_offset(offset).into_iter().for_each(|token| {
|
tree.token_at_offset(offset).into_iter().for_each(|token| {
|
||||||
let Some(str_token) = ast::String::cast(token.clone()) else { return };
|
let Some(str_token) = ast::String::cast(token.clone()) else { return };
|
||||||
if let Some((range, nameres)) =
|
if let Some((range, nameres)) =
|
||||||
sema.check_for_format_args_template(token.clone(), offset)
|
sema.check_for_format_args_template(token, offset)
|
||||||
{
|
{
|
||||||
if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
|
if self.found_format_args_ref(file_id, range, str_token, nameres, sink) {
|
||||||
return;
|
return;
|
||||||
|
|
|
@ -341,13 +341,13 @@ impl SourceChangeBuilder {
|
||||||
/// Adds a tabstop snippet to place the cursor before `token`
|
/// Adds a tabstop snippet to place the cursor before `token`
|
||||||
pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
|
pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
|
||||||
assert!(token.parent().is_some());
|
assert!(token.parent().is_some());
|
||||||
self.add_snippet(PlaceSnippet::Before(token.clone().into()));
|
self.add_snippet(PlaceSnippet::Before(token.into()));
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Adds a tabstop snippet to place the cursor after `token`
|
/// Adds a tabstop snippet to place the cursor after `token`
|
||||||
pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
|
pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) {
|
||||||
assert!(token.parent().is_some());
|
assert!(token.parent().is_some());
|
||||||
self.add_snippet(PlaceSnippet::After(token.clone().into()));
|
self.add_snippet(PlaceSnippet::After(token.into()));
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Adds a snippet to move the cursor selected over `node`
|
/// Adds a snippet to move the cursor selected over `node`
|
||||||
|
|
|
@ -160,7 +160,7 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -
|
||||||
// if receiver should be pass as first arg in the assoc func,
|
// if receiver should be pass as first arg in the assoc func,
|
||||||
// we could omit generic parameters cause compiler can deduce it automatically
|
// we could omit generic parameters cause compiler can deduce it automatically
|
||||||
if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() {
|
if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() {
|
||||||
let generic_parameters = generic_parameters.join(", ").to_string();
|
let generic_parameters = generic_parameters.join(", ");
|
||||||
receiver_type_adt_name =
|
receiver_type_adt_name =
|
||||||
format!("{}::<{}>", receiver_type_adt_name, generic_parameters);
|
format!("{}::<{}>", receiver_type_adt_name, generic_parameters);
|
||||||
}
|
}
|
||||||
|
|
|
@ -274,7 +274,7 @@ impl CargoWorkspace {
|
||||||
other_options.append(
|
other_options.append(
|
||||||
&mut targets
|
&mut targets
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flat_map(|target| ["--filter-platform".to_owned().to_string(), target])
|
.flat_map(|target| ["--filter-platform".to_owned(), target])
|
||||||
.collect(),
|
.collect(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue