Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-28 21:05:02 +00:00
remove imports that are also in edition 2021's prelude
commit 232176b46a
parent 554f7f889e
18 changed files with 15 additions and 27 deletions
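Background for the change: the Rust 2021 edition adds TryInto, TryFrom and FromIterator to the standard prelude, so explicit imports of those traits (and iter::-qualified paths such as iter::FromIterator) become redundant once a crate targets edition 2021. A minimal sketch, not part of this commit, showing both traits in scope with no imports on edition 2021:

// A minimal sketch, not part of this commit: on edition 2021 both traits are
// already in the prelude, so the imports below are unnecessary.
//
//     use std::iter::FromIterator;  // needed on editions 2015/2018 only
//     use std::convert::TryInto;    // needed on editions 2015/2018 only

struct Wrapper(Vec<u32>);

// Nameable without an import on edition 2021.
impl FromIterator<u32> for Wrapper {
    fn from_iter<I: IntoIterator<Item = u32>>(iter: I) -> Self {
        Wrapper(iter.into_iter().collect())
    }
}

fn main() {
    let w: Wrapper = (0..4).collect();
    // try_into is likewise usable with no import.
    let len: u32 = w.0.len().try_into().unwrap();
    assert_eq!(len, 4);
}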
@@ -157,7 +157,7 @@ impl From<TokenTree> for TokenStream {
 }
 
 /// Collects a number of token trees into a single stream.
-impl iter::FromIterator<TokenTree> for TokenStream {
+impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
         trees.into_iter().map(TokenStream::from).collect()
     }
@@ -165,7 +165,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
 
 /// A "flattening" operation on token streams, collects token trees
 /// from multiple token streams into a single stream.
-impl iter::FromIterator<TokenStream> for TokenStream {
+impl FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
         let mut builder = bridge::client::TokenStreamBuilder::new();
         streams.into_iter().for_each(|stream| builder.push(stream.0));
@@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server};
 
 use std::collections::HashMap;
 use std::hash::Hash;
-use std::iter::FromIterator;
 use std::ops::Bound;
 use std::{ascii, vec::IntoIter};
 
@@ -207,7 +207,7 @@ impl ConcatStreamsHelper {
 }
 
 /// Collects a number of token trees into a single stream.
-impl iter::FromIterator<TokenTree> for TokenStream {
+impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
         trees.into_iter().map(TokenStream::from).collect()
     }
@@ -215,7 +215,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
 
 /// A "flattening" operation on token streams, collects token trees
 /// from multiple token streams into a single stream.
-impl iter::FromIterator<TokenStream> for TokenStream {
+impl FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
         let iter = streams.into_iter();
         let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
@@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server};
 
 use std::collections::HashMap;
 use std::hash::Hash;
-use std::iter::FromIterator;
 use std::ops::Bound;
 use std::{ascii, vec::IntoIter};
 
@@ -207,7 +207,7 @@ impl ConcatStreamsHelper {
 }
 
 /// Collects a number of token trees into a single stream.
-impl iter::FromIterator<TokenTree> for TokenStream {
+impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
         trees.into_iter().map(TokenStream::from).collect()
     }
@@ -215,7 +215,7 @@ impl iter::FromIterator<TokenTree> for TokenStream {
 
 /// A "flattening" operation on token streams, collects token trees
 /// from multiple token streams into a single stream.
-impl iter::FromIterator<TokenStream> for TokenStream {
+impl FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
         let iter = streams.into_iter();
         let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
@@ -12,7 +12,6 @@ use super::proc_macro::bridge::{self, server};
 
 use std::collections::HashMap;
 use std::hash::Hash;
-use std::iter::FromIterator;
 use std::ops::Bound;
 use std::{ascii, vec::IntoIter};
 
@@ -20,7 +20,7 @@ use token_stream::TokenStreamBuilder;
 mod symbol;
 pub use symbol::*;
 
-use std::{iter::FromIterator, ops::Bound};
+use std::ops::Bound;
 
 type Group = tt::Subtree;
 type TokenTree = tt::TokenTree;
@@ -1,7 +1,6 @@
 //! Handles dynamic library loading for proc macro
 
 use std::{
-    convert::TryInto,
     fmt,
     fs::File,
     io,
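The doc comments in the hunks above describe what the two changed impls do: one collects individual token trees into a stream, the other flattens several streams into one. A stand-in sketch with hypothetical MiniTree/MiniStream types (the real code implements these traits on the proc-macro bridge's TokenTree/TokenStream, not reproduced here), written prelude-style as in the new code:

// Hypothetical stand-in types; the commit itself changes impls on the
// proc-macro bridge's TokenStream/TokenTree, which are not reproduced here.
#[derive(Debug)]
struct MiniStream(Vec<MiniTree>);

#[derive(Debug)]
struct MiniTree(char);

impl From<MiniTree> for MiniStream {
    fn from(tree: MiniTree) -> Self {
        MiniStream(vec![tree])
    }
}

// Collects a number of token trees into a single stream.
impl FromIterator<MiniTree> for MiniStream {
    fn from_iter<I: IntoIterator<Item = MiniTree>>(trees: I) -> Self {
        trees.into_iter().map(MiniStream::from).collect()
    }
}

// A "flattening" operation: collects token trees from multiple streams into one.
impl FromIterator<MiniStream> for MiniStream {
    fn from_iter<I: IntoIterator<Item = MiniStream>>(streams: I) -> Self {
        MiniStream(streams.into_iter().flat_map(|stream| stream.0).collect())
    }
}

fn main() {
    // `collect` picks the right impl from the requested result type.
    let a: MiniStream = "fn".chars().map(MiniTree).collect();
    let b: MiniStream = "f".chars().map(MiniTree).collect();
    let merged: MiniStream = vec![a, b].into_iter().collect();
    println!("{merged:?}");
}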