Mirror of https://github.com/astral-sh/ruff.git, synced 2025-08-04 02:38:25 +00:00
Remove some unused pub functions (#11576)
## Summary

I left in place anything in `red-knot`, any `with_` methods, etc.
parent 3989cb8b56
commit 16acd4913f
14 changed files with 7 additions and 495 deletions
````diff
@@ -113,15 +113,14 @@
 use std::iter::FusedIterator;
 use std::ops::Deref;
 
+use crate::lexer::{lex, lex_starts_at, LexResult};
+use ruff_python_ast::{Expr, Mod, ModModule, PySourceType, Suite};
+use ruff_text_size::{TextRange, TextSize};
+
 pub use crate::error::{FStringErrorType, ParseError, ParseErrorType};
-use crate::lexer::{lex, lex_starts_at, LexResult};
 pub use crate::parser::Program;
 pub use crate::token::{Tok, TokenKind};
 
-use ruff_python_ast::{Expr, Mod, ModModule, PySourceType, Suite};
-use ruff_text_size::{Ranged, TextRange, TextSize};
-
 mod error;
 pub mod lexer;
 mod parser;
````
````diff
@@ -355,44 +354,6 @@ impl Tokens {
         TokenKindIter::new(&self.0)
     }
-
-    /// Returns an iterator over the [`TokenKind`] and its range for all the tokens that are
-    /// within the given `range`.
-    ///
-    /// The start and end position of the given range should correspond to the start position of
-    /// the first token and the end position of the last token in the returned iterator.
-    ///
-    /// For example, if the struct contains the following tokens:
-    /// ```txt
-    /// (Def, 0..3)
-    /// (Name, 4..7)
-    /// (Lpar, 7..8)
-    /// (Rpar, 8..9)
-    /// (Colon, 9..10)
-    /// (Ellipsis, 11..14)
-    /// (Newline, 14..14)
-    /// ```
-    ///
-    /// Then, the range `4..10` returns an iterator which yields `Name`, `Lpar`, `Rpar`, and
-    /// `Colon` token. But, if the given position doesn't match any of the tokens, an empty
-    /// iterator is returned.
-    pub fn kinds_within_range<T: Ranged>(&self, ranged: T) -> TokenKindIter {
-        let Ok(start_index) = self.binary_search_by_key(&ranged.start(), |result| match result {
-            Ok((_, range)) => range.start(),
-            Err(error) => error.location().start(),
-        }) else {
-            return TokenKindIter::default();
-        };
-
-        let Ok(end_index) = self.binary_search_by_key(&ranged.end(), |result| match result {
-            Ok((_, range)) => range.end(),
-            Err(error) => error.location().end(),
-        }) else {
-            return TokenKindIter::default();
-        };
-
-        TokenKindIter::new(self.get(start_index..=end_index).unwrap_or(&[]))
-    }
-
-    /// Consumes the [`Tokens`], returning the underlying vector of [`LexResult`].
-    pub fn into_inner(self) -> Vec<LexResult> {
-        self.0
-    }
 }
````
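For context on the removed API: `kinds_within_range` relied on the token list being sorted by source position, binary-searching the query's start offset against token starts and its end offset against token ends to find the sub-slice in between. Below is a minimal sketch of that lookup technique, not the crate's actual code: `Kind` and plain `Range<u32>` are hypothetical stand-ins for the real `TokenKind` and `TextRange`, and the error-tolerant `LexResult` handling is elided.

```rust
use std::ops::Range;

/// Hypothetical stand-in for the parser's `TokenKind`.
#[derive(Debug, Clone, Copy)]
enum Kind {
    Def,
    Name,
    Lpar,
    Rpar,
    Colon,
    Ellipsis,
    Newline,
}

/// Tokens sorted by source position, as a lexer produces them.
struct Tokens(Vec<(Kind, Range<u32>)>);

impl Tokens {
    /// Returns the tokens that fall within `range`, mirroring the removed
    /// `kinds_within_range`: binary-search the start offset against token
    /// starts and the end offset against token ends. If either offset does
    /// not land exactly on a token boundary, the result is empty.
    fn kinds_within_range(&self, range: Range<u32>) -> &[(Kind, Range<u32>)] {
        let Ok(start) = self.0.binary_search_by_key(&range.start, |(_, r)| r.start) else {
            return &[];
        };
        let Ok(end) = self.0.binary_search_by_key(&range.end, |(_, r)| r.end) else {
            return &[];
        };
        self.0.get(start..=end).unwrap_or(&[])
    }
}

fn main() {
    // The token stream from the deleted doc comment (`def foo(): ...`).
    let tokens = Tokens(vec![
        (Kind::Def, 0..3),
        (Kind::Name, 4..7),
        (Kind::Lpar, 7..8),
        (Kind::Rpar, 8..9),
        (Kind::Colon, 9..10),
        (Kind::Ellipsis, 11..14),
        (Kind::Newline, 14..14),
    ]);

    // Yields Name, Lpar, Rpar, Colon, as the doc comment described.
    for (kind, range) in tokens.kinds_within_range(4..10) {
        println!("{kind:?} @ {range:?}");
    }

    // An offset that matches no token boundary yields an empty slice.
    assert!(tokens.kinds_within_range(5..10).is_empty());
}
```

Note the design consequence the deleted doc comment calls out: because both lookups are exact binary searches on token boundaries, a query range that does not align with token edges yields nothing rather than the nearest enclosing tokens.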