Mirror of https://github.com/astral-sh/ruff.git (synced 2025-09-26 11:59:10 +00:00)
Add `Tokens` newtype wrapper, `TokenKind` iterator (#11361)
## Summary

Alternative to #11237.

This PR adds a new `Tokens` struct, a newtype wrapper around a vector of lexer output. This allows us to add a `kinds` method which returns an iterator over the corresponding `TokenKind`s. The iterator is implemented as a separate `TokenKindIter` struct so that the type can be named and so that additional methods like `peek` can be provided directly on the iterator.

This gives the linter access to the stream of `TokenKind` instead of `Tok`.

Edit: I've made the necessary downstream changes and plan to merge the entire stack at once.
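For context, here is a minimal sketch of the shape described above: a newtype over the lexer output with a `kinds` method that returns a dedicated iterator type. The `Tok`/`TokenKind` definitions, field layout, and method signatures below are simplified stand-ins for illustration, not the actual `ruff_python_parser` API.

```rust
use std::ops::Range;

// Illustrative stand-ins for the parser's real lexer output types.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenKind {
    Name,
    Newline,
}

// A simplified lexer output item: a token kind plus its source range.
struct Tok {
    kind: TokenKind,
    #[allow(dead_code)]
    range: Range<usize>,
}

/// Newtype wrapper around the vector of lexer output, analogous to the
/// `Tokens` struct described in the summary (internals assumed here).
struct Tokens(Vec<Tok>);

impl Tokens {
    /// Returns an iterator over the corresponding `TokenKind`s.
    fn kinds(&self) -> TokenKindIter<'_> {
        TokenKindIter {
            inner: self.0.iter().peekable(),
        }
    }
}

/// A dedicated iterator struct (rather than an anonymous `impl Iterator`)
/// so the type can be named and helpers like `peek` can live directly on it.
struct TokenKindIter<'a> {
    inner: std::iter::Peekable<std::slice::Iter<'a, Tok>>,
}

impl<'a> TokenKindIter<'a> {
    /// Look at the next token kind without consuming it.
    fn peek(&mut self) -> Option<TokenKind> {
        self.inner.peek().map(|tok| tok.kind)
    }
}

impl<'a> Iterator for TokenKindIter<'a> {
    type Item = TokenKind;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|tok| tok.kind)
    }
}

fn main() {
    let tokens = Tokens(vec![
        Tok { kind: TokenKind::Name, range: 0..1 },
        Tok { kind: TokenKind::Newline, range: 1..2 },
    ]);

    let mut kinds = tokens.kinds();
    assert_eq!(kinds.peek(), Some(TokenKind::Name));
    assert_eq!(kinds.next(), Some(TokenKind::Name));
    assert_eq!(kinds.next(), Some(TokenKind::Newline));
    assert_eq!(kinds.next(), None);
}
```

Returning a named `TokenKindIter` instead of an opaque `impl Iterator` is what makes it possible to mention the iterator type in other signatures and to add convenience methods such as `peek` directly on it.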
Parent: 50f14d017e
Commit: 025768d303
9 changed files with 142 additions and 25 deletions
```diff
@@ -1,5 +1,4 @@
 use ruff_python_index::Indexer;
-use ruff_python_parser::lexer::LexResult;
 use ruff_python_parser::{tokenize, Mode};
 use ruff_source_file::Locator;
 use ruff_text_size::TextSize;
@@ -38,7 +37,7 @@ fn block_comments_indented_block() {
 fn block_comments_single_line_is_not_a_block() {
     // arrange
     let source = "\n";
-    let tokens: Vec<LexResult> = tokenize(source, Mode::Module);
+    let tokens = tokenize(source, Mode::Module);
     let locator = Locator::new(source);
     let indexer = Indexer::from_tokens(&tokens, &locator);

```