Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-27 12:29:28 +00:00
# Add `Tokens` newtype wrapper, `TokenKind` iterator (#11361)
## Summary

Alternative to #11237.

This PR adds a new `Tokens` struct, a newtype wrapper around a vector of lexer output. This allows us to add a `kinds` method that returns an iterator over the corresponding `TokenKind`s. The iterator is implemented as a separate `TokenKindIter` struct so that the type can be named and additional methods like `peek` can be provided directly on the iterator. This lets the linter access the stream of `TokenKind` instead of `Tok`.

Edit: I've made the necessary downstream changes and plan to merge the entire stack at once.
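For illustration, here is a minimal sketch of the pattern the summary describes. The real ruff types (`Tok`, `TokenKind`, `LexResult`) carry much more information; the stand-ins below are simplified, and the exact signatures in the PR may differ.

```rust
// Simplified stand-in for ruff's `TokenKind`: a cheap, Copy-able discriminant.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenKind {
    Name,
    Number,
    Newline,
}

// Simplified stand-in for the lexer's owned token type, `Tok`.
#[derive(Debug, Clone)]
enum Tok {
    Name(String),
    Number(String),
    Newline,
}

impl Tok {
    // Map an owned token to its kind.
    fn kind(&self) -> TokenKind {
        match self {
            Tok::Name(_) => TokenKind::Name,
            Tok::Number(_) => TokenKind::Number,
            Tok::Newline => TokenKind::Newline,
        }
    }
}

/// Newtype wrapper around the lexer output, as described in the summary.
struct Tokens(Vec<Tok>);

impl Tokens {
    /// Returns an iterator over the corresponding `TokenKind`s.
    fn kinds(&self) -> TokenKindIter<'_> {
        TokenKindIter {
            inner: self.0.iter(),
        }
    }
}

/// A dedicated iterator struct, so the type can be named and extra methods
/// like `peek` can live directly on it instead of requiring callers to wrap
/// it in `Peekable` themselves.
struct TokenKindIter<'a> {
    inner: std::slice::Iter<'a, Tok>,
}

impl TokenKindIter<'_> {
    /// Look at the next kind without consuming it.
    fn peek(&self) -> Option<TokenKind> {
        // Cloning a slice iterator is cheap: it copies two pointers.
        self.inner.clone().next().map(Tok::kind)
    }
}

impl Iterator for TokenKindIter<'_> {
    type Item = TokenKind;

    fn next(&mut self) -> Option<TokenKind> {
        self.inner.next().map(Tok::kind)
    }
}
```

One reason a named iterator type is preferable to `impl Iterator` here: it can be stored in struct fields and extended with domain-specific helpers like `peek` without forcing every call site to build its own adapter.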
Parent: 50f14d017e
Commit: 025768d303

9 changed files with 142 additions and 25 deletions
```diff
@@ -17,7 +17,6 @@ use ruff_python_ast::{Mod, PySourceType};
 use ruff_python_codegen::Stylist;
 use ruff_python_formatter::{format_module_ast, pretty_comments, PyFormatContext, QuoteStyle};
 use ruff_python_index::{CommentRangesBuilder, Indexer};
-use ruff_python_parser::lexer::LexResult;
 use ruff_python_parser::{parse_tokens, tokenize_all, AsMode, Mode, Program};
 use ruff_python_trivia::CommentRanges;
 use ruff_source_file::{Locator, SourceLocation};
@@ -162,7 +161,7 @@ impl Workspace {
         let source_kind = SourceKind::Python(contents.to_string());
 
         // Tokenize once.
-        let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, source_type.as_mode());
+        let tokens = ruff_python_parser::tokenize(contents, source_type.as_mode());
 
         // Map row and column locations to byte slices (lazily).
         let locator = Locator::new(contents);
```
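The call-site change above is the visible payoff: `tokenize` now hands back the wrapper instead of a raw `Vec<LexResult>`, so the explicit type annotation disappears. Building on the sketch after the summary (it reuses those stand-in types), a hypothetical consumer might look like this; the placeholder `tokenize` below is purely illustrative, not ruff's real lexer:

```rust
// Hypothetical stand-in for `ruff_python_parser::tokenize`, now returning the
// `Tokens` wrapper rather than `Vec<LexResult>`. The "lexer" here just emits
// one `Name` token per whitespace-separated word.
fn tokenize(source: &str) -> Tokens {
    Tokens(
        source
            .split_whitespace()
            .map(|word| Tok::Name(word.to_string()))
            .collect(),
    )
}

fn main() {
    let tokens = tokenize("x y z");
    let mut kinds = tokens.kinds();

    // `peek` inspects the next kind without consuming it...
    assert_eq!(kinds.peek(), Some(TokenKind::Name));
    // ...while `next` consumes it.
    assert_eq!(kinds.next(), Some(TokenKind::Name));
    // Two tokens remain of the original three.
    assert_eq!(kinds.count(), 2);
}
```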