Mirror of https://github.com/astral-sh/ruff.git, synced 2025-10-01 06:11:43 +00:00.
# Use TokenKind in doc_lines_from_tokens (#11418)
## Summary

This PR updates the `doc_lines_from_tokens` function to use `TokenKind` instead of `Tok`. This is part of #11401.

## Test Plan

`cargo test`
This commit is contained in:

parent 025768d303
commit a33763170e

2 changed files with 10 additions and 11 deletions
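For context before the diff: `Tok` is the lexer's data-carrying token type (its `Comment` variant, for example, owns the comment's text), while `TokenKind` is a fieldless discriminant that is `Copy` and trivial to compare. The sketch below illustrates that distinction with simplified stand-in enums; it is not ruff's actual definition of either type.

```rust
// Simplified stand-ins for the two token representations; ruff's real
// `Tok` and `TokenKind` enums have many more variants than this.
#[derive(Debug)]
enum Tok {
    // Carries owned data: the text of the comment.
    Comment(String),
    Newline,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenKind {
    // Fieldless discriminant: no payload, cheap to copy and match on.
    Comment,
    Newline,
}

// A rule that only needs to know *which* token it is can work from the
// kind alone, without ever touching the owned comment text.
fn kind_of(tok: &Tok) -> TokenKind {
    match tok {
        Tok::Comment(_) => TokenKind::Comment,
        Tok::Newline => TokenKind::Newline,
    }
}

fn main() {
    let tok = Tok::Comment("# a doc line".to_string());
    assert_eq!(kind_of(&tok), TokenKind::Comment);
}
```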
```diff
@@ -4,27 +4,26 @@
 use std::iter::FusedIterator;
 
 use ruff_python_ast::{self as ast, Stmt, Suite};
-use ruff_python_parser::lexer::LexResult;
-use ruff_python_parser::Tok;
+use ruff_python_parser::{TokenKind, TokenKindIter};
 use ruff_text_size::{Ranged, TextSize};
 
 use ruff_python_ast::statement_visitor::{walk_stmt, StatementVisitor};
 use ruff_source_file::{Locator, UniversalNewlineIterator};
 
 /// Extract doc lines (standalone comments) from a token sequence.
-pub(crate) fn doc_lines_from_tokens(lxr: &[LexResult]) -> DocLines {
-    DocLines::new(lxr)
+pub(crate) fn doc_lines_from_tokens(tokens: TokenKindIter) -> DocLines {
+    DocLines::new(tokens)
 }
 
 pub(crate) struct DocLines<'a> {
-    inner: std::iter::Flatten<core::slice::Iter<'a, LexResult>>,
+    inner: TokenKindIter<'a>,
     prev: TextSize,
 }
 
 impl<'a> DocLines<'a> {
-    fn new(lxr: &'a [LexResult]) -> Self {
+    fn new(tokens: TokenKindIter<'a>) -> Self {
         Self {
-            inner: lxr.iter().flatten(),
+            inner: tokens,
             prev: TextSize::default(),
         }
     }
```
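The structural effect of this hunk: `DocLines` previously borrowed a slice of `LexResult`s and flattened away the `Err` entries itself, whereas it now simply stores the kind iterator the caller hands in. Here is a standalone sketch of that before/after shape, using placeholder item and error types rather than ruff's:

```rust
// Placeholders for ruff's (TokenKind, TextRange) items and lex errors.
type Item = (u8, std::ops::Range<u32>);
type LexResult = Result<Item, ()>;

// Old shape: borrow the lex results and skip errors via `Flatten`.
struct OldDocLines<'a> {
    inner: std::iter::Flatten<std::slice::Iter<'a, LexResult>>,
}

// New shape: store the caller's iterator of kinds directly.
struct NewDocLines<I: Iterator<Item = Item>> {
    inner: I,
}

fn main() {
    let lexed: Vec<LexResult> = vec![Ok((0, 0..1)), Err(()), Ok((1, 1..2))];
    let old = OldDocLines { inner: lexed.iter().flatten() };
    // Flattening a slice of `Result`s yields only the `Ok` values.
    assert_eq!(old.inner.count(), 2);

    let kinds = vec![(0u8, 0..1u32), (1, 1..2)];
    let new = NewDocLines { inner: kinds.into_iter() };
    assert_eq!(new.inner.count(), 2);
}
```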
```diff
@@ -39,15 +38,15 @@ impl Iterator for DocLines<'_> {
             let (tok, range) = self.inner.next()?;
 
             match tok {
-                Tok::Comment(..) => {
+                TokenKind::Comment => {
                     if at_start_of_line {
                         break Some(range.start());
                     }
                 }
-                Tok::Newline | Tok::NonLogicalNewline => {
+                TokenKind::Newline | TokenKind::NonLogicalNewline => {
                     at_start_of_line = true;
                 }
-                Tok::Indent | Tok::Dedent => {
+                TokenKind::Indent | TokenKind::Dedent => {
                     // ignore
                 }
                 _ => {
```
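This match is the heart of the extraction: a comment counts as a doc line only when it begins its line, newlines reset that state, and indentation tokens are transparent to the test. Below is a runnable, self-contained sketch of the same state machine; the `TokenKind` enum and the plain `u32` offsets are simplifications, and having the `_` arm reset the flag is an assumption about the arm the hunk truncates.

```rust
#[derive(Clone, Copy)]
enum TokenKind {
    Comment,
    Newline,
    NonLogicalNewline,
    Indent,
    Dedent,
    Other,
}

/// Collect the start offset of every comment that begins its line.
fn doc_line_starts(tokens: &[(TokenKind, u32)]) -> Vec<u32> {
    let mut starts = Vec::new();
    let mut at_start_of_line = true;
    for &(tok, start) in tokens {
        match tok {
            TokenKind::Comment => {
                if at_start_of_line {
                    starts.push(start);
                }
            }
            // Any kind of newline means the next token begins a line.
            TokenKind::Newline | TokenKind::NonLogicalNewline => {
                at_start_of_line = true;
            }
            // Indentation doesn't change whether a comment is standalone.
            TokenKind::Indent | TokenKind::Dedent => {}
            // Assumed: any other token means we are now mid-line.
            _ => {
                at_start_of_line = false;
            }
        }
    }
    starts
}

fn main() {
    use TokenKind::*;
    // "# standalone" opens a line; the comment after code does not.
    let tokens = [
        (Comment, 0),  // "# standalone"
        (Newline, 12),
        (Indent, 13),
        (NonLogicalNewline, 14),
        (Other, 17),   // some statement
        (Comment, 25), // trailing comment, not a doc line
        (Newline, 40),
        (Dedent, 41),
    ];
    assert_eq!(doc_line_starts(&tokens), vec![0]);
}
```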
```diff
@@ -93,7 +93,7 @@ pub fn check_path(
     let use_doc_lines = settings.rules.enabled(Rule::DocLineTooLong);
     let mut doc_lines = vec![];
     if use_doc_lines {
-        doc_lines.extend(doc_lines_from_tokens(&tokens));
+        doc_lines.extend(doc_lines_from_tokens(tokens.kinds()));
     }
 
     // Run the token-based rules.
```
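At the call site, the lexed tokens now expose their kinds through a borrowing `kinds()` view rather than being passed as a raw slice. Here is a hypothetical sketch of that accessor pattern; the `Tokens` struct, the exact signature of its `kinds()` method, and the item types are stand-ins, not ruff's actual API.

```rust
// Hypothetical stand-in for ruff's token collection.
struct Tokens {
    raw: Vec<(u8, std::ops::Range<u32>)>,
}

impl Tokens {
    // Borrowing view: hands out lightweight (kind, range) pairs without
    // consuming the collection.
    fn kinds(&self) -> impl Iterator<Item = (u8, std::ops::Range<u32>)> + '_ {
        self.raw.iter().cloned()
    }
}

fn main() {
    let tokens = Tokens {
        raw: vec![(0, 0..1), (1, 1..3)],
    };
    // A consumer can drain a kinds view...
    let n = tokens.kinds().count();
    // ...and `tokens` is still usable afterwards, since `kinds()` borrows.
    assert_eq!(n, tokens.raw.len());
}
```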