Formatter and parser refactoring (#7569)

I got confused by the old names and refactored them a bit; the naming should now be more
consistent. This refactor is the basis for the range formatting work. A short usage sketch of the
renamed entry points follows the change list below.

Changes:
* `format_module` -> `format_module_source` (format a string)
* `format_node` -> `format_module_ast` (format a program parsed into an
AST)
* Added `parse_ok_tokens` that takes `Token` instead of `Result<Token>`
* Call the source code `source` consistently
* Added a `tokens_and_ranges` helper
* `python_ast` -> `module` (because that's the type)
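
For orientation, here is a minimal sketch of the renamed string-based entry point. It is a hedged
example: the `PyFormatOptions` type, its `Default` impl, and the `as_code()` accessor are
assumptions about the surrounding formatter crate and are not part of this diff.

```rust
use ruff_python_formatter::{format_module_source, PyFormatOptions};

fn main() {
    // Format a source string end to end: parse it into a module AST, then format it.
    let source = "x=1\ny =  2\n";
    let printed = format_module_source(source, PyFormatOptions::default())
        .expect("source should parse and format");
    println!("{}", printed.as_code());
}
```

`format_module_ast` is the lower-level counterpart: it formats a module that has already been
parsed, which is what the range formatting work builds on.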
konsti 2023-09-26 15:29:43 +02:00 committed by GitHub
parent 2cb5e43dd7
commit 4d16e2308d
14 changed files with 126 additions and 138 deletions


@@ -1,6 +1,7 @@
 use std::fmt::Debug;

-use ruff_python_parser::Tok;
+use ruff_python_parser::lexer::{lex, LexicalError};
+use ruff_python_parser::{Mode, Tok};
 use ruff_python_trivia::CommentRanges;
 use ruff_text_size::TextRange;

@@ -20,3 +21,21 @@ impl CommentRangesBuilder {
         CommentRanges::new(self.ranges)
     }
 }
+
+/// Helper method to lex and extract comment ranges
+pub fn tokens_and_ranges(
+    source: &str,
+) -> Result<(Vec<(Tok, TextRange)>, CommentRanges), LexicalError> {
+    let mut tokens = Vec::new();
+    let mut comment_ranges = CommentRangesBuilder::default();
+
+    for result in lex(source, Mode::Module) {
+        let (token, range) = result?;
+
+        comment_ranges.visit_token(&token, range);
+        tokens.push((token, range));
+    }
+
+    let comment_ranges = comment_ranges.finish();
+    Ok((tokens, comment_ranges))
+}
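
A hedged usage sketch of the new helper, assuming it is consumed through the `ruff_python_index`
re-export added in the next hunk; the sample source string is illustrative only.

```rust
use ruff_python_index::tokens_and_ranges;

fn main() {
    let source = "x = 1  # trailing comment\n";

    // One pass over the lexer yields both the (token, range) pairs and the
    // comment ranges the formatter later uses to attach comments to AST nodes.
    let (tokens, comment_ranges) =
        tokens_and_ranges(source).expect("source should lex without errors");

    for (tok, range) in &tokens {
        println!("{range:?}: {tok:?}");
    }

    // `comment_ranges` would be handed to the AST-based formatting entry point
    // together with the parsed module; it is unused in this standalone sketch.
    let _ = comment_ranges;
}
```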


@@ -1,5 +1,5 @@
 mod comment_ranges;
 mod indexer;

-pub use comment_ranges::CommentRangesBuilder;
+pub use comment_ranges::{tokens_and_ranges, CommentRangesBuilder};
 pub use indexer::Indexer;