Re-integrate RustPython parser repository (#4359)

Co-authored-by: Micha Reiser <micha@reiser.io>
This commit is contained in:
Jeong, YunWon 2023-05-11 16:47:17 +09:00 committed by GitHub
parent 865205d992
commit be6e00ef6e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
270 changed files with 3061 additions and 3361 deletions

View file

@@ -17,7 +17,7 @@ pub fn find_tok(
locator: &Locator,
f: impl Fn(rustpython_parser::Tok) -> bool,
) -> TextRange {
for (tok, tok_range) in rustpython_parser::lexer::lex_located(
for (tok, tok_range) in rustpython_parser::lexer::lex_starts_at(
&locator.contents()[range],
rustpython_parser::Mode::Module,
range.start(),
@@ -45,7 +45,7 @@ pub fn expand_indented_block(
// Find the colon, which indicates the end of the header.
let mut nesting = 0;
let mut colon = None;
for (tok, tok_range) in rustpython_parser::lexer::lex_located(
for (tok, tok_range) in rustpython_parser::lexer::lex_starts_at(
&contents[TextRange::new(location, end_location)],
rustpython_parser::Mode::Module,
location,
@@ -69,7 +69,7 @@ pub fn expand_indented_block(
let colon_location = colon.unwrap();
// From here, we have two options: simple statement or compound statement.
let indent = rustpython_parser::lexer::lex_located(
let indent = rustpython_parser::lexer::lex_starts_at(
&contents[TextRange::new(colon_location, end_location)],
rustpython_parser::Mode::Module,
colon_location,

File diff suppressed because it is too large Load diff

View file

@@ -2,13 +2,13 @@ use ruff_formatter::prelude::*;
use ruff_formatter::{write, Format};
use crate::context::ASTFormatContext;
use crate::cst::Located;
use crate::cst::Attributed;
use crate::format::builders::literal;
use crate::trivia::TriviaKind;
#[derive(Debug)]
pub struct LeadingComments<'a, T> {
item: &'a Located<T>,
item: &'a Attributed<T>,
}
impl<T> Format<ASTFormatContext<'_>> for LeadingComments<'_, T> {
@@ -31,13 +31,13 @@ impl<T> Format<ASTFormatContext<'_>> for LeadingComments<'_, T> {
}
#[inline]
pub const fn leading_comments<T>(item: &Located<T>) -> LeadingComments<'_, T> {
pub const fn leading_comments<T>(item: &Attributed<T>) -> LeadingComments<'_, T> {
LeadingComments { item }
}
#[derive(Debug)]
pub struct TrailingComments<'a, T> {
item: &'a Located<T>,
item: &'a Attributed<T>,
}
impl<T> Format<ASTFormatContext<'_>> for TrailingComments<'_, T> {
@@ -60,13 +60,13 @@ impl<T> Format<ASTFormatContext<'_>> for TrailingComments<'_, T> {
}
#[inline]
pub const fn trailing_comments<T>(item: &Located<T>) -> TrailingComments<'_, T> {
pub const fn trailing_comments<T>(item: &Attributed<T>) -> TrailingComments<'_, T> {
TrailingComments { item }
}
#[derive(Debug)]
pub struct EndOfLineComments<'a, T> {
item: &'a Located<T>,
item: &'a Attributed<T>,
}
impl<T> Format<ASTFormatContext<'_>> for EndOfLineComments<'_, T> {
@@ -88,13 +88,13 @@ impl<T> Format<ASTFormatContext<'_>> for EndOfLineComments<'_, T> {
}
#[inline]
pub const fn end_of_line_comments<T>(item: &Located<T>) -> EndOfLineComments<'_, T> {
pub const fn end_of_line_comments<T>(item: &Attributed<T>) -> EndOfLineComments<'_, T> {
EndOfLineComments { item }
}
#[derive(Debug)]
pub struct DanglingComments<'a, T> {
item: &'a Located<T>,
item: &'a Attributed<T>,
}
impl<T> Format<ASTFormatContext<'_>> for DanglingComments<'_, T> {
@@ -113,6 +113,6 @@ impl<T> Format<ASTFormatContext<'_>> for DanglingComments<'_, T> {
}
#[inline]
pub const fn dangling_comments<T>(item: &Located<T>) -> DanglingComments<'_, T> {
pub const fn dangling_comments<T>(item: &Attributed<T>) -> DanglingComments<'_, T> {
DanglingComments { item }
}

View file

@@ -642,7 +642,7 @@ fn format_import_from(
stmt: &Stmt,
module: Option<&str>,
names: &[Alias],
level: Option<usize>,
level: Option<u32>,
) -> FormatResult<()> {
write!(f, [text("from")])?;
write!(f, [space()])?;

View file

@@ -126,7 +126,7 @@ impl Format<ASTFormatContext<'_>> for StringLiteral<'_> {
// TODO(charlie): This tokenization needs to happen earlier, so that we can attach
// comments to individual string literals.
let contents = f.context().locator().slice(expr.range());
let elts = rustpython_parser::lexer::lex_located(contents, Mode::Module, expr.start())
let elts = rustpython_parser::lexer::lex_starts_at(contents, Mode::Module, expr.start())
.flatten()
.filter_map(|(tok, range)| {
if matches!(tok, Tok::String { .. }) {