Rename compiler Location to TextSize

Jeong YunWon 2023-05-06 17:49:47 +09:00
parent 58c35ab458
commit f47dfca4e3
13 changed files with 9423 additions and 9427 deletions

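In short: the `Location` alias is dropped in favour of `ruff_text_size::TextSize`, and downstream crates reach the type through a new `text_size` re-export in `rustpython-compiler-core` instead of depending on `ruff_text_size` directly. A rough sketch of what caller code looks like after the change (illustrative only, not lines from the diff):

// Illustrative sketch, not part of the diff: TextSize replaces the old Location alias
// and is reached through the compiler-core re-export added in this commit.
use rustpython_compiler_core::text_size::TextSize;

fn span_start() -> TextSize {
    TextSize::default() // offset 0, where Location::default() was used before
}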

@@ -16,6 +16,5 @@ unparse = ["rustpython-literal"]
[dependencies]
rustpython-compiler-core = { path = "../core", version = "0.2.0" }
rustpython-literal = { path = "../literal", version = "0.2.0", optional = true }
ruff_text_size = { path = "../ruff_text_size" }
num-bigint = { workspace = true }


@@ -718,7 +718,7 @@ def write_ast_def(mod, typeinfo, f):
#![allow(clippy::derive_partial_eq_without_eq)]
pub use crate::constant::*;
pub use ruff_text_size::{TextSize, TextRange};
pub use rustpython_compiler_core::text_size::{TextSize, TextRange};
type Ident = String;
\n


@@ -3,7 +3,7 @@
#![allow(clippy::derive_partial_eq_without_eq)]
pub use crate::constant::*;
pub use ruff_text_size::{TextRange, TextSize};
pub use rustpython_compiler_core::text_size::{TextRange, TextSize};
type Ident = String;


@@ -11,3 +11,5 @@ pub use bytecode::*;
pub use error::BaseError;
pub use location::Location;
pub use mode::Mode;
pub use ruff_text_size as text_size; // re-export mandatory and frequently accessed dependency

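With this re-export in place, the ast and parser crates below switch their imports from `ruff_text_size::…` to `rustpython_compiler_core::text_size::…`. A small hedged sketch of how the re-exported types can be consumed (only the import path comes from the diff; the function is made up for illustration):

// Illustrative sketch, not part of the diff: consuming the re-exported types.
use rustpython_compiler_core::text_size::{TextRange, TextSize};

fn first_n_bytes(n: u32) -> TextRange {
    // TextRange::new(start, end) comes from ruff_text_size via the re-export.
    TextRange::new(TextSize::from(0), TextSize::from(n))
}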

@@ -21,7 +21,6 @@ tiny-keccak = { version = "2", features = ["sha3"] }
[dependencies]
rustpython-ast = { path = "../ast", version = "0.2.0" }
rustpython-compiler-core = { path = "../core", version = "0.2.0" }
ruff_text_size = { path = "../ruff_text_size" }
ahash = { workspace = true }
itertools = { workspace = true }


@@ -3,8 +3,8 @@
use crate::{
ast,
lexer::{LexicalError, LexicalErrorType},
text_size::TextSize,
};
use ruff_text_size::TextSize;
use rustc_hash::FxHashSet;
pub(crate) struct ArgumentList {


@@ -31,12 +31,12 @@ use crate::{
mode::Mode,
soft_keywords::SoftKeywordTransformer,
string::FStringErrorType,
text_size::{TextLen, TextRange, TextSize},
token::{StringKind, Tok},
};
use log::trace;
use num_bigint::BigInt;
use num_traits::{Num, Zero};
use ruff_text_size::{TextLen, TextRange, TextSize};
use std::{char, cmp::Ordering, ops::Index, slice::SliceIndex, str::FromStr};
use unic_emoji_char::is_emoji_presentation;
use unic_ucd_ident::{is_xid_continue, is_xid_start};


@@ -113,6 +113,7 @@
#![doc(html_root_url = "https://docs.rs/rustpython-parser/")]
pub use rustpython_ast as ast;
pub use rustpython_compiler_core::text_size;
pub use rustpython_compiler_core::ConversionFlag;
mod function;
@@ -125,14 +126,11 @@ mod soft_keywords;
mod string;
mod token;
type Location = TextSize;
pub use mode::Mode;
pub use parser::{
parse, parse_expression, parse_expression_located, parse_located, parse_program, parse_tokens,
ParseError, ParseErrorType,
};
use ruff_text_size::TextSize;
pub use string::FStringErrorType;
pub use token::{StringKind, Tok};


@@ -13,12 +13,12 @@
//! [`Mode`]: crate::mode
use crate::{
ast::{self},
ast,
lexer::{self, LexResult, LexicalError, LexicalErrorType},
mode::Mode,
python,
text_size::TextSize,
token::Tok,
Location,
};
use itertools::Itertools;
use std::iter;
@@ -70,7 +70,7 @@ pub fn parse_program(source: &str, source_path: &str) -> Result<ast::Suite, Pars
///
/// ```
pub fn parse_expression(source: &str, path: &str) -> Result<ast::Expr, ParseError> {
parse_expression_located(source, path, Location::default())
parse_expression_located(source, path, TextSize::default())
}
/// Parses a Python expression from a given location.
@@ -84,8 +84,7 @@ pub fn parse_expression(source: &str, path: &str) -> Result<ast::Expr, ParseErro
/// somewhat silly, location:
///
/// ```
/// use ruff_text_size::TextSize;
/// use rustpython_parser::{parse_expression_located};
/// use rustpython_parser::{text_size::TextSize, parse_expression_located};
///
/// let expr = parse_expression_located("1 + 2", "<embedded>", TextSize::from(400));
/// assert!(expr.is_ok());
@@ -93,7 +92,7 @@ pub fn parse_expression(source: &str, path: &str) -> Result<ast::Expr, ParseErro
pub fn parse_expression_located(
source: &str,
path: &str,
location: Location,
location: TextSize,
) -> Result<ast::Expr, ParseError> {
parse_located(source, Mode::Expression, path, location).map(|top| match top {
ast::Mod::Expression(ast::ModExpression { body }) => *body,
@@ -133,7 +132,7 @@ pub fn parse_expression_located(
/// assert!(program.is_ok());
/// ```
pub fn parse(source: &str, mode: Mode, source_path: &str) -> Result<ast::Mod, ParseError> {
parse_located(source, mode, source_path, Location::default())
parse_located(source, mode, source_path, TextSize::default())
}
/// Parse the given Python source code using the specified [`Mode`] and [`Location`].
@@ -144,8 +143,7 @@ pub fn parse(source: &str, mode: Mode, source_path: &str) -> Result<ast::Mod, Pa
/// # Example
///
/// ```
/// use ruff_text_size::TextSize;
/// use rustpython_parser::{Mode, parse_located};
/// use rustpython_parser::{text_size::TextSize, Mode, parse_located};
///
/// let source = r#"
/// def fib(i):
@@ -163,7 +161,7 @@ pub fn parse_located(
source: &str,
mode: Mode,
source_path: &str,
location: Location,
location: TextSize,
) -> Result<ast::Mod, ParseError> {
let lxr = lexer::lex_located(source, mode, location);
parse_tokens(lxr, mode, source_path)
@@ -226,7 +224,7 @@ impl std::error::Error for ParseErrorType {}
// Convert `lalrpop_util::ParseError` to our internal type
fn parse_error_from_lalrpop(
err: LalrpopError<Location, Tok, LexicalError>,
err: LalrpopError<TextSize, Tok, LexicalError>,
source_path: &str,
) -> ParseError {
let source_path = source_path.to_owned();

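The net effect on the parser's public API is that every `*_located` entry point now takes a plain `TextSize` offset, as the updated doc examples above show. For completeness, a hedged usage sketch based on those doc examples:

// Illustrative sketch based on the doc examples above; assumes the crate layout after this commit.
use rustpython_parser::{parse_expression_located, text_size::TextSize};

fn parse_embedded() {
    // Parse an expression as if it started 400 bytes into some larger file.
    let expr = parse_expression_located("1 + 2", "<embedded>", TextSize::from(400));
    assert!(expr.is_ok());
}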

@@ -1721,7 +1721,7 @@ ArgumentList: ArgumentList = {
}
};
FunctionArgument: (Option<(crate::Location, crate::Location, Option<String>)>, ast::Expr) = {
FunctionArgument: (Option<(crate::text_size::TextSize, crate::text_size::TextSize, Option<String>)>, ast::Expr) = {
<location:@L> <e:NamedExpressionTest> <c:CompFor?> <end_location:@R> => {
let expr = match c {
Some(c) => ast::Expr::new(
@@ -1776,7 +1776,7 @@ Identifier: String = <s:name> => s;
// Hook external lexer:
extern {
type Location = crate::Location;
type Location = crate::text_size::TextSize;
type Error = LexicalError;
enum token::Tok {

parser/src/python.rs (generated, 18770 lines changed): file diff suppressed because it is too large.


@@ -8,10 +8,11 @@ use crate::{
lexer::{LexicalError, LexicalErrorType},
parser::{parse_expression_located, LalrpopError, ParseError, ParseErrorType},
token::{StringKind, Tok},
Location,
};
use itertools::Itertools;
use ruff_text_size::{TextLen, TextSize};
use rustpython_compiler_core::{
text_size::{TextLen, TextSize},
};
// unicode_name2 does not expose `MAX_NAME_LENGTH`, so we replicate that constant here, fix #3798
const MAX_UNICODE_NAME: usize = 88;
@@ -19,9 +20,9 @@ const MAX_UNICODE_NAME: usize = 88;
struct StringParser<'a> {
chars: std::iter::Peekable<std::str::Chars<'a>>,
kind: StringKind,
start: Location,
end: Location,
location: Location,
start: TextSize,
end: TextSize,
location: TextSize,
}
impl<'a> StringParser<'a> {
@@ -29,8 +30,8 @@ impl<'a> StringParser<'a> {
source: &'a str,
kind: StringKind,
triple_quoted: bool,
start: Location,
end: Location,
start: TextSize,
end: TextSize,
) -> Self {
let offset = kind.prefix_len()
+ if triple_quoted {
@@ -60,7 +61,7 @@ impl<'a> StringParser<'a> {
}
#[inline]
fn get_pos(&self) -> Location {
fn get_pos(&self) -> TextSize {
self.location
}
@@ -570,9 +571,9 @@ impl<'a> StringParser<'a> {
}
}
fn parse_fstring_expr(source: &str, location: Location) -> Result<Expr, ParseError> {
fn parse_fstring_expr(source: &str, location: TextSize) -> Result<Expr, ParseError> {
let fstring_body = format!("({source})");
let start = location - Location::from(1);
let start = location - TextSize::from(1);
parse_expression_located(&fstring_body, "<fstring>", start)
}
@@ -580,14 +581,14 @@ fn parse_string(
source: &str,
kind: StringKind,
triple_quoted: bool,
start: Location,
end: Location,
start: TextSize,
end: TextSize,
) -> Result<Vec<Expr>, LexicalError> {
StringParser::new(source, kind, triple_quoted, start, end).parse()
}
pub(crate) fn parse_strings(
values: Vec<(Location, (String, StringKind, bool), Location)>,
values: Vec<(TextSize, (String, StringKind, bool), TextSize)>,
) -> Result<Expr, LexicalError> {
// Preserve the initial location and kind.
let initial_start = values[0].0;
@@ -708,12 +709,12 @@ struct FStringError {
/// The type of error that occurred.
pub error: FStringErrorType,
/// The location of the error.
pub location: Location,
pub location: TextSize,
}
impl FStringError {
/// Creates a new `FStringError` with the given error type and location.
pub fn new(error: FStringErrorType, location: Location) -> Self {
pub fn new(error: FStringErrorType, location: TextSize) -> Self {
Self { error, location }
}
}
@@ -792,7 +793,7 @@ impl std::fmt::Display for FStringErrorType {
}
}
impl From<FStringError> for LalrpopError<Location, Tok, LexicalError> {
impl From<FStringError> for LalrpopError<TextSize, Tok, LexicalError> {
fn from(err: FStringError) -> Self {
lalrpop_util::ParseError::User {
error: LexicalError {
@@ -803,7 +804,6 @@ impl From<FStringError> for LalrpopError<Location, Tok, LexicalError> {
}
}
#[cfg(not(feature = "byte_offsets"))]
#[cfg(test)]
mod tests {
use super::*;
@@ -814,8 +814,8 @@ mod tests {
source,
StringKind::FString,
false,
Location::default(),
Location::default() + source.text_len() + Location::from(3), // 3 for prefix and quotes
TextSize::default(),
TextSize::default() + source.text_len() + TextSize::from(3), // 3 for prefix and quotes
)
.parse()
}

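The arithmetic in these hunks (`location - TextSize::from(1)`, `TextSize::default() + source.text_len() + TextSize::from(3)`) works because `TextSize` supports the usual add/sub operators and `&str` gets `.text_len()` from the re-exported `TextLen` trait. A hedged sketch of the same pattern:

// Illustrative sketch, not part of the diff: the offset arithmetic used in string.rs.
use rustpython_compiler_core::text_size::{TextLen, TextSize};

fn string_token_end(source: &str, start: TextSize) -> TextSize {
    // Mirrors the test above: prefix plus opening/closing quotes add 3 bytes.
    start + source.text_len() + TextSize::from(3)
}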

@@ -4,8 +4,8 @@
//! loosely based on the token definitions found in the [CPython source].
//!
//! [CPython source]: https://github.com/python/cpython/blob/dfc2e065a2e71011017077e549cd2f9bf4944c54/Include/internal/pycore_token.h
use crate::text_size::TextSize;
use num_bigint::BigInt;
use ruff_text_size::TextSize;
use std::fmt;
/// The set of tokens the Python source code can be tokenized in.