Replace num-bigint with malachite-bigint (#18)

Co-authored-by: Jeong YunWon <jeong@youknowone.org>
This commit is contained in:
Steve Shi 2023-06-02 10:06:18 +02:00 committed by GitHub
parent 5e9e8a7589
commit a2e3209c42
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 51 additions and 37 deletions

View file

@ -9,11 +9,13 @@ license = "MIT"
edition = "2021"
[features]
default = ["location"]
default = ["location", "malachite-bigint"]
location = ["rustpython-ast/location", "rustpython-parser-core/location"]
serde = ["dep:serde", "rustpython-parser-core/serde"]
all-nodes-with-ranges = ["rustpython-ast/all-nodes-with-ranges"]
full-lexer = []
malachite-bigint = ["dep:malachite-bigint", "rustpython-ast/malachite-bigint"]
num-bigint = ["dep:num-bigint", "rustpython-ast/num-bigint"]
[build-dependencies]
anyhow = { workspace = true }
@ -28,7 +30,8 @@ rustpython-parser-core = { workspace = true }
itertools = { workspace = true }
is-macro = { workspace = true }
log = { workspace = true }
num-bigint = { workspace = true }
malachite-bigint = { workspace = true, optional = true }
num-bigint = { workspace = true, optional = true }
num-traits = { workspace = true }
unicode_names2 = { workspace = true }

View file

@ -28,6 +28,7 @@
//!
//! [Lexical analysis]: https://docs.python.org/3/reference/lexical_analysis.html
use crate::{
ast::bigint::BigInt,
soft_keywords::SoftKeywordTransformer,
string::FStringErrorType,
text_size::{TextLen, TextRange, TextSize},
@ -35,7 +36,6 @@ use crate::{
Mode,
};
use log::trace;
use num_bigint::BigInt;
use num_traits::{Num, Zero};
use std::{char, cmp::Ordering, ops::Index, slice::SliceIndex, str::FromStr};
use unic_emoji_char::is_emoji_presentation;
@ -466,6 +466,13 @@ where
}
}
#[cfg(feature = "full-lexer")]
/// Lex a comment and emit it as a token.
///
/// This is the "full-lexer" variant: the comment token produced by
/// `lex_comment` is forwarded to `emit` so downstream consumers see it.
/// Returns `Err` if `lex_comment` reports a `LexicalError`.
fn lex_and_emit_comment(&mut self) -> Result<(), LexicalError> {
let comment = self.lex_comment()?;
self.emit(comment);
Ok(())
}
/// Discard comment if full-lexer is not enabled.
#[cfg(not(feature = "full-lexer"))]
fn lex_comment(&mut self) {
@ -480,6 +487,13 @@ where
}
}
#[cfg(not(feature = "full-lexer"))]
#[inline]
/// Lex a comment and discard it.
///
/// This is the non-"full-lexer" variant: `lex_comment` here consumes the
/// comment without producing a token, so nothing is emitted. The
/// `Result` return type mirrors the full-lexer variant so call sites can
/// use `self.lex_and_emit_comment()?` unconditionally; this version is
/// infallible and always returns `Ok(())`.
fn lex_and_emit_comment(&mut self) -> Result<(), LexicalError> {
self.lex_comment();
Ok(())
}
/// Lex a string literal.
fn lex_string(&mut self, kind: StringKind) -> LexResult {
let start_pos = self.get_pos();
@ -626,9 +640,7 @@ where
tabs += 1;
}
Some('#') => {
let _comment = self.lex_comment();
#[cfg(feature = "full-lexer")]
self.emit(_comment?);
self.lex_and_emit_comment()?;
spaces = 0;
tabs = 0;
}
@ -775,9 +787,7 @@ where
self.emit(number);
}
'#' => {
let _comment = self.lex_comment();
#[cfg(feature = "full-lexer")]
self.emit(_comment?);
self.lex_and_emit_comment()?;
}
'"' | '\'' => {
let string = self.lex_string(StringKind::String)?;
@ -1360,7 +1370,7 @@ impl std::fmt::Display for LexicalErrorType {
#[cfg(test)]
mod tests {
use super::*;
use num_bigint::BigInt;
use crate::ast::bigint::BigInt;
const WINDOWS_EOL: &str = "\r\n";
const MAC_EOL: &str = "\r";

View file

@ -284,7 +284,6 @@ pub fn parse_program(source: &str, source_path: &str) -> Result<ast::Suite, Pars
/// For example, parsing a single expression denoting the addition of two numbers:
///
/// ```
/// extern crate num_bigint;
/// use rustpython_parser as parser;
/// let expr = parser::parse_expression("1 + 2", "<embedded>");
///

View file

@ -4,7 +4,7 @@
// See also: https://greentreesnakes.readthedocs.io/en/latest/nodes.html#keyword
use crate::{
ast::{self as ast, Ranged},
ast::{self as ast, Ranged, bigint::BigInt},
lexer::{LexicalError, LexicalErrorType},
function::{ArgumentList, parse_args, validate_pos_params, validate_arguments},
context::set_context,
@ -12,7 +12,6 @@ use crate::{
token::{self, StringKind},
text_size::TextSize, parser::optional_range
};
use num_bigint::BigInt;
grammar;

8
parser/src/python.rs generated
View file

@ -1,7 +1,7 @@
// auto-generated: "lalrpop 0.20.0"
// sha3: 4ad71899432c2dd721d23812499e7f4385a10cb13467dbf34b4043443888f745
// sha3: b94dbacf01253c4fc4605d489e98f5929504a78e0baa83381e126895ec61cb59
use crate::{
ast::{self as ast, Ranged},
ast::{self as ast, Ranged, bigint::BigInt},
lexer::{LexicalError, LexicalErrorType},
function::{ArgumentList, parse_args, validate_pos_params, validate_arguments},
context::set_context,
@ -9,7 +9,6 @@ use crate::{
token::{self, StringKind},
text_size::TextSize, parser::optional_range
};
use num_bigint::BigInt;
#[allow(unused_extern_crates)]
extern crate lalrpop_util as __lalrpop_util;
#[allow(unused_imports)]
@ -22,7 +21,7 @@ extern crate alloc;
mod __parse__Top {
use crate::{
ast::{self as ast, Ranged},
ast::{self as ast, Ranged, bigint::BigInt},
lexer::{LexicalError, LexicalErrorType},
function::{ArgumentList, parse_args, validate_pos_params, validate_arguments},
context::set_context,
@ -30,7 +29,6 @@ mod __parse__Top {
token::{self, StringKind},
text_size::TextSize, parser::optional_range
};
use num_bigint::BigInt;
#[allow(unused_extern_crates)]
extern crate lalrpop_util as __lalrpop_util;
#[allow(unused_imports)]

View file

@ -4,8 +4,8 @@
//! loosely based on the token definitions found in the [CPython source].
//!
//! [CPython source]: https://github.com/python/cpython/blob/dfc2e065a2e71011017077e549cd2f9bf4944c54/Include/internal/pycore_token.h
use crate::ast::bigint::BigInt;
use crate::{text_size::TextSize, Mode};
use num_bigint::BigInt;
use std::fmt;
/// The set of tokens the Python source code can be tokenized in.