mirror of https://github.com/RustPython/Parser.git
synced 2025-07-08 05:35:22 +00:00

Always wrap in SoftKeywordTransformer

This commit is contained in:
parent dc628cab8f
commit 2a8aa6f308

4 changed files with 43 additions and 28 deletions
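The net effect on the public API: both tokenizer constructors now take a `Mode`, and their output is always wrapped in `SoftKeywordTransformer`, so callers no longer wrap by hand. A minimal usage sketch against the new signatures shown in the hunks below (the source string and printout are illustrative):

```rust
use rustpython_parser::lexer::make_tokenizer;
use rustpython_parser::mode::Mode;

fn main() {
    // The mode is now an argument to make_tokenizer; soft-keyword
    // handling is applied internally by SoftKeywordTransformer.
    let tokens: Vec<_> = make_tokenizer("x = 'RustPython'", Mode::Module)
        .map(|tok| tok.expect("failed to lex"))
        .collect();
    println!("{:?}", tokens);
}
```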
@@ -13,10 +13,11 @@
 //!
 //! ```
 //! use rustpython_parser::lexer::{make_tokenizer, Tok};
+//! use rustpython_parser::mode::Mode;
 //! use rustpython_parser::token::StringKind;
 //!
 //! let source = "x = 'RustPython'";
-//! let tokens = make_tokenizer(source)
+//! let tokens = make_tokenizer(source, Mode::Module)
 //!     .map(|tok| tok.expect("Failed to lex"))
 //!     .collect::<Vec<_>>();
 //!

@@ -35,6 +36,8 @@
 pub use super::token::{StringKind, Tok};
 use crate::ast::Location;
 use crate::error::{LexicalError, LexicalErrorType};
+use crate::mode::Mode;
+use crate::soft_keywords::SoftKeywordTransformer;
 use num_bigint::BigInt;
 use num_traits::identities::Zero;
 use num_traits::Num;

@@ -197,27 +200,29 @@ pub type LexResult = Result<Spanned, LexicalError>;
 /// # Examples
 ///
 /// ```
+/// use rustpython_parser::mode::Mode;
 /// use rustpython_parser::lexer::{make_tokenizer};
 ///
 /// let source = "def hello(): return 'world'";
-/// let tokenizer = make_tokenizer(source);
+/// let tokenizer = make_tokenizer(source, Mode::Module);
 ///
 /// for token in tokenizer {
 ///     println!("{:?}", token);
 /// }
 /// ```
 #[inline]
-pub fn make_tokenizer(source: &str) -> impl Iterator<Item = LexResult> + '_ {
-    make_tokenizer_located(source, Location::default())
+pub fn make_tokenizer(source: &str, mode: Mode) -> impl Iterator<Item = LexResult> + '_ {
+    make_tokenizer_located(source, mode, Location::default())
 }

 /// Create a new tokenizer from a source string, starting at a given location.
 /// You probably want to use [`make_tokenizer`] instead.
 pub fn make_tokenizer_located(
     source: &str,
+    mode: Mode,
     start_location: Location,
 ) -> impl Iterator<Item = LexResult> + '_ {
-    Lexer::new(source.chars(), start_location)
+    SoftKeywordTransformer::new(Lexer::new(source.chars(), start_location), mode)
 }

 impl<T> Lexer<T>

@@ -1210,6 +1215,7 @@ where
 #[cfg(test)]
 mod tests {
     use super::{make_tokenizer, StringKind, Tok};
+    use crate::mode::Mode;
     use num_bigint::BigInt;

     const WINDOWS_EOL: &str = "\r\n";

@@ -1217,7 +1223,7 @@ mod tests {
     const UNIX_EOL: &str = "\n";

     pub fn lex_source(source: &str) -> Vec<Tok> {
-        let lexer = make_tokenizer(source);
+        let lexer = make_tokenizer(source, Mode::Module);
         lexer.map(|x| x.unwrap().1).collect()
     }

@@ -66,27 +66,29 @@
 //! For example, to get a stream of tokens from a given string, one could do this:
 //!
 //! ```
+//! use rustpython_parser::mode::Mode;
 //! use rustpython_parser::lexer::make_tokenizer;
 //!
 //! let python_source = r#"
 //! def is_odd(i):
 //!     return bool(i & 1)
 //! "#;
-//! let mut tokens = make_tokenizer(python_source);
+//! let mut tokens = make_tokenizer(python_source, Mode::Module);
 //! assert!(tokens.all(|t| t.is_ok()));
 //! ```
 //!
 //! These tokens can be directly fed into the parser to generate an AST:
 //!
 //! ```
-//! use rustpython_parser::parser::{parse_tokens, Mode};
 //! use rustpython_parser::lexer::make_tokenizer;
+//! use rustpython_parser::mode::Mode;
+//! use rustpython_parser::parser::parse_tokens;
 //!
 //! let python_source = r#"
 //! def is_odd(i):
 //!     return bool(i & 1)
 //! "#;
-//! let tokens = make_tokenizer(python_source);
+//! let tokens = make_tokenizer(python_source, Mode::Module);
 //! let ast = parse_tokens(tokens, Mode::Module, "<embedded>");
 //!
 //! assert!(ast.is_ok());

@@ -131,5 +133,5 @@ mod string;
 #[rustfmt::skip]
 mod python;
 mod context;
-pub mod soft_keywords;
+mod soft_keywords;
 pub mod token;

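With `soft_keywords` made private, the transformer drops out of the crate's public surface; code that imported it directly would migrate to the lexer entry points, which now do the wrapping implicitly. A hedged migration sketch (the old import is shown only as a comment):

```rust
// Before this commit (no longer compiles once the module is private):
// use rustpython_parser::soft_keywords::SoftKeywordTransformer;

// After: the wrapping happens inside make_tokenizer.
use rustpython_parser::lexer::make_tokenizer;
use rustpython_parser::mode::Mode;

fn main() {
    let mut tokens = make_tokenizer("pass", Mode::Module);
    assert!(tokens.all(|t| t.is_ok()));
}
```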
@@ -14,7 +14,6 @@

 use crate::lexer::{LexResult, Tok};
 pub use crate::mode::Mode;
-use crate::soft_keywords::SoftKeywordTransformer;
 use crate::{ast, error::ParseError, lexer, python};
 use ast::Location;
 use itertools::Itertools;

@@ -107,7 +106,8 @@ pub fn parse_expression_located(
 /// parsing:
 ///
 /// ```
-/// use rustpython_parser::parser::{parse, Mode};
+/// use rustpython_parser::mode::Mode;
+/// use rustpython_parser::parser::parse;
 ///
 /// let expr = parse("1 + 2", Mode::Expression, "<embedded>");
 /// assert!(expr.is_ok());

@@ -116,7 +116,8 @@ pub fn parse_expression_located(
 /// Alternatively, we can parse a full Python program consisting of multiple lines:
 ///
 /// ```
-/// use rustpython_parser::parser::{parse, Mode};
+/// use rustpython_parser::mode::Mode;
+/// use rustpython_parser::parser::parse;
 ///
 /// let source = r#"
 /// class Greeter:

@@ -139,8 +140,9 @@ pub fn parse(source: &str, mode: Mode, source_path: &str) -> Result<ast::Mod, ParseError>
 /// # Example
 ///
 /// ```
-/// use rustpython_parser::parser::{parse_located, Mode};
 /// use rustpython_parser::ast::Location;
+/// use rustpython_parser::mode::Mode;
+/// use rustpython_parser::parser::parse_located;
 ///
 /// let source = r#"
 /// def fib(i):

@@ -160,7 +162,7 @@ pub fn parse_located(
     source_path: &str,
     location: Location,
 ) -> Result<ast::Mod, ParseError> {
-    let lxr = lexer::make_tokenizer_located(source, location);
+    let lxr = lexer::make_tokenizer_located(source, mode, location);
     parse_tokens(lxr, mode, source_path)
 }

@@ -174,10 +176,11 @@ pub fn parse_located(
 /// them using the [`lexer::make_tokenizer`] function:
 ///
 /// ```
-/// use rustpython_parser::parser::{parse_tokens, Mode};
 /// use rustpython_parser::lexer::make_tokenizer;
+/// use rustpython_parser::mode::Mode;
+/// use rustpython_parser::parser::parse_tokens;
 ///
-/// let expr = parse_tokens(make_tokenizer("1 + 2"), Mode::Expression, "<embedded>");
+/// let expr = parse_tokens(make_tokenizer("1 + 2", Mode::Expression), Mode::Expression, "<embedded>");
 /// assert!(expr.is_ok());
 /// ```
 pub fn parse_tokens(

@@ -190,7 +193,7 @@ pub fn parse_tokens(
         .chain(lxr)
         .filter_ok(|(_, tok, _)| !matches!(tok, Tok::Comment { .. } | Tok::NonLogicalNewline));
     python::TopParser::new()
-        .parse(SoftKeywordTransformer::new(tokenizer, mode).into_iter())
+        .parse(tokenizer.into_iter())
         .map_err(|e| crate::error::parse_error_from_lalrpop(e, source_path))
 }

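Since `make_tokenizer` already returns a wrapped stream, `parse_tokens` now consumes it directly, exactly as the updated doc example shows. One consequence worth noting: a hand-built token stream fed to `parse_tokens` no longer receives soft-keyword processing, since that happens only in the lexer entry points. A short sketch of the intended pipeline:

```rust
use rustpython_parser::lexer::make_tokenizer;
use rustpython_parser::mode::Mode;
use rustpython_parser::parser::parse_tokens;

fn main() {
    // make_tokenizer applies the soft-keyword pass, so its output
    // can be handed straight to the parser.
    let tokens = make_tokenizer("x = 1", Mode::Module);
    let ast = parse_tokens(tokens, Mode::Module, "<embedded>");
    assert!(ast.is_ok());
}
```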
@@ -19,8 +19,8 @@ pub struct SoftKeywordTransformer<I>
 where
     I: Iterator<Item = LexResult>,
 {
-    pub underlying: MultiPeek<I>,
-    pub start_of_line: bool,
+    underlying: MultiPeek<I>,
+    start_of_line: bool,
 }

 impl<I> SoftKeywordTransformer<I>

@@ -84,14 +84,18 @@ where

         self.start_of_line = next.as_ref().map_or(false, |lex_result| {
             lex_result.as_ref().map_or(false, |(_, tok, _)| {
-                matches!(
-                    tok,
-                    Tok::StartModule
-                        | Tok::StartInteractive
-                        | Tok::Newline
-                        | Tok::Indent
-                        | Tok::Dedent
-                )
+                if matches!(tok, Tok::NonLogicalNewline | Tok::Comment { .. }) {
+                    self.start_of_line
+                } else {
+                    matches!(
+                        tok,
+                        Tok::StartModule
+                            | Tok::StartInteractive
+                            | Tok::Newline
+                            | Tok::Indent
+                            | Tok::Dedent
+                    )
+                }
             })
         });

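The second hunk compensates for the new wrapping position: the transformer now sits directly on the raw lexer, before `parse_tokens` filters out `Tok::Comment` and `Tok::NonLogicalNewline`, so it must treat those trivia tokens as transparent when tracking `start_of_line`; otherwise a comment line would hide a following `match` from soft-keyword detection. A hedged sketch of the case this guards (assuming the grammar in this revision accepts `match` statements):

```rust
use rustpython_parser::mode::Mode;
use rustpython_parser::parser::parse;

fn main() {
    // The comment and its non-logical newline pass through the
    // transformer without clearing start_of_line, so `match` on the
    // next logical line is still recognized as a soft keyword.
    let source = "# comment\nmatch x:\n    case 1:\n        pass";
    assert!(parse(source, Mode::Module, "<embedded>").is_ok());
}
```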