Mirror of https://github.com/RustPython/Parser.git, synced 2025-07-26 06:24:29 +00:00
make_tokenizer -> lex to integrate terms
We don't distinguish a scanner or tokenizer from a lexer.
parent 39fc23cf92
commit 8580e4ebb5
4 changed files with 27 additions and 27 deletions
@@ -67,20 +67,20 @@
 //!
 //! ```
 //! use rustpython_parser::mode::Mode;
-//! use rustpython_parser::lexer::make_tokenizer;
+//! use rustpython_parser::lexer::lex;
 //!
 //! let python_source = r#"
 //! def is_odd(i):
 //!     return bool(i & 1)
 //! "#;
-//! let mut tokens = make_tokenizer(python_source, Mode::Module);
+//! let mut tokens = lex(python_source, Mode::Module);
 //! assert!(tokens.all(|t| t.is_ok()));
 //! ```
 //!
 //! These tokens can be directly fed into the parser to generate an AST:
 //!
 //! ```
-//! use rustpython_parser::lexer::make_tokenizer;
+//! use rustpython_parser::lexer::lex;
 //! use rustpython_parser::mode::Mode;
 //! use rustpython_parser::parser::parse_tokens;
 //!
@@ -88,7 +88,7 @@
 //! def is_odd(i):
 //!     return bool(i & 1)
 //! "#;
-//! let tokens = make_tokenizer(python_source, Mode::Module);
+//! let tokens = lex(python_source, Mode::Module);
 //! let ast = parse_tokens(tokens, Mode::Module, "<embedded>");
 //!
 //! assert!(ast.is_ok());
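Read together, the two hunks show the full before/after of the public entry point: `make_tokenizer(source, mode)` becomes `lex(source, mode)`, with the rest of the pipeline unchanged. As a sanity check, here is a minimal, self-contained sketch of the post-rename usage, assembled from the doc examples above; it assumes a `rustpython_parser` dependency exposing the `lexer`, `mode`, and `parser` modules exactly as the imports in the diff suggest.

```
use rustpython_parser::lexer::lex;
use rustpython_parser::mode::Mode;
use rustpython_parser::parser::parse_tokens;

fn main() {
    let python_source = r#"
def is_odd(i):
    return bool(i & 1)
"#;

    // Lexing only: `lex` yields an iterator of Result-wrapped tokens.
    let mut tokens = lex(python_source, Mode::Module);
    assert!(tokens.all(|t| t.is_ok()));

    // Lexing + parsing: the token stream feeds straight into `parse_tokens`,
    // which takes the tokens, the parse mode, and a source path for errors.
    let tokens = lex(python_source, Mode::Module);
    let ast = parse_tokens(tokens, Mode::Module, "<embedded>");
    assert!(ast.is_ok());
}
```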