Mirror of https://github.com/RustPython/Parser.git (synced 2025-07-25 22:14:25 +00:00)

Always wrap in SoftKeywordTransformer

commit 2a8aa6f308
parent dc628cab8f

4 changed files with 43 additions and 28 deletions
One of the four changed files (crate-level doc comments and module declarations):

@@ -66,27 +66,29 @@
 //! For example, to get a stream of tokens from a given string, one could do this:
 //!
 //! ```
+//! use rustpython_parser::mode::Mode;
 //! use rustpython_parser::lexer::make_tokenizer;
 //!
 //! let python_source = r#"
 //! def is_odd(i):
 //!    return bool(i & 1)
 //! "#;
-//! let mut tokens = make_tokenizer(python_source);
+//! let mut tokens = make_tokenizer(python_source, Mode::Module);
 //! assert!(tokens.all(|t| t.is_ok()));
 //! ```
 //!
 //! These tokens can be directly fed into the parser to generate an AST:
 //!
 //! ```
-//! use rustpython_parser::parser::{parse_tokens, Mode};
 //! use rustpython_parser::lexer::make_tokenizer;
+//! use rustpython_parser::mode::Mode;
+//! use rustpython_parser::parser::parse_tokens;
 //!
 //! let python_source = r#"
 //! def is_odd(i):
 //!    return bool(i & 1)
 //! "#;
-//! let tokens = make_tokenizer(python_source);
+//! let tokens = make_tokenizer(python_source, Mode::Module);
 //! let ast = parse_tokens(tokens, Mode::Module, "<embedded>");
 //!
 //! assert!(ast.is_ok());

@@ -131,5 +133,5 @@ mod string;
 #[rustfmt::skip]
 mod python;
 mod context;
-pub mod soft_keywords;
+mod soft_keywords;
 pub mod token;
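For reference, the updated doc examples combine into the following usage. This is a sketch assembled from the added lines of the diff above: the import paths, the Mode::Module argument, and the "<embedded>" source path all come from this commit's doc comments, while the fn main wrapper and the comments are added here for illustration. Consistent with the commit title, make_tokenizer now takes a Mode up front, presumably so the token stream can always be wrapped in SoftKeywordTransformer internally.

use rustpython_parser::lexer::make_tokenizer;
use rustpython_parser::mode::Mode;
use rustpython_parser::parser::parse_tokens;

fn main() {
    let python_source = r#"
def is_odd(i):
    return bool(i & 1)
"#;
    // Tokenize the source; the parse mode is now required when creating the tokenizer.
    let tokens = make_tokenizer(python_source, Mode::Module);
    // Feed the token stream into the parser to build a module AST.
    let ast = parse_tokens(tokens, Mode::Module, "<embedded>");
    assert!(ast.is_ok());
}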