Mirror of https://github.com/Instagram/LibCST.git

commit f051ecca2b (parent 9932a6d339)
temp

20 changed files with 2331 additions and 1733 deletions
15 native/Cargo.lock generated

@@ -450,8 +450,9 @@ dependencies = [
[[package]]
name = "peg"
version = "0.7.0"
source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af728fe826811af3b38c37e93de6d104485953ea373d656eebae53d6987fcd2c"
dependencies = [
 "peg-macros",
 "peg-runtime",

@@ -459,8 +460,9 @@ dependencies = [
[[package]]
name = "peg-macros"
version = "0.7.0"
source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4536be147b770b824895cbad934fccce8e49f14b4c4946eaa46a6e4a12fcdc16"
dependencies = [
 "peg-runtime",
 "proc-macro2",

@@ -469,8 +471,9 @@ dependencies = [
[[package]]
name = "peg-runtime"
version = "0.7.0"
source = "git+https://github.com/kevinmehall/rust-peg#4b146b4b78a80c07e43d7ace2d97f65bfde279a8"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9b0efd3ba03c3a409d44d60425f279ec442bcf0b9e63ff4e410da31c8b0f69f"

[[package]]
name = "plotters"
@@ -30,7 +30,7 @@ trace = ["peg/trace"]
paste = "1.0.4"
pyo3 = "0.14.4"
thiserror = "1.0.23"
peg = { git = "https://github.com/kevinmehall/rust-peg" }
peg = "0.8.0"
chic = "1.2.2"
itertools = "0.10.0"
once_cell = "1.5.2"
@@ -14,7 +14,7 @@ mod nodes;
pub use nodes::*;

mod parser;
use parser::{ParserError, Result};
use parser::{ParserError, Result, TokVec};

pub mod py;

@@ -31,43 +31,45 @@ pub fn tokenize(text: &str) -> Result<Vec<Token>> {
        .map_err(|err| ParserError::TokenizerError(err, text))
}

pub fn parse_tokens_without_whitespace<'a>(
    tokens: Vec<Token<'a>>,
pub fn parse_tokens_without_whitespace<'r, 'a>(
    tokens: TokVec<'a>,
    module_text: &'a str,
    encoding: Option<&str>,
) -> Result<'a, Module<'a>> {
    parser::python::file(&tokens.into(), module_text, encoding)
) -> Result<'a, Module<'r, 'a>> {
    parser::python::file(&tokens, module_text, encoding)
        .map_err(|err| ParserError::ParserError(err, module_text))
}

pub fn parse_module<'a>(
pub fn parse_module<'r, 'a>(
    mut module_text: &'a str,
    encoding: Option<&str>,
) -> Result<'a, Module<'a>> {
) -> Result<'a, Module<'r, 'a>> {
    // Strip UTF-8 BOM
    if let Some(stripped) = module_text.strip_prefix('\u{feff}') {
        module_text = stripped;
    }
    let tokens = tokenize(module_text)?;
    let conf = whitespace_parser::Config::new(module_text, &tokens);
    let tokens: TokVec = tokenize(module_text)?.into();
    let conf = whitespace_parser::Config::new(module_text, tokens.0.as_slice());
    let m = parse_tokens_without_whitespace(tokens, module_text, encoding)?;
    Ok(m.inflate(&conf)?)
}

pub fn parse_statement(text: &str) -> Result<Statement> {
    let tokens = tokenize(text)?;
    let conf = whitespace_parser::Config::new(text, &tokens);
    let stm = parser::python::statement_input(&tokens.into(), text)
    let tokens: TokVec = tokenize(text)?.into();
    let conf = whitespace_parser::Config::new(text, tokens.0.as_slice());
    let stm = parser::python::statement_input(&tokens, text)
        .map_err(|err| ParserError::ParserError(err, text))?;
    Ok(stm.inflate(&conf)?)
}

pub fn parse_expression(text: &str) -> Result<Expression> {
    let tokens = tokenize(text)?;
    let conf = whitespace_parser::Config::new(text, &tokens);
    let expr = parser::python::expression_input(&tokens.into(), text)
pub fn parse_expression<'a>(text: &'a str) -> Result<Expression<'a, 'a>> {
    let tokens: TokVec = tokenize(text)?.into();
    let conf = whitespace_parser::Config::new(text, tokens.0.as_slice());
    let expr = parser::python::expression_input(&tokens, text)
        .map_err(|err| ParserError::ParserError(err, text))?;
    Ok(expr.inflate(&conf)?)

    let inflated = expr.inflate(&conf)?;
    Ok(inflated)
}

// n starts from 1
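For orientation, a minimal sketch of driving the two public entry points whose signatures change above. This is an assumed caller, not code from the commit; in particular, the crate name libcst_native is an assumption.

// Hypothetical caller of the entry points shown in the hunk above.
// Assumption: the library crate is importable as `libcst_native`.
use libcst_native::{parse_expression, parse_module};

fn demo(source: &str) {
    // parse_module tokenizes, builds a TokVec, parses without whitespace,
    // and then inflates whitespace against the original text.
    let module = parse_module(source, None).expect("failed to parse module");

    // parse_expression follows the same pipeline for a single expression.
    let expr = parse_expression("a + b").expect("failed to parse expression");

    // Both results borrow the text they were parsed from.
    let _ = (module, expr);
}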
5 native/libcst/src/nodes/common.rs Normal file

@@ -0,0 +1,5 @@
use std::rc::Rc;

use crate::tokenizer::Token;

pub(crate) type TokenRef<'r, 'a> = &'r Rc<Token<'a>>;
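A self-contained illustration (not from the diff) of what the two lifetimes on this alias buy: 'r borrows the Rc that lives in the token vector, while 'a still reaches the original source text. The Token struct below is a stand-in for the crate's real tokenizer type.

use std::rc::Rc;

// Stand-in for the tokenizer's Token; only the source slice matters here.
pub struct Token<'a> {
    pub string: &'a str,
}

// Same shape as the alias added above.
pub type TokenRef<'r, 'a> = &'r Rc<Token<'a>>;

// A node can keep a TokenRef without cloning the Rc: it borrows the Rc
// stored in the token vector ('r) and can still hand back text with the
// longer source lifetime ('a).
pub fn token_text<'r, 'a>(tok: TokenRef<'r, 'a>) -> &'a str {
    tok.string
}

fn main() {
    let source = String::from("pass\n");
    let tokens: Vec<Rc<Token>> = vec![Rc::new(Token { string: &source })];
    assert_eq!(token_text(&tokens[0]), "pass\n");
}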
File diff suppressed because it is too large
@@ -12,12 +12,12 @@ use crate::{
    Param, Parameters, StarArg,
};

pub(crate) fn adjust_parameters_trailing_whitespace<'a>(
pub(crate) fn adjust_parameters_trailing_whitespace<'r, 'a>(
    config: &Config<'a>,
    parameters: &mut Parameters<'a>,
    parameters: &mut Parameters<'r, 'a>,
    next_tok: &Token<'a>,
) -> Result<()> {
    let do_adjust = |param: &mut Param<'a>| -> Result<()> {
    let do_adjust = |param: &mut Param<'r, 'a>| -> Result<()> {
        let whitespace_after =
            parse_parenthesizable_whitespace(config, &mut next_tok.whitespace_before.borrow_mut())?;
        if param.comma.is_none() {
@@ -48,4 +48,7 @@ pub use codegen::{Codegen, CodegenState};
mod traits;
pub use traits::{Inflate, ParenthesizedNode, WithComma, WithLeadingLines};

pub(crate) mod common;
pub(crate) mod inflate_helpers;

pub(crate) use op::*;
@@ -4,13 +4,12 @@
// LICENSE file in the root directory of this source tree.

use std::mem::swap;
use std::rc::Rc;

use crate::tokenizer::whitespace_parser::parse_empty_lines;
use crate::tokenizer::Token;
use crate::{
    nodes::{
        codegen::{Codegen, CodegenState},
        common::TokenRef,
        statement::Statement,
        whitespace::EmptyLine,
    },

@@ -20,11 +19,9 @@ use libcst_derive::IntoPy;
use super::traits::{Inflate, Result, WithLeadingLines};

type TokenRef<'a> = Rc<Token<'a>>;

#[derive(Debug, Eq, PartialEq, IntoPy)]
pub struct Module<'a> {
    pub body: Vec<Statement<'a>>,
pub struct Module<'r, 'a> {
    pub body: Vec<Statement<'r, 'a>>,
    pub header: Vec<EmptyLine<'a>>,
    pub footer: Vec<EmptyLine<'a>>,

@@ -33,10 +30,10 @@ pub struct Module<'a> {
    pub has_trailing_newline: bool,
    pub encoding: String,

    pub(crate) eof_tok: TokenRef<'a>,
    pub(crate) eof_tok: TokenRef<'r, 'a>,
}

impl<'a> Codegen<'a> for Module<'a> {
impl<'r, 'a> Codegen<'a> for Module<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        for h in &self.header {
            h.codegen(state);

@@ -50,7 +47,8 @@ impl<'a> Codegen<'a> for Module<'a> {
    }
}

impl<'a> Inflate<'a> for Module<'a> {
impl<'r, 'a> Inflate<'a> for Module<'r, 'a> {
    type Inflated = Self;
    fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
        self.default_indent = config.default_indent;
        self.default_newline = config.default_newline;
@@ -3,29 +3,28 @@
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.

use std::rc::Rc;

use super::{whitespace::ParenthesizableWhitespace, Codegen, CodegenState};
use crate::{
    nodes::common::TokenRef,
    nodes::traits::{Inflate, Result},
    tokenizer::{
        whitespace_parser::{parse_parenthesizable_whitespace, parse_simple_whitespace, Config},
        Token,
    tokenizer::whitespace_parser::{
        parse_parenthesizable_whitespace, parse_simple_whitespace, Config,
    },
};
use libcst_derive::IntoPy;

type TokenRef<'a> = Rc<Token<'a>>;

#[derive(Debug, Eq, PartialEq, Clone, IntoPy)]
pub struct Semicolon<'a> {
    /// Any space that appears directly before this semicolon.
    pub whitespace_before: ParenthesizableWhitespace<'a>,
    /// Any space that appears directly after this semicolon.
    pub whitespace_after: ParenthesizableWhitespace<'a>,
}

    #[skip_py]
    pub(crate) tok: TokenRef<'a>,
#[derive(Debug, Eq, PartialEq, Clone)]
pub(crate) struct SemicolonTokens<'r, 'a> {
    pub inner: Semicolon<'a>,
    pub tok: TokenRef<'r, 'a>,
}

impl<'a> Codegen<'a> for Semicolon<'a> {

@@ -36,12 +35,13 @@ impl<'a> Codegen<'a> for Semicolon<'a> {
    }
}

impl<'a> Inflate<'a> for Semicolon<'a> {
impl<'r, 'a> Inflate<'a> for SemicolonTokens<'r, 'a> {
    type Inflated = Self;
    fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
        self.whitespace_before = ParenthesizableWhitespace::SimpleWhitespace(
        self.inner.whitespace_before = ParenthesizableWhitespace::SimpleWhitespace(
            parse_simple_whitespace(config, &mut (*self.tok).whitespace_before.borrow_mut())?,
        );
        self.whitespace_after = ParenthesizableWhitespace::SimpleWhitespace(
        self.inner.whitespace_after = ParenthesizableWhitespace::SimpleWhitespace(
            parse_simple_whitespace(config, &mut (*self.tok).whitespace_after.borrow_mut())?,
        );
        Ok(self)

@@ -49,17 +49,17 @@ impl<'a> Inflate<'a> for Semicolon<'a> {
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub struct Comma<'a> {
pub struct Comma<'r, 'a> {
    /// Any space that appears directly before this comma.
    pub whitespace_before: ParenthesizableWhitespace<'a>,
    /// Any space that appears directly after this comma.
    pub whitespace_after: ParenthesizableWhitespace<'a>,

    #[skip_py]
    pub(crate) tok: TokenRef<'a>,
    pub(crate) tok: TokenRef<'r, 'a>,
}

impl<'a> Codegen<'a> for Comma<'a> {
impl<'r, 'a> Codegen<'a> for Comma<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        self.whitespace_before.codegen(state);
        state.add_token(",");

@@ -67,7 +67,8 @@ impl<'a> Codegen<'a> for Comma<'a> {
    }
}

impl<'a> Inflate<'a> for Comma<'a> {
impl<'r, 'a> Inflate<'a> for Comma<'r, 'a> {
    type Inflated = Self;
    fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
        self.whitespace_before = parse_parenthesizable_whitespace(
            config,

@@ -81,7 +82,7 @@ impl<'a> Inflate<'a> for Comma<'a> {
    }
}

impl<'a> Comma<'a> {
impl<'r, 'a> Comma<'r, 'a> {
    pub fn inflate_before(mut self, config: &Config<'a>) -> Result<Self> {
        self.whitespace_before = parse_parenthesizable_whitespace(
            config,

@@ -92,17 +93,17 @@ impl<'a> Comma<'a> {
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub struct AssignEqual<'a> {
pub struct AssignEqual<'r, 'a> {
    /// Any space that appears directly before this equal sign.
    pub whitespace_before: ParenthesizableWhitespace<'a>,
    /// Any space that appears directly after this equal sign.
    pub whitespace_after: ParenthesizableWhitespace<'a>,

    #[skip_py]
    pub(crate) tok: TokenRef<'a>,
    pub(crate) tok: TokenRef<'r, 'a>,
}

impl<'a> Codegen<'a> for AssignEqual<'a> {
impl<'r, 'a> Codegen<'a> for AssignEqual<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        self.whitespace_before.codegen(state);
        state.add_token("=");

@@ -110,7 +111,8 @@ impl<'a> Codegen<'a> for AssignEqual<'a> {
    }
}

impl<'a> Inflate<'a> for AssignEqual<'a> {
impl<'r, 'a> Inflate<'a> for AssignEqual<'r, 'a> {
    type Inflated = Self;
    fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
        self.whitespace_before = parse_parenthesizable_whitespace(
            config,

@@ -125,17 +127,17 @@ impl<'a> Inflate<'a> for AssignEqual<'a> {
}

#[derive(Debug, Eq, PartialEq, Clone, IntoPy)]
pub struct Dot<'a> {
pub struct Dot<'r, 'a> {
    /// Any space that appears directly before this dot.
    pub whitespace_before: ParenthesizableWhitespace<'a>,
    /// Any space that appears directly after this dot.
    pub whitespace_after: ParenthesizableWhitespace<'a>,

    #[skip_py]
    pub(crate) tok: TokenRef<'a>,
    pub(crate) tok: TokenRef<'r, 'a>,
}

impl<'a> Codegen<'a> for Dot<'a> {
impl<'r, 'a> Codegen<'a> for Dot<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        self.whitespace_before.codegen(state);
        state.add_token(".");

@@ -143,7 +145,8 @@ impl<'a> Codegen<'a> for Dot<'a> {
    }
}

impl<'a> Inflate<'a> for Dot<'a> {
impl<'r, 'a> Inflate<'a> for Dot<'r, 'a> {
    type Inflated = Self;
    fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
        self.inflate_before(config)?;
        self.inflate_after(config)?;

@@ -151,7 +154,7 @@ impl<'a> Inflate<'a> for Dot<'a> {
    }
}

impl<'a> Dot<'a> {
impl<'r, 'a> Dot<'r, 'a> {
    fn inflate_before(&mut self, config: &Config<'a>) -> Result<()> {
        self.whitespace_before = parse_parenthesizable_whitespace(
            config,
@@ -172,43 +175,44 @@ impl<'a> Dot<'a> {
#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub struct ImportStar {}

impl<'a> Codegen<'a> for ImportStar {
impl<'r, 'a> Codegen<'a> for ImportStar {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        state.add_token("*");
    }
}

impl<'a> Inflate<'a> for ImportStar {
impl<'r, 'a> Inflate<'a> for ImportStar {
    type Inflated = Self;
    fn inflate(self, _config: &Config<'a>) -> Result<Self> {
        Ok(self)
    }
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub enum UnaryOp<'a> {
pub enum UnaryOp<'r, 'a> {
    Plus {
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Minus {
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    BitInvert {
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Not {
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
}

impl<'a> Codegen<'a> for UnaryOp<'a> {
impl<'r, 'a> Codegen<'a> for UnaryOp<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        let (tok, whitespace_after) = match self {
            Self::Plus {

@@ -229,7 +233,8 @@ impl<'a> Codegen<'a> for UnaryOp<'a> {
    }
}

impl<'a> Inflate<'a> for UnaryOp<'a> {
impl<'r, 'a> Inflate<'a> for UnaryOp<'r, 'a> {
    type Inflated = Self;
    fn inflate(self, config: &Config<'a>) -> Result<Self> {
        Ok(match self {
            Self::Plus { tok, .. } => {

@@ -277,22 +282,22 @@ impl<'a> Inflate<'a> for UnaryOp<'a> {
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub enum BooleanOp<'a> {
pub enum BooleanOp<'r, 'a> {
    And {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Or {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
}

impl<'a> Codegen<'a> for BooleanOp<'a> {
impl<'r, 'a> Codegen<'a> for BooleanOp<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        let (tok, ws_bef, ws_aft) = match self {
            Self::And {

@@ -312,7 +317,8 @@ impl<'a> Codegen<'a> for BooleanOp<'a> {
    }
}

impl<'a> Inflate<'a> for BooleanOp<'a> {
impl<'r, 'a> Inflate<'a> for BooleanOp<'r, 'a> {
    type Inflated = Self;
    fn inflate(self, config: &Config<'a>) -> Result<Self> {
        Ok(match self {
            Self::And { tok, .. } => {
@@ -350,88 +356,88 @@ impl<'a> Inflate<'a> for BooleanOp<'a> {
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub enum BinaryOp<'a> {
pub enum BinaryOp<'r, 'a> {
    Add {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Subtract {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Multiply {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Divide {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    FloorDivide {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Modulo {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Power {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    LeftShift {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    RightShift {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    BitOr {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    BitAnd {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    BitXor {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    MatrixMultiply {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
}

impl<'a> Codegen<'a> for BinaryOp<'a> {
impl<'r, 'a> Codegen<'a> for BinaryOp<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        let (whitespace_before, whitespace_after, tok) = match self {
            Self::Add {

@@ -506,7 +512,8 @@ impl<'a> Codegen<'a> for BinaryOp<'a> {
    }
}

impl<'a> Inflate<'a> for BinaryOp<'a> {
impl<'r, 'a> Inflate<'a> for BinaryOp<'r, 'a> {
    type Inflated = Self;
    fn inflate(self, config: &Config<'a>) -> Result<Self> {
        Ok(match self {
            Self::Add { tok, .. } => {
@@ -709,76 +716,76 @@ impl<'a> Inflate<'a> for BinaryOp<'a> {
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub enum CompOp<'a> {
pub enum CompOp<'r, 'a> {
    LessThan {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    GreaterThan {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    LessThanEqual {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    GreaterThanEqual {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    Equal {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    NotEqual {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    In {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    NotIn {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_between: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        not_tok: TokenRef<'a>,
        not_tok: TokenRef<'r, 'a>,
        #[skip_py]
        in_tok: TokenRef<'a>,
        in_tok: TokenRef<'r, 'a>,
    },
    Is {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    IsNot {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_between: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        is_tok: TokenRef<'a>,
        is_tok: TokenRef<'r, 'a>,
        #[skip_py]
        not_tok: TokenRef<'a>,
        not_tok: TokenRef<'r, 'a>,
    },
}

impl<'a> Codegen<'a> for CompOp<'a> {
impl<'r, 'a> Codegen<'a> for CompOp<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        let (bef, aft, first_tok, between) = match self {
            Self::LessThan {

@@ -856,7 +863,8 @@ impl<'a> Codegen<'a> for CompOp<'a> {
    }
}

impl<'a> Inflate<'a> for CompOp<'a> {
impl<'r, 'a> Inflate<'a> for CompOp<'r, 'a> {
    type Inflated = Self;
    fn inflate(self, config: &Config<'a>) -> Result<Self> {
        Ok(match self {
            Self::LessThan { tok, .. } => {
@@ -1030,15 +1038,16 @@ impl<'a> Inflate<'a> for CompOp<'a> {
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub struct Colon<'a> {
pub struct Colon<'r, 'a> {
    pub whitespace_before: ParenthesizableWhitespace<'a>,
    pub whitespace_after: ParenthesizableWhitespace<'a>,

    #[skip_py]
    pub(crate) tok: TokenRef<'a>,
    pub(crate) tok: TokenRef<'r, 'a>,
}

impl<'a> Inflate<'a> for Colon<'a> {
impl<'r, 'a> Inflate<'a> for Colon<'r, 'a> {
    type Inflated = Self;
    fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
        self.whitespace_before = parse_parenthesizable_whitespace(
            config,

@@ -1052,7 +1061,7 @@ impl<'a> Inflate<'a> for Colon<'a> {
    }
}

impl<'a> Codegen<'a> for Colon<'a> {
impl<'r, 'a> Codegen<'a> for Colon<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        self.whitespace_before.codegen(state);
        state.add_token(":");

@@ -1061,88 +1070,89 @@ impl<'a> Codegen<'a> for Colon<'a> {
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub enum AugOp<'a> {
pub enum AugOp<'r, 'a> {
    AddAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    SubtractAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    MultiplyAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    MatrixMultiplyAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    DivideAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    ModuloAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    BitAndAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    BitOrAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    BitXorAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    LeftShiftAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    RightShiftAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    PowerAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
    FloorDivideAssign {
        whitespace_before: ParenthesizableWhitespace<'a>,
        whitespace_after: ParenthesizableWhitespace<'a>,
        #[skip_py]
        tok: TokenRef<'a>,
        tok: TokenRef<'r, 'a>,
    },
}

impl<'a> Inflate<'a> for AugOp<'a> {
impl<'r, 'a> Inflate<'a> for AugOp<'r, 'a> {
    type Inflated = Self;
    fn inflate(self, config: &Config<'a>) -> Result<Self> {
        Ok(match self {
            Self::AddAssign { tok, .. } => {
@@ -1344,7 +1354,7 @@ impl<'a> Inflate<'a> for AugOp<'a> {
    }
}

impl<'a> Codegen<'a> for AugOp<'a> {
impl<'r, 'a> Codegen<'a> for AugOp<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        let (tok, bef, aft) = match self {
            Self::AddAssign {

@@ -1420,14 +1430,15 @@ impl<'a> Codegen<'a> for AugOp<'a> {
}

#[derive(Debug, PartialEq, Eq, Clone, IntoPy)]
pub struct BitOr<'a> {
pub struct BitOr<'r, 'a> {
    pub whitespace_before: ParenthesizableWhitespace<'a>,
    pub whitespace_after: ParenthesizableWhitespace<'a>,

    pub(crate) tok: TokenRef<'a>,
    pub(crate) tok: TokenRef<'r, 'a>,
}

impl<'a> Inflate<'a> for BitOr<'a> {
impl<'r, 'a> Inflate<'a> for BitOr<'r, 'a> {
    type Inflated = Self;
    fn inflate(mut self, config: &Config<'a>) -> Result<Self> {
        self.whitespace_before = parse_parenthesizable_whitespace(
            config,

@@ -1441,7 +1452,7 @@ impl<'a> Inflate<'a> for BitOr<'a> {
    }
}

impl<'a> Codegen<'a> for BitOr<'a> {
impl<'r, 'a> Codegen<'a> for BitOr<'r, 'a> {
    fn codegen(&self, state: &mut CodegenState<'a>) {
        self.whitespace_before.codegen(state);
        state.add_token("|");
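One pattern worth calling out from this file: Semicolon keeps only whitespace, and its Inflate impl moves onto a wrapper (SemicolonTokens) that pairs the node with the token it was parsed from. Below is a reduced, self-contained sketch of that shape, with Strings standing in for the real whitespace and token types.

use std::rc::Rc;

// Stand-in for the tokenizer's Token: just the whitespace around it.
pub struct Token {
    pub whitespace_before: String,
    pub whitespace_after: String,
}
pub type TokenRef<'r> = &'r Rc<Token>;

// The node itself carries only what codegen needs.
#[derive(Default)]
pub struct Semicolon {
    pub whitespace_before: String,
    pub whitespace_after: String,
}

// The deflated wrapper pairs the node with its token.
pub struct SemicolonTokens<'r> {
    pub inner: Semicolon,
    pub tok: TokenRef<'r>,
}

impl<'r> SemicolonTokens<'r> {
    // Analogue of the Inflate impl above: copy whitespace off the token
    // into the inner node.
    pub fn inflate(mut self) -> Self {
        self.inner.whitespace_before = self.tok.whitespace_before.clone();
        self.inner.whitespace_after = self.tok.whitespace_after.clone();
        self
    }
}

fn main() {
    let tok = Rc::new(Token {
        whitespace_before: " ".into(),
        whitespace_after: "\n".into(),
    });
    let node = SemicolonTokens { inner: Semicolon::default(), tok: &tok }.inflate();
    assert_eq!(node.inner.whitespace_before, " ");
}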
File diff suppressed because it is too large
@@ -9,13 +9,13 @@ use crate::{
};
use std::ops::Deref;

pub trait WithComma<'a> {
    fn with_comma(self, comma: Comma<'a>) -> Self;
pub trait WithComma<'r, 'a> {
    fn with_comma(self, comma: Comma<'r, 'a>) -> Self;
}

pub trait ParenthesizedNode<'a> {
    fn lpar(&self) -> &Vec<LeftParen<'a>>;
    fn rpar(&self) -> &Vec<RightParen<'a>>;
pub trait ParenthesizedNode<'r, 'a: 'r> {
    fn lpar(&self) -> &Vec<LeftParen<'r, 'a>>;
    fn rpar(&self) -> &Vec<RightParen<'r, 'a>>;

    fn parenthesize<F>(&self, state: &mut CodegenState<'a>, f: F)
    where

@@ -30,14 +30,14 @@ pub trait ParenthesizedNode<'a> {
    }
}

    fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self;
    fn with_parens(self, left: LeftParen<'r, 'a>, right: RightParen<'r, 'a>) -> Self;
}

impl<'a, T: ParenthesizedNode<'a>> ParenthesizedNode<'a> for Box<T> {
    fn lpar(&self) -> &Vec<LeftParen<'a>> {
impl<'r, 'a: 'r, T: ParenthesizedNode<'r, 'a>> ParenthesizedNode<'r, 'a> for Box<T> {
    fn lpar(&self) -> &Vec<LeftParen<'r, 'a>> {
        self.deref().lpar()
    }
    fn rpar(&self) -> &Vec<RightParen<'a>> {
    fn rpar(&self) -> &Vec<RightParen<'r, 'a>> {
        self.deref().rpar()
    }
    fn parenthesize<F>(&self, state: &mut CodegenState<'a>, f: F)

@@ -46,7 +46,7 @@ impl<'a, T: ParenthesizedNode<'a>> ParenthesizedNode<'a> for Box<T> {
    {
        self.deref().parenthesize(state, f)
    }
    fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self {
    fn with_parens(self, left: LeftParen<'r, 'a>, right: RightParen<'r, 'a>) -> Self {
        Self::new((*self).with_parens(left, right))
    }
}

@@ -61,17 +61,20 @@ pub trait Inflate<'a>
where
    Self: Sized,
{
    fn inflate(self, config: &Config<'a>) -> Result<Self>;
    type Inflated;
    fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated>;
}

impl<'a, T: Inflate<'a>> Inflate<'a> for Option<T> {
    fn inflate(self, config: &Config<'a>) -> Result<Self> {
    type Inflated = Option<T::Inflated>;
    fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> {
        self.map(|x| x.inflate(config)).transpose()
    }
}

impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box<T> {
    fn inflate(self, config: &Config<'a>) -> Result<Self> {
    type Inflated = Box<T::Inflated>;
    fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> {
        match (*self).inflate(config) {
            Ok(a) => Ok(Box::new(a)),
            Err(e) => Err(e),

@@ -80,7 +83,8 @@ impl<'a, T: Inflate<'a> + ?Sized> Inflate<'a> for Box<T> {
}

impl<'a, T: Inflate<'a>> Inflate<'a> for Vec<T> {
    fn inflate(self, config: &Config<'a>) -> Result<Self> {
    type Inflated = Vec<T::Inflated>;
    fn inflate(self, config: &Config<'a>) -> Result<Self::Inflated> {
        self.into_iter().map(|item| item.inflate(config)).collect()
    }
}
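To make the shape of the new trait easier to see outside the crate, here is a condensed, self-contained restatement; Config and Result are simplified stand-ins for the real whitespace-parser types, and the lifetimes are dropped.

// Simplified stand-ins for the crate's Config and error types.
pub struct Config;
pub type Result<T> = std::result::Result<T, String>;

// The trait now has an associated `Inflated` output type instead of always
// returning Self, which is what allows a deflated, token-bearing node to
// inflate into a different, whitespace-bearing node type.
pub trait Inflate: Sized {
    type Inflated;
    fn inflate(self, config: &Config) -> Result<Self::Inflated>;
}

// The blanket impls mirror the ones in the hunk above.
impl<T: Inflate> Inflate for Option<T> {
    type Inflated = Option<T::Inflated>;
    fn inflate(self, config: &Config) -> Result<Self::Inflated> {
        self.map(|x| x.inflate(config)).transpose()
    }
}

impl<T: Inflate> Inflate for Vec<T> {
    type Inflated = Vec<T::Inflated>;
    fn inflate(self, config: &Config) -> Result<Self::Inflated> {
        self.into_iter().map(|x| x.inflate(config)).collect()
    }
}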
@@ -6,10 +6,9 @@
use pyo3::types::{IntoPyDict, PyModule};
use pyo3::{IntoPy, PyErr, PyErrArguments, Python};

use crate::parser::grammar::TokVec;
use crate::parser::grammar::ParseLoc;
use crate::tokenizer::whitespace_parser::WhitespaceError;
use crate::tokenizer::TokError;
use peg::Parse;
use thiserror::Error;

#[allow(clippy::enum_variant_names)]

@@ -18,10 +17,7 @@ pub enum ParserError<'a> {
    #[error("tokenizer error: {0}")]
    TokenizerError(TokError<'a>, &'a str),
    #[error("parser error: {0}")]
    ParserError(
        peg::error::ParseError<<TokVec<'a> as Parse>::PositionRepr>,
        &'a str,
    ),
    ParserError(peg::error::ParseError<ParseLoc>, &'a str),
    #[error(transparent)]
    WhitespaceError(#[from] WhitespaceError),
    #[error("invalid operator")]
File diff suppressed because it is too large
@@ -8,4 +8,5 @@ mod grammar;
mod numbers;

pub use errors::ParserError;
pub(crate) use grammar::TokVec;
pub use grammar::{python, Result};
@@ -3,6 +3,8 @@
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree

use std::rc::Rc;

use crate::nodes::{
    Comment, EmptyLine, Fakeness, Newline, ParenthesizableWhitespace, ParenthesizedWhitespace,
    SimpleWhitespace, TrailingWhitespace,

@@ -65,7 +67,7 @@ pub struct Config<'a> {
}

impl<'a> Config<'a> {
    pub fn new(input: &'a str, tokens: &[Token<'a>]) -> Self {
    pub fn new(input: &'a str, tokens: &[Rc<Token<'a>>]) -> Self {
        let mut default_indent = " ";
        for tok in tokens {
            if tok.r#type == TokType::Indent {
@@ -56,7 +56,7 @@ fn impl_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream {
    let ident = &ast.ident;
    let generics = &ast.generics;
    let gen = quote! {
        impl<'a> Codegen<'a> for #ident #generics {
        impl #generics Codegen<'a> for #ident #generics {
            fn codegen(&self, state: &mut CodegenState<'a>) {
                match self {
                    #(Self::#varnames(x) => x.codegen(state),)*
404 native/libcst_derive/src/cstnode.rs Normal file

@@ -0,0 +1,404 @@
// Copyright (c) Meta Platforms, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree

use proc_macro::{Span, TokenStream};
use quote::{format_ident, quote, quote_spanned, ToTokens};
use syn::{
    self,
    punctuated::{Pair, Punctuated},
    spanned::Spanned,
    token::Comma,
    AngleBracketedGenericArguments, Attribute, Data, DataEnum, DataStruct, DeriveInput, Field,
    Fields, FieldsNamed, FieldsUnnamed, GenericArgument, GenericParam, Generics, Ident, Lifetime,
    LifetimeDef, ParenthesizedGenericArguments, Path, PathArguments, PathSegment, Token, Type,
    TypePath, Visibility,
};

pub(crate) fn impl_cst_node(ast: DeriveInput) -> TokenStream {
    match ast.data {
        Data::Enum(e) => impl_enum(ast.attrs, ast.vis, ast.ident, ast.generics, e),
        Data::Struct(s) => impl_struct(ast.attrs, ast.vis, ast.ident, ast.generics, s),
        Data::Union(u) => quote_spanned! {
            u.union_token.span() =>
            compile_error!("Union type is not supported")
        }
        .into(),
    }
}

// enum Foo<'a> {
//     Variant(Variant<'a>),
// }
// =>
// enum Foo<'a> {
//     Variant(Variant<'a>),
// }
// enum DeflatedFoo<'r, 'a> {
//     Variant(DeflatedVariant<'r, 'a>),
// }

fn impl_enum(
    attrs: Vec<Attribute>,
    vis: Visibility,
    ident: Ident,
    generics: Generics,
    mut e: DataEnum,
) -> TokenStream {
    let deflated_ident = format_ident!("Deflated{}", &ident);
    let mut deflated_generics = generics.clone();
    let mut added_lifetime = false;
    let mut deflated_variant_tokens = vec![];

    for var in e.variants.iter_mut() {
        let (inflated_fields, deflated_fields, extra_lifetime) = impl_fields(var.fields.clone());
        added_lifetime |= extra_lifetime;
        var.fields = deflated_fields;
        deflated_variant_tokens.push(var.to_token_stream());
        var.fields = inflated_fields;
    }
    if added_lifetime {
        deflated_generics.params.insert(
            0,
            GenericParam::Lifetime(LifetimeDef::new(Lifetime::new(
                "'r",
                Span::call_site().into(),
            ))),
        );
    }
    let inflated = DeriveInput {
        attrs,
        vis,
        ident,
        generics,
        data: Data::Enum(e),
    };

    let gen = quote! {
        #inflated
        enum #deflated_ident#deflated_generics {
            #(#deflated_variant_tokens)*
        }
    };
    gen.into()
}
// pub struct Foo<'a> {
//     pub bar: Bar<'a>,
//     pub value: &'a str,
//     pub whitespace_after: SimpleWhitespace<'a>,
//     pub(crate) tok: Option<TokenRef>,
// }
// =>
// pub struct Foo<'a> {
//     pub bar: Bar<'a>,
//     pub value: &'a str,
//     pub whitespace_after: SimpleWhitespace<'a>,
// }
// struct DeflatedFoo<'r, 'a> {
//     pub bar: DeflatedBar<'r, 'a>,
//     pub value: &'a str,
//     pub tok: Option<TokenRef<'r, 'a>>
// }

fn impl_struct(
    attrs: Vec<Attribute>,
    vis: Visibility,
    ident: Ident,
    generics: Generics,
    mut s: DataStruct,
) -> TokenStream {
    let deflated_ident = format_ident!("Deflated{}", &ident);
    let mut deflated_generics = generics.clone();

    let (inflated_fields, deflated_fields, added_lifetime) = impl_fields(s.fields);
    s.fields = inflated_fields;
    if added_lifetime {
        deflated_generics.params.insert(
            0,
            GenericParam::Lifetime(LifetimeDef::new(Lifetime::new(
                "'r",
                Span::call_site().into(),
            ))),
        );
    }
    let inflated = DeriveInput {
        attrs,
        vis,
        ident,
        generics,
        data: Data::Struct(s),
    };

    let gen = quote! {
        #inflated

        struct #deflated_ident#deflated_generics
        #deflated_fields

    };
    gen.into()
}

fn impl_fields(fields: Fields) -> (Fields, Fields, bool) {
    match &fields {
        Fields::Unnamed(fs) => {
            let (deflated_fields, added_lifetime) = impl_unnamed_fields(fs.clone());
            (fields, Fields::Unnamed(deflated_fields), added_lifetime)
        }
        Fields::Named(fs) => impl_named_fields(fs.clone()),
        Fields::Unit => (Fields::Unit, Fields::Unit, false),
    }
}

fn impl_unnamed_fields(mut deflated_fields: FieldsUnnamed) -> (FieldsUnnamed, bool) {
    let mut added_lifetime = false;
    for f in deflated_fields.unnamed.iter_mut() {
        if let Type::Path(TypePath { path, .. }) = &mut f.ty {
            if let Some(seg) = path.segments.last_mut() {
                seg.ident = format_ident!("Deflated{}", seg.ident);
                if let PathArguments::AngleBracketed(AngleBracketedGenericArguments {
                    args, ..
                }) = &mut seg.arguments
                {
                    added_lifetime = true;
                    args.insert(
                        0,
                        GenericArgument::Lifetime(Lifetime::new("'r", Span::call_site().into())),
                    );
                }
            }
        }
    }
    (deflated_fields, added_lifetime)
}
fn impl_named_fields(mut fields: FieldsNamed) -> (Fields, Fields, bool) {
    let mut deflated_fields = fields.clone();
    let mut added_lifetime = false;
    let span: Span = Span::call_site();
    // Drop whitespace fields from deflated fields
    // And add lifetimes to tokenref fields
    deflated_fields.named = deflated_fields
        .named
        .into_pairs()
        .filter(|pair| {
            let id = pair.value().ident.as_ref().unwrap();
            !format!("{}", id).starts_with("whitespace_")
        })
        .map(|pair| {
            added_lifetime = true;
            add_lifetimes(pair, span)
        })
        .collect();

    // Drop tokenref fields from inflated fields
    fields.named = fields
        .named
        .into_pairs()
        .filter(|pair| !is_token_ref(pair.value()))
        .collect();
    (
        Fields::Named(fields),
        Fields::Named(deflated_fields),
        added_lifetime,
    )
}

fn is_token_ref(field: &Field) -> bool {
    if let Type::Path(path) = &field.ty {
        if let Some(id) = path.path.segments.last().map(|seg| &seg.ident) {
            return format!("{}", id) == "TokenRef";
        }
    }
    false
}

// foo::bar -> foo::bar<'r, 'a>
fn add_lifetimes(mut pair: Pair<Field, Comma>, span: Span) -> Pair<Field, Comma> {
    if let Some(seg) = rightmost_path_segment_mut(&mut pair.value_mut().ty) {
        let lifetime_argument = GenericArgument::Lifetime(Lifetime::new("'r", span.into()));
        match seg.arguments {
            PathArguments::None => {
                let mut generic_args = Punctuated::<_, _>::new();
                generic_args.push(lifetime_argument);
                seg.arguments = PathArguments::AngleBracketed(AngleBracketedGenericArguments {
                    colon2_token: None,
                    lt_token: Token![<](span.into()),
                    gt_token: Token![>](span.into()),
                    args: generic_args,
                })
            }
            PathArguments::AngleBracketed(AngleBracketedGenericArguments {
                ref mut args, ..
            }) => {
                args.insert(0, lifetime_argument);
            }
            _ => todo!(),
        }
    }
    pair
}
// fn rightmost_path_segment(ty: &Type) -> Option<&PathSegment> {
//     if let Type::Path(TypePath { path, .. }) = ty {
//         if let Some(seg) = path.segments.last() {
//             if let PathArguments::AngleBracketed(AngleBracketedGenericArguments { args, .. }) =
//                 &seg.arguments
//             {
//                 if let Some(GenericArgument::Type(t)) = args.last() {
//                     return rightmost_path_segment(t);
//                 }
//             }
//             return Some(seg);
//         }
//     }
//     None
// }

type Link = Option<Box<Node>>;

struct Node {
    next: Link,
}

struct Recursive {
    root: Link,
}

// fn back(node: &mut Node) -> &mut Link {
//     let mut anchor = &mut Some(Box::new(node));
//     loop {
//         match { anchor } {
//             &mut Some(ref mut node) => anchor = &mut node.next,
//             other => return other,
//         }
//     }
// }

fn back(start: &mut Link) -> &mut Link {
    let mut anchor = start;

    loop {
        match { anchor } {
            &mut Some(ref mut node) => anchor = &mut node.next,
            other => return other,
        }
    }
}

// foo::bar::baz<quux<'a>> -> baz<quux<'a>>
fn get_pathseg(ty: &Type) -> Option<&PathSegment> {
    match ty {
        Type::Path(TypePath { path, .. }) => path.segments.last(),
        _ => None,
    }
}

// foo::bar::baz<quux<'a>> -> quux<'a>
fn rightmost_path_segment(ty: &Type) -> Option<&PathSegment> {
    let mut candidate = get_pathseg(ty);
    loop {
        if let Some(pathseg) = candidate {
            if let PathArguments::AngleBracketed(AngleBracketedGenericArguments { args, .. }) =
                &pathseg.arguments
            {
                if let Some(GenericArgument::Type(t)) = args.last() {
                    candidate = get_pathseg(t);
                    continue;
                }
            }
        }
        break;
    }
    candidate
}

fn get_pathseg_mut(ty: &mut Type) -> Option<&mut PathSegment> {
    match ty {
        Type::Path(TypePath { path, .. }) => path.segments.last_mut(),
        _ => None,
    }
}

fn has_more_mut(candidate: &Option<&mut PathSegment>) -> bool {
    if let Some(PathArguments::AngleBracketed(AngleBracketedGenericArguments {
        ref args, ..
    })) = candidate.as_ref().map(|c| &c.arguments)
    {
        matches!(args.last(), Some(GenericArgument::Type(_)))
    } else {
        false
    }
}

fn rightmost_path_segment_mut(ty: &mut Type) -> Option<&mut PathSegment> {
    let mut candidate = get_pathseg_mut(ty);

    while has_more_mut(&candidate) {
        candidate = match candidate.unwrap().arguments {
            PathArguments::AngleBracketed(AngleBracketedGenericArguments {
                ref mut args, ..
            }) => {
                if let Some(GenericArgument::Type(t)) = args.last_mut() {
                    get_pathseg_mut(t)
                } else {
                    unreachable!();
                }
            }
            _ => unreachable!(),
        };
    }

    candidate
}

// fn rightmost_path_segment_mut(mut ty: &mut Type) -> Option<&mut PathSegment> {
//     loop {
//         match { &mut *ty } {
//             Type::Path(TypePath { path, .. }) => {
//                 let last_seg = path.segments.last_mut().unwrap();
//                 match { &mut *last_seg }.arguments {
//                     PathArguments::AngleBracketed(AngleBracketedGenericArguments {
//                         mut args,
//                         ..
//                     }) => match args.last_mut().unwrap() {
//                         GenericArgument::Type(t) => ty = t,
//                         _ => {}
//                     },
//                     _ => return None,
//                 }
//             }
//             _ => return None,
//         }
//     }
//     // let tmp = ret;
//     // if let Some(ref mut seg) = *tmp {
//     //     if let PathArguments::AngleBracketed(AngleBracketedGenericArguments { args, .. }) =
//     //         &mut seg.arguments
//     //     {
//     //         if let Some(GenericArgument::Type(t)) = args.last_mut() {
//     //             ty_ = Some(t);
//     //             continue;
//     //         }
//     //     }
//     //     }
//     //     ty_ = None;
//     // }
//     // return ret;

//     // if let Type::Path(TypePath { path, .. }) = ty {
//     //     if let Some(seg) = { path.segments.last_mut() } {
//     //         if let PathArguments::AngleBracketed(AngleBracketedGenericArguments { args, .. }) =
//     //             &mut { seg }.arguments
//     //         {
//     //             if let Some(GenericArgument::Type(t)) = args.last_mut() {
//     //                 return rightmost_path_segment_mut(t);
//     //             }
//     //         }
//     //         return Some(seg);
//     //     }
//     // }
//     // None
// }
@@ -56,7 +56,8 @@ fn impl_inflate_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream {
    let ident = &ast.ident;
    let generics = &ast.generics;
    let gen = quote! {
        impl<'a> Inflate<'a> for #ident #generics {
        impl #generics Inflate<'a> for #ident #generics {
            type Inflated = Self;
            fn inflate(mut self, config: & crate::tokenizer::whitespace_parser::Config<'a>) -> std::result::Result<Self, crate::tokenizer::whitespace_parser::WhitespaceError> {
                match self {
                    #(Self::#varnames(x) => Ok(Self::#varnames(x.inflate(config)?)),)*
@@ -11,8 +11,11 @@ mod codegen;
use codegen::impl_codegen;
mod into_py;
use into_py::impl_into_py;
mod cstnode;
use cstnode::impl_cst_node;

use proc_macro::TokenStream;
use syn::{parse, parse_macro_input, DeriveInput};

#[proc_macro_derive(Inflate)]
pub fn inflate_derive(input: TokenStream) -> TokenStream {

@@ -34,3 +37,9 @@ pub fn parenthesized_node_codegen(input: TokenStream) -> TokenStream {
pub fn into_py(input: TokenStream) -> TokenStream {
    impl_into_py(&syn::parse(input).unwrap())
}

#[proc_macro_attribute]
pub fn cst_node(args: TokenStream, input: TokenStream) -> TokenStream {
    let _ = parse_macro_input!(args as parse::Nothing);
    impl_cst_node(parse_macro_input!(input as DeriveInput))
}
@@ -23,14 +23,14 @@ fn impl_struct(ast: &DeriveInput) -> TokenStream {
    let ident = &ast.ident;
    let generics = &ast.generics;
    let gen = quote! {
        impl<'a> ParenthesizedNode<'a> for #ident #generics {
            fn lpar(&self) -> &Vec<LeftParen<'a>> {
        impl #generics ParenthesizedNode#generics for #ident #generics {
            fn lpar(&self) -> &Vec<LeftParen#generics> {
                &self.lpar
            }
            fn rpar(&self) -> &Vec<RightParen<'a>> {
            fn rpar(&self) -> &Vec<RightParen#generics> {
                &self.rpar
            }
            fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self {
            fn with_parens(self, left: LeftParen#generics, right: RightParen#generics) -> Self {
                let mut lpar = self.lpar;
                let mut rpar = self.rpar;
                lpar.insert(0, left);

@@ -76,18 +76,18 @@ fn impl_enum(ast: &DeriveInput, e: &DataEnum) -> TokenStream {
    let ident = &ast.ident;
    let generics = &ast.generics;
    let gen = quote! {
        impl<'a> ParenthesizedNode<'a> for #ident #generics {
            fn lpar(&self) -> &Vec<LeftParen<'a>> {
        impl #generics ParenthesizedNode#generics for #ident #generics {
            fn lpar(&self) -> &Vec<LeftParen #generics > {
                match self {
                    #(Self::#varnames(x) => x.lpar(),)*
                }
            }
            fn rpar(&self) -> &Vec<RightParen<'a>> {
            fn rpar(&self) -> &Vec<RightParen #generics > {
                match self {
                    #(Self::#varnames(x) => x.rpar(),)*
                }
            }
            fn with_parens(self, left: LeftParen<'a>, right: RightParen<'a>) -> Self {
            fn with_parens(self, left: LeftParen #generics, right: RightParen #generics) -> Self {
                match self {
                    #(Self::#varnames(x) => Self::#varnames(x.with_parens(left, right)),)*
                }