Use snapshots for remaining lexer tests (#7264)

## Summary

This PR updates the remaining lexer test cases to use the snapshots.
This is mainly a mechanical refactor.

## Motivation

The main motivation is so that when we add the token range values to the
test case output, it's easier to update the test cases.

The reason they were not using the snapshots before was the usage of the
`test_case` macro. The macro is mainly used for different EOL test cases. If we
just generate the snapshots directly, then the snapshot name would be suffixed
with `-1`, `-2`, etc. as the test function is still the same. So, we'll create
the snapshot ourselves with the platform name for the respective EOL
test cases.

## Test Plan

`cargo test`
This commit is contained in:
Dhruv Manilawala 2023-09-12 00:16:38 +05:30 committed by GitHub
parent ff0feb191c
commit f5701fcc63
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
35 changed files with 845 additions and 272 deletions

View file

@ -1265,11 +1265,7 @@ impl<'a> LexedText<'a> {
#[cfg(test)]
mod tests {
use num_bigint::BigInt;
use ruff_python_ast::IpyEscapeKind;
use insta::assert_debug_snapshot;
use test_case::test_case;
use super::*;
@ -1277,50 +1273,60 @@ mod tests {
const MAC_EOL: &str = "\r";
const UNIX_EOL: &str = "\n";
pub(crate) fn lex_source(source: &str) -> Vec<Tok> {
fn lex_source(source: &str) -> Vec<Tok> {
let lexer = lex(source, Mode::Module);
lexer.map(|x| x.unwrap().0).collect()
lexer.map(|result| result.unwrap().0).collect()
}
pub(crate) fn lex_jupyter_source(source: &str) -> Vec<Tok> {
fn lex_jupyter_source(source: &str) -> Vec<Tok> {
let lexer = lex(source, Mode::Ipython);
lexer.map(|x| x.unwrap().0).collect()
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_ipython_escape_command_line_continuation_eol(eol: &str) {
fn ipython_escape_command_line_continuation_eol(eol: &str) -> Vec<Tok> {
let source = format!("%matplotlib \\{eol} --inline");
let tokens = lex_jupyter_source(&source);
assert_eq!(
tokens,
vec![
Tok::IpyEscapeCommand {
value: "matplotlib --inline".to_string(),
kind: IpyEscapeKind::Magic
},
Tok::Newline
]
);
lex_jupyter_source(&source)
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_ipython_escape_command_line_continuation_with_eol_and_eof(eol: &str) {
#[test]
fn test_ipython_escape_command_line_continuation_unix_eol() {
assert_debug_snapshot!(ipython_escape_command_line_continuation_eol(UNIX_EOL));
}
#[test]
fn test_ipython_escape_command_line_continuation_mac_eol() {
assert_debug_snapshot!(ipython_escape_command_line_continuation_eol(MAC_EOL));
}
#[test]
fn test_ipython_escape_command_line_continuation_windows_eol() {
assert_debug_snapshot!(ipython_escape_command_line_continuation_eol(WINDOWS_EOL));
}
fn ipython_escape_command_line_continuation_with_eol_and_eof(eol: &str) -> Vec<Tok> {
let source = format!("%matplotlib \\{eol}");
let tokens = lex_jupyter_source(&source);
assert_eq!(
tokens,
vec![
Tok::IpyEscapeCommand {
value: "matplotlib ".to_string(),
kind: IpyEscapeKind::Magic
},
Tok::Newline
]
);
lex_jupyter_source(&source)
}
#[test]
fn test_ipython_escape_command_line_continuation_with_unix_eol_and_eof() {
assert_debug_snapshot!(ipython_escape_command_line_continuation_with_eol_and_eof(
UNIX_EOL
));
}
#[test]
fn test_ipython_escape_command_line_continuation_with_mac_eol_and_eof() {
assert_debug_snapshot!(ipython_escape_command_line_continuation_with_eol_and_eof(
MAC_EOL
));
}
#[test]
fn test_ipython_escape_command_line_continuation_with_windows_eol_and_eof() {
assert_debug_snapshot!(ipython_escape_command_line_continuation_with_eol_and_eof(
WINDOWS_EOL
));
}
#[test]
@ -1428,45 +1434,48 @@ def f(arg=%timeit a = b):
assert_debug_snapshot!(lex_source(source));
}
#[test_case(" foo"; "long")]
#[test_case(" "; "whitespace")]
#[test_case(" "; "single whitespace")]
#[test_case(""; "empty")]
fn test_line_comment(comment: &str) {
let source = format!("99232 # {comment}");
let tokens = lex_source(&source);
assert_eq!(
tokens,
vec![
Tok::Int {
value: BigInt::from(99232)
},
Tok::Comment(format!("# {comment}")),
Tok::Newline
]
);
#[test]
fn test_line_comment_long() {
let source = "99232 # foo".to_string();
assert_debug_snapshot!(lex_source(&source));
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_comment_until_eol(eol: &str) {
#[test]
fn test_line_comment_whitespace() {
let source = "99232 # ".to_string();
assert_debug_snapshot!(lex_source(&source));
}
#[test]
fn test_line_comment_single_whitespace() {
let source = "99232 # ".to_string();
assert_debug_snapshot!(lex_source(&source));
}
#[test]
fn test_line_comment_empty() {
let source = "99232 #".to_string();
assert_debug_snapshot!(lex_source(&source));
}
fn comment_until_eol(eol: &str) -> Vec<Tok> {
let source = format!("123 # Foo{eol}456");
let tokens = lex_source(&source);
assert_eq!(
tokens,
vec![
Tok::Int {
value: BigInt::from(123)
},
Tok::Comment("# Foo".to_string()),
Tok::Newline,
Tok::Int {
value: BigInt::from(456)
},
Tok::Newline,
]
);
lex_source(&source)
}
#[test]
fn test_comment_until_unix_eol() {
assert_debug_snapshot!(comment_until_eol(UNIX_EOL));
}
#[test]
fn test_comment_until_mac_eol() {
assert_debug_snapshot!(comment_until_eol(MAC_EOL));
}
#[test]
fn test_comment_until_windows_eol() {
assert_debug_snapshot!(comment_until_eol(WINDOWS_EOL));
}
#[test]
@ -1475,115 +1484,67 @@ def f(arg=%timeit a = b):
assert_debug_snapshot!(lex_source(source));
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_indentation_with_eol(eol: &str) {
fn indentation_with_eol(eol: &str) -> Vec<Tok> {
let source = format!("def foo():{eol} return 99{eol}{eol}");
let tokens = lex_source(&source);
assert_eq!(
tokens,
vec![
Tok::Def,
Tok::Name {
name: String::from("foo"),
},
Tok::Lpar,
Tok::Rpar,
Tok::Colon,
Tok::Newline,
Tok::Indent,
Tok::Return,
Tok::Int {
value: BigInt::from(99)
},
Tok::Newline,
Tok::NonLogicalNewline,
Tok::Dedent,
]
);
lex_source(&source)
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_double_dedent_with_eol(eol: &str) {
#[test]
fn test_indentation_with_unix_eol() {
assert_debug_snapshot!(indentation_with_eol(UNIX_EOL));
}
#[test]
fn test_indentation_with_mac_eol() {
assert_debug_snapshot!(indentation_with_eol(MAC_EOL));
}
#[test]
fn test_indentation_with_windows_eol() {
assert_debug_snapshot!(indentation_with_eol(WINDOWS_EOL));
}
fn double_dedent_with_eol(eol: &str) -> Vec<Tok> {
let source = format!("def foo():{eol} if x:{eol}{eol} return 99{eol}{eol}");
let tokens = lex_source(&source);
assert_eq!(
tokens,
vec![
Tok::Def,
Tok::Name {
name: String::from("foo"),
},
Tok::Lpar,
Tok::Rpar,
Tok::Colon,
Tok::Newline,
Tok::Indent,
Tok::If,
Tok::Name {
name: String::from("x"),
},
Tok::Colon,
Tok::Newline,
Tok::NonLogicalNewline,
Tok::Indent,
Tok::Return,
Tok::Int {
value: BigInt::from(99)
},
Tok::Newline,
Tok::NonLogicalNewline,
Tok::Dedent,
Tok::Dedent,
]
);
lex_source(&source)
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_double_dedent_with_tabs(eol: &str) {
#[test]
fn test_double_dedent_with_unix_eol() {
assert_debug_snapshot!(double_dedent_with_eol(UNIX_EOL));
}
#[test]
fn test_double_dedent_with_mac_eol() {
assert_debug_snapshot!(double_dedent_with_eol(MAC_EOL));
}
#[test]
fn test_double_dedent_with_windows_eol() {
assert_debug_snapshot!(double_dedent_with_eol(WINDOWS_EOL));
}
fn double_dedent_with_tabs_eol(eol: &str) -> Vec<Tok> {
let source = format!("def foo():{eol}\tif x:{eol}{eol}\t\t return 99{eol}{eol}");
let tokens = lex_source(&source);
assert_eq!(
tokens,
vec![
Tok::Def,
Tok::Name {
name: String::from("foo"),
},
Tok::Lpar,
Tok::Rpar,
Tok::Colon,
Tok::Newline,
Tok::Indent,
Tok::If,
Tok::Name {
name: String::from("x"),
},
Tok::Colon,
Tok::Newline,
Tok::NonLogicalNewline,
Tok::Indent,
Tok::Return,
Tok::Int {
value: BigInt::from(99)
},
Tok::Newline,
Tok::NonLogicalNewline,
Tok::Dedent,
Tok::Dedent,
]
);
lex_source(&source)
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_newline_in_brackets(eol: &str) {
#[test]
fn test_double_dedent_with_tabs_unix_eol() {
assert_debug_snapshot!(double_dedent_with_tabs_eol(UNIX_EOL));
}
#[test]
fn test_double_dedent_with_tabs_mac_eol() {
assert_debug_snapshot!(double_dedent_with_tabs_eol(MAC_EOL));
}
#[test]
fn test_double_dedent_with_tabs_windows_eol() {
assert_debug_snapshot!(double_dedent_with_tabs_eol(WINDOWS_EOL));
}
fn newline_in_brackets_eol(eol: &str) -> Vec<Tok> {
let source = r"x = [
1,2
@ -1595,59 +1556,22 @@ def f(arg=%timeit a = b):
7}]
"
.replace('\n', eol);
let tokens = lex_source(&source);
assert_eq!(
tokens,
vec![
Tok::Name {
name: String::from("x"),
},
Tok::Equal,
Tok::Lsqb,
Tok::NonLogicalNewline,
Tok::NonLogicalNewline,
Tok::Int {
value: BigInt::from(1)
},
Tok::Comma,
Tok::Int {
value: BigInt::from(2)
},
Tok::NonLogicalNewline,
Tok::Comma,
Tok::Lpar,
Tok::Int {
value: BigInt::from(3)
},
Tok::Comma,
Tok::NonLogicalNewline,
Tok::Int {
value: BigInt::from(4)
},
Tok::Comma,
Tok::NonLogicalNewline,
Tok::Rpar,
Tok::Comma,
Tok::Lbrace,
Tok::NonLogicalNewline,
Tok::Int {
value: BigInt::from(5)
},
Tok::Comma,
Tok::NonLogicalNewline,
Tok::Int {
value: BigInt::from(6)
},
Tok::Comma,
// Continuation here - no NonLogicalNewline.
Tok::Int {
value: BigInt::from(7)
},
Tok::Rbrace,
Tok::Rsqb,
Tok::Newline,
]
);
lex_source(&source)
}
#[test]
fn test_newline_in_brackets_unix_eol() {
assert_debug_snapshot!(newline_in_brackets_eol(UNIX_EOL));
}
#[test]
fn test_newline_in_brackets_mac_eol() {
assert_debug_snapshot!(newline_in_brackets_eol(MAC_EOL));
}
#[test]
fn test_newline_in_brackets_windows_eol() {
assert_debug_snapshot!(newline_in_brackets_eol(WINDOWS_EOL));
}
#[test]
@ -1680,60 +1604,50 @@ def f(arg=%timeit a = b):
assert_debug_snapshot!(lex_source(source));
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_string_continuation_with_eol(eol: &str) {
fn string_continuation_with_eol(eol: &str) -> Vec<Tok> {
let source = format!("\"abc\\{eol}def\"");
let tokens = lex_source(&source);
lex_source(&source)
}
assert_eq!(
tokens,
vec![
Tok::String {
value: format!("abc\\{eol}def"),
kind: StringKind::String,
triple_quoted: false,
},
Tok::Newline,
]
);
#[test]
fn test_string_continuation_with_unix_eol() {
assert_debug_snapshot!(string_continuation_with_eol(UNIX_EOL));
}
#[test]
fn test_string_continuation_with_mac_eol() {
assert_debug_snapshot!(string_continuation_with_eol(MAC_EOL));
}
#[test]
fn test_string_continuation_with_windows_eol() {
assert_debug_snapshot!(string_continuation_with_eol(WINDOWS_EOL));
}
#[test]
fn test_escape_unicode_name() {
let source = r#""\N{EN SPACE}""#;
let tokens = lex_source(source);
assert_eq!(
tokens,
vec![
Tok::String {
value: r"\N{EN SPACE}".to_string(),
kind: StringKind::String,
triple_quoted: false,
},
Tok::Newline
]
);
assert_debug_snapshot!(lex_source(source));
}
#[test_case(UNIX_EOL)]
#[test_case(MAC_EOL)]
#[test_case(WINDOWS_EOL)]
fn test_triple_quoted(eol: &str) {
fn triple_quoted_eol(eol: &str) -> Vec<Tok> {
let source = format!("\"\"\"{eol} test string{eol} \"\"\"");
let tokens = lex_source(&source);
assert_eq!(
tokens,
vec![
Tok::String {
value: format!("{eol} test string{eol} "),
kind: StringKind::String,
triple_quoted: true,
},
Tok::Newline,
]
);
lex_source(&source)
}
#[test]
fn test_triple_quoted_unix_eol() {
assert_debug_snapshot!(triple_quoted_eol(UNIX_EOL));
}
#[test]
fn test_triple_quoted_mac_eol() {
assert_debug_snapshot!(triple_quoted_eol(MAC_EOL));
}
#[test]
fn test_triple_quoted_windows_eol() {
assert_debug_snapshot!(triple_quoted_eol(WINDOWS_EOL));
}
// This test case is to just make sure that the lexer doesn't go into

View file

@ -0,0 +1,17 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Int {
value: 123,
},
Comment(
"# Foo",
),
Newline,
Int {
value: 456,
},
Newline,
]

View file

@ -0,0 +1,17 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Int {
value: 123,
},
Comment(
"# Foo",
),
Newline,
Int {
value: 456,
},
Newline,
]

View file

@ -0,0 +1,17 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Int {
value: 123,
},
Comment(
"# Foo",
),
Newline,
Int {
value: 456,
},
Newline,
]

View file

@ -0,0 +1,31 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
If,
Name {
name: "x",
},
Colon,
Newline,
NonLogicalNewline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
Dedent,
]

View file

@ -0,0 +1,31 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
If,
Name {
name: "x",
},
Colon,
Newline,
NonLogicalNewline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
Dedent,
]

View file

@ -0,0 +1,31 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
If,
Name {
name: "x",
},
Colon,
Newline,
NonLogicalNewline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
Dedent,
]

View file

@ -0,0 +1,31 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
If,
Name {
name: "x",
},
Colon,
Newline,
NonLogicalNewline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
Dedent,
]

View file

@ -0,0 +1,31 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
If,
Name {
name: "x",
},
Colon,
Newline,
NonLogicalNewline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
Dedent,
]

View file

@ -0,0 +1,31 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
If,
Name {
name: "x",
},
Colon,
Newline,
NonLogicalNewline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
Dedent,
]

View file

@ -0,0 +1,12 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(source)
---
[
String {
value: "\\N{EN SPACE}",
kind: String,
triple_quoted: false,
},
Newline,
]

View file

@ -0,0 +1,22 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
]

View file

@ -0,0 +1,22 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
]

View file

@ -0,0 +1,22 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Def,
Name {
name: "foo",
},
Lpar,
Rpar,
Colon,
Newline,
Indent,
Return,
Int {
value: 99,
},
Newline,
NonLogicalNewline,
Dedent,
]

View file

@ -0,0 +1,11 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_jupyter_source(&source)
---
[
IpyEscapeCommand {
value: "matplotlib --inline",
kind: Magic,
},
Newline,
]

View file

@ -0,0 +1,11 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_jupyter_source(&source)
---
[
IpyEscapeCommand {
value: "matplotlib --inline",
kind: Magic,
},
Newline,
]

View file

@ -0,0 +1,11 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_jupyter_source(&source)
---
[
IpyEscapeCommand {
value: "matplotlib --inline",
kind: Magic,
},
Newline,
]

View file

@ -0,0 +1,11 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_jupyter_source(&source)
---
[
IpyEscapeCommand {
value: "matplotlib ",
kind: Magic,
},
Newline,
]

View file

@ -0,0 +1,11 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_jupyter_source(&source)
---
[
IpyEscapeCommand {
value: "matplotlib ",
kind: Magic,
},
Newline,
]

View file

@ -0,0 +1,11 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_jupyter_source(&source)
---
[
IpyEscapeCommand {
value: "matplotlib ",
kind: Magic,
},
Newline,
]

View file

@ -0,0 +1,13 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Int {
value: 99232,
},
Comment(
"#",
),
Newline,
]

View file

@ -0,0 +1,13 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Int {
value: 99232,
},
Comment(
"# foo",
),
Newline,
]

View file

@ -0,0 +1,13 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Int {
value: 99232,
},
Comment(
"# ",
),
Newline,
]

View file

@ -0,0 +1,13 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Int {
value: 99232,
},
Comment(
"# ",
),
Newline,
]

View file

@ -0,0 +1,52 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Name {
name: "x",
},
Equal,
Lsqb,
NonLogicalNewline,
NonLogicalNewline,
Int {
value: 1,
},
Comma,
Int {
value: 2,
},
NonLogicalNewline,
Comma,
Lpar,
Int {
value: 3,
},
Comma,
NonLogicalNewline,
Int {
value: 4,
},
Comma,
NonLogicalNewline,
Rpar,
Comma,
Lbrace,
NonLogicalNewline,
Int {
value: 5,
},
Comma,
NonLogicalNewline,
Int {
value: 6,
},
Comma,
Int {
value: 7,
},
Rbrace,
Rsqb,
Newline,
]

View file

@ -0,0 +1,52 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Name {
name: "x",
},
Equal,
Lsqb,
NonLogicalNewline,
NonLogicalNewline,
Int {
value: 1,
},
Comma,
Int {
value: 2,
},
NonLogicalNewline,
Comma,
Lpar,
Int {
value: 3,
},
Comma,
NonLogicalNewline,
Int {
value: 4,
},
Comma,
NonLogicalNewline,
Rpar,
Comma,
Lbrace,
NonLogicalNewline,
Int {
value: 5,
},
Comma,
NonLogicalNewline,
Int {
value: 6,
},
Comma,
Int {
value: 7,
},
Rbrace,
Rsqb,
Newline,
]

View file

@ -0,0 +1,52 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
Name {
name: "x",
},
Equal,
Lsqb,
NonLogicalNewline,
NonLogicalNewline,
Int {
value: 1,
},
Comma,
Int {
value: 2,
},
NonLogicalNewline,
Comma,
Lpar,
Int {
value: 3,
},
Comma,
NonLogicalNewline,
Int {
value: 4,
},
Comma,
NonLogicalNewline,
Rpar,
Comma,
Lbrace,
NonLogicalNewline,
Int {
value: 5,
},
Comma,
NonLogicalNewline,
Int {
value: 6,
},
Comma,
Int {
value: 7,
},
Rbrace,
Rsqb,
Newline,
]

View file

@ -0,0 +1,12 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
String {
value: "abc\\\rdef",
kind: String,
triple_quoted: false,
},
Newline,
]

View file

@ -0,0 +1,12 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
String {
value: "abc\\\ndef",
kind: String,
triple_quoted: false,
},
Newline,
]

View file

@ -0,0 +1,12 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
String {
value: "abc\\\r\ndef",
kind: String,
triple_quoted: false,
},
Newline,
]

View file

@ -0,0 +1,12 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
String {
value: "\r test string\r ",
kind: String,
triple_quoted: true,
},
Newline,
]

View file

@ -0,0 +1,12 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
String {
value: "\n test string\n ",
kind: String,
triple_quoted: true,
},
Newline,
]

View file

@ -0,0 +1,12 @@
---
source: crates/ruff_python_parser/src/lexer.rs
expression: lex_source(&source)
---
[
String {
value: "\r\n test string\r\n ",
kind: String,
triple_quoted: true,
},
Newline,
]