Mirror of https://github.com/astral-sh/ruff.git, synced 2025-09-30 05:44:56 +00:00
Handle t-string prefixes in SimpleTokenizer (#20578)
The simple tokenizer is meant to skip strings, but it was recording a `Name` token for t-strings (from the `t`). This PR fixes that.
parent cfc64d1707
commit f2b7c82534

4 changed files with 54 additions and 0 deletions
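The substance of the change is the tokenizer's string-prefix check: an identifier-like run sitting directly in front of a quote is only folded into the skipped string if it spells a known prefix, and the t-string prefixes were missing from that list, so the lone `t` surfaced as a `Name` token. A minimal, self-contained sketch of such a check follows; the ten t-prefix arms mirror the lines added in the first hunk below, while the rest of the set is an assumption based on Python's string-prefix rules rather than a verbatim copy of the ruff source.

// Sketch only, not the ruff implementation.
fn is_string_prefix(token: &str) -> bool {
    matches!(
        token,
        // b/f/r/u prefixes (assumed set; only "rb", "rf", and "u" appear in the hunk below)
        "b" | "B" | "br" | "bR" | "Br" | "BR" | "rb" | "rB" | "Rb" | "RB"
            | "f" | "F" | "fr" | "fR" | "Fr" | "FR" | "rf" | "rF" | "Rf" | "RF"
            | "r" | "R" | "u" | "U"
            // t-string prefixes added by this commit
            | "T" | "TR" | "Tr" | "RT" | "Rt" | "t" | "tR" | "tr" | "rT" | "rt"
    )
}

fn main() {
    assert!(is_string_prefix("t"));
    assert!(is_string_prefix("rT"));
    assert!(!is_string_prefix("abc"));
}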
@@ -599,6 +599,16 @@ impl<'a> SimpleTokenizer<'a> {
                     | "rb"
                     | "rf"
                     | "u"
+                    | "T"
+                    | "TR"
+                    | "Tr"
+                    | "RT"
+                    | "Rt"
+                    | "t"
+                    | "tR"
+                    | "tr"
+                    | "rT"
+                    | "rt"
             )
             {
                 self.bogus = true;
@@ -169,6 +169,22 @@ fn string_with_byte_kind() {
     // note: not reversible: [other, bogus] vs [bogus, other]
 }
 
+#[test]
+fn fstring() {
+    let source = "f'foo'";
+
+    let test_case = tokenize(source);
+    assert_debug_snapshot!(test_case.tokens());
+}
+
+#[test]
+fn tstring() {
+    let source = "t'foo'";
+
+    let test_case = tokenize(source);
+    assert_debug_snapshot!(test_case.tokens());
+}
+
 #[test]
 fn string_with_invalid_kind() {
     let source = "abc'foo'";
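Both new snapshots below record the same token shape: an `Other` token covering the single prefix character and a `Bogus` token covering the quoted remainder. The toy sketch below is not the ruff implementation; it only reproduces that recorded shape for a one-character-prefixed string literal, as an aid to reading the snapshots.

// Toy model of the snapshot shape, not the ruff tokenizer.
#[derive(Debug, PartialEq)]
enum Kind {
    Other,
    Bogus,
}

fn sketch_tokenize(source: &str) -> Vec<(Kind, std::ops::Range<usize>)> {
    let mut chars = source.char_indices();
    match (chars.next(), chars.next()) {
        // A single alphabetic character directly followed by a quote:
        // report the prefix on its own, then skip the rest as bogus.
        (Some((0, prefix)), Some((quote_at, quote)))
            if prefix.is_ascii_alphabetic() && matches!(quote, '\'' | '"') =>
        {
            vec![
                (Kind::Other, 0..quote_at),
                (Kind::Bogus, quote_at..source.len()),
            ]
        }
        _ => vec![(Kind::Bogus, 0..source.len())],
    }
}

fn main() {
    // Matches the recorded snapshots for both "f'foo'" and "t'foo'".
    assert_eq!(
        sketch_tokenize("t'foo'"),
        vec![(Kind::Other, 0..1), (Kind::Bogus, 1..6)]
    );
}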
@@ -0,0 +1,14 @@
+---
+source: crates/ruff_python_trivia_integration_tests/tests/simple_tokenizer.rs
+expression: test_case.tokens()
+---
+[
+    SimpleToken {
+        kind: Other,
+        range: 0..1,
+    },
+    SimpleToken {
+        kind: Bogus,
+        range: 1..6,
+    },
+]
@@ -0,0 +1,14 @@
+---
+source: crates/ruff_python_trivia_integration_tests/tests/simple_tokenizer.rs
+expression: test_case.tokens()
+---
+[
+    SimpleToken {
+        kind: Other,
+        range: 0..1,
+    },
+    SimpleToken {
+        kind: Bogus,
+        range: 1..6,
+    },
+]