Fix make_tokenizer_located

commit c21d0d9283
parent 313fd7d28c
Author: harupy
Date:   2022-12-27 21:54:18 +09:00


@@ -154,7 +154,7 @@ pub type LexResult = Result<Spanned, LexicalError>;
 #[inline]
 pub fn make_tokenizer(source: &str) -> impl Iterator<Item = LexResult> + '_ {
-    make_tokenizer_located(source, Location::new(0, 0))
+    make_tokenizer_located(source, Location::new(1, 0))
 }
 
 pub fn make_tokenizer_located(
@@ -240,8 +240,6 @@ where
         if let Some('\u{feff}') = lxr.window[0] {
             lxr.window.slide();
         }
-        // Start at top row (=1) left column (=1)
-        lxr.location.reset();
         lxr
     }
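
Taken together, the two hunks change the default start location used by make_tokenizer from Location::new(0, 0) to Location::new(1, 0) and drop the lxr.location.reset() call, so a caller-supplied start location is no longer overwritten when the lexer is constructed. The sketch below illustrates that behavior; it is not part of the commit, and the rustpython_parser::lexer / rustpython_parser::location import paths, the (Location, Tok, Location) shape of Spanned, and the Debug impls are assumptions to verify against the crate version this diff targets.

// Hedged sketch (not from the commit): shows the post-fix behavior under the
// assumed module layout noted above.
use rustpython_parser::lexer::{make_tokenizer, make_tokenizer_located};
use rustpython_parser::location::Location;

fn main() {
    // Default entry point: after the fix the first token should report the
    // 1-based start row (1) rather than the pre-fix (0, 0).
    if let Some(Ok((start, tok, _end))) = make_tokenizer("x = 1").next() {
        println!("{tok:?} starts at {start:?}");
    }

    // Explicit start location: with location.reset() removed, the offset is
    // preserved, e.g. when lexing a snippet known to begin at row 10 of a
    // larger file.
    let mut lexer = make_tokenizer_located("y = 2", Location::new(10, 0));
    if let Some(Ok((start, tok, _end))) = lexer.next() {
        println!("{tok:?} starts at {start:?}"); // expected: row 10, not 1
    }
}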