Remove some TextUnit->usize escapees
commit 88c944f96b (parent 2f9f409538)
7 changed files with 31 additions and 27 deletions
@@ -5,7 +5,7 @@ use std::{
 use test_utils::{collect_tests, dir_tests, project_dir, read_text};
 
-use crate::{fuzz, tokenize, SourceFile, SyntaxError, Token};
+use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextUnit, Token};
 
 #[test]
 fn lexer_tests() {
@@ -120,11 +120,11 @@ fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
 fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
     let mut acc = String::new();
-    let mut offset = 0;
+    let mut offset = TextUnit::from_usize(0);
     for token in tokens {
-        let token_len = token.len.to_usize();
-        let token_text = &text[offset..offset + token_len];
-        offset += token_len;
+        let token_len = token.len;
+        let token_text = &text[TextRange::offset_len(offset, token.len)];
+        offset += token.len;
         writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap();
     }
     for err in errors {
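For context on the pattern the new code adopts: TextUnit and TextRange are rust-analyzer's typed text offsets (re-exported from the text_unit crate, which is an assumption here since the diff only shows the crate-local re-exports). TextUnit wraps a 32-bit offset, TextRange::offset_len(offset, len) builds a range from a start and a length, and a &str can be indexed by a TextRange directly, so the dumping loop never has to round-trip through usize. The sketch below mirrors that pattern; the Token struct is a hypothetical stand-in (the real one carries a SyntaxKind) kept only so the snippet compiles on its own with text_unit as a dependency.

// A minimal sketch, assuming the text_unit crate; `Token` is a stand-in, not rust-analyzer's type.
use std::fmt::Write;

use text_unit::{TextRange, TextUnit};

struct Token {
    kind: &'static str, // rust-analyzer stores a SyntaxKind here
    len: TextUnit,
}

fn dump_tokens(tokens: &[Token], text: &str) -> String {
    let mut acc = String::new();
    // The running offset stays a TextUnit from start to finish instead of escaping to usize.
    let mut offset = TextUnit::from_usize(0);
    for token in tokens {
        // `str` is indexable by TextRange, so slicing needs no usize arithmetic.
        let token_text = &text[TextRange::offset_len(offset, token.len)];
        offset += token.len;
        writeln!(acc, "{} {} {:?}", token.kind, token.len, token_text).unwrap();
    }
    acc
}

fn main() {
    let text = "fn main";
    let tokens = [
        Token { kind: "FN_KW", len: TextUnit::from_usize(2) },
        Token { kind: "WHITESPACE", len: TextUnit::from_usize(1) },
        Token { kind: "IDENT", len: TextUnit::from_usize(4) },
    ];
    print!("{}", dump_tokens(&tokens, text));
}

Keeping offsets in TextUnit is what the commit title means by removing "escapees": once a length is converted to usize it loses its meaning as a text offset, and the newtype boundary is what lets the compiler catch accidental mixing of byte offsets with ordinary integers.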