Rename TokenWithLocation to TokenWithSpan, in backwards compatible way (#1562)

Andrew Lamb authored on 2024-11-30 07:55:21 -05:00; committed by GitHub
parent 92c6e7f79b
commit a134910a36
4 changed files with 67 additions and 63 deletions
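
The rename stays source-compatible because the old name survives as a deprecated type alias pointing at TokenWithSpan. A minimal sketch of what that means for downstream code (the sqlparser::tokenizer import path and the 0.53.0 version bound are assumptions beyond what this diff shows):

// Existing code that still spells the old name keeps compiling; referencing
// TokenWithLocation only triggers a deprecation warning unless silenced.
#[allow(deprecated)]
fn old_name_still_works(
    t: sqlparser::tokenizer::TokenWithLocation,
) -> sqlparser::tokenizer::TokenWithSpan {
    // The alias and the new struct are the same type, so this is the identity function.
    t
}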

src/tokenizer.rs

@@ -521,42 +521,46 @@ impl Span {
     }
 }
 
+/// Backwards compatibility struct for [`TokenWithSpan`]
+#[deprecated(since = "0.53.0", note = "please use `TokenWithSpan` instead")]
+pub type TokenWithLocation = TokenWithSpan;
+
 /// A [Token] with [Location] attached to it
 #[derive(Debug, Clone, Hash, Ord, PartialOrd, Eq, PartialEq)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
-pub struct TokenWithLocation {
+pub struct TokenWithSpan {
     pub token: Token,
     pub span: Span,
 }
 
-impl TokenWithLocation {
-    pub fn new(token: Token, span: Span) -> TokenWithLocation {
-        TokenWithLocation { token, span }
+impl TokenWithSpan {
+    pub fn new(token: Token, span: Span) -> TokenWithSpan {
+        TokenWithSpan { token, span }
     }
 
-    pub fn wrap(token: Token) -> TokenWithLocation {
-        TokenWithLocation::new(token, Span::empty())
+    pub fn wrap(token: Token) -> TokenWithSpan {
+        TokenWithSpan::new(token, Span::empty())
     }
 
-    pub fn at(token: Token, start: Location, end: Location) -> TokenWithLocation {
-        TokenWithLocation::new(token, Span::new(start, end))
+    pub fn at(token: Token, start: Location, end: Location) -> TokenWithSpan {
+        TokenWithSpan::new(token, Span::new(start, end))
     }
 }
 
-impl PartialEq<Token> for TokenWithLocation {
+impl PartialEq<Token> for TokenWithSpan {
     fn eq(&self, other: &Token) -> bool {
         &self.token == other
     }
 }
 
-impl PartialEq<TokenWithLocation> for Token {
-    fn eq(&self, other: &TokenWithLocation) -> bool {
+impl PartialEq<TokenWithSpan> for Token {
+    fn eq(&self, other: &TokenWithSpan) -> bool {
         self == &other.token
     }
 }
 
-impl fmt::Display for TokenWithLocation {
+impl fmt::Display for TokenWithSpan {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.token.fmt(f)
     }
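
The constructors keep their shapes under the new name. A small usage sketch, assuming these items are publicly reachable via sqlparser::tokenizer and that the tuple-to-Location conversion used by the test hunk further down is also available to callers:

use sqlparser::tokenizer::{Span, Token, TokenWithSpan};

fn spans_demo() {
    // wrap() attaches an empty span when no source position is known.
    let eof = TokenWithSpan::wrap(Token::EOF);
    assert_eq!(eof.span, Span::empty());

    // at() builds the span from explicit start/end locations.
    let comma = TokenWithSpan::at(Token::Comma, (1, 9).into(), (1, 10).into());

    // PartialEq<Token> works in both directions, and Display delegates to the token.
    assert_eq!(comma, Token::Comma);
    assert_eq!(Token::Comma, comma);
    println!("{comma}"); // prints `,`
}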
@@ -716,8 +720,8 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Tokenize the statement and produce a vector of tokens with location information
-    pub fn tokenize_with_location(&mut self) -> Result<Vec<TokenWithLocation>, TokenizerError> {
-        let mut tokens: Vec<TokenWithLocation> = vec![];
+    pub fn tokenize_with_location(&mut self) -> Result<Vec<TokenWithSpan>, TokenizerError> {
+        let mut tokens: Vec<TokenWithSpan> = vec![];
         self.tokenize_with_location_into_buf(&mut tokens)
             .map(|_| tokens)
     }
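
Under the new name, tokenize_with_location produces Vec<TokenWithSpan>. A brief calling sketch, assuming the crate's usual Tokenizer::new(&dialect, sql) constructor and GenericDialect, which are not part of this diff:

use sqlparser::dialect::GenericDialect;
use sqlparser::tokenizer::{TokenWithSpan, Tokenizer, TokenizerError};

fn spanned_tokens(sql: &str) -> Result<Vec<TokenWithSpan>, TokenizerError> {
    let dialect = GenericDialect {};
    // Each returned item pairs the token with the span covering its source text.
    Tokenizer::new(&dialect, sql).tokenize_with_location()
}

fn main() -> Result<(), TokenizerError> {
    for t in spanned_tokens("SELECT a,\n b")? {
        println!("{:?} -> {:?}", t.token, t.span);
    }
    Ok(())
}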
@@ -726,7 +730,7 @@ impl<'a> Tokenizer<'a> {
     /// If an error is thrown, the buffer will contain all tokens that were successfully parsed before the error.
     pub fn tokenize_with_location_into_buf(
         &mut self,
-        buf: &mut Vec<TokenWithLocation>,
+        buf: &mut Vec<TokenWithSpan>,
     ) -> Result<(), TokenizerError> {
         let mut state = State {
             peekable: self.query.chars().peekable(),
@@ -738,7 +742,7 @@ impl<'a> Tokenizer<'a> {
         while let Some(token) = self.next_token(&mut state)? {
             let span = location.span_to(state.location());
 
-            buf.push(TokenWithLocation { token, span });
+            buf.push(TokenWithSpan { token, span });
 
             location = state.location();
         }
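
tokenize_with_location_into_buf keeps the same buffer-filling contract with the new element type: the caller owns the Vec, and on error it still holds every token produced before the failure. A sketch of reusing one buffer across statements (the buffer-reuse motivation and the error formatting are illustrative assumptions):

use sqlparser::dialect::GenericDialect;
use sqlparser::tokenizer::{TokenWithSpan, Tokenizer};

fn main() {
    let dialect = GenericDialect {};
    // Reuse one allocation for several statements.
    let mut buf: Vec<TokenWithSpan> = Vec::new();
    for sql in ["SELECT 1", "SELECT a,\n b"] {
        buf.clear();
        match Tokenizer::new(&dialect, sql).tokenize_with_location_into_buf(&mut buf) {
            Ok(()) => println!("{sql:?}: {} tokens", buf.len()),
            // On Err, buf contains the tokens successfully produced before the error.
            Err(e) => eprintln!("{sql:?}: failed after {} tokens: {e}", buf.len()),
        }
    }
}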
@@ -2751,25 +2755,25 @@ mod tests {
             .tokenize_with_location()
             .unwrap();
         let expected = vec![
-            TokenWithLocation::at(Token::make_keyword("SELECT"), (1, 1).into(), (1, 7).into()),
-            TokenWithLocation::at(
+            TokenWithSpan::at(Token::make_keyword("SELECT"), (1, 1).into(), (1, 7).into()),
+            TokenWithSpan::at(
                 Token::Whitespace(Whitespace::Space),
                 (1, 7).into(),
                 (1, 8).into(),
             ),
-            TokenWithLocation::at(Token::make_word("a", None), (1, 8).into(), (1, 9).into()),
-            TokenWithLocation::at(Token::Comma, (1, 9).into(), (1, 10).into()),
-            TokenWithLocation::at(
+            TokenWithSpan::at(Token::make_word("a", None), (1, 8).into(), (1, 9).into()),
+            TokenWithSpan::at(Token::Comma, (1, 9).into(), (1, 10).into()),
+            TokenWithSpan::at(
                 Token::Whitespace(Whitespace::Newline),
                 (1, 10).into(),
                 (2, 1).into(),
             ),
-            TokenWithLocation::at(
+            TokenWithSpan::at(
                 Token::Whitespace(Whitespace::Space),
                 (2, 1).into(),
                 (2, 2).into(),
             ),
-            TokenWithLocation::at(Token::make_word("b", None), (2, 2).into(), (2, 3).into()),
+            TokenWithSpan::at(Token::make_word("b", None), (2, 2).into(), (2, 3).into()),
         ];
         compare(expected, tokens);
     }