Mirror of https://github.com/rust-lang/rust-analyzer.git, synced 2025-09-15 06:45:27 +00:00

Merge commit 'aa9bc86125' into sync-from-ra
This commit is contained in:
parent 1570299af4
commit c48062fe2a
598 changed files with 57696 additions and 17615 deletions
@@ -236,6 +236,21 @@ impl ast::GenericParamList {
        }
    }

    /// Removes the existing generic param
    pub fn remove_generic_param(&self, generic_param: ast::GenericParam) {
        if let Some(previous) = generic_param.syntax().prev_sibling() {
            if let Some(next_token) = previous.next_sibling_or_token() {
                ted::remove_all(next_token..=generic_param.syntax().clone().into());
            }
        } else if let Some(next) = generic_param.syntax().next_sibling() {
            if let Some(next_token) = next.prev_sibling_or_token() {
                ted::remove_all(generic_param.syntax().clone().into()..=next_token);
            }
        } else {
            ted::remove(generic_param.syntax());
        }
    }

    /// Constructs a matching [`ast::GenericArgList`]
    pub fn to_generic_args(&self) -> ast::GenericArgList {
        let args = self.generic_params().filter_map(|param| match param {
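The hunk above adds `remove_generic_param` and `to_generic_args` to `ast::GenericParamList`. A minimal usage sketch, not part of the diff: it assumes the crate's existing `SourceFile::parse` / `clone_for_update` entry points, and the helper name is made up for illustration.

use syntax::{ast, AstNode, SourceFile};

// Hypothetical helper: drop the second generic parameter of the first
// parameter list in `src` and render the matching argument list.
fn drop_second_generic_param(src: &str) -> Option<String> {
    let file = SourceFile::parse(src).tree();
    // `ted`-based editing needs a mutable copy of the tree.
    let list = file
        .syntax()
        .descendants()
        .find_map(ast::GenericParamList::cast)?
        .clone_for_update();
    let second = list.generic_params().nth(1)?;
    list.remove_generic_param(second);
    // `to_generic_args` builds the matching `<...>` argument list.
    Some(list.to_generic_args().to_string())
}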
@@ -465,6 +480,8 @@ impl ast::Impl {
}

impl ast::AssocItemList {
    /// Attention! This function does align the first line of `item` with respect to `self`,
    /// but it does _not_ change indentation of other lines (if any).
    pub fn add_item(&self, item: ast::AssocItem) {
        let (indent, position, whitespace) = match self.assoc_items().last() {
            Some(last_item) => (
@@ -288,6 +288,7 @@ impl ast::ArrayExpr {
pub enum LiteralKind {
    String(ast::String),
    ByteString(ast::ByteString),
    CString(ast::CString),
    IntNumber(ast::IntNumber),
    FloatNumber(ast::FloatNumber),
    Char(ast::Char),
@@ -319,6 +320,9 @@ impl ast::Literal {
        if let Some(t) = ast::ByteString::cast(token.clone()) {
            return LiteralKind::ByteString(t);
        }
        if let Some(t) = ast::CString::cast(token.clone()) {
            return LiteralKind::CString(t);
        }
        if let Some(t) = ast::Char::cast(token.clone()) {
            return LiteralKind::Char(t);
        }
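`ast::Literal::kind()` can now classify C string literals via the new `CString` variant. An illustrative sketch, not from the diff; it assumes `c"..."` literals lex as `C_STRING` after this change:

use syntax::{ast, AstNode, SourceFile};

// Hypothetical helper: does the first literal in `src` parse as a C string?
fn first_literal_is_c_string(src: &str) -> bool {
    let file = SourceFile::parse(src).tree();
    let lit = file.syntax().descendants().find_map(ast::Literal::cast);
    matches!(lit.map(|l| l.kind()), Some(ast::LiteralKind::CString(_)))
}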
@@ -366,8 +370,7 @@ impl ast::BlockExpr {
        match parent.kind() {
            FOR_EXPR | IF_EXPR => parent
                .children()
                .filter(|it| ast::Expr::can_cast(it.kind()))
                .next()
                .find(|it| ast::Expr::can_cast(it.kind()))
                .map_or(true, |it| it == *self.syntax()),
            LET_ELSE | FN | WHILE_EXPR | LOOP_EXPR | CONST_BLOCK_PAT => false,
            _ => true,
@@ -121,6 +121,8 @@ impl ast::HasTypeBounds for AssocTypeArg {}
impl AssocTypeArg {
    pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
    pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
    pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
    pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
    pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) }
@@ -1375,6 +1377,7 @@ pub struct LiteralPat {
    pub(crate) syntax: SyntaxNode,
}
impl LiteralPat {
    pub fn minus_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![-]) }
    pub fn literal(&self) -> Option<Literal> { support::child(&self.syntax) }
}
@@ -90,6 +90,27 @@ impl AstToken for ByteString {
    fn syntax(&self) -> &SyntaxToken { &self.syntax }
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CString {
    pub(crate) syntax: SyntaxToken,
}
impl std::fmt::Display for CString {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.syntax, f)
    }
}
impl AstToken for CString {
    fn can_cast(kind: SyntaxKind) -> bool { kind == C_STRING }
    fn cast(syntax: SyntaxToken) -> Option<Self> {
        if Self::can_cast(syntax.kind()) {
            Some(Self { syntax })
        } else {
            None
        }
    }
    fn syntax(&self) -> &SyntaxToken { &self.syntax }
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct IntNumber {
    pub(crate) syntax: SyntaxToken,
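The generated `CString` token follows the usual `AstToken` pattern (`can_cast`/`cast`/`syntax`). A small sketch, not part of the diff, of casting the raw token out of a parsed tree:

use syntax::{ast, AstNode, AstToken, SourceFile};

// Hypothetical helper: find the first C_STRING token in `src`, if any.
fn find_c_string(src: &str) -> Option<ast::CString> {
    let file = SourceFile::parse(src).tree();
    file.syntax()
        .descendants_with_tokens()
        .filter_map(|element| element.into_token())
        .find_map(ast::CString::cast)
}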
@@ -158,34 +158,148 @@ fn ty_from_text(text: &str) -> ast::Type {
    ast_from_text(&format!("type _T = {text};"))
}

pub fn ty_alias(
    ident: &str,
    generic_param_list: Option<ast::GenericParamList>,
    type_param_bounds: Option<ast::TypeParam>,
    where_clause: Option<ast::WhereClause>,
    assignment: Option<(ast::Type, Option<ast::WhereClause>)>,
) -> ast::TypeAlias {
    let mut s = String::new();
    s.push_str(&format!("type {} ", ident));

    if let Some(list) = generic_param_list {
        s.push_str(&list.to_string());
    }

    if let Some(list) = type_param_bounds {
        s.push_str(&format!(" : {}", &list));
    }

    if let Some(cl) = where_clause {
        s.push_str(&format!(" {}", &cl.to_string()));
    }

    if let Some(exp) = assignment {
        if let Some(cl) = exp.1 {
            s.push_str(&format!("= {} {}", &exp.0.to_string(), &cl.to_string()));
        } else {
            s.push_str(&format!("= {}", &exp.0.to_string()));
        }
    }

    s.push(';');
    ast_from_text(&s)
}
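A rough sketch, not from the diff, of calling the new `make::ty_alias`; `make::ty` is the module's existing type constructor:

use syntax::ast::make;

// Builds roughly `type Foo = u32;` (illustrative only).
fn example_ty_alias() -> syntax::ast::TypeAlias {
    make::ty_alias("Foo", None, None, None, Some((make::ty("u32"), None)))
}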
pub fn assoc_item_list() -> ast::AssocItemList {
    ast_from_text("impl C for D {}")
}

// FIXME: `ty_params` should be `ast::GenericArgList`
pub fn impl_(
    ty: ast::Path,
    params: Option<ast::GenericParamList>,
    ty_params: Option<ast::GenericParamList>,
) -> ast::Impl {
    let params = match params {
        Some(params) => params.to_string(),
        None => String::new(),
    };
    let ty_params = match ty_params {
        Some(params) => params.to_string(),
        None => String::new(),
    };
    ast_from_text(&format!("impl{params} {ty}{ty_params} {{}}"))
fn merge_gen_params(
    ps: Option<ast::GenericParamList>,
    bs: Option<ast::GenericParamList>,
) -> Option<ast::GenericParamList> {
    match (ps, bs) {
        (None, None) => None,
        (None, Some(bs)) => Some(bs),
        (Some(ps), None) => Some(ps),
        (Some(ps), Some(bs)) => {
            for b in bs.generic_params() {
                ps.add_generic_param(b);
            }
            Some(ps)
        }
    }
}

pub fn impl_trait(
    trait_: ast::Path,
    ty: ast::Path,
    ty_params: Option<ast::GenericParamList>,
pub fn impl_(
    generic_params: Option<ast::GenericParamList>,
    generic_args: Option<ast::GenericParamList>,
    path_type: ast::Type,
    where_clause: Option<ast::WhereClause>,
    body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
) -> ast::Impl {
    let ty_params = ty_params.map_or_else(String::new, |params| params.to_string());
    ast_from_text(&format!("impl{ty_params} {trait_} for {ty}{ty_params} {{}}"))
    let (gen_params, tr_gen_args) = match (generic_params, generic_args) {
        (None, None) => (String::new(), String::new()),
        (None, Some(args)) => (String::new(), args.to_generic_args().to_string()),
        (Some(params), None) => (params.to_string(), params.to_generic_args().to_string()),
        (Some(params), Some(args)) => match merge_gen_params(Some(params.clone()), Some(args)) {
            Some(merged) => (params.to_string(), merged.to_generic_args().to_string()),
            None => (params.to_string(), String::new()),
        },
    };

    let where_clause = match where_clause {
        Some(pr) => pr.to_string(),
        None => " ".to_string(),
    };

    let body = match body {
        Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),
        None => String::new(),
    };

    ast_from_text(&format!("impl{gen_params} {path_type}{tr_gen_args}{where_clause}{{{}}}", body))
}
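As a rough illustration (not part of the diff), the reworked `make::impl_` is driven entirely by optional pieces; with everything omitted it should produce an empty inherent impl:

use syntax::ast::make;

// With no generics, where clause, or body items this yields roughly `impl Foo {}`.
fn empty_impl() -> syntax::ast::Impl {
    make::impl_(None, None, make::ty("Foo"), None, None)
}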
// FIXME : We must make *_gen_args' type ast::GenericArgList but in order to do so we must implement in `edit_in_place.rs`
// `add_generic_arg()` just like `add_generic_param()`
// is implemented for `ast::GenericParamList`
pub fn impl_trait(
    is_unsafe: bool,
    trait_gen_params: Option<ast::GenericParamList>,
    trait_gen_args: Option<ast::GenericParamList>,
    type_gen_params: Option<ast::GenericParamList>,
    type_gen_args: Option<ast::GenericParamList>,
    is_negative: bool,
    path_type: ast::Type,
    ty: ast::Type,
    trait_where_clause: Option<ast::WhereClause>,
    ty_where_clause: Option<ast::WhereClause>,
    body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
) -> ast::Impl {
    let is_unsafe = if is_unsafe { "unsafe " } else { "" };
    let ty_gen_args = match merge_gen_params(type_gen_params.clone(), type_gen_args) {
        Some(pars) => pars.to_generic_args().to_string(),
        None => String::new(),
    };

    let tr_gen_args = match merge_gen_params(trait_gen_params.clone(), trait_gen_args) {
        Some(pars) => pars.to_generic_args().to_string(),
        None => String::new(),
    };

    let gen_params = match merge_gen_params(trait_gen_params, type_gen_params) {
        Some(pars) => pars.to_string(),
        None => String::new(),
    };

    let is_negative = if is_negative { "! " } else { "" };

    let where_clause = match (ty_where_clause, trait_where_clause) {
        (None, None) => " ".to_string(),
        (None, Some(tr)) => format!("\n{}\n", tr).to_string(),
        (Some(ty), None) => format!("\n{}\n", ty).to_string(),
        (Some(ty), Some(tr)) => {
            let updated = ty.clone_for_update();
            tr.predicates().for_each(|p| {
                ty.add_predicate(p);
            });
            format!("\n{}\n", updated).to_string()
        }
    };

    let body = match body {
        Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),
        None => String::new(),
    };

    ast_from_text(&format!("{is_unsafe}impl{gen_params} {is_negative}{path_type}{tr_gen_args} for {ty}{ty_gen_args}{where_clause}{{{}}}" , body))
}
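A usage sketch of the new `make::impl_trait`, not part of the diff; with no generics, negativity, where clauses, or body it should produce a bare trait impl:

use syntax::ast::make;

// Roughly `impl Display for Foo {}` (illustrative only).
fn display_impl() -> syntax::ast::Impl {
    make::impl_trait(
        false,               // is_unsafe
        None, None,          // trait generic params / args
        None, None,          // type generic params / args
        false,               // is_negative
        make::ty("Display"), // the trait, as a path type
        make::ty("Foo"),     // the self type
        None, None,          // trait / type where clauses
        None,                // body items
    )
}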
pub fn impl_trait_type(bounds: ast::TypeBoundList) -> ast::ImplTraitType {
    ast_from_text(&format!("fn f(x: impl {bounds}) {{}}"))
}

pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment {
@@ -355,7 +469,7 @@ pub fn hacky_block_expr(
                format_to!(buf, " {t}\n")
            } else if kind == SyntaxKind::WHITESPACE {
                let content = t.text().trim_matches(|c| c != '\n');
                if content.len() >= 1 {
                if !content.is_empty() {
                    format_to!(buf, "{}", &content[1..])
                }
            }
@@ -827,6 +941,8 @@ pub fn fn_(
    body: ast::BlockExpr,
    ret_type: Option<ast::RetType>,
    is_async: bool,
    is_const: bool,
    is_unsafe: bool,
) -> ast::Fn {
    let type_params = match type_params {
        Some(type_params) => format!("{type_params}"),
@@ -846,12 +962,13 @@ pub fn fn_(
    };

    let async_literal = if is_async { "async " } else { "" };
    let const_literal = if is_const { "const " } else { "" };
    let unsafe_literal = if is_unsafe { "unsafe " } else { "" };

    ast_from_text(&format!(
        "{visibility}{async_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}",
        "{visibility}{async_literal}{const_literal}{unsafe_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}",
    ))
}

pub fn struct_(
    visibility: Option<ast::Visibility>,
    strukt_name: ast::Name,
@@ -901,7 +1018,7 @@ pub mod tokens {
    pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
        SourceFile::parse(
            "const C: <()>::Item = (1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p)\n;\n\n",
            "const C: <()>::Item = (1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p)\n;\n\n",
        )
    });
@@ -145,6 +145,10 @@ impl QuoteOffsets {
}

pub trait IsString: AstToken {
    const RAW_PREFIX: &'static str;
    fn is_raw(&self) -> bool {
        self.text().starts_with(Self::RAW_PREFIX)
    }
    fn quote_offsets(&self) -> Option<QuoteOffsets> {
        let text = self.text();
        let offsets = QuoteOffsets::new(text)?;
@@ -183,20 +187,18 @@ pub trait IsString: AstToken {
            cb(text_range + offset, unescaped_char);
        });
    }
}

impl IsString for ast::String {}

impl ast::String {
    pub fn is_raw(&self) -> bool {
        self.text().starts_with('r')
    }
    pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
    fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
        let contents_range = self.text_range_between_quotes()?;
        assert!(TextRange::up_to(contents_range.len()).contains_range(range));
        Some(range + contents_range.start())
    }
}

impl IsString for ast::String {
    const RAW_PREFIX: &'static str = "r";
}

impl ast::String {
    pub fn value(&self) -> Option<Cow<'_, str>> {
        if self.is_raw() {
            let text = self.text();
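`is_raw` and `map_range_up` now live on the `IsString` trait, keyed by a per-token `RAW_PREFIX`. A small illustrative sketch, not from the diff, of the behaviour for string literals:

use syntax::{ast, ast::IsString, AstNode, SourceFile};

// Hypothetical helper: is the first string literal in `src` a raw string?
fn first_string_is_raw(src: &str) -> Option<bool> {
    let file = SourceFile::parse(src).tree();
    let lit = file.syntax().descendants().find_map(ast::Literal::cast)?;
    match lit.kind() {
        // `is_raw` now just checks the token-specific prefix ("r" here).
        ast::LiteralKind::String(s) => Some(s.is_raw()),
        _ => None,
    }
}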
@@ -235,13 +237,11 @@ impl ast::String {
    }
}

impl IsString for ast::ByteString {}
impl IsString for ast::ByteString {
    const RAW_PREFIX: &'static str = "br";
}

impl ast::ByteString {
    pub fn is_raw(&self) -> bool {
        self.text().starts_with("br")
    }

    pub fn value(&self) -> Option<Cow<'_, [u8]>> {
        if self.is_raw() {
            let text = self.text();
@@ -280,6 +280,49 @@ impl ast::ByteString {
    }
}

impl IsString for ast::CString {
    const RAW_PREFIX: &'static str = "cr";
}

impl ast::CString {
    pub fn value(&self) -> Option<Cow<'_, str>> {
        if self.is_raw() {
            let text = self.text();
            let text =
                &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
            return Some(Cow::Borrowed(text));
        }

        let text = self.text();
        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];

        let mut buf = String::new();
        let mut prev_end = 0;
        let mut has_error = false;
        unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
            unescaped_char,
            buf.capacity() == 0,
        ) {
            (Ok(c), false) => buf.push(c),
            (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
                prev_end = char_range.end
            }
            (Ok(c), true) => {
                buf.reserve_exact(text.len());
                buf.push_str(&text[..prev_end]);
                buf.push(c);
            }
            (Err(_), _) => has_error = true,
        });

        match (has_error, buf.capacity() == 0) {
            (true, _) => None,
            (false, true) => Some(Cow::Borrowed(text)),
            (false, false) => Some(Cow::Owned(buf)),
        }
    }
}
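An illustrative sketch, not part of the diff, of `ast::CString::value` doing the unquoting and unescaping; it assumes `c"..."` literals parse after this change:

use syntax::{ast, AstNode, SourceFile};

// Hypothetical helper: e.g. for `c"he\x6clo"` this should yield Some("hello".to_string()).
fn c_string_value(src: &str) -> Option<String> {
    let file = SourceFile::parse(src).tree();
    let lit = file.syntax().descendants().find_map(ast::Literal::cast)?;
    match lit.kind() {
        ast::LiteralKind::CString(c) => c.value().map(|cow| cow.into_owned()),
        _ => None,
    }
}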
impl ast::IntNumber {
    pub fn radix(&self) -> Radix {
        match self.text().get(..2).unwrap_or_default() {
@@ -43,10 +43,11 @@ pub mod utils;
pub mod ted;
pub mod hacks;

use std::{marker::PhantomData, sync::Arc};
use std::marker::PhantomData;

use stdx::format_to;
use text_edit::Indel;
use triomphe::Arc;

pub use crate::{
    ast::{AstNode, AstToken},
@@ -39,7 +39,7 @@ fn reparse_token(
    let prev_token = root.covering_element(edit.delete).as_token()?.clone();
    let prev_token_kind = prev_token.kind();
    match prev_token_kind {
        WHITESPACE | COMMENT | IDENT | STRING => {
        WHITESPACE | COMMENT | IDENT | STRING | BYTE_STRING | C_STRING => {
            if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT {
                // removing a new line may extends previous token
                let deleted_range = edit.delete - prev_token.text_range().start();
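Single-token reparsing now also covers `BYTE_STRING` and `C_STRING`. A rough sketch, not from the diff, of the incremental-reparse entry point; it assumes `Parse::reparse` and `text_edit::Indel` keep the shapes they had around this commit:

use syntax::{SourceFile, TextRange, TextSize};
use text_edit::Indel;

fn edit_inside_byte_string() {
    let parse = SourceFile::parse(r#"const B: &[u8] = b"abc";"#);
    // Replace the `b` inside the byte string with `x`; the edit stays inside a single
    // BYTE_STRING token, so the single-token fast path is allowed to handle it.
    let indel = Indel::replace(TextRange::new(TextSize::from(20), TextSize::from(21)), "x".to_string());
    let reparsed = parse.reparse(&indel);
    assert_eq!(reparsed.tree().to_string(), r#"const B: &[u8] = b"axc";"#);
}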
@@ -166,8 +166,8 @@ fn merge_errors(
    }
    res.extend(new_errors.into_iter().map(|new_err| {
        // fighting borrow checker with a variable ;)
        let offseted_range = new_err.range() + range_before_reparse.start();
        new_err.with_range(offseted_range)
        let offsetted_range = new_err.range() + range_before_reparse.start();
        new_err.with_range(offsetted_range)
    }));
    res
}
@@ -408,7 +408,7 @@ enum Foo {
#[test]
fn reparse_str_token_with_error_fixed() {
    do_check(r#""unterinated$0$0"#, "\"", 12);
    do_check(r#""unterminated$0$0"#, "\"", 13);
}

#[test]
@@ -71,7 +71,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
        "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield",
    ],
    contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules", "yeet"],
    literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"],
    literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING", "C_STRING"],
    tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"],
    nodes: &[
        "SOURCE_FILE",
@@ -199,6 +199,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
        "GENERIC_PARAM",
        "LIFETIME_PARAM",
        "TYPE_PARAM",
        "RETURN_TYPE_ARG",
        "CONST_PARAM",
        "GENERIC_ARG_LIST",
        "LIFETIME",
@@ -535,6 +535,7 @@ impl Field {
            "!" => "excl",
            "*" => "star",
            "&" => "amp",
            "-" => "minus",
            "_" => "underscore",
            "." => "dot",
            ".." => "dotdot",
@@ -572,10 +573,11 @@ impl Field {
fn lower(grammar: &Grammar) -> AstSrc {
    let mut res = AstSrc {
        tokens: "Whitespace Comment String ByteString IntNumber FloatNumber Char Byte Ident"
            .split_ascii_whitespace()
            .map(|it| it.to_string())
            .collect::<Vec<_>>(),
        tokens:
            "Whitespace Comment String ByteString CString IntNumber FloatNumber Char Byte Ident"
                .split_ascii_whitespace()
                .map(|it| it.to_string())
                .collect::<Vec<_>>(),
        ..Default::default()
    };
@@ -3,6 +3,7 @@
use std::{cmp::Ordering, fmt, ops};

use rowan::GreenToken;
use smol_str::SmolStr;

pub struct TokenText<'a>(pub(crate) Repr<'a>);
@@ -47,6 +48,12 @@ impl From<TokenText<'_>> for String {
    }
}

impl From<TokenText<'_>> for SmolStr {
    fn from(token_text: TokenText<'_>) -> Self {
        SmolStr::new(token_text.as_str())
    }
}

impl PartialEq<&'_ str> for TokenText<'_> {
    fn eq(&self, other: &&str) -> bool {
        self.as_str() == *other
@@ -5,11 +5,11 @@
mod block;

use rowan::Direction;
use rustc_lexer::unescape::{self, unescape_byte, unescape_char, unescape_literal, Mode};
use rustc_lexer::unescape::{self, unescape_literal, Mode};

use crate::{
    algo,
    ast::{self, HasAttrs, HasVisibility},
    ast::{self, HasAttrs, HasVisibility, IsString},
    match_ast, AstNode, SyntaxError,
    SyntaxKind::{CONST, FN, INT_NUMBER, TYPE_ALIAS},
    SyntaxNode, SyntaxToken, TextSize, T,
@@ -44,7 +44,7 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
    errors
}

fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> &'static str {
fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> (&'static str, bool) {
    use unescape::EscapeError as EE;

    #[rustfmt::skip]
@@ -103,12 +103,15 @@ fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> &'static str {
        EE::UnicodeEscapeInByte => {
            "Byte literals must not contain unicode escapes"
        }
        EE::NonAsciiCharInByte | EE::NonAsciiCharInByteString => {
        EE::NonAsciiCharInByte => {
            "Byte literals must not contain non-ASCII characters"
        }
        EE::UnskippedWhitespaceWarning => "Whitespace after this escape is not skipped",
        EE::MultipleSkippedLinesWarning => "Multiple lines are skipped by this escape",
    };

    err_message
    (err_message, err.is_fatal())
}

fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
@@ -121,9 +124,13 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
    let text = token.text();

    // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-lang/rust-analyzer/pull/2834#discussion_r366199205)
    let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
    let mut push_err = |prefix_len, off, err: unescape::EscapeError| {
        let off = token.text_range().start() + TextSize::try_from(off + prefix_len).unwrap();
        acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
        let (message, is_err) = rustc_unescape_error_to_string(err);
        // FIXME: Emit lexer warnings
        if is_err {
            acc.push(SyntaxError::new_at_offset(message, off));
        }
    };

    match literal.kind() {
@@ -132,7 +139,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
            if let Some(without_quotes) = unquote(text, 1, '"') {
                unescape_literal(without_quotes, Mode::Str, &mut |range, char| {
                    if let Err(err) = char {
                        push_err(1, (range.start, err));
                        push_err(1, range.start, err);
                    }
                });
            }
@@ -143,20 +150,39 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
                if let Some(without_quotes) = unquote(text, 2, '"') {
                    unescape_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
                        if let Err(err) = char {
                            push_err(2, (range.start, err));
                            push_err(1, range.start, err);
                        }
                    });
                }
            }
        }
        ast::LiteralKind::CString(s) => {
            if !s.is_raw() {
                if let Some(without_quotes) = unquote(text, 2, '"') {
                    unescape_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
                        if let Err(err) = char {
                            push_err(1, range.start, err);
                        }
                    });
                }
            }
        }
        ast::LiteralKind::Char(_) => {
            if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) {
                push_err(1, e);
            if let Some(without_quotes) = unquote(text, 1, '\'') {
                unescape_literal(without_quotes, Mode::Char, &mut |range, char| {
                    if let Err(err) = char {
                        push_err(1, range.start, err);
                    }
                });
            }
        }
        ast::LiteralKind::Byte(_) => {
            if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) {
                push_err(2, e);
            if let Some(without_quotes) = unquote(text, 2, '\'') {
                unescape_literal(without_quotes, Mode::Byte, &mut |range, char| {
                    if let Err(err) = char {
                        push_err(2, range.start, err);
                    }
                });
            }
        }
        ast::LiteralKind::IntNumber(_)
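With `validate_literal` now routed through `unescape_literal` for every literal kind, malformed escapes surface as ordinary parse errors. A rough sketch, not from the diff, assuming `Parse::errors` keeps its usual shape:

use syntax::SourceFile;

// Hypothetical helper: a bad escape such as `b'\z'` should report at least one error.
fn has_literal_errors(src: &str) -> bool {
    !SourceFile::parse(src).errors().is_empty()
}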
@@ -175,14 +201,14 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) {
                assert_eq!(
                    node.parent(),
                    pair.parent(),
                    "\nunpaired curlys:\n{}\n{:#?}\n",
                    "\nunpaired curlies:\n{}\n{:#?}\n",
                    root.text(),
                    root,
                );
                assert!(
                    node.next_sibling_or_token().is_none()
                        && pair.prev_sibling_or_token().is_none(),
                    "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
                    "\nfloating curlies at {:?}\nfile:\n{}\nerror:\n{}\n",
                    node,
                    root.text(),
                    node,