Auto merge of #16048 - Veykril:concat-bytes-fix, r=Veykril

fix: Fix concat_bytes! expansion emitting an identifier

Fixes https://github.com/rust-lang/rust-analyzer/issues/16046 (note that this has always been broken)
bors committed 2023-12-08 12:27:58 +00:00
commit 86cccc76e3
12 changed files with 107 additions and 49 deletions
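In short: the old `concat_bytes_expand` glued every byte value into one string and emitted it as a single identifier token inside brackets, so the expansion printed correctly but contained a bogus ident; the fix emits one literal token per byte, separated by comma punctuation. A minimal standalone sketch of that difference (hypothetical `Token` enum, not rust-analyzer's real `tt` types):

// Models only the shape of the token stream, not rust-analyzer's real types.
#[derive(Debug, PartialEq)]
enum Token {
    Ident(String),   // a single identifier token
    Literal(String), // a byte/numeric literal token
    Punct(char),     // punctuation such as ','
}

fn old_expansion(bytes: &[&str]) -> Vec<Token> {
    // Buggy shape: everything joined into one identifier.
    vec![Token::Ident(bytes.join(", "))]
}

fn new_expansion(bytes: &[&str]) -> Vec<Token> {
    // Fixed shape: literal, ',', literal, ',', literal, ...
    let mut out = Vec::new();
    for (i, b) in bytes.iter().enumerate() {
        if i > 0 {
            out.push(Token::Punct(','));
        }
        out.push(Token::Literal((*b).to_string()));
    }
    out
}

fn main() {
    let bytes = ["b'A'", "66", "67"];
    assert_eq!(old_expansion(&bytes), vec![Token::Ident("b'A', 66, 67".into())]);
    assert_eq!(
        new_expansion(&bytes),
        vec![
            Token::Literal("b'A'".into()),
            Token::Punct(','),
            Token::Literal("66".into()),
            Token::Punct(','),
            Token::Literal("67".into()),
        ]
    );
}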


@@ -468,12 +468,12 @@ macro_rules! concat_bytes {}
 fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); }
 "##,
-        expect![[r##"
+        expect![[r#"
 #[rustc_builtin_macro]
 macro_rules! concat_bytes {}
 fn main() { [b'A', 66, 67, 68, b'E', 70]; }
-"##]],
+"#]],
     );
 }


@@ -1004,3 +1004,29 @@ fn main() {
 "##]],
     );
 }
+
+#[test]
+fn eager_concat_bytes_panic() {
+    check(
+        r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat_bytes {}
+
+fn main() {
+    let x = concat_bytes!(2);
+}
+"#,
+        expect![[r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat_bytes {}
+
+fn main() {
+    let x = /* error: unexpected token in input */[];
+}
+"#]],
+    );
+}


@@ -6,6 +6,7 @@ use base_db::{
 };
 use cfg::CfgExpr;
 use either::Either;
+use itertools::Itertools;
 use mbe::{parse_exprs_with_sep, parse_to_token_tree};
 use syntax::{
     ast::{self, AstToken},
@@ -491,8 +492,25 @@ fn concat_bytes_expand(
             }
         }
     }
-    let ident = tt::Ident { text: bytes.join(", ").into(), span };
-    ExpandResult { value: quote!(span =>[#ident]), err }
+    let value = tt::Subtree {
+        delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+        token_trees: {
+            Itertools::intersperse_with(
+                bytes.into_iter().map(|it| {
+                    tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
+                }),
+                || {
+                    tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+                        char: ',',
+                        spacing: tt::Spacing::Alone,
+                        span,
+                    }))
+                },
+            )
+            .collect()
+        },
+    };
+    ExpandResult { value, err }
 }

 fn concat_bytes_expand_subtree(
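The comma insertion above uses `Itertools::intersperse_with` in fully qualified form (presumably to avoid clashing with the unstable `Iterator::intersperse_with` in std). A minimal sketch of the same pattern on plain strings, assuming an `itertools` dependency:

use itertools::Itertools;

fn main() {
    let bytes = vec!["b'A'".to_string(), "66".to_string(), "67".to_string()];
    // Map each byte literal to an item and lazily insert a separator between
    // consecutive items, mirroring the token-tree construction in the diff.
    let interspersed: Vec<String> =
        Itertools::intersperse_with(bytes.into_iter(), || ", ".to_string()).collect();
    assert_eq!(interspersed.concat(), "b'A', 66, 67");
}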


@@ -137,7 +137,7 @@ pub fn intern_const_ref(
     ty: Ty,
     krate: CrateId,
 ) -> Const {
-    let layout = db.layout_of_ty(ty.clone(), Arc::new(TraitEnvironment::empty(krate)));
+    let layout = db.layout_of_ty(ty.clone(), TraitEnvironment::empty(krate));
     let bytes = match value {
         LiteralConstRef::Int(i) => {
             // FIXME: We should handle failure of layout better.


@@ -448,9 +448,8 @@ fn render_const_scalar(
 ) -> Result<(), HirDisplayError> {
     // FIXME: We need to get krate from the final callers of the hir display
     // infrastructure and have it here as a field on `f`.
-    let trait_env = Arc::new(TraitEnvironment::empty(
-        *f.db.crate_graph().crates_in_topological_order().last().unwrap(),
-    ));
+    let trait_env =
+        TraitEnvironment::empty(*f.db.crate_graph().crates_in_topological_order().last().unwrap());
     match ty.kind(Interner) {
         TyKind::Scalar(s) => match s {
             Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),


@@ -18,7 +18,6 @@ use hir_def::{
 use hir_expand::name::{name, Name};
 use stdx::always;
 use syntax::ast::RangeOp;
-use triomphe::Arc;

 use crate::{
     autoderef::{builtin_deref, deref_by_trait, Autoderef},
@@ -40,7 +39,8 @@ use crate::{
     traits::FnTrait,
     utils::{generics, Generics},
     Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst,
-    Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+    Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt,
+    TyKind,
 };

 use super::{
@@ -1291,7 +1291,7 @@ impl InferenceContext<'_> {
         let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
         let prev_env = block_id.map(|block_id| {
             let prev_env = self.table.trait_env.clone();
-            Arc::make_mut(&mut self.table.trait_env).block = Some(block_id);
+            TraitEnvironment::with_block(&mut self.table.trait_env, block_id);
             prev_env
         });


@@ -122,7 +122,7 @@ pub type TyKind = chalk_ir::TyKind<Interner>;
 pub type TypeFlags = chalk_ir::TypeFlags;
 pub type DynTy = chalk_ir::DynTy<Interner>;
 pub type FnPointer = chalk_ir::FnPointer<Interner>;
-// pub type FnSubst = chalk_ir::FnSubst<Interner>;
+// pub type FnSubst = chalk_ir::FnSubst<Interner>; // a re-export so we don't lose the tuple constructor
 pub use chalk_ir::FnSubst;
 pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
 pub type AliasTy = chalk_ir::AliasTy<Interner>;


@@ -1468,7 +1468,7 @@ pub(crate) fn trait_environment_for_body_query(
 ) -> Arc<TraitEnvironment> {
     let Some(def) = def.as_generic_def_id() else {
         let krate = def.module(db.upcast()).krate();
-        return Arc::new(TraitEnvironment::empty(krate));
+        return TraitEnvironment::empty(krate);
     };
     db.trait_environment(def)
 }
@@ -1528,12 +1528,7 @@ pub(crate) fn trait_environment_query(
     let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);

-    Arc::new(TraitEnvironment {
-        krate,
-        block: None,
-        traits_from_clauses: traits_in_scope.into_boxed_slice(),
-        env,
-    })
+    TraitEnvironment::new(krate, None, traits_in_scope.into_boxed_slice(), env)
 }

 /// Resolve the where clause(s) of an item with generics.


@@ -40,7 +40,6 @@ pub use monomorphization::{
 use rustc_hash::FxHashMap;
 use smallvec::{smallvec, SmallVec};
 use stdx::{impl_from, never};
-use triomphe::Arc;

 use super::consteval::{intern_const_scalar, try_const_usize};
@@ -147,7 +146,7 @@ impl<V, T> ProjectionElem<V, T> {
                     base = normalize(
                         db,
                         // FIXME: we should get this from caller
-                        Arc::new(TraitEnvironment::empty(krate)),
+                        TraitEnvironment::empty(krate),
                         base,
                     );
                 }


@@ -48,18 +48,32 @@ pub struct TraitEnvironment {
     pub krate: CrateId,
     pub block: Option<BlockId>,
     // FIXME make this a BTreeMap
-    pub(crate) traits_from_clauses: Box<[(Ty, TraitId)]>,
+    traits_from_clauses: Box<[(Ty, TraitId)]>,
     pub env: chalk_ir::Environment<Interner>,
 }

 impl TraitEnvironment {
-    pub fn empty(krate: CrateId) -> Self {
-        TraitEnvironment {
+    pub fn empty(krate: CrateId) -> Arc<Self> {
+        Arc::new(TraitEnvironment {
             krate,
             block: None,
             traits_from_clauses: Box::default(),
             env: chalk_ir::Environment::new(Interner),
-        }
+        })
+    }
+
+    pub fn new(
+        krate: CrateId,
+        block: Option<BlockId>,
+        traits_from_clauses: Box<[(Ty, TraitId)]>,
+        env: chalk_ir::Environment<Interner>,
+    ) -> Arc<Self> {
+        Arc::new(TraitEnvironment { krate, block, traits_from_clauses, env })
+    }
+
+    // pub fn with_block(self: &mut Arc<Self>, block: BlockId) {
+    pub fn with_block(this: &mut Arc<Self>, block: BlockId) {
+        Arc::make_mut(this).block = Some(block);
     }

     pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator<Item = TraitId> + '_ {
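With this change `TraitEnvironment` hands out `Arc<Self>` from its constructors and funnels the only mutation through `with_block`, which clones lazily via `Arc::make_mut`. A self-contained sketch of that clone-on-write pattern (hypothetical `Env` type, not the real `TraitEnvironment`):

use std::sync::Arc;

#[derive(Clone, Debug)]
struct Env {
    krate: u32,
    block: Option<u32>,
}

impl Env {
    // Constructors return Arc<Self> so callers never hold a bare Env.
    fn empty(krate: u32) -> Arc<Self> {
        Arc::new(Env { krate, block: None })
    }

    // `self: &mut Arc<Self>` is not a valid receiver type, hence the explicit
    // `this` parameter, mirroring TraitEnvironment::with_block above.
    fn with_block(this: &mut Arc<Self>, block: u32) {
        Arc::make_mut(this).block = Some(block);
    }
}

fn main() {
    let shared = Env::empty(0);
    let mut local = shared.clone(); // both handles point at the same allocation
    Env::with_block(&mut local, 7); // make_mut clones before mutating
    assert_eq!(local.block, Some(7));
    assert_eq!(shared.block, None); // the shared original is untouched
}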


@@ -3564,10 +3564,9 @@ impl TraitRef {
         resolver: &Resolver,
         trait_ref: hir_ty::TraitRef,
     ) -> TraitRef {
-        let env = resolver.generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(resolver.krate())),
-            |d| db.trait_environment(d),
-        );
+        let env = resolver
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));

         TraitRef { env, trait_ref }
     }
@@ -3707,15 +3706,14 @@ impl Type {
         resolver: &Resolver,
         ty: Ty,
     ) -> Type {
-        let environment = resolver.generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(resolver.krate())),
-            |d| db.trait_environment(d),
-        );
+        let environment = resolver
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
         Type { env: environment, ty }
     }

     pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
-        Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+        Type { env: TraitEnvironment::empty(krate), ty }
     }

     pub fn reference(inner: &Type, m: Mutability) -> Type {
@@ -3731,10 +3729,9 @@ impl Type {
     fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
         let resolver = lexical_env.resolver(db.upcast());
-        let environment = resolver.generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(resolver.krate())),
-            |d| db.trait_environment(d),
-        );
+        let environment = resolver
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(resolver.krate()), |d| db.trait_environment(d));
         Type { env: environment, ty }
     }
@@ -4304,10 +4301,10 @@ impl Type {
         let canonical = hir_ty::replace_errors_with_variables(&self.ty);

         let krate = scope.krate();
-        let environment = scope.resolver().generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(krate.id)),
-            |d| db.trait_environment(d),
-        );
+        let environment = scope
+            .resolver()
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));

         method_resolution::iterate_method_candidates_dyn(
             &canonical,
@@ -4361,10 +4358,10 @@ impl Type {
         let canonical = hir_ty::replace_errors_with_variables(&self.ty);

         let krate = scope.krate();
-        let environment = scope.resolver().generic_def().map_or_else(
-            || Arc::new(TraitEnvironment::empty(krate.id)),
-            |d| db.trait_environment(d),
-        );
+        let environment = scope
+            .resolver()
+            .generic_def()
+            .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));

         method_resolution::iterate_path_candidates(
             &canonical,


@@ -2,7 +2,7 @@
 use std::hash::Hash;

-use stdx::itertools::Itertools;
+use stdx::{always, itertools::Itertools};
 use syntax::{TextRange, TextSize};
 use tt::Span;
@@ -21,13 +21,23 @@ impl<S: Span> SpanMap<S> {
     /// Finalizes the [`SpanMap`], shrinking its backing storage and validating that the offsets are
     /// in order.
     pub fn finish(&mut self) {
-        assert!(self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0));
+        always!(
+            self.spans.iter().tuple_windows().all(|(a, b)| a.0 < b.0),
+            "spans are not in order"
+        );
         self.spans.shrink_to_fit();
     }

     /// Pushes a new span onto the [`SpanMap`].
     pub fn push(&mut self, offset: TextSize, span: S) {
-        debug_assert!(self.spans.last().map_or(true, |&(last_offset, _)| last_offset < offset));
+        if cfg!(debug_assertions) {
+            if let Some(&(last_offset, _)) = self.spans.last() {
+                assert!(
+                    last_offset < offset,
+                    "last_offset({last_offset:?}) must be smaller than offset({offset:?})"
+                );
+            }
+        }
         self.spans.push((offset, span));
     }
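The new `push` keeps the ordering check but only in debug builds, and its panic message names both offsets. A standalone sketch of that guard, using a plain `Vec<(u32, &str)>` in place of the real span types:

// Push an (offset, span) pair, insisting in debug builds that offsets only grow.
fn push(spans: &mut Vec<(u32, &'static str)>, offset: u32, span: &'static str) {
    if cfg!(debug_assertions) {
        if let Some(&(last_offset, _)) = spans.last() {
            assert!(
                last_offset < offset,
                "last_offset({last_offset:?}) must be smaller than offset({offset:?})"
            );
        }
    }
    spans.push((offset, span));
}

fn main() {
    let mut spans = Vec::new();
    push(&mut spans, 0, "a");
    push(&mut spans, 4, "b"); // in a debug build, pushing offset 2 here would panic
    assert_eq!(spans.len(), 2);
}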