desugar ? operator

hkalbasi 2023-03-08 20:58:52 +03:30
parent 924d277f32
commit b7b9ae59a0
27 changed files with 517 additions and 191 deletions

View file

@ -391,7 +391,7 @@ impl Body {
} }
}; };
let expander = Expander::new(db, file_id, module); let expander = Expander::new(db, file_id, module);
let (mut body, source_map) = Body::new(db, expander, params, body); let (mut body, source_map) = Body::new(db, expander, params, body, module.krate);
body.shrink_to_fit(); body.shrink_to_fit();
(Arc::new(body), Arc::new(source_map)) (Arc::new(body), Arc::new(source_map))
@ -420,8 +420,9 @@ impl Body {
expander: Expander, expander: Expander,
params: Option<(ast::ParamList, impl Iterator<Item = bool>)>, params: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
body: Option<ast::Expr>, body: Option<ast::Expr>,
krate: CrateId,
) -> (Body, BodySourceMap) { ) -> (Body, BodySourceMap) {
lower::lower(db, expander, params, body) lower::lower(db, expander, params, body, krate)
} }
fn shrink_to_fit(&mut self) { fn shrink_to_fit(&mut self) {

View file

@ -3,6 +3,7 @@
use std::{mem, sync::Arc}; use std::{mem, sync::Arc};
use base_db::CrateId;
use either::Either; use either::Either;
use hir_expand::{ use hir_expand::{
ast_id_map::AstIdMap, ast_id_map::AstIdMap,
@ -36,6 +37,7 @@ use crate::{
RecordFieldPat, RecordLitField, Statement, RecordFieldPat, RecordLitField, Statement,
}, },
item_scope::BuiltinShadowMode, item_scope::BuiltinShadowMode,
lang_item::LangItem,
path::{GenericArgs, Path}, path::{GenericArgs, Path},
type_ref::{Mutability, Rawness, TypeRef}, type_ref::{Mutability, Rawness, TypeRef},
AdtId, BlockId, BlockLoc, ModuleDefId, UnresolvedMacro, AdtId, BlockId, BlockLoc, ModuleDefId, UnresolvedMacro,
@ -80,9 +82,11 @@ pub(super) fn lower(
expander: Expander, expander: Expander,
params: Option<(ast::ParamList, impl Iterator<Item = bool>)>, params: Option<(ast::ParamList, impl Iterator<Item = bool>)>,
body: Option<ast::Expr>, body: Option<ast::Expr>,
krate: CrateId,
) -> (Body, BodySourceMap) { ) -> (Body, BodySourceMap) {
ExprCollector { ExprCollector {
db, db,
krate,
source_map: BodySourceMap::default(), source_map: BodySourceMap::default(),
ast_id_map: db.ast_id_map(expander.current_file_id), ast_id_map: db.ast_id_map(expander.current_file_id),
body: Body { body: Body {
@ -107,6 +111,7 @@ struct ExprCollector<'a> {
expander: Expander, expander: Expander,
ast_id_map: Arc<AstIdMap>, ast_id_map: Arc<AstIdMap>,
body: Body, body: Body,
krate: CrateId,
source_map: BodySourceMap, source_map: BodySourceMap,
is_lowering_assignee_expr: bool, is_lowering_assignee_expr: bool,
is_lowering_generator: bool, is_lowering_generator: bool,
@ -176,8 +181,7 @@ impl ExprCollector<'_> {
self.source_map.expr_map.insert(src, id); self.source_map.expr_map.insert(src, id);
id id
} }
// desugared exprs don't have ptr, that's wrong and should be fixed // FIXME: desugared exprs don't have ptr, that's wrong and should be fixed somehow.
// somehow.
fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
self.body.exprs.alloc(expr) self.body.exprs.alloc(expr)
} }
@ -199,6 +203,10 @@ impl ExprCollector<'_> {
self.source_map.pat_map.insert(src, id); self.source_map.pat_map.insert(src, id);
id id
} }
// FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow.
fn alloc_pat_desugared(&mut self, pat: Pat) -> PatId {
self.body.pats.alloc(pat)
}
fn missing_pat(&mut self) -> PatId { fn missing_pat(&mut self) -> PatId {
self.body.pats.alloc(Pat::Missing) self.body.pats.alloc(Pat::Missing)
} }
@ -437,10 +445,7 @@ impl ExprCollector<'_> {
let expr = self.collect_expr_opt(e.expr()); let expr = self.collect_expr_opt(e.expr());
self.alloc_expr(Expr::Await { expr }, syntax_ptr) self.alloc_expr(Expr::Await { expr }, syntax_ptr)
} }
ast::Expr::TryExpr(e) => { ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e),
let expr = self.collect_expr_opt(e.expr());
self.alloc_expr(Expr::Try { expr }, syntax_ptr)
}
ast::Expr::CastExpr(e) => { ast::Expr::CastExpr(e) => {
let expr = self.collect_expr_opt(e.expr()); let expr = self.collect_expr_opt(e.expr());
let type_ref = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty())); let type_ref = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty()));
@ -601,6 +606,82 @@ impl ExprCollector<'_> {
}) })
} }
fn collect_try_operator(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::TryExpr) -> ExprId {
let (try_branch, cf_continue, cf_break, try_from_residual) = 'if_chain: {
if let Some(try_branch) = LangItem::TryTraitBranch.path(self.db, self.krate) {
if let Some(cf_continue) =
LangItem::ControlFlowContinue.path(self.db, self.krate)
{
if let Some(cf_break) =
LangItem::ControlFlowBreak.path(self.db, self.krate)
{
if let Some(try_from_residual) =
LangItem::TryTraitFromResidual.path(self.db, self.krate)
{
break 'if_chain (
try_branch,
cf_continue,
cf_break,
try_from_residual,
);
}
}
}
}
// Some of the needed lang items are missing, so we can't desugar
return self.alloc_expr(Expr::Missing, syntax_ptr);
};
let operand = self.collect_expr_opt(e.expr());
let try_branch = self.alloc_expr(Expr::Path(try_branch), syntax_ptr.clone());
let expr = self.alloc_expr(
Expr::Call {
callee: try_branch,
args: Box::new([operand]),
is_assignee_expr: false,
},
syntax_ptr.clone(),
);
let continue_binding =
self.alloc_binding(name![v1], BindingAnnotation::Unannotated);
let continue_bpat =
self.alloc_pat_desugared(Pat::Bind { id: continue_binding, subpat: None });
self.add_definition_to_binding(continue_binding, continue_bpat);
let continue_arm = MatchArm {
pat: self.alloc_pat_desugared(Pat::TupleStruct {
path: Some(Box::new(cf_continue)),
args: Box::new([continue_bpat]),
ellipsis: None,
}),
guard: None,
expr: self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone()),
};
let break_binding = self.alloc_binding(name![v1], BindingAnnotation::Unannotated);
let break_bpat =
self.alloc_pat_desugared(Pat::Bind { id: break_binding, subpat: None });
self.add_definition_to_binding(break_binding, break_bpat);
let break_arm = MatchArm {
pat: self.alloc_pat_desugared(Pat::TupleStruct {
path: Some(Box::new(cf_break)),
args: Box::new([break_bpat]),
ellipsis: None,
}),
guard: None,
expr: {
let x =
self.alloc_expr(Expr::Path(Path::from(name![v1])), syntax_ptr.clone());
let callee =
self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone());
let result = self.alloc_expr(
Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false },
syntax_ptr.clone(),
);
self.alloc_expr(Expr::Return { expr: Some(result) }, syntax_ptr.clone())
},
};
let arms = Box::new([continue_arm, break_arm]);
self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr)
}
fn collect_macro_call<F, T, U>( fn collect_macro_call<F, T, U>(
&mut self, &mut self,
mcall: ast::MacroCall, mcall: ast::MacroCall,
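
For orientation, the HIR that `collect_try_operator` builds for `x?` corresponds to roughly the following surface Rust. This is a sketch only: it needs nightly's `try_trait_v2` feature to compile, the binding name `v1` mirrors the `name![v1]` used above, and the real lowering never spells these names out but reaches the items through the lang-item paths it just resolved (`TryTraitBranch`, `ControlFlowContinue`, `ControlFlowBreak`, `TryTraitFromResidual`), so it also works where the traits are not in scope.

#![feature(try_trait_v2)]
use std::ops::{ControlFlow, FromResidual, Try};

// Sketch of the expansion produced for `x?` in a function returning Option<i32>.
fn double(x: Option<i32>) -> Option<i32> {
    let v1 = match Try::branch(x) {
        // Continue arm: bind the success value and keep going.
        ControlFlow::Continue(v1) => v1,
        // Break arm: convert the residual with FromResidual and return early.
        ControlFlow::Break(residual) => return FromResidual::from_residual(residual),
    };
    Some(v1 * 2)
}

fn main() {
    assert_eq!(double(Some(21)), Some(42));
    assert_eq!(double(None), None);
}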

View file

@ -288,10 +288,6 @@ impl<'a> Printer<'a> {
self.print_expr(*expr); self.print_expr(*expr);
w!(self, ".await"); w!(self, ".await");
} }
Expr::Try { expr } => {
self.print_expr(*expr);
w!(self, "?");
}
Expr::Cast { expr, type_ref } => { Expr::Cast { expr, type_ref } => {
self.print_expr(*expr); self.print_expr(*expr);
w!(self, " as "); w!(self, " as ");

View file

@ -192,9 +192,6 @@ pub enum Expr {
Await { Await {
expr: ExprId, expr: ExprId,
}, },
Try {
expr: ExprId,
},
Cast { Cast {
expr: ExprId, expr: ExprId,
type_ref: Interned<TypeRef>, type_ref: Interned<TypeRef>,
@ -383,7 +380,6 @@ impl Expr {
} }
Expr::Field { expr, .. } Expr::Field { expr, .. }
| Expr::Await { expr } | Expr::Await { expr }
| Expr::Try { expr }
| Expr::Cast { expr, .. } | Expr::Cast { expr, .. }
| Expr::Ref { expr, .. } | Expr::Ref { expr, .. }
| Expr::UnaryOp { expr, .. } | Expr::UnaryOp { expr, .. }

View file

@ -8,8 +8,8 @@ use rustc_hash::FxHashMap;
use syntax::SmolStr; use syntax::SmolStr;
use crate::{ use crate::{
db::DefDatabase, AdtId, AssocItemId, AttrDefId, CrateId, EnumId, EnumVariantId, FunctionId, db::DefDatabase, path::Path, AdtId, AssocItemId, AttrDefId, CrateId, EnumId, EnumVariantId,
ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId, FunctionId, ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
}; };
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -220,11 +220,6 @@ macro_rules! language_item_table {
} }
} }
/// Opposite of [`LangItem::name`]
pub fn from_name(name: &hir_expand::name::Name) -> Option<Self> {
Self::from_str(name.as_str()?)
}
/// Opposite of [`LangItem::name`] /// Opposite of [`LangItem::name`]
pub fn from_str(name: &str) -> Option<Self> { pub fn from_str(name: &str) -> Option<Self> {
match name { match name {
@ -236,6 +231,18 @@ macro_rules! language_item_table {
} }
} }
impl LangItem {
/// Opposite of [`LangItem::name`]
pub fn from_name(name: &hir_expand::name::Name) -> Option<Self> {
Self::from_str(name.as_str()?)
}
pub fn path(&self, db: &dyn DefDatabase, start_crate: CrateId) -> Option<Path> {
let t = db.lang_item(start_crate, *self)?;
Some(Path::LangItem(t))
}
}
language_item_table! { language_item_table! {
// Variant name, Name, Getter method name, Target Generic requirements; // Variant name, Name, Getter method name, Target Generic requirements;
Sized, sized, sized_trait, Target::Trait, GenericRequirement::Exact(0); Sized, sized, sized_trait, Target::Trait, GenericRequirement::Exact(0);

View file

@ -8,6 +8,7 @@ use std::{
use crate::{ use crate::{
body::LowerCtx, body::LowerCtx,
lang_item::LangItemTarget,
type_ref::{ConstRefOrPath, LifetimeRef}, type_ref::{ConstRefOrPath, LifetimeRef},
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
@ -36,13 +37,19 @@ impl Display for ImportAlias {
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Path { pub enum Path {
/// A normal path
Normal {
/// Type based path like `<T>::foo`. /// Type based path like `<T>::foo`.
/// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`. /// Note that paths like `<Type as Trait>::foo` are desugared to `Trait::<Self=Type>::foo`.
type_anchor: Option<Interned<TypeRef>>, type_anchor: Option<Interned<TypeRef>>,
mod_path: Interned<ModPath>, mod_path: Interned<ModPath>,
/// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`. /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`.
generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>, generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>,
},
/// A link to a lang item. It is used in desugaring of things like `x?`. We can't show these
/// links via a normal path since they might be private and not accessible in the usage place.
LangItem(LangItemTarget),
} }
/// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This /// Generic arguments to a path segment (e.g. the `i32` in `Option<i32>`). This
@ -102,51 +109,77 @@ impl Path {
) -> Path { ) -> Path {
let generic_args = generic_args.into(); let generic_args = generic_args.into();
assert_eq!(path.len(), generic_args.len()); assert_eq!(path.len(), generic_args.len());
Path { type_anchor: None, mod_path: Interned::new(path), generic_args: Some(generic_args) } Path::Normal {
type_anchor: None,
mod_path: Interned::new(path),
generic_args: Some(generic_args),
}
}
/// Converts a known mod path to `Path`.
pub fn from_known_path_with_no_generic(path: ModPath) -> Path {
Path::Normal { type_anchor: None, mod_path: Interned::new(path), generic_args: None }
} }
pub fn kind(&self) -> &PathKind { pub fn kind(&self) -> &PathKind {
&self.mod_path.kind match self {
Path::Normal { mod_path, .. } => &mod_path.kind,
Path::LangItem(_) => &PathKind::Abs,
}
} }
pub fn type_anchor(&self) -> Option<&TypeRef> { pub fn type_anchor(&self) -> Option<&TypeRef> {
self.type_anchor.as_deref() match self {
Path::Normal { type_anchor, .. } => type_anchor.as_deref(),
Path::LangItem(_) => None,
}
} }
pub fn segments(&self) -> PathSegments<'_> { pub fn segments(&self) -> PathSegments<'_> {
let s = PathSegments { let Path::Normal { mod_path, generic_args, .. } = self else {
segments: self.mod_path.segments(), return PathSegments {
generic_args: self.generic_args.as_deref(), segments: &[],
generic_args: None,
}; };
};
let s =
PathSegments { segments: mod_path.segments(), generic_args: generic_args.as_deref() };
if let Some(generic_args) = s.generic_args { if let Some(generic_args) = s.generic_args {
assert_eq!(s.segments.len(), generic_args.len()); assert_eq!(s.segments.len(), generic_args.len());
} }
s s
} }
pub fn mod_path(&self) -> &ModPath { pub fn mod_path(&self) -> Option<&ModPath> {
&self.mod_path match self {
Path::Normal { mod_path, .. } => Some(&mod_path),
Path::LangItem(_) => None,
}
} }
pub fn qualifier(&self) -> Option<Path> { pub fn qualifier(&self) -> Option<Path> {
if self.mod_path.is_ident() { let Path::Normal { mod_path, generic_args, type_anchor } = self else {
return None;
};
if mod_path.is_ident() {
return None; return None;
} }
let res = Path { let res = Path::Normal {
type_anchor: self.type_anchor.clone(), type_anchor: type_anchor.clone(),
mod_path: Interned::new(ModPath::from_segments( mod_path: Interned::new(ModPath::from_segments(
self.mod_path.kind, mod_path.kind,
self.mod_path.segments()[..self.mod_path.segments().len() - 1].iter().cloned(), mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
)), )),
generic_args: self.generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()), generic_args: generic_args.as_ref().map(|it| it[..it.len() - 1].to_vec().into()),
}; };
Some(res) Some(res)
} }
pub fn is_self_type(&self) -> bool { pub fn is_self_type(&self) -> bool {
self.type_anchor.is_none() let Path::Normal { mod_path, generic_args, type_anchor } = self else {
&& self.generic_args.as_deref().is_none() return false;
&& self.mod_path.is_Self() };
type_anchor.is_none() && generic_args.as_deref().is_none() && mod_path.is_Self()
} }
} }
@ -222,7 +255,7 @@ impl GenericArgs {
impl From<Name> for Path { impl From<Name> for Path {
fn from(name: Name) -> Path { fn from(name: Name) -> Path {
Path { Path::Normal {
type_anchor: None, type_anchor: None,
mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))), mod_path: Interned::new(ModPath::from_segments(PathKind::Plain, iter::once(name))),
generic_args: None, generic_args: None,

View file

@ -75,8 +75,11 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
} }
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => { Some(trait_ref) => {
let Path { mod_path, generic_args: path_generic_args, .. } = let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
Path::from_src(trait_ref.path()?, ctx)?; Path::from_src(trait_ref.path()?, ctx)? else
{
return None;
};
let num_segments = mod_path.segments().len(); let num_segments = mod_path.segments().len();
kind = mod_path.kind; kind = mod_path.kind;
@ -157,7 +160,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
} }
let mod_path = Interned::new(ModPath::from_segments(kind, segments)); let mod_path = Interned::new(ModPath::from_segments(kind, segments));
return Some(Path { return Some(Path::Normal {
type_anchor, type_anchor,
mod_path, mod_path,
generic_args: if generic_args.is_empty() { None } else { Some(generic_args.into()) }, generic_args: if generic_args.is_empty() { None } else { Some(generic_args.into()) },

View file

@ -15,8 +15,9 @@ use crate::{
expr::{BindingId, ExprId, LabelId}, expr::{BindingId, ExprId, LabelId},
generics::{GenericParams, TypeOrConstParamData}, generics::{GenericParams, TypeOrConstParamData},
item_scope::{BuiltinShadowMode, BUILTIN_SCOPE}, item_scope::{BuiltinShadowMode, BUILTIN_SCOPE},
lang_item::LangItemTarget,
nameres::DefMap, nameres::DefMap,
path::{ModPath, PathKind}, path::{ModPath, Path, PathKind},
per_ns::PerNs, per_ns::PerNs,
visibility::{RawVisibility, Visibility}, visibility::{RawVisibility, Visibility},
AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId, AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternBlockId,
@ -176,8 +177,27 @@ impl Resolver {
pub fn resolve_path_in_type_ns( pub fn resolve_path_in_type_ns(
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &Path,
) -> Option<(TypeNs, Option<usize>)> { ) -> Option<(TypeNs, Option<usize>)> {
let path = match path {
Path::Normal { mod_path, .. } => mod_path,
Path::LangItem(l) => {
return Some((
match *l {
LangItemTarget::Union(x) => TypeNs::AdtId(x.into()),
LangItemTarget::TypeAlias(x) => TypeNs::TypeAliasId(x),
LangItemTarget::Struct(x) => TypeNs::AdtId(x.into()),
LangItemTarget::EnumVariant(x) => TypeNs::EnumVariantId(x),
LangItemTarget::EnumId(x) => TypeNs::AdtId(x.into()),
LangItemTarget::Trait(x) => TypeNs::TraitId(x),
LangItemTarget::Function(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::Static(_) => return None,
},
None,
))
}
};
let first_name = path.segments().first()?; let first_name = path.segments().first()?;
let skip_to_mod = path.kind != PathKind::Plain; let skip_to_mod = path.kind != PathKind::Plain;
if skip_to_mod { if skip_to_mod {
@ -217,7 +237,7 @@ impl Resolver {
pub fn resolve_path_in_type_ns_fully( pub fn resolve_path_in_type_ns_fully(
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &Path,
) -> Option<TypeNs> { ) -> Option<TypeNs> {
let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?; let (res, unresolved) = self.resolve_path_in_type_ns(db, path)?;
if unresolved.is_some() { if unresolved.is_some() {
@ -245,8 +265,24 @@ impl Resolver {
pub fn resolve_path_in_value_ns( pub fn resolve_path_in_value_ns(
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &Path,
) -> Option<ResolveValueResult> { ) -> Option<ResolveValueResult> {
let path = match path {
Path::Normal { mod_path, .. } => mod_path,
Path::LangItem(l) => {
return Some(ResolveValueResult::ValueNs(match *l {
LangItemTarget::Function(x) => ValueNs::FunctionId(x),
LangItemTarget::Static(x) => ValueNs::StaticId(x),
LangItemTarget::Struct(x) => ValueNs::StructId(x),
LangItemTarget::EnumVariant(x) => ValueNs::EnumVariantId(x),
LangItemTarget::Union(_)
| LangItemTarget::ImplDef(_)
| LangItemTarget::TypeAlias(_)
| LangItemTarget::Trait(_)
| LangItemTarget::EnumId(_) => return None,
}))
}
};
let n_segments = path.segments().len(); let n_segments = path.segments().len();
let tmp = name![self]; let tmp = name![self];
let first_name = if path.is_self() { &tmp } else { path.segments().first()? }; let first_name = if path.is_self() { &tmp } else { path.segments().first()? };
@ -340,7 +376,7 @@ impl Resolver {
pub fn resolve_path_in_value_ns_fully( pub fn resolve_path_in_value_ns_fully(
&self, &self,
db: &dyn DefDatabase, db: &dyn DefDatabase,
path: &ModPath, path: &Path,
) -> Option<ValueNs> { ) -> Option<ValueNs> {
match self.resolve_path_in_value_ns(db, path)? { match self.resolve_path_in_value_ns(db, path)? {
ResolveValueResult::ValueNs(it) => Some(it), ResolveValueResult::ValueNs(it) => Some(it),
@ -441,7 +477,7 @@ impl Resolver {
&Scope::ImplDefScope(impl_) => { &Scope::ImplDefScope(impl_) => {
if let Some(target_trait) = &db.impl_data(impl_).target_trait { if let Some(target_trait) = &db.impl_data(impl_).target_trait {
if let Some(TypeNs::TraitId(trait_)) = if let Some(TypeNs::TraitId(trait_)) =
self.resolve_path_in_type_ns_fully(db, target_trait.path.mod_path()) self.resolve_path_in_type_ns_fully(db, &target_trait.path)
{ {
traits.insert(trait_); traits.insert(trait_);
} }

View file

@ -4,7 +4,7 @@ use base_db::CrateId;
use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData}; use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData};
use hir_def::{ use hir_def::{
expr::Expr, expr::Expr,
path::ModPath, path::Path,
resolver::{Resolver, ValueNs}, resolver::{Resolver, ValueNs},
type_ref::ConstRef, type_ref::ConstRef,
ConstId, EnumVariantId, ConstId, EnumVariantId,
@ -72,7 +72,7 @@ impl From<MirEvalError> for ConstEvalError {
pub(crate) fn path_to_const( pub(crate) fn path_to_const(
db: &dyn HirDatabase, db: &dyn HirDatabase,
resolver: &Resolver, resolver: &Resolver,
path: &ModPath, path: &Path,
mode: ParamLoweringMode, mode: ParamLoweringMode,
args_lazy: impl FnOnce() -> Generics, args_lazy: impl FnOnce() -> Generics,
debruijn: DebruijnIndex, debruijn: DebruijnIndex,
@ -89,7 +89,7 @@ pub(crate) fn path_to_const(
Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)), Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)),
None => { None => {
never!( never!(
"Generic list doesn't contain this param: {:?}, {}, {:?}", "Generic list doesn't contain this param: {:?}, {:?}, {:?}",
args, args,
path, path,
p p
@ -228,7 +228,7 @@ pub(crate) fn eval_to_const(
let db = ctx.db; let db = ctx.db;
if let Expr::Path(p) = &ctx.body.exprs[expr] { if let Expr::Path(p) = &ctx.body.exprs[expr] {
let resolver = &ctx.resolver; let resolver = &ctx.resolver;
if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) { if let Some(c) = path_to_const(db, resolver, p, mode, args, debruijn) {
return c; return c;
} }
} }

View file

@ -801,6 +801,73 @@ fn options() {
); );
} }
#[test]
fn from_trait() {
check_number(
r#"
//- minicore: from
struct E1(i32);
struct E2(i32);
impl From<E1> for E2 {
fn from(E1(x): E1) -> Self {
E2(1000 * x)
}
}
const GOAL: i32 = {
let x: E2 = E1(2).into();
x.0
};
"#,
2000,
);
}
#[test]
fn try_operator() {
check_number(
r#"
//- minicore: option, try
const fn f(x: Option<i32>, y: Option<i32>) -> Option<i32> {
Some(x? * y?)
}
const fn g(x: Option<i32>, y: Option<i32>) -> i32 {
match f(x, y) {
Some(k) => k,
None => 5,
}
}
const GOAL: i32 = g(Some(10), Some(20)) + g(Some(30), None) + g(None, Some(40)) + g(None, None);
"#,
215,
);
check_number(
r#"
//- minicore: result, try, from
struct E1(i32);
struct E2(i32);
impl From<E1> for E2 {
fn from(E1(x): E1) -> Self {
E2(1000 * x)
}
}
const fn f(x: Result<i32, E1>) -> Result<i32, E2> {
Ok(x? * 10)
}
const fn g(x: Result<i32, E1>) -> i32 {
match f(x) {
Ok(k) => 7 * k,
Err(E2(k)) => 5 * k,
}
}
const GOAL: i32 = g(Ok(2)) + g(Err(E1(3)));
"#,
15140,
);
}
#[test] #[test]
fn or_pattern() { fn or_pattern() {
check_number( check_number(

View file

@ -97,6 +97,10 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::generic_predicates_query)] #[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<QuantifiedWhereClause>]>; fn generic_predicates(&self, def: GenericDefId) -> Arc<[Binders<QuantifiedWhereClause>]>;
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<crate::TraitEnvironment>;
#[salsa::invoke(crate::lower::trait_environment_query)] #[salsa::invoke(crate::lower::trait_environment_query)]
fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>; fn trait_environment(&self, def: GenericDefId) -> Arc<crate::TraitEnvironment>;

View file

@ -73,7 +73,7 @@ fn walk_unsafe(
} }
Expr::Path(path) => { Expr::Path(path) => {
let resolver = resolver_for_expr(db.upcast(), def, current); let resolver = resolver_for_expr(db.upcast(), def, current);
let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path.mod_path()); let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path);
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial { if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial {
if db.static_data(id).mutable { if db.static_data(id).mutable {
unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block }); unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });

View file

@ -25,16 +25,16 @@ use hir_def::{
expr::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId}, expr::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId},
lang_item::{LangItem, LangItemTarget}, lang_item::{LangItem, LangItemTarget},
layout::Integer, layout::Integer,
path::Path, path::{ModPath, Path},
resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
type_ref::TypeRef, type_ref::TypeRef,
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, ItemContainerId, Lookup,
ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId, TraitId, TypeAliasId, VariantId,
}; };
use hir_expand::name::{name, Name}; use hir_expand::name::{name, Name};
use la_arena::ArenaMap; use la_arena::ArenaMap;
use rustc_hash::{FxHashMap, FxHashSet}; use rustc_hash::{FxHashMap, FxHashSet};
use stdx::always; use stdx::{always, never};
use crate::{ use crate::{
db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany, db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany,
@ -110,10 +110,7 @@ pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> T
if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) { if !ty.data(Interner).flags.intersects(TypeFlags::HAS_PROJECTION) {
return ty; return ty;
} }
let krate = owner.module(db.upcast()).krate(); let trait_env = db.trait_environment_for_body(owner);
let trait_env = owner
.as_generic_def_id()
.map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
let mut table = unify::InferenceTable::new(db, trait_env); let mut table = unify::InferenceTable::new(db, trait_env);
let ty_with_vars = table.normalize_associated_types_in(ty); let ty_with_vars = table.normalize_associated_types_in(ty);
@ -506,10 +503,7 @@ impl<'a> InferenceContext<'a> {
body: &'a Body, body: &'a Body,
resolver: Resolver, resolver: Resolver,
) -> Self { ) -> Self {
let krate = owner.module(db.upcast()).krate(); let trait_env = db.trait_environment_for_body(owner);
let trait_env = owner
.as_generic_def_id()
.map_or_else(|| Arc::new(TraitEnvironment::empty(krate)), |d| db.trait_environment(d));
InferenceContext { InferenceContext {
result: InferenceResult::default(), result: InferenceResult::default(),
table: unify::InferenceTable::new(db, trait_env.clone()), table: unify::InferenceTable::new(db, trait_env.clone()),
@ -851,7 +845,7 @@ impl<'a> InferenceContext<'a> {
// FIXME: this should resolve assoc items as well, see this example: // FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521 // https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
let (resolution, unresolved) = if value_ns { let (resolution, unresolved) = if value_ns {
match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) { match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path) {
Some(ResolveValueResult::ValueNs(value)) => match value { Some(ResolveValueResult::ValueNs(value)) => match value {
ValueNs::EnumVariantId(var) => { ValueNs::EnumVariantId(var) => {
let substs = ctx.substs_from_path(path, var.into(), true); let substs = ctx.substs_from_path(path, var.into(), true);
@ -872,11 +866,15 @@ impl<'a> InferenceContext<'a> {
None => return (self.err_ty(), None), None => return (self.err_ty(), None),
} }
} else { } else {
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) { match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
Some(it) => it, Some(it) => it,
None => return (self.err_ty(), None), None => return (self.err_ty(), None),
} }
}; };
let Some(mod_path) = path.mod_path() else {
never!("resolver should always resolve lang item paths");
return (self.err_ty(), None);
};
return match resolution { return match resolution {
TypeNs::AdtId(AdtId::StructId(strukt)) => { TypeNs::AdtId(AdtId::StructId(strukt)) => {
let substs = ctx.substs_from_path(path, strukt.into(), true); let substs = ctx.substs_from_path(path, strukt.into(), true);
@ -900,7 +898,7 @@ impl<'a> InferenceContext<'a> {
let generics = crate::utils::generics(self.db.upcast(), impl_id.into()); let generics = crate::utils::generics(self.db.upcast(), impl_id.into());
let substs = generics.placeholder_subst(self.db); let substs = generics.placeholder_subst(self.db);
let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs); let ty = self.db.impl_self_ty(impl_id).substitute(Interner, &substs);
self.resolve_variant_on_alias(ty, unresolved, path) self.resolve_variant_on_alias(ty, unresolved, mod_path)
} }
TypeNs::TypeAliasId(it) => { TypeNs::TypeAliasId(it) => {
let container = it.lookup(self.db.upcast()).container; let container = it.lookup(self.db.upcast()).container;
@ -917,7 +915,7 @@ impl<'a> InferenceContext<'a> {
let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst) let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst)
.fill_with_inference_vars(&mut self.table) .fill_with_inference_vars(&mut self.table)
.build(); .build();
self.resolve_variant_on_alias(ty, unresolved, path) self.resolve_variant_on_alias(ty, unresolved, mod_path)
} }
TypeNs::AdtSelfType(_) => { TypeNs::AdtSelfType(_) => {
// FIXME this could happen in array size expressions, once we're checking them // FIXME this could happen in array size expressions, once we're checking them
@ -953,9 +951,9 @@ impl<'a> InferenceContext<'a> {
&mut self, &mut self,
ty: Ty, ty: Ty,
unresolved: Option<usize>, unresolved: Option<usize>,
path: &Path, path: &ModPath,
) -> (Ty, Option<VariantId>) { ) -> (Ty, Option<VariantId>) {
let remaining = unresolved.map(|x| path.segments().skip(x).len()).filter(|x| x > &0); let remaining = unresolved.map(|x| path.segments()[x..].len()).filter(|x| x > &0);
match remaining { match remaining {
None => { None => {
let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id { let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id {
@ -969,7 +967,7 @@ impl<'a> InferenceContext<'a> {
(ty, variant) (ty, variant)
} }
Some(1) => { Some(1) => {
let segment = path.mod_path().segments().last().unwrap(); let segment = path.segments().last().unwrap();
// this could be an enum variant or associated type // this could be an enum variant or associated type
if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() { if let Some((AdtId::EnumId(enum_id), _)) = ty.as_adt() {
let enum_data = self.db.enum_data(enum_id); let enum_data = self.db.enum_data(enum_id);

View file

@ -601,21 +601,21 @@ impl<'a> InferenceContext<'a> {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
} }
Expr::Try { expr } => { // Expr::Try { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); // let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
if let Some(trait_) = self.resolve_lang_trait(LangItem::Try) { // if let Some(trait_) = self.resolve_lang_trait(LangItem::Try) {
if let Some(func) = self.db.trait_data(trait_).method_by_name(&name!(branch)) { // if let Some(func) = self.db.trait_data(trait_).method_by_name(&name!(branch)) {
let subst = TyBuilder::subst_for_def(self.db, trait_, None) // let subst = TyBuilder::subst_for_def(self.db, trait_, None)
.push(inner_ty.clone()) // .push(inner_ty.clone())
.build(); // .build();
self.write_method_resolution(tgt_expr, func, subst.clone()); // self.write_method_resolution(tgt_expr, func, subst.clone());
} // }
let try_output = self.resolve_output_on(trait_); // let try_output = self.resolve_output_on(trait_);
self.resolve_associated_type(inner_ty, try_output) // self.resolve_associated_type(inner_ty, try_output)
} else { // } else {
self.err_ty() // self.err_ty()
} // }
} // }
Expr::Cast { expr, type_ref } => { Expr::Cast { expr, type_ref } => {
let cast_ty = self.make_ty(type_ref); let cast_ty = self.make_ty(type_ref);
// FIXME: propagate the "castable to" expectation // FIXME: propagate the "castable to" expectation

View file

@ -39,7 +39,7 @@ impl<'a> InferenceContext<'a> {
} else { } else {
// FIXME: report error, unresolved first path segment // FIXME: report error, unresolved first path segment
let value_or_partial = let value_or_partial =
self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?; self.resolver.resolve_path_in_value_ns(self.db.upcast(), path)?;
match value_or_partial { match value_or_partial {
ResolveValueResult::ValueNs(it) => (it, None), ResolveValueResult::ValueNs(it) => (it, None),

View file

@ -25,12 +25,12 @@ use hir_def::{
TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
}, },
lang_item::{lang_attr, LangItem}, lang_item::{lang_attr, LangItem},
path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments}, path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs}, resolver::{HasResolver, Resolver, TypeNs},
type_ref::{ConstRefOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef}, type_ref::{ConstRefOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef},
AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId, AdtId, AssocItemId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, StructId, GenericDefId, HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId,
TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
}; };
use hir_expand::{name::Name, ExpandResult}; use hir_expand::{name::Name, ExpandResult};
use intern::Interned; use intern::Interned;
@ -425,8 +425,7 @@ impl<'a> TyLoweringContext<'a> {
if path.segments().len() > 1 { if path.segments().len() > 1 {
return None; return None;
} }
let resolution = let resolution = match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
Some((it, None)) => it, Some((it, None)) => it,
_ => return None, _ => return None,
}; };
@ -608,7 +607,7 @@ impl<'a> TyLoweringContext<'a> {
} }
let (resolution, remaining_index) = let (resolution, remaining_index) =
match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) { match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path) {
Some(it) => it, Some(it) => it,
None => return (TyKind::Error.intern(Interner), None), None => return (TyKind::Error.intern(Interner), None),
}; };
@ -716,7 +715,7 @@ impl<'a> TyLoweringContext<'a> {
resolved: ValueTyDefId, resolved: ValueTyDefId,
infer_args: bool, infer_args: bool,
) -> Substitution { ) -> Substitution {
let last = path.segments().last().expect("path should have at least one segment"); let last = path.segments().last();
let (segment, generic_def) = match resolved { let (segment, generic_def) = match resolved {
ValueTyDefId::FunctionId(it) => (last, Some(it.into())), ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
ValueTyDefId::StructId(it) => (last, Some(it.into())), ValueTyDefId::StructId(it) => (last, Some(it.into())),
@ -732,13 +731,20 @@ impl<'a> TyLoweringContext<'a> {
let len = path.segments().len(); let len = path.segments().len();
let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx)); let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx));
let segment = match penultimate { let segment = match penultimate {
Some(segment) if segment.args_and_bindings.is_some() => segment, Some(segment) if segment.args_and_bindings.is_some() => Some(segment),
_ => last, _ => last,
}; };
(segment, Some(var.parent.into())) (segment, Some(var.parent.into()))
} }
}; };
if let Some(segment) = segment {
self.substs_from_path_segment(segment, generic_def, infer_args, None) self.substs_from_path_segment(segment, generic_def, infer_args, None)
} else if let Some(generic_def) = generic_def {
// lang item
self.substs_from_args_and_bindings(None, Some(generic_def), infer_args, None)
} else {
Substitution::empty(Interner)
}
} }
fn substs_from_path_segment( fn substs_from_path_segment(
@ -747,6 +753,21 @@ impl<'a> TyLoweringContext<'a> {
def: Option<GenericDefId>, def: Option<GenericDefId>,
infer_args: bool, infer_args: bool,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Substitution {
self.substs_from_args_and_bindings(
segment.args_and_bindings,
def,
infer_args,
explicit_self_ty,
)
}
fn substs_from_args_and_bindings(
&self,
args_and_bindings: Option<&GenericArgs>,
def: Option<GenericDefId>,
infer_args: bool,
explicit_self_ty: Option<Ty>,
) -> Substitution { ) -> Substitution {
// Remember that the item's own generic args come before its parent's. // Remember that the item's own generic args come before its parent's.
let mut substs = Vec::new(); let mut substs = Vec::new();
@ -780,7 +801,7 @@ impl<'a> TyLoweringContext<'a> {
}; };
let mut had_explicit_args = false; let mut had_explicit_args = false;
if let Some(generic_args) = &segment.args_and_bindings { if let Some(generic_args) = &args_and_bindings {
if !generic_args.has_self_type { if !generic_args.has_self_type {
fill_self_params(); fill_self_params();
} }
@ -879,8 +900,7 @@ impl<'a> TyLoweringContext<'a> {
path: &Path, path: &Path,
explicit_self_ty: Option<Ty>, explicit_self_ty: Option<Ty>,
) -> Option<TraitRef> { ) -> Option<TraitRef> {
let resolved = let resolved = match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path)? {
match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
// FIXME(trait_alias): We need to handle trait alias here. // FIXME(trait_alias): We need to handle trait alias here.
TypeNs::TraitId(tr) => tr, TypeNs::TraitId(tr) => tr,
_ => return None, _ => return None,
@ -1381,9 +1401,7 @@ pub(crate) fn generic_predicates_for_param_query(
Some(it) => it, Some(it) => it,
None => return true, None => return true,
}; };
let tr = match resolver let tr = match resolver.resolve_path_in_type_ns_fully(db.upcast(), path) {
.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
{
Some(TypeNs::TraitId(tr)) => tr, Some(TypeNs::TraitId(tr)) => tr,
_ => return false, _ => return false,
}; };
@ -1423,6 +1441,17 @@ pub(crate) fn generic_predicates_for_param_recover(
Arc::new([]) Arc::new([])
} }
pub(crate) fn trait_environment_for_body_query(
db: &dyn HirDatabase,
def: DefWithBodyId,
) -> Arc<TraitEnvironment> {
let Some(def) = def.as_generic_def_id() else {
let krate = def.module(db.upcast()).krate();
return Arc::new(TraitEnvironment::empty(krate));
};
db.trait_environment(def)
}
pub(crate) fn trait_environment_query( pub(crate) fn trait_environment_query(
db: &dyn HirDatabase, db: &dyn HirDatabase,
def: GenericDefId, def: GenericDefId,
@ -1948,7 +1977,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
// as types. Maybe here is not the best place to do it, but // as types. Maybe here is not the best place to do it, but
// it works. // it works.
if let TypeRef::Path(p) = t { if let TypeRef::Path(p) = t {
let p = p.mod_path(); let p = p.mod_path()?;
if p.kind == PathKind::Plain { if p.kind == PathKind::Plain {
if let [n] = p.segments() { if let [n] = p.segments() {
let c = ConstRefOrPath::Path(n.clone()); let c = ConstRefOrPath::Path(n.clone());
@ -1977,7 +2006,14 @@ pub(crate) fn const_or_path_to_chalk(
ConstRefOrPath::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()), ConstRefOrPath::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()),
ConstRefOrPath::Path(n) => { ConstRefOrPath::Path(n) => {
let path = ModPath::from_segments(PathKind::Plain, Some(n.clone())); let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
path_to_const(db, resolver, &path, mode, args, debruijn) path_to_const(
db,
resolver,
&Path::from_known_path_with_no_generic(path),
mode,
args,
debruijn,
)
.unwrap_or_else(|| unknown_const(expected_ty)) .unwrap_or_else(|| unknown_const(expected_ty))
} }
} }

View file

@ -1,6 +1,6 @@
//! This module provides a MIR interpreter, which is used in const eval. //! This module provides a MIR interpreter, which is used in const eval.
use std::{borrow::Cow, collections::HashMap, iter}; use std::{borrow::Cow, collections::HashMap, iter, sync::Arc};
use base_db::CrateId; use base_db::CrateId;
use chalk_ir::{ use chalk_ir::{
@ -24,7 +24,8 @@ use crate::{
layout::layout_of_ty, layout::layout_of_ty,
mapping::from_chalk, mapping::from_chalk,
method_resolution::lookup_impl_method, method_resolution::lookup_impl_method,
CallableDefId, Const, ConstScalar, Interner, MemoryMap, Substitution, Ty, TyBuilder, TyExt, CallableDefId, Const, ConstScalar, Interner, MemoryMap, Substitution, TraitEnvironment, Ty,
TyBuilder, TyExt,
}; };
use super::{ use super::{
@ -34,6 +35,7 @@ use super::{
pub struct Evaluator<'a> { pub struct Evaluator<'a> {
db: &'a dyn HirDatabase, db: &'a dyn HirDatabase,
trait_env: Arc<TraitEnvironment>,
stack: Vec<u8>, stack: Vec<u8>,
heap: Vec<u8>, heap: Vec<u8>,
crate_id: CrateId, crate_id: CrateId,
@ -217,8 +219,7 @@ pub fn interpret_mir(
assert_placeholder_ty_is_unused: bool, assert_placeholder_ty_is_unused: bool,
) -> Result<Const> { ) -> Result<Const> {
let ty = body.locals[return_slot()].ty.clone(); let ty = body.locals[return_slot()].ty.clone();
let mut evaluator = let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused);
Evaluator::new(db, body.owner.module(db.upcast()).krate(), assert_placeholder_ty_is_unused);
let bytes = evaluator.interpret_mir_with_no_arg(&body)?; let bytes = evaluator.interpret_mir_with_no_arg(&body)?;
let memory_map = evaluator.create_memory_map( let memory_map = evaluator.create_memory_map(
&bytes, &bytes,
@ -231,13 +232,16 @@ pub fn interpret_mir(
impl Evaluator<'_> { impl Evaluator<'_> {
pub fn new<'a>( pub fn new<'a>(
db: &'a dyn HirDatabase, db: &'a dyn HirDatabase,
crate_id: CrateId, body: &MirBody,
assert_placeholder_ty_is_unused: bool, assert_placeholder_ty_is_unused: bool,
) -> Evaluator<'a> { ) -> Evaluator<'a> {
let crate_id = body.owner.module(db.upcast()).krate();
let trait_env = db.trait_environment_for_body(body.owner);
Evaluator { Evaluator {
stack: vec![0], stack: vec![0],
heap: vec![0], heap: vec![0],
db, db,
trait_env,
crate_id, crate_id,
assert_placeholder_ty_is_unused, assert_placeholder_ty_is_unused,
stack_depth_limit: 100, stack_depth_limit: 100,
@ -500,15 +504,9 @@ impl Evaluator<'_> {
} else if let Some(x) = self.detect_lang_function(def) { } else if let Some(x) = self.detect_lang_function(def) {
self.exec_lang_item(x, arg_bytes)? self.exec_lang_item(x, arg_bytes)?
} else { } else {
let trait_env = {
let Some(d) = body.owner.as_generic_def_id() else {
not_supported!("trait resolving in non generic def id");
};
self.db.trait_environment(d)
};
let (imp, generic_args) = lookup_impl_method( let (imp, generic_args) = lookup_impl_method(
self.db, self.db,
trait_env, self.trait_env.clone(),
def, def,
generic_args.clone(), generic_args.clone(),
); );
@ -584,7 +582,7 @@ impl Evaluator<'_> {
.to_owned()); .to_owned());
} }
Terminator::Unreachable => { Terminator::Unreachable => {
return Err(MirEvalError::UndefinedBehavior("unreachable executed")) return Err(MirEvalError::UndefinedBehavior("unreachable executed"));
} }
_ => not_supported!("unknown terminator"), _ => not_supported!("unknown terminator"),
} }
@ -710,8 +708,24 @@ impl Evaluator<'_> {
let ty = self.place_ty(p, locals)?; let ty = self.place_ty(p, locals)?;
let bytes = self.eval_place(p, locals)?.get(&self)?; let bytes = self.eval_place(p, locals)?.get(&self)?;
let layout = self.layout(&ty)?; let layout = self.layout(&ty)?;
let enum_id = 'b: {
match ty.kind(Interner) {
TyKind::Adt(e, _) => match e.0 {
AdtId::EnumId(e) => break 'b e,
_ => (),
},
_ => (),
}
return Ok(Owned(0u128.to_le_bytes().to_vec()));
};
match layout.variants { match layout.variants {
Variants::Single { .. } => Owned(0u128.to_le_bytes().to_vec()), Variants::Single { index } => {
let r = self.db.const_eval_discriminant(EnumVariantId {
parent: enum_id,
local_id: index.0,
})?;
Owned(r.to_le_bytes().to_vec())
}
Variants::Multiple { tag, tag_encoding, .. } => { Variants::Multiple { tag, tag_encoding, .. } => {
let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else { let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
not_supported!("missing target data layout"); not_supported!("missing target data layout");
@ -727,13 +741,6 @@ impl Evaluator<'_> {
let tag = &bytes[offset..offset + size]; let tag = &bytes[offset..offset + size];
let candidate_discriminant = i128::from_le_bytes(pad16(tag, false)) let candidate_discriminant = i128::from_le_bytes(pad16(tag, false))
.wrapping_sub(niche_start as i128); .wrapping_sub(niche_start as i128);
let enum_id = match ty.kind(Interner) {
TyKind::Adt(e, _) => match e.0 {
AdtId::EnumId(e) => e,
_ => not_supported!("Non enum with multi variant layout"),
},
_ => not_supported!("Non adt with multi variant layout"),
};
let enum_data = self.db.enum_data(enum_id); let enum_data = self.db.enum_data(enum_id);
let result = 'b: { let result = 'b: {
for (local_id, _) in enum_data.variants.iter() { for (local_id, _) in enum_data.variants.iter() {
@ -790,8 +797,8 @@ impl Evaluator<'_> {
Owned(result) Owned(result)
} }
AggregateKind::Adt(x, subst) => { AggregateKind::Adt(x, subst) => {
let (size, variant_layout, tag) = let subst = self.subst_filler(subst, locals);
self.layout_of_variant(*x, subst.clone(), locals)?; let (size, variant_layout, tag) = self.layout_of_variant(*x, subst, locals)?;
Owned(self.make_by_layout(size, &variant_layout, tag, values, locals)?) Owned(self.make_by_layout(size, &variant_layout, tag, values, locals)?)
} }
}, },
@ -1124,12 +1131,13 @@ impl Evaluator<'_> {
} }
fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> { fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
use LangItem::*;
let candidate = lang_attr(self.db.upcast(), def)?; let candidate = lang_attr(self.db.upcast(), def)?;
// filter normal lang functions out // We want to execute these functions with special logic
if [LangItem::IntoIterIntoIter, LangItem::IteratorNext].contains(&candidate) { if [PanicFmt, BeginPanic, SliceLen].contains(&candidate) {
return None; return Some(candidate);
} }
Some(candidate) None
} }
fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result<MemoryMap> { fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result<MemoryMap> {

View file

@ -13,7 +13,7 @@ use hir_def::{
layout::LayoutError, layout::LayoutError,
path::Path, path::Path,
resolver::{resolver_for_expr, ResolveValueResult, ValueNs}, resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
DefWithBodyId, EnumVariantId, HasModule, DefWithBodyId, EnumVariantId, HasModule, ItemContainerId, TraitId,
}; };
use hir_expand::name::Name; use hir_expand::name::Name;
use la_arena::ArenaMap; use la_arena::ArenaMap;
@ -50,6 +50,8 @@ pub enum MirLowerError {
ConstEvalError(Box<ConstEvalError>), ConstEvalError(Box<ConstEvalError>),
LayoutError(LayoutError), LayoutError(LayoutError),
IncompleteExpr, IncompleteExpr,
/// Trying to lower a trait function, instead of an implementation
TraitFunctionDefinition(TraitId, Name),
UnresolvedName(String), UnresolvedName(String),
RecordLiteralWithoutPath, RecordLiteralWithoutPath,
UnresolvedMethod, UnresolvedMethod,
@ -200,12 +202,21 @@ impl MirLowerCtx<'_> {
mut current: BasicBlockId, mut current: BasicBlockId,
) -> Result<Option<BasicBlockId>> { ) -> Result<Option<BasicBlockId>> {
match &self.body.exprs[expr_id] { match &self.body.exprs[expr_id] {
Expr::Missing => Err(MirLowerError::IncompleteExpr), Expr::Missing => {
if let DefWithBodyId::FunctionId(f) = self.owner {
let assoc = self.db.lookup_intern_function(f);
if let ItemContainerId::TraitId(t) = assoc.container {
let name = &self.db.function_data(f).name;
return Err(MirLowerError::TraitFunctionDefinition(t, name.clone()));
}
}
Err(MirLowerError::IncompleteExpr)
},
Expr::Path(p) => { Expr::Path(p) => {
let unresolved_name = || MirLowerError::unresolved_path(self.db, p); let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
let pr = resolver let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), p.mod_path()) .resolve_path_in_value_ns(self.db.upcast(), p)
.ok_or_else(unresolved_name)?; .ok_or_else(unresolved_name)?;
let pr = match pr { let pr = match pr {
ResolveValueResult::ValueNs(v) => v, ResolveValueResult::ValueNs(v) => v,
@ -608,7 +619,6 @@ impl MirLowerCtx<'_> {
} }
} }
Expr::Await { .. } => not_supported!("await"), Expr::Await { .. } => not_supported!("await"),
Expr::Try { .. } => not_supported!("? operator"),
Expr::Yeet { .. } => not_supported!("yeet"), Expr::Yeet { .. } => not_supported!("yeet"),
Expr::TryBlock { .. } => not_supported!("try block"), Expr::TryBlock { .. } => not_supported!("try block"),
Expr::Async { .. } => not_supported!("async block"), Expr::Async { .. } => not_supported!("async block"),

View file

@ -125,7 +125,7 @@ impl MirLowerCtx<'_> {
match &self.body.exprs[expr_id] { match &self.body.exprs[expr_id] {
Expr::Path(p) => { Expr::Path(p) => {
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p.mod_path()) else { let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p) else {
return Err(MirLowerError::unresolved_path(self.db, p)); return Err(MirLowerError::unresolved_path(self.db, p));
}; };
let pr = match pr { let pr = match pr {

View file

@ -77,6 +77,7 @@ impl Display for LocalName {
impl<'a> MirPrettyCtx<'a> { impl<'a> MirPrettyCtx<'a> {
fn for_body(&mut self) { fn for_body(&mut self) {
wln!(self, "// {:?}", self.body.owner);
self.with_block(|this| { self.with_block(|this| {
this.locals(); this.locals();
wln!(this); wln!(this);

View file

@ -130,7 +130,7 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(Tra
WherePredicate::Lifetime { .. } => None, WherePredicate::Lifetime { .. } => None,
}) })
.filter(|(_, bound_modifier)| matches!(bound_modifier, TraitBoundModifier::None)) .filter(|(_, bound_modifier)| matches!(bound_modifier, TraitBoundModifier::None))
.filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) { .filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path) {
Some(TypeNs::TraitId(t)) => Some(t), Some(TypeNs::TraitId(t)) => Some(t),
_ => None, _ => None,
}) })

View file

@ -1076,10 +1076,7 @@ impl<'db> SemanticsImpl<'db> {
let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id); let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene); let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
let hir_path = Path::from_src(path.clone(), &ctx)?; let hir_path = Path::from_src(path.clone(), &ctx)?;
match analyze match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
.resolver
.resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())?
{
TypeNs::TraitId(id) => Some(Trait { id }), TypeNs::TraitId(id) => Some(Trait { id }),
_ => None, _ => None,
} }

View file

@ -420,7 +420,10 @@ impl SourceAnalyzer {
None None
} else { } else {
// Shorthand syntax, resolve to the local // Shorthand syntax, resolve to the local
let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone())); let path = Path::from_known_path_with_no_generic(ModPath::from_segments(
PathKind::Plain,
once(local_name.clone()),
));
match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
Some(ValueNs::LocalBinding(binding_id)) => { Some(ValueNs::LocalBinding(binding_id)) => {
Some(Local { binding_id, parent: self.resolver.body_owner()? }) Some(Local { binding_id, parent: self.resolver.body_owner()? })
@ -461,7 +464,7 @@ impl SourceAnalyzer {
) -> Option<Macro> { ) -> Option<Macro> {
let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id); let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id);
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?; let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into()) self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()?).map(|it| it.into())
} }
pub(crate) fn resolve_bind_pat_to_const( pub(crate) fn resolve_bind_pat_to_const(
@ -801,15 +804,11 @@ impl SourceAnalyzer {
func: FunctionId, func: FunctionId,
substs: Substitution, substs: Substitution,
) -> FunctionId { ) -> FunctionId {
let krate = self.resolver.krate();
let owner = match self.resolver.body_owner() { let owner = match self.resolver.body_owner() {
Some(it) => it, Some(it) => it,
None => return func, None => return func,
}; };
let env = owner.as_generic_def_id().map_or_else( let env = db.trait_environment_for_body(owner);
|| Arc::new(hir_ty::TraitEnvironment::empty(krate)),
|d| db.trait_environment(d),
);
method_resolution::lookup_impl_method(db, env, func, substs).0 method_resolution::lookup_impl_method(db, env, func, substs).0
} }
@ -819,15 +818,11 @@ impl SourceAnalyzer {
const_id: ConstId, const_id: ConstId,
subs: Substitution, subs: Substitution,
) -> ConstId { ) -> ConstId {
let krate = self.resolver.krate();
let owner = match self.resolver.body_owner() { let owner = match self.resolver.body_owner() {
Some(it) => it, Some(it) => it,
None => return const_id, None => return const_id,
}; };
let env = owner.as_generic_def_id().map_or_else( let env = db.trait_environment_for_body(owner);
|| Arc::new(hir_ty::TraitEnvironment::empty(krate)),
|d| db.trait_environment(d),
);
method_resolution::lookup_impl_const(db, env, const_id, subs).0 method_resolution::lookup_impl_const(db, env, const_id, subs).0
} }
@ -946,7 +941,7 @@ pub(crate) fn resolve_hir_path_as_macro(
resolver: &Resolver, resolver: &Resolver,
path: &Path, path: &Path,
) -> Option<Macro> { ) -> Option<Macro> {
resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(Into::into) resolver.resolve_path_as_macro(db.upcast(), path.mod_path()?).map(Into::into)
} }
fn resolve_hir_path_( fn resolve_hir_path_(
@ -962,8 +957,7 @@ fn resolve_hir_path_(
res.map(|ty_ns| (ty_ns, path.segments().first())) res.map(|ty_ns| (ty_ns, path.segments().first()))
} }
None => { None => {
let (ty, remaining_idx) = let (ty, remaining_idx) = resolver.resolve_path_in_type_ns(db.upcast(), path)?;
resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?;
match remaining_idx { match remaining_idx {
Some(remaining_idx) => { Some(remaining_idx) => {
if remaining_idx + 1 == path.segments().len() { if remaining_idx + 1 == path.segments().len() {
@ -1019,7 +1013,7 @@ fn resolve_hir_path_(
let body_owner = resolver.body_owner(); let body_owner = resolver.body_owner();
let values = || { let values = || {
resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| { resolver.resolve_path_in_value_ns_fully(db.upcast(), path).and_then(|val| {
let res = match val { let res = match val {
ValueNs::LocalBinding(binding_id) => { ValueNs::LocalBinding(binding_id) => {
let var = Local { parent: body_owner?, binding_id }; let var = Local { parent: body_owner?, binding_id };
@ -1039,14 +1033,14 @@ fn resolve_hir_path_(
let items = || { let items = || {
resolver resolver
.resolve_module_path_in_items(db.upcast(), path.mod_path()) .resolve_module_path_in_items(db.upcast(), path.mod_path()?)
.take_types() .take_types()
.map(|it| PathResolution::Def(it.into())) .map(|it| PathResolution::Def(it.into()))
}; };
let macros = || { let macros = || {
resolver resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()) .resolve_path_as_macro(db.upcast(), path.mod_path()?)
.map(|def| PathResolution::Def(ModuleDef::Macro(def.into()))) .map(|def| PathResolution::Def(ModuleDef::Macro(def.into())))
}; };
@ -1074,7 +1068,7 @@ fn resolve_hir_path_qualifier(
path: &Path, path: &Path,
) -> Option<PathResolution> { ) -> Option<PathResolution> {
resolver resolver
.resolve_path_in_type_ns_fully(db.upcast(), path.mod_path()) .resolve_path_in_type_ns_fully(db.upcast(), &path)
.map(|ty| match ty { .map(|ty| match ty {
TypeNs::SelfType(it) => PathResolution::SelfType(it.into()), TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()), TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
@ -1089,7 +1083,7 @@ fn resolve_hir_path_qualifier(
}) })
.or_else(|| { .or_else(|| {
resolver resolver
.resolve_module_path_in_items(db.upcast(), path.mod_path()) .resolve_module_path_in_items(db.upcast(), path.mod_path()?)
.take_types() .take_types()
.map(|it| PathResolution::Def(it.into())) .map(|it| PathResolution::Def(it.into()))
}) })

View file

@ -20,7 +20,7 @@ use crate::assist_context::{AssistContext, Assists};
// Replaces a `try` expression with a `match` expression. // Replaces a `try` expression with a `match` expression.
// //
// ``` // ```
// # //- minicore:option // # //- minicore: try, option
// fn handle() { // fn handle() {
// let pat = Some(true)$0?; // let pat = Some(true)$0?;
// } // }
@ -111,7 +111,7 @@ mod tests {
check_assist( check_assist(
replace_try_expr_with_match, replace_try_expr_with_match,
r#" r#"
//- minicore:option //- minicore: try, option
fn test() { fn test() {
let pat = Some(true)$0?; let pat = Some(true)$0?;
} }
@ -132,7 +132,7 @@ fn test() {
check_assist( check_assist(
replace_try_expr_with_match, replace_try_expr_with_match,
r#" r#"
//- minicore:result //- minicore: try, from, result
fn test() { fn test() {
let pat = Ok(true)$0?; let pat = Ok(true)$0?;
} }

View file

@ -2352,7 +2352,7 @@ fn doctest_replace_try_expr_with_match() {
check_doc_test( check_doc_test(
"replace_try_expr_with_match", "replace_try_expr_with_match",
r#####" r#####"
//- minicore:option //- minicore: try, option
fn handle() { fn handle() {
let pat = Some(true)$0?; let pat = Some(true)$0?;
} }

View file

@ -5009,7 +5009,7 @@ fn foo() {
fn hover_try_expr_res() { fn hover_try_expr_res() {
check_hover_range( check_hover_range(
r#" r#"
//- minicore:result //- minicore: try, from, result
struct FooError; struct FooError;
fn foo() -> Result<(), FooError> { fn foo() -> Result<(), FooError> {
@ -5023,7 +5023,7 @@ fn foo() -> Result<(), FooError> {
); );
check_hover_range( check_hover_range(
r#" r#"
//- minicore:result //- minicore: try, from, result
struct FooError; struct FooError;
struct BarError; struct BarError;
@ -5044,6 +5044,7 @@ fn foo() -> Result<(), FooError> {
fn hover_try_expr() { fn hover_try_expr() {
check_hover_range( check_hover_range(
r#" r#"
//- minicore: try
struct NotResult<T, U>(T, U); struct NotResult<T, U>(T, U);
struct Short; struct Short;
struct Looooong; struct Looooong;
@ -5061,6 +5062,7 @@ fn foo() -> NotResult<(), Looooong> {
); );
check_hover_range( check_hover_range(
r#" r#"
//- minicore: try
struct NotResult<T, U>(T, U); struct NotResult<T, U>(T, U);
struct Short; struct Short;
struct Looooong; struct Looooong;
@ -5092,7 +5094,7 @@ fn foo() -> Option<()> {
"#, "#,
expect![[r#" expect![[r#"
```rust ```rust
<Option<i32> as Try>::Output i32
```"#]], ```"#]],
); );
} }

View file

@ -181,7 +181,7 @@ pub mod convert {
} }
// endregion:as_ref // endregion:as_ref
// region:infallible // region:infallible
pub enum Infallibe {} pub enum Infallible {}
// endregion:infallible // endregion:infallible
} }
@ -380,11 +380,15 @@ pub mod ops {
// endregion:fn // endregion:fn
// region:try // region:try
mod try_ { mod try_ {
use super::super::convert::Infallible;
pub enum ControlFlow<B, C = ()> { pub enum ControlFlow<B, C = ()> {
#[lang = "Continue"]
Continue(C), Continue(C),
#[lang = "Break"]
Break(B), Break(B),
} }
pub trait FromResidual<R = Self::Residual> { pub trait FromResidual<R = <Self as Try>::Residual> {
#[lang = "from_residual"] #[lang = "from_residual"]
fn from_residual(residual: R) -> Self; fn from_residual(residual: R) -> Self;
} }
@ -400,14 +404,66 @@ pub mod ops {
impl<B, C> Try for ControlFlow<B, C> { impl<B, C> Try for ControlFlow<B, C> {
type Output = C; type Output = C;
type Residual = ControlFlow<B, convert::Infallible>; type Residual = ControlFlow<B, Infallible>;
fn from_output(output: Self::Output) -> Self {} fn from_output(output: Self::Output) -> Self {}
fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {} fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {}
} }
impl<B, C> FromResidual for ControlFlow<B, C> { impl<B, C> FromResidual for ControlFlow<B, C> {
fn from_residual(residual: ControlFlow<B, convert::Infallible>) -> Self {} fn from_residual(residual: ControlFlow<B, Infallible>) -> Self {}
} }
// region:option
impl<T> Try for Option<T> {
type Output = T;
type Residual = Option<Infallible>;
fn from_output(output: Self::Output) -> Self {
Some(output)
}
fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
match self {
Some(x) => ControlFlow::Continue(x),
None => ControlFlow::Break(None),
}
}
}
impl<T> FromResidual for Option<T> {
fn from_residual(x: Option<Infallible>) -> Self {
match x {
None => None,
}
}
}
// endregion:option
// region:result
// region:from
use super::super::convert::From;
impl<T, E> Try for Result<T, E> {
type Output = T;
type Residual = Result<Infallible, E>;
fn from_output(output: Self::Output) -> Self {
Ok(output)
}
fn branch(self) -> ControlFlow<Self::Residual, Self::Output> {
match self {
Ok(v) => ControlFlow::Continue(v),
Err(e) => ControlFlow::Break(Err(e)),
}
}
}
impl<T, E, F: From<E>> FromResidual<Result<Infallible, E>> for Result<T, F> {
fn from_residual(residual: Result<Infallible, E>) -> Self {
match residual {
Err(e) => Err(From::from(e)),
}
}
}
// endregion:from
// endregion:result
} }
pub use self::try_::{ControlFlow, FromResidual, Try}; pub use self::try_::{ControlFlow, FromResidual, Try};
// endregion:try // endregion:try
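
As a usage note for the minicore additions above: the `FromResidual<Result<Infallible, E>>` impl with `F: From<E>` is what lets `?` change the error type on the way out, which is exactly what the `try_operator` const-eval test earlier in this commit exercises. A minimal stable-Rust sketch with illustrative types `E1`/`E2`:

struct E1(i32);
struct E2(i32);

impl From<E1> for E2 {
    fn from(E1(x): E1) -> Self {
        E2(1000 * x)
    }
}

// On Err, `x?` goes through FromResidual, which converts E1 into E2 via From
// before returning; on Ok it just yields the value.
fn f(x: Result<i32, E1>) -> Result<i32, E2> {
    Ok(x? * 10)
}

fn main() {
    assert!(matches!(f(Ok(2)), Ok(20)));
    assert!(matches!(f(Err(E1(3))), Err(E2(3000))));
}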