path: root/src/tools/rust-analyzer/crates/hir-ty
author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:20:29 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:20:29 +0000
commit    631cd5845e8de329d0e227aaa707d7ea228b8f8f (patch)
tree      a1b87c8f8cad01cf18f7c5f57a08f102771ed303 /src/tools/rust-analyzer/crates/hir-ty
parent    Adding debian version 1.69.0+dfsg1-1. (diff)
download  rustc-631cd5845e8de329d0e227aaa707d7ea228b8f8f.tar.xz
          rustc-631cd5845e8de329d0e227aaa707d7ea228b8f8f.zip
Merging upstream version 1.70.0+dfsg1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/tools/rust-analyzer/crates/hir-ty')
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/Cargo.toml | 9
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/builder.rs | 9
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs | 22
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs | 503
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs | 955
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/db.rs | 25
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs | 6
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs | 31
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs | 6
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/display.rs | 187
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer.rs | 215
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs | 60
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs | 849
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs | 312
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs | 147
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs | 19
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/interner.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/layout.rs | 16
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs | 13
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs | 21
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/lib.rs | 49
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/lower.rs | 30
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs | 213
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/mir.rs | 863
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs | 223
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs | 1253
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs | 1581
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs | 237
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs | 348
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/tests.rs | 40
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs | 21
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs | 15
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs | 30
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs | 52
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs | 84
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs | 71
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/utils.rs | 7
43 files changed, 7217 insertions, 1336 deletions
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index a8b8d5222..9b3296df2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -18,13 +18,14 @@ arrayvec = "0.7.2"
bitflags = "1.3.2"
smallvec.workspace = true
ena = "0.14.0"
+either = "1.7.0"
tracing = "0.1.35"
rustc-hash = "1.1.0"
scoped-tls = "1.0.0"
-chalk-solve = { version = "0.88.0", default-features = false }
-chalk-ir = "0.88.0"
-chalk-recursive = { version = "0.88.0", default-features = false }
-chalk-derive = "0.88.0"
+chalk-solve = { version = "0.89.0", default-features = false }
+chalk-ir = "0.89.0"
+chalk-recursive = { version = "0.89.0", default-features = false }
+chalk-derive = "0.89.0"
la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
once_cell = "1.17.0"
typed-arena = "2.0.1"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
index 8faef7bf7..03e944359 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
@@ -152,6 +152,15 @@ impl TyBuilder<()> {
TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner)
}
+ // FIXME: rustc's ty is dependent on the adt type, maybe we need to do that as well
+ pub fn discr_ty() -> Ty {
+ TyKind::Scalar(chalk_ir::Scalar::Int(chalk_ir::IntTy::I128)).intern(Interner)
+ }
+
+ pub fn bool() -> Ty {
+ TyKind::Scalar(chalk_ir::Scalar::Bool).intern(Interner)
+ }
+
pub fn usize() -> Ty {
TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)).intern(Interner)
}
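The new `discr_ty` helper above fixes the discriminant type to `i128`. As the FIXME notes, rustc derives this type from the enum's repr; the commit instead over-approximates with `i128`, which losslessly contains every possible repr. A standalone illustration of that invariant (plain Rust, not rust-analyzer's internal API):

    // Discriminants range over i8/u8 up to i64/u64 (and isize/usize); every
    // one of those embeds losslessly into i128, so i128 is a safe common type.
    #[repr(u64)]
    enum Big {
        Max = u64::MAX,
    }

    #[repr(i8)]
    enum Small {
        Min = -128,
    }

    fn main() {
        let hi = Big::Max as u64 as i128;
        let lo = Small::Min as i8 as i128;
        assert_eq!(hi, u64::MAX as i128);
        assert_eq!(lo, -128);
    }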
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
index 6989e9fb9..28ae4c349 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -540,8 +540,7 @@ pub(crate) fn trait_datum_query(
let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
- let well_known = lang_attr(db.upcast(), trait_)
- .and_then(|name| well_known_trait_from_lang_item(LangItem::from_str(&name)?));
+ let well_known = lang_attr(db.upcast(), trait_).and_then(well_known_trait_from_lang_item);
let trait_datum = TraitDatum {
id: trait_id,
binders: make_binders(db, &generic_params, trait_datum_bound),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
index 45c975dfc..214189492 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -12,8 +12,8 @@ use hir_def::{
use crate::{
db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
- CallableDefId, CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
- QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
+ CallableDefId, CallableSig, DynTy, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
+ QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, TypeFlags, WhereClause,
};
pub trait TyExt {
@@ -22,6 +22,7 @@ pub trait TyExt {
fn is_floating_point(&self) -> bool;
fn is_never(&self) -> bool;
fn is_unknown(&self) -> bool;
+ fn contains_unknown(&self) -> bool;
fn is_ty_var(&self) -> bool;
fn as_adt(&self) -> Option<(hir_def::AdtId, &Substitution)>;
@@ -76,6 +77,10 @@ impl TyExt for Ty {
matches!(self.kind(Interner), TyKind::Error)
}
+ fn contains_unknown(&self) -> bool {
+ self.data(Interner).flags.contains(TypeFlags::HAS_ERROR)
+ }
+
fn is_ty_var(&self) -> bool {
matches!(self.kind(Interner), TyKind::InferenceVar(_, _))
}
@@ -373,6 +378,19 @@ impl ProjectionTyExt for ProjectionTy {
}
}
+pub trait DynTyExt {
+ fn principal(&self) -> Option<&TraitRef>;
+}
+
+impl DynTyExt for DynTy {
+ fn principal(&self) -> Option<&TraitRef> {
+ self.bounds.skip_binders().interned().get(0).and_then(|b| match b.skip_binders() {
+ crate::WhereClause::Implemented(trait_ref) => Some(trait_ref),
+ _ => None,
+ })
+ }
+}
+
pub trait TraitRefExt {
fn hir_trait_id(&self) -> TraitId;
}
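The new `contains_unknown` answers "does this type contain an error type anywhere?" by reading a precomputed flag rather than walking the type recursively; chalk computes such flags once when a type is interned. A minimal sketch of the idea, using hypothetical toy types rather than chalk's actual `TypeFlags`:

    #[derive(Clone, Copy)]
    struct TypeFlags(u32);

    impl TypeFlags {
        const HAS_ERROR: TypeFlags = TypeFlags(1);
        fn contains(self, other: TypeFlags) -> bool {
            self.0 & other.0 == other.0
        }
    }

    enum Ty {
        Error,
        Scalar,
        Tuple(Vec<Ty>),
    }

    impl Ty {
        // The real interner computes flags once at construction time; this
        // toy recomputes them on each call for brevity.
        fn flags(&self) -> TypeFlags {
            match self {
                Ty::Error => TypeFlags::HAS_ERROR,
                Ty::Scalar => TypeFlags(0),
                Ty::Tuple(ts) => TypeFlags(ts.iter().fold(0, |acc, t| acc | t.flags().0)),
            }
        }

        fn contains_unknown(&self) -> bool {
            self.flags().contains(TypeFlags::HAS_ERROR)
        }
    }

    fn main() {
        let ok = Ty::Tuple(vec![Ty::Scalar]);
        let bad = Ty::Tuple(vec![Ty::Scalar, Ty::Tuple(vec![Ty::Error])]);
        assert!(!ok.contains_unknown());
        assert!(bad.contains_unknown());
    }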
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index 8df70330f..5830c4898 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -1,30 +1,25 @@
//! Constant evaluation details
-use std::{
- collections::HashMap,
- fmt::{Display, Write},
-};
-
-use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData, IntTy, Scalar};
+use base_db::CrateId;
+use chalk_ir::{BoundVar, DebruijnIndex, GenericArgData};
use hir_def::{
- builtin_type::BuiltinInt,
- expr::{ArithOp, BinaryOp, Expr, ExprId, Literal, Pat, PatId},
+ expr::Expr,
path::ModPath,
- resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
- src::HasChildSource,
- type_ref::ConstScalar,
- ConstId, DefWithBodyId, EnumVariantId, Lookup,
+ resolver::{Resolver, ValueNs},
+ type_ref::ConstRef,
+ ConstId, EnumVariantId,
};
-use la_arena::{Arena, Idx, RawIdx};
+use la_arena::{Idx, RawIdx};
use stdx::never;
-use syntax::ast::HasName;
use crate::{
- db::HirDatabase, infer::InferenceContext, lower::ParamLoweringMode, to_placeholder_idx,
- utils::Generics, Const, ConstData, ConstValue, GenericArg, InferenceResult, Interner, Ty,
- TyBuilder, TyKind,
+ db::HirDatabase, infer::InferenceContext, layout::layout_of_ty, lower::ParamLoweringMode,
+ to_placeholder_idx, utils::Generics, Const, ConstData, ConstScalar, ConstValue, GenericArg,
+ Interner, MemoryMap, Ty, TyBuilder,
};
+use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
+
/// Extension trait for [`Const`]
pub trait ConstExt {
/// Is a [`Const`] unknown?
@@ -53,346 +48,24 @@ impl ConstExt for Const {
}
}
-pub struct ConstEvalCtx<'a> {
- pub db: &'a dyn HirDatabase,
- pub owner: DefWithBodyId,
- pub exprs: &'a Arena<Expr>,
- pub pats: &'a Arena<Pat>,
- pub local_data: HashMap<PatId, ComputedExpr>,
- infer: &'a InferenceResult,
-}
-
-impl ConstEvalCtx<'_> {
- fn expr_ty(&mut self, expr: ExprId) -> Ty {
- self.infer[expr].clone()
- }
-}
-
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstEvalError {
- NotSupported(&'static str),
- SemanticError(&'static str),
- Loop,
- IncompleteExpr,
- Panic(String),
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum ComputedExpr {
- Literal(Literal),
- Enum(String, EnumVariantId, Literal),
- Tuple(Box<[ComputedExpr]>),
+ MirLowerError(MirLowerError),
+ MirEvalError(MirEvalError),
}
-impl Display for ComputedExpr {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- ComputedExpr::Literal(l) => match l {
- Literal::Int(x, _) => {
- if *x >= 10 {
- write!(f, "{x} ({x:#X})")
- } else {
- x.fmt(f)
- }
- }
- Literal::Uint(x, _) => {
- if *x >= 10 {
- write!(f, "{x} ({x:#X})")
- } else {
- x.fmt(f)
- }
- }
- Literal::Float(x, _) => x.fmt(f),
- Literal::Bool(x) => x.fmt(f),
- Literal::Char(x) => std::fmt::Debug::fmt(x, f),
- Literal::String(x) => std::fmt::Debug::fmt(x, f),
- Literal::ByteString(x) => std::fmt::Debug::fmt(x, f),
- },
- ComputedExpr::Enum(name, _, _) => name.fmt(f),
- ComputedExpr::Tuple(t) => {
- f.write_char('(')?;
- for x in &**t {
- x.fmt(f)?;
- f.write_str(", ")?;
- }
- f.write_char(')')
- }
+impl From<MirLowerError> for ConstEvalError {
+ fn from(value: MirLowerError) -> Self {
+ match value {
+ MirLowerError::ConstEvalError(e) => *e,
+ _ => ConstEvalError::MirLowerError(value),
}
}
}
-fn scalar_max(scalar: &Scalar) -> i128 {
- match scalar {
- Scalar::Bool => 1,
- Scalar::Char => u32::MAX as i128,
- Scalar::Int(x) => match x {
- IntTy::Isize => isize::MAX as i128,
- IntTy::I8 => i8::MAX as i128,
- IntTy::I16 => i16::MAX as i128,
- IntTy::I32 => i32::MAX as i128,
- IntTy::I64 => i64::MAX as i128,
- IntTy::I128 => i128::MAX,
- },
- Scalar::Uint(x) => match x {
- chalk_ir::UintTy::Usize => usize::MAX as i128,
- chalk_ir::UintTy::U8 => u8::MAX as i128,
- chalk_ir::UintTy::U16 => u16::MAX as i128,
- chalk_ir::UintTy::U32 => u32::MAX as i128,
- chalk_ir::UintTy::U64 => u64::MAX as i128,
- chalk_ir::UintTy::U128 => i128::MAX, // ignore too big u128 for now
- },
- Scalar::Float(_) => 0,
- }
-}
-
-fn is_valid(scalar: &Scalar, value: i128) -> bool {
- if value < 0 {
- !matches!(scalar, Scalar::Uint(_)) && -scalar_max(scalar) - 1 <= value
- } else {
- value <= scalar_max(scalar)
- }
-}
-
-fn get_name(ctx: &mut ConstEvalCtx<'_>, variant: EnumVariantId) -> String {
- let loc = variant.parent.lookup(ctx.db.upcast());
- let children = variant.parent.child_source(ctx.db.upcast());
- let item_tree = loc.id.item_tree(ctx.db.upcast());
-
- let variant_name = children.value[variant.local_id].name();
- let enum_name = item_tree[loc.id.value].name.to_string();
- enum_name + "::" + &variant_name.unwrap().to_string()
-}
-
-pub fn eval_const(
- expr_id: ExprId,
- ctx: &mut ConstEvalCtx<'_>,
-) -> Result<ComputedExpr, ConstEvalError> {
- let u128_to_i128 = |it: u128| -> Result<i128, ConstEvalError> {
- it.try_into().map_err(|_| ConstEvalError::NotSupported("u128 is too big"))
- };
-
- let expr = &ctx.exprs[expr_id];
- match expr {
- Expr::Missing => match ctx.owner {
- // evaluate the implicit variant index of an enum variant without expression
- // FIXME: This should return the type of the enum representation
- DefWithBodyId::VariantId(variant) => {
- let prev_idx: u32 = variant.local_id.into_raw().into();
- let prev_idx = prev_idx.checked_sub(1).map(RawIdx::from).map(Idx::from_raw);
- let value = match prev_idx {
- Some(local_id) => {
- let prev_variant = EnumVariantId { local_id, parent: variant.parent };
- 1 + match ctx.db.const_eval_variant(prev_variant)? {
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => {
- return Err(ConstEvalError::NotSupported(
- "Enum can't contain this kind of value",
- ))
- }
- }
- }
- _ => 0,
- };
- Ok(ComputedExpr::Literal(Literal::Int(value, Some(BuiltinInt::I128))))
- }
- _ => Err(ConstEvalError::IncompleteExpr),
- },
- Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
- &Expr::UnaryOp { expr, op } => {
- let ty = &ctx.expr_ty(expr);
- let ev = eval_const(expr, ctx)?;
- match op {
- hir_def::expr::UnaryOp::Deref => Err(ConstEvalError::NotSupported("deref")),
- hir_def::expr::UnaryOp::Not => {
- let v = match ev {
- ComputedExpr::Literal(Literal::Bool(b)) => {
- return Ok(ComputedExpr::Literal(Literal::Bool(!b)))
- }
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
- };
- let r = match ty.kind(Interner) {
- TyKind::Scalar(Scalar::Uint(x)) => match x {
- chalk_ir::UintTy::U8 => !(v as u8) as i128,
- chalk_ir::UintTy::U16 => !(v as u16) as i128,
- chalk_ir::UintTy::U32 => !(v as u32) as i128,
- chalk_ir::UintTy::U64 => !(v as u64) as i128,
- chalk_ir::UintTy::U128 => {
- return Err(ConstEvalError::NotSupported("negation of u128"))
- }
- chalk_ir::UintTy::Usize => !(v as usize) as i128,
- },
- TyKind::Scalar(Scalar::Int(x)) => match x {
- chalk_ir::IntTy::I8 => !(v as i8) as i128,
- chalk_ir::IntTy::I16 => !(v as i16) as i128,
- chalk_ir::IntTy::I32 => !(v as i32) as i128,
- chalk_ir::IntTy::I64 => !(v as i64) as i128,
- chalk_ir::IntTy::I128 => !v,
- chalk_ir::IntTy::Isize => !(v as isize) as i128,
- },
- _ => return Err(ConstEvalError::NotSupported("unreachable?")),
- };
- Ok(ComputedExpr::Literal(Literal::Int(r, None)))
- }
- hir_def::expr::UnaryOp::Neg => {
- let v = match ev {
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
- };
- Ok(ComputedExpr::Literal(Literal::Int(
- v.checked_neg().ok_or_else(|| {
- ConstEvalError::Panic("overflow in negation".to_string())
- })?,
- None,
- )))
- }
- }
- }
- &Expr::BinaryOp { lhs, rhs, op } => {
- let ty = &ctx.expr_ty(lhs);
- let lhs = eval_const(lhs, ctx)?;
- let rhs = eval_const(rhs, ctx)?;
- let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
- let v1 = match lhs {
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
- };
- let v2 = match rhs {
- ComputedExpr::Literal(Literal::Int(v, _)) => v,
- ComputedExpr::Literal(Literal::Uint(v, _)) => u128_to_i128(v)?,
- _ => return Err(ConstEvalError::NotSupported("this kind of operator")),
- };
- match op {
- BinaryOp::ArithOp(b) => {
- let panic_arith = ConstEvalError::Panic(
- "attempt to run invalid arithmetic operation".to_string(),
- );
- let r = match b {
- ArithOp::Add => v1.checked_add(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Mul => v1.checked_mul(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Sub => v1.checked_sub(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Div => v1.checked_div(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Rem => v1.checked_rem(v2).ok_or_else(|| panic_arith.clone())?,
- ArithOp::Shl => v1
- .checked_shl(v2.try_into().map_err(|_| panic_arith.clone())?)
- .ok_or_else(|| panic_arith.clone())?,
- ArithOp::Shr => v1
- .checked_shr(v2.try_into().map_err(|_| panic_arith.clone())?)
- .ok_or_else(|| panic_arith.clone())?,
- ArithOp::BitXor => v1 ^ v2,
- ArithOp::BitOr => v1 | v2,
- ArithOp::BitAnd => v1 & v2,
- };
- if let TyKind::Scalar(s) = ty.kind(Interner) {
- if !is_valid(s, r) {
- return Err(panic_arith);
- }
- }
- Ok(ComputedExpr::Literal(Literal::Int(r, None)))
- }
- BinaryOp::LogicOp(_) => Err(ConstEvalError::SemanticError("logic op on numbers")),
- _ => Err(ConstEvalError::NotSupported("bin op on this operators")),
- }
- }
- Expr::Block { statements, tail, .. } => {
- let mut prev_values = HashMap::<PatId, Option<ComputedExpr>>::default();
- for statement in &**statements {
- match *statement {
- hir_def::expr::Statement::Let { pat: pat_id, initializer, .. } => {
- let pat = &ctx.pats[pat_id];
- match pat {
- Pat::Bind { subpat, .. } if subpat.is_none() => (),
- _ => {
- return Err(ConstEvalError::NotSupported("complex patterns in let"))
- }
- };
- let value = match initializer {
- Some(x) => eval_const(x, ctx)?,
- None => continue,
- };
- if !prev_values.contains_key(&pat_id) {
- let prev = ctx.local_data.insert(pat_id, value);
- prev_values.insert(pat_id, prev);
- } else {
- ctx.local_data.insert(pat_id, value);
- }
- }
- hir_def::expr::Statement::Expr { .. } => {
- return Err(ConstEvalError::NotSupported("this kind of statement"))
- }
- }
- }
- let r = match tail {
- &Some(x) => eval_const(x, ctx),
- None => Ok(ComputedExpr::Tuple(Box::new([]))),
- };
- // clean up local data, so caller will receive the exact map that passed to us
- for (name, val) in prev_values {
- match val {
- Some(x) => ctx.local_data.insert(name, x),
- None => ctx.local_data.remove(&name),
- };
- }
- r
- }
- Expr::Path(p) => {
- let resolver = resolver_for_expr(ctx.db.upcast(), ctx.owner, expr_id);
- let pr = resolver
- .resolve_path_in_value_ns(ctx.db.upcast(), p.mod_path())
- .ok_or(ConstEvalError::SemanticError("unresolved path"))?;
- let pr = match pr {
- ResolveValueResult::ValueNs(v) => v,
- ResolveValueResult::Partial(..) => {
- return match ctx
- .infer
- .assoc_resolutions_for_expr(expr_id)
- .ok_or(ConstEvalError::SemanticError("unresolved assoc item"))?
- .0
- {
- hir_def::AssocItemId::FunctionId(_) => {
- Err(ConstEvalError::NotSupported("assoc function"))
- }
- // FIXME use actual impl for trait assoc const
- hir_def::AssocItemId::ConstId(c) => ctx.db.const_eval(c),
- hir_def::AssocItemId::TypeAliasId(_) => {
- Err(ConstEvalError::NotSupported("assoc type alias"))
- }
- };
- }
- };
- match pr {
- ValueNs::LocalBinding(pat_id) => {
- let r = ctx
- .local_data
- .get(&pat_id)
- .ok_or(ConstEvalError::NotSupported("Unexpected missing local"))?;
- Ok(r.clone())
- }
- ValueNs::ConstId(id) => ctx.db.const_eval(id),
- ValueNs::GenericParam(_) => {
- Err(ConstEvalError::NotSupported("const generic without substitution"))
- }
- ValueNs::EnumVariantId(id) => match ctx.db.const_eval_variant(id)? {
- ComputedExpr::Literal(lit) => {
- Ok(ComputedExpr::Enum(get_name(ctx, id), id, lit))
- }
- _ => Err(ConstEvalError::NotSupported(
- "Enums can't evalute to anything but numbers",
- )),
- },
- _ => Err(ConstEvalError::NotSupported("path that are not const or local")),
- }
- }
- // FIXME: Handle the cast target
- &Expr::Cast { expr, .. } => match eval_const(expr, ctx)? {
- ComputedExpr::Enum(_, _, lit) => Ok(ComputedExpr::Literal(lit)),
- _ => Err(ConstEvalError::NotSupported("Can't cast these types")),
- },
- _ => Err(ConstEvalError::NotSupported("This kind of expression")),
+impl From<MirEvalError> for ConstEvalError {
+ fn from(value: MirEvalError) -> Self {
+ ConstEvalError::MirEvalError(value)
}
}
@@ -449,68 +122,102 @@ pub fn intern_const_scalar(value: ConstScalar, ty: Ty) -> Const {
.intern(Interner)
}
+/// Interns a constant scalar with the given type
+pub fn intern_const_ref(db: &dyn HirDatabase, value: &ConstRef, ty: Ty, krate: CrateId) -> Const {
+ let bytes = match value {
+ ConstRef::Int(i) => {
+ // FIXME: We should handle failure of layout better.
+ let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
+ ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
+ }
+ ConstRef::UInt(i) => {
+ let size = layout_of_ty(db, &ty, krate).map(|x| x.size.bytes_usize()).unwrap_or(16);
+ ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
+ }
+ ConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
+ ConstRef::Char(c) => {
+ ConstScalar::Bytes((*c as u32).to_le_bytes().to_vec(), MemoryMap::default())
+ }
+ ConstRef::Unknown => ConstScalar::Unknown,
+ };
+ intern_const_scalar(bytes, ty)
+}
+
/// Interns a possibly-unknown target usize
-pub fn usize_const(value: Option<u128>) -> Const {
- intern_const_scalar(value.map_or(ConstScalar::Unknown, ConstScalar::UInt), TyBuilder::usize())
+pub fn usize_const(db: &dyn HirDatabase, value: Option<u128>, krate: CrateId) -> Const {
+ intern_const_ref(
+ db,
+ &value.map_or(ConstRef::Unknown, ConstRef::UInt),
+ TyBuilder::usize(),
+ krate,
+ )
+}
+
+pub fn try_const_usize(c: &Const) -> Option<u128> {
+ match &c.data(Interner).value {
+ chalk_ir::ConstValue::BoundVar(_) => None,
+ chalk_ir::ConstValue::InferenceVar(_) => None,
+ chalk_ir::ConstValue::Placeholder(_) => None,
+ chalk_ir::ConstValue::Concrete(c) => match &c.interned {
+ ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))),
+ _ => None,
+ },
+ }
}
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &[String],
_: &ConstId,
-) -> Result<ComputedExpr, ConstEvalError> {
- Err(ConstEvalError::Loop)
+) -> Result<Const, ConstEvalError> {
+ Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
-pub(crate) fn const_eval_variant_recover(
+pub(crate) fn const_eval_discriminant_recover(
_: &dyn HirDatabase,
_: &[String],
_: &EnumVariantId,
-) -> Result<ComputedExpr, ConstEvalError> {
- Err(ConstEvalError::Loop)
+) -> Result<i128, ConstEvalError> {
+ Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
}
-pub(crate) fn const_eval_variant_query(
+pub(crate) fn const_eval_query(
db: &dyn HirDatabase,
const_id: ConstId,
-) -> Result<ComputedExpr, ConstEvalError> {
+) -> Result<Const, ConstEvalError> {
let def = const_id.into();
- let body = db.body(def);
- let infer = &db.infer(def);
- let result = eval_const(
- body.body_expr,
- &mut ConstEvalCtx {
- db,
- owner: const_id.into(),
- exprs: &body.exprs,
- pats: &body.pats,
- local_data: HashMap::default(),
- infer,
- },
- );
- result
+ let body = db.mir_body(def)?;
+ let c = interpret_mir(db, &body, false)?;
+ Ok(c)
}
-pub(crate) fn const_eval_query_variant(
+pub(crate) fn const_eval_discriminant_variant(
db: &dyn HirDatabase,
variant_id: EnumVariantId,
-) -> Result<ComputedExpr, ConstEvalError> {
+) -> Result<i128, ConstEvalError> {
let def = variant_id.into();
let body = db.body(def);
- let infer = &db.infer(def);
- eval_const(
- body.body_expr,
- &mut ConstEvalCtx {
- db,
- owner: def,
- exprs: &body.exprs,
- pats: &body.pats,
- local_data: HashMap::default(),
- infer,
- },
- )
+ if body.exprs[body.body_expr] == Expr::Missing {
+ let prev_idx: u32 = variant_id.local_id.into_raw().into();
+ let prev_idx = prev_idx.checked_sub(1).map(RawIdx::from).map(Idx::from_raw);
+ let value = match prev_idx {
+ Some(local_id) => {
+ let prev_variant = EnumVariantId { local_id, parent: variant_id.parent };
+ 1 + db.const_eval_discriminant(prev_variant)?
+ }
+ _ => 0,
+ };
+ return Ok(value);
+ }
+ let mir_body = db.mir_body(def)?;
+ let c = interpret_mir(db, &mir_body, false)?;
+ let c = try_const_usize(&c).unwrap() as i128;
+ Ok(c)
}
+// FIXME: Ideally constants in const eval should have separate body (issue #7434), and this function should
+// get an `InferenceResult` instead of an `InferenceContext`. And we should remove `ctx.clone().resolve_all()` here
+// and make this function private. See the fixme comment on `InferenceContext::resolve_all`.
pub(crate) fn eval_to_const(
expr: Idx<Expr>,
mode: ParamLoweringMode,
@@ -518,28 +225,20 @@ pub(crate) fn eval_to_const(
args: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Const {
+ let db = ctx.db;
if let Expr::Path(p) = &ctx.body.exprs[expr] {
- let db = ctx.db;
let resolver = &ctx.resolver;
if let Some(c) = path_to_const(db, resolver, p.mod_path(), mode, args, debruijn) {
return c;
}
}
- let body = ctx.body.clone();
- let mut ctx = ConstEvalCtx {
- db: ctx.db,
- owner: ctx.owner,
- exprs: &body.exprs,
- pats: &body.pats,
- local_data: HashMap::default(),
- infer: &ctx.result,
- };
- let computed_expr = eval_const(expr, &mut ctx);
- let const_scalar = match computed_expr {
- Ok(ComputedExpr::Literal(literal)) => literal.into(),
- _ => ConstScalar::Unknown,
- };
- intern_const_scalar(const_scalar, TyBuilder::usize())
+ let infer = ctx.clone().resolve_all();
+ if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) {
+ if let Ok(result) = interpret_mir(db, &mir_body, true) {
+ return result;
+ }
+ }
+ unknown_const(infer[expr].clone())
}
#[cfg(test)]
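The interesting case in the new `const_eval_discriminant_variant` is `Expr::Missing`: a variant without an explicit value gets the previous variant's discriminant plus one, or 0 if it is the first variant. A self-contained sketch of that rule (a toy representation; the real code recurses through the `const_eval_discriminant` salsa query):

    fn implicit_discriminants(explicit: &[Option<i128>]) -> Vec<i128> {
        let mut prev: Option<i128> = None;
        explicit
            .iter()
            .map(|e| {
                // Missing discriminant: previous + 1, defaulting to 0.
                let d = e.unwrap_or(match prev { Some(p) => p + 1, None => 0 });
                prev = Some(d);
                d
            })
            .collect()
    }

    fn main() {
        // enum E { F1 = 1, F2, F3 }  ->  [1, 2, 3]
        assert_eq!(implicit_discriminants(&[Some(1), None, None]), vec![1, 2, 3]);
        // enum E { F1 }              ->  [0]
        assert_eq!(implicit_discriminants(&[None]), vec![0]);
    }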
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 3c930c077..6a29e8ce5 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -1,24 +1,44 @@
use base_db::fixture::WithFixture;
-use hir_def::{db::DefDatabase, expr::Literal};
+use hir_def::db::DefDatabase;
-use crate::{consteval::ComputedExpr, db::HirDatabase, test_db::TestDB};
+use crate::{
+ consteval::try_const_usize, db::HirDatabase, test_db::TestDB, Const, ConstScalar, Interner,
+};
-use super::ConstEvalError;
+use super::{
+ super::mir::{MirEvalError, MirLowerError},
+ ConstEvalError,
+};
+fn simplify(e: ConstEvalError) -> ConstEvalError {
+ match e {
+ ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e)) => {
+ simplify(ConstEvalError::MirEvalError(*e))
+ }
+ _ => e,
+ }
+}
+
+#[track_caller]
fn check_fail(ra_fixture: &str, error: ConstEvalError) {
- assert_eq!(eval_goal(ra_fixture), Err(error));
+ assert_eq!(eval_goal(ra_fixture).map_err(simplify), Err(error));
}
+#[track_caller]
fn check_number(ra_fixture: &str, answer: i128) {
let r = eval_goal(ra_fixture).unwrap();
- match r {
- ComputedExpr::Literal(Literal::Int(r, _)) => assert_eq!(r, answer),
- ComputedExpr::Literal(Literal::Uint(r, _)) => assert_eq!(r, answer as u128),
- x => panic!("Expected number but found {x:?}"),
+ match &r.data(Interner).value {
+ chalk_ir::ConstValue::Concrete(c) => match &c.interned {
+ ConstScalar::Bytes(b, _) => {
+ assert_eq!(b, &answer.to_le_bytes()[0..b.len()]);
+ }
+ x => panic!("Expected number but found {:?}", x),
+ },
+ _ => panic!("result of const eval wasn't a concrete const"),
}
}
-fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
+fn eval_goal(ra_fixture: &str) -> Result<Const, ConstEvalError> {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let module_id = db.module_for_file(file_id);
let def_map = module_id.def_map(&db);
@@ -42,21 +62,18 @@ fn eval_goal(ra_fixture: &str) -> Result<ComputedExpr, ConstEvalError> {
#[test]
fn add() {
check_number(r#"const GOAL: usize = 2 + 2;"#, 4);
+ check_number(r#"const GOAL: i32 = -2 + --5;"#, 3);
+ check_number(r#"const GOAL: i32 = 7 - 5;"#, 2);
+ check_number(r#"const GOAL: i32 = 7 + (1 - 5);"#, 3);
}
#[test]
fn bit_op() {
check_number(r#"const GOAL: u8 = !0 & !(!0 >> 1)"#, 128);
check_number(r#"const GOAL: i8 = !0 & !(!0 >> 1)"#, 0);
- // FIXME: rustc evaluate this to -128
- check_fail(
- r#"const GOAL: i8 = 1 << 7"#,
- ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
- );
- check_fail(
- r#"const GOAL: i8 = 1 << 8"#,
- ConstEvalError::Panic("attempt to run invalid arithmetic operation".to_string()),
- );
+ check_number(r#"const GOAL: i8 = 1 << 7"#, (1i8 << 7) as i128);
+ // FIXME: report panic here
+ check_number(r#"const GOAL: i8 = 1 << 8"#, 0);
}
#[test]
@@ -74,6 +91,803 @@ fn locals() {
}
#[test]
+fn references() {
+ check_number(
+ r#"
+ const GOAL: usize = {
+ let x = 3;
+ let y = &mut x;
+ *y = 5;
+ x
+ };
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ struct Foo(i32);
+ impl Foo {
+ fn method(&mut self, x: i32) {
+ self.0 = 2 * self.0 + x;
+ }
+ }
+ const GOAL: i32 = {
+ let mut x = Foo(3);
+ x.method(5);
+ x.0
+ };
+ "#,
+ 11,
+ );
+}
+
+#[test]
+fn reference_autoderef() {
+ check_number(
+ r#"
+ const GOAL: usize = {
+ let x = 3;
+ let y = &mut x;
+ let y: &mut usize = &mut y;
+ *y = 5;
+ x
+ };
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ const GOAL: usize = {
+ let x = 3;
+ let y = &&&&&&&x;
+ let z: &usize = &y;
+ *z
+ };
+ "#,
+ 3,
+ );
+ check_number(
+ r#"
+ struct Foo<T> { x: T }
+ impl<T> Foo<T> {
+ fn foo(&mut self) -> T { self.x }
+ }
+ fn f(i: &mut &mut Foo<Foo<i32>>) -> i32 {
+ ((**i).x).foo()
+ }
+ fn g(i: Foo<Foo<i32>>) -> i32 {
+ i.x.foo()
+ }
+ const GOAL: i32 = f(&mut &mut Foo { x: Foo { x: 3 } }) + g(Foo { x: Foo { x: 5 } });
+ "#,
+ 8,
+ );
+}
+
+#[test]
+fn overloaded_deref() {
+ // FIXME: We should support this.
+ check_fail(
+ r#"
+ //- minicore: deref_mut
+ struct Foo;
+
+ impl core::ops::Deref for Foo {
+ type Target = i32;
+ fn deref(&self) -> &i32 {
+ &5
+ }
+ }
+
+ const GOAL: i32 = {
+ let x = Foo;
+ let y = &*x;
+ *y + *x
+ };
+ "#,
+ ConstEvalError::MirLowerError(MirLowerError::NotSupported(
+ "explicit overloaded deref".into(),
+ )),
+ );
+}
+
+#[test]
+fn overloaded_deref_autoref() {
+ check_number(
+ r#"
+ //- minicore: deref_mut
+ struct Foo;
+ struct Bar;
+
+ impl core::ops::Deref for Foo {
+ type Target = Bar;
+ fn deref(&self) -> &Bar {
+ &Bar
+ }
+ }
+
+ impl Bar {
+ fn method(&self) -> i32 {
+ 5
+ }
+ }
+
+ const GOAL: i32 = Foo.method();
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn function_call() {
+ check_number(
+ r#"
+ const fn f(x: usize) -> usize {
+ 2 * x + 5
+ }
+ const GOAL: usize = f(3);
+ "#,
+ 11,
+ );
+ check_number(
+ r#"
+ const fn add(x: usize, y: usize) -> usize {
+ x + y
+ }
+ const GOAL: usize = add(add(1, 2), add(3, add(4, 5)));
+ "#,
+ 15,
+ );
+}
+
+#[test]
+fn intrinsics() {
+ check_number(
+ r#"
+ extern "rust-intrinsic" {
+ pub fn size_of<T>() -> usize;
+ }
+
+ const GOAL: usize = size_of::<i32>();
+ "#,
+ 4,
+ );
+}
+
+#[test]
+fn trait_basic() {
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for u8 {
+ fn f(&self) -> u8 {
+ *self + 33
+ }
+ }
+
+ const GOAL: u8 = {
+ let x = 3;
+ Foo::f(&x)
+ };
+ "#,
+ 36,
+ );
+}
+
+#[test]
+fn trait_method() {
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for u8 {
+ fn f(&self) -> u8 {
+ *self + 33
+ }
+ }
+
+ const GOAL: u8 = {
+ let x = 3;
+ x.f()
+ };
+ "#,
+ 36,
+ );
+}
+
+#[test]
+fn generic_fn() {
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for () {
+ fn f(&self) -> u8 {
+ 0
+ }
+ }
+
+ struct Succ<S>(S);
+
+ impl<T: Foo> Foo for Succ<T> {
+ fn f(&self) -> u8 {
+ self.0.f() + 1
+ }
+ }
+
+ const GOAL: u8 = Succ(Succ(())).f();
+ "#,
+ 2,
+ );
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for u8 {
+ fn f(&self) -> u8 {
+ *self + 33
+ }
+ }
+
+ fn foof<T: Foo>(x: T, y: T) -> u8 {
+ x.f() + y.f()
+ }
+
+ const GOAL: u8 = foof(2, 5);
+ "#,
+ 73,
+ );
+ check_number(
+ r#"
+ fn bar<A, B>(a: A, b: B) -> B {
+ b
+ }
+ const GOAL: u8 = bar("hello", 12);
+ "#,
+ 12,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ fn bar<A, B>(a: A, b: B) -> B {
+ b
+ }
+ fn foo<T>(x: [T; 2]) -> T {
+ bar(x[0], x[1])
+ }
+
+ const GOAL: u8 = foo([2, 5]);
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn impl_trait() {
+ check_number(
+ r#"
+ trait Foo {
+ fn f(&self) -> u8;
+ }
+
+ impl Foo for u8 {
+ fn f(&self) -> u8 {
+ *self + 33
+ }
+ }
+
+ fn foof(x: impl Foo, y: impl Foo) -> impl Foo {
+ x.f() + y.f()
+ }
+
+ const GOAL: u8 = foof(2, 5).f();
+ "#,
+ 106,
+ );
+ check_number(
+ r#"
+ struct Foo<T>(T, T, (T, T));
+ trait S {
+ fn sum(&self) -> i64;
+ }
+ impl S for i64 {
+ fn sum(&self) -> i64 {
+ *self
+ }
+ }
+ impl<T: S> S for Foo<T> {
+ fn sum(&self) -> i64 {
+ self.0.sum() + self.1.sum() + self.2 .0.sum() + self.2 .1.sum()
+ }
+ }
+
+ fn foo() -> Foo<impl S> {
+ Foo(
+ Foo(1i64, 2, (3, 4)),
+ Foo(5, 6, (7, 8)),
+ (
+ Foo(9, 10, (11, 12)),
+ Foo(13, 14, (15, 16)),
+ ),
+ )
+ }
+ const GOAL: i64 = foo().sum();
+ "#,
+ 136,
+ );
+}
+
+#[test]
+fn ifs() {
+ check_number(
+ r#"
+ const fn f(b: bool) -> u8 {
+ if b { 1 } else { 10 }
+ }
+
+ const GOAL: u8 = f(true) + f(true) + f(false);
+ "#,
+ 12,
+ );
+ check_number(
+ r#"
+ const fn max(a: i32, b: i32) -> i32 {
+ if a < b { b } else { a }
+ }
+
+ const GOAL: i32 = max(max(1, max(10, 3)), 0-122);
+ "#,
+ 10,
+ );
+
+ check_number(
+ r#"
+ const fn max(a: &i32, b: &i32) -> &i32 {
+ if *a < *b { b } else { a }
+ }
+
+ const GOAL: i32 = *max(max(&1, max(&10, &3)), &5);
+ "#,
+ 10,
+ );
+}
+
+#[test]
+fn loops() {
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ let mut x = 0;
+ loop {
+ x = x + 1;
+ while true {
+ break;
+ }
+ x = x + 1;
+ if x == 2 {
+ continue;
+ }
+ break;
+ }
+ x
+ };
+ "#,
+ 4,
+ );
+}
+
+#[test]
+fn for_loops() {
+ check_number(
+ r#"
+ //- minicore: iterator
+
+ struct Range {
+ start: u8,
+ end: u8,
+ }
+
+ impl Iterator for Range {
+ type Item = u8;
+ fn next(&mut self) -> Option<u8> {
+ if self.start >= self.end {
+ None
+ } else {
+ let r = self.start;
+ self.start = self.start + 1;
+ Some(r)
+ }
+ }
+ }
+
+ const GOAL: u8 = {
+ let mut sum = 0;
+ let ar = Range { start: 1, end: 11 };
+ for i in ar {
+ sum = sum + i;
+ }
+ sum
+ };
+ "#,
+ 55,
+ );
+}
+
+#[test]
+fn recursion() {
+ check_number(
+ r#"
+ const fn fact(k: i32) -> i32 {
+ if k > 0 { fact(k - 1) * k } else { 1 }
+ }
+
+ const GOAL: i32 = fact(5);
+ "#,
+ 120,
+ );
+}
+
+#[test]
+fn structs() {
+ check_number(
+ r#"
+ struct Point {
+ x: i32,
+ y: i32,
+ }
+
+ const GOAL: i32 = {
+ let p = Point { x: 5, y: 2 };
+ let y = 1;
+ let x = 3;
+ let q = Point { y, x };
+ p.x + p.y + p.x + q.y + q.y + q.x
+ };
+ "#,
+ 17,
+ );
+}
+
+#[test]
+fn unions() {
+ check_number(
+ r#"
+ union U {
+ f1: i64,
+ f2: (i32, i32),
+ }
+
+ const GOAL: i32 = {
+ let p = U { f1: 0x0123ABCD0123DCBA };
+ let p = unsafe { p.f2 };
+ p.0 + p.1 + p.1
+ };
+ "#,
+ 0x0123ABCD * 2 + 0x0123DCBA,
+ );
+}
+
+#[test]
+fn tuples() {
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ let a = (10, 20, 3, 15);
+ a.1
+ };
+ "#,
+ 20,
+ );
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ let mut a = (10, 20, 3, 15);
+ a.1 = 2;
+ a.0 + a.1 + a.2 + a.3
+ };
+ "#,
+ 30,
+ );
+ check_number(
+ r#"
+ struct TupleLike(i32, u8, i64, u16);
+ const GOAL: u8 = {
+ let a = TupleLike(10, 20, 3, 15);
+ a.1
+ };
+ "#,
+ 20,
+ );
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ match (&(2 + 2), &4) {
+ (left_val, right_val) => {
+ if !(*left_val == *right_val) {
+ 2
+ } else {
+ 5
+ }
+ }
+ }
+ };
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn path_pattern_matching() {
+ check_number(
+ r#"
+ enum Season {
+ Spring,
+ Summer,
+ Fall,
+ Winter,
+ }
+
+ use Season::*;
+
+ const fn f(x: Season) -> i32 {
+ match x {
+ Spring => 1,
+ Summer => 2,
+ Fall => 3,
+ Winter => 4,
+ }
+ }
+ const GOAL: i32 = f(Spring) + 10 * f(Summer) + 100 * f(Fall) + 1000 * f(Winter);
+ "#,
+ 4321,
+ );
+}
+
+#[test]
+fn pattern_matching_ergonomics() {
+ check_number(
+ r#"
+ const fn f(x: &(u8, u8)) -> u8 {
+ match x {
+ (a, b) => *a + *b
+ }
+ }
+ const GOAL: u8 = f(&(2, 3));
+ "#,
+ 5,
+ );
+}
+
+#[test]
+fn let_else() {
+ check_number(
+ r#"
+ const fn f(x: &(u8, u8)) -> u8 {
+ let (a, b) = x;
+ *a + *b
+ }
+ const GOAL: u8 = f(&(2, 3));
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ enum SingleVariant {
+ Var(u8, u8),
+ }
+ const fn f(x: &&&&&SingleVariant) -> u8 {
+ let SingleVariant::Var(a, b) = x;
+ *a + *b
+ }
+ const GOAL: u8 = f(&&&&&SingleVariant::Var(2, 3));
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const fn f(x: Option<i32>) -> i32 {
+ let Some(x) = x else { return 10 };
+ 2 * x
+ }
+ const GOAL: i32 = f(Some(1000)) + f(None);
+ "#,
+ 2010,
+ );
+}
+
+#[test]
+fn function_param_patterns() {
+ check_number(
+ r#"
+ const fn f((a, b): &(u8, u8)) -> u8 {
+ *a + *b
+ }
+ const GOAL: u8 = f(&(2, 3));
+ "#,
+ 5,
+ );
+ check_number(
+ r#"
+ const fn f(c @ (a, b): &(u8, u8)) -> u8 {
+ *a + *b + c.0 + (*c).1
+ }
+ const GOAL: u8 = f(&(2, 3));
+ "#,
+ 10,
+ );
+ check_number(
+ r#"
+ const fn f(ref a: u8) -> u8 {
+ *a
+ }
+ const GOAL: u8 = f(2);
+ "#,
+ 2,
+ );
+ check_number(
+ r#"
+ struct Foo(u8);
+ impl Foo {
+ const fn f(&self, (a, b): &(u8, u8)) -> u8 {
+ self.0 + *a + *b
+ }
+ }
+ const GOAL: u8 = Foo(4).f(&(2, 3));
+ "#,
+ 9,
+ );
+}
+
+#[test]
+fn options() {
+ check_number(
+ r#"
+ //- minicore: option
+ const GOAL: u8 = {
+ let x = Some(2);
+ match x {
+ Some(y) => 2 * y,
+ _ => 10,
+ }
+ };
+ "#,
+ 4,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ fn f(x: Option<Option<i32>>) -> i32 {
+ if let Some(y) = x && let Some(z) = y {
+ z
+ } else if let Some(y) = x {
+ 1
+ } else {
+ 0
+ }
+ }
+ const GOAL: i32 = f(Some(Some(10))) + f(Some(None)) + f(None);
+ "#,
+ 11,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const GOAL: u8 = {
+ let x = None;
+ match x {
+ Some(y) => 2 * y,
+ _ => 10,
+ }
+ };
+ "#,
+ 10,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const GOAL: Option<&u8> = None;
+ "#,
+ 0,
+ );
+}
+
+#[test]
+fn or_pattern() {
+ check_number(
+ r#"
+ const GOAL: u8 = {
+ let (a | a) = 2;
+ a
+ };
+ "#,
+ 2,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const fn f(x: Option<i32>) -> i32 {
+ let (Some(a) | Some(a)) = x else { return 2; };
+ a
+ }
+ const GOAL: i32 = f(Some(10)) + f(None);
+ "#,
+ 12,
+ );
+ check_number(
+ r#"
+ //- minicore: option
+ const fn f(x: Option<i32>, y: Option<i32>) -> i32 {
+ match (x, y) {
+ (Some(x), Some(y)) => x * y,
+ (Some(a), _) | (_, Some(a)) => a,
+ _ => 10,
+ }
+ }
+ const GOAL: i32 = f(Some(10), Some(20)) + f(Some(30), None) + f(None, Some(40)) + f(None, None);
+ "#,
+ 280,
+ );
+}
+
+#[test]
+fn array_and_index() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: u8 = {
+ let a = [10, 20, 3, 15];
+ let x: &[u8] = &a;
+ x[1]
+ };
+ "#,
+ 20,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: usize = [1, 2, 3][2];"#,
+ 3,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: usize = { let a = [1, 2, 3]; let x: &[i32] = &a; x.len() };"#,
+ 3,
+ );
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: usize = [1, 2, 3, 4, 5].len();"#,
+ 5,
+ );
+}
+
+#[test]
+fn byte_string() {
+ check_number(
+ r#"
+ //- minicore: coerce_unsized, index, slice
+ const GOAL: u8 = {
+ let a = b"hello";
+ let x: &[u8] = a;
+ x[0]
+ };
+ "#,
+ 104,
+ );
+}
+
+#[test]
fn consts() {
check_number(
r#"
@@ -92,41 +906,35 @@ fn enums() {
r#"
enum E {
F1 = 1,
- F2 = 2 * E::F1 as u8,
- F3 = 3 * E::F2 as u8,
+ F2 = 2 * E::F1 as isize, // Rustc expects an isize here
+ F3 = 3 * E::F2 as isize,
}
- const GOAL: i32 = E::F3 as u8;
+ const GOAL: u8 = E::F3 as u8;
"#,
6,
);
check_number(
r#"
enum E { F1 = 1, F2, }
- const GOAL: i32 = E::F2 as u8;
+ const GOAL: u8 = E::F2 as u8;
"#,
2,
);
check_number(
r#"
enum E { F1, }
- const GOAL: i32 = E::F1 as u8;
+ const GOAL: u8 = E::F1 as u8;
"#,
0,
);
let r = eval_goal(
r#"
- enum E { A = 1, }
+ enum E { A = 1, B }
const GOAL: E = E::A;
"#,
)
.unwrap();
- match r {
- ComputedExpr::Enum(name, _, Literal::Uint(val, _)) => {
- assert_eq!(name, "E::A");
- assert_eq!(val, 1);
- }
- x => panic!("Expected enum but found {x:?}"),
- }
+ assert_eq!(try_const_usize(&r), Some(1));
}
#[test]
@@ -138,7 +946,19 @@ fn const_loop() {
const F2: i32 = 2 * F1;
const GOAL: i32 = F3;
"#,
- ConstEvalError::Loop,
+ ConstEvalError::MirLowerError(MirLowerError::Loop),
+ );
+}
+
+#[test]
+fn const_transfer_memory() {
+ check_number(
+ r#"
+ const A1: &i32 = &2;
+ const A2: &i32 = &5;
+ const GOAL: i32 = *A1 + *A2;
+ "#,
+ 7,
);
}
@@ -157,7 +977,20 @@ fn const_impl_assoc() {
}
#[test]
-fn const_generic_subst() {
+fn const_generic_subst_fn() {
+ check_number(
+ r#"
+ const fn f<const A: usize>(x: usize) -> usize {
+ A * x + 5
+ }
+ const GOAL: usize = f::<2>(3);
+ "#,
+ 11,
+ );
+}
+
+#[test]
+fn const_generic_subst_assoc_const_impl() {
// FIXME: this should evaluate to 5
check_fail(
r#"
@@ -167,7 +1000,7 @@ fn const_generic_subst() {
}
const GOAL: usize = Adder::<2, 3>::VAL;
"#,
- ConstEvalError::NotSupported("const generic without substitution"),
+ ConstEvalError::MirEvalError(MirEvalError::TypeError("missing generic arg")),
);
}
@@ -185,6 +1018,58 @@ fn const_trait_assoc() {
}
const GOAL: usize = U0::VAL;
"#,
- ConstEvalError::IncompleteExpr,
+ ConstEvalError::MirLowerError(MirLowerError::IncompleteExpr),
+ );
+}
+
+#[test]
+fn exec_limits() {
+ check_fail(
+ r#"
+ const GOAL: usize = loop {};
+ "#,
+ ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded),
+ );
+ check_fail(
+ r#"
+ const fn f(x: i32) -> i32 {
+ f(x + 1)
+ }
+ const GOAL: i32 = f(0);
+ "#,
+ ConstEvalError::MirEvalError(MirEvalError::StackOverflow),
+ );
+ // Reasonable code should still work
+ check_number(
+ r#"
+ const fn nth_odd(n: i32) -> i32 {
+ 2 * n - 1
+ }
+ const fn f(n: i32) -> i32 {
+ let sum = 0;
+ let i = 0;
+ while i < n {
+ i = i + 1;
+ sum = sum + nth_odd(i);
+ }
+ sum
+ }
+ const GOAL: i32 = f(10000);
+ "#,
+ 10000 * 10000,
+ );
+}
+
+#[test]
+fn type_error() {
+ let e = eval_goal(
+ r#"
+ const GOAL: u8 = {
+ let x: u16 = 2;
+ let y: (u8, u8) = x;
+ y.0
+ };
+ "#,
);
+ assert!(matches!(e, Err(ConstEvalError::MirLowerError(MirLowerError::TypeMismatch(_)))));
}
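Note how the new `check_number` compares only the low `b.len()` bytes of the expected value, while `try_const_usize` earlier widens stored bytes via `pad16`. The diff does not show `pad16` itself (it lives in the new mir module), but from its call sites it plausibly behaves like this hedged sketch:

    // Hypothetical stand-in for the mir module's pad16: extend a little-endian
    // byte slice (assumed at most 16 bytes) to 16 bytes, sign-extending when
    // the value is signed and its top bit is set, zero-extending otherwise.
    fn pad16(b: &[u8], is_signed: bool) -> [u8; 16] {
        let fill = if is_signed && b.last().map_or(false, |&x| x & 0x80 != 0) { 0xFF } else { 0x00 };
        let mut out = [fill; 16];
        out[..b.len()].copy_from_slice(b);
        out
    }

    fn main() {
        assert_eq!(u128::from_le_bytes(pad16(&300u16.to_le_bytes(), false)), 300);
        assert_eq!(i128::from_le_bytes(pad16(&(-2i8).to_le_bytes(), true)), -2);
    }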
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index d45e2a943..304c78767 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -16,10 +16,12 @@ use smallvec::SmallVec;
use crate::{
chalk_db,
- consteval::{ComputedExpr, ConstEvalError},
+ consteval::ConstEvalError,
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
- Binders, CallableDefId, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner, PolyFnSig,
- QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId, ValueTyDefId,
+ mir::{BorrowckResult, MirBody, MirLowerError},
+ Binders, CallableDefId, Const, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner,
+ PolyFnSig, QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId,
+ ValueTyDefId,
};
use hir_expand::name::Name;
@@ -32,6 +34,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::infer::infer_query)]
fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
+ #[salsa::invoke(crate::mir::mir_body_query)]
+ #[salsa::cycle(crate::mir::mir_body_recover)]
+ fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
+
+ #[salsa::invoke(crate::mir::borrowck_query)]
+ fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<BorrowckResult>, MirLowerError>;
+
#[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)]
fn ty(&self, def: TyDefId) -> Binders<Ty>;
@@ -46,13 +55,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::const_param_ty_query)]
fn const_param_ty(&self, def: ConstParamId) -> Ty;
- #[salsa::invoke(crate::consteval::const_eval_variant_query)]
+ #[salsa::invoke(crate::consteval::const_eval_query)]
#[salsa::cycle(crate::consteval::const_eval_recover)]
- fn const_eval(&self, def: ConstId) -> Result<ComputedExpr, ConstEvalError>;
+ fn const_eval(&self, def: ConstId) -> Result<Const, ConstEvalError>;
- #[salsa::invoke(crate::consteval::const_eval_query_variant)]
- #[salsa::cycle(crate::consteval::const_eval_variant_recover)]
- fn const_eval_variant(&self, def: EnumVariantId) -> Result<ComputedExpr, ConstEvalError>;
+ #[salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
+ #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
+ fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
#[salsa::invoke(crate::lower::impl_trait_query)]
fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
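The `#[salsa::cycle(...)]` attributes pair each query with a recovery function, so a self-referential constant reports `MirLowerError::Loop` instead of hanging (compare the `const_loop` test above). A toy model of that cycle detection, independent of salsa:

    use std::collections::HashSet;

    #[derive(Debug, PartialEq)]
    enum ConstEvalError {
        Loop,
    }

    // defs[i] = Some(j) encodes "const C_i = 2 * C_j"; None encodes "const C_i = 1".
    fn eval(defs: &[Option<usize>], i: usize, visiting: &mut HashSet<usize>) -> Result<i128, ConstEvalError> {
        if !visiting.insert(i) {
            return Err(ConstEvalError::Loop); // the query re-entered itself
        }
        let r = match defs[i] {
            None => Ok(1),
            Some(j) => eval(defs, j, visiting).map(|v| 2 * v),
        };
        visiting.remove(&i);
        r
    }

    fn main() {
        // F1 = 2 * F2, F2 = 2 * F1: a cycle, reported as Loop
        assert_eq!(eval(&[Some(1), Some(0)], 0, &mut HashSet::new()), Err(ConstEvalError::Loop));
        // F1 = 2 * F2, F2 = 1: evaluates normally
        assert_eq!(eval(&[Some(1), None], 0, &mut HashSet::new()), Ok(2));
    }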
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
index 37eb06be1..4b147b997 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs
@@ -11,3 +11,9 @@ pub use crate::diagnostics::{
},
unsafe_check::{missing_unsafe, unsafe_expressions, UnsafeExpr},
};
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct IncoherentImpl {
+ pub file_id: hir_expand::HirFileId,
+ pub impl_: syntax::AstPtr<syntax::ast::Impl>,
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index f7031a854..d36b93e3b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -178,6 +178,7 @@ impl<'a> DeclValidator<'a> {
AttrDefId::StaticId(sid) => Some(sid.lookup(self.db.upcast()).container.into()),
AttrDefId::ConstId(cid) => Some(cid.lookup(self.db.upcast()).container.into()),
AttrDefId::TraitId(tid) => Some(tid.lookup(self.db.upcast()).container.into()),
+ AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()),
AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()),
// These warnings should not explore macro definitions at all
@@ -234,8 +235,8 @@ impl<'a> DeclValidator<'a> {
let pats_replacements = body
.pats
.iter()
- .filter_map(|(id, pat)| match pat {
- Pat::Bind { name, .. } => Some((id, name)),
+ .filter_map(|(pat_id, pat)| match pat {
+ Pat::Bind { id, .. } => Some((pat_id, &body.bindings[*id].name)),
_ => None,
})
.filter_map(|(id, bind_name)| {
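This hunk reflects a hir-def change: `Pat::Bind` now carries a binding id, and the name is looked up in `body.bindings`. A toy model of that indirection, with plain indices standing in for la-arena's typed `Idx`:

    struct Binding {
        name: String,
    }

    enum Pat {
        Bind { id: usize, subpat: Option<Box<Pat>> },
        Wild,
    }

    struct Body {
        bindings: Vec<Binding>, // names live once, in the body
        pats: Vec<Pat>,         // patterns only reference them by id
    }

    fn binding_names(body: &Body) -> Vec<(usize, &str)> {
        body.pats
            .iter()
            .enumerate()
            .filter_map(|(pat_id, pat)| match pat {
                Pat::Bind { id, .. } => Some((pat_id, body.bindings[*id].name.as_str())),
                _ => None,
            })
            .collect()
    }

    fn main() {
        let body = Body {
            bindings: vec![Binding { name: "non_snake".into() }],
            pats: vec![Pat::Wild, Pat::Bind { id: 0, subpat: None }],
        };
        assert_eq!(binding_names(&body), vec![(1, "non_snake")]);
    }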
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
index 3286dcb5a..2e9066788 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -5,11 +5,11 @@
use std::fmt;
use std::sync::Arc;
+use either::Either;
use hir_def::lang_item::LangItem;
use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
use hir_def::{ItemContainerId, Lookup};
use hir_expand::name;
-use itertools::Either;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use typed_arena::Arena;
@@ -84,7 +84,7 @@ impl ExprValidator {
match expr {
Expr::Match { expr, arms } => {
- self.validate_match(id, *expr, arms, db, self.infer.clone());
+ self.validate_match(id, *expr, arms, db);
}
Expr::Call { .. } | Expr::MethodCall { .. } => {
self.validate_call(db, id, expr, &mut filter_map_next_checker);
@@ -147,16 +147,15 @@ impl ExprValidator {
fn validate_match(
&mut self,
- id: ExprId,
match_expr: ExprId,
+ scrutinee_expr: ExprId,
arms: &[MatchArm],
db: &dyn HirDatabase,
- infer: Arc<InferenceResult>,
) {
let body = db.body(self.owner);
- let match_expr_ty = &infer[match_expr];
- if match_expr_ty.is_unknown() {
+ let scrut_ty = &self.infer[scrutinee_expr];
+ if scrut_ty.is_unknown() {
return;
}
@@ -166,23 +165,23 @@ impl ExprValidator {
let mut m_arms = Vec::with_capacity(arms.len());
let mut has_lowering_errors = false;
for arm in arms {
- if let Some(pat_ty) = infer.type_of_pat.get(arm.pat) {
+ if let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) {
// We only include patterns whose type matches the type
- // of the match expression. If we had an InvalidMatchArmPattern
+ // of the scrutinee expression. If we had an InvalidMatchArmPattern
// diagnostic or similar we could raise that in an else
// block here.
//
// When comparing the types, we also have to consider that rustc
- // will automatically de-reference the match expression type if
+ // will automatically de-reference the scrutinee expression type if
// necessary.
//
// FIXME we should use the type checker for this.
- if (pat_ty == match_expr_ty
- || match_expr_ty
+ if (pat_ty == scrut_ty
+ || scrut_ty
.as_reference()
.map(|(match_expr_ty, ..)| match_expr_ty == pat_ty)
.unwrap_or(false))
- && types_of_subpatterns_do_match(arm.pat, &body, &infer)
+ && types_of_subpatterns_do_match(arm.pat, &body, &self.infer)
{
// If we had a NotUsefulMatchArm diagnostic, we could
// check the usefulness of each pattern as we added it
@@ -206,7 +205,7 @@ impl ExprValidator {
return;
}
- let report = compute_match_usefulness(&cx, &m_arms, match_expr_ty);
+ let report = compute_match_usefulness(&cx, &m_arms, scrut_ty);
// FIXME Report unreacheble arms
// https://github.com/rust-lang/rust/blob/f31622a50/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L200
@@ -214,8 +213,8 @@ impl ExprValidator {
let witnesses = report.non_exhaustiveness_witnesses;
if !witnesses.is_empty() {
self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
- match_expr: id,
- uncovered_patterns: missing_match_arms(&cx, match_expr_ty, witnesses, arms),
+ match_expr,
+ uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, arms),
});
}
}
@@ -379,7 +378,7 @@ fn missing_match_arms<'p>(
arms: &[MatchArm],
) -> String {
struct DisplayWitness<'a, 'p>(&'a DeconstructedPat<'p>, &'a MatchCheckCtx<'a, 'p>);
- impl<'a, 'p> fmt::Display for DisplayWitness<'a, 'p> {
+ impl fmt::Display for DisplayWitness<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let DisplayWitness(witness, cx) = *self;
let pat = witness.to_pat(cx);
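The `DisplayWitness` change is purely stylistic: when an impl never names its lifetime parameters, `'_` placeholders are the idiomatic spelling. For example:

    use std::fmt;

    struct DisplayWitness<'a>(&'a str);

    // Behavior is identical to naming the lifetime; the placeholder just
    // avoids an unused name.
    impl fmt::Display for DisplayWitness<'_> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "witness: {}", self.0)
        }
    }

    fn main() {
        println!("{}", DisplayWitness("pattern"));
    }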
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
index 8b0f051b4..859a37804 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs
@@ -146,8 +146,9 @@ impl<'a> PatCtxt<'a> {
PatKind::Leaf { subpatterns }
}
- hir_def::expr::Pat::Bind { ref name, subpat, .. } => {
+ hir_def::expr::Pat::Bind { id, subpat, .. } => {
let bm = self.infer.pat_binding_modes[&pat];
+ let name = &self.body.bindings[id].name;
match (bm, ty.kind(Interner)) {
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
(BindingMode::Ref(_), _) => {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 431ab949b..d25c0ccf0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -94,8 +94,10 @@ fn walk_unsafe(
unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
- Expr::Unsafe { body: child } => {
- return walk_unsafe(db, infer, def, body, *child, true, unsafe_expr_cb);
+ Expr::Unsafe { .. } => {
+ return expr.walk_child_exprs(|child| {
+ walk_unsafe(db, infer, def, body, child, true, unsafe_expr_cb);
+ });
}
_ => {}
}
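Previously `walk_unsafe` descended into only the single `body` child of an unsafe block; the fix walks every child expression with `inside_unsafe_block` set. A self-contained model of why that matters:

    enum Expr {
        Unsafe(Vec<Expr>),
        Call(Vec<Expr>),
        UnsafeOp,
        Safe,
    }

    fn walk(e: &Expr, inside_unsafe: bool, missing: &mut Vec<&'static str>) {
        match e {
            // The fix: visit *every* child of an unsafe block with the flag
            // set, not just one designated body child.
            Expr::Unsafe(children) => children.iter().for_each(|c| walk(c, true, missing)),
            Expr::Call(children) => children.iter().for_each(|c| walk(c, inside_unsafe, missing)),
            Expr::UnsafeOp if !inside_unsafe => missing.push("unsafe op outside unsafe block"),
            _ => {}
        }
    }

    fn main() {
        let tree = Expr::Unsafe(vec![Expr::UnsafeOp, Expr::Call(vec![Expr::UnsafeOp])]);
        let mut missing = Vec::new();
        walk(&tree, false, &mut missing);
        assert!(missing.is_empty()); // both operations are covered by the unsafe block
    }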
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index b22064d8c..bd3eccfe4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -5,8 +5,9 @@
use std::fmt::{self, Debug};
use base_db::CrateId;
-use chalk_ir::BoundVar;
+use chalk_ir::{BoundVar, TyKind};
use hir_def::{
+ adt::VariantData,
body,
db::DefDatabase,
find_path,
@@ -14,9 +15,9 @@ use hir_def::{
item_scope::ItemInNs,
lang_item::{LangItem, LangItemTarget},
path::{Path, PathKind},
- type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef},
+ type_ref::{TraitBoundModifier, TypeBound, TypeRef},
visibility::Visibility,
- HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId,
+ HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId,
};
use hir_expand::{hygiene::Hygiene, name::Name};
use intern::{Internable, Interned};
@@ -25,14 +26,17 @@ use smallvec::SmallVec;
use crate::{
db::HirDatabase,
- from_assoc_type_id, from_foreign_def_id, from_placeholder_idx, lt_from_placeholder_idx,
+ from_assoc_type_id, from_foreign_def_id, from_placeholder_idx,
+ layout::layout_of_ty,
+ lt_from_placeholder_idx,
mapping::from_chalk,
+ mir::pad16,
primitive, to_assoc_type_id,
utils::{self, generics},
- AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstValue, DomainGoal,
- GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives, Mutability,
- OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar, Substitution, TraitRef,
- TraitRefExt, Ty, TyExt, TyKind, WhereClause,
+ AdtId, AliasEq, AliasTy, Binders, CallableDefId, CallableSig, Const, ConstScalar, ConstValue,
+ DomainGoal, GenericArg, ImplTraitId, Interner, Lifetime, LifetimeData, LifetimeOutlives,
+ MemoryMap, Mutability, OpaqueTy, ProjectionTy, ProjectionTyExt, QuantifiedWhereClause, Scalar,
+ Substitution, TraitRef, TraitRefExt, Ty, TyExt, WhereClause,
};
pub trait HirWrite: fmt::Write {
@@ -362,20 +366,176 @@ impl HirDisplay for GenericArg {
impl HirDisplay for Const {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
let data = self.interned();
- match data.value {
+ match &data.value {
ConstValue::BoundVar(idx) => idx.hir_fmt(f),
ConstValue::InferenceVar(..) => write!(f, "#c#"),
ConstValue::Placeholder(idx) => {
- let id = from_placeholder_idx(f.db, idx);
+ let id = from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics.params.type_or_consts[id.local_id];
write!(f, "{}", param_data.name().unwrap())
}
- ConstValue::Concrete(c) => write!(f, "{}", c.interned),
+ ConstValue::Concrete(c) => match &c.interned {
+ ConstScalar::Bytes(b, m) => render_const_scalar(f, &b, m, &data.ty),
+ ConstScalar::Unknown => f.write_char('_'),
+ },
}
}
}
+pub struct HexifiedConst(pub Const);
+
+impl HirDisplay for HexifiedConst {
+ fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+ let data = &self.0.data(Interner);
+ if let TyKind::Scalar(s) = data.ty.kind(Interner) {
+ if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) {
+ if let ConstValue::Concrete(c) = &data.value {
+ if let ConstScalar::Bytes(b, m) = &c.interned {
+ let value = u128::from_le_bytes(pad16(b, false));
+ if value >= 10 {
+ render_const_scalar(f, &b, m, &data.ty)?;
+ return write!(f, " ({:#X})", value);
+ }
+ }
+ }
+ }
+ }
+ self.0.hir_fmt(f)
+ }
+}
+
+fn render_const_scalar(
+ f: &mut HirFormatter<'_>,
+ b: &[u8],
+ memory_map: &MemoryMap,
+ ty: &Ty,
+) -> Result<(), HirDisplayError> {
+ match ty.kind(Interner) {
+ chalk_ir::TyKind::Scalar(s) => match s {
+ Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }),
+ Scalar::Char => {
+ let x = u128::from_le_bytes(pad16(b, false)) as u32;
+ let Ok(c) = char::try_from(x) else {
+ return f.write_str("<unicode-error>");
+ };
+ write!(f, "{c:?}")
+ }
+ Scalar::Int(_) => {
+ let x = i128::from_le_bytes(pad16(b, true));
+ write!(f, "{x}")
+ }
+ Scalar::Uint(_) => {
+ let x = u128::from_le_bytes(pad16(b, false));
+ write!(f, "{x}")
+ }
+ Scalar::Float(fl) => match fl {
+ chalk_ir::FloatTy::F32 => {
+ let x = f32::from_le_bytes(b.try_into().unwrap());
+ write!(f, "{x:?}")
+ }
+ chalk_ir::FloatTy::F64 => {
+ let x = f64::from_le_bytes(b.try_into().unwrap());
+ write!(f, "{x:?}")
+ }
+ },
+ },
+ chalk_ir::TyKind::Ref(_, _, t) => match t.kind(Interner) {
+ chalk_ir::TyKind::Str => {
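+ // A `&str` is a fat pointer; the first half of the bytes is the address, which is
+ // looked up in the memory map.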
+ let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap());
+ let bytes = memory_map.0.get(&addr).map(|x| &**x).unwrap_or(&[]);
+ let s = std::str::from_utf8(bytes).unwrap_or("<utf8-error>");
+ write!(f, "{s:?}")
+ }
+ _ => f.write_str("<ref-not-supported>"),
+ },
+ chalk_ir::TyKind::Tuple(_, subst) => {
+ // FIXME: Remove this line. If the target data layout is independent
+ // of the krate, then `db.target_data_layout` and its callers like `layout_of_ty` don't
+ // need a krate. Otherwise, we need to obtain the krate from the final callers of the
+ // hir display infrastructure and store it here as a field on `f`.
+ let krate = *f.db.crate_graph().crates_in_topological_order().last().unwrap();
+ let Ok(layout) = layout_of_ty(f.db, ty, krate) else {
+ return f.write_str("<layout-error>");
+ };
+ f.write_str("(")?;
+ let mut first = true;
+ for (id, ty) in subst.iter(Interner).enumerate() {
+ if first {
+ first = false;
+ } else {
+ f.write_str(", ")?;
+ }
+ let ty = ty.assert_ty_ref(Interner); // Tuples only have type arguments
+ let offset = layout.fields.offset(id).bytes_usize();
+ let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
+ f.write_str("<layout-error>")?;
+ continue;
+ };
+ let size = layout.size.bytes_usize();
+ render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)?;
+ }
+ f.write_str(")")
+ }
+ chalk_ir::TyKind::Adt(adt, subst) => match adt.0 {
+ hir_def::AdtId::StructId(s) => {
+ let data = f.db.struct_data(s);
+ let Ok(layout) = f.db.layout_of_adt(adt.0, subst.clone()) else {
+ return f.write_str("<layout-error>");
+ };
+ match data.variant_data.as_ref() {
+ VariantData::Record(fields) | VariantData::Tuple(fields) => {
+ let field_types = f.db.field_types(s.into());
+ let krate = adt.0.module(f.db.upcast()).krate();
+ let render_field = |f: &mut HirFormatter<'_>, id: LocalFieldId| {
+ let offset = layout
+ .fields
+ .offset(u32::from(id.into_raw()) as usize)
+ .bytes_usize();
+ let ty = field_types[id].clone().substitute(Interner, subst);
+ let Ok(layout) = layout_of_ty(f.db, &ty, krate) else {
+ return f.write_str("<layout-error>");
+ };
+ let size = layout.size.bytes_usize();
+ render_const_scalar(f, &b[offset..offset + size], memory_map, &ty)
+ };
+ let mut it = fields.iter();
+ if matches!(data.variant_data.as_ref(), VariantData::Record(_)) {
+ write!(f, "{} {{", data.name)?;
+ if let Some((id, data)) = it.next() {
+ write!(f, " {}: ", data.name)?;
+ render_field(f, id)?;
+ }
+ for (id, data) in it {
+ write!(f, ", {}: ", data.name)?;
+ render_field(f, id)?;
+ }
+ write!(f, " }}")?;
+ } else {
+ let mut it = it.map(|x| x.0);
+ write!(f, "{}(", data.name)?;
+ if let Some(id) = it.next() {
+ render_field(f, id)?;
+ }
+ for id in it {
+ write!(f, ", ")?;
+ render_field(f, id)?;
+ }
+ write!(f, ")")?;
+ }
+ return Ok(());
+ }
+ VariantData::Unit => write!(f, "{}", data.name),
+ }
+ }
+ hir_def::AdtId::UnionId(u) => write!(f, "{}", f.db.union_data(u).name),
+ hir_def::AdtId::EnumId(_) => f.write_str("<enum-not-supported>"),
+ },
+ chalk_ir::TyKind::FnDef(..) => ty.hir_fmt(f),
+ _ => f.write_str("<not-supported>"),
+ }
+}
+
impl HirDisplay for BoundVar {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "?{}.{}", self.debruijn.depth(), self.index)
@@ -614,8 +774,9 @@ impl HirDisplay for Ty {
{
return true;
}
- if let Some(ConstValue::Concrete(c)) =
- parameter.constant(Interner).map(|x| x.data(Interner).value)
+ if let Some(ConstValue::Concrete(c)) = parameter
+ .constant(Interner)
+ .map(|x| &x.data(Interner).value)
{
if c.interned == ConstScalar::Unknown {
return true;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 767afdf9e..7de5b4295 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -17,11 +17,12 @@ use std::ops::Index;
use std::sync::Arc;
use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags};
+use either::Either;
use hir_def::{
body::Body,
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
data::{ConstData, StaticData},
- expr::{BindingAnnotation, ExprId, ExprOrPatId, PatId},
+ expr::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId},
lang_item::{LangItem, LangItemTarget},
layout::Integer,
path::Path,
@@ -30,10 +31,9 @@ use hir_def::{
AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule,
ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId,
};
-use hir_expand::name::name;
-use itertools::Either;
+use hir_expand::name::{name, Name};
use la_arena::ArenaMap;
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
use stdx::always;
use crate::{
@@ -66,8 +66,10 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc<Infer
let mut ctx = InferenceContext::new(db, def, &body, resolver);
match def {
+ DefWithBodyId::FunctionId(f) => {
+ ctx.collect_fn(f);
+ }
DefWithBodyId::ConstId(c) => ctx.collect_const(&db.const_data(c)),
- DefWithBodyId::FunctionId(f) => ctx.collect_fn(f),
DefWithBodyId::StaticId(s) => ctx.collect_static(&db.static_data(s)),
DefWithBodyId::VariantId(v) => {
ctx.return_ty = TyBuilder::builtin(match db.enum_data(v.parent).variant_body_type() {
@@ -144,44 +146,6 @@ impl Default for BindingMode {
}
}
-/// Used to generalize patterns and assignee expressions.
-trait PatLike: Into<ExprOrPatId> + Copy {
- type BindingMode: Copy;
-
- fn infer(
- this: &mut InferenceContext<'_>,
- id: Self,
- expected_ty: &Ty,
- default_bm: Self::BindingMode,
- ) -> Ty;
-}
-
-impl PatLike for ExprId {
- type BindingMode = ();
-
- fn infer(
- this: &mut InferenceContext<'_>,
- id: Self,
- expected_ty: &Ty,
- _: Self::BindingMode,
- ) -> Ty {
- this.infer_assignee_expr(id, expected_ty)
- }
-}
-
-impl PatLike for PatId {
- type BindingMode = BindingMode;
-
- fn infer(
- this: &mut InferenceContext<'_>,
- id: Self,
- expected_ty: &Ty,
- default_bm: Self::BindingMode,
- ) -> Ty {
- this.infer_pat(id, expected_ty, default_bm)
- }
-}
-
#[derive(Debug)]
pub(crate) struct InferOk<T> {
value: T,
@@ -200,11 +164,45 @@ pub(crate) type InferResult<T> = Result<InferOk<T>, TypeError>;
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum InferenceDiagnostic {
- NoSuchField { expr: ExprId },
- PrivateField { expr: ExprId, field: FieldId },
- PrivateAssocItem { id: ExprOrPatId, item: AssocItemId },
- BreakOutsideOfLoop { expr: ExprId, is_break: bool },
- MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize },
+ NoSuchField {
+ expr: ExprId,
+ },
+ PrivateField {
+ expr: ExprId,
+ field: FieldId,
+ },
+ PrivateAssocItem {
+ id: ExprOrPatId,
+ item: AssocItemId,
+ },
+ UnresolvedField {
+ expr: ExprId,
+ receiver: Ty,
+ name: Name,
+ method_with_same_name_exists: bool,
+ },
+ UnresolvedMethodCall {
+ expr: ExprId,
+ receiver: Ty,
+ name: Name,
+ /// Contains the type the field resolves to
+ field_with_same_name: Option<Ty>,
+ },
+ // FIXME: Make this proper
+ BreakOutsideOfLoop {
+ expr: ExprId,
+ is_break: bool,
+ bad_value_break: bool,
+ },
+ MismatchedArgCount {
+ call_expr: ExprId,
+ expected: usize,
+ found: usize,
+ },
+ ExpectedFunction {
+ call_expr: ExprId,
+ found: Ty,
+ },
}
/// A mismatch between an expected and an inferred type.
@@ -293,8 +291,10 @@ pub enum Adjust {
/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
/// The target type is `U` in both cases, with the region and mutability
/// being those shared by both the receiver and the returned reference.
+///
+/// Mutability is `None` when we are not sure.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct OverloadedDeref(pub Mutability);
+pub struct OverloadedDeref(pub Option<Mutability>);
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum AutoBorrow {
@@ -354,7 +354,10 @@ pub struct InferenceResult {
/// **Note**: When a pattern type is resolved it may still contain
/// unresolved or missing subpatterns or subpatterns of mismatched types.
pub type_of_pat: ArenaMap<PatId, Ty>,
+ pub type_of_binding: ArenaMap<BindingId, Ty>,
pub type_of_rpit: ArenaMap<RpitId, Ty>,
+ /// Type of the result of calling `.into_iter()` on the iterable of a `for` loop.
+ /// The `ExprId` is that of the whole `for` loop expression.
+ pub type_of_for_iterator: FxHashMap<ExprId, Ty>,
type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
/// Interned common types to return references to.
standard_types: InternedStandardTypes,
@@ -389,18 +392,15 @@ impl InferenceResult {
pub fn type_mismatch_for_pat(&self, pat: PatId) -> Option<&TypeMismatch> {
self.type_mismatches.get(&pat.into())
}
+ pub fn type_mismatches(&self) -> impl Iterator<Item = (ExprOrPatId, &TypeMismatch)> {
+ self.type_mismatches.iter().map(|(expr_or_pat, mismatch)| (*expr_or_pat, mismatch))
+ }
pub fn expr_type_mismatches(&self) -> impl Iterator<Item = (ExprId, &TypeMismatch)> {
self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
ExprOrPatId::ExprId(expr) => Some((expr, mismatch)),
_ => None,
})
}
- pub fn pat_type_mismatches(&self) -> impl Iterator<Item = (PatId, &TypeMismatch)> {
- self.type_mismatches.iter().filter_map(|(expr_or_pat, mismatch)| match *expr_or_pat {
- ExprOrPatId::PatId(pat) => Some((pat, mismatch)),
- _ => None,
- })
- }
}
impl Index<ExprId> for InferenceResult {
@@ -419,6 +419,14 @@ impl Index<PatId> for InferenceResult {
}
}
+impl Index<BindingId> for InferenceResult {
+ type Output = Ty;
+
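+ // Bindings that never got a type fall back to the unknown type instead of panicking.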
+ fn index(&self, b: BindingId) -> &Ty {
+ self.type_of_binding.get(b).unwrap_or(&self.standard_types.unknown)
+ }
+}
+
/// The inference context contains all information needed during type inference.
#[derive(Clone, Debug)]
pub(crate) struct InferenceContext<'a> {
@@ -428,14 +436,19 @@ pub(crate) struct InferenceContext<'a> {
pub(crate) resolver: Resolver,
table: unify::InferenceTable<'a>,
trait_env: Arc<TraitEnvironment>,
+ /// The traits in scope, disregarding block modules. This is used for caching purposes.
+ traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult,
/// The return type of the function being inferred, the closure or async block if we're
/// currently within one.
///
/// We might consider using a nested inference context for checking
- /// closures, but currently this is the only field that will change there,
- /// so it doesn't make sense.
+ /// closures so we can swap all shared things out at once.
return_ty: Ty,
+ /// If `Some`, this stores coercion information for returned
+ /// expressions. If `None`, this is in a context where return is
+ /// inappropriate, such as a const expression.
+ return_coercion: Option<CoerceMany>,
/// The resume type and the yield type, respectively, of the generator being inferred.
resume_yield_tys: Option<(Ty, Ty)>,
diverges: Diverges,
@@ -447,7 +460,7 @@ struct BreakableContext {
/// Whether this context contains at least one break expression.
may_break: bool,
/// The coercion target of the context.
- coerce: CoerceMany,
+ coerce: Option<CoerceMany>,
/// The optional label of the context.
label: Option<name::Name>,
kind: BreakableKind,
@@ -503,16 +516,22 @@ impl<'a> InferenceContext<'a> {
trait_env,
return_ty: TyKind::Error.intern(Interner), // set in collect_* calls
resume_yield_tys: None,
+ return_coercion: None,
db,
owner,
body,
+ traits_in_scope: resolver.traits_in_scope(db.upcast()),
resolver,
diverges: Diverges::Maybe,
breakables: Vec::new(),
}
}
- fn resolve_all(self) -> InferenceResult {
+ // FIXME: This function should be private to this module. It is currently only used in consteval, since we need
+ // an `InferenceResult` in the middle of inference. See the fixme comment in `consteval::eval_to_const`. If you
+ // use this function for another workaround, mention it here. If you really need this function and believe that
+ // there is no problem in it being `pub(crate)`, remove this comment.
+ pub(crate) fn resolve_all(self) -> InferenceResult {
let InferenceContext { mut table, mut result, .. } = self;
table.fallback_if_possible();
@@ -528,13 +547,46 @@ impl<'a> InferenceContext<'a> {
for ty in result.type_of_pat.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
- for ty in result.type_of_rpit.iter_mut().map(|x| x.1) {
+ for ty in result.type_of_binding.values_mut() {
+ *ty = table.resolve_completely(ty.clone());
+ }
+ for ty in result.type_of_rpit.values_mut() {
+ *ty = table.resolve_completely(ty.clone());
+ }
+ for ty in result.type_of_for_iterator.values_mut() {
*ty = table.resolve_completely(ty.clone());
}
for mismatch in result.type_mismatches.values_mut() {
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
mismatch.actual = table.resolve_completely(mismatch.actual.clone());
}
+ result.diagnostics.retain_mut(|diagnostic| {
+ if let InferenceDiagnostic::ExpectedFunction { found: ty, .. }
+ | InferenceDiagnostic::UnresolvedField { receiver: ty, .. }
+ | InferenceDiagnostic::UnresolvedMethodCall { receiver: ty, .. } = diagnostic
+ {
+ *ty = table.resolve_completely(ty.clone());
+ // FIXME: Remove this when we are on par with rustc in terms of inference
+ if ty.contains_unknown() {
+ return false;
+ }
+
+ if let InferenceDiagnostic::UnresolvedMethodCall { field_with_same_name, .. } =
+ diagnostic
+ {
+ let clear = if let Some(ty) = field_with_same_name {
+ *ty = table.resolve_completely(ty.clone());
+ ty.contains_unknown()
+ } else {
+ false
+ };
+ if clear {
+ *field_with_same_name = None;
+ }
+ }
+ }
+ true
+ });
for (_, subst) in result.method_resolutions.values_mut() {
*subst = table.resolve_completely(subst.clone());
}
@@ -580,7 +632,7 @@ impl<'a> InferenceContext<'a> {
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
- self.infer_pat(*pat, &ty, BindingMode::default());
+ self.infer_top_pat(*pat, &ty);
}
let error_ty = &TypeRef::Error;
let return_ty = if data.has_async_kw() {
@@ -632,10 +684,19 @@ impl<'a> InferenceContext<'a> {
};
self.return_ty = self.normalize_associated_types_in(return_ty);
+ self.return_coercion = Some(CoerceMany::new(self.return_ty.clone()));
}
fn infer_body(&mut self) {
- self.infer_expr_coerce(self.body.body_expr, &Expectation::has_type(self.return_ty.clone()));
+ match self.return_coercion {
+ Some(_) => self.infer_return(self.body.body_expr),
+ None => {
+ _ = self.infer_expr_coerce(
+ self.body.body_expr,
+ &Expectation::has_type(self.return_ty.clone()),
+ )
+ }
+ }
}
fn write_expr_ty(&mut self, expr: ExprId, ty: Ty) {
@@ -662,12 +723,15 @@ impl<'a> InferenceContext<'a> {
self.result.type_of_pat.insert(pat, ty);
}
+ fn write_binding_ty(&mut self, id: BindingId, ty: Ty) {
+ self.result.type_of_binding.insert(id, ty);
+ }
+
fn push_diagnostic(&mut self, diagnostic: InferenceDiagnostic) {
self.result.diagnostics.push(diagnostic);
}
fn make_ty(&mut self, type_ref: &TypeRef) -> Ty {
- // FIXME use right resolver for block
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let ty = ctx.lower_ty(type_ref);
let ty = self.insert_type_vars(ty);
@@ -681,11 +745,9 @@ impl<'a> InferenceContext<'a> {
/// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
fn insert_const_vars_shallow(&mut self, c: Const) -> Const {
let data = c.data(Interner);
- match data.value {
+ match &data.value {
ConstValue::Concrete(cc) => match cc.interned {
- hir_def::type_ref::ConstScalar::Unknown => {
- self.table.new_const_var(data.ty.clone())
- }
+ crate::ConstScalar::Unknown => self.table.new_const_var(data.ty.clone()),
_ => c,
},
_ => c,
@@ -785,12 +847,11 @@ impl<'a> InferenceContext<'a> {
Some(path) => path,
None => return (self.err_ty(), None),
};
- let resolver = &self.resolver;
let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
// FIXME: this should resolve assoc items as well, see this example:
// https://play.rust-lang.org/?gist=087992e9e22495446c01c0d4e2d69521
let (resolution, unresolved) = if value_ns {
- match resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
+ match self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path()) {
Some(ResolveValueResult::ValueNs(value)) => match value {
ValueNs::EnumVariantId(var) => {
let substs = ctx.substs_from_path(path, var.into(), true);
@@ -811,7 +872,7 @@ impl<'a> InferenceContext<'a> {
None => return (self.err_ty(), None),
}
} else {
- match resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
+ match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
Some(it) => it,
None => return (self.err_ty(), None),
}
@@ -866,7 +927,10 @@ impl<'a> InferenceContext<'a> {
// FIXME potentially resolve assoc type
(self.err_ty(), None)
}
- TypeNs::AdtId(AdtId::EnumId(_)) | TypeNs::BuiltinType(_) | TypeNs::TraitId(_) => {
+ TypeNs::AdtId(AdtId::EnumId(_))
+ | TypeNs::BuiltinType(_)
+ | TypeNs::TraitId(_)
+ | TypeNs::TraitAliasId(_) => {
// FIXME diagnostic
(self.err_ty(), None)
}
@@ -1018,6 +1082,15 @@ impl<'a> InferenceContext<'a> {
let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
Some(struct_.into())
}
+
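+ /// Returns the cached trait set, building a fresh combined set only when block modules
+ /// bring additional traits into scope.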
+ fn get_traits_in_scope(&self) -> Either<FxHashSet<TraitId>, &FxHashSet<TraitId>> {
+ let mut b_traits = self.resolver.traits_in_scope_from_block_scopes().peekable();
+ if b_traits.peek().is_some() {
+ Either::Left(self.traits_in_scope.iter().copied().chain(b_traits).collect())
+ } else {
+ Either::Right(&self.traits_in_scope)
+ }
+ }
}
/// When inferring an expression, we propagate downward whatever type hint we
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
index 3293534a0..48c915302 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -50,11 +50,44 @@ fn success(
#[derive(Clone, Debug)]
pub(super) struct CoerceMany {
expected_ty: Ty,
+ final_ty: Option<Ty>,
}
impl CoerceMany {
pub(super) fn new(expected: Ty) -> Self {
- CoerceMany { expected_ty: expected }
+ CoerceMany { expected_ty: expected, final_ty: None }
+ }
+
+ /// Returns the "expected type" with which this coercion was
+ /// constructed. This represents the "downward propagated" type
+ /// that was given to us at the start of typing whatever construct
+ /// we are typing (e.g., the match expression).
+ ///
+ /// Typically, this is used as the expected type when
+ /// type-checking each of the alternative expressions whose types
+ /// we are trying to merge.
+ pub(super) fn expected_ty(&self) -> Ty {
+ self.expected_ty.clone()
+ }
+
+ /// Returns the current "merged type", representing our best-guess
+ /// at the LUB of the expressions we've seen so far (if any). This
+ /// isn't *final* until you call `self.complete()`, which will return
+ /// the merged type.
+ pub(super) fn merged_ty(&self) -> Ty {
+ self.final_ty.clone().unwrap_or_else(|| self.expected_ty.clone())
+ }
+
+ pub(super) fn complete(self, ctx: &mut InferenceContext<'_>) -> Ty {
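+ // If no type was ever successfully merged, treat the construct as diverging and return `!`.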
+ if let Some(final_ty) = self.final_ty {
+ final_ty
+ } else {
+ ctx.result.standard_types.never.clone()
+ }
+ }
+
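+ /// Merges in the unit type for a missing branch, e.g. an `if` without an `else`.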
+ pub(super) fn coerce_forced_unit(&mut self, ctx: &mut InferenceContext<'_>) {
+ self.coerce(ctx, None, &ctx.result.standard_types.unit.clone())
}
/// Merge two types from different branches, with possible coercion.
@@ -76,25 +109,25 @@ impl CoerceMany {
// Special case: two function types. Try to coerce both to
// pointers to have a chance at getting a match. See
// https://github.com/rust-lang/rust/blob/7b805396bf46dce972692a6846ce2ad8481c5f85/src/librustc_typeck/check/coercion.rs#L877-L916
- let sig = match (self.expected_ty.kind(Interner), expr_ty.kind(Interner)) {
+ let sig = match (self.merged_ty().kind(Interner), expr_ty.kind(Interner)) {
(TyKind::FnDef(..) | TyKind::Closure(..), TyKind::FnDef(..) | TyKind::Closure(..)) => {
// FIXME: we're ignoring safety here. To be more correct, if we have one FnDef and one Closure,
// we should be coercing the closure to a fn pointer of the safety of the FnDef
cov_mark::hit!(coerce_fn_reification);
let sig =
- self.expected_ty.callable_sig(ctx.db).expect("FnDef without callable sig");
+ self.merged_ty().callable_sig(ctx.db).expect("FnDef without callable sig");
Some(sig)
}
_ => None,
};
if let Some(sig) = sig {
let target_ty = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
- let result1 = ctx.table.coerce_inner(self.expected_ty.clone(), &target_ty);
+ let result1 = ctx.table.coerce_inner(self.merged_ty(), &target_ty);
let result2 = ctx.table.coerce_inner(expr_ty.clone(), &target_ty);
if let (Ok(result1), Ok(result2)) = (result1, result2) {
ctx.table.register_infer_ok(result1);
ctx.table.register_infer_ok(result2);
- return self.expected_ty = target_ty;
+ return self.final_ty = Some(target_ty);
}
}
@@ -102,25 +135,20 @@ impl CoerceMany {
// type is a type variable and the new one is `!`, trying it the other
// way around first would mean we make the type variable `!`, instead of
// just marking it as possibly diverging.
- if ctx.coerce(expr, &expr_ty, &self.expected_ty).is_ok() {
- /* self.expected_ty is already correct */
- } else if ctx.coerce(expr, &self.expected_ty, &expr_ty).is_ok() {
- self.expected_ty = expr_ty;
+ if let Ok(res) = ctx.coerce(expr, &expr_ty, &self.merged_ty()) {
+ self.final_ty = Some(res);
+ } else if let Ok(res) = ctx.coerce(expr, &self.merged_ty(), &expr_ty) {
+ self.final_ty = Some(res);
} else {
if let Some(id) = expr {
ctx.result.type_mismatches.insert(
id.into(),
- TypeMismatch { expected: self.expected_ty.clone(), actual: expr_ty },
+ TypeMismatch { expected: self.merged_ty(), actual: expr_ty.clone() },
);
}
cov_mark::hit!(coerce_merge_fail_fallback);
- /* self.expected_ty is already correct */
}
}
-
- pub(super) fn complete(self) -> Ty {
- self.expected_ty
- }
}
pub fn could_coerce(
@@ -665,7 +693,7 @@ pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec<Adju
.iter()
.map(|(kind, _source)| match kind {
// We do not know what kind of deref we require at this point yet
- AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ AutoderefKind::Overloaded => Some(OverloadedDeref(None)),
AutoderefKind::Builtin => None,
})
.zip(targets)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 175fded8c..ee186673e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -15,7 +15,6 @@ use hir_def::{
generics::TypeOrConstParamData,
lang_item::LangItem,
path::{GenericArg, GenericArgs},
- resolver::resolver_for_expr,
ConstParamId, FieldId, ItemContainerId, Lookup,
};
use hir_expand::name::{name, Name};
@@ -25,7 +24,9 @@ use syntax::ast::RangeOp;
use crate::{
autoderef::{self, Autoderef},
consteval,
- infer::{coerce::CoerceMany, find_continuable, BreakableKind},
+ infer::{
+ coerce::CoerceMany, find_continuable, pat::contains_explicit_ref_binding, BreakableKind,
+ },
lower::{
const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
},
@@ -39,8 +40,8 @@ use crate::{
};
use super::{
- coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges,
- Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+ coerce::auto_deref_adjust_steps, find_breakable, BreakableContext, Diverges, Expectation,
+ InferenceContext, InferenceDiagnostic, TypeMismatch,
};
impl<'a> InferenceContext<'a> {
@@ -58,6 +59,10 @@ impl<'a> InferenceContext<'a> {
ty
}
+ pub(crate) fn infer_expr_no_expect(&mut self, tgt_expr: ExprId) -> Ty {
+ self.infer_expr_inner(tgt_expr, &Expectation::None)
+ }
+
/// Infer type of expression with possibly implicit coerce to the expected type.
/// Return the type after possible coercion.
pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
@@ -78,6 +83,30 @@ impl<'a> InferenceContext<'a> {
}
}
+ pub(super) fn infer_expr_coerce_never(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(expr, expected);
+ // While we don't allow *arbitrary* coercions here, we *do* allow
+ // coercions from ! to `expected`.
+ if ty.is_never() {
+ if let Some(adjustments) = self.result.expr_adjustments.get(&expr) {
+ return if let [Adjustment { kind: Adjust::NeverToAny, target }] = &**adjustments {
+ target.clone()
+ } else {
+ self.err_ty()
+ };
+ }
+
+ let adj_ty = self.table.new_type_var();
+ self.write_expr_adj(
+ expr,
+ vec![Adjustment { kind: Adjust::NeverToAny, target: adj_ty.clone() }],
+ );
+ adj_ty
+ } else {
+ ty
+ }
+ }
+
fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
self.db.unwind_if_cancelled();
@@ -85,7 +114,7 @@ impl<'a> InferenceContext<'a> {
Expr::Missing => self.err_ty(),
&Expr::If { condition, then_branch, else_branch } => {
let expected = &expected.adjust_for_branches(&mut self.table);
- self.infer_expr(
+ self.infer_expr_coerce_never(
condition,
&Expectation::HasType(self.result.standard_types.bool_.clone()),
);
@@ -97,59 +126,39 @@ impl<'a> InferenceContext<'a> {
both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
let mut coerce = CoerceMany::new(expected.coercion_target_type(&mut self.table));
coerce.coerce(self, Some(then_branch), &then_ty);
- let else_ty = match else_branch {
- Some(else_branch) => self.infer_expr_inner(else_branch, expected),
- None => TyBuilder::unit(),
- };
+ match else_branch {
+ Some(else_branch) => {
+ let else_ty = self.infer_expr_inner(else_branch, expected);
+ coerce.coerce(self, Some(else_branch), &else_ty);
+ }
+ None => {
+ coerce.coerce_forced_unit(self);
+ }
+ }
both_arms_diverge &= self.diverges;
- // FIXME: create a synthetic `else {}` so we have something to refer to here instead of None?
- coerce.coerce(self, else_branch, &else_ty);
self.diverges = condition_diverges | both_arms_diverge;
- coerce.complete()
+ coerce.complete(self)
}
&Expr::Let { pat, expr } => {
let input_ty = self.infer_expr(expr, &Expectation::none());
- self.infer_pat(pat, &input_ty, BindingMode::default());
+ self.infer_top_pat(pat, &input_ty);
self.result.standard_types.bool_.clone()
}
Expr::Block { statements, tail, label, id: _ } => {
- let old_resolver = mem::replace(
- &mut self.resolver,
- resolver_for_expr(self.db.upcast(), self.owner, tgt_expr),
- );
- let ty = match label {
- Some(_) => {
- let break_ty = self.table.new_type_var();
- let (breaks, ty) = self.with_breakable_ctx(
- BreakableKind::Block,
- break_ty.clone(),
- *label,
- |this| {
- this.infer_block(
- tgt_expr,
- statements,
- *tail,
- &Expectation::has_type(break_ty),
- )
- },
- );
- breaks.unwrap_or(ty)
- }
- None => self.infer_block(tgt_expr, statements, *tail, expected),
- };
- self.resolver = old_resolver;
- ty
+ self.infer_block(tgt_expr, statements, *tail, *label, expected)
+ }
+ Expr::Unsafe { id: _, statements, tail } => {
+ self.infer_block(tgt_expr, statements, *tail, None, expected)
}
- Expr::Unsafe { body } => self.infer_expr(*body, expected),
- Expr::Const { body } => {
- self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
- this.infer_expr(*body, expected)
+ Expr::Const { id: _, statements, tail } => {
+ self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
+ this.infer_block(tgt_expr, statements, *tail, None, expected)
})
.1
}
- Expr::TryBlock { body } => {
+ Expr::TryBlock { id: _, statements, tail } => {
// The type that is returned from the try block
let try_ty = self.table.new_type_var();
if let Some(ty) = expected.only_has_type(&mut self.table) {
@@ -160,28 +169,41 @@ impl<'a> InferenceContext<'a> {
let ok_ty =
self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_output());
- self.with_breakable_ctx(BreakableKind::Block, ok_ty.clone(), None, |this| {
- this.infer_expr(*body, &Expectation::has_type(ok_ty));
- });
-
+ self.infer_block(
+ tgt_expr,
+ statements,
+ *tail,
+ None,
+ &Expectation::has_type(ok_ty.clone()),
+ );
try_ty
}
- Expr::Async { body } => {
+ Expr::Async { id: _, statements, tail } => {
let ret_ty = self.table.new_type_var();
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+ let prev_ret_coercion =
+ mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
let (_, inner_ty) =
- self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
- this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty))
+ self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
+ this.infer_block(
+ tgt_expr,
+ statements,
+ *tail,
+ None,
+ &Expectation::has_type(ret_ty),
+ )
});
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
+ self.return_coercion = prev_ret_coercion;
// Use the first type parameter as the output type of future.
// existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
- let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
+ let impl_trait_id =
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, tgt_expr);
let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
.intern(Interner)
@@ -191,7 +213,7 @@ impl<'a> InferenceContext<'a> {
// let ty = expected.coercion_target_type(&mut self.table);
let ty = self.table.new_type_var();
let (breaks, ()) =
- self.with_breakable_ctx(BreakableKind::Loop, ty, label, |this| {
+ self.with_breakable_ctx(BreakableKind::Loop, Some(ty), label, |this| {
this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
});
@@ -204,7 +226,7 @@ impl<'a> InferenceContext<'a> {
}
}
&Expr::While { condition, body, label } => {
- self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
+ self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
this.infer_expr(
condition,
&Expectation::HasType(this.result.standard_types.bool_.clone()),
@@ -220,11 +242,13 @@ impl<'a> InferenceContext<'a> {
let iterable_ty = self.infer_expr(iterable, &Expectation::none());
let into_iter_ty =
self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
- let pat_ty =
- self.resolve_associated_type(into_iter_ty, self.resolve_iterator_item());
+ let pat_ty = self
+ .resolve_associated_type(into_iter_ty.clone(), self.resolve_iterator_item());
+
+ self.result.type_of_for_iterator.insert(tgt_expr, into_iter_ty);
- self.infer_pat(pat, &pat_ty, BindingMode::default());
- self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
+ self.infer_top_pat(pat, &pat_ty);
+ self.with_breakable_ctx(BreakableKind::Loop, None, label, |this| {
this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
});
@@ -251,7 +275,23 @@ impl<'a> InferenceContext<'a> {
Some(type_ref) => self.make_ty(type_ref),
None => self.table.new_type_var(),
};
- sig_tys.push(ret_ty.clone());
+ if let ClosureKind::Async = closure_kind {
+ // Use the first type parameter as the output type of future.
+ // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
+ let impl_trait_id =
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ sig_tys.push(
+ TyKind::OpaqueType(
+ opaque_ty_id,
+ Substitution::from1(Interner, ret_ty.clone()),
+ )
+ .intern(Interner),
+ );
+ } else {
+ sig_tys.push(ret_ty.clone());
+ }
+
let sig_ty = TyKind::Function(FnPointer {
num_binders: 0,
sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
@@ -262,33 +302,38 @@ impl<'a> InferenceContext<'a> {
})
.intern(Interner);
- let (ty, resume_yield_tys) = if matches!(closure_kind, ClosureKind::Generator(_)) {
- // FIXME: report error when there are more than 1 parameter.
- let resume_ty = match sig_tys.first() {
- // When `sig_tys.len() == 1` the first type is the return type, not the
- // first parameter type.
- Some(ty) if sig_tys.len() > 1 => ty.clone(),
- _ => self.result.standard_types.unit.clone(),
- };
- let yield_ty = self.table.new_type_var();
-
- let subst = TyBuilder::subst_for_generator(self.db, self.owner)
- .push(resume_ty.clone())
- .push(yield_ty.clone())
- .push(ret_ty.clone())
- .build();
+ let (ty, resume_yield_tys) = match closure_kind {
+ ClosureKind::Generator(_) => {
+ // FIXME: report error when there are more than 1 parameter.
+ let resume_ty = match sig_tys.first() {
+ // When `sig_tys.len() == 1` the first type is the return type, not the
+ // first parameter type.
+ Some(ty) if sig_tys.len() > 1 => ty.clone(),
+ _ => self.result.standard_types.unit.clone(),
+ };
+ let yield_ty = self.table.new_type_var();
+
+ let subst = TyBuilder::subst_for_generator(self.db, self.owner)
+ .push(resume_ty.clone())
+ .push(yield_ty.clone())
+ .push(ret_ty.clone())
+ .build();
- let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
- let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);
+ let generator_id = self.db.intern_generator((self.owner, tgt_expr)).into();
+ let generator_ty = TyKind::Generator(generator_id, subst).intern(Interner);
- (generator_ty, Some((resume_ty, yield_ty)))
- } else {
- let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
- let closure_ty =
- TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
- .intern(Interner);
+ (generator_ty, Some((resume_ty, yield_ty)))
+ }
+ ClosureKind::Closure | ClosureKind::Async => {
+ let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
+ let closure_ty = TyKind::Closure(
+ closure_id,
+ Substitution::from1(Interner, sig_ty.clone()),
+ )
+ .intern(Interner);
- (closure_ty, None)
+ (closure_ty, None)
+ }
};
// Eagerly try to relate the closure type with the expected
@@ -297,21 +342,25 @@ impl<'a> InferenceContext<'a> {
self.deduce_closure_type_from_expectations(tgt_expr, &ty, &sig_ty, expected);
// Now go through the argument patterns
- for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
- self.infer_pat(*arg_pat, &arg_ty, BindingMode::default());
+ for (arg_pat, arg_ty) in args.iter().zip(&sig_tys) {
+ self.infer_top_pat(*arg_pat, &arg_ty);
}
+ // FIXME: lift these out into a struct
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+ let prev_ret_coercion =
+ mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
let prev_resume_yield_tys =
mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
- self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| {
- this.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+ self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
+ this.infer_return(*body);
});
self.diverges = prev_diverges;
self.return_ty = prev_ret_ty;
+ self.return_coercion = prev_ret_coercion;
self.resume_yield_tys = prev_resume_yield_tys;
ty
@@ -348,7 +397,13 @@ impl<'a> InferenceContext<'a> {
}
(params, ret_ty)
}
- None => (Vec::new(), self.err_ty()), // FIXME diagnostic
+ None => {
+ self.result.diagnostics.push(InferenceDiagnostic::ExpectedFunction {
+ call_expr: tgt_expr,
+ found: callee_ty.clone(),
+ });
+ (Vec::new(), self.err_ty())
+ }
};
let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
self.register_obligations_for_call(&callee_ty);
@@ -381,92 +436,109 @@ impl<'a> InferenceContext<'a> {
Expr::Match { expr, arms } => {
let input_ty = self.infer_expr(*expr, &Expectation::none());
- let expected = expected.adjust_for_branches(&mut self.table);
-
- let result_ty = if arms.is_empty() {
+ if arms.is_empty() {
+ self.diverges = Diverges::Always;
self.result.standard_types.never.clone()
} else {
- expected.coercion_target_type(&mut self.table)
- };
- let mut coerce = CoerceMany::new(result_ty);
-
- let matchee_diverges = self.diverges;
- let mut all_arms_diverge = Diverges::Always;
-
- for arm in arms.iter() {
- self.diverges = Diverges::Maybe;
- let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
- if let Some(guard_expr) = arm.guard {
- self.infer_expr(
- guard_expr,
- &Expectation::HasType(self.result.standard_types.bool_.clone()),
- );
+ let matchee_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut all_arms_diverge = Diverges::Always;
+ for arm in arms.iter() {
+ let input_ty = self.resolve_ty_shallow(&input_ty);
+ self.infer_top_pat(arm.pat, &input_ty);
}
- let arm_ty = self.infer_expr_inner(arm.expr, &expected);
- all_arms_diverge &= self.diverges;
- coerce.coerce(self, Some(arm.expr), &arm_ty);
- }
+ let expected = expected.adjust_for_branches(&mut self.table);
+ let result_ty = match &expected {
+ // We don't coerce to `()` so that if the match expression is used as a
+ // statement, its branches can still have any consistent type.
+ Expectation::HasType(ty) if *ty != self.result.standard_types.unit => {
+ ty.clone()
+ }
+ _ => self.table.new_type_var(),
+ };
+ let mut coerce = CoerceMany::new(result_ty);
+
+ for arm in arms.iter() {
+ if let Some(guard_expr) = arm.guard {
+ self.diverges = Diverges::Maybe;
+ self.infer_expr_coerce_never(
+ guard_expr,
+ &Expectation::HasType(self.result.standard_types.bool_.clone()),
+ );
+ }
+ self.diverges = Diverges::Maybe;
- self.diverges = matchee_diverges | all_arms_diverge;
+ let arm_ty = self.infer_expr_inner(arm.expr, &expected);
+ all_arms_diverge &= self.diverges;
+ coerce.coerce(self, Some(arm.expr), &arm_ty);
+ }
- coerce.complete()
+ self.diverges = matchee_diverges | all_arms_diverge;
+
+ coerce.complete(self)
+ }
}
Expr::Path(p) => {
- // FIXME this could be more efficient...
- let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
- self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty())
+ let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr);
+ let ty = self.infer_path(p, tgt_expr.into()).unwrap_or_else(|| self.err_ty());
+ self.resolver.reset_to_guard(g);
+ ty
}
Expr::Continue { label } => {
if let None = find_continuable(&mut self.breakables, label.as_ref()) {
self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
expr: tgt_expr,
is_break: false,
+ bad_value_break: false,
});
};
self.result.standard_types.never.clone()
}
Expr::Break { expr, label } => {
let val_ty = if let Some(expr) = *expr {
- self.infer_expr(expr, &Expectation::none())
+ let opt_coerce_to = match find_breakable(&mut self.breakables, label.as_ref()) {
+ Some(ctxt) => match &ctxt.coerce {
+ Some(coerce) => coerce.expected_ty(),
+ None => {
+ self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
+ expr: tgt_expr,
+ is_break: true,
+ bad_value_break: true,
+ });
+ self.err_ty()
+ }
+ },
+ None => self.err_ty(),
+ };
+ self.infer_expr_inner(expr, &Expectation::HasType(opt_coerce_to))
} else {
TyBuilder::unit()
};
match find_breakable(&mut self.breakables, label.as_ref()) {
- Some(ctxt) => {
- // avoiding the borrowck
- let mut coerce = mem::replace(
- &mut ctxt.coerce,
- CoerceMany::new(expected.coercion_target_type(&mut self.table)),
- );
-
- // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
- coerce.coerce(self, *expr, &val_ty);
-
- let ctxt = find_breakable(&mut self.breakables, label.as_ref())
- .expect("breakable stack changed during coercion");
- ctxt.coerce = coerce;
- ctxt.may_break = true;
- }
+ Some(ctxt) => match ctxt.coerce.take() {
+ Some(mut coerce) => {
+ coerce.coerce(self, *expr, &val_ty);
+
+ // Avoiding borrowck
+ let ctxt = find_breakable(&mut self.breakables, label.as_ref())
+ .expect("breakable stack changed during coercion");
+ ctxt.may_break = true;
+ ctxt.coerce = Some(coerce);
+ }
+ None => ctxt.may_break = true,
+ },
None => {
self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
expr: tgt_expr,
is_break: true,
+ bad_value_break: false,
});
}
}
self.result.standard_types.never.clone()
}
- Expr::Return { expr } => {
- if let Some(expr) = expr {
- self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
- } else {
- let unit = TyBuilder::unit();
- let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
- }
- self.result.standard_types.never.clone()
- }
+ &Expr::Return { expr } => self.infer_expr_return(expr),
Expr::Yield { expr } => {
if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() {
if let Some(expr) = expr {
@@ -483,7 +555,7 @@ impl<'a> InferenceContext<'a> {
}
Expr::Yeet { expr } => {
if let &Some(expr) = expr {
- self.infer_expr_inner(expr, &Expectation::None);
+ self.infer_expr_no_expect(expr);
}
self.result.standard_types.never.clone()
}
@@ -524,71 +596,7 @@ impl<'a> InferenceContext<'a> {
}
ty
}
- Expr::Field { expr, name } => {
- let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
-
- let mut autoderef = Autoderef::new(&mut self.table, receiver_ty);
- let mut private_field = None;
- let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| {
- let (field_id, parameters) = match derefed_ty.kind(Interner) {
- TyKind::Tuple(_, substs) => {
- return name.as_tuple_index().and_then(|idx| {
- substs
- .as_slice(Interner)
- .get(idx)
- .map(|a| a.assert_ty_ref(Interner))
- .cloned()
- });
- }
- TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
- let local_id = self.db.struct_data(*s).variant_data.field(name)?;
- let field = FieldId { parent: (*s).into(), local_id };
- (field, parameters.clone())
- }
- TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
- let local_id = self.db.union_data(*u).variant_data.field(name)?;
- let field = FieldId { parent: (*u).into(), local_id };
- (field, parameters.clone())
- }
- _ => return None,
- };
- let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
- .is_visible_from(self.db.upcast(), self.resolver.module());
- if !is_visible {
- if private_field.is_none() {
- private_field = Some(field_id);
- }
- return None;
- }
- // can't have `write_field_resolution` here because `self.table` is borrowed :(
- self.result.field_resolutions.insert(tgt_expr, field_id);
- let ty = self.db.field_types(field_id.parent)[field_id.local_id]
- .clone()
- .substitute(Interner, &parameters);
- Some(ty)
- });
- let ty = match ty {
- Some(ty) => {
- let adjustments = auto_deref_adjust_steps(&autoderef);
- self.write_expr_adj(*expr, adjustments);
- let ty = self.insert_type_vars(ty);
- let ty = self.normalize_associated_types_in(ty);
- ty
- }
- _ => {
- // Write down the first private field resolution if we found no field
- // This aids IDE features for private fields like goto def
- if let Some(field) = private_field {
- self.result.field_resolutions.insert(tgt_expr, field);
- self.result
- .diagnostics
- .push(InferenceDiagnostic::PrivateField { expr: tgt_expr, field });
- }
- self.err_ty()
- }
- };
- ty
- }
+ Expr::Field { expr, name } => self.infer_field_access(tgt_expr, *expr, name),
Expr::Await { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
@@ -611,7 +619,7 @@ impl<'a> InferenceContext<'a> {
Expr::Cast { expr, type_ref } => {
let cast_ty = self.make_ty(type_ref);
// FIXME: propagate the "castable to" expectation
- let _inner_ty = self.infer_expr_inner(*expr, &Expectation::None);
+ let _inner_ty = self.infer_expr_no_expect(*expr);
// FIXME check the cast...
cast_ty
}
@@ -807,49 +815,7 @@ impl<'a> InferenceContext<'a> {
TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
}
- Expr::Array(array) => {
- let elem_ty =
- match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
- Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
- _ => self.table.new_type_var(),
- };
- let mut coerce = CoerceMany::new(elem_ty.clone());
-
- let expected = Expectation::has_type(elem_ty.clone());
- let len = match array {
- Array::ElementList { elements, .. } => {
- for &expr in elements.iter() {
- let cur_elem_ty = self.infer_expr_inner(expr, &expected);
- coerce.coerce(self, Some(expr), &cur_elem_ty);
- }
- consteval::usize_const(Some(elements.len() as u128))
- }
- &Array::Repeat { initializer, repeat } => {
- self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
- self.infer_expr(
- repeat,
- &Expectation::HasType(
- TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
- ),
- );
-
- if let Some(g_def) = self.owner.as_generic_def_id() {
- let generics = generics(self.db.upcast(), g_def);
- consteval::eval_to_const(
- repeat,
- ParamLoweringMode::Placeholder,
- self,
- || generics,
- DebruijnIndex::INNERMOST,
- )
- } else {
- consteval::usize_const(None)
- }
- }
- };
-
- TyKind::Array(coerce.complete(), len).intern(Interner)
- }
+ Expr::Array(array) => self.infer_expr_array(array, expected),
Expr::Literal(lit) => match lit {
Literal::Bool(..) => self.result.standard_types.bool_.clone(),
Literal::String(..) => {
@@ -859,7 +825,11 @@ impl<'a> InferenceContext<'a> {
Literal::ByteString(bs) => {
let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
- let len = consteval::usize_const(Some(bs.len() as u128));
+ let len = consteval::usize_const(
+ self.db,
+ Some(bs.len() as u128),
+ self.resolver.krate(),
+ );
let array_type = TyKind::Array(byte_type, len).intern(Interner);
TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
@@ -904,6 +874,97 @@ impl<'a> InferenceContext<'a> {
ty
}
+ fn infer_expr_array(
+ &mut self,
+ array: &Array,
+ expected: &Expectation,
+ ) -> chalk_ir::Ty<Interner> {
+ let elem_ty = match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
+ Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
+ _ => self.table.new_type_var(),
+ };
+
+ let krate = self.resolver.krate();
+
+ let expected = Expectation::has_type(elem_ty.clone());
+ let (elem_ty, len) = match array {
+ Array::ElementList { elements, .. } if elements.is_empty() => {
+ (elem_ty, consteval::usize_const(self.db, Some(0), krate))
+ }
+ Array::ElementList { elements, .. } => {
+ let mut coerce = CoerceMany::new(elem_ty.clone());
+ for &expr in elements.iter() {
+ let cur_elem_ty = self.infer_expr_inner(expr, &expected);
+ coerce.coerce(self, Some(expr), &cur_elem_ty);
+ }
+ (
+ coerce.complete(self),
+ consteval::usize_const(self.db, Some(elements.len() as u128), krate),
+ )
+ }
+ &Array::Repeat { initializer, repeat } => {
+ self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty.clone()));
+ self.infer_expr(
+ repeat,
+ &Expectation::HasType(
+ TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
+ ),
+ );
+
+ (
+ elem_ty,
+ if let Some(g_def) = self.owner.as_generic_def_id() {
+ let generics = generics(self.db.upcast(), g_def);
+ consteval::eval_to_const(
+ repeat,
+ ParamLoweringMode::Placeholder,
+ self,
+ || generics,
+ DebruijnIndex::INNERMOST,
+ )
+ } else {
+ consteval::usize_const(self.db, None, krate)
+ },
+ )
+ }
+ };
+
+ TyKind::Array(elem_ty, len).intern(Interner)
+ }
+
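+ /// Infers the returned expression against the declared return type and merges the
+ /// result into `return_coercion`.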
+ pub(super) fn infer_return(&mut self, expr: ExprId) {
+ let ret_ty = self
+ .return_coercion
+ .as_mut()
+ .expect("infer_return called outside function body")
+ .expected_ty();
+ let return_expr_ty = self.infer_expr_inner(expr, &Expectation::HasType(ret_ty));
+ let mut coerce_many = self.return_coercion.take().unwrap();
+ coerce_many.coerce(self, Some(expr), &return_expr_ty);
+ self.return_coercion = Some(coerce_many);
+ }
+
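+ /// Handles a `return` expression; a bare `return` coerces `()` into the return type.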
+ fn infer_expr_return(&mut self, expr: Option<ExprId>) -> Ty {
+ match self.return_coercion {
+ Some(_) => {
+ if let Some(expr) = expr {
+ self.infer_return(expr);
+ } else {
+ let mut coerce = self.return_coercion.take().unwrap();
+ coerce.coerce_forced_unit(self);
+ self.return_coercion = Some(coerce);
+ }
+ }
+ None => {
+ // FIXME: diagnose return outside of function
+ if let Some(expr) = expr {
+ self.infer_expr_no_expect(expr);
+ }
+ }
+ }
+ self.result.standard_types.never.clone()
+ }
+
fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
if let Some(box_id) = self.resolve_boxed_box() {
let table = &mut self.table;
@@ -982,8 +1043,11 @@ impl<'a> InferenceContext<'a> {
// type and length). This should not be just an error type,
// because we are to compute the unifiability of this type and
// `rhs_ty` in the end of this function to issue type mismatches.
- _ => TyKind::Array(self.err_ty(), crate::consteval::usize_const(None))
- .intern(Interner),
+ _ => TyKind::Array(
+ self.err_ty(),
+ crate::consteval::usize_const(self.db, None, self.resolver.krate()),
+ )
+ .intern(Interner),
}
}
Expr::RecordLit { path, fields, .. } => {
@@ -1123,65 +1187,211 @@ impl<'a> InferenceContext<'a> {
expr: ExprId,
statements: &[Statement],
tail: Option<ExprId>,
+ label: Option<LabelId>,
expected: &Expectation,
) -> Ty {
- for stmt in statements {
- match stmt {
- Statement::Let { pat, type_ref, initializer, else_branch } => {
- let decl_ty = type_ref
- .as_ref()
- .map(|tr| self.make_ty(tr))
- .unwrap_or_else(|| self.err_ty());
-
- // Always use the declared type when specified
- let mut ty = decl_ty.clone();
-
- if let Some(expr) = initializer {
- let actual_ty =
- self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
- if decl_ty.is_unknown() {
- ty = actual_ty;
+ let coerce_ty = expected.coercion_target_type(&mut self.table);
+ let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, expr);
+
+ let (break_ty, ty) =
+ self.with_breakable_ctx(BreakableKind::Block, Some(coerce_ty.clone()), label, |this| {
+ for stmt in statements {
+ match stmt {
+ Statement::Let { pat, type_ref, initializer, else_branch } => {
+ let decl_ty = type_ref
+ .as_ref()
+ .map(|tr| this.make_ty(tr))
+ .unwrap_or_else(|| this.table.new_type_var());
+
+ let ty = if let Some(expr) = initializer {
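+ // As in rustc: if the pattern contains an explicit `ref` binding, don't coerce
+ // the initializer, since adjusting the borrowed place would be unsound for `ref mut`.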
+ let ty = if contains_explicit_ref_binding(&this.body, *pat) {
+ this.infer_expr(*expr, &Expectation::has_type(decl_ty.clone()))
+ } else {
+ this.infer_expr_coerce(
+ *expr,
+ &Expectation::has_type(decl_ty.clone()),
+ )
+ };
+ if type_ref.is_some() {
+ decl_ty
+ } else {
+ ty
+ }
+ } else {
+ decl_ty
+ };
+
+ this.infer_top_pat(*pat, &ty);
+
+ if let Some(expr) = else_branch {
+ let previous_diverges =
+ mem::replace(&mut this.diverges, Diverges::Maybe);
+ this.infer_expr_coerce(
+ *expr,
+ &Expectation::HasType(this.result.standard_types.never.clone()),
+ );
+ this.diverges = previous_diverges;
+ }
+ }
+ &Statement::Expr { expr, has_semi } => {
+ if has_semi {
+ this.infer_expr(expr, &Expectation::none());
+ } else {
+ this.infer_expr_coerce(
+ expr,
+ &Expectation::HasType(this.result.standard_types.unit.clone()),
+ );
+ }
}
}
+ }
- if let Some(expr) = else_branch {
- self.infer_expr_coerce(
- *expr,
- &Expectation::HasType(self.result.standard_types.never.clone()),
- );
+ // FIXME: This should make use of the breakable CoerceMany
+ if let Some(expr) = tail {
+ this.infer_expr_coerce(expr, expected)
+ } else {
+ // Citing rustc: if there is no explicit tail expression,
+ // that is typically equivalent to a tail expression
+ // of `()` -- except if the block diverges. In that
+ // case, there is no value supplied from the tail
+ // expression (assuming there are no other breaks,
+ // this implies that the type of the block will be
+ // `!`).
+ if this.diverges.is_always() {
+ // we don't even make an attempt at coercion
+ this.table.new_maybe_never_var()
+ } else if let Some(t) = expected.only_has_type(&mut this.table) {
+ if this
+ .coerce(Some(expr), &this.result.standard_types.unit.clone(), &t)
+ .is_err()
+ {
+ this.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch {
+ expected: t.clone(),
+ actual: this.result.standard_types.unit.clone(),
+ },
+ );
+ }
+ t
+ } else {
+ this.result.standard_types.unit.clone()
}
+ }
+ });
+ self.resolver.reset_to_guard(g);
+
+ break_ty.unwrap_or(ty)
+ }
- self.infer_pat(*pat, &ty, BindingMode::default());
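+ /// Resolves `name` as a (possibly autoderefed) field of `receiver_ty`. Returns the field
+ /// type, the resolved `FieldId` (`None` for tuple indices), the autoderef adjustments,
+ /// and whether the field is visible.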
+ fn lookup_field(
+ &mut self,
+ receiver_ty: &Ty,
+ name: &Name,
+ ) -> Option<(Ty, Option<FieldId>, Vec<Adjustment>, bool)> {
+ let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone());
+ let mut private_field = None;
+ let res = autoderef.by_ref().find_map(|(derefed_ty, _)| {
+ let (field_id, parameters) = match derefed_ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ return name.as_tuple_index().and_then(|idx| {
+ substs
+ .as_slice(Interner)
+ .get(idx)
+ .map(|a| a.assert_ty_ref(Interner))
+ .cloned()
+ .map(|ty| (None, ty))
+ });
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
+ let local_id = self.db.struct_data(*s).variant_data.field(name)?;
+ let field = FieldId { parent: (*s).into(), local_id };
+ (field, parameters.clone())
}
- Statement::Expr { expr, .. } => {
- self.infer_expr(*expr, &Expectation::none());
+ TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
+ let local_id = self.db.union_data(*u).variant_data.field(name)?;
+ let field = FieldId { parent: (*u).into(), local_id };
+ (field, parameters.clone())
}
+ _ => return None,
+ };
+ let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
+ .is_visible_from(self.db.upcast(), self.resolver.module());
+ if !is_visible {
+ if private_field.is_none() {
+ private_field = Some((field_id, parameters));
+ }
+ return None;
}
- }
+ let ty = self.db.field_types(field_id.parent)[field_id.local_id]
+ .clone()
+ .substitute(Interner, &parameters);
+ Some((Some(field_id), ty))
+ });
- if let Some(expr) = tail {
- self.infer_expr_coerce(expr, expected)
- } else {
- // Citing rustc: if there is no explicit tail expression,
- // that is typically equivalent to a tail expression
- // of `()` -- except if the block diverges. In that
- // case, there is no value supplied from the tail
- // expression (assuming there are no other breaks,
- // this implies that the type of the block will be
- // `!`).
- if self.diverges.is_always() {
- // we don't even make an attempt at coercion
- self.table.new_maybe_never_var()
- } else if let Some(t) = expected.only_has_type(&mut self.table) {
- if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
- self.result.type_mismatches.insert(
- expr.into(),
- TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
- );
+ Some(match res {
+ Some((field_id, ty)) => {
+ let adjustments = auto_deref_adjust_steps(&autoderef);
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ (ty, field_id, adjustments, true)
+ }
+ None => {
+ let (field_id, subst) = private_field?;
+ let adjustments = auto_deref_adjust_steps(&autoderef);
+ let ty = self.db.field_types(field_id.parent)[field_id.local_id]
+ .clone()
+ .substitute(Interner, &subst);
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+
+ (ty, Some(field_id), adjustments, false)
+ }
+ })
+ }
+
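+ /// Infers a field-access expression, emitting `UnresolvedField` (noting any method with
+ /// the same name) when resolution fails.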
+ fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty {
+ let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none());
+ match self.lookup_field(&receiver_ty, name) {
+ Some((ty, field_id, adjustments, is_public)) => {
+ self.write_expr_adj(receiver, adjustments);
+ if let Some(field_id) = field_id {
+ self.result.field_resolutions.insert(tgt_expr, field_id);
}
- t
- } else {
- TyBuilder::unit()
+ if !is_public {
+ if let Some(field) = field_id {
+ // FIXME: Merge this diagnostic into UnresolvedField?
+ self.result
+ .diagnostics
+ .push(InferenceDiagnostic::PrivateField { expr: tgt_expr, field });
+ }
+ }
+ ty
+ }
+ None => {
+ // No field was found; check whether a method with the same name exists to improve the diagnostic.
+ let method_with_same_name_exists = {
+ let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+ method_resolution::lookup_method(
+ self.db,
+ &canonicalized_receiver.value,
+ self.trait_env.clone(),
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
+ VisibleFromModule::Filter(self.resolver.module()),
+ name,
+ )
+ .is_some()
+ };
+ self.result.diagnostics.push(InferenceDiagnostic::UnresolvedField {
+ expr: tgt_expr,
+ receiver: receiver_ty,
+ name: name.clone(),
+ method_with_same_name_exists,
+ });
+ self.err_ty()
}
}
}
@@ -1198,13 +1408,11 @@ impl<'a> InferenceContext<'a> {
let receiver_ty = self.infer_expr(receiver, &Expectation::none());
let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
- let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
-
let resolved = method_resolution::lookup_method(
self.db,
&canonicalized_receiver.value,
self.trait_env.clone(),
- &traits_in_scope,
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
method_name,
);
@@ -1223,11 +1431,30 @@ impl<'a> InferenceContext<'a> {
}
(ty, self.db.value_ty(func.into()), substs)
}
- None => (
- receiver_ty,
- Binders::empty(Interner, self.err_ty()),
- Substitution::empty(Interner),
- ),
+ None => {
+ let field_with_same_name_exists = match self.lookup_field(&receiver_ty, method_name)
+ {
+ Some((ty, field_id, adjustments, _public)) => {
+ self.write_expr_adj(receiver, adjustments);
+ if let Some(field_id) = field_id {
+ self.result.field_resolutions.insert(tgt_expr, field_id);
+ }
+ Some(ty)
+ }
+ None => None,
+ };
+ self.result.diagnostics.push(InferenceDiagnostic::UnresolvedMethodCall {
+ expr: tgt_expr,
+ receiver: receiver_ty.clone(),
+ name: method_name.clone(),
+ field_with_same_name: field_with_same_name_exists,
+ });
+ (
+ receiver_ty,
+ Binders::empty(Interner, self.err_ty()),
+ Substitution::empty(Interner),
+ )
+ }
};
let method_ty = method_ty.substitute(Interner, &substs);
self.register_obligations_for_call(&method_ty);
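Conversely, a failed method call now records whether a same-named field exists. Another intentionally invalid sketch:

```rust
struct S {
    callback: fn() -> u32,
}

fn f(s: S) {
    // `callback` is a field, not a method, so method resolution fails; the
    // UnresolvedMethodCall diagnostic carries
    // `field_with_same_name: Some(fn() -> u32)`, so the IDE can suggest the
    // parenthesized call `(s.callback)()`.
    let _ = s.callback();
}
```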
@@ -1636,16 +1863,16 @@ impl<'a> InferenceContext<'a> {
fn with_breakable_ctx<T>(
&mut self,
kind: BreakableKind,
- ty: Ty,
+ ty: Option<Ty>,
label: Option<LabelId>,
cb: impl FnOnce(&mut Self) -> T,
) -> (Option<Ty>, T) {
self.breakables.push({
let label = label.map(|label| self.body[label].name.clone());
- BreakableContext { kind, may_break: false, coerce: CoerceMany::new(ty), label }
+ BreakableContext { kind, may_break: false, coerce: ty.map(CoerceMany::new), label }
});
let res = cb(self);
let ctx = self.breakables.pop().expect("breakable stack broken");
- (ctx.may_break.then(|| ctx.coerce.complete()), res)
+ (if ctx.may_break { ctx.coerce.map(|ctx| ctx.complete(self)) } else { None }, res)
}
}
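The switch to `Option<Ty>` in `with_breakable_ctx` reflects that only some breakable constructs carry a `break` value to coerce. In plain Rust terms:

```rust
fn f() {
    // `loop` gets Some(ty): its type is inferred by coercing the types of
    // all `break` expressions together (here, u32).
    let x = loop {
        break 1u32;
    };

    // `while` gets None: `break` with a value is not allowed here, so there
    // is no CoerceMany to maintain and the loop's type is always `()`.
    while x < 10 {
        break;
    }
}
```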
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
index f154dac8e..5f839fc30 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs
@@ -4,22 +4,57 @@ use std::iter::repeat_with;
use chalk_ir::Mutability;
use hir_def::{
- expr::{BindingAnnotation, Expr, Literal, Pat, PatId},
+ body::Body,
+ expr::{Binding, BindingAnnotation, BindingId, Expr, ExprId, ExprOrPatId, Literal, Pat, PatId},
path::Path,
- type_ref::ConstScalar,
};
use hir_expand::name::Name;
use crate::{
- consteval::intern_const_scalar,
+ consteval::{try_const_usize, usize_const},
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
lower::lower_to_chalk_mutability,
primitive::UintTy,
- static_lifetime, ConcreteConst, ConstValue, Interner, Scalar, Substitution, Ty, TyBuilder,
- TyExt, TyKind,
+ static_lifetime, Interner, Scalar, Substitution, Ty, TyBuilder, TyExt, TyKind,
};
-use super::PatLike;
+/// Used to generalize patterns and assignee expressions.
+pub(super) trait PatLike: Into<ExprOrPatId> + Copy {
+ type BindingMode: Copy;
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ default_bm: Self::BindingMode,
+ ) -> Ty;
+}
+
+impl PatLike for ExprId {
+ type BindingMode = ();
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ (): Self::BindingMode,
+ ) -> Ty {
+ this.infer_assignee_expr(id, expected_ty)
+ }
+}
+
+impl PatLike for PatId {
+ type BindingMode = BindingMode;
+
+ fn infer(
+ this: &mut InferenceContext<'_>,
+ id: Self,
+ expected_ty: &Ty,
+ default_bm: Self::BindingMode,
+ ) -> Ty {
+ this.infer_pat(id, expected_ty, default_bm)
+ }
+}
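The two `PatLike` implementations let the same generic inference helpers serve both match-style patterns and destructuring assignments:

```rust
fn f(pair: (i32, i32)) {
    // Inferred through `PatLike for PatId`: the left-hand side is a pattern.
    let (a, b) = pair;

    // Inferred through `PatLike for ExprId`: the left-hand side is an
    // assignee expression (destructuring assignment, stable since Rust 1.59).
    let (mut x, mut y) = (0, 0);
    (x, y) = (a, b);
    let _ = (x, y);
}
```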
impl<'a> InferenceContext<'a> {
/// Infers type for tuple struct pattern or its corresponding assignee expression.
@@ -112,6 +147,7 @@ impl<'a> InferenceContext<'a> {
ellipsis: Option<usize>,
subs: &[T],
) -> Ty {
+ let expected = self.resolve_ty_shallow(expected);
let expectations = match expected.as_tuple() {
Some(parameters) => &*parameters.as_slice(Interner),
_ => &[],
@@ -145,12 +181,11 @@ impl<'a> InferenceContext<'a> {
.intern(Interner)
}
- pub(super) fn infer_pat(
- &mut self,
- pat: PatId,
- expected: &Ty,
- mut default_bm: BindingMode,
- ) -> Ty {
+ pub(super) fn infer_top_pat(&mut self, pat: PatId, expected: &Ty) {
+ self.infer_pat(pat, expected, BindingMode::default());
+ }
+
+ fn infer_pat(&mut self, pat: PatId, expected: &Ty, mut default_bm: BindingMode) -> Ty {
let mut expected = self.resolve_ty_shallow(expected);
if is_non_ref_pat(self.body, pat) {
@@ -185,30 +220,17 @@ impl<'a> InferenceContext<'a> {
self.infer_tuple_pat_like(&expected, default_bm, *ellipsis, args)
}
Pat::Or(pats) => {
- if let Some((first_pat, rest)) = pats.split_first() {
- let ty = self.infer_pat(*first_pat, &expected, default_bm);
- for pat in rest {
- self.infer_pat(*pat, &expected, default_bm);
- }
- ty
- } else {
- self.err_ty()
+ for pat in pats.iter() {
+ self.infer_pat(*pat, &expected, default_bm);
}
+ expected.clone()
}
- Pat::Ref { pat, mutability } => {
- let mutability = lower_to_chalk_mutability(*mutability);
- let expectation = match expected.as_reference() {
- Some((inner_ty, _lifetime, exp_mut)) => {
- if mutability != exp_mut {
- // FIXME: emit type error?
- }
- inner_ty.clone()
- }
- _ => self.result.standard_types.unknown.clone(),
- };
- let subty = self.infer_pat(*pat, &expectation, default_bm);
- TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
- }
+ &Pat::Ref { pat, mutability } => self.infer_ref_pat(
+ pat,
+ lower_to_chalk_mutability(mutability),
+ &expected,
+ default_bm,
+ ),
Pat::TupleStruct { path: p, args: subpats, ellipsis } => self
.infer_tuple_struct_pat_like(
p.as_deref(),
@@ -223,72 +245,14 @@ impl<'a> InferenceContext<'a> {
self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs)
}
Pat::Path(path) => {
- // FIXME use correct resolver for the surrounding expression
- let resolver = self.resolver.clone();
- self.infer_path(&resolver, path, pat.into()).unwrap_or_else(|| self.err_ty())
+ // FIXME update resolver for the surrounding expression
+ self.infer_path(path, pat.into()).unwrap_or_else(|| self.err_ty())
}
- Pat::Bind { mode, name: _, subpat } => {
- let mode = if mode == &BindingAnnotation::Unannotated {
- default_bm
- } else {
- BindingMode::convert(*mode)
- };
- self.result.pat_binding_modes.insert(pat, mode);
-
- let inner_ty = match subpat {
- Some(subpat) => self.infer_pat(*subpat, &expected, default_bm),
- None => expected,
- };
- let inner_ty = self.insert_type_vars_shallow(inner_ty);
-
- let bound_ty = match mode {
- BindingMode::Ref(mutability) => {
- TyKind::Ref(mutability, static_lifetime(), inner_ty.clone())
- .intern(Interner)
- }
- BindingMode::Move => inner_ty.clone(),
- };
- self.write_pat_ty(pat, bound_ty);
- return inner_ty;
+ Pat::Bind { id, subpat } => {
+ return self.infer_bind_pat(pat, *id, default_bm, *subpat, &expected);
}
Pat::Slice { prefix, slice, suffix } => {
- let elem_ty = match expected.kind(Interner) {
- TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
- _ => self.err_ty(),
- };
-
- for &pat_id in prefix.iter().chain(suffix.iter()) {
- self.infer_pat(pat_id, &elem_ty, default_bm);
- }
-
- if let &Some(slice_pat_id) = slice {
- let rest_pat_ty = match expected.kind(Interner) {
- TyKind::Array(_, length) => {
- let len = match length.data(Interner).value {
- ConstValue::Concrete(ConcreteConst {
- interned: ConstScalar::UInt(len),
- }) => len.checked_sub((prefix.len() + suffix.len()) as u128),
- _ => None,
- };
- TyKind::Array(
- elem_ty.clone(),
- intern_const_scalar(
- len.map_or(ConstScalar::Unknown, |len| ConstScalar::UInt(len)),
- TyBuilder::usize(),
- ),
- )
- }
- _ => TyKind::Slice(elem_ty.clone()),
- }
- .intern(Interner);
- self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
- }
-
- match expected.kind(Interner) {
- TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
- _ => TyKind::Slice(elem_ty),
- }
- .intern(Interner)
+ self.infer_slice_pat(&expected, prefix, slice, suffix, default_bm)
}
Pat::Wild => expected.clone(),
Pat::Range { start, end } => {
@@ -296,27 +260,10 @@ impl<'a> InferenceContext<'a> {
self.infer_expr(*end, &Expectation::has_type(start_ty))
}
&Pat::Lit(expr) => {
- // FIXME: using `Option` here is a workaround until we can use if-let chains in stable.
- let mut pat_ty = None;
-
- // Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`.
- if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] {
- if let Some((inner, ..)) = expected.as_reference() {
- let inner = self.resolve_ty_shallow(inner);
- if matches!(inner.kind(Interner), TyKind::Slice(_)) {
- let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
- let slice_ty = TyKind::Slice(elem_ty).intern(Interner);
- let ty = TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty)
- .intern(Interner);
- self.write_expr_ty(expr, ty.clone());
- pat_ty = Some(ty);
- }
- }
- }
-
- pat_ty.unwrap_or_else(|| {
- self.infer_expr(expr, &Expectation::has_type(expected.clone()))
- })
+                // Don't emit type mismatches again; the expression lowering already did that.
+ let ty = self.infer_lit_pat(expr, &expected);
+ self.write_pat_ty(pat, ty.clone());
+ return ty;
}
Pat::Box { inner } => match self.resolve_boxed_box() {
Some(box_adt) => {
@@ -345,7 +292,8 @@ impl<'a> InferenceContext<'a> {
};
// use a new type variable if we got error type here
let ty = self.insert_type_vars_shallow(ty);
- if !self.unify(&ty, &expected) {
+        // FIXME: This never check is odd, but currently required by the way we do inference
+ if !expected.is_never() && !self.unify(&ty, &expected) {
self.result
.type_mismatches
.insert(pat.into(), TypeMismatch { expected, actual: ty.clone() });
@@ -353,6 +301,111 @@ impl<'a> InferenceContext<'a> {
self.write_pat_ty(pat, ty.clone());
ty
}
+
+ fn infer_ref_pat(
+ &mut self,
+ pat: PatId,
+ mutability: Mutability,
+ expected: &Ty,
+ default_bm: BindingMode,
+ ) -> Ty {
+ let expectation = match expected.as_reference() {
+ Some((inner_ty, _lifetime, _exp_mut)) => inner_ty.clone(),
+ _ => self.result.standard_types.unknown.clone(),
+ };
+ let subty = self.infer_pat(pat, &expectation, default_bm);
+ TyKind::Ref(mutability, static_lifetime(), subty).intern(Interner)
+ }
+
+ fn infer_bind_pat(
+ &mut self,
+ pat: PatId,
+ binding: BindingId,
+ default_bm: BindingMode,
+ subpat: Option<PatId>,
+ expected: &Ty,
+ ) -> Ty {
+ let Binding { mode, .. } = self.body.bindings[binding];
+ let mode = if mode == BindingAnnotation::Unannotated {
+ default_bm
+ } else {
+ BindingMode::convert(mode)
+ };
+ self.result.pat_binding_modes.insert(pat, mode);
+
+ let inner_ty = match subpat {
+ Some(subpat) => self.infer_pat(subpat, &expected, default_bm),
+ None => expected.clone(),
+ };
+ let inner_ty = self.insert_type_vars_shallow(inner_ty);
+
+ let bound_ty = match mode {
+ BindingMode::Ref(mutability) => {
+ TyKind::Ref(mutability, static_lifetime(), inner_ty.clone()).intern(Interner)
+ }
+ BindingMode::Move => inner_ty.clone(),
+ };
+ self.write_pat_ty(pat, bound_ty.clone());
+ self.write_binding_ty(binding, bound_ty);
+ return inner_ty;
+ }
+
+ fn infer_slice_pat(
+ &mut self,
+ expected: &Ty,
+ prefix: &[PatId],
+ slice: &Option<PatId>,
+ suffix: &[PatId],
+ default_bm: BindingMode,
+ ) -> Ty {
+ let elem_ty = match expected.kind(Interner) {
+ TyKind::Array(st, _) | TyKind::Slice(st) => st.clone(),
+ _ => self.err_ty(),
+ };
+
+ for &pat_id in prefix.iter().chain(suffix.iter()) {
+ self.infer_pat(pat_id, &elem_ty, default_bm);
+ }
+
+ if let &Some(slice_pat_id) = slice {
+ let rest_pat_ty = match expected.kind(Interner) {
+ TyKind::Array(_, length) => {
+ let len = try_const_usize(length);
+ let len =
+ len.and_then(|len| len.checked_sub((prefix.len() + suffix.len()) as u128));
+ TyKind::Array(elem_ty.clone(), usize_const(self.db, len, self.resolver.krate()))
+ }
+ _ => TyKind::Slice(elem_ty.clone()),
+ }
+ .intern(Interner);
+ self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
+ }
+
+ match expected.kind(Interner) {
+ TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
+ _ => TyKind::Slice(elem_ty),
+ }
+ .intern(Interner)
+ }
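The rest-pattern length arithmetic above (`len - (prefix + suffix)`) is observable in ordinary code:

```rust
fn f(arr: [u8; 5]) {
    // The scrutinee is an array of length 5 with one prefix and one suffix
    // element, so the rest pattern gets type [u8; 5 - (1 + 1)] = [u8; 3].
    let [first, rest @ .., last] = arr;
    let _: [u8; 3] = rest;
    let _ = (first, last);
}
```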
+
+ fn infer_lit_pat(&mut self, expr: ExprId, expected: &Ty) -> Ty {
+ // Like slice patterns, byte string patterns can denote both `&[u8; N]` and `&[u8]`.
+ if let Expr::Literal(Literal::ByteString(_)) = self.body[expr] {
+ if let Some((inner, ..)) = expected.as_reference() {
+ let inner = self.resolve_ty_shallow(inner);
+ if matches!(inner.kind(Interner), TyKind::Slice(_)) {
+ let elem_ty = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
+ let slice_ty = TyKind::Slice(elem_ty).intern(Interner);
+ let ty =
+ TyKind::Ref(Mutability::Not, static_lifetime(), slice_ty).intern(Interner);
+ self.write_expr_ty(expr, ty.clone());
+ return ty;
+ }
+ }
+ }
+
+ self.infer_expr(expr, &Expectation::has_type(expected.clone()))
+ }
}
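The byte-string special case in `infer_lit_pat` mirrors slice patterns: a byte-string literal pattern can match both `&[u8; N]` and `&[u8]`:

```rust
fn f(bytes: &[u8]) {
    match bytes {
        // The expected type is a reference to a slice, so the pattern is
        // typed `&[u8]` here rather than `&[u8; 3]`.
        b"abc" => {}
        _ => {}
    }
}
```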
fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
@@ -369,11 +422,22 @@ fn is_non_ref_pat(body: &hir_def::body::Body, pat: PatId) -> bool {
Pat::Lit(expr) => {
!matches!(body[*expr], Expr::Literal(Literal::String(..) | Literal::ByteString(..)))
}
- Pat::Bind {
- mode: BindingAnnotation::Mutable | BindingAnnotation::Unannotated,
- subpat: Some(subpat),
- ..
- } => is_non_ref_pat(body, *subpat),
+ Pat::Bind { id, subpat: Some(subpat), .. }
+ if matches!(
+ body.bindings[*id].mode,
+ BindingAnnotation::Mutable | BindingAnnotation::Unannotated
+ ) =>
+ {
+ is_non_ref_pat(body, *subpat)
+ }
Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Box { .. } | Pat::Missing => false,
}
}
+
+pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool {
+ let mut res = false;
+ body.walk_pats(pat_id, &mut |pat| {
+ res |= matches!(pat, Pat::Bind { id, .. } if body.bindings[*id].mode == BindingAnnotation::Ref);
+ });
+ res
+}
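`contains_explicit_ref_binding` walks all subpatterns looking for an explicit `ref` annotation; as in rustc, the presence of such a binding changes how match inference treats the scrutinee (it is checked as a place expression). For example:

```rust
fn f(opt: &Option<String>) {
    match *opt {
        // `ref s` is an explicit by-reference binding, so
        // contains_explicit_ref_binding returns true for this pattern.
        Some(ref s) => drop(s),
        None => {}
    }
}
```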
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
index 0a8527afb..2267fedaa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -3,7 +3,7 @@
use chalk_ir::cast::Cast;
use hir_def::{
path::{Path, PathSegment},
- resolver::{ResolveValueResult, Resolver, TypeNs, ValueNs},
+ resolver::{ResolveValueResult, TypeNs, ValueNs},
AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup,
};
use hir_expand::name::Name;
@@ -21,55 +21,42 @@ use crate::{
use super::{ExprOrPatId, InferenceContext, TraitRef};
impl<'a> InferenceContext<'a> {
- pub(super) fn infer_path(
- &mut self,
- resolver: &Resolver,
- path: &Path,
- id: ExprOrPatId,
- ) -> Option<Ty> {
- let ty = self.resolve_value_path(resolver, path, id)?;
+ pub(super) fn infer_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
+ let ty = self.resolve_value_path(path, id)?;
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
Some(ty)
}
- fn resolve_value_path(
- &mut self,
- resolver: &Resolver,
- path: &Path,
- id: ExprOrPatId,
- ) -> Option<Ty> {
+ fn resolve_value_path(&mut self, path: &Path, id: ExprOrPatId) -> Option<Ty> {
let (value, self_subst) = if let Some(type_ref) = path.type_anchor() {
- if path.segments().is_empty() {
- // This can't actually happen syntax-wise
- return None;
- }
+ let Some(last) = path.segments().last() else { return None };
let ty = self.make_ty(type_ref);
let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
- let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
+ let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
- self.resolve_ty_assoc_item(
- ty,
- path.segments().last().expect("path had at least one segment").name,
- id,
- )?
+ self.resolve_ty_assoc_item(ty, last.name, id).map(|(it, substs)| (it, Some(substs)))?
} else {
+ // FIXME: report error, unresolved first path segment
let value_or_partial =
- resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
+ self.resolver.resolve_path_in_value_ns(self.db.upcast(), path.mod_path())?;
match value_or_partial {
ResolveValueResult::ValueNs(it) => (it, None),
- ResolveValueResult::Partial(def, remaining_index) => {
- self.resolve_assoc_item(def, path, remaining_index, id)?
- }
+ ResolveValueResult::Partial(def, remaining_index) => self
+ .resolve_assoc_item(def, path, remaining_index, id)
+ .map(|(it, substs)| (it, Some(substs)))?,
}
};
let typable: ValueTyDefId = match value {
- ValueNs::LocalBinding(pat) => {
- let ty = self.result.type_of_pat.get(pat)?.clone();
- return Some(ty);
- }
+ ValueNs::LocalBinding(pat) => match self.result.type_of_binding.get(pat) {
+ Some(ty) => return Some(ty.clone()),
+ None => {
+ never!("uninferred pattern?");
+ return None;
+ }
+ },
ValueNs::FunctionId(it) => it.into(),
ValueNs::ConstId(it) => it.into(),
ValueNs::StaticId(it) => it.into(),
@@ -91,7 +78,7 @@ impl<'a> InferenceContext<'a> {
let ty = self.db.value_ty(struct_id.into()).substitute(Interner, &substs);
return Some(ty);
} else {
- // FIXME: diagnostic, invalid Self reference
+ // FIXME: report error, invalid Self reference
return None;
}
}
@@ -126,7 +113,7 @@ impl<'a> InferenceContext<'a> {
path: &Path,
remaining_index: usize,
id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substitution>)> {
+ ) -> Option<(ValueNs, Substitution)> {
assert!(remaining_index < path.segments().len());
// there may be more intermediate segments between the resolved one and
// the end. Only the last segment needs to be resolved to a value; from
@@ -179,7 +166,7 @@ impl<'a> InferenceContext<'a> {
trait_ref: TraitRef,
segment: PathSegment<'_>,
id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substitution>)> {
+ ) -> Option<(ValueNs, Substitution)> {
let trait_ = trait_ref.hir_trait_id();
let item =
self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| {
@@ -215,7 +202,7 @@ impl<'a> InferenceContext<'a> {
};
self.write_assoc_resolution(id, item, trait_ref.substitution.clone());
- Some((def, Some(trait_ref.substitution)))
+ Some((def, trait_ref.substitution))
}
fn resolve_ty_assoc_item(
@@ -223,7 +210,7 @@ impl<'a> InferenceContext<'a> {
ty: Ty,
name: &Name,
id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substitution>)> {
+ ) -> Option<(ValueNs, Substitution)> {
if let TyKind::Error = ty.kind(Interner) {
return None;
}
@@ -233,70 +220,66 @@ impl<'a> InferenceContext<'a> {
}
let canonical_ty = self.canonicalize(ty.clone());
- let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
let mut not_visible = None;
let res = method_resolution::iterate_method_candidates(
&canonical_ty.value,
self.db,
self.table.trait_env.clone(),
- &traits_in_scope,
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
VisibleFromModule::Filter(self.resolver.module()),
Some(name),
method_resolution::LookupMode::Path,
|_ty, item, visible| {
- let (def, container) = match item {
- AssocItemId::FunctionId(f) => {
- (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
- }
- AssocItemId::ConstId(c) => {
- (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container)
- }
- AssocItemId::TypeAliasId(_) => unreachable!(),
- };
- let substs = match container {
- ItemContainerId::ImplId(impl_id) => {
- let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
- .fill_with_inference_vars(&mut self.table)
- .build();
- let impl_self_ty =
- self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
- self.unify(&impl_self_ty, &ty);
- impl_substs
- }
- ItemContainerId::TraitId(trait_) => {
- // we're picking this method
- let trait_ref = TyBuilder::trait_ref(self.db, trait_)
- .push(ty.clone())
- .fill_with_inference_vars(&mut self.table)
- .build();
- self.push_obligation(trait_ref.clone().cast(Interner));
- trait_ref.substitution
- }
- ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
- never!("assoc item contained in module/extern block");
- return None;
- }
- };
-
if visible {
- Some((def, item, Some(substs), true))
+ Some((item, true))
} else {
if not_visible.is_none() {
- not_visible = Some((def, item, Some(substs), false));
+ not_visible = Some((item, false));
}
None
}
},
);
let res = res.or(not_visible);
- if let Some((_, item, Some(ref substs), visible)) = res {
- self.write_assoc_resolution(id, item, substs.clone());
- if !visible {
- self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item })
+ let (item, visible) = res?;
+
+ let (def, container) = match item {
+ AssocItemId::FunctionId(f) => {
+ (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container)
+ }
+ AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db.upcast()).container),
+ AssocItemId::TypeAliasId(_) => unreachable!(),
+ };
+ let substs = match container {
+ ItemContainerId::ImplId(impl_id) => {
+ let impl_substs = TyBuilder::subst_for_def(self.db, impl_id, None)
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ let impl_self_ty = self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs);
+ self.unify(&impl_self_ty, &ty);
+ impl_substs
}
+ ItemContainerId::TraitId(trait_) => {
+ // we're picking this method
+ let trait_ref = TyBuilder::trait_ref(self.db, trait_)
+ .push(ty.clone())
+ .fill_with_inference_vars(&mut self.table)
+ .build();
+ self.push_obligation(trait_ref.clone().cast(Interner));
+ trait_ref.substitution
+ }
+ ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+ never!("assoc item contained in module/extern block");
+ return None;
+ }
+ };
+
+ self.write_assoc_resolution(id, item, substs.clone());
+ if !visible {
+ self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item });
}
- res.map(|(def, _, substs, _)| (def, substs))
+ Some((def, substs))
}
fn resolve_enum_variant_on_ty(
@@ -304,7 +287,7 @@ impl<'a> InferenceContext<'a> {
ty: &Ty,
name: &Name,
id: ExprOrPatId,
- ) -> Option<(ValueNs, Option<Substitution>)> {
+ ) -> Option<(ValueNs, Substitution)> {
let ty = self.resolve_ty_shallow(ty);
let (enum_id, subst) = match ty.as_adt() {
Some((AdtId::EnumId(e), subst)) => (e, subst),
@@ -314,6 +297,6 @@ impl<'a> InferenceContext<'a> {
let local_id = enum_data.variant(name)?;
let variant = EnumVariantId { parent: enum_id, local_id };
self.write_variant_resolution(id, variant.into());
- Some((ValueNs::EnumVariantId(variant), Some(subst.clone())))
+ Some((ValueNs::EnumVariantId(variant), subst.clone()))
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index 46ed3533c..504f0743a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -704,14 +704,13 @@ impl<'a> fmt::Debug for InferenceTable<'a> {
mod resolve {
use super::InferenceTable;
use crate::{
- ConcreteConst, Const, ConstData, ConstValue, DebruijnIndex, GenericArg, InferenceVar,
- Interner, Lifetime, Ty, TyVariableKind, VariableKind,
+ ConcreteConst, Const, ConstData, ConstScalar, ConstValue, DebruijnIndex, GenericArg,
+ InferenceVar, Interner, Lifetime, Ty, TyVariableKind, VariableKind,
};
use chalk_ir::{
cast::Cast,
fold::{TypeFoldable, TypeFolder},
};
- use hir_def::type_ref::ConstScalar;
#[derive(chalk_derive::FallibleTypeFolder)]
#[has_interner(Interner)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
index 0c547192a..36af78153 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/inhabitedness.rs
@@ -6,12 +6,12 @@ use chalk_ir::{
DebruijnIndex,
};
use hir_def::{
- adt::VariantData, attr::Attrs, type_ref::ConstScalar, visibility::Visibility, AdtId,
- EnumVariantId, HasModule, Lookup, ModuleId, VariantId,
+ adt::VariantData, attr::Attrs, visibility::Visibility, AdtId, EnumVariantId, HasModule, Lookup,
+ ModuleId, VariantId,
};
use crate::{
- db::HirDatabase, Binders, ConcreteConst, Const, ConstValue, Interner, Substitution, Ty, TyKind,
+ consteval::try_const_usize, db::HirDatabase, Binders, Interner, Substitution, Ty, TyKind,
};
/// Checks whether a type is visibly uninhabited from a particular module.
@@ -69,7 +69,7 @@ impl TypeVisitor<Interner> for UninhabitedFrom<'_> {
TyKind::Adt(adt, subst) => self.visit_adt(adt.0, subst),
TyKind::Never => BREAK_VISIBLY_UNINHABITED,
TyKind::Tuple(..) => ty.super_visit_with(self, outer_binder),
- TyKind::Array(item_ty, len) => match try_usize_const(len) {
+ TyKind::Array(item_ty, len) => match try_const_usize(len) {
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
},
@@ -160,14 +160,3 @@ impl UninhabitedFrom<'_> {
}
}
}
-
-fn try_usize_const(c: &Const) -> Option<u128> {
- let data = &c.data(Interner);
- if data.ty.kind(Interner) != &TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize)) {
- return None;
- }
- match data.value {
- ConstValue::Concrete(ConcreteConst { interned: ConstScalar::UInt(value) }) => Some(value),
- _ => None,
- }
-}
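The inhabitedness rule backed by `try_const_usize` is worth spelling out: an array type is visibly uninhabited only when its element type is uninhabited and its length is known to be at least 1; a zero or unknown length is treated as inhabited:

```rust
enum Never {}

// `[Never; 0]` has exactly one value (the empty array), so this is fine.
fn takes_empty(_x: [Never; 0]) {}

// `[Never; 1]` is visibly uninhabited: no argument for this function can
// ever be constructed, so code after a call to it is unreachable.
fn takes_one(_x: [Never; 1]) -> ! {
    unreachable!()
}
```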
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
index 7bf73560c..aea7e9762 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
@@ -1,10 +1,10 @@
//! Implementation of the Chalk `Interner` trait, which allows customizing the
//! representation of the various objects Chalk deals with (types, goals etc.).
-use crate::{chalk_db, tls, GenericArg};
+use crate::{chalk_db, tls, ConstScalar, GenericArg};
use base_db::salsa::InternId;
use chalk_ir::{Goal, GoalData};
-use hir_def::{type_ref::ConstScalar, TypeAliasId};
+use hir_def::TypeAliasId;
use intern::{impl_internable, Interned};
use smallvec::SmallVec;
use std::{fmt, sync::Arc};
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index f21b4f84c..b95bb01fc 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -11,7 +11,7 @@ use hir_def::{
};
use stdx::never;
-use crate::{db::HirDatabase, Interner, Substitution, Ty};
+use crate::{consteval::try_const_usize, db::HirDatabase, Interner, Substitution, Ty};
use self::adt::struct_variant_idx;
pub use self::{
@@ -122,17 +122,9 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)?
}
TyKind::Array(element, count) => {
- let count = match count.data(Interner).value {
- chalk_ir::ConstValue::Concrete(c) => match c.interned {
- hir_def::type_ref::ConstScalar::Int(x) => x as u64,
- hir_def::type_ref::ConstScalar::UInt(x) => x as u64,
- hir_def::type_ref::ConstScalar::Unknown => {
- user_error!("unknown const generic parameter")
- }
- _ => user_error!("mismatched type of const generic parameter"),
- },
- _ => return Err(LayoutError::HasPlaceholder),
- };
+ let count = try_const_usize(&count).ok_or(LayoutError::UserError(
+ "mismatched type of const generic parameter".to_string(),
+ ))? as u64;
let element = layout_of_ty(db, element, krate)?;
let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index cb7968c14..b22d0fe8d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -76,17 +76,8 @@ pub fn layout_of_adt_query(
|min, max| Integer::repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)),
variants.iter_enumerated().filter_map(|(id, _)| {
let AdtId::EnumId(e) = def else { return None };
- let d = match db
- .const_eval_variant(EnumVariantId { parent: e, local_id: id.0 })
- .ok()?
- {
- crate::consteval::ComputedExpr::Literal(l) => match l {
- hir_def::expr::Literal::Int(i, _) => i,
- hir_def::expr::Literal::Uint(i, _) => i as i128,
- _ => return None,
- },
- _ => return None,
- };
+ let d =
+ db.const_eval_discriminant(EnumVariantId { parent: e, local_id: id.0 }).ok()?;
Some((id, d))
}),
// FIXME: The current code for niche-filling relies on variant indices
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index 067bdc960..a8971fde3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -65,25 +65,17 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
})
.unwrap();
let hir_body = db.body(adt_id.into());
- let pat = hir_body
- .pats
- .iter()
- .find(|x| match x.1 {
- hir_def::expr::Pat::Bind { name, .. } => name.to_smol_str() == "goal",
- _ => false,
- })
- .unwrap()
- .0;
+ let b = hir_body.bindings.iter().find(|x| x.1.name.to_smol_str() == "goal").unwrap().0;
let infer = db.infer(adt_id.into());
- let goal_ty = infer.type_of_pat[pat].clone();
+ let goal_ty = infer.type_of_binding[b].clone();
layout_of_ty(&db, &goal_ty, module_id.krate())
}
#[track_caller]
fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
let l = eval_goal(ra_fixture, minicore).unwrap();
- assert_eq!(l.size.bytes(), size);
- assert_eq!(l.align.abi.bytes(), align);
+ assert_eq!(l.size.bytes(), size, "size mismatch");
+ assert_eq!(l.align.abi.bytes(), align, "align mismatch");
}
#[track_caller]
@@ -300,4 +292,9 @@ fn enums_with_discriminants() {
C, // implicitly becomes 256, so we need two bytes
}
}
+ size_and_align! {
+ enum Goal {
+ A = 1, // This one is (perhaps surprisingly) zero sized.
+ }
+ }
}
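The new test documents a corner of enum layout: a single-variant enum needs no runtime discriminant storage, regardless of the declared discriminant value, so it is a ZST:

```rust
enum Goal {
    A = 1,
}

fn main() {
    // With only one variant there is nothing to tell apart at runtime; the
    // explicit `= 1` fixes the value observed by casts, not the size.
    assert_eq!(std::mem::size_of::<Goal>(), 0);
}
```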
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 59a5ef8c1..9c63d67ab 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -13,6 +13,7 @@ mod builder;
mod chalk_db;
mod chalk_ext;
pub mod consteval;
+pub mod mir;
mod infer;
mod inhabitedness;
mod interner;
@@ -34,7 +35,7 @@ mod tests;
#[cfg(test)]
mod test_db;
-use std::sync::Arc;
+use std::{collections::HashMap, hash::Hash, sync::Arc};
use chalk_ir::{
fold::{Shift, TypeFoldable},
@@ -42,10 +43,11 @@ use chalk_ir::{
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
NoSolution, TyData,
};
+use either::Either;
use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
use hir_expand::name;
-use itertools::Either;
use la_arena::{Arena, Idx};
+use mir::MirEvalError;
use rustc_hash::FxHashSet;
use traits::FnTrait;
use utils::Generics;
@@ -145,6 +147,49 @@ pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
pub type Guidance = chalk_solve::Guidance<Interner>;
pub type WhereClause = chalk_ir::WhereClause<Interner>;
+/// A constant can contain references to other data. The memory map's job is to
+/// hold the necessary bits of memory from the const-eval session to keep the
+/// constant meaningful.
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+pub struct MemoryMap(pub HashMap<usize, Vec<u8>>);
+
+impl MemoryMap {
+ fn insert(&mut self, addr: usize, x: Vec<u8>) {
+ self.0.insert(addr, x);
+ }
+
+    /// This function converts each address with a function `f`, which receives the byte interval
+    /// at that address and assigns it a new address. It is useful when you want to load a constant
+    /// with a memory map into new memory: pass an allocator function as `f` and it will return a
+    /// mapping of old addresses to new addresses.
+ fn transform_addresses(
+ &self,
+ mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>,
+ ) -> Result<HashMap<usize, usize>, MirEvalError> {
+ self.0.iter().map(|x| Ok((*x.0, f(x.1)?))).collect()
+ }
+}
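A hedged, crate-internal sketch of using `transform_addresses` as a relocation pass when loading a constant into an evaluator's memory (the bump allocator here is purely illustrative, not an API of this crate):

```rust
// Hypothetical helper inside hir-ty: assign fresh addresses to each byte
// interval. The returned HashMap maps old addresses to new ones; a real
// implementation would also copy `bytes` to the new address.
fn relocate(map: &MemoryMap) -> Result<HashMap<usize, usize>, MirEvalError> {
    let mut next_addr = 0x1_0000usize; // illustrative bump pointer
    map.transform_addresses(|bytes| {
        let addr = next_addr;
        next_addr += bytes.len().max(1); // keep addresses distinct
        Ok(addr)
    })
}
```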
+
+/// A concrete constant value
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ConstScalar {
+ Bytes(Vec<u8>, MemoryMap),
+ /// Case of an unknown value that rustc might know but we don't
+ // FIXME: this is a hack to get around chalk not being able to represent unevaluatable
+ // constants
+ // https://github.com/rust-lang/rust-analyzer/pull/8813#issuecomment-840679177
+ // https://rust-lang.zulipchat.com/#narrow/stream/144729-wg-traits/topic/Handling.20non.20evaluatable.20constants'.20equality/near/238386348
+ Unknown,
+}
+
+impl Hash for ConstScalar {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ core::mem::discriminant(self).hash(state);
+ if let ConstScalar::Bytes(b, _) = self {
+ b.hash(state)
+ }
+ }
+}
+
/// Return an index of a parameter in the generic type parameter list by it's id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
generics(db.upcast(), id.parent).param_idx(id)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 299646737..23b15087e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -16,6 +16,7 @@ use chalk_ir::{
cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
};
+use either::Either;
use hir_def::{
adt::StructKind,
body::{Expander, LowerCtx},
@@ -26,16 +27,13 @@ use hir_def::{
lang_item::{lang_attr, LangItem},
path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
resolver::{HasResolver, Resolver, TypeNs},
- type_ref::{
- ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
- },
+ type_ref::{ConstRefOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef},
AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, StructId,
TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
};
use hir_expand::{name::Name, ExpandResult};
use intern::Interned;
-use itertools::Either;
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
@@ -44,7 +42,7 @@ use syntax::ast;
use crate::{
all_super_traits,
- consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic},
+ consteval::{intern_const_ref, path_to_const, unknown_const, unknown_const_as_generic},
db::HirDatabase,
make_binders,
mapping::{from_chalk_trait_id, ToChalk},
@@ -524,6 +522,10 @@ impl<'a> TyLoweringContext<'a> {
};
return (ty, None);
}
+ TypeNs::TraitAliasId(_) => {
+ // FIXME(trait_alias): Implement trait alias.
+ return (TyKind::Error.intern(Interner), None);
+ }
TypeNs::GenericParam(param_id) => {
let generics = generics(
self.db.upcast(),
@@ -879,6 +881,7 @@ impl<'a> TyLoweringContext<'a> {
) -> Option<TraitRef> {
let resolved =
match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
+ // FIXME(trait_alias): We need to handle trait alias here.
TypeNs::TraitId(tr) => tr,
_ => return None,
};
@@ -968,7 +971,7 @@ impl<'a> TyLoweringContext<'a> {
// - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
// the builtin impls are supported by Chalk, we ignore them here.
if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
- if lang == "drop" || lang == "destruct" {
+ if matches!(lang, LangItem::Drop | LangItem::Destruct) {
return false;
}
}
@@ -1444,6 +1447,7 @@ pub(crate) fn trait_environment_query(
GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
GenericDefId::AdtId(_) => None,
GenericDefId::TraitId(_) => None,
+ GenericDefId::TraitAliasId(_) => None,
GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
GenericDefId::ImplId(_) => None,
GenericDefId::EnumVariantId(_) => None,
@@ -1583,10 +1587,10 @@ pub(crate) fn generic_defaults_recover(
.iter_id()
.map(|id| {
let val = match id {
- itertools::Either::Left(_) => {
+ Either::Left(_) => {
GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
}
- itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+ Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
};
crate::make_binders(db, &generic_params, val)
})
@@ -1919,7 +1923,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
arg: &'a GenericArg,
this: &mut T,
for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
- for_const: impl FnOnce(&mut T, &ConstScalarOrPath, Ty) -> Const + 'a,
+ for_const: impl FnOnce(&mut T, &ConstRefOrPath, Ty) -> Const + 'a,
) -> Option<crate::GenericArg> {
let kind = match kind_id {
Either::Left(_) => ParamKind::Type,
@@ -1947,7 +1951,7 @@ pub(crate) fn generic_arg_to_chalk<'a, T>(
let p = p.mod_path();
if p.kind == PathKind::Plain {
if let [n] = p.segments() {
- let c = ConstScalarOrPath::Path(n.clone());
+ let c = ConstRefOrPath::Path(n.clone());
return Some(
GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
);
@@ -1964,14 +1968,14 @@ pub(crate) fn const_or_path_to_chalk(
db: &dyn HirDatabase,
resolver: &Resolver,
expected_ty: Ty,
- value: &ConstScalarOrPath,
+ value: &ConstRefOrPath,
mode: ParamLoweringMode,
args: impl FnOnce() -> Generics,
debruijn: DebruijnIndex,
) -> Const {
match value {
- ConstScalarOrPath::Scalar(s) => intern_const_scalar(*s, expected_ty),
- ConstScalarOrPath::Path(n) => {
+ ConstRefOrPath::Scalar(s) => intern_const_ref(db, s, expected_ty, resolver.krate()),
+ ConstRefOrPath::Path(n) => {
let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
path_to_const(db, resolver, &path, mode, args, debruijn)
.unwrap_or_else(|| unknown_const(expected_ty))
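The `ConstRefOrPath` split above decides how a const generic argument is lowered; in user-level terms:

```rust
fn f<const N: usize>() {
    // `3` is a ConstRefOrPath::Scalar: lowered by evaluating the literal
    // (via intern_const_ref) against the expected type `usize`.
    let _a: [u8; 3] = [0; 3];

    // `N` is a single plain path segment, i.e. ConstRefOrPath::Path:
    // path_to_const resolves it to the const parameter, falling back to an
    // unknown const if resolution fails.
    let _b: [u8; N] = [0; N];
}
```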
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index 8c7714b9a..f3a27632b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -19,13 +19,13 @@ use stdx::never;
use crate::{
autoderef::{self, AutoderefKind},
db::HirDatabase,
- from_foreign_def_id,
+ from_chalk_trait_id, from_foreign_def_id,
infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
primitive::{FloatTy, IntTy, UintTy},
static_lifetime, to_chalk_trait_id,
utils::all_super_traits,
- AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
- Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, DynTyExt, ForeignDefId, InEnvironment,
+ Interner, Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
};
/// This is used as a key for indexing impls.
@@ -266,11 +266,12 @@ impl TraitImpls {
#[derive(Debug, Eq, PartialEq)]
pub struct InherentImpls {
map: FxHashMap<TyFingerprint, Vec<ImplId>>,
+ invalid_impls: Vec<ImplId>,
}
impl InherentImpls {
pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
- let mut impls = Self { map: FxHashMap::default() };
+ let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
let crate_def_map = db.crate_def_map(krate);
impls.collect_def_map(db, &crate_def_map);
@@ -283,7 +284,7 @@ impl InherentImpls {
db: &dyn HirDatabase,
block: BlockId,
) -> Option<Arc<Self>> {
- let mut impls = Self { map: FxHashMap::default() };
+ let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
if let Some(block_def_map) = db.block_def_map(block) {
impls.collect_def_map(db, &block_def_map);
impls.shrink_to_fit();
@@ -306,11 +307,17 @@ impl InherentImpls {
}
let self_ty = db.impl_self_ty(impl_id);
- let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders());
- if let Some(fp) = fp {
- self.map.entry(fp).or_default().push(impl_id);
+ let self_ty = self_ty.skip_binders();
+
+ match is_inherent_impl_coherent(db, def_map, &data, self_ty) {
+ true => {
+ // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
+ if let Some(fp) = TyFingerprint::for_inherent_impl(self_ty) {
+ self.map.entry(fp).or_default().push(impl_id);
+ }
+ }
+ false => self.invalid_impls.push(impl_id),
}
- // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
}
// To better support custom derives, collect impls in all unnamed const items.
@@ -334,6 +341,10 @@ impl InherentImpls {
pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
self.map.values().flat_map(|v| v.iter().copied())
}
+
+ pub fn invalid_impls(&self) -> &[ImplId] {
+ &self.invalid_impls
+ }
}
pub(crate) fn incoherent_inherent_impl_crates(
@@ -579,8 +590,8 @@ impl ReceiverAdjustments {
ty = new_ty.clone();
adjust.push(Adjustment {
kind: Adjust::Deref(match kind {
- // FIXME should we know the mutability here?
- AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ // FIXME should we know the mutability here, when autoref is `None`?
+ AutoderefKind::Overloaded => Some(OverloadedDeref(self.autoref)),
AutoderefKind::Builtin => None,
}),
target: new_ty,
@@ -660,10 +671,10 @@ pub fn lookup_impl_const(
env: Arc<TraitEnvironment>,
const_id: ConstId,
subs: Substitution,
-) -> ConstId {
+) -> (ConstId, Substitution) {
let trait_id = match const_id.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => id,
- _ => return const_id,
+ _ => return (const_id, subs),
};
let substitution = Substitution::from_iter(Interner, subs.iter(Interner));
let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution };
@@ -671,12 +682,14 @@ pub fn lookup_impl_const(
let const_data = db.const_data(const_id);
let name = match const_data.name.as_ref() {
Some(name) => name,
- None => return const_id,
+ None => return (const_id, subs),
};
lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
- .and_then(|assoc| if let AssocItemId::ConstId(id) = assoc { Some(id) } else { None })
- .unwrap_or(const_id)
+ .and_then(
+ |assoc| if let (AssocItemId::ConstId(id), s) = assoc { Some((id, s)) } else { None },
+ )
+ .unwrap_or((const_id, subs))
}
/// Looks up the impl method that actually runs for the trait method `func`.
@@ -687,10 +700,10 @@ pub fn lookup_impl_method(
env: Arc<TraitEnvironment>,
func: FunctionId,
fn_subst: Substitution,
-) -> FunctionId {
+) -> (FunctionId, Substitution) {
let trait_id = match func.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => id,
- _ => return func,
+ _ => return (func, fn_subst),
};
let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
let fn_params = fn_subst.len(Interner) - trait_params;
@@ -701,8 +714,14 @@ pub fn lookup_impl_method(
let name = &db.function_data(func).name;
lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name)
- .and_then(|assoc| if let AssocItemId::FunctionId(id) = assoc { Some(id) } else { None })
- .unwrap_or(func)
+ .and_then(|assoc| {
+ if let (AssocItemId::FunctionId(id), subst) = assoc {
+ Some((id, subst))
+ } else {
+ None
+ }
+ })
+ .unwrap_or((func, fn_subst))
}
fn lookup_impl_assoc_item_for_trait_ref(
@@ -710,7 +729,7 @@ fn lookup_impl_assoc_item_for_trait_ref(
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
name: &Name,
-) -> Option<AssocItemId> {
+) -> Option<(AssocItemId, Substitution)> {
let self_ty = trait_ref.self_type_parameter(Interner);
let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
let impls = db.trait_impls_in_deps(env.krate);
@@ -718,8 +737,8 @@ fn lookup_impl_assoc_item_for_trait_ref(
let table = InferenceTable::new(db, env);
- let impl_data = find_matching_impl(impls, table, trait_ref)?;
- impl_data.items.iter().find_map(|&it| match it {
+ let (impl_data, impl_subst) = find_matching_impl(impls, table, trait_ref)?;
+ let item = impl_data.items.iter().find_map(|&it| match it {
AssocItemId::FunctionId(f) => {
(db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f))
}
@@ -730,14 +749,15 @@ fn lookup_impl_assoc_item_for_trait_ref(
.map(|n| n == name)
.and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }),
AssocItemId::TypeAliasId(_) => None,
- })
+ })?;
+ Some((item, impl_subst))
}
fn find_matching_impl(
mut impls: impl Iterator<Item = ImplId>,
mut table: InferenceTable<'_>,
actual_trait_ref: TraitRef,
-) -> Option<Arc<ImplData>> {
+) -> Option<(Arc<ImplData>, Substitution)> {
let db = table.db;
loop {
let impl_ = impls.next()?;
@@ -758,7 +778,7 @@ fn find_matching_impl(
.into_iter()
.map(|b| b.cast(Interner));
let goal = crate::Goal::all(Interner, wcs);
- table.try_obligation(goal).map(|_| impl_data)
+ table.try_obligation(goal).map(|_| (impl_data, table.resolve_completely(impl_substs)))
});
if r.is_some() {
break r;
@@ -766,6 +786,69 @@ fn find_matching_impl(
}
}
+fn is_inherent_impl_coherent(
+ db: &dyn HirDatabase,
+ def_map: &DefMap,
+ impl_data: &ImplData,
+ self_ty: &Ty,
+) -> bool {
+ let self_ty = self_ty.kind(Interner);
+ let impl_allowed = match self_ty {
+ TyKind::Tuple(_, _)
+ | TyKind::FnDef(_, _)
+ | TyKind::Array(_, _)
+ | TyKind::Never
+ | TyKind::Raw(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::Slice(_)
+ | TyKind::Str
+ | TyKind::Scalar(_) => def_map.is_rustc_coherence_is_core(),
+
+ &TyKind::Adt(AdtId(adt), _) => adt.module(db.upcast()).krate() == def_map.krate(),
+ TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
+ from_chalk_trait_id(trait_ref.trait_id).module(db.upcast()).krate() == def_map.krate()
+ }),
+
+ _ => true,
+ };
+ impl_allowed || {
+ let rustc_has_incoherent_inherent_impls = match self_ty {
+ TyKind::Tuple(_, _)
+ | TyKind::FnDef(_, _)
+ | TyKind::Array(_, _)
+ | TyKind::Never
+ | TyKind::Raw(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::Slice(_)
+ | TyKind::Str
+ | TyKind::Scalar(_) => true,
+
+ &TyKind::Adt(AdtId(adt), _) => match adt {
+ hir_def::AdtId::StructId(it) => {
+ db.struct_data(it).rustc_has_incoherent_inherent_impls
+ }
+ hir_def::AdtId::UnionId(it) => {
+ db.union_data(it).rustc_has_incoherent_inherent_impls
+ }
+ hir_def::AdtId::EnumId(it) => db.enum_data(it).rustc_has_incoherent_inherent_impls,
+ },
+ TyKind::Dyn(it) => it.principal().map_or(false, |trait_ref| {
+ db.trait_data(from_chalk_trait_id(trait_ref.trait_id))
+ .rustc_has_incoherent_inherent_impls
+ }),
+
+ _ => false,
+ };
+ rustc_has_incoherent_inherent_impls
+ && !impl_data.items.is_empty()
+ && impl_data.items.iter().copied().all(|assoc| match assoc {
+ AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl,
+ AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl,
+ AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl,
+ })
+ }
+}
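In user terms, the coherence check accepts ordinary inherent impls on local ADTs and quarantines the rest in `invalid_impls` instead of mixing them into the method-resolution map:

```rust
// Accepted: the ADT is defined in the same crate as the impl.
struct Local;

impl Local {
    fn ok(&self) {}
}

// Rejected and recorded in `invalid_impls`: an inherent impl on a primitive
// is only coherent in a `#[rustc_coherence_is_core]` crate, or when the
// defining crate opts in with `#[rustc_has_incoherent_inherent_impls]` and
// every item carries `#[rustc_allow_incoherent_impl]`.
//
// impl u32 {
//     fn not_ok(&self) {}
// }
```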
+
pub fn iterate_path_candidates(
ty: &Canonical<Ty>,
db: &dyn HirDatabase,
@@ -821,9 +904,9 @@ pub fn iterate_method_candidates_dyn(
let mut table = InferenceTable::new(db, env.clone());
let ty = table.instantiate_canonical(ty.clone());
- let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+ let deref_chain = autoderef_method_receiver(&mut table, ty);
- let result = deref_chain.into_iter().zip(adj).try_for_each(|(receiver_ty, adj)| {
+ let result = deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| {
iterate_method_candidates_with_autoref(
&receiver_ty,
adj,
@@ -867,16 +950,20 @@ fn iterate_method_candidates_with_autoref(
return ControlFlow::Continue(());
}
- iterate_method_candidates_by_receiver(
- receiver_ty,
- first_adjustment.clone(),
- db,
- env.clone(),
- traits_in_scope,
- visible_from_module,
- name,
- &mut callback,
- )?;
+ let mut iterate_method_candidates_by_receiver = move |receiver_ty, first_adjustment| {
+ iterate_method_candidates_by_receiver(
+ receiver_ty,
+ first_adjustment,
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ &mut callback,
+ )
+ };
+
+ iterate_method_candidates_by_receiver(receiver_ty, first_adjustment.clone())?;
let refed = Canonical {
value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
@@ -884,16 +971,7 @@ fn iterate_method_candidates_with_autoref(
binders: receiver_ty.binders.clone(),
};
- iterate_method_candidates_by_receiver(
- &refed,
- first_adjustment.with_autoref(Mutability::Not),
- db,
- env.clone(),
- traits_in_scope,
- visible_from_module,
- name,
- &mut callback,
- )?;
+ iterate_method_candidates_by_receiver(&refed, first_adjustment.with_autoref(Mutability::Not))?;
let ref_muted = Canonical {
value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
@@ -904,12 +982,6 @@ fn iterate_method_candidates_with_autoref(
iterate_method_candidates_by_receiver(
&ref_muted,
first_adjustment.with_autoref(Mutability::Mut),
- db,
- env,
- traits_in_scope,
- visible_from_module,
- name,
- &mut callback,
)
}
@@ -1210,8 +1282,8 @@ pub fn resolve_indexing_op(
) -> Option<ReceiverAdjustments> {
let mut table = InferenceTable::new(db, env.clone());
let ty = table.instantiate_canonical(ty);
- let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
- for (ty, adj) in deref_chain.into_iter().zip(adj) {
+ let deref_chain = autoderef_method_receiver(&mut table, ty);
+ for (ty, adj) in deref_chain {
let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
return Some(adj);
@@ -1421,25 +1493,24 @@ fn generic_implements_goal(
fn autoderef_method_receiver(
table: &mut InferenceTable<'_>,
ty: Ty,
-) -> (Vec<Canonical<Ty>>, Vec<ReceiverAdjustments>) {
- let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new());
+) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> {
+ let mut deref_chain: Vec<_> = Vec::new();
let mut autoderef = autoderef::Autoderef::new(table, ty);
while let Some((ty, derefs)) = autoderef.next() {
- deref_chain.push(autoderef.table.canonicalize(ty).value);
- adjustments.push(ReceiverAdjustments {
- autoref: None,
- autoderefs: derefs,
- unsize_array: false,
- });
+ deref_chain.push((
+ autoderef.table.canonicalize(ty).value,
+ ReceiverAdjustments { autoref: None, autoderefs: derefs, unsize_array: false },
+ ));
}
// As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
- if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = (
- deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())),
- adjustments.last().cloned(),
- ) {
+ if let Some((TyKind::Array(parameters, _), binders, adj)) =
+ deref_chain.last().map(|(ty, adj)| (ty.value.kind(Interner), ty.binders.clone(), adj))
+ {
let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner);
- deref_chain.push(Canonical { value: unsized_ty, binders });
- adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj });
+ deref_chain.push((
+ Canonical { value: unsized_ty, binders },
+ ReceiverAdjustments { unsize_array: true, ..adj.clone() },
+ ));
}
- (deref_chain, adjustments)
+ deref_chain
}
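The paired (type, adjustments) receiver chain can be seen with an array receiver:

```rust
fn f(arr: [i32; 3]) {
    // Candidate receiver types tried in order for `arr.iter()`:
    //   [i32; 3]  -- zero autoderefs
    //   [i32]     -- the final array-to-slice unsizing step, paired with
    //                ReceiverAdjustments { unsize_array: true, .. }
    // As the comment above notes, array unsizing is the only receiver
    // unsizing rustc performs.
    let mut it = arr.iter();
    let _ = it.next();
}
```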
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
new file mode 100644
index 000000000..7c1cbbdf5
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -0,0 +1,863 @@
+//! MIR definitions and implementation
+
+use std::{fmt::Display, iter};
+
+use crate::{
+ infer::PointerCast, Const, ConstScalar, InferenceResult, Interner, MemoryMap, Substitution, Ty,
+};
+use chalk_ir::Mutability;
+use hir_def::{
+ expr::{BindingId, Expr, ExprId, Ordering, PatId},
+ DefWithBodyId, FieldId, UnionId, VariantId,
+};
+use la_arena::{Arena, ArenaMap, Idx, RawIdx};
+
+mod eval;
+mod lower;
+mod borrowck;
+mod pretty;
+
+pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
+pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError};
+pub use lower::{lower_to_mir, mir_body_query, mir_body_recover, MirLowerError};
+use smallvec::{smallvec, SmallVec};
+use stdx::impl_from;
+
+use super::consteval::{intern_const_scalar, try_const_usize};
+
+pub type BasicBlockId = Idx<BasicBlock>;
+pub type LocalId = Idx<Local>;
+
+fn return_slot() -> LocalId {
+ LocalId::from_raw(RawIdx::from(0))
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Local {
+ pub ty: Ty,
+}
+
+/// An operand in MIR represents a "value" in Rust, the definition of which is undecided and part of
+/// the memory model. One proposal for a definition of values can be found [on UCG][value-def].
+///
+/// [value-def]: https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/value-domain.md
+///
+/// The most common way to create values is via loading a place. Loading a place is an operation
+/// which reads the memory of the place and converts it to a value. This is a fundamentally *typed*
+/// operation. The nature of the value produced depends on the type of the conversion. Furthermore,
+/// there may be other effects: if the type has a validity constraint loading the place might be UB
+/// if the validity constraint is not met.
+///
+/// **Needs clarification:** Ralf proposes that loading a place not have side-effects.
+/// This is what is implemented in miri today. Are these the semantics we want for MIR? Is this
+/// something we can even decide without knowing more about Rust's memory model?
+///
+/// **Needs clarification:** Is loading a place that has its variant index set well-formed? Miri
+/// currently implements it, but it seems like this may be something to check against in the
+/// validator.
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Operand {
+ /// Creates a value by loading the given place.
+ ///
+ /// Before drop elaboration, the type of the place must be `Copy`. After drop elaboration there
+ /// is no such requirement.
+ Copy(Place),
+
+    /// Creates a value by loading the place, just like the `Copy` operand.
+ ///
+ /// This *may* additionally overwrite the place with `uninit` bytes, depending on how we decide
+ /// in [UCG#188]. You should not emit MIR that may attempt a subsequent second load of this
+ /// place without first re-initializing it.
+ ///
+ /// [UCG#188]: https://github.com/rust-lang/unsafe-code-guidelines/issues/188
+ Move(Place),
+ /// Constants are already semantically values, and remain unchanged.
+ Constant(Const),
+}
+
+impl Operand {
+ fn from_concrete_const(data: Vec<u8>, memory_map: MemoryMap, ty: Ty) -> Self {
+ Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty))
+ }
+
+ fn from_bytes(data: Vec<u8>, ty: Ty) -> Self {
+ Operand::from_concrete_const(data, MemoryMap::default(), ty)
+ }
+
+ fn const_zst(ty: Ty) -> Operand {
+ Self::from_bytes(vec![], ty)
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ProjectionElem<V, T> {
+ Deref,
+ Field(FieldId),
+ TupleField(usize),
+ Index(V),
+ ConstantIndex { offset: u64, min_length: u64, from_end: bool },
+ Subslice { from: u64, to: u64, from_end: bool },
+ //Downcast(Option<Symbol>, VariantIdx),
+ OpaqueCast(T),
+}
+
+type PlaceElem = ProjectionElem<LocalId, Ty>;
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Place {
+ pub local: LocalId,
+ pub projection: Vec<PlaceElem>,
+}
+
+impl From<LocalId> for Place {
+ fn from(local: LocalId) -> Self {
+ Self { local, projection: vec![] }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum AggregateKind {
+    /// The `Ty` is the type of the elements
+    Array(Ty),
+    /// The `Ty` is the type of the tuple
+    Tuple(Ty),
+ Adt(VariantId, Substitution),
+ Union(UnionId, FieldId),
+ //Closure(LocalDefId, SubstsRef),
+ //Generator(LocalDefId, SubstsRef, Movability),
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct SwitchTargets {
+ /// Possible values. The locations to branch to in each case
+ /// are found in the corresponding indices from the `targets` vector.
+ values: SmallVec<[u128; 1]>,
+
+ /// Possible branch sites. The last element of this vector is used
+ /// for the otherwise branch, so targets.len() == values.len() + 1
+ /// should hold.
+ //
+ // This invariant is quite non-obvious and also could be improved.
+ // One way to make this invariant is to have something like this instead:
+ //
+ // branches: Vec<(ConstInt, BasicBlock)>,
+ // otherwise: Option<BasicBlock> // exhaustive if None
+ //
+    // However, we've decided to keep this as-is until we find a case where
+    // some other approach seems to be strictly better.
+ targets: SmallVec<[BasicBlockId; 2]>,
+}
+
+impl SwitchTargets {
+ /// Creates switch targets from an iterator of values and target blocks.
+ ///
+ /// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to
+ /// `goto otherwise;`.
+ pub fn new(
+ targets: impl Iterator<Item = (u128, BasicBlockId)>,
+ otherwise: BasicBlockId,
+ ) -> Self {
+ let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip();
+ targets.push(otherwise);
+ Self { values, targets }
+ }
+
+ /// Builds a switch targets definition that jumps to `then` if the tested value equals `value`,
+ /// and to `else_` if not.
+ pub fn static_if(value: u128, then: BasicBlockId, else_: BasicBlockId) -> Self {
+ Self { values: smallvec![value], targets: smallvec![then, else_] }
+ }
+
+ /// Returns the fallback target that is jumped to when none of the values match the operand.
+ pub fn otherwise(&self) -> BasicBlockId {
+ *self.targets.last().unwrap()
+ }
+
+ /// Returns an iterator over the switch targets.
+ ///
+ /// The iterator will yield tuples containing the value and corresponding target to jump to, not
+ /// including the `otherwise` fallback target.
+ ///
+ /// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
+ pub fn iter(&self) -> impl Iterator<Item = (u128, BasicBlockId)> + '_ {
+ iter::zip(&self.values, &self.targets).map(|(x, y)| (*x, *y))
+ }
+
+ /// Returns a slice with all possible jump targets (including the fallback target).
+ pub fn all_targets(&self) -> &[BasicBlockId] {
+ &self.targets
+ }
+
+ /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the
+ /// specific value. This cannot fail, since it returns the `otherwise`
+ /// branch if there is no specific match for the value.
+ pub fn target_for_value(&self, value: u128) -> BasicBlockId {
+ self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise())
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Terminator {
+ /// Block has one successor; we continue execution there.
+ Goto { target: BasicBlockId },
+
+ /// Switches based on the computed value.
+ ///
+ /// First, evaluates the `discr` operand. The type of the operand must be a signed or unsigned
+ /// integer, char, or bool. Then, if the list of switch targets
+ /// contains the computed value, continues execution at the associated basic block. Otherwise,
+ /// continues execution at the "otherwise" basic block.
+ ///
+ /// Target values may not appear more than once.
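+ ///
+ /// For example (editorial sketch, MIR-like notation):
+ /// ```ignore (MIR)
+ /// switchInt(_1) -> [0: bb1, 1: bb2, otherwise: bb3];
+ /// ```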
+ SwitchInt {
+ /// The discriminant value being tested.
+ discr: Operand,
+
+ targets: SwitchTargets,
+ },
+
+ /// Indicates that the landing pad is finished and that the process should continue unwinding.
+ ///
+ /// Like a return, this marks the end of this invocation of the function.
+ ///
+ /// Only permitted in cleanup blocks. `Resume` is not permitted with `-C unwind=abort` after
+ /// deaggregation runs.
+ Resume,
+
+ /// Indicates that the landing pad is finished and that the process should abort.
+ ///
+ /// Used to prevent unwinding for foreign items or with `-C unwind=abort`. Only permitted in
+ /// cleanup blocks.
+ Abort,
+
+ /// Returns from the function.
+ ///
+ /// Like function calls, the exact semantics of returns in Rust are unclear. Returning very
+ /// likely at least assigns the value currently in the return place (`_0`) to the place
+ /// specified in the associated `Call` terminator in the calling function, as if assigned via
+ /// `dest = move _0`. It might additionally do other things, like have side-effects in the
+ /// aliasing model.
+ ///
+ /// If the body is a generator body, this has slightly different semantics; it instead causes a
+ /// `GeneratorState::Returned(_0)` to be created (as if by an `Aggregate` rvalue) and assigned
+ /// to the return place.
+ Return,
+
+ /// Indicates a terminator that can never be reached.
+ ///
+ /// Executing this terminator is UB.
+ Unreachable,
+
+ /// The behavior of this statement differs significantly before and after drop elaboration.
+ /// After drop elaboration, `Drop` executes the drop glue for the specified place, after which
+ /// it continues execution/unwinds at the given basic blocks. It is possible that executing drop
+ /// glue is special - this would be part of Rust's memory model. (**FIXME**: do we have an
+ /// issue tracking whether drop glue has any interesting semantics in addition to those of a
+ /// function call?)
+ ///
+ /// `Drop` before drop elaboration is a *conditional* execution of the drop glue. Specifically, the
+ /// `Drop` will be executed if...
+ ///
+ /// **Needs clarification**: End of that sentence. This in effect should document the exact
+ /// behavior of drop elaboration. The following sounds vaguely right, but I'm not quite sure:
+ ///
+ /// > The drop glue is executed if, among all statements executed within this `Body`, an assignment to
+ /// > the place or one of its "parents" occurred more recently than a move out of it. This does not
+ /// > consider indirect assignments.
+ Drop { place: Place, target: BasicBlockId, unwind: Option<BasicBlockId> },
+
+ /// Drops the place and assigns a new value to it.
+ ///
+ /// This first performs the exact same operation as the pre drop-elaboration `Drop` terminator;
+ /// it then additionally assigns the `value` to the `place` as if by an assignment statement.
+ /// This assignment occurs both in the unwind and the regular code paths. The semantics are best
+ /// explained by the elaboration:
+ ///
+ /// ```ignore (MIR)
+ /// BB0 {
+ /// DropAndReplace(P <- V, goto BB1, unwind BB2)
+ /// }
+ /// ```
+ ///
+ /// becomes
+ ///
+ /// ```ignore (MIR)
+ /// BB0 {
+ /// Drop(P, goto BB1, unwind BB2)
+ /// }
+ /// BB1 {
+ /// // P is now uninitialized
+ /// P <- V
+ /// }
+ /// BB2 {
+ /// // P is now uninitialized -- its dtor panicked
+ /// P <- V
+ /// }
+ /// ```
+ ///
+ /// Disallowed after drop elaboration.
+ DropAndReplace {
+ place: Place,
+ value: Operand,
+ target: BasicBlockId,
+ unwind: Option<BasicBlockId>,
+ },
+
+ /// Roughly speaking, evaluates the `func` operand and the arguments, and starts execution of
+ /// the referred to function. The operand types must match the argument types of the function.
+ /// The return place type must match the return type. The type of the `func` operand must be
+ /// callable, meaning either a function pointer, a function type, or a closure type.
+ ///
+ /// **Needs clarification**: The exact semantics of this. Current backends rely on `move`
+ /// operands not aliasing the return place. It is unclear how this is justified in MIR, see
+ /// [#71117].
+ ///
+ /// [#71117]: https://github.com/rust-lang/rust/issues/71117
+ Call {
+ /// The function that’s being called.
+ func: Operand,
+ /// Arguments the function is called with.
+ /// These are owned by the callee, which is free to modify them.
+ /// This allows the memory occupied by "by-value" arguments to be
+ /// reused across function calls without duplicating the contents.
+ args: Vec<Operand>,
+ /// Where the returned value will be written
+ destination: Place,
+ /// Where to go after this call returns. If none, the call necessarily diverges.
+ target: Option<BasicBlockId>,
+ /// Cleanups to be done if the call unwinds.
+ cleanup: Option<BasicBlockId>,
+ /// `true` if this call came from call syntax in HIR rather than from a
+ /// desugared overloaded operator. Overloaded function calls still count
+ /// as `true`, since they use call syntax.
+ from_hir_call: bool,
+ // This `Span` is the span of the function, without the dot and receiver
+ // (e.g. `foo(a, b)` in `x.foo(a, b)`).
+ //fn_span: Span,
+ },
+
+ /// Evaluates the operand, which must have type `bool`. If it is not equal to `expected`,
+ /// initiates a panic. Initiating a panic corresponds to a `Call` terminator with some
+ /// unspecified constant as the function to call, all the operands stored in the `AssertMessage`
+ /// as parameters, and `None` for the destination. Keep in mind that the `cleanup` path is not
+ /// necessarily executed even in the case of a panic, for example in `-C panic=abort`. If the
+ /// assertion does not fail, execution continues at the specified basic block.
+ Assert {
+ cond: Operand,
+ expected: bool,
+ //msg: AssertMessage,
+ target: BasicBlockId,
+ cleanup: Option<BasicBlockId>,
+ },
+
+ /// Marks a suspend point.
+ ///
+ /// Like `Return` terminators in generator bodies, this computes `value` and then a
+ /// `GeneratorState::Yielded(value)` as if by `Aggregate` rvalue. That value is then assigned to
+ /// the return place of the function calling this one, and execution continues in the calling
+ /// function. When next invoked with the same first argument, execution of this function
+ /// continues at the `resume` basic block, with the second argument written to the `resume_arg`
+ /// place. If the generator is dropped before then, the `drop` basic block is invoked.
+ ///
+ /// Not permitted in bodies that are not generator bodies, or after generator lowering.
+ ///
+ /// **Needs clarification**: What about the evaluation order of the `resume_arg` and `value`?
+ Yield {
+ /// The value to return.
+ value: Operand,
+ /// Where to resume to.
+ resume: BasicBlockId,
+ /// The place to store the resume argument in.
+ resume_arg: Place,
+ /// Cleanup to be done if the generator is dropped at this suspend point.
+ drop: Option<BasicBlockId>,
+ },
+
+ /// Indicates the end of dropping a generator.
+ ///
+ /// Semantically just a `return` (from the generator's drop glue). Only permitted in the same
+ /// situations as `yield`.
+ ///
+ /// **Needs clarification**: Is that even correct? The generator drop code is always confusing
+ /// to me, because it's not even really in the current body.
+ ///
+ /// **Needs clarification**: Are there type system constraints on these terminators? Should
+ /// there be a "block type" like `cleanup` blocks for them?
+ GeneratorDrop,
+
+ /// A block where control flow only ever takes one real path, but borrowck needs to be more
+ /// conservative.
+ ///
+ /// At runtime this is semantically just a goto.
+ ///
+ /// Disallowed after drop elaboration.
+ FalseEdge {
+ /// The target normal control flow will take.
+ real_target: BasicBlockId,
+ /// A block control flow could conceptually jump to, but won't in
+ /// practice.
+ imaginary_target: BasicBlockId,
+ },
+
+ /// A terminator for blocks that only take one path in reality, but where we reserve the right
+ /// to unwind in borrowck, even if it won't happen in practice. This can arise in infinite loops
+ /// with no function calls for example.
+ ///
+ /// At runtime this is semantically just a goto.
+ ///
+ /// Disallowed after drop elaboration.
+ FalseUnwind {
+ /// The target normal control flow will take.
+ real_target: BasicBlockId,
+ /// The imaginary cleanup block link. This particular path will never be taken
+ /// in practice, but in order to avoid fragility we want to always
+ /// consider it in borrowck. We don't want to accept programs which
+ /// pass borrowck only when `panic=abort` or some assertions are disabled
+ /// due to release vs. debug mode builds. This needs to be an `Option` because
+ /// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes.
+ unwind: Option<BasicBlockId>,
+ },
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum BorrowKind {
+ /// Data must be immutable and is aliasable.
+ Shared,
+
+ /// The immediately borrowed place must be immutable, but projections from
+ /// it don't need to be. For example, a shallow borrow of `a.b` doesn't
+ /// conflict with a mutable borrow of `a.b.c`.
+ ///
+ /// This is used when lowering matches: when matching on a place we want to
+ /// ensure that the place has the same value from the start of the match until
+ /// an arm is selected. This prevents this code from compiling:
+ /// ```compile_fail,E0510
+ /// let mut x = &Some(0);
+ /// match *x {
+ /// None => (),
+ /// Some(_) if { x = &None; false } => (),
+ /// Some(_) => (),
+ /// }
+ /// ```
+ /// This can't be a shared borrow because mutably borrowing `(*x as Some).0`
+ /// should not prevent `if let None = x { ... }`, for example, because
+ /// mutating `(*x as Some).0` can't affect the discriminant of `x`.
+ /// We can also report errors with this kind of borrow differently.
+ Shallow,
+
+ /// Data must be immutable but not aliasable. This kind of borrow
+ /// cannot currently be expressed by the user and is used only in
+ /// implicit closure bindings. It is needed when the closure is
+ /// borrowing or mutating a mutable referent, e.g.:
+ /// ```
+ /// let mut z = 3;
+ /// let x: &mut isize = &mut z;
+ /// let y = || *x += 5;
+ /// ```
+ /// If we were to try to translate this closure into a more explicit
+ /// form, we'd encounter an error with the code as written:
+ /// ```compile_fail,E0594
+ /// struct Env<'a> { x: &'a &'a mut isize }
+ /// let mut z = 3;
+ /// let x: &mut isize = &mut z;
+ /// let y = (&mut Env { x: &x }, fn_ptr); // Closure is pair of env and fn
+ /// fn fn_ptr(env: &mut Env) { **env.x += 5; }
+ /// ```
+ /// This is then illegal because you cannot mutate an `&mut` found
+ /// in an aliasable location. To solve, you'd have to translate with
+ /// an `&mut` borrow:
+ /// ```compile_fail,E0596
+ /// struct Env<'a> { x: &'a mut &'a mut isize }
+ /// let mut z = 3;
+ /// let x: &mut isize = &mut z;
+ /// let y = (&mut Env { x: &mut x }, fn_ptr); // changed from &x to &mut x
+ /// fn fn_ptr(env: &mut Env) { **env.x += 5; }
+ /// ```
+ /// Now the assignment to `**env.x` is legal, but creating a
+ /// mutable pointer to `x` is not because `x` is not mutable. We
+ /// could fix this by declaring `x` as `let mut x`. This is ok in
+ /// user code, if awkward, but extra weird for closures, since the
+ /// borrow is hidden.
+ ///
+ /// So we introduce a "unique imm" borrow -- the referent is
+ /// immutable, but not aliasable. This solves the problem. For
+ /// simplicity, we don't give users a way to express this
+ /// borrow; it's just used when translating closures.
+ Unique,
+
+ /// Data is mutable and not aliasable.
+ Mut {
+ /// `true` if this borrow arose from method-call auto-ref
+ /// (i.e., `adjustment::Adjust::Borrow`).
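+ /// A typical source of a two-phase borrow is an auto-ref in a call like
+ /// `v.push(v.len())`, where `&mut v` is created before `v.len()` is
+ /// evaluated (editorial note).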
+ allow_two_phase_borrow: bool,
+ },
+}
+
+impl BorrowKind {
+ fn from_hir(m: hir_def::type_ref::Mutability) -> Self {
+ match m {
+ hir_def::type_ref::Mutability::Shared => BorrowKind::Shared,
+ hir_def::type_ref::Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
+ }
+ }
+
+ fn from_chalk(m: Mutability) -> Self {
+ match m {
+ Mutability::Not => BorrowKind::Shared,
+ Mutability::Mut => BorrowKind::Mut { allow_two_phase_borrow: false },
+ }
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum UnOp {
+ /// The `!` operator for logical inversion
+ Not,
+ /// The `-` operator for negation
+ Neg,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum BinOp {
+ /// The `+` operator (addition)
+ Add,
+ /// The `-` operator (subtraction)
+ Sub,
+ /// The `*` operator (multiplication)
+ Mul,
+ /// The `/` operator (division)
+ ///
+ /// Division by zero is UB, because the compiler should have inserted checks
+ /// prior to this.
+ Div,
+ /// The `%` operator (modulus)
+ ///
+ /// Using zero as the modulus (second operand) is UB, because the compiler
+ /// should have inserted checks prior to this.
+ Rem,
+ /// The `^` operator (bitwise xor)
+ BitXor,
+ /// The `&` operator (bitwise and)
+ BitAnd,
+ /// The `|` operator (bitwise or)
+ BitOr,
+ /// The `<<` operator (shift left)
+ ///
+ /// The offset is truncated to the size of the first operand before shifting.
+ Shl,
+ /// The `>>` operator (shift right)
+ ///
+ /// The offset is truncated to the size of the first operand before shifting.
+ Shr,
+ /// The `==` operator (equality)
+ Eq,
+ /// The `<` operator (less than)
+ Lt,
+ /// The `<=` operator (less than or equal to)
+ Le,
+ /// The `!=` operator (not equal to)
+ Ne,
+ /// The `>=` operator (greater than or equal to)
+ Ge,
+ /// The `>` operator (greater than)
+ Gt,
+ /// The `ptr.offset` operator
+ Offset,
+}
+
+impl Display for BinOp {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.write_str(match self {
+ BinOp::Add => "+",
+ BinOp::Sub => "-",
+ BinOp::Mul => "*",
+ BinOp::Div => "/",
+ BinOp::Rem => "%",
+ BinOp::BitXor => "^",
+ BinOp::BitAnd => "&",
+ BinOp::BitOr => "|",
+ BinOp::Shl => "<<",
+ BinOp::Shr => ">>",
+ BinOp::Eq => "==",
+ BinOp::Lt => "<",
+ BinOp::Le => "<=",
+ BinOp::Ne => "!=",
+ BinOp::Ge => ">=",
+ BinOp::Gt => ">",
+ BinOp::Offset => "`offset`",
+ })
+ }
+}
+
+impl From<hir_def::expr::ArithOp> for BinOp {
+ fn from(value: hir_def::expr::ArithOp) -> Self {
+ match value {
+ hir_def::expr::ArithOp::Add => BinOp::Add,
+ hir_def::expr::ArithOp::Mul => BinOp::Mul,
+ hir_def::expr::ArithOp::Sub => BinOp::Sub,
+ hir_def::expr::ArithOp::Div => BinOp::Div,
+ hir_def::expr::ArithOp::Rem => BinOp::Rem,
+ hir_def::expr::ArithOp::Shl => BinOp::Shl,
+ hir_def::expr::ArithOp::Shr => BinOp::Shr,
+ hir_def::expr::ArithOp::BitXor => BinOp::BitXor,
+ hir_def::expr::ArithOp::BitOr => BinOp::BitOr,
+ hir_def::expr::ArithOp::BitAnd => BinOp::BitAnd,
+ }
+ }
+}
+
+impl From<hir_def::expr::CmpOp> for BinOp {
+ fn from(value: hir_def::expr::CmpOp) -> Self {
+ match value {
+ hir_def::expr::CmpOp::Eq { negated: false } => BinOp::Eq,
+ hir_def::expr::CmpOp::Eq { negated: true } => BinOp::Ne,
+ hir_def::expr::CmpOp::Ord { ordering: Ordering::Greater, strict: false } => BinOp::Ge,
+ hir_def::expr::CmpOp::Ord { ordering: Ordering::Greater, strict: true } => BinOp::Gt,
+ hir_def::expr::CmpOp::Ord { ordering: Ordering::Less, strict: false } => BinOp::Le,
+ hir_def::expr::CmpOp::Ord { ordering: Ordering::Less, strict: true } => BinOp::Lt,
+ }
+ }
+}
+
+impl From<Operand> for Rvalue {
+ fn from(x: Operand) -> Self {
+ Self::Use(x)
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum CastKind {
+ /// An exposing pointer to address cast. A cast between a pointer and an integer type, or
+ /// between a function pointer and an integer type.
+ /// See the docs on `expose_addr` for more details.
+ PointerExposeAddress,
+ /// An address-to-pointer cast that picks up an exposed provenance.
+ /// See the docs on `from_exposed_addr` for more details.
+ PointerFromExposedAddress,
+ /// All sorts of pointer-to-pointer casts. Note that reference-to-raw-ptr casts are
+ /// translated into `&raw mut/const *r`, i.e., they are not actually casts.
+ Pointer(PointerCast),
+ /// Cast into a dyn* object.
+ DynStar,
+ IntToInt,
+ FloatToInt,
+ FloatToFloat,
+ IntToFloat,
+ PtrToPtr,
+ FnPtrToPtr,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum Rvalue {
+ /// Yields the operand unchanged
+ Use(Operand),
+
+ /// Creates an array where each element is the value of the operand.
+ ///
+ /// When the repetition count is zero, the value is not dropped; this is the
+ /// cause of a bug, see [#74836].
+ ///
+ /// Corresponds to source code like `[x; 32]`.
+ ///
+ /// [#74836]: https://github.com/rust-lang/rust/issues/74836
+ //Repeat(Operand, ty::Const),
+
+ /// Creates a reference of the indicated kind to the place.
+ ///
+ /// There is not much to document here, because besides the obvious parts the semantics of this
+ /// are essentially entirely a part of the aliasing model. There are many UCG issues discussing
+ /// exactly what the behavior of this operation should be.
+ ///
+ /// `Shallow` borrows are disallowed after drop lowering.
+ Ref(BorrowKind, Place),
+
+ /// Creates a pointer/reference to the given thread local.
+ ///
+ /// The yielded type is a `*mut T` if the static is mutable; otherwise, if the static is extern,
+ /// a `*const T`; and if neither of those apply, a `&T`.
+ ///
+ /// **Note:** This is a runtime operation that actually executes code and is in this sense more
+ /// like a function call. Also, eliminating dead stores of this rvalue causes `fn main() {}` to
+ /// SIGILL for some reason that I (JakobDegen) never got a chance to look into.
+ ///
+ /// **Needs clarification**: Are there weird additional semantics here related to the runtime
+ /// nature of this operation?
+ //ThreadLocalRef(DefId),
+
+ /// Creates a pointer with the indicated mutability to the place.
+ ///
+ /// This is generated by pointer casts like `&v as *const _` or raw address of expressions like
+ /// `&raw v` or `addr_of!(v)`.
+ ///
+ /// Like with references, the semantics of this operation are heavily dependent on the aliasing
+ /// model.
+ //AddressOf(Mutability, Place),
+
+ /// Yields the length of the place, as a `usize`.
+ ///
+ /// If the type of the place is an array, this is the array length. For slices (`[T]`, not
+ /// `&[T]`) this accesses the place's metadata to determine the length. This rvalue is
+ /// ill-formed for places of other types.
+ Len(Place),
+
+ /// Performs essentially all of the casts that can be performed via `as`.
+ ///
+ /// This allows for casts from/to a variety of types.
+ ///
+ /// **FIXME**: Document exactly which `CastKind`s allow which types of casts. Figure out why
+ /// `ArrayToPointer` and `MutToConstPointer` are special.
+ Cast(CastKind, Operand, Ty),
+
+ // FIXME link to `pointer::offset` when it hits stable.
+ /// * `Offset` has the same semantics as `pointer::offset`, except that the second
+ /// parameter may be a `usize` as well.
+ /// * The comparison operations accept `bool`s, `char`s, signed or unsigned integers, floats,
+ /// raw pointers, or function pointers and return a `bool`. The types of the operands must be
+ /// matching, up to the usual caveat of the lifetimes in function pointers.
+ /// * Left and right shift operations accept signed or unsigned integers not necessarily of the
+ /// same type and return a value of the same type as their LHS. Like in Rust, the RHS is
+ /// truncated as needed.
+ /// * The `Bit*` operations accept signed integers, unsigned integers, or bools with matching
+ /// types and return a value of that type.
+ /// * The remaining operations accept signed integers, unsigned integers, or floats with
+ /// matching types and return a value of that type.
+ //BinaryOp(BinOp, Box<(Operand, Operand)>),
+
+ /// Same as `BinaryOp`, but yields `(T, bool)` with a `bool` indicating an error condition.
+ ///
+ /// When overflow checking is disabled and we are generating run-time code, the error condition
+ /// is false. Otherwise, and always during CTFE, the error condition is determined as described
+ /// below.
+ ///
+ /// For addition, subtraction, and multiplication on integers the error condition is set when
+ /// the infinite precision result would be unequal to the actual result.
+ ///
+ /// For shift operations on integers the error condition is set when the value of right-hand
+ /// side is greater than or equal to the number of bits in the type of the left-hand side, or
+ /// when the value of right-hand side is negative.
+ ///
+ /// Other combinations of types and operators are unsupported.
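+ ///
+ /// For example (editorial note), a checked `u8` addition of `255 + 1` yields `(0, true)`,
+ /// mirroring the standard library's overflowing arithmetic:
+ /// ```ignore (illustrative)
+ /// assert_eq!(255u8.overflowing_add(1), (0, true));
+ /// ```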
+ CheckedBinaryOp(BinOp, Operand, Operand),
+
+ /// Computes a value as described by the operation.
+ //NullaryOp(NullOp, Ty),
+
+ /// Exactly like `BinaryOp`, but with fewer operands.
+ ///
+ /// Also does two's-complement arithmetic. Negation requires a signed integer or a float;
+ /// bitwise not requires a signed integer, unsigned integer, or bool. Both operation kinds
+ /// return a value with the same type as their operand.
+ UnaryOp(UnOp, Operand),
+
+ /// Computes the discriminant of the place, returning it as an integer of type
+ /// [`discriminant_ty`]. Returns zero for types without discriminant.
+ ///
+ /// The validity requirements for the underlying value are undecided for this rvalue, see
+ /// [#91095]. Note too that the value of the discriminant is not the same thing as the
+ /// variant index; use [`discriminant_for_variant`] to convert.
+ ///
+ /// [`discriminant_ty`]: crate::ty::Ty::discriminant_ty
+ /// [#91095]: https://github.com/rust-lang/rust/issues/91095
+ /// [`discriminant_for_variant`]: crate::ty::Ty::discriminant_for_variant
+ Discriminant(Place),
+
+ /// Creates an aggregate value, like a tuple or struct.
+ ///
+ /// This is needed because dataflow analysis needs to distinguish
+ /// `dest = Foo { x: ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case that `Foo`
+ /// has a destructor.
+ ///
+ /// Disallowed after deaggregation for all aggregate kinds except `Array` and `Generator`. After
+ /// generator lowering, `Generator` aggregate kinds are disallowed too.
+ Aggregate(AggregateKind, Vec<Operand>),
+
+ /// Transmutes a `*mut u8` into shallow-initialized `Box<T>`.
+ ///
+ /// This is different from a normal transmute because dataflow analysis will treat the box as
+ /// initialized but its content as uninitialized. Like other pointer casts, this in general
+ /// affects alias analysis.
+ ShallowInitBox(Operand, Ty),
+
+ /// A CopyForDeref is equivalent to a read from a place at the
+ /// codegen level, but is treated specially by drop elaboration. When such a read happens, it
+ /// is guaranteed (by the nature of the `Derefer` MIR pass in `rustc_mir_transform/src/deref_separator`)
+ /// that the only use of the returned value is a deref operation, immediately
+ /// followed by one or more projections. Drop elaboration treats this rvalue as if the
+ /// read never happened and just projects further. This allows simplifying various MIR
+ /// optimizations and codegen backends that previously had to handle deref operations anywhere
+ /// in a place.
+ CopyForDeref(Place),
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum StatementKind {
+ Assign(Place, Rvalue),
+ //FakeRead(Box<(FakeReadCause, Place)>),
+ //SetDiscriminant {
+ // place: Box<Place>,
+ // variant_index: VariantIdx,
+ //},
+ Deinit(Place),
+ StorageLive(LocalId),
+ StorageDead(LocalId),
+ //Retag(RetagKind, Box<Place>),
+ //AscribeUserType(Place, UserTypeProjection, Variance),
+ //Intrinsic(Box<NonDivergingIntrinsic>),
+ Nop,
+}
+impl StatementKind {
+ fn with_span(self, span: MirSpan) -> Statement {
+ Statement { kind: self, span }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub struct Statement {
+ pub kind: StatementKind,
+ pub span: MirSpan,
+}
+
+#[derive(Debug, Default, PartialEq, Eq)]
+pub struct BasicBlock {
+ /// List of statements in this block.
+ pub statements: Vec<Statement>,
+
+ /// Terminator for this block.
+ ///
+ /// N.B., this should generally ONLY be `None` during construction.
+ /// Therefore, you should generally access it via the
+ /// `terminator()` or `terminator_mut()` methods. The only
+ /// exception is that certain passes, such as `simplify_cfg`, swap
+ /// out the terminator temporarily with `None` while they continue
+ /// to recurse over the set of basic blocks.
+ pub terminator: Option<Terminator>,
+
+ /// If true, this block lies on an unwind path. This is used
+ /// during codegen where distinct kinds of basic blocks may be
+ /// generated (particularly for MSVC cleanup). Unwind blocks must
+ /// only branch to other unwind blocks.
+ pub is_cleanup: bool,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct MirBody {
+ pub basic_blocks: Arena<BasicBlock>,
+ pub locals: Arena<Local>,
+ pub start_block: BasicBlockId,
+ pub owner: DefWithBodyId,
+ pub arg_count: usize,
+ pub binding_locals: ArenaMap<BindingId, LocalId>,
+ pub param_locals: Vec<LocalId>,
+}
+
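+/// Panics if `c` is not a concrete, evaluable `usize` constant (editorial note).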
+fn const_as_usize(c: &Const) -> usize {
+ try_const_usize(c).unwrap() as usize
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum MirSpan {
+ ExprId(ExprId),
+ PatId(PatId),
+ Unknown,
+}
+
+impl_from!(ExprId, PatId for MirSpan);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
new file mode 100644
index 000000000..c8729af86
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -0,0 +1,223 @@
+//! MIR borrow checker, which is used in diagnostics like `unused_mut`
+
+// Currently this is an ad-hoc implementation, only useful for mutability analysis. Feel free to
+// remove all of this if needed when implementing a proper borrow checker.
+
+use std::sync::Arc;
+
+use hir_def::DefWithBodyId;
+use la_arena::ArenaMap;
+use stdx::never;
+
+use crate::db::HirDatabase;
+
+use super::{
+ BasicBlockId, BorrowKind, LocalId, MirBody, MirLowerError, MirSpan, Place, ProjectionElem,
+ Rvalue, StatementKind, Terminator,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+/// Stores spans which imply that the local should be mutable.
+pub enum MutabilityReason {
+ Mut { spans: Vec<MirSpan> },
+ Not,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct BorrowckResult {
+ pub mir_body: Arc<MirBody>,
+ pub mutability_of_locals: ArenaMap<LocalId, MutabilityReason>,
+}
+
+pub fn borrowck_query(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+) -> Result<Arc<BorrowckResult>, MirLowerError> {
+ let _p = profile::span("borrowck_query");
+ let body = db.mir_body(def)?;
+ let r = BorrowckResult { mutability_of_locals: mutability_of_locals(&body), mir_body: body };
+ Ok(Arc::new(r))
+}
+
+fn is_place_direct(lvalue: &Place) -> bool {
+ !lvalue.projection.iter().any(|x| *x == ProjectionElem::Deref)
+}
+
+enum ProjectionCase {
+ /// Projection is a local
+ Direct,
+ /// Projection is some field or slice of a local
+ DirectPart,
+ /// Projection is deref of something
+ Indirect,
+}
+
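+/// Editorial examples: `x` is `Direct`, `x.f` is `DirectPart`, and `(*x).f`
+/// is `Indirect` (the reverse scan returns as soon as it sees a `Deref`).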
+fn place_case(lvalue: &Place) -> ProjectionCase {
+ let mut is_part_of = false;
+ for proj in lvalue.projection.iter().rev() {
+ match proj {
+ ProjectionElem::Deref => return ProjectionCase::Indirect, // It's indirect
+ ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Subslice { .. }
+ | ProjectionElem::Field(_)
+ | ProjectionElem::TupleField(_)
+ | ProjectionElem::Index(_) => {
+ is_part_of = true;
+ }
+ ProjectionElem::OpaqueCast(_) => (),
+ }
+ }
+ if is_part_of {
+ ProjectionCase::DirectPart
+ } else {
+ ProjectionCase::Direct
+ }
+}
+
+/// Returns a map from basic blocks to the set of locals that might ever be initialized before
+/// the start of the block. Only `StorageDead` can remove something from this map; we ignore
+/// `Deinit`, `drop`, and the like after initialization.
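+///
+/// For example (editorial sketch): in `let x; if c { x = 1; } f();`, the block
+/// following the `if` maps `x` to `true`, since `x` is assigned on at least
+/// one path leading into it.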
+fn ever_initialized_map(body: &MirBody) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> {
+ let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> =
+ body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect();
+ fn dfs(
+ body: &MirBody,
+ b: BasicBlockId,
+ l: LocalId,
+ result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
+ ) {
+ let mut is_ever_initialized = result[b][l]; // It must already be filled, as we use it as the visited mark for the DFS
+ let block = &body.basic_blocks[b];
+ for statement in &block.statements {
+ match &statement.kind {
+ StatementKind::Assign(p, _) => {
+ if p.projection.is_empty() && p.local == l {
+ is_ever_initialized = true;
+ }
+ }
+ StatementKind::StorageDead(p) => {
+ if *p == l {
+ is_ever_initialized = false;
+ }
+ }
+ StatementKind::Deinit(_) | StatementKind::Nop | StatementKind::StorageLive(_) => (),
+ }
+ }
+ let Some(terminator) = &block.terminator else {
+ never!("Terminator should be none only in construction");
+ return;
+ };
+ let targets = match terminator {
+ Terminator::Goto { target } => vec![*target],
+ Terminator::SwitchInt { targets, .. } => targets.all_targets().to_vec(),
+ Terminator::Resume
+ | Terminator::Abort
+ | Terminator::Return
+ | Terminator::Unreachable => vec![],
+ Terminator::Call { target, cleanup, destination, .. } => {
+ if destination.projection.is_empty() && destination.local == l {
+ is_ever_initialized = true;
+ }
+ target.into_iter().chain(cleanup.into_iter()).copied().collect()
+ }
+ Terminator::Drop { .. }
+ | Terminator::DropAndReplace { .. }
+ | Terminator::Assert { .. }
+ | Terminator::Yield { .. }
+ | Terminator::GeneratorDrop
+ | Terminator::FalseEdge { .. }
+ | Terminator::FalseUnwind { .. } => {
+ never!("We don't emit these MIR terminators yet");
+ vec![]
+ }
+ };
+ for target in targets {
+ if !result[target].contains_idx(l) || (!result[target][l] && is_ever_initialized) {
+ result[target].insert(l, is_ever_initialized);
+ dfs(body, target, l, result);
+ }
+ }
+ }
+ for &l in &body.param_locals {
+ result[body.start_block].insert(l, true);
+ dfs(body, body.start_block, l, &mut result);
+ }
+ for l in body.locals.iter().map(|x| x.0) {
+ if !result[body.start_block].contains_idx(l) {
+ result[body.start_block].insert(l, false);
+ dfs(body, body.start_block, l, &mut result);
+ }
+ }
+ result
+}
+
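+/// Computes which locals need to be `mut` (editorial summary): a local is
+/// flagged when it is assigned while already initialized, when it is partially
+/// assigned through a projection, or when a mutable borrow of it is taken.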
+fn mutability_of_locals(body: &MirBody) -> ArenaMap<LocalId, MutabilityReason> {
+ let mut result: ArenaMap<LocalId, MutabilityReason> =
+ body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect();
+ let mut push_mut_span = |local, span| match &mut result[local] {
+ MutabilityReason::Mut { spans } => spans.push(span),
+ x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] },
+ };
+ let ever_init_maps = ever_initialized_map(body);
+ for (block_id, mut ever_init_map) in ever_init_maps.into_iter() {
+ let block = &body.basic_blocks[block_id];
+ for statement in &block.statements {
+ match &statement.kind {
+ StatementKind::Assign(place, value) => {
+ match place_case(place) {
+ ProjectionCase::Direct => {
+ if ever_init_map.get(place.local).copied().unwrap_or_default() {
+ push_mut_span(place.local, statement.span);
+ } else {
+ ever_init_map.insert(place.local, true);
+ }
+ }
+ ProjectionCase::DirectPart => {
+ // Partial initialization is not supported, so it is definitely `mut`
+ push_mut_span(place.local, statement.span);
+ }
+ ProjectionCase::Indirect => (),
+ }
+ if let Rvalue::Ref(BorrowKind::Mut { .. }, p) = value {
+ if is_place_direct(p) {
+ push_mut_span(p.local, statement.span);
+ }
+ }
+ }
+ StatementKind::StorageDead(p) => {
+ ever_init_map.insert(*p, false);
+ }
+ StatementKind::Deinit(_) | StatementKind::StorageLive(_) | StatementKind::Nop => (),
+ }
+ }
+ let Some(terminator) = &block.terminator else {
+ never!("Terminator should be none only in construction");
+ continue;
+ };
+ match terminator {
+ Terminator::Goto { .. }
+ | Terminator::Resume
+ | Terminator::Abort
+ | Terminator::Return
+ | Terminator::Unreachable
+ | Terminator::FalseEdge { .. }
+ | Terminator::FalseUnwind { .. }
+ | Terminator::GeneratorDrop
+ | Terminator::SwitchInt { .. }
+ | Terminator::Drop { .. }
+ | Terminator::DropAndReplace { .. }
+ | Terminator::Assert { .. }
+ | Terminator::Yield { .. } => (),
+ Terminator::Call { destination, .. } => {
+ if destination.projection.is_empty() {
+ if ever_init_map.get(destination.local).copied().unwrap_or_default() {
+ push_mut_span(destination.local, MirSpan::Unknown);
+ } else {
+ ever_init_map.insert(destination.local, true);
+ }
+ }
+ }
+ }
+ }
+ result
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
new file mode 100644
index 000000000..c5d843d9e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -0,0 +1,1253 @@
+//! This module provides a MIR interpreter, which is used in const eval.
+
+use std::{borrow::Cow, collections::HashMap, iter};
+
+use base_db::CrateId;
+use chalk_ir::{
+ fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
+ DebruijnIndex, TyKind,
+};
+use hir_def::{
+ builtin_type::BuiltinType,
+ lang_item::{lang_attr, LangItem},
+ layout::{Layout, LayoutError, RustcEnumVariantIdx, TagEncoding, Variants},
+ AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, Lookup, VariantId,
+};
+use intern::Interned;
+use la_arena::ArenaMap;
+
+use crate::{
+ consteval::{intern_const_scalar, ConstEvalError},
+ db::HirDatabase,
+ from_placeholder_idx,
+ infer::{normalize, PointerCast},
+ layout::layout_of_ty,
+ mapping::from_chalk,
+ method_resolution::lookup_impl_method,
+ CallableDefId, Const, ConstScalar, Interner, MemoryMap, Substitution, Ty, TyBuilder, TyExt,
+};
+
+use super::{
+ const_as_usize, return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError,
+ Operand, Place, ProjectionElem, Rvalue, StatementKind, Terminator, UnOp,
+};
+
+pub struct Evaluator<'a> {
+ db: &'a dyn HirDatabase,
+ stack: Vec<u8>,
+ heap: Vec<u8>,
+ crate_id: CrateId,
+ // FIXME: This is a workaround, see the comment on `interpret_mir`
+ assert_placeholder_ty_is_unused: bool,
+ /// A general limit on execution, to prevent non-terminating programs from hanging the rust-analyzer main process
+ execution_limit: usize,
+ /// An additional limit on stack depth, to prevent stack overflow
+ stack_depth_limit: usize,
+}
+
+#[derive(Debug, Clone, Copy)]
+enum Address {
+ Stack(usize),
+ Heap(usize),
+}
+
+use Address::*;
+
+struct Interval {
+ addr: Address,
+ size: usize,
+}
+
+impl Interval {
+ fn new(addr: Address, size: usize) -> Self {
+ Self { addr, size }
+ }
+
+ fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
+ memory.read_memory(self.addr, self.size)
+ }
+}
+
+enum IntervalOrOwned {
+ Owned(Vec<u8>),
+ Borrowed(Interval),
+}
+impl IntervalOrOwned {
+ pub(crate) fn to_vec(self, memory: &Evaluator<'_>) -> Result<Vec<u8>> {
+ Ok(match self {
+ IntervalOrOwned::Owned(o) => o,
+ IntervalOrOwned::Borrowed(b) => b.get(memory)?.to_vec(),
+ })
+ }
+}
+
+macro_rules! from_bytes {
+ ($ty:tt, $value:expr) => {
+ ($ty::from_le_bytes(match ($value).try_into() {
+ Ok(x) => x,
+ Err(_) => return Err(MirEvalError::TypeError("mismatched size")),
+ }))
+ };
+}
+
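+// Editorial note: an address is packed into a single `usize`; the top half of
+// the address space (encoded as `usize::MAX - x`) holds stack offsets and the
+// bottom half heap offsets, so `from_usize` and `to_usize` round-trip.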
+impl Address {
+ fn from_bytes(x: &[u8]) -> Result<Self> {
+ Ok(Address::from_usize(from_bytes!(usize, x)))
+ }
+
+ fn from_usize(x: usize) -> Self {
+ if x > usize::MAX / 2 {
+ Stack(usize::MAX - x)
+ } else {
+ Heap(x)
+ }
+ }
+
+ fn to_bytes(&self) -> Vec<u8> {
+ usize::to_le_bytes(self.to_usize()).to_vec()
+ }
+
+ fn to_usize(&self) -> usize {
+ match self {
+ Stack(x) => usize::MAX - *x,
+ Heap(x) => *x,
+ }
+ }
+
+ fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {
+ match self {
+ Stack(x) => Stack(f(*x)),
+ Heap(x) => Heap(f(*x)),
+ }
+ }
+
+ fn offset(&self, offset: usize) -> Address {
+ self.map(|x| x + offset)
+ }
+}
+
+#[derive(Clone, PartialEq, Eq)]
+pub enum MirEvalError {
+ ConstEvalError(Box<ConstEvalError>),
+ LayoutError(LayoutError, Ty),
+ /// Means that code had type errors (or mismatched args) and we shouldn't have generated MIR in the first place.
+ TypeError(&'static str),
+ /// Means that code had undefined behavior. We don't try to actively detect UB, but when it is
+ /// detected, this error kind is used.
+ UndefinedBehavior(&'static str),
+ Panic,
+ MirLowerError(FunctionId, MirLowerError),
+ TypeIsUnsized(Ty, &'static str),
+ NotSupported(String),
+ InvalidConst(Const),
+ InFunction(FunctionId, Box<MirEvalError>),
+ ExecutionLimitExceeded,
+ StackOverflow,
+ TargetDataLayoutNotAvailable,
+}
+
+impl std::fmt::Debug for MirEvalError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Self::ConstEvalError(arg0) => f.debug_tuple("ConstEvalError").field(arg0).finish(),
+ Self::LayoutError(arg0, arg1) => {
+ f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()
+ }
+ Self::TypeError(arg0) => f.debug_tuple("TypeError").field(arg0).finish(),
+ Self::UndefinedBehavior(arg0) => {
+ f.debug_tuple("UndefinedBehavior").field(arg0).finish()
+ }
+ Self::Panic => write!(f, "Panic"),
+ Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"),
+ Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
+ Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
+ Self::StackOverflow => write!(f, "stack overflow"),
+ Self::MirLowerError(arg0, arg1) => {
+ f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()
+ }
+ Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),
+ Self::InvalidConst(arg0) => {
+ let data = &arg0.data(Interner);
+ f.debug_struct("InvalidConst").field("ty", &data.ty).field("value", &arg0).finish()
+ }
+ Self::InFunction(func, e) => {
+ let mut e = &**e;
+ let mut stack = vec![*func];
+ while let Self::InFunction(f, next_e) = e {
+ e = &next_e;
+ stack.push(*f);
+ }
+ f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish()
+ }
+ }
+ }
+}
+
+macro_rules! not_supported {
+ ($x: expr) => {
+ return Err(MirEvalError::NotSupported(format!($x)))
+ };
+}
+
+impl From<ConstEvalError> for MirEvalError {
+ fn from(value: ConstEvalError) -> Self {
+ MirEvalError::ConstEvalError(Box::new(value))
+ }
+}
+
+type Result<T> = std::result::Result<T, MirEvalError>;
+
+struct Locals<'a> {
+ ptr: &'a ArenaMap<LocalId, Address>,
+ body: &'a MirBody,
+ subst: &'a Substitution,
+}
+
+pub fn interpret_mir(
+ db: &dyn HirDatabase,
+ body: &MirBody,
+ // FIXME: This is a workaround. Ideally, const generics should have a separate body (issue #7434), but for now
+ // they share their body with their parent, so in MIR lowering we have locals of the parent body, which
+ // might have placeholders. With this argument, we (wrongly) assume that every placeholder type has
+ // a zero size, hoping that they are all outside of our current body. Even without a fix for #7434, we can
+ // (and probably should) do better here, for example by excluding bindings outside of the target expression.
+ assert_placeholder_ty_is_unused: bool,
+) -> Result<Const> {
+ let ty = body.locals[return_slot()].ty.clone();
+ let mut evaluator =
+ Evaluator::new(db, body.owner.module(db.upcast()).krate(), assert_placeholder_ty_is_unused);
+ let bytes = evaluator.interpret_mir_with_no_arg(&body)?;
+ let memory_map = evaluator.create_memory_map(
+ &bytes,
+ &ty,
+ &Locals { ptr: &ArenaMap::new(), body: &body, subst: &Substitution::empty(Interner) },
+ )?;
+ Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty))
+}
+
+impl Evaluator<'_> {
+ pub fn new<'a>(
+ db: &'a dyn HirDatabase,
+ crate_id: CrateId,
+ assert_placeholder_ty_is_unused: bool,
+ ) -> Evaluator<'a> {
+ Evaluator {
+ stack: vec![0],
+ heap: vec![0],
+ db,
+ crate_id,
+ assert_placeholder_ty_is_unused,
+ stack_depth_limit: 100,
+ execution_limit: 100_000,
+ }
+ }
+
+ fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result<Address> {
+ Ok(self.place_addr_and_ty(p, locals)?.0)
+ }
+
+ fn ptr_size(&self) -> usize {
+ match self.db.target_data_layout(self.crate_id) {
+ Some(x) => x.pointer_size.bytes_usize(),
+ None => 8,
+ }
+ }
+
+ fn place_addr_and_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<(Address, Ty)> {
+ let mut addr = locals.ptr[p.local];
+ let mut ty: Ty =
+ self.ty_filler(&locals.body.locals[p.local].ty, locals.subst, locals.body.owner)?;
+ for proj in &p.projection {
+ match proj {
+ ProjectionElem::Deref => {
+ ty = match &ty.data(Interner).kind {
+ TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(),
+ _ => {
+ return Err(MirEvalError::TypeError(
+ "Overloaded deref in MIR is disallowed",
+ ))
+ }
+ };
+ let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
+ addr = Address::from_usize(x);
+ }
+ ProjectionElem::Index(op) => {
+ let offset =
+ from_bytes!(usize, self.read_memory(locals.ptr[*op], self.ptr_size())?);
+ match &ty.data(Interner).kind {
+ TyKind::Ref(_, _, inner) => match &inner.data(Interner).kind {
+ TyKind::Slice(inner) => {
+ ty = inner.clone();
+ let ty_size = self.size_of_sized(
+ &ty,
+ locals,
+ "slice inner type should be sized",
+ )?;
+ let value = self.read_memory(addr, self.ptr_size() * 2)?;
+ addr = Address::from_bytes(&value[0..self.ptr_size()])?.offset(ty_size * offset);
+ }
+ x => not_supported!("MIR index for ref type {x:?}"),
+ },
+ TyKind::Array(inner, _) | TyKind::Slice(inner) => {
+ ty = inner.clone();
+ let ty_size = self.size_of_sized(
+ &ty,
+ locals,
+ "array inner type should be sized",
+ )?;
+ addr = addr.offset(ty_size * offset);
+ }
+ x => not_supported!("MIR index for type {x:?}"),
+ }
+ }
+ &ProjectionElem::TupleField(f) => match &ty.data(Interner).kind {
+ TyKind::Tuple(_, subst) => {
+ let layout = self.layout(&ty)?;
+ ty = subst
+ .as_slice(Interner)
+ .get(f)
+ .ok_or(MirEvalError::TypeError("not enough tuple fields"))?
+ .assert_ty_ref(Interner)
+ .clone();
+ let offset = layout.fields.offset(f).bytes_usize();
+ addr = addr.offset(offset);
+ }
+ _ => return Err(MirEvalError::TypeError("Only tuple has tuple fields")),
+ },
+ ProjectionElem::Field(f) => match &ty.data(Interner).kind {
+ TyKind::Adt(adt, subst) => {
+ let layout = self.layout_adt(adt.0, subst.clone())?;
+ let variant_layout = match &layout.variants {
+ Variants::Single { .. } => &layout,
+ Variants::Multiple { variants, .. } => {
+ &variants[match f.parent {
+ hir_def::VariantId::EnumVariantId(x) => {
+ RustcEnumVariantIdx(x.local_id)
+ }
+ _ => {
+ return Err(MirEvalError::TypeError(
+ "Multivariant layout only happens for enums",
+ ))
+ }
+ }]
+ }
+ };
+ ty = self.db.field_types(f.parent)[f.local_id]
+ .clone()
+ .substitute(Interner, subst);
+ let offset = variant_layout
+ .fields
+ .offset(u32::from(f.local_id.into_raw()) as usize)
+ .bytes_usize();
+ addr = addr.offset(offset);
+ }
+ _ => return Err(MirEvalError::TypeError("Only adt has fields")),
+ },
+ ProjectionElem::ConstantIndex { .. } => {
+ not_supported!("constant index")
+ }
+ ProjectionElem::Subslice { .. } => not_supported!("subslice"),
+ ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
+ }
+ }
+ Ok((addr, ty))
+ }
+
+ fn layout(&self, ty: &Ty) -> Result<Layout> {
+ layout_of_ty(self.db, ty, self.crate_id)
+ .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))
+ }
+
+ fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Layout> {
+ self.db.layout_of_adt(adt, subst.clone()).map_err(|e| {
+ MirEvalError::LayoutError(e, TyKind::Adt(chalk_ir::AdtId(adt), subst).intern(Interner))
+ })
+ }
+
+ fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<Ty> {
+ Ok(self.place_addr_and_ty(p, locals)?.1)
+ }
+
+ fn operand_ty<'a>(&'a self, o: &'a Operand, locals: &'a Locals<'a>) -> Result<Ty> {
+ Ok(match o {
+ Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?,
+ Operand::Constant(c) => c.data(Interner).ty.clone(),
+ })
+ }
+
+ fn interpret_mir(
+ &mut self,
+ body: &MirBody,
+ args: impl Iterator<Item = Vec<u8>>,
+ subst: Substitution,
+ ) -> Result<Vec<u8>> {
+ if let Some(x) = self.stack_depth_limit.checked_sub(1) {
+ self.stack_depth_limit = x;
+ } else {
+ return Err(MirEvalError::StackOverflow);
+ }
+ let mut current_block_idx = body.start_block;
+ let mut locals = Locals { ptr: &ArenaMap::new(), body: &body, subst: &subst };
+ let (locals_ptr, stack_size) = {
+ let mut stack_ptr = self.stack.len();
+ let addr = body
+ .locals
+ .iter()
+ .map(|(id, x)| {
+ let size =
+ self.size_of_sized(&x.ty, &locals, "no unsized local in extending stack")?;
+ let my_ptr = stack_ptr;
+ stack_ptr += size;
+ Ok((id, Stack(my_ptr)))
+ })
+ .collect::<Result<ArenaMap<LocalId, _>>>()?;
+ let stack_size = stack_ptr - self.stack.len();
+ (addr, stack_size)
+ };
+ locals.ptr = &locals_ptr;
+ self.stack.extend(iter::repeat(0).take(stack_size));
+ let mut remain_args = body.arg_count;
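+ // Local 0 is the return place, so argument values are written into the
+ // locals that immediately follow it.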
+ for ((_, addr), value) in locals_ptr.iter().skip(1).zip(args) {
+ self.write_memory(*addr, &value)?;
+ if remain_args == 0 {
+ return Err(MirEvalError::TypeError("more arguments provided"));
+ }
+ remain_args -= 1;
+ }
+ if remain_args > 0 {
+ return Err(MirEvalError::TypeError("not enough arguments provided"));
+ }
+ loop {
+ let current_block = &body.basic_blocks[current_block_idx];
+ if let Some(x) = self.execution_limit.checked_sub(1) {
+ self.execution_limit = x;
+ } else {
+ return Err(MirEvalError::ExecutionLimitExceeded);
+ }
+ for statement in &current_block.statements {
+ match &statement.kind {
+ StatementKind::Assign(l, r) => {
+ let addr = self.place_addr(l, &locals)?;
+ let result = self.eval_rvalue(r, &locals)?.to_vec(&self)?;
+ self.write_memory(addr, &result)?;
+ }
+ StatementKind::Deinit(_) => not_supported!("de-init statement"),
+ StatementKind::StorageLive(_)
+ | StatementKind::StorageDead(_)
+ | StatementKind::Nop => (),
+ }
+ }
+ let Some(terminator) = current_block.terminator.as_ref() else {
+ not_supported!("block without terminator");
+ };
+ match terminator {
+ Terminator::Goto { target } => {
+ current_block_idx = *target;
+ }
+ Terminator::Call {
+ func,
+ args,
+ destination,
+ target,
+ cleanup: _,
+ from_hir_call: _,
+ } => {
+ let fn_ty = self.operand_ty(func, &locals)?;
+ match &fn_ty.data(Interner).kind {
+ TyKind::FnDef(def, generic_args) => {
+ let def: CallableDefId = from_chalk(self.db, *def);
+ let generic_args = self.subst_filler(generic_args, &locals);
+ match def {
+ CallableDefId::FunctionId(def) => {
+ let arg_bytes = args
+ .iter()
+ .map(|x| {
+ Ok(self
+ .eval_operand(x, &locals)?
+ .get(&self)?
+ .to_owned())
+ })
+ .collect::<Result<Vec<_>>>()?
+ .into_iter();
+ let function_data = self.db.function_data(def);
+ let is_intrinsic = match &function_data.abi {
+ Some(abi) => *abi == Interned::new_str("rust-intrinsic"),
+ None => match def.lookup(self.db.upcast()).container {
+ hir_def::ItemContainerId::ExternBlockId(block) => {
+ let id = block.lookup(self.db.upcast()).id;
+ id.item_tree(self.db.upcast())[id.value]
+ .abi
+ .as_deref()
+ == Some("rust-intrinsic")
+ }
+ _ => false,
+ },
+ };
+ let result = if is_intrinsic {
+ self.exec_intrinsic(
+ function_data
+ .name
+ .as_text()
+ .unwrap_or_default()
+ .as_str(),
+ arg_bytes,
+ generic_args,
+ &locals,
+ )?
+ } else if let Some(x) = self.detect_lang_function(def) {
+ self.exec_lang_item(x, arg_bytes)?
+ } else {
+ let trait_env = {
+ let Some(d) = body.owner.as_generic_def_id() else {
+ not_supported!("trait resolving in non generic def id");
+ };
+ self.db.trait_environment(d)
+ };
+ let (imp, generic_args) = lookup_impl_method(
+ self.db,
+ trait_env,
+ def,
+ generic_args.clone(),
+ );
+ let generic_args =
+ self.subst_filler(&generic_args, &locals);
+ let def = imp.into();
+ let mir_body = self
+ .db
+ .mir_body(def)
+ .map_err(|e| MirEvalError::MirLowerError(imp, e))?;
+ self.interpret_mir(&mir_body, arg_bytes, generic_args)
+ .map_err(|e| {
+ MirEvalError::InFunction(imp, Box::new(e))
+ })?
+ };
+ let dest_addr = self.place_addr(destination, &locals)?;
+ self.write_memory(dest_addr, &result)?;
+ }
+ CallableDefId::StructId(id) => {
+ let (size, variant_layout, tag) = self.layout_of_variant(
+ id.into(),
+ generic_args.clone(),
+ &locals,
+ )?;
+ let result = self.make_by_layout(
+ size,
+ &variant_layout,
+ tag,
+ args,
+ &locals,
+ )?;
+ let dest_addr = self.place_addr(destination, &locals)?;
+ self.write_memory(dest_addr, &result)?;
+ }
+ CallableDefId::EnumVariantId(id) => {
+ let (size, variant_layout, tag) = self.layout_of_variant(
+ id.into(),
+ generic_args.clone(),
+ &locals,
+ )?;
+ let result = self.make_by_layout(
+ size,
+ &variant_layout,
+ tag,
+ args,
+ &locals,
+ )?;
+ let dest_addr = self.place_addr(destination, &locals)?;
+ self.write_memory(dest_addr, &result)?;
+ }
+ }
+ current_block_idx =
+ target.expect("broken mir, function without target");
+ }
+ _ => not_supported!("unknown function type"),
+ }
+ }
+ Terminator::SwitchInt { discr, targets } => {
+ let val = u128::from_le_bytes(pad16(
+ self.eval_operand(discr, &locals)?.get(&self)?,
+ false,
+ ));
+ current_block_idx = targets.target_for_value(val);
+ }
+ Terminator::Return => {
+ let ty = body.locals[return_slot()].ty.clone();
+ self.stack_depth_limit += 1;
+ return Ok(self
+ .read_memory(
+ locals.ptr[return_slot()],
+ self.size_of_sized(&ty, &locals, "return type")?,
+ )?
+ .to_owned());
+ }
+ Terminator::Unreachable => {
+ return Err(MirEvalError::UndefinedBehavior("unreachable executed"))
+ }
+ _ => not_supported!("unknown terminator"),
+ }
+ }
+ }
+
+ fn eval_rvalue<'a>(
+ &'a mut self,
+ r: &'a Rvalue,
+ locals: &'a Locals<'a>,
+ ) -> Result<IntervalOrOwned> {
+ use IntervalOrOwned::*;
+ Ok(match r {
+ Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?),
+ Rvalue::Ref(_, p) => {
+ let addr = self.place_addr(p, locals)?;
+ Owned(addr.to_bytes())
+ }
+ Rvalue::Len(_) => not_supported!("rvalue len"),
+ Rvalue::UnaryOp(op, val) => {
+ let mut c = self.eval_operand(val, locals)?.get(&self)?;
+ let mut ty = self.operand_ty(val, locals)?;
+ while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
+ ty = z.clone();
+ let size = self.size_of_sized(&ty, locals, "operand of unary op")?;
+ c = self.read_memory(Address::from_bytes(c)?, size)?;
+ }
+ let mut c = c.to_vec();
+ if ty.as_builtin() == Some(BuiltinType::Bool) {
+ c[0] = 1 - c[0];
+ } else {
+ match op {
+ UnOp::Not => c.iter_mut().for_each(|x| *x = !*x),
+ UnOp::Neg => {
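+ // Two's-complement negation: invert every byte, then add one,
+ // propagating the carry through the little-endian bytes.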
+ c.iter_mut().for_each(|x| *x = !*x);
+ for k in c.iter_mut() {
+ let o;
+ (*k, o) = k.overflowing_add(1);
+ if !o {
+ break;
+ }
+ }
+ }
+ }
+ }
+ Owned(c)
+ }
+ Rvalue::CheckedBinaryOp(op, lhs, rhs) => {
+ let lc = self.eval_operand(lhs, locals)?;
+ let rc = self.eval_operand(rhs, locals)?;
+ let mut lc = lc.get(&self)?;
+ let mut rc = rc.get(&self)?;
+ let mut ty = self.operand_ty(lhs, locals)?;
+ while let TyKind::Ref(_, _, z) = ty.kind(Interner) {
+ ty = z.clone();
+ let size = self.size_of_sized(&ty, locals, "operand of binary op")?;
+ lc = self.read_memory(Address::from_bytes(lc)?, size)?;
+ rc = self.read_memory(Address::from_bytes(rc)?, size)?;
+ }
+ let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_)));
+ let l128 = i128::from_le_bytes(pad16(lc, is_signed));
+ let r128 = i128::from_le_bytes(pad16(rc, is_signed));
+ match op {
+ BinOp::Ge | BinOp::Gt | BinOp::Le | BinOp::Lt | BinOp::Eq | BinOp::Ne => {
+ let r = match op {
+ BinOp::Ge => l128 >= r128,
+ BinOp::Gt => l128 > r128,
+ BinOp::Le => l128 <= r128,
+ BinOp::Lt => l128 < r128,
+ BinOp::Eq => l128 == r128,
+ BinOp::Ne => l128 != r128,
+ _ => unreachable!(),
+ };
+ let r = r as u8;
+ Owned(vec![r])
+ }
+ BinOp::BitAnd
+ | BinOp::BitOr
+ | BinOp::BitXor
+ | BinOp::Add
+ | BinOp::Mul
+ | BinOp::Div
+ | BinOp::Rem
+ | BinOp::Sub => {
+ let r = match op {
+ BinOp::Add => l128.overflowing_add(r128).0,
+ BinOp::Mul => l128.overflowing_mul(r128).0,
+ BinOp::Div => l128.checked_div(r128).ok_or(MirEvalError::Panic)?,
+ BinOp::Rem => l128.checked_rem(r128).ok_or(MirEvalError::Panic)?,
+ BinOp::Sub => l128.overflowing_sub(r128).0,
+ BinOp::BitAnd => l128 & r128,
+ BinOp::BitOr => l128 | r128,
+ BinOp::BitXor => l128 ^ r128,
+ _ => unreachable!(),
+ };
+ let r = r.to_le_bytes();
+ for &k in &r[lc.len()..] {
+ if k != 0 && (k != 255 || !is_signed) {
+ return Err(MirEvalError::Panic);
+ }
+ }
+ Owned(r[0..lc.len()].into())
+ }
+ BinOp::Shl | BinOp::Shr => {
+ let shift_amount = if r128 < 0 || r128 >= 128 {
+ // Negative or too-large shift amounts would overflow the host `i128`.
+ return Err(MirEvalError::Panic);
+ } else {
+ r128 as u8
+ };
+ let r = match op {
+ BinOp::Shl => l128 << shift_amount,
+ BinOp::Shr => l128 >> shift_amount,
+ _ => unreachable!(),
+ };
+ Owned(r.to_le_bytes()[0..lc.len()].into())
+ }
+ BinOp::Offset => not_supported!("offset binop"),
+ }
+ }
+ Rvalue::Discriminant(p) => {
+ let ty = self.place_ty(p, locals)?;
+ let bytes = self.eval_place(p, locals)?.get(&self)?;
+ let layout = self.layout(&ty)?;
+ match layout.variants {
+ Variants::Single { .. } => Owned(0u128.to_le_bytes().to_vec()),
+ Variants::Multiple { tag, tag_encoding, .. } => {
+ let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else {
+ not_supported!("missing target data layout");
+ };
+ let size = tag.size(&*target_data_layout).bytes_usize();
+ let offset = layout.fields.offset(0).bytes_usize(); // The tag is the only field in the enum layout itself
+ match tag_encoding {
+ TagEncoding::Direct => {
+ let tag = &bytes[offset..offset + size];
+ Owned(pad16(tag, false).to_vec())
+ }
+ TagEncoding::Niche { untagged_variant, niche_start, .. } => {
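+ // Recover a candidate discriminant by (wrapping) subtraction of
+ // `niche_start` from the raw tag; if no variant's const-evaluated
+ // discriminant matches it, the value belongs to the untagged variant.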
+ let tag = &bytes[offset..offset + size];
+ let candidate_discriminant = i128::from_le_bytes(pad16(tag, false))
+ .wrapping_sub(niche_start as i128);
+ let enum_id = match ty.kind(Interner) {
+ TyKind::Adt(e, _) => match e.0 {
+ AdtId::EnumId(e) => e,
+ _ => not_supported!("Non enum with multi variant layout"),
+ },
+ _ => not_supported!("Non adt with multi variant layout"),
+ };
+ let enum_data = self.db.enum_data(enum_id);
+ let result = 'b: {
+ for (local_id, _) in enum_data.variants.iter() {
+ if candidate_discriminant
+ == self.db.const_eval_discriminant(EnumVariantId {
+ parent: enum_id,
+ local_id,
+ })?
+ {
+ break 'b candidate_discriminant;
+ }
+ }
+ self.db.const_eval_discriminant(EnumVariantId {
+ parent: enum_id,
+ local_id: untagged_variant.0,
+ })?
+ };
+ Owned(result.to_le_bytes().to_vec())
+ }
+ }
+ }
+ }
+ }
+ Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
+ Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
+ Rvalue::Aggregate(kind, values) => match kind {
+ AggregateKind::Array(_) => {
+ let mut r = vec![];
+ for x in values {
+ let value = self.eval_operand(x, locals)?.get(&self)?;
+ r.extend(value);
+ }
+ Owned(r)
+ }
+ AggregateKind::Tuple(ty) => {
+ let layout = self.layout(&ty)?;
+ Owned(self.make_by_layout(
+ layout.size.bytes_usize(),
+ &layout,
+ None,
+ values,
+ locals,
+ )?)
+ }
+ AggregateKind::Union(x, f) => {
+ let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?;
+ let offset = layout
+ .fields
+ .offset(u32::from(f.local_id.into_raw()) as usize)
+ .bytes_usize();
+ let op = self.eval_operand(&values[0], locals)?.get(&self)?;
+ let mut result = vec![0; layout.size.bytes_usize()];
+ result[offset..offset + op.len()].copy_from_slice(op);
+ Owned(result)
+ }
+ AggregateKind::Adt(x, subst) => {
+ let (size, variant_layout, tag) =
+ self.layout_of_variant(*x, subst.clone(), locals)?;
+ Owned(self.make_by_layout(size, &variant_layout, tag, values, locals)?)
+ }
+ },
+ Rvalue::Cast(kind, operand, target_ty) => match kind {
+ CastKind::PointerExposeAddress => not_supported!("exposing pointer address"),
+ CastKind::PointerFromExposedAddress => {
+ not_supported!("creating pointer from exposed address")
+ }
+ CastKind::Pointer(cast) => match cast {
+ PointerCast::Unsize => {
+ let current_ty = self.operand_ty(operand, locals)?;
+ match &target_ty.data(Interner).kind {
+ TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => {
+ match &ty.data(Interner).kind {
+ TyKind::Slice(_) => match &current_ty.data(Interner).kind {
+ TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => {
+ match &ty.data(Interner).kind {
+ TyKind::Array(_, size) => {
+ let addr = self
+ .eval_operand(operand, locals)?
+ .get(&self)?;
+ let len = const_as_usize(size);
+ let mut r = Vec::with_capacity(16);
+ r.extend(addr.iter().copied());
+ r.extend(len.to_le_bytes().into_iter());
+ Owned(r)
+ }
+ _ => {
+ not_supported!("slice unsizing from non arrays")
+ }
+ }
+ }
+ _ => not_supported!("slice unsizing from non pointers"),
+ },
+ TyKind::Dyn(_) => not_supported!("dyn pointer unsize cast"),
+ _ => not_supported!("unknown unsized cast"),
+ }
+ }
+ _ => not_supported!("unsized cast on unknown pointer type"),
+ }
+ }
+ x => not_supported!("pointer cast {x:?}"),
+ },
+ CastKind::DynStar => not_supported!("dyn star cast"),
+ CastKind::IntToInt => {
+ // FIXME: handle signed cast
+ let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, false);
+ let dest_size =
+ self.size_of_sized(target_ty, locals, "destination of int to int cast")?;
+ Owned(current[0..dest_size].to_vec())
+ }
+ CastKind::FloatToInt => not_supported!("float to int cast"),
+ CastKind::FloatToFloat => not_supported!("float to float cast"),
+ CastKind::IntToFloat => not_supported!("float to int cast"),
+ CastKind::PtrToPtr => not_supported!("ptr to ptr cast"),
+ CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
+ },
+ })
+ }
+
+ fn layout_of_variant(
+ &mut self,
+ x: VariantId,
+ subst: Substitution,
+ locals: &Locals<'_>,
+ ) -> Result<(usize, Layout, Option<(usize, usize, i128)>)> {
+ let adt = x.adt_id();
+ if let DefWithBodyId::VariantId(f) = locals.body.owner {
+ if let VariantId::EnumVariantId(x) = x {
+ if AdtId::from(f.parent) == adt {
+ // Computing the exact size of an enum requires resolving its discriminants. To prevent query
+ // cycles (and infinite-size type errors) we use a dummy layout
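+ // (e.g. a discriminant expression that names its own enum type would
+ // otherwise recurse through this layout computation)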
+ let i = self.db.const_eval_discriminant(x)?;
+ return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i))));
+ }
+ }
+ }
+ let layout = self.layout_adt(adt, subst)?;
+ Ok(match layout.variants {
+ Variants::Single { .. } => (layout.size.bytes_usize(), layout, None),
+ Variants::Multiple { variants, tag, tag_encoding, .. } => {
+ let cx = self
+ .db
+ .target_data_layout(self.crate_id)
+ .ok_or(MirEvalError::TargetDataLayoutNotAvailable)?;
+ let enum_variant_id = match x {
+ VariantId::EnumVariantId(x) => x,
+ _ => not_supported!("multi variant layout for non-enums"),
+ };
+ let rustc_enum_variant_idx = RustcEnumVariantIdx(enum_variant_id.local_id);
+ let mut discriminant = self.db.const_eval_discriminant(enum_variant_id)?;
+ let variant_layout = variants[rustc_enum_variant_idx].clone();
+ let have_tag = match tag_encoding {
+ TagEncoding::Direct => true,
+ TagEncoding::Niche { untagged_variant, niche_variants: _, niche_start } => {
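+ // Mirror of the decoding in `Rvalue::Discriminant`: the stored tag is
+ // `discriminant + niche_start`, and the untagged variant needs no tag at all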
+ discriminant = discriminant.wrapping_add(niche_start as i128);
+ untagged_variant != rustc_enum_variant_idx
+ }
+ };
+ (
+ layout.size.bytes_usize(),
+ variant_layout,
+ if have_tag {
+ Some((
+ layout.fields.offset(0).bytes_usize(),
+ tag.size(&*cx).bytes_usize(),
+ discriminant,
+ ))
+ } else {
+ None
+ },
+ )
+ }
+ })
+ }
+
+ fn make_by_layout(
+ &mut self,
+ size: usize, // Not necessarily equal to variant_layout.size
+ variant_layout: &Layout,
+ tag: Option<(usize, usize, i128)>,
+ values: &Vec<Operand>,
+ locals: &Locals<'_>,
+ ) -> Result<Vec<u8>> {
+ let mut result = vec![0; size];
+ if let Some((offset, size, value)) = tag {
+ result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]);
+ }
+ for (i, op) in values.iter().enumerate() {
+ let offset = variant_layout.fields.offset(i).bytes_usize();
+ let op = self.eval_operand(op, locals)?.get(&self)?;
+ result[offset..offset + op.len()].copy_from_slice(op);
+ }
+ Ok(result)
+ }
+
+ fn eval_operand(&mut self, x: &Operand, locals: &Locals<'_>) -> Result<Interval> {
+ Ok(match x {
+ Operand::Copy(p) | Operand::Move(p) => self.eval_place(p, locals)?,
+ Operand::Constant(konst) => {
+ let data = &konst.data(Interner);
+ match &data.value {
+ chalk_ir::ConstValue::BoundVar(b) => {
+ let c = locals
+ .subst
+ .as_slice(Interner)
+ .get(b.index)
+ .ok_or(MirEvalError::TypeError("missing generic arg"))?
+ .assert_const_ref(Interner);
+ self.eval_operand(&Operand::Constant(c.clone()), locals)?
+ }
+ chalk_ir::ConstValue::InferenceVar(_) => {
+ not_supported!("inference var constant")
+ }
+ chalk_ir::ConstValue::Placeholder(_) => not_supported!("placeholder constant"),
+ chalk_ir::ConstValue::Concrete(c) => match &c.interned {
+ ConstScalar::Bytes(v, memory_map) => {
+ let mut v: Cow<'_, [u8]> = Cow::Borrowed(v);
+ let patch_map = memory_map.transform_addresses(|b| {
+ let addr = self.heap_allocate(b.len());
+ self.write_memory(addr, b)?;
+ Ok(addr.to_usize())
+ })?;
+ let size = self.size_of(&data.ty, locals)?.unwrap_or(v.len());
+ if size != v.len() {
+ // Handle the self-referential enum case, which uses a dummy 16-byte size (see `size_of`)
+ if size == 16 && v.len() < 16 {
+ v = Cow::Owned(pad16(&v, false).to_vec());
+ } else if size < 16 && v.len() == 16 {
+ v = Cow::Owned(v[0..size].to_vec());
+ } else {
+ return Err(MirEvalError::InvalidConst(konst.clone()));
+ }
+ }
+ let addr = self.heap_allocate(size);
+ self.write_memory(addr, &v)?;
+ self.patch_addresses(&patch_map, addr, &data.ty, locals)?;
+ Interval::new(addr, size)
+ }
+ ConstScalar::Unknown => not_supported!("evaluating unknown const"),
+ },
+ }
+ }
+ })
+ }
+
+ fn eval_place(&mut self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
+ let addr = self.place_addr(p, locals)?;
+ Ok(Interval::new(
+ addr,
+ self.size_of_sized(&self.place_ty(p, locals)?, locals, "type of this place")?,
+ ))
+ }
+
+ fn read_memory(&self, addr: Address, size: usize) -> Result<&[u8]> {
+ let (mem, pos) = match addr {
+ Stack(x) => (&self.stack, x),
+ Heap(x) => (&self.heap, x),
+ };
+ mem.get(pos..pos + size).ok_or(MirEvalError::UndefinedBehavior("out of bound memory read"))
+ }
+
+ fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
+ let (mem, pos) = match addr {
+ Stack(x) => (&mut self.stack, x),
+ Heap(x) => (&mut self.heap, x),
+ };
+ mem.get_mut(pos..pos + r.len())
+ .ok_or(MirEvalError::UndefinedBehavior("out of bound memory write"))?
+ .copy_from_slice(r);
+ Ok(())
+ }
+
+ fn size_of(&self, ty: &Ty, locals: &Locals<'_>) -> Result<Option<usize>> {
+ if let DefWithBodyId::VariantId(f) = locals.body.owner {
+ if let Some((adt, _)) = ty.as_adt() {
+ if AdtId::from(f.parent) == adt {
+ // Computing the exact size of an enum requires resolving its discriminants. To prevent query
+ // cycles (and infinite-size type errors) we use a dummy size
+ return Ok(Some(16));
+ }
+ }
+ }
+ let ty = &self.ty_filler(ty, locals.subst, locals.body.owner)?;
+ let layout = self.layout(ty);
+ if self.assert_placeholder_ty_is_unused {
+ if matches!(layout, Err(MirEvalError::LayoutError(LayoutError::HasPlaceholder, _))) {
+ return Ok(Some(0));
+ }
+ }
+ let layout = layout?;
+ Ok(layout.is_sized().then(|| layout.size.bytes_usize()))
+ }
+
+ /// A version of `self.size_of` which returns an error if the type is unsized. The `what` argument
+ /// should complete this sentence: `error: type {ty} is unsized. {what} should be sized`
+ fn size_of_sized(&self, ty: &Ty, locals: &Locals<'_>, what: &'static str) -> Result<usize> {
+ match self.size_of(ty, locals)? {
+ Some(x) => Ok(x),
+ None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)),
+ }
+ }
+
+ /// Uses `ty_filler` to fill an entire subst
+ fn subst_filler(&self, subst: &Substitution, locals: &Locals<'_>) -> Substitution {
+ Substitution::from_iter(
+ Interner,
+ subst.iter(Interner).map(|x| match x.data(Interner) {
+ chalk_ir::GenericArgData::Ty(ty) => {
+ let Ok(ty) = self.ty_filler(ty, locals.subst, locals.body.owner) else {
+ return x.clone();
+ };
+ chalk_ir::GenericArgData::Ty(ty).intern(Interner)
+ }
+ _ => x.clone(),
+ }),
+ )
+ }
+
+ /// This function substitutes the placeholders of the body with the provided subst, effectively
+ /// playing the role of monomorphization. In addition to placeholders, it substitutes opaque types
+ /// (return position impl traits) with their underlying type.
+ fn ty_filler(&self, ty: &Ty, subst: &Substitution, owner: DefWithBodyId) -> Result<Ty> {
+ struct Filler<'a> {
+ db: &'a dyn HirDatabase,
+ subst: &'a Substitution,
+ skip_params: usize,
+ }
+ impl FallibleTypeFolder<Interner> for Filler<'_> {
+ type Error = MirEvalError;
+
+ fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn try_fold_ty(
+ &mut self,
+ ty: Ty,
+ outer_binder: DebruijnIndex,
+ ) -> std::result::Result<Ty, Self::Error> {
+ match ty.kind(Interner) {
+ TyKind::OpaqueType(id, subst) => {
+ let impl_trait_id = self.db.lookup_intern_impl_trait_id((*id).into());
+ match impl_trait_id {
+ crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+ let infer = self.db.infer(func.into());
+ let filler = &mut Filler { db: self.db, subst, skip_params: 0 };
+ filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
+ }
+ crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
+ not_supported!("async block impl trait");
+ }
+ }
+ }
+ _ => ty.try_super_fold_with(self.as_dyn(), outer_binder),
+ }
+ }
+
+ fn try_fold_free_placeholder_ty(
+ &mut self,
+ idx: chalk_ir::PlaceholderIndex,
+ _outer_binder: DebruijnIndex,
+ ) -> std::result::Result<Ty, Self::Error> {
+ let x = from_placeholder_idx(self.db, idx);
+ Ok(self
+ .subst
+ .as_slice(Interner)
+ .get((u32::from(x.local_id.into_raw()) as usize) + self.skip_params)
+ .and_then(|x| x.ty(Interner))
+ .ok_or(MirEvalError::TypeError("Generic arg not provided"))?
+ .clone())
+ }
+ }
+ let filler = &mut Filler { db: self.db, subst, skip_params: 0 };
+ Ok(normalize(self.db, owner, ty.clone().try_fold_with(filler, DebruijnIndex::INNERMOST)?))
+ }
+
+ fn heap_allocate(&mut self, s: usize) -> Address {
+ let pos = self.heap.len();
+ self.heap.extend(iter::repeat(0).take(s));
+ Address::Heap(pos)
+ }
+
+ pub fn interpret_mir_with_no_arg(&mut self, body: &MirBody) -> Result<Vec<u8>> {
+ self.interpret_mir(&body, vec![].into_iter(), Substitution::empty(Interner))
+ }
+
+ fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> {
+ let candidate = lang_attr(self.db.upcast(), def)?;
+ // Filter out lang items that have ordinary function bodies and should be evaluated normally
+ if [LangItem::IntoIterIntoIter, LangItem::IteratorNext].contains(&candidate) {
+ return None;
+ }
+ Some(candidate)
+ }
+
+ fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result<MemoryMap> {
+ // FIXME: support indirect references
+ let mut mm = MemoryMap::default();
+ match ty.kind(Interner) {
+ TyKind::Ref(_, _, t) => {
+ let size = self.size_of(t, locals)?;
+ match size {
+ Some(size) => {
+ let addr_usize = from_bytes!(usize, bytes);
+ mm.insert(
+ addr_usize,
+ self.read_memory(Address::from_usize(addr_usize), size)?.to_vec(),
+ )
+ }
+ None => {
+ let element_size = match t.kind(Interner) {
+ TyKind::Str => 1,
+ TyKind::Slice(t) => {
+ self.size_of_sized(t, locals, "slice inner type")?
+ }
+ _ => return Ok(mm), // FIXME: support other kinds of unsized types
+ };
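+ // An unsized reference is a fat pointer: the first half of the bytes
+ // is the data address and the second half the length metadata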
+ let (addr, meta) = bytes.split_at(bytes.len() / 2);
+ let size = element_size * from_bytes!(usize, meta);
+ let addr = Address::from_bytes(addr)?;
+ mm.insert(addr.to_usize(), self.read_memory(addr, size)?.to_vec());
+ }
+ }
+ }
+ _ => (),
+ }
+ Ok(mm)
+ }
+
+ fn patch_addresses(
+ &mut self,
+ patch_map: &HashMap<usize, usize>,
+ addr: Address,
+ ty: &Ty,
+ locals: &Locals<'_>,
+ ) -> Result<()> {
+ // FIXME: support indirect references
+ let my_size = self.size_of_sized(ty, locals, "value to patch address")?;
+ match ty.kind(Interner) {
+ TyKind::Ref(_, _, t) => {
+ let size = self.size_of(t, locals)?;
+ match size {
+ Some(_) => {
+ let current = from_bytes!(usize, self.read_memory(addr, my_size)?);
+ if let Some(x) = patch_map.get(&current) {
+ self.write_memory(addr, &x.to_le_bytes())?;
+ }
+ }
+ None => {
+ let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?);
+ if let Some(x) = patch_map.get(&current) {
+ self.write_memory(addr, &x.to_le_bytes())?;
+ }
+ }
+ }
+ }
+ _ => (),
+ }
+ Ok(())
+ }
+
+ fn exec_intrinsic(
+ &self,
+ as_str: &str,
+ _arg_bytes: impl Iterator<Item = Vec<u8>>,
+ generic_args: Substitution,
+ locals: &Locals<'_>,
+ ) -> Result<Vec<u8>> {
+ match as_str {
+ "size_of" => {
+ let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
+ return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
+ };
+ let size = self.size_of(ty, locals)?;
+ match size {
+ Some(x) => Ok(x.to_le_bytes().to_vec()),
+ None => return Err(MirEvalError::TypeError("size_of arg is unsized")),
+ }
+ }
+ _ => not_supported!("unknown intrinsic {as_str}"),
+ }
+ }
+
+ pub(crate) fn exec_lang_item(
+ &self,
+ x: LangItem,
+ mut args: std::vec::IntoIter<Vec<u8>>,
+ ) -> Result<Vec<u8>> {
+ use LangItem::*;
+ match x {
+ PanicFmt | BeginPanic => Err(MirEvalError::Panic),
+ SliceLen => {
+ let arg = args
+ .next()
+ .ok_or(MirEvalError::TypeError("argument of <[T]>::len() is not provided"))?;
+ let ptr_size = arg.len() / 2;
+ Ok(arg[ptr_size..].into())
+ }
+ x => not_supported!("Executing lang item {x:?}"),
+ }
+ }
+}
+
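+/// Pads `x` to 16 bytes (little-endian), sign-extending when `is_signed`; for
+/// example, `pad16(&[0xff], true)` yields `[0xff; 16]`, i.e. `-1_i128`.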
+pub fn pad16(x: &[u8], is_signed: bool) -> [u8; 16] {
+ // Little-endian: the last byte is the most significant, and its high bit is the sign bit
+ let is_negative = is_signed && x.last().unwrap_or(&0) >= &128;
+ let fill_with = if is_negative { 255 } else { 0 };
+ x.iter()
+ .copied()
+ .chain(iter::repeat(fill_with))
+ .take(16)
+ .collect::<Vec<u8>>()
+ .try_into()
+ .expect("iterator take is not working")
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
new file mode 100644
index 000000000..c4dd7c0ac
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -0,0 +1,1581 @@
+//! This module generates a polymorphic MIR from a HIR body.
+
+use std::{iter, mem, sync::Arc};
+
+use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
+use hir_def::{
+ body::Body,
+ expr::{
+ Array, BindingAnnotation, BindingId, ExprId, LabelId, Literal, MatchArm, Pat, PatId,
+ RecordLitField,
+ },
+ lang_item::{LangItem, LangItemTarget},
+ layout::LayoutError,
+ path::Path,
+ resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
+ DefWithBodyId, EnumVariantId, HasModule,
+};
+use hir_expand::name::Name;
+use la_arena::ArenaMap;
+
+use crate::{
+ consteval::ConstEvalError, db::HirDatabase, display::HirDisplay, infer::TypeMismatch,
+ inhabitedness::is_ty_uninhabited_from, layout::layout_of_ty, mapping::ToChalk, static_lifetime,
+ utils::generics, Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
+};
+
+use super::*;
+
+mod as_place;
+
+#[derive(Debug, Clone, Copy)]
+struct LoopBlocks {
+ begin: BasicBlockId,
+ /// `None` for non-terminating loops
+ end: Option<BasicBlockId>,
+}
+
+struct MirLowerCtx<'a> {
+ result: MirBody,
+ owner: DefWithBodyId,
+ current_loop_blocks: Option<LoopBlocks>,
+ discr_temp: Option<Place>,
+ db: &'a dyn HirDatabase,
+ body: &'a Body,
+ infer: &'a InferenceResult,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum MirLowerError {
+ ConstEvalError(Box<ConstEvalError>),
+ LayoutError(LayoutError),
+ IncompleteExpr,
+ UnresolvedName(String),
+ RecordLiteralWithoutPath,
+ UnresolvedMethod,
+ UnresolvedField,
+ MissingFunctionDefinition,
+ TypeMismatch(TypeMismatch),
+ /// This should never happen; the type mismatch check should catch everything.
+ TypeError(&'static str),
+ NotSupported(String),
+ ContinueWithoutLoop,
+ BreakWithoutLoop,
+ Loop,
+ /// Something that should never happen and is definitely a bug, but we don't want to panic if it happens
+ ImplementationError(&'static str),
+ LangItemNotFound(LangItem),
+ MutatingRvalue,
+}
+
+macro_rules! not_supported {
+ ($x: expr) => {
+ return Err(MirLowerError::NotSupported(format!($x)))
+ };
+}
+
+macro_rules! implementation_error {
+ ($x: expr) => {{
+ ::stdx::never!("MIR lower implementation bug: {}", $x);
+ return Err(MirLowerError::ImplementationError($x));
+ }};
+}
+
+impl From<ConstEvalError> for MirLowerError {
+ fn from(value: ConstEvalError) -> Self {
+ match value {
+ ConstEvalError::MirLowerError(e) => e,
+ _ => MirLowerError::ConstEvalError(Box::new(value)),
+ }
+ }
+}
+
+impl From<LayoutError> for MirLowerError {
+ fn from(value: LayoutError) -> Self {
+ MirLowerError::LayoutError(value)
+ }
+}
+
+impl MirLowerError {
+ fn unresolved_path(db: &dyn HirDatabase, p: &Path) -> Self {
+ Self::UnresolvedName(p.display(db).to_string())
+ }
+}
+
+type Result<T> = std::result::Result<T, MirLowerError>;
+
+impl MirLowerCtx<'_> {
+ fn temp(&mut self, ty: Ty) -> Result<LocalId> {
+ if matches!(ty.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) {
+ implementation_error!("unsized temporaries");
+ }
+ Ok(self.result.locals.alloc(Local { ty }))
+ }
+
+ fn lower_expr_to_some_operand(
+ &mut self,
+ expr_id: ExprId,
+ current: BasicBlockId,
+ ) -> Result<Option<(Operand, BasicBlockId)>> {
+ if !self.has_adjustments(expr_id) {
+ match &self.body.exprs[expr_id] {
+ Expr::Literal(l) => {
+ let ty = self.expr_ty(expr_id);
+ return Ok(Some((self.lower_literal_to_operand(ty, l)?, current)));
+ }
+ _ => (),
+ }
+ }
+ let Some((p, current)) = self.lower_expr_as_place(current, expr_id, true)? else {
+ return Ok(None);
+ };
+ Ok(Some((Operand::Copy(p), current)))
+ }
+
+ fn lower_expr_to_place_with_adjust(
+ &mut self,
+ expr_id: ExprId,
+ place: Place,
+ current: BasicBlockId,
+ adjustments: &[Adjustment],
+ ) -> Result<Option<BasicBlockId>> {
+ match adjustments.split_last() {
+ Some((last, rest)) => match &last.kind {
+ Adjust::NeverToAny => {
+ let temp = self.temp(TyKind::Never.intern(Interner))?;
+ self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest)
+ }
+ Adjust::Deref(_) => {
+ let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)? else {
+ return Ok(None);
+ };
+ self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
+ Ok(Some(current))
+ }
+ Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => {
+ let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
+ return Ok(None);
+ };
+ let bk = BorrowKind::from_chalk(*m);
+ self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
+ Ok(Some(current))
+ }
+ Adjust::Pointer(cast) => {
+ let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else {
+ return Ok(None);
+ };
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::Cast(
+ CastKind::Pointer(cast.clone()),
+ Operand::Copy(p).into(),
+ last.target.clone(),
+ ),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ },
+ None => self.lower_expr_to_place_without_adjust(expr_id, place, current),
+ }
+ }
+
+ fn lower_expr_to_place(
+ &mut self,
+ expr_id: ExprId,
+ place: Place,
+ prev_block: BasicBlockId,
+ ) -> Result<Option<BasicBlockId>> {
+ if let Some(adjustments) = self.infer.expr_adjustments.get(&expr_id) {
+ return self.lower_expr_to_place_with_adjust(expr_id, place, prev_block, adjustments);
+ }
+ self.lower_expr_to_place_without_adjust(expr_id, place, prev_block)
+ }
+
+ fn lower_expr_to_place_without_adjust(
+ &mut self,
+ expr_id: ExprId,
+ place: Place,
+ mut current: BasicBlockId,
+ ) -> Result<Option<BasicBlockId>> {
+ match &self.body.exprs[expr_id] {
+ Expr::Missing => Err(MirLowerError::IncompleteExpr),
+ Expr::Path(p) => {
+ let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
+ let pr = resolver
+ .resolve_path_in_value_ns(self.db.upcast(), p.mod_path())
+ .ok_or_else(unresolved_name)?;
+ let pr = match pr {
+ ResolveValueResult::ValueNs(v) => v,
+ ResolveValueResult::Partial(..) => {
+ if let Some(assoc) = self
+ .infer
+ .assoc_resolutions_for_expr(expr_id)
+ {
+ match assoc.0 {
+ hir_def::AssocItemId::ConstId(c) => {
+ self.lower_const(c, current, place, expr_id.into())?;
+ return Ok(Some(current))
+ },
+ _ => not_supported!("associated functions and types"),
+ }
+ } else if let Some(variant) = self
+ .infer
+ .variant_resolution_for_expr(expr_id)
+ {
+ match variant {
+ VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e),
+ VariantId::StructId(s) => ValueNs::StructId(s),
+ VariantId::UnionId(_) => implementation_error!("Union variant as path"),
+ }
+ } else {
+ return Err(unresolved_name());
+ }
+ }
+ };
+ match pr {
+ ValueNs::LocalBinding(pat_id) => {
+ self.push_assignment(
+ current,
+ place,
+ Operand::Copy(self.result.binding_locals[pat_id].into()).into(),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ ValueNs::ConstId(const_id) => {
+ self.lower_const(const_id, current, place, expr_id.into())?;
+ Ok(Some(current))
+ }
+ ValueNs::EnumVariantId(variant_id) => {
+ let ty = self.infer.type_of_expr[expr_id].clone();
+ let current = self.lower_enum_variant(
+ variant_id,
+ current,
+ place,
+ ty,
+ vec![],
+ expr_id.into(),
+ )?;
+ Ok(Some(current))
+ }
+ ValueNs::GenericParam(p) => {
+ let Some(def) = self.owner.as_generic_def_id() else {
+ not_supported!("owner without generic def id");
+ };
+ let gen = generics(self.db.upcast(), def);
+ let ty = self.expr_ty(expr_id);
+ self.push_assignment(
+ current,
+ place,
+ Operand::Constant(
+ ConstData {
+ ty,
+ value: chalk_ir::ConstValue::BoundVar(BoundVar::new(
+ DebruijnIndex::INNERMOST,
+ gen.param_idx(p.into()).ok_or(MirLowerError::TypeError(
+ "fail to lower const generic param",
+ ))?,
+ )),
+ }
+ .intern(Interner),
+ )
+ .into(),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ ValueNs::StructId(_) => {
+ // It's probably a unit struct or a zero-sized function, so no action is needed.
+ Ok(Some(current))
+ }
+ x => {
+ not_supported!("unknown name {x:?} in value name space");
+ }
+ }
+ }
+ Expr::If { condition, then_branch, else_branch } => {
+ let Some((discr, current)) = self.lower_expr_to_some_operand(*condition, current)? else {
+ return Ok(None);
+ };
+ let start_of_then = self.new_basic_block();
+ let end_of_then =
+ self.lower_expr_to_place(*then_branch, place.clone(), start_of_then)?;
+ let start_of_else = self.new_basic_block();
+ let end_of_else = if let Some(else_branch) = else_branch {
+ self.lower_expr_to_place(*else_branch, place, start_of_else)?
+ } else {
+ Some(start_of_else)
+ };
+ self.set_terminator(
+ current,
+ Terminator::SwitchInt {
+ discr,
+ targets: SwitchTargets::static_if(1, start_of_then, start_of_else),
+ },
+ );
+ Ok(self.merge_blocks(end_of_then, end_of_else))
+ }
+ Expr::Let { pat, expr } => {
+ let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ let (then_target, else_target) = self.pattern_match(
+ current,
+ None,
+ cond_place,
+ self.expr_ty_after_adjustments(*expr),
+ *pat,
+ BindingAnnotation::Unannotated,
+ )?;
+ self.write_bytes_to_place(
+ then_target,
+ place.clone(),
+ vec![1],
+ TyBuilder::bool(),
+ MirSpan::Unknown,
+ )?;
+ if let Some(else_target) = else_target {
+ self.write_bytes_to_place(
+ else_target,
+ place,
+ vec![0],
+ TyBuilder::bool(),
+ MirSpan::Unknown,
+ )?;
+ }
+ Ok(self.merge_blocks(Some(then_target), else_target))
+ }
+ Expr::Unsafe { id: _, statements, tail } => {
+ self.lower_block_to_place(None, statements, current, *tail, place)
+ }
+ Expr::Block { id: _, statements, tail, label } => {
+ self.lower_block_to_place(*label, statements, current, *tail, place)
+ }
+ Expr::Loop { body, label } => self.lower_loop(current, *label, |this, begin| {
+ if let Some((_, block)) = this.lower_expr_as_place(begin, *body, true)? {
+ this.set_goto(block, begin);
+ }
+ Ok(())
+ }),
+ Expr::While { condition, body, label } => {
+ self.lower_loop(current, *label, |this, begin| {
+ let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else {
+ return Ok(());
+ };
+ let end = this.current_loop_end()?;
+ let after_cond = this.new_basic_block();
+ this.set_terminator(
+ to_switch,
+ Terminator::SwitchInt {
+ discr,
+ targets: SwitchTargets::static_if(1, after_cond, end),
+ },
+ );
+ if let Some((_, block)) = this.lower_expr_as_place(after_cond, *body, true)? {
+ this.set_goto(block, begin);
+ }
+ Ok(())
+ })
+ }
+ &Expr::For { iterable, pat, body, label } => {
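+ // Desugar `for <pat> in <iterable> { <body> }` into roughly:
+ // `let mut it = IntoIterator::into_iter(iterable);`
+ // `loop { match Iterator::next(&mut it) { Some(<pat>) => <body>, None => break } }`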
+ let into_iter_fn = self.resolve_lang_item(LangItem::IntoIterIntoIter)?
+ .as_function().ok_or(MirLowerError::LangItemNotFound(LangItem::IntoIterIntoIter))?;
+ let iter_next_fn = self.resolve_lang_item(LangItem::IteratorNext)?
+ .as_function().ok_or(MirLowerError::LangItemNotFound(LangItem::IteratorNext))?;
+ let option_some = self.resolve_lang_item(LangItem::OptionSome)?
+ .as_enum_variant().ok_or(MirLowerError::LangItemNotFound(LangItem::OptionSome))?;
+ let option = option_some.parent;
+ let into_iter_fn_op = Operand::const_zst(
+ TyKind::FnDef(
+ self.db.intern_callable_def(CallableDefId::FunctionId(into_iter_fn)).into(),
+ Substitution::from1(Interner, self.expr_ty(iterable))
+ ).intern(Interner));
+ let iter_next_fn_op = Operand::const_zst(
+ TyKind::FnDef(
+ self.db.intern_callable_def(CallableDefId::FunctionId(iter_next_fn)).into(),
+ Substitution::from1(Interner, self.expr_ty(iterable))
+ ).intern(Interner));
+ let &Some(iterator_ty) = &self.infer.type_of_for_iterator.get(&expr_id) else {
+ return Err(MirLowerError::TypeError("unknown for loop iterator type"));
+ };
+ let ref_mut_iterator_ty = TyKind::Ref(Mutability::Mut, static_lifetime(), iterator_ty.clone()).intern(Interner);
+ let item_ty = &self.infer.type_of_pat[pat];
+ let option_item_ty = TyKind::Adt(chalk_ir::AdtId(option.into()), Substitution::from1(Interner, item_ty.clone())).intern(Interner);
+ let iterator_place: Place = self.temp(iterator_ty.clone())?.into();
+ let option_item_place: Place = self.temp(option_item_ty.clone())?.into();
+ let ref_mut_iterator_place: Place = self.temp(ref_mut_iterator_ty)?.into();
+ let Some(current) = self.lower_call_and_args(into_iter_fn_op, Some(iterable).into_iter(), iterator_place.clone(), current, false)?
+ else {
+ return Ok(None);
+ };
+ self.push_assignment(current, ref_mut_iterator_place.clone(), Rvalue::Ref(BorrowKind::Mut { allow_two_phase_borrow: false }, iterator_place), expr_id.into());
+ self.lower_loop(current, label, |this, begin| {
+ let Some(current) = this.lower_call(iter_next_fn_op, vec![Operand::Copy(ref_mut_iterator_place)], option_item_place.clone(), begin, false)?
+ else {
+ return Ok(());
+ };
+ let end = this.current_loop_end()?;
+ let (current, _) = this.pattern_matching_variant(
+ option_item_ty.clone(),
+ BindingAnnotation::Unannotated,
+ option_item_place.into(),
+ option_some.into(),
+ current,
+ pat.into(),
+ Some(end),
+ &[pat], &None)?;
+ if let Some((_, block)) = this.lower_expr_as_place(current, body, true)? {
+ this.set_goto(block, begin);
+ }
+ Ok(())
+ })
+ },
+ Expr::Call { callee, args, .. } => {
+ let callee_ty = self.expr_ty_after_adjustments(*callee);
+ match &callee_ty.data(Interner).kind {
+ chalk_ir::TyKind::FnDef(..) => {
+ let func = Operand::from_bytes(vec![], callee_ty.clone());
+ self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id))
+ }
+ TyKind::Scalar(_)
+ | TyKind::Tuple(_, _)
+ | TyKind::Array(_, _)
+ | TyKind::Adt(_, _)
+ | TyKind::Str
+ | TyKind::Foreign(_)
+ | TyKind::Slice(_) => {
+ return Err(MirLowerError::TypeError("function call on data type"))
+ }
+ TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition),
+ TyKind::AssociatedType(_, _)
+ | TyKind::Raw(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::OpaqueType(_, _)
+ | TyKind::Never
+ | TyKind::Closure(_, _)
+ | TyKind::Generator(_, _)
+ | TyKind::GeneratorWitness(_, _)
+ | TyKind::Placeholder(_)
+ | TyKind::Dyn(_)
+ | TyKind::Alias(_)
+ | TyKind::Function(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _) => not_supported!("dynamic function call"),
+ }
+ }
+ Expr::MethodCall { receiver, args, .. } => {
+ let (func_id, generic_args) =
+ self.infer.method_resolution(expr_id).ok_or(MirLowerError::UnresolvedMethod)?;
+ let ty = chalk_ir::TyKind::FnDef(
+ CallableDefId::FunctionId(func_id).to_chalk(self.db),
+ generic_args,
+ )
+ .intern(Interner);
+ let func = Operand::from_bytes(vec![], ty);
+ self.lower_call_and_args(
+ func,
+ iter::once(*receiver).chain(args.iter().copied()),
+ place,
+ current,
+ self.is_uninhabited(expr_id),
+ )
+ }
+ Expr::Match { expr, arms } => {
+ let Some((cond_place, mut current)) = self.lower_expr_as_place(current, *expr, true)?
+ else {
+ return Ok(None);
+ };
+ let cond_ty = self.expr_ty_after_adjustments(*expr);
+ let mut end = None;
+ for MatchArm { pat, guard, expr } in arms.iter() {
+ if guard.is_some() {
+ not_supported!("pattern matching with guard");
+ }
+ let (then, otherwise) = self.pattern_match(
+ current,
+ None,
+ cond_place.clone(),
+ cond_ty.clone(),
+ *pat,
+ BindingAnnotation::Unannotated,
+ )?;
+ if let Some(block) = self.lower_expr_to_place(*expr, place.clone(), then)? {
+ let r = end.get_or_insert_with(|| self.new_basic_block());
+ self.set_goto(block, *r);
+ }
+ match otherwise {
+ Some(o) => current = o,
+ None => {
+ // The current pattern was irrefutable, so there is no need to generate code
+ // for the remaining arms
+ break;
+ }
+ }
+ }
+ if self.is_unterminated(current) {
+ self.set_terminator(current, Terminator::Unreachable);
+ }
+ Ok(end)
+ }
+ Expr::Continue { label } => match label {
+ Some(_) => not_supported!("continue with label"),
+ None => {
+ let loop_data =
+ self.current_loop_blocks.ok_or(MirLowerError::ContinueWithoutLoop)?;
+ self.set_goto(current, loop_data.begin);
+ Ok(None)
+ }
+ },
+ Expr::Break { expr, label } => {
+ if expr.is_some() {
+ not_supported!("break with value");
+ }
+ match label {
+ Some(_) => not_supported!("break with label"),
+ None => {
+ let end =
+ self.current_loop_end()?;
+ self.set_goto(current, end);
+ Ok(None)
+ }
+ }
+ }
+ Expr::Return { expr } => {
+ if let Some(expr) = expr {
+ if let Some(c) = self.lower_expr_to_place(*expr, return_slot().into(), current)? {
+ current = c;
+ } else {
+ return Ok(None);
+ }
+ }
+ self.set_terminator(current, Terminator::Return);
+ Ok(None)
+ }
+ Expr::Yield { .. } => not_supported!("yield"),
+ Expr::RecordLit { fields, path, .. } => {
+ let variant_id = self
+ .infer
+ .variant_resolution_for_expr(expr_id)
+ .ok_or_else(|| match path {
+ Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
+ None => MirLowerError::RecordLiteralWithoutPath,
+ })?;
+ let subst = match self.expr_ty(expr_id).kind(Interner) {
+ TyKind::Adt(_, s) => s.clone(),
+ _ => not_supported!("Non ADT record literal"),
+ };
+ let variant_data = variant_id.variant_data(self.db.upcast());
+ match variant_id {
+ VariantId::EnumVariantId(_) | VariantId::StructId(_) => {
+ let mut operands = vec![None; variant_data.fields().len()];
+ for RecordLitField { name, expr } in fields.iter() {
+ let field_id =
+ variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
+ let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ operands[u32::from(field_id.into_raw()) as usize] = Some(op);
+ }
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::Aggregate(
+ AggregateKind::Adt(variant_id, subst),
+ operands.into_iter().collect::<Option<_>>().ok_or(
+ MirLowerError::TypeError("missing field in record literal"),
+ )?,
+ ),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ VariantId::UnionId(union_id) => {
+ let [RecordLitField { name, expr }] = fields.as_ref() else {
+ not_supported!("Union record literal with more than one field");
+ };
+ let local_id =
+ variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?;
+ let mut place = place;
+ place
+ .projection
+ .push(PlaceElem::Field(FieldId { parent: union_id.into(), local_id }));
+ self.lower_expr_to_place(*expr, place, current)
+ }
+ }
+ }
+ Expr::Await { .. } => not_supported!("await"),
+ Expr::Try { .. } => not_supported!("? operator"),
+ Expr::Yeet { .. } => not_supported!("yeet"),
+ Expr::TryBlock { .. } => not_supported!("try block"),
+ Expr::Async { .. } => not_supported!("async block"),
+ Expr::Const { .. } => not_supported!("anonymous const block"),
+ Expr::Cast { expr, type_ref: _ } => {
+ let Some((x, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ return Ok(None);
+ };
+ let source_ty = self.infer[*expr].clone();
+ let target_ty = self.infer[expr_id].clone();
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, x, target_ty),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ Expr::Ref { expr, rawness: _, mutability } => {
+ let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ let bk = BorrowKind::from_hir(*mutability);
+ self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
+ Ok(Some(current))
+ }
+ Expr::Box { .. } => not_supported!("box expression"),
+ Expr::Field { .. } | Expr::Index { .. } | Expr::UnaryOp { op: hir_def::expr::UnaryOp::Deref, .. } => {
+ let Some((p, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, true)? else {
+ return Ok(None);
+ };
+ self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
+ Ok(Some(current))
+ }
+ Expr::UnaryOp { expr, op: op @ (hir_def::expr::UnaryOp::Not | hir_def::expr::UnaryOp::Neg) } => {
+ let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
+ return Ok(None);
+ };
+ let operation = match op {
+ hir_def::expr::UnaryOp::Not => UnOp::Not,
+ hir_def::expr::UnaryOp::Neg => UnOp::Neg,
+ _ => unreachable!(),
+ };
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::UnaryOp(operation, operand),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ },
+ Expr::BinaryOp { lhs, rhs, op } => {
+ let op = op.ok_or(MirLowerError::IncompleteExpr)?;
+ if let hir_def::expr::BinaryOp::Assignment { op } = op {
+ if op.is_some() {
+ not_supported!("assignment with arith op (like +=)");
+ }
+ let Some((lhs_place, current)) =
+ self.lower_expr_as_place(current, *lhs, false)?
+ else {
+ return Ok(None);
+ };
+ let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
+ return Ok(None);
+ };
+ self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into());
+ return Ok(Some(current));
+ }
+ let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else {
+ return Ok(None);
+ };
+ let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else {
+ return Ok(None);
+ };
+ self.push_assignment(
+ current,
+ place,
+ Rvalue::CheckedBinaryOp(
+ match op {
+ hir_def::expr::BinaryOp::LogicOp(op) => match op {
+ hir_def::expr::LogicOp::And => BinOp::BitAnd, // FIXME: make these short-circuit
+ hir_def::expr::LogicOp::Or => BinOp::BitOr,
+ },
+ hir_def::expr::BinaryOp::ArithOp(op) => BinOp::from(op),
+ hir_def::expr::BinaryOp::CmpOp(op) => BinOp::from(op),
+ hir_def::expr::BinaryOp::Assignment { .. } => unreachable!(), // handled above
+ },
+ lhs_op,
+ rhs_op,
+ ),
+ expr_id.into(),
+ );
+ Ok(Some(current))
+ }
+ Expr::Range { .. } => not_supported!("range"),
+ Expr::Closure { .. } => not_supported!("closure"),
+ Expr::Tuple { exprs, is_assignee_expr: _ } => {
+ let Some(values) = exprs
+ .iter()
+ .map(|x| {
+ let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ Ok(Some(o))
+ })
+ .collect::<Result<Option<_>>>()?
+ else {
+ return Ok(None);
+ };
+ let r = Rvalue::Aggregate(
+ AggregateKind::Tuple(self.expr_ty(expr_id)),
+ values,
+ );
+ self.push_assignment(current, place, r, expr_id.into());
+ Ok(Some(current))
+ }
+ Expr::Array(l) => match l {
+ Array::ElementList { elements, .. } => {
+ let elem_ty = match &self.expr_ty(expr_id).data(Interner).kind {
+ TyKind::Array(ty, _) => ty.clone(),
+ _ => {
+ return Err(MirLowerError::TypeError(
+ "Array expression with non array type",
+ ))
+ }
+ };
+ let Some(values) = elements
+ .iter()
+ .map(|x| {
+ let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else {
+ return Ok(None);
+ };
+ current = c;
+ Ok(Some(o))
+ })
+ .collect::<Result<Option<_>>>()?
+ else {
+ return Ok(None);
+ };
+ let r = Rvalue::Aggregate(
+ AggregateKind::Array(elem_ty),
+ values,
+ );
+ self.push_assignment(current, place, r, expr_id.into());
+ Ok(Some(current))
+ }
+ Array::Repeat { .. } => not_supported!("array repeat"),
+ },
+ Expr::Literal(l) => {
+ let ty = self.expr_ty(expr_id);
+ let op = self.lower_literal_to_operand(ty, l)?;
+ self.push_assignment(current, place, op.into(), expr_id.into());
+ Ok(Some(current))
+ }
+ Expr::Underscore => not_supported!("underscore"),
+ }
+ }
+
+ fn push_field_projection(&self, place: &mut Place, expr_id: ExprId) -> Result<()> {
+ if let Expr::Field { expr, name } = &self.body[expr_id] {
+ if let TyKind::Tuple(..) = self.expr_ty_after_adjustments(*expr).kind(Interner) {
+ let index = name
+ .as_tuple_index()
+ .ok_or(MirLowerError::TypeError("named field on tuple"))?;
+ place.projection.push(ProjectionElem::TupleField(index))
+ } else {
+ let field =
+ self.infer.field_resolution(expr_id).ok_or(MirLowerError::UnresolvedField)?;
+ place.projection.push(ProjectionElem::Field(field));
+ }
+ } else {
+ not_supported!("")
+ }
+ Ok(())
+ }
+
+ fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
+ let size = layout_of_ty(self.db, &ty, self.owner.module(self.db.upcast()).krate())?
+ .size
+ .bytes_usize();
+ let bytes = match l {
+ hir_def::expr::Literal::String(b) => {
+ let b = b.as_bytes();
+ let mut data = vec![];
+ data.extend(0usize.to_le_bytes());
+ data.extend(b.len().to_le_bytes());
+ let mut mm = MemoryMap::default();
+ mm.insert(0, b.to_vec());
+ return Ok(Operand::from_concrete_const(data, mm, ty));
+ }
+ hir_def::expr::Literal::ByteString(b) => {
+ let mut data = vec![];
+ data.extend(0usize.to_le_bytes());
+ data.extend(b.len().to_le_bytes());
+ let mut mm = MemoryMap::default();
+ mm.insert(0, b.to_vec());
+ return Ok(Operand::from_concrete_const(data, mm, ty));
+ }
+ hir_def::expr::Literal::Char(c) => u32::from(*c).to_le_bytes().into(),
+ hir_def::expr::Literal::Bool(b) => vec![*b as u8],
+ hir_def::expr::Literal::Int(x, _) => x.to_le_bytes()[0..size].into(),
+ hir_def::expr::Literal::Uint(x, _) => x.to_le_bytes()[0..size].into(),
+ hir_def::expr::Literal::Float(f, _) => match size {
+ 8 => f.into_f64().to_le_bytes().into(),
+ 4 => f.into_f32().to_le_bytes().into(),
+ _ => {
+ return Err(MirLowerError::TypeError("float with size other than 4 or 8 bytes"))
+ }
+ },
+ };
+ Ok(Operand::from_concrete_const(bytes, MemoryMap::default(), ty))
+ }
+
+ fn new_basic_block(&mut self) -> BasicBlockId {
+ self.result.basic_blocks.alloc(BasicBlock::default())
+ }
+
+ fn lower_const(
+ &mut self,
+ const_id: hir_def::ConstId,
+ prev_block: BasicBlockId,
+ place: Place,
+ span: MirSpan,
+ ) -> Result<()> {
+ let c = self.db.const_eval(const_id)?;
+ self.write_const_to_place(c, prev_block, place, span)
+ }
+
+ fn write_const_to_place(
+ &mut self,
+ c: Const,
+ prev_block: BasicBlockId,
+ place: Place,
+ span: MirSpan,
+ ) -> Result<()> {
+ self.push_assignment(prev_block, place, Operand::Constant(c).into(), span);
+ Ok(())
+ }
+
+ fn write_bytes_to_place(
+ &mut self,
+ prev_block: BasicBlockId,
+ place: Place,
+ cv: Vec<u8>,
+ ty: Ty,
+ span: MirSpan,
+ ) -> Result<()> {
+ self.push_assignment(prev_block, place, Operand::from_bytes(cv, ty).into(), span);
+ Ok(())
+ }
+
+ fn lower_enum_variant(
+ &mut self,
+ variant_id: EnumVariantId,
+ prev_block: BasicBlockId,
+ place: Place,
+ ty: Ty,
+ fields: Vec<Operand>,
+ span: MirSpan,
+ ) -> Result<BasicBlockId> {
+ let subst = match ty.kind(Interner) {
+ TyKind::Adt(_, subst) => subst.clone(),
+ _ => not_supported!("Non ADT enum"),
+ };
+ self.push_assignment(
+ prev_block,
+ place,
+ Rvalue::Aggregate(AggregateKind::Adt(variant_id.into(), subst), fields),
+ span,
+ );
+ Ok(prev_block)
+ }
+
+ fn lower_call_and_args(
+ &mut self,
+ func: Operand,
+ args: impl Iterator<Item = ExprId>,
+ place: Place,
+ mut current: BasicBlockId,
+ is_uninhabited: bool,
+ ) -> Result<Option<BasicBlockId>> {
+ let Some(args) = args
+ .map(|arg| {
+ if let Some((temp, c)) = self.lower_expr_to_some_operand(arg, current)? {
+ current = c;
+ Ok(Some(temp))
+ } else {
+ Ok(None)
+ }
+ })
+ .collect::<Result<Option<Vec<_>>>>()?
+ else {
+ return Ok(None);
+ };
+ self.lower_call(func, args, place, current, is_uninhabited)
+ }
+
+ fn lower_call(
+ &mut self,
+ func: Operand,
+ args: Vec<Operand>,
+ place: Place,
+ current: BasicBlockId,
+ is_uninhabited: bool,
+ ) -> Result<Option<BasicBlockId>> {
+ let b = if is_uninhabited { None } else { Some(self.new_basic_block()) };
+ self.set_terminator(
+ current,
+ Terminator::Call {
+ func,
+ args,
+ destination: place,
+ target: b,
+ cleanup: None,
+ from_hir_call: true,
+ },
+ );
+ Ok(b)
+ }
+
+ fn is_unterminated(&mut self, source: BasicBlockId) -> bool {
+ self.result.basic_blocks[source].terminator.is_none()
+ }
+
+ fn set_terminator(&mut self, source: BasicBlockId, terminator: Terminator) {
+ self.result.basic_blocks[source].terminator = Some(terminator);
+ }
+
+ fn set_goto(&mut self, source: BasicBlockId, target: BasicBlockId) {
+ self.set_terminator(source, Terminator::Goto { target });
+ }
+
+ fn expr_ty(&self, e: ExprId) -> Ty {
+ self.infer[e].clone()
+ }
+
+ fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty {
+ let mut ty = None;
+ if let Some(x) = self.infer.expr_adjustments.get(&e) {
+ if let Some(x) = x.last() {
+ ty = Some(x.target.clone());
+ }
+ }
+ ty.unwrap_or_else(|| self.expr_ty(e))
+ }
+
+ fn push_statement(&mut self, block: BasicBlockId, statement: Statement) {
+ self.result.basic_blocks[block].statements.push(statement);
+ }
+
+ fn push_assignment(
+ &mut self,
+ block: BasicBlockId,
+ place: Place,
+ rvalue: Rvalue,
+ span: MirSpan,
+ ) {
+ self.push_statement(block, StatementKind::Assign(place, rvalue).with_span(span));
+ }
+
+ /// It takes a `current` unterminated block, appends statements (and possibly a terminator) to it that
+ /// check whether the pattern matches and write the bindings, and returns two unterminated blocks: one
+ /// for the matched path (which can be the `current` block) and one for the mismatched path. If the
+ /// input pattern is irrefutable, the mismatched path block is `None`.
+ ///
+ /// By default, it will create a new block for the mismatched path. If you already have one, you can
+ /// provide it via the `current_else` argument to save an unnecessary jump. If `current_else` isn't
+ /// `None`, the resulting mismatched path won't be `None` either. Note that this function will add jumps
+ /// to the beginning of the `current_else` block, so it should be an empty block.
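+ ///
+ /// For example, lowering `let Some(x) = opt else { ... }` yields a matched block
+ /// that binds `x` and a mismatched block that runs the `else` branch.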
+ fn pattern_match(
+ &mut self,
+ mut current: BasicBlockId,
+ mut current_else: Option<BasicBlockId>,
+ mut cond_place: Place,
+ mut cond_ty: Ty,
+ pattern: PatId,
+ mut binding_mode: BindingAnnotation,
+ ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
+ Ok(match &self.body.pats[pattern] {
+ Pat::Missing => return Err(MirLowerError::IncompleteExpr),
+ Pat::Wild => (current, current_else),
+ Pat::Tuple { args, ellipsis } => {
+ pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place);
+ let subst = match cond_ty.kind(Interner) {
+ TyKind::Tuple(_, s) => s,
+ _ => {
+ return Err(MirLowerError::TypeError(
+ "non tuple type matched with tuple pattern",
+ ))
+ }
+ };
+ self.pattern_match_tuple_like(
+ current,
+ current_else,
+ args.iter().enumerate().map(|(i, x)| {
+ (
+ PlaceElem::TupleField(i),
+ *x,
+ subst.at(Interner, i).assert_ty_ref(Interner).clone(),
+ )
+ }),
+ *ellipsis,
+ &cond_place,
+ binding_mode,
+ )?
+ }
+ Pat::Or(pats) => {
+ let then_target = self.new_basic_block();
+ let mut finished = false;
+ for pat in &**pats {
+ let (next, next_else) = self.pattern_match(
+ current,
+ None,
+ cond_place.clone(),
+ cond_ty.clone(),
+ *pat,
+ binding_mode,
+ )?;
+ self.set_goto(next, then_target);
+ match next_else {
+ Some(t) => {
+ current = t;
+ }
+ None => {
+ finished = true;
+ break;
+ }
+ }
+ }
+ if !finished {
+ let ce = *current_else.get_or_insert_with(|| self.new_basic_block());
+ self.set_goto(current, ce);
+ }
+ (then_target, current_else)
+ }
+ Pat::Record { .. } => not_supported!("record pattern"),
+ Pat::Range { .. } => not_supported!("range pattern"),
+ Pat::Slice { .. } => not_supported!("slice pattern"),
+ Pat::Path(_) => {
+ let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
+ not_supported!("unresolved variant");
+ };
+ self.pattern_matching_variant(
+ cond_ty,
+ binding_mode,
+ cond_place,
+ variant,
+ current,
+ pattern.into(),
+ current_else,
+ &[],
+ &None,
+ )?
+ }
+ Pat::Lit(l) => {
+ let then_target = self.new_basic_block();
+ let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
+ match &self.body.exprs[*l] {
+ Expr::Literal(l) => match l {
+ hir_def::expr::Literal::Int(x, _) => {
+ self.set_terminator(
+ current,
+ Terminator::SwitchInt {
+ discr: Operand::Copy(cond_place),
+ targets: SwitchTargets::static_if(
+ *x as u128,
+ then_target,
+ else_target,
+ ),
+ },
+ );
+ }
+ hir_def::expr::Literal::Uint(x, _) => {
+ self.set_terminator(
+ current,
+ Terminator::SwitchInt {
+ discr: Operand::Copy(cond_place),
+ targets: SwitchTargets::static_if(*x, then_target, else_target),
+ },
+ );
+ }
+ _ => not_supported!("non int path literal"),
+ },
+ _ => not_supported!("expression path literal"),
+ }
+ (then_target, Some(else_target))
+ }
+ Pat::Bind { id, subpat } => {
+ let target_place = self.result.binding_locals[*id];
+ let mode = self.body.bindings[*id].mode;
+ if let Some(subpat) = subpat {
+ (current, current_else) = self.pattern_match(
+ current,
+ current_else,
+ cond_place.clone(),
+ cond_ty,
+ *subpat,
+ binding_mode,
+ )?
+ }
+ if matches!(mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) {
+ binding_mode = mode;
+ }
+ self.push_storage_live(*id, current);
+ self.push_assignment(
+ current,
+ target_place.into(),
+ match binding_mode {
+ BindingAnnotation::Unannotated | BindingAnnotation::Mutable => {
+ Operand::Copy(cond_place).into()
+ }
+ BindingAnnotation::Ref => Rvalue::Ref(BorrowKind::Shared, cond_place),
+ BindingAnnotation::RefMut => Rvalue::Ref(
+ BorrowKind::Mut { allow_two_phase_borrow: false },
+ cond_place,
+ ),
+ },
+ pattern.into(),
+ );
+ (current, current_else)
+ }
+ Pat::TupleStruct { path: _, args, ellipsis } => {
+ let Some(variant) = self.infer.variant_resolution_for_pat(pattern) else {
+ not_supported!("unresolved variant");
+ };
+ self.pattern_matching_variant(
+ cond_ty,
+ binding_mode,
+ cond_place,
+ variant,
+ current,
+ pattern.into(),
+ current_else,
+ args,
+ ellipsis,
+ )?
+ }
+ Pat::Ref { .. } => not_supported!("& pattern"),
+ Pat::Box { .. } => not_supported!("box pattern"),
+ Pat::ConstBlock(_) => not_supported!("const block pattern"),
+ })
+ }
+
+ fn pattern_matching_variant(
+ &mut self,
+ mut cond_ty: Ty,
+ mut binding_mode: BindingAnnotation,
+ mut cond_place: Place,
+ variant: VariantId,
+ current: BasicBlockId,
+ span: MirSpan,
+ current_else: Option<BasicBlockId>,
+ args: &[PatId],
+ ellipsis: &Option<usize>,
+ ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
+ pattern_matching_dereference(&mut cond_ty, &mut binding_mode, &mut cond_place);
+ let subst = match cond_ty.kind(Interner) {
+ TyKind::Adt(_, s) => s,
+ _ => return Err(MirLowerError::TypeError("non adt type matched with tuple struct")),
+ };
+ let fields_type = self.db.field_types(variant);
+ Ok(match variant {
+ VariantId::EnumVariantId(v) => {
+ let e = self.db.const_eval_discriminant(v)? as u128;
+ let next = self.new_basic_block();
+ let tmp = self.discr_temp_place();
+ self.push_assignment(
+ current,
+ tmp.clone(),
+ Rvalue::Discriminant(cond_place.clone()),
+ span,
+ );
+ let else_target = current_else.unwrap_or_else(|| self.new_basic_block());
+ self.set_terminator(
+ current,
+ Terminator::SwitchInt {
+ discr: Operand::Copy(tmp),
+ targets: SwitchTargets::static_if(e, next, else_target),
+ },
+ );
+ let enum_data = self.db.enum_data(v.parent);
+ let fields =
+ enum_data.variants[v.local_id].variant_data.fields().iter().map(|(x, _)| {
+ (
+ PlaceElem::Field(FieldId { parent: v.into(), local_id: x }),
+ fields_type[x].clone().substitute(Interner, subst),
+ )
+ });
+ self.pattern_match_tuple_like(
+ next,
+ Some(else_target),
+ args.iter().zip(fields).map(|(x, y)| (y.0, *x, y.1)),
+ *ellipsis,
+ &cond_place,
+ binding_mode,
+ )?
+ }
+ VariantId::StructId(s) => {
+ let struct_data = self.db.struct_data(s);
+ let fields = struct_data.variant_data.fields().iter().map(|(x, _)| {
+ (
+ PlaceElem::Field(FieldId { parent: s.into(), local_id: x }),
+ fields_type[x].clone().substitute(Interner, subst),
+ )
+ });
+ self.pattern_match_tuple_like(
+ current,
+ current_else,
+ args.iter().zip(fields).map(|(x, y)| (y.0, *x, y.1)),
+ *ellipsis,
+ &cond_place,
+ binding_mode,
+ )?
+ }
+ VariantId::UnionId(_) => {
+ return Err(MirLowerError::TypeError("pattern matching on union"))
+ }
+ })
+ }
+
+ fn pattern_match_tuple_like(
+ &mut self,
+ mut current: BasicBlockId,
+ mut current_else: Option<BasicBlockId>,
+ args: impl Iterator<Item = (PlaceElem, PatId, Ty)>,
+ ellipsis: Option<usize>,
+ cond_place: &Place,
+ binding_mode: BindingAnnotation,
+ ) -> Result<(BasicBlockId, Option<BasicBlockId>)> {
+ if ellipsis.is_some() {
+ not_supported!("tuple like pattern with ellipsis");
+ }
+ for (proj, arg, ty) in args {
+ let mut cond_place = cond_place.clone();
+ cond_place.projection.push(proj);
+ (current, current_else) =
+ self.pattern_match(current, current_else, cond_place, ty, arg, binding_mode)?;
+ }
+ Ok((current, current_else))
+ }
+
+ fn discr_temp_place(&mut self) -> Place {
+ match &self.discr_temp {
+ Some(x) => x.clone(),
+ None => {
+ let tmp: Place =
+ self.temp(TyBuilder::discr_ty()).expect("discr_ty is never unsized").into();
+ self.discr_temp = Some(tmp.clone());
+ tmp
+ }
+ }
+ }
+
+ fn lower_loop(
+ &mut self,
+ prev_block: BasicBlockId,
+ label: Option<LabelId>,
+ f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>,
+ ) -> Result<Option<BasicBlockId>> {
+ if label.is_some() {
+ not_supported!("loop with label");
+ }
+ let begin = self.new_basic_block();
+ let prev =
+ mem::replace(&mut self.current_loop_blocks, Some(LoopBlocks { begin, end: None }));
+ self.set_goto(prev_block, begin);
+ f(self, begin)?;
+ let my = mem::replace(&mut self.current_loop_blocks, prev)
+ .ok_or(MirLowerError::ImplementationError("current_loop_blocks is corrupt"))?;
+ Ok(my.end)
+ }
+
+ fn has_adjustments(&self, expr_id: ExprId) -> bool {
+ !self.infer.expr_adjustments.get(&expr_id).map(|x| x.is_empty()).unwrap_or(true)
+ }
+
+ fn merge_blocks(
+ &mut self,
+ b1: Option<BasicBlockId>,
+ b2: Option<BasicBlockId>,
+ ) -> Option<BasicBlockId> {
+ match (b1, b2) {
+ (None, None) => None,
+ (None, Some(b)) | (Some(b), None) => Some(b),
+ (Some(b1), Some(b2)) => {
+ let bm = self.new_basic_block();
+ self.set_goto(b1, bm);
+ self.set_goto(b2, bm);
+ Some(bm)
+ }
+ }
+ }
+
+ fn current_loop_end(&mut self) -> Result<BasicBlockId> {
+ let r = match self
+ .current_loop_blocks
+ .as_mut()
+ .ok_or(MirLowerError::ImplementationError("Current loop access out of loop"))?
+ .end
+ {
+ Some(x) => x,
+ None => {
+ let s = self.new_basic_block();
+ self.current_loop_blocks
+ .as_mut()
+ .ok_or(MirLowerError::ImplementationError("Current loop access out of loop"))?
+ .end = Some(s);
+ s
+ }
+ };
+ Ok(r)
+ }
+
+ fn is_uninhabited(&self, expr_id: ExprId) -> bool {
+ is_ty_uninhabited_from(&self.infer[expr_id], self.owner.module(self.db.upcast()), self.db)
+ }
+
+ /// This function pushes a `StorageLive` statement for the binding and applies changes to add
+ /// `StorageDead` in the appropriate places.
+ fn push_storage_live(&mut self, b: BindingId, current: BasicBlockId) {
+ // The current implementation is wrong: it adds no `StorageDead` at the end of the scope or before
+ // each break and continue. It just adds a `StorageDead` before the `StorageLive`, which is not wrong,
+ // but is unneeded in a proper implementation. Due to this limitation, implementing a borrow checker
+ // on top of this MIR will falsely allow this:
+ //
+ // ```
+ // let x;
+ // loop {
+ // let y = 2;
+ // x = &y;
+ // if some_condition {
+ // break; // we need to add a StorageDead(y) above this to kill the x borrow
+ // }
+ // }
+ // use(x)
+ // ```
+ // But I think this approach works for mutability analysis, as a user can't write code that mutates a
+ // binding after `StorageDead`, except in loops, which are handled by this hack.
+ let span = self.body.bindings[b]
+ .definitions
+ .first()
+ .copied()
+ .map(MirSpan::PatId)
+ .unwrap_or(MirSpan::Unknown);
+ let l = self.result.binding_locals[b];
+ self.push_statement(current, StatementKind::StorageDead(l).with_span(span));
+ self.push_statement(current, StatementKind::StorageLive(l).with_span(span));
+ }
+
+ fn resolve_lang_item(&self, item: LangItem) -> Result<LangItemTarget> {
+ let crate_id = self.owner.module(self.db.upcast()).krate();
+ self.db.lang_item(crate_id, item).ok_or(MirLowerError::LangItemNotFound(item))
+ }
+
+ fn lower_block_to_place(
+ &mut self,
+ label: Option<LabelId>,
+ statements: &[hir_def::expr::Statement],
+ mut current: BasicBlockId,
+ tail: Option<ExprId>,
+ place: Place,
+ ) -> Result<Option<Idx<BasicBlock>>> {
+ if label.is_some() {
+ not_supported!("block with label");
+ }
+ for statement in statements.iter() {
+ match statement {
+ hir_def::expr::Statement::Let { pat, initializer, else_branch, type_ref: _ } => {
+ if let Some(expr_id) = initializer {
+ let else_block;
+ let Some((init_place, c)) =
+ self.lower_expr_as_place(current, *expr_id, true)?
+ else {
+ return Ok(None);
+ };
+ current = c;
+ (current, else_block) = self.pattern_match(
+ current,
+ None,
+ init_place,
+ self.expr_ty_after_adjustments(*expr_id),
+ *pat,
+ BindingAnnotation::Unannotated,
+ )?;
+ match (else_block, else_branch) {
+ (None, _) => (),
+ (Some(else_block), None) => {
+ self.set_terminator(else_block, Terminator::Unreachable);
+ }
+ (Some(else_block), Some(else_branch)) => {
+ if let Some((_, b)) =
+ self.lower_expr_as_place(else_block, *else_branch, true)?
+ {
+ self.set_terminator(b, Terminator::Unreachable);
+ }
+ }
+ }
+ } else {
+ self.body.walk_bindings_in_pat(*pat, |b| {
+ self.push_storage_live(b, current);
+ });
+ }
+ }
+ hir_def::expr::Statement::Expr { expr, has_semi: _ } => {
+ let Some((_, c)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ current = c;
+ }
+ }
+ }
+ match tail {
+ Some(tail) => self.lower_expr_to_place(tail, place, current),
+ None => Ok(Some(current)),
+ }
+ }
+}
+
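+/// Implements match ergonomics: peels reference types off the scrutinee, pushing a
+/// `Deref` projection for each one and switching the binding mode to by-reference.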
+fn pattern_matching_dereference(
+ cond_ty: &mut Ty,
+ binding_mode: &mut BindingAnnotation,
+ cond_place: &mut Place,
+) {
+ while let Some((ty, _, mu)) = cond_ty.as_reference() {
+ if mu == Mutability::Mut && *binding_mode != BindingAnnotation::Ref {
+ *binding_mode = BindingAnnotation::RefMut;
+ } else {
+ *binding_mode = BindingAnnotation::Ref;
+ }
+ *cond_ty = ty.clone();
+ cond_place.projection.push(ProjectionElem::Deref);
+ }
+}
+
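+/// Computes the MIR cast kind of an `as` cast from the source and target types; for
+/// example, `1u8 as u32` is `IntToInt` and `1u8 as f32` is `IntToFloat`.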
+fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
+ Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) {
+ (TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) {
+ (chalk_ir::Scalar::Float(_), chalk_ir::Scalar::Float(_)) => CastKind::FloatToFloat,
+ (chalk_ir::Scalar::Float(_), _) => CastKind::FloatToInt,
+ (_, chalk_ir::Scalar::Float(_)) => CastKind::IntToFloat,
+ (_, _) => CastKind::IntToInt,
+ },
+ // Enum to int casts
+ (TyKind::Scalar(_), TyKind::Adt(..)) | (TyKind::Adt(..), TyKind::Scalar(_)) => {
+ CastKind::IntToInt
+ }
+ (a, b) => not_supported!("Unknown cast between {a:?} and {b:?}"),
+ })
+}
+
+pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
+ let _p = profile::span("mir_body_query").detail(|| match def {
+ DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
+ DefWithBodyId::StaticId(it) => db.static_data(it).name.clone().to_string(),
+ DefWithBodyId::ConstId(it) => {
+ db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
+ }
+ DefWithBodyId::VariantId(it) => {
+ db.enum_data(it.parent).variants[it.local_id].name.to_string()
+ }
+ });
+ let body = db.body(def);
+ let infer = db.infer(def);
+ let result = lower_to_mir(db, def, &body, &infer, body.body_expr)?;
+ Ok(Arc::new(result))
+}
+
+pub fn mir_body_recover(
+ _db: &dyn HirDatabase,
+ _cycle: &[String],
+ _def: &DefWithBodyId,
+) -> Result<Arc<MirBody>> {
+ Err(MirLowerError::Loop)
+}
+
+pub fn lower_to_mir(
+ db: &dyn HirDatabase,
+ owner: DefWithBodyId,
+ body: &Body,
+ infer: &InferenceResult,
+ // FIXME: root_expr should always be the body.body_expr, but since `X` in `[(); X]` doesn't have its own specific body yet, we
+ // need to take this input explicitly.
+ root_expr: ExprId,
+) -> Result<MirBody> {
+ if let Some((_, x)) = infer.type_mismatches().next() {
+ return Err(MirLowerError::TypeMismatch(x.clone()));
+ }
+ let mut basic_blocks = Arena::new();
+ let start_block =
+ basic_blocks.alloc(BasicBlock { statements: vec![], terminator: None, is_cleanup: false });
+ let mut locals = Arena::new();
+ // local 0 is the return place
+ locals.alloc(Local { ty: infer[root_expr].clone() });
+ let mut binding_locals: ArenaMap<BindingId, LocalId> = ArenaMap::new();
+ // locals 1 to param_len are the parameters
+ let param_locals: Vec<LocalId> = if let DefWithBodyId::FunctionId(fid) = owner {
+ let substs = TyBuilder::placeholder_subst(db, fid);
+ let callable_sig = db.callable_item_signature(fid.into()).substitute(Interner, &substs);
+ body.params
+ .iter()
+ .zip(callable_sig.params().iter())
+ .map(|(&x, ty)| {
+ let local_id = locals.alloc(Local { ty: ty.clone() });
+ if let Pat::Bind { id, subpat: None } = body[x] {
+ if matches!(
+ body.bindings[id].mode,
+ BindingAnnotation::Unannotated | BindingAnnotation::Mutable
+ ) {
+ binding_locals.insert(id, local_id);
+ }
+ }
+ local_id
+ })
+ .collect()
+ } else {
+ if !body.params.is_empty() {
+ return Err(MirLowerError::TypeError("Unexpected parameter for non function body"));
+ }
+ vec![]
+ };
+ // and then the rest of the bindings
+ for (id, _) in body.bindings.iter() {
+ if !binding_locals.contains_idx(id) {
+ binding_locals.insert(id, locals.alloc(Local { ty: infer[id].clone() }));
+ }
+ }
+ let mir = MirBody {
+ basic_blocks,
+ locals,
+ start_block,
+ binding_locals,
+ param_locals,
+ owner,
+ arg_count: body.params.len(),
+ };
+ let mut ctx = MirLowerCtx {
+ result: mir,
+ db,
+ infer,
+ body,
+ owner,
+ current_loop_blocks: None,
+ discr_temp: None,
+ };
+ let mut current = start_block;
+ for (&param, local) in body.params.iter().zip(ctx.result.param_locals.clone().into_iter()) {
+ if let Pat::Bind { id, .. } = body[param] {
+ if local == ctx.result.binding_locals[id] {
+ continue;
+ }
+ }
+ let r = ctx.pattern_match(
+ current,
+ None,
+ local.into(),
+ ctx.result.locals[local].ty.clone(),
+ param,
+ BindingAnnotation::Unannotated,
+ )?;
+ if let Some(b) = r.1 {
+ ctx.set_terminator(b, Terminator::Unreachable);
+ }
+ current = r.0;
+ }
+ if let Some(b) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
+ ctx.result.basic_blocks[b].terminator = Some(Terminator::Return);
+ }
+ Ok(ctx.result)
+}
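
The locals table built in `lower_to_mir` follows a fixed order: local 0 is the
return place, locals 1 through `param_len` hold the parameters (a parameter
whose pattern is a plain `Pat::Bind` with an unannotated or `mut` binding mode
shares its local with the binding itself), and all remaining bindings are
allocated after that. A minimal self-contained model of this allocation order,
with a plain `HashMap` standing in for the arena types the real code uses:

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct BindingId(u32);

// Sketch of the allocation order in `lower_to_mir`; `BindingId` and the
// return values are simplified stand-ins for the hir-ty types.
fn layout_locals(
    params: &[BindingId],
    all_bindings: &[BindingId],
) -> (usize, HashMap<BindingId, usize>) {
    let mut next_local = 1; // local 0 is reserved for the return place
    let mut binding_locals = HashMap::new();
    for &p in params {
        // locals 1..=param_len: one per parameter; plain `Pat::Bind`
        // patterns reuse the parameter's local as the binding's local
        binding_locals.insert(p, next_local);
        next_local += 1;
    }
    for &b in all_bindings {
        // and then the rest of the bindings
        binding_locals.entry(b).or_insert_with(|| {
            let l = next_local;
            next_local += 1;
            l
        });
    }
    (next_local, binding_locals)
}

fn main() {
    let (a, b, c) = (BindingId(0), BindingId(1), BindingId(2));
    let (local_count, map) = layout_locals(&[a, b], &[a, b, c]);
    assert_eq!(local_count, 4); // _0 return, _1/_2 params, _3 for `c`
    assert_eq!(map[&c], 3);
}
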
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
new file mode 100644
index 000000000..fe8147dcd
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs
@@ -0,0 +1,237 @@
+//! MIR lowering for places
+
+use super::*;
+use hir_expand::name;
+
+macro_rules! not_supported {
+ ($x: expr) => {
+ return Err(MirLowerError::NotSupported(format!($x)))
+ };
+}
+
+impl MirLowerCtx<'_> {
+ fn lower_expr_to_some_place_without_adjust(
+ &mut self,
+ expr_id: ExprId,
+ prev_block: BasicBlockId,
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let ty = self.expr_ty(expr_id);
+ let place = self.temp(ty)?;
+ let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else {
+ return Ok(None);
+ };
+ Ok(Some((place.into(), current)))
+ }
+
+ fn lower_expr_to_some_place_with_adjust(
+ &mut self,
+ expr_id: ExprId,
+ prev_block: BasicBlockId,
+ adjustments: &[Adjustment],
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let ty =
+ adjustments.last().map(|x| x.target.clone()).unwrap_or_else(|| self.expr_ty(expr_id));
+ let place = self.temp(ty)?;
+ let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else {
+ return Ok(None);
+ };
+ Ok(Some((place.into(), current)))
+ }
+
+ pub(super) fn lower_expr_as_place_with_adjust(
+ &mut self,
+ current: BasicBlockId,
+ expr_id: ExprId,
+ upgrade_rvalue: bool,
+ adjustments: &[Adjustment],
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let try_rvalue = |this: &mut MirLowerCtx<'_>| {
+ if !upgrade_rvalue {
+ return Err(MirLowerError::MutatingRvalue);
+ }
+ this.lower_expr_to_some_place_with_adjust(expr_id, current, adjustments)
+ };
+ if let Some((last, rest)) = adjustments.split_last() {
+ match last.kind {
+ Adjust::Deref(None) => {
+ let Some(mut x) = self.lower_expr_as_place_with_adjust(
+ current,
+ expr_id,
+ upgrade_rvalue,
+ rest,
+ )? else {
+ return Ok(None);
+ };
+ x.0.projection.push(ProjectionElem::Deref);
+ Ok(Some(x))
+ }
+ Adjust::Deref(Some(od)) => {
+ let Some((r, current)) = self.lower_expr_as_place_with_adjust(
+ current,
+ expr_id,
+ upgrade_rvalue,
+ rest,
+ )? else {
+ return Ok(None);
+ };
+ self.lower_overloaded_deref(
+ current,
+ r,
+ rest.last()
+ .map(|x| x.target.clone())
+ .unwrap_or_else(|| self.expr_ty(expr_id)),
+ last.target.clone(),
+ expr_id.into(),
+ match od.0 {
+ Some(Mutability::Mut) => true,
+ Some(Mutability::Not) => false,
+ None => {
+ not_supported!("implicit overloaded deref with unknown mutability")
+ }
+ },
+ )
+ }
+ Adjust::NeverToAny | Adjust::Borrow(_) | Adjust::Pointer(_) => try_rvalue(self),
+ }
+ } else {
+ self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue)
+ }
+ }
+
+ pub(super) fn lower_expr_as_place(
+ &mut self,
+ current: BasicBlockId,
+ expr_id: ExprId,
+ upgrade_rvalue: bool,
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ match self.infer.expr_adjustments.get(&expr_id) {
+ Some(a) => self.lower_expr_as_place_with_adjust(current, expr_id, upgrade_rvalue, a),
+ None => self.lower_expr_as_place_without_adjust(current, expr_id, upgrade_rvalue),
+ }
+ }
+
+ pub(super) fn lower_expr_as_place_without_adjust(
+ &mut self,
+ current: BasicBlockId,
+ expr_id: ExprId,
+ upgrade_rvalue: bool,
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let try_rvalue = |this: &mut MirLowerCtx<'_>| {
+ if !upgrade_rvalue {
+ return Err(MirLowerError::MutatingRvalue);
+ }
+ this.lower_expr_to_some_place_without_adjust(expr_id, current)
+ };
+ match &self.body.exprs[expr_id] {
+ Expr::Path(p) => {
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
+ let Some(pr) = resolver.resolve_path_in_value_ns(self.db.upcast(), p.mod_path()) else {
+ return Err(MirLowerError::unresolved_path(self.db, p));
+ };
+ let pr = match pr {
+ ResolveValueResult::ValueNs(v) => v,
+ ResolveValueResult::Partial(..) => return try_rvalue(self),
+ };
+ match pr {
+ ValueNs::LocalBinding(pat_id) => {
+ Ok(Some((self.result.binding_locals[pat_id].into(), current)))
+ }
+ _ => try_rvalue(self),
+ }
+ }
+ Expr::UnaryOp { expr, op } => match op {
+ hir_def::expr::UnaryOp::Deref => {
+ if !matches!(
+ self.expr_ty(*expr).kind(Interner),
+ TyKind::Ref(..) | TyKind::Raw(..)
+ ) {
+ let Some(_) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ not_supported!("explicit overloaded deref");
+ }
+ let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ r.projection.push(ProjectionElem::Deref);
+ Ok(Some((r, current)))
+ }
+ _ => try_rvalue(self),
+ },
+ Expr::Field { expr, .. } => {
+ let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else {
+ return Ok(None);
+ };
+ self.push_field_projection(&mut r, expr_id)?;
+ Ok(Some((r, current)))
+ }
+ Expr::Index { base, index } => {
+ let base_ty = self.expr_ty_after_adjustments(*base);
+ let index_ty = self.expr_ty_after_adjustments(*index);
+ if index_ty != TyBuilder::usize()
+ || !matches!(base_ty.kind(Interner), TyKind::Array(..) | TyKind::Slice(..))
+ {
+ not_supported!("overloaded index");
+ }
+ let Some((mut p_base, current)) =
+ self.lower_expr_as_place(current, *base, true)? else {
+ return Ok(None);
+ };
+ let l_index = self.temp(self.expr_ty_after_adjustments(*index))?;
+ let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else {
+ return Ok(None);
+ };
+ p_base.projection.push(ProjectionElem::Index(l_index));
+ Ok(Some((p_base, current)))
+ }
+ _ => try_rvalue(self),
+ }
+ }
+
+ fn lower_overloaded_deref(
+ &mut self,
+ current: BasicBlockId,
+ place: Place,
+ source_ty: Ty,
+ target_ty: Ty,
+ span: MirSpan,
+ mutability: bool,
+ ) -> Result<Option<(Place, BasicBlockId)>> {
+ let (chalk_mut, trait_lang_item, trait_method_name, borrow_kind) = if !mutability {
+ (Mutability::Not, LangItem::Deref, name![deref], BorrowKind::Shared)
+ } else {
+ (
+ Mutability::Mut,
+ LangItem::DerefMut,
+ name![deref_mut],
+ BorrowKind::Mut { allow_two_phase_borrow: false },
+ )
+ };
+ let ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), source_ty.clone()).intern(Interner);
+ let target_ty_ref = TyKind::Ref(chalk_mut, static_lifetime(), target_ty).intern(Interner);
+ let ref_place: Place = self.temp(ty_ref)?.into();
+ self.push_assignment(current, ref_place.clone(), Rvalue::Ref(borrow_kind, place), span);
+ let deref_trait = self
+ .resolve_lang_item(trait_lang_item)?
+ .as_trait()
+ .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
+ let deref_fn = self
+ .db
+ .trait_data(deref_trait)
+ .method_by_name(&trait_method_name)
+ .ok_or(MirLowerError::LangItemNotFound(trait_lang_item))?;
+ let deref_fn_op = Operand::const_zst(
+ TyKind::FnDef(
+ self.db.intern_callable_def(CallableDefId::FunctionId(deref_fn)).into(),
+ Substitution::from1(Interner, source_ty),
+ )
+ .intern(Interner),
+ );
+ let mut result: Place = self.temp(target_ty_ref)?.into();
+ let Some(current) = self.lower_call(deref_fn_op, vec![Operand::Copy(ref_place)], result.clone(), current, false)? else {
+ return Ok(None);
+ };
+ result.projection.push(ProjectionElem::Deref);
+ Ok(Some((result, current)))
+ }
+}
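
A note on the shape of the MIR produced by `lower_overloaded_deref` above: the
place is never dereferenced directly. Instead, a reference to it is taken, the
`Deref`/`DerefMut` lang item's method is invoked through a zero-sized `FnDef`
constant, and a `Deref` projection is pushed onto the place returned by the
call. In surface Rust the result corresponds to a desugaring along these lines
(a hedged sketch; `MyBox` and the temporary names are illustrative only):

use std::ops::Deref;

struct MyBox<T>(T);

impl<T> Deref for MyBox<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

fn main() {
    let my_box = MyBox(42);
    // `let value = *my_box;` lowers roughly as:
    let tmp_ref: &MyBox<i32> = &my_box;            // Rvalue::Ref(Shared, place)
    let tmp_target: &i32 = Deref::deref(tmp_ref);  // Terminator::Call on the lang item's `deref`
    let value = *tmp_target;                       // ProjectionElem::Deref on the call result
    assert_eq!(value, 42);
}
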
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
new file mode 100644
index 000000000..ffc08b7e3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs
@@ -0,0 +1,348 @@
+//! A pretty-printer for MIR.
+
+use std::fmt::{Display, Write};
+
+use hir_def::{body::Body, expr::BindingId};
+use hir_expand::name::Name;
+use la_arena::ArenaMap;
+
+use crate::{
+ db::HirDatabase,
+ display::HirDisplay,
+ mir::{PlaceElem, ProjectionElem, StatementKind, Terminator},
+};
+
+use super::{
+ AggregateKind, BasicBlockId, BorrowKind, LocalId, MirBody, Operand, Place, Rvalue, UnOp,
+};
+
+impl MirBody {
+ pub fn pretty_print(&self, db: &dyn HirDatabase) -> String {
+ let hir_body = db.body(self.owner);
+ let mut ctx = MirPrettyCtx::new(self, &hir_body, db);
+ ctx.for_body();
+ ctx.result
+ }
+}
+
+struct MirPrettyCtx<'a> {
+ body: &'a MirBody,
+ hir_body: &'a Body,
+ db: &'a dyn HirDatabase,
+ result: String,
+ ident: String,
+ local_to_binding: ArenaMap<LocalId, BindingId>,
+}
+
+macro_rules! w {
+ ($dst:expr, $($arg:tt)*) => {
+ { let _ = write!($dst, $($arg)*); }
+ };
+}
+
+macro_rules! wln {
+ ($dst:expr) => {
+ { let _ = writeln!($dst); }
+ };
+ ($dst:expr, $($arg:tt)*) => {
+ { let _ = writeln!($dst, $($arg)*); }
+ };
+}
+
+impl Write for MirPrettyCtx<'_> {
+ fn write_str(&mut self, s: &str) -> std::fmt::Result {
+ let mut it = s.split('\n'); // note: `.lines()` is wrong here, as it drops the empty segment after a trailing newline
+ self.write(it.next().unwrap_or_default());
+ for line in it {
+ self.write_line();
+ self.write(line);
+ }
+ Ok(())
+ }
+}
+
+enum LocalName {
+ Unknown(LocalId),
+ Binding(Name, LocalId),
+}
+
+impl Display for LocalName {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
+ LocalName::Binding(n, l) => write!(f, "{n}_{}", u32::from(l.into_raw())),
+ }
+ }
+}
+
+impl<'a> MirPrettyCtx<'a> {
+ fn for_body(&mut self) {
+ self.with_block(|this| {
+ this.locals();
+ wln!(this);
+ this.blocks();
+ });
+ }
+
+ fn with_block(&mut self, f: impl FnOnce(&mut MirPrettyCtx<'_>)) {
+ self.ident += " ";
+ wln!(self, "{{");
+ f(self);
+ for _ in 0..4 {
+ self.result.pop();
+ self.ident.pop();
+ }
+ wln!(self, "}}");
+ }
+
+ fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
+ let local_to_binding = body.binding_locals.iter().map(|(x, y)| (*y, x)).collect();
+ MirPrettyCtx {
+ body,
+ db,
+ result: String::new(),
+ ident: String::new(),
+ local_to_binding,
+ hir_body,
+ }
+ }
+
+ fn write_line(&mut self) {
+ self.result.push('\n');
+ self.result += &self.ident;
+ }
+
+ fn write(&mut self, line: &str) {
+ self.result += line;
+ }
+
+ fn locals(&mut self) {
+ for (id, local) in self.body.locals.iter() {
+ wln!(self, "let {}: {};", self.local_name(id), local.ty.display(self.db));
+ }
+ }
+
+ fn local_name(&self, local: LocalId) -> LocalName {
+ match self.local_to_binding.get(local) {
+ Some(b) => LocalName::Binding(self.hir_body.bindings[*b].name.clone(), local),
+ None => LocalName::Unknown(local),
+ }
+ }
+
+ fn basic_block_id(&self, basic_block_id: BasicBlockId) -> String {
+ format!("'bb{}", u32::from(basic_block_id.into_raw()))
+ }
+
+ fn blocks(&mut self) {
+ for (id, block) in self.body.basic_blocks.iter() {
+ wln!(self);
+ w!(self, "{}: ", self.basic_block_id(id));
+ self.with_block(|this| {
+ for statement in &block.statements {
+ match &statement.kind {
+ StatementKind::Assign(l, r) => {
+ this.place(l);
+ w!(this, " = ");
+ this.rvalue(r);
+ wln!(this, ";");
+ }
+ StatementKind::StorageDead(p) => {
+ wln!(this, "StorageDead({})", this.local_name(*p));
+ }
+ StatementKind::StorageLive(p) => {
+ wln!(this, "StorageLive({})", this.local_name(*p));
+ }
+ StatementKind::Deinit(p) => {
+ w!(this, "Deinit(");
+ this.place(p);
+ wln!(this, ");");
+ }
+ StatementKind::Nop => wln!(this, "Nop;"),
+ }
+ }
+ match &block.terminator {
+ Some(terminator) => match terminator {
+ Terminator::Goto { target } => {
+ wln!(this, "goto 'bb{};", u32::from(target.into_raw()))
+ }
+ Terminator::SwitchInt { discr, targets } => {
+ w!(this, "switch ");
+ this.operand(discr);
+ w!(this, " ");
+ this.with_block(|this| {
+ for (c, b) in targets.iter() {
+ wln!(this, "{c} => {},", this.basic_block_id(b));
+ }
+ wln!(this, "_ => {},", this.basic_block_id(targets.otherwise()));
+ });
+ }
+ Terminator::Call { func, args, destination, target, .. } => {
+ w!(this, "Call ");
+ this.with_block(|this| {
+ w!(this, "func: ");
+ this.operand(func);
+ wln!(this, ",");
+ w!(this, "args: [");
+ this.operand_list(args);
+ wln!(this, "],");
+ w!(this, "destination: ");
+ this.place(destination);
+ wln!(this, ",");
+ w!(this, "target: ");
+ match target {
+ Some(t) => w!(this, "{}", this.basic_block_id(*t)),
+ None => w!(this, "<unreachable>"),
+ }
+ wln!(this, ",");
+ });
+ }
+ _ => wln!(this, "{:?};", terminator),
+ },
+ None => wln!(this, "<no-terminator>;"),
+ }
+ })
+ }
+ }
+
+ fn place(&mut self, p: &Place) {
+ fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) {
+ let Some((last, head)) = projections.split_last() else {
+ // no projection
+ w!(this, "{}", this.local_name(local));
+ return;
+ };
+ match last {
+ ProjectionElem::Deref => {
+ w!(this, "(*");
+ f(this, local, head);
+ w!(this, ")");
+ }
+ ProjectionElem::Field(field) => {
+ let variant_data = field.parent.variant_data(this.db.upcast());
+ let name = &variant_data.fields()[field.local_id].name;
+ match field.parent {
+ hir_def::VariantId::EnumVariantId(e) => {
+ w!(this, "(");
+ f(this, local, head);
+ let variant_name =
+ &this.db.enum_data(e.parent).variants[e.local_id].name;
+ w!(this, " as {}).{}", variant_name, name);
+ }
+ hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
+ f(this, local, head);
+ w!(this, ".{name}");
+ }
+ }
+ }
+ ProjectionElem::TupleField(x) => {
+ f(this, local, head);
+ w!(this, ".{}", x);
+ }
+ ProjectionElem::Index(l) => {
+ f(this, local, head);
+ w!(this, "[{}]", this.local_name(*l));
+ }
+ x => {
+ f(this, local, head);
+ w!(this, ".{:?}", x);
+ }
+ }
+ }
+ f(self, p.local, &p.projection);
+ }
+
+ fn operand(&mut self, r: &Operand) {
+ match r {
+ Operand::Copy(p) | Operand::Move(p) => {
+ // Our MIR currently makes no distinction between move and copy, so we
+ // print them identically. Feel free to change it.
+ self.place(p);
+ }
+ Operand::Constant(c) => w!(self, "Const({})", c.display(self.db)),
+ }
+ }
+
+ fn rvalue(&mut self, r: &Rvalue) {
+ match r {
+ Rvalue::Use(op) => self.operand(op),
+ Rvalue::Ref(r, p) => {
+ match r {
+ BorrowKind::Shared => w!(self, "&"),
+ BorrowKind::Shallow => w!(self, "&shallow "),
+ BorrowKind::Unique => w!(self, "&uniq "),
+ BorrowKind::Mut { .. } => w!(self, "&mut "),
+ }
+ self.place(p);
+ }
+ Rvalue::Aggregate(AggregateKind::Tuple(_), x) => {
+ w!(self, "(");
+ self.operand_list(x);
+ w!(self, ")");
+ }
+ Rvalue::Aggregate(AggregateKind::Array(_), x) => {
+ w!(self, "[");
+ self.operand_list(x);
+ w!(self, "]");
+ }
+ Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => {
+ w!(self, "Adt(");
+ self.operand_list(x);
+ w!(self, ")");
+ }
+ Rvalue::Aggregate(AggregateKind::Union(_, _), x) => {
+ w!(self, "Union(");
+ self.operand_list(x);
+ w!(self, ")");
+ }
+ Rvalue::Len(p) => {
+ w!(self, "Len(");
+ self.place(p);
+ w!(self, ")");
+ }
+ Rvalue::Cast(ck, op, ty) => {
+ w!(self, "Discriminant({ck:?}");
+ self.operand(op);
+ w!(self, "{})", ty.display(self.db));
+ }
+ Rvalue::CheckedBinaryOp(b, o1, o2) => {
+ self.operand(o1);
+ w!(self, " {b} ");
+ self.operand(o2);
+ }
+ Rvalue::UnaryOp(u, o) => {
+ let u = match u {
+ UnOp::Not => "!",
+ UnOp::Neg => "-",
+ };
+ w!(self, "{u} ");
+ self.operand(o);
+ }
+ Rvalue::Discriminant(p) => {
+ w!(self, "Discriminant(");
+ self.place(p);
+ w!(self, ")");
+ }
+ Rvalue::ShallowInitBox(op, _) => {
+ w!(self, "ShallowInitBox(");
+ self.operand(op);
+ w!(self, ")");
+ }
+ Rvalue::CopyForDeref(p) => {
+ w!(self, "CopyForDeref(");
+ self.place(p);
+ w!(self, ")");
+ }
+ }
+ }
+
+ fn operand_list(&mut self, x: &[Operand]) {
+ let mut it = x.iter();
+ if let Some(first) = it.next() {
+ self.operand(first);
+ for op in it {
+ w!(self, ", ");
+ self.operand(op);
+ }
+ }
+ }
+}
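
For orientation, the printer above renders locals as `let {name}: {ty};`,
where `local_name` yields `_{index}` for unnamed locals and
`{binding}_{index}` for locals that back a binding; basic blocks render as
`'bb{index}: { ... }` with four-space indentation managed by `with_block`, and
terminators without a dedicated arm fall back to their `Debug` form. For a
body like `fn f(x: i32) -> i32 { x }`, the output would plausibly take this
shape (an illustration of the format, not captured test output):

{
    let _0: i32;
    let x_1: i32;

    'bb0: {
        _0 = x_1;
        Return;
    }
}
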
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
index 118e5311e..8c48331b9 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -9,7 +9,7 @@ use base_db::{
salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
use hir_def::{db::DefDatabase, ModuleId};
-use hir_expand::db::AstDatabase;
+use hir_expand::db::ExpandDatabase;
use stdx::hash::{NoHashHashMap, NoHashHashSet};
use syntax::TextRange;
use test_utils::extract_annotations;
@@ -17,7 +17,7 @@ use test_utils::extract_annotations;
#[salsa::database(
base_db::SourceDatabaseExtStorage,
base_db::SourceDatabaseStorage,
- hir_expand::db::AstDatabaseStorage,
+ hir_expand::db::ExpandDatabaseStorage,
hir_def::db::InternDatabaseStorage,
hir_def::db::DefDatabaseStorage,
crate::db::HirDatabaseStorage
@@ -41,8 +41,8 @@ impl fmt::Debug for TestDB {
}
}
-impl Upcast<dyn AstDatabase> for TestDB {
- fn upcast(&self) -> &(dyn AstDatabase + 'static) {
+impl Upcast<dyn ExpandDatabase> for TestDB {
+ fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
&*self
}
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index ba5d9c241..83d31f002 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -23,7 +23,7 @@ use hir_def::{
src::HasSource,
AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
};
-use hir_expand::{db::AstDatabase, InFile};
+use hir_expand::{db::ExpandDatabase, InFile};
use once_cell::race::OnceBool;
use stdx::format_to;
use syntax::{
@@ -61,22 +61,27 @@ fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
Some(tracing::subscriber::set_default(subscriber))
}
+#[track_caller]
fn check_types(ra_fixture: &str) {
check_impl(ra_fixture, false, true, false)
}
+#[track_caller]
fn check_types_source_code(ra_fixture: &str) {
check_impl(ra_fixture, false, true, true)
}
+#[track_caller]
fn check_no_mismatches(ra_fixture: &str) {
check_impl(ra_fixture, true, false, false)
}
+#[track_caller]
fn check(ra_fixture: &str) {
check_impl(ra_fixture, false, false, false)
}
+#[track_caller]
fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_source: bool) {
let _tracing = setup_tracing();
let (db, files) = TestDB::with_many_files(ra_fixture);
@@ -158,7 +163,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
} else {
ty.display_test(&db).to_string()
};
- assert_eq!(actual, expected);
+ assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
}
@@ -174,7 +179,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
} else {
ty.display_test(&db).to_string()
};
- assert_eq!(actual, expected);
+ assert_eq!(actual, expected, "type annotation differs at {:#?}", range.range);
}
if let Some(expected) = adjustments.remove(&range) {
let adjustments = inference_result
@@ -191,30 +196,11 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
}
}
- for (pat, mismatch) in inference_result.pat_type_mismatches() {
- let node = match pat_node(&body_source_map, pat, &db) {
- Some(value) => value,
- None => continue,
- };
- let range = node.as_ref().original_file_range(&db);
- let actual = format!(
- "expected {}, got {}",
- mismatch.expected.display_test(&db),
- mismatch.actual.display_test(&db)
- );
- match mismatches.remove(&range) {
- Some(annotation) => assert_eq!(actual, annotation),
- None => format_to!(unexpected_type_mismatches, "{:?}: {}\n", range.range, actual),
- }
- }
- for (expr, mismatch) in inference_result.expr_type_mismatches() {
- let node = match body_source_map.expr_syntax(expr) {
- Ok(sp) => {
- let root = db.parse_or_expand(sp.file_id).unwrap();
- sp.map(|ptr| ptr.to_node(&root).syntax().clone())
- }
- Err(SyntheticSyntax) => continue,
- };
+ for (expr_or_pat, mismatch) in inference_result.type_mismatches() {
+ let Some(node) = (match expr_or_pat {
+ hir_def::expr::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db),
+ hir_def::expr::ExprOrPatId::PatId(pat) => pat_node(&body_source_map, pat, &db),
+ }) else { continue; };
let range = node.as_ref().original_file_range(&db);
let actual = format!(
"expected {}, got {}",
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
index 3e110abaf..b524922b6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs
@@ -258,6 +258,7 @@ fn test() {
#[test]
fn coerce_autoderef_block() {
+ // FIXME: We should know mutability in overloaded deref
check_no_mismatches(
r#"
//- minicore: deref
@@ -267,7 +268,7 @@ fn takes_ref_str(x: &str) {}
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
- // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Not))), Borrow(Ref(Not))
+ // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(None))), Borrow(Ref(Not))
}
"#,
);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
index f00fa9729..1876be303 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/diagnostics.rs
@@ -73,3 +73,24 @@ fn test(x: bool) -> &'static str {
"#,
);
}
+
+#[test]
+fn non_unit_block_expr_stmt_no_semi() {
+ check(
+ r#"
+fn test(x: bool) {
+ if x {
+ "notok"
+ //^^^^^^^ expected (), got &str
+ } else {
+ "ok"
+ //^^^^ expected (), got &str
+ }
+ match x { true => true, false => 0 }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected (), got bool
+ //^ expected bool, got i32
+ ()
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
index 41c53701d..378d47833 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
@@ -9,6 +9,7 @@ fn infer_slice_method() {
check_types(
r#"
impl<T> [T] {
+ #[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
loop {}
}
@@ -35,6 +36,7 @@ fn test() {
//- /lib.rs crate:other_crate
mod foo {
impl f32 {
+ #[rustc_allow_incoherent_impl]
pub fn foo(self) -> f32 { 0. }
}
}
@@ -47,6 +49,7 @@ fn infer_array_inherent_impl() {
check_types(
r#"
impl<T, const N: usize> [T; N] {
+ #[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
loop {}
}
@@ -1167,7 +1170,6 @@ fn test() {
123..167 '{ ...o(); }': ()
133..134 's': &S
137..151 'unsafe { f() }': &S
- 137..151 'unsafe { f() }': &S
146..147 'f': fn f() -> &S
146..149 'f()': &S
157..158 's': &S
@@ -1253,6 +1255,7 @@ fn foo<T: Trait>(a: &T) {
#[test]
fn autoderef_visibility_field() {
+ // FIXME: We should know mutability in overloaded deref
check(
r#"
//- minicore: deref
@@ -1274,7 +1277,7 @@ mod a {
mod b {
fn foo() {
let x = super::a::Bar::new().0;
- // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not)))
+ // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(None)))
// ^^^^^^^^^^^^^^^^^^^^^^ type: char
}
}
@@ -1437,6 +1440,7 @@ fn resolve_const_generic_array_methods() {
r#"
#[lang = "array"]
impl<T, const N: usize> [T; N] {
+ #[rustc_allow_incoherent_impl]
pub fn map<F, U>(self, f: F) -> [U; N]
where
F: FnMut(T) -> U,
@@ -1445,6 +1449,7 @@ impl<T, const N: usize> [T; N] {
#[lang = "slice"]
impl<T> [T] {
+ #[rustc_allow_incoherent_impl]
pub fn map<F, U>(self, f: F) -> &[U]
where
F: FnMut(T) -> U,
@@ -1468,6 +1473,7 @@ struct Const<const N: usize>;
#[lang = "array"]
impl<T, const N: usize> [T; N] {
+ #[rustc_allow_incoherent_impl]
pub fn my_map<F, U, const X: usize>(self, f: F, c: Const<X>) -> [U; X]
where
F: FnMut(T) -> U,
@@ -1476,6 +1482,7 @@ impl<T, const N: usize> [T; N] {
#[lang = "slice"]
impl<T> [T] {
+ #[rustc_allow_incoherent_impl]
pub fn my_map<F, const X: usize, U>(self, f: F, c: Const<X>) -> &[U]
where
F: FnMut(T) -> U,
@@ -1874,14 +1881,14 @@ fn incoherent_impls() {
pub struct Box<T>(T);
use core::error::Error;
-#[rustc_allow_incoherent_impl]
impl dyn Error {
+ #[rustc_allow_incoherent_impl]
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error>> {
loop {}
}
}
-#[rustc_allow_incoherent_impl]
impl dyn Error + Send {
+ #[rustc_allow_incoherent_impl]
/// Attempts to downcast the box to a concrete type.
pub fn downcast<T: Error + 'static>(self: Box<Self>) -> Result<Box<T>, Box<dyn Error + Send>> {
let err: Box<dyn Error> = self;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
index 9333e2693..74bcab6ca 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/patterns.rs
@@ -476,7 +476,7 @@ fn infer_adt_pattern() {
183..184 'x': usize
190..191 'x': usize
201..205 'E::B': E
- 209..212 'foo': bool
+ 209..212 'foo': {unknown}
216..217 '1': usize
227..231 'E::B': E
235..237 '10': usize
@@ -953,9 +953,9 @@ fn main() {
42..51 'true | ()': bool
49..51 '()': ()
57..59 '{}': ()
- 68..80 '(() | true,)': ((),)
+ 68..80 '(() | true,)': (bool,)
69..71 '()': ()
- 69..78 '() | true': ()
+ 69..78 '() | true': bool
74..78 'true': bool
74..78 'true': bool
84..86 '{}': ()
@@ -964,19 +964,15 @@ fn main() {
96..102 '_ | ()': bool
100..102 '()': ()
108..110 '{}': ()
- 119..128 '(() | _,)': ((),)
+ 119..128 '(() | _,)': (bool,)
120..122 '()': ()
- 120..126 '() | _': ()
+ 120..126 '() | _': bool
125..126 '_': bool
132..134 '{}': ()
49..51: expected bool, got ()
- 68..80: expected (bool,), got ((),)
69..71: expected bool, got ()
- 69..78: expected bool, got ()
100..102: expected bool, got ()
- 119..128: expected (bool,), got ((),)
120..122: expected bool, got ()
- 120..126: expected bool, got ()
"#]],
);
}
@@ -1092,3 +1088,19 @@ fn my_fn(foo: ...) {}
"#,
);
}
+
+#[test]
+fn ref_pat_mutability() {
+ check(
+ r#"
+fn foo() {
+ let &() = &();
+ let &mut () = &mut ();
+ let &mut () = &();
+ //^^^^^^^ expected &(), got &mut ()
+ let &() = &mut ();
+ //^^^ expected &mut (), got &()
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
index de6ae7fff..689f0da44 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
@@ -270,7 +270,7 @@ fn infer_std_crash_5() {
61..320 '{ ... }': ()
75..79 'name': &{unknown}
82..166 'if doe... }': &{unknown}
- 85..98 'doesnt_matter': bool
+ 85..98 'doesnt_matter': {unknown}
99..128 '{ ... }': &{unknown}
113..118 'first': &{unknown}
134..166 '{ ... }': &{unknown}
@@ -279,7 +279,7 @@ fn infer_std_crash_5() {
181..188 'content': &{unknown}
191..313 'if ICE... }': &{unknown}
194..231 'ICE_RE..._VALUE': {unknown}
- 194..247 'ICE_RE...&name)': bool
+ 194..247 'ICE_RE...&name)': {unknown}
241..246 '&name': &&{unknown}
242..246 'name': &{unknown}
248..276 '{ ... }': &{unknown}
@@ -1015,9 +1015,9 @@ fn cfg_tail() {
20..31 '{ "first" }': ()
22..29 '"first"': &str
72..190 '{ ...] 13 }': ()
- 78..88 '{ "fake" }': &str
+ 78..88 '{ "fake" }': ()
80..86 '"fake"': &str
- 93..103 '{ "fake" }': &str
+ 93..103 '{ "fake" }': ()
95..101 '"fake"': &str
108..120 '{ "second" }': ()
110..118 '"second"': &str
@@ -1744,3 +1744,47 @@ fn foo(b: Bar) {
"#,
);
}
+
+#[test]
+fn regression_14305() {
+ check_no_mismatches(
+ r#"
+//- minicore: add
+trait Tr {}
+impl Tr for [u8; C] {}
+const C: usize = 2 + 2;
+"#,
+ );
+}
+
+#[test]
+fn regression_14164() {
+ check_types(
+ r#"
+trait Rec {
+ type K;
+ type Rebind<Tok>: Rec<K = Tok>;
+}
+
+trait Expr<K> {
+ type Part: Rec<K = K>;
+ fn foo(_: <Self::Part as Rec>::Rebind<i32>) {}
+}
+
+struct Head<K>(K);
+impl<K> Rec for Head<K> {
+ type K = K;
+ type Rebind<Tok> = Head<Tok>;
+}
+
+fn test<E>()
+where
+ E: Expr<usize, Part = Head<usize>>,
+{
+ let head;
+ //^^^^ Head<i32>
+ E::foo(head);
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index 2e5787b70..13cc3fea5 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -352,7 +352,6 @@ unsafe fn baz(u: MyUnion) {
71..89 'MyUnio...o: 0 }': MyUnion
86..87 '0': u32
95..113 'unsafe...(u); }': ()
- 95..113 'unsafe...(u); }': ()
104..107 'baz': fn baz(MyUnion)
104..110 'baz(u)': ()
108..109 'u': MyUnion
@@ -360,7 +359,6 @@ unsafe fn baz(u: MyUnion) {
126..146 'MyUnio... 0.0 }': MyUnion
141..144 '0.0': f32
152..170 'unsafe...(u); }': ()
- 152..170 'unsafe...(u); }': ()
161..164 'baz': fn baz(MyUnion)
161..167 'baz(u)': ()
165..166 'u': MyUnion
@@ -1118,21 +1116,22 @@ fn infer_inherent_method() {
fn infer_inherent_method_str() {
check_infer(
r#"
- #[lang = "str"]
- impl str {
- fn foo(&self) -> i32 {}
- }
+#![rustc_coherence_is_core]
+#[lang = "str"]
+impl str {
+ fn foo(&self) -> i32 {}
+}
- fn test() {
- "foo".foo();
- }
- "#,
+fn test() {
+ "foo".foo();
+}
+"#,
expect![[r#"
- 39..43 'self': &str
- 52..54 '{}': i32
- 68..88 '{ ...o(); }': ()
- 74..79 '"foo"': &str
- 74..85 '"foo".foo()': i32
+ 67..71 'self': &str
+ 80..82 '{}': i32
+ 96..116 '{ ...o(); }': ()
+ 102..107 '"foo"': &str
+ 102..113 '"foo".foo()': i32
"#]],
);
}
@@ -2077,22 +2076,17 @@ async fn main() {
16..193 '{ ...2 }; }': ()
26..27 'x': i32
30..43 'unsafe { 92 }': i32
- 30..43 'unsafe { 92 }': i32
39..41 '92': i32
53..54 'y': impl Future<Output = ()>
- 57..85 'async ...wait }': ()
57..85 'async ...wait }': impl Future<Output = ()>
- 65..77 'async { () }': ()
65..77 'async { () }': impl Future<Output = ()>
65..83 'async ....await': ()
73..75 '()': ()
95..96 'z': ControlFlow<(), ()>
- 130..140 'try { () }': ()
130..140 'try { () }': ControlFlow<(), ()>
136..138 '()': ()
150..151 'w': i32
154..166 'const { 92 }': i32
- 154..166 'const { 92 }': i32
162..164 '92': i32
176..177 't': i32
180..190 ''a: { 92 }': i32
@@ -2122,7 +2116,6 @@ fn main() {
83..84 'f': F
89..91 '{}': ()
103..231 '{ ... }); }': ()
- 109..161 'async ... }': Result<(), ()>
109..161 'async ... }': impl Future<Output = Result<(), ()>>
125..139 'return Err(())': !
132..135 'Err': Err<(), ()>(()) -> Result<(), ()>
@@ -2134,7 +2127,6 @@ fn main() {
167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
167..228 'test(|... })': ()
172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
- 175..227 'async ... }': Result<(), ()>
175..227 'async ... }': impl Future<Output = Result<(), ()>>
191..205 'return Err(())': !
198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
@@ -2649,6 +2641,7 @@ impl<T> [T] {}
#[lang = "slice_alloc"]
impl<T> [T] {
+ #[rustc_allow_incoherent_impl]
pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
unimplemented!()
}
@@ -2664,22 +2657,22 @@ struct Astruct;
impl B for Astruct {}
"#,
expect![[r#"
- 569..573 'self': Box<[T], A>
- 602..634 '{ ... }': Vec<T, A>
- 648..761 '{ ...t]); }': ()
- 658..661 'vec': Vec<i32, Global>
- 664..679 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
- 664..691 '<[_]>:...1i32])': Vec<i32, Global>
- 680..690 'box [1i32]': Box<[i32; 1], Global>
- 684..690 '[1i32]': [i32; 1]
- 685..689 '1i32': i32
- 701..702 'v': Vec<Box<dyn B, Global>, Global>
- 722..739 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
- 722..758 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
- 740..757 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
- 744..757 '[box Astruct]': [Box<dyn B, Global>; 1]
- 745..756 'box Astruct': Box<Astruct, Global>
- 749..756 'Astruct': Astruct
+ 604..608 'self': Box<[T], A>
+ 637..669 '{ ... }': Vec<T, A>
+ 683..796 '{ ...t]); }': ()
+ 693..696 'vec': Vec<i32, Global>
+ 699..714 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
+ 699..726 '<[_]>:...1i32])': Vec<i32, Global>
+ 715..725 'box [1i32]': Box<[i32; 1], Global>
+ 719..725 '[1i32]': [i32; 1]
+ 720..724 '1i32': i32
+ 736..737 'v': Vec<Box<dyn B, Global>, Global>
+ 757..774 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
+ 757..793 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
+ 775..792 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
+ 779..792 '[box Astruct]': [Box<dyn B, Global>; 1]
+ 780..791 'box Astruct': Box<Astruct, Global>
+ 784..791 'Astruct': Astruct
"#]],
)
}
@@ -3283,3 +3276,18 @@ fn func() {
"#]],
);
}
+
+#[test]
+fn issue_14275() {
+ // FIXME: evaluate const generic
+ check_types(
+ r#"
+struct Foo<const T: bool>;
+fn main() {
+ const B: bool = false;
+ let foo = Foo::<B>;
+ //^^^ Foo<_>
+}
+"#,
+ );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index 015085bde..da76d7fd8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -83,6 +83,46 @@ async fn test() {
}
#[test]
+fn infer_async_closure() {
+ check_types(
+ r#"
+//- minicore: future, option
+async fn test() {
+ let f = async move |x: i32| x + 42;
+ f;
+// ^ |i32| -> impl Future<Output = i32>
+ let a = f(4);
+ a;
+// ^ impl Future<Output = i32>
+ let x = a.await;
+ x;
+// ^ i32
+ let f = async move || 42;
+ f;
+// ^ || -> impl Future<Output = i32>
+ let a = f();
+ a;
+// ^ impl Future<Output = i32>
+ let x = a.await;
+ x;
+// ^ i32
+ let b = ((async move || {})()).await;
+ b;
+// ^ ()
+ let c = async move || {
+ let y = None;
+ y
+ // ^ Option<u64>
+ };
+ let _: Option<u64> = c().await;
+ c;
+// ^ || -> impl Future<Output = Option<u64>>
+}
+"#,
+ );
+}
+
+#[test]
fn auto_sized_async_block() {
check_no_mismatches(
r#"
@@ -493,29 +533,30 @@ fn tuple_struct_with_fn() {
r#"
struct S(fn(u32) -> u64);
fn test() -> u64 {
- let a = S(|i| 2*i);
+ let a = S(|i| 2*i as u64);
let b = a.0(4);
a.0(2)
}"#,
expect![[r#"
- 43..101 '{ ...0(2) }': u64
+ 43..108 '{ ...0(2) }': u64
53..54 'a': S
57..58 'S': S(fn(u32) -> u64) -> S
- 57..67 'S(|i| 2*i)': S
- 59..66 '|i| 2*i': |u32| -> u64
+ 57..74 'S(|i| ...s u64)': S
+ 59..73 '|i| 2*i as u64': |u32| -> u64
60..61 'i': u32
- 63..64 '2': u32
- 63..66 '2*i': u32
+ 63..64 '2': u64
+ 63..73 '2*i as u64': u64
65..66 'i': u32
- 77..78 'b': u64
- 81..82 'a': S
- 81..84 'a.0': fn(u32) -> u64
- 81..87 'a.0(4)': u64
- 85..86 '4': u32
- 93..94 'a': S
- 93..96 'a.0': fn(u32) -> u64
- 93..99 'a.0(2)': u64
- 97..98 '2': u32
+ 65..73 'i as u64': u64
+ 84..85 'b': u64
+ 88..89 'a': S
+ 88..91 'a.0': fn(u32) -> u64
+ 88..94 'a.0(4)': u64
+ 92..93 '4': u32
+ 100..101 'a': S
+ 100..103 'a.0': fn(u32) -> u64
+ 100..106 'a.0(2)': u64
+ 104..105 '2': u32
"#]],
);
}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index 70d2d5efa..34d957e26 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -5,6 +5,7 @@ use std::iter;
use base_db::CrateId;
use chalk_ir::{cast::Cast, fold::Shift, BoundVar, DebruijnIndex};
+use either::Either;
use hir_def::{
db::DefDatabase,
generics::{
@@ -19,7 +20,6 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::Interned;
-use itertools::Either;
use rustc_hash::FxHashSet;
use smallvec::{smallvec, SmallVec};
@@ -315,7 +315,10 @@ fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<Generic
GenericDefId::TypeAliasId(it) => it.lookup(db).container,
GenericDefId::ConstId(it) => it.lookup(db).container,
GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
- GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
+ GenericDefId::AdtId(_)
+ | GenericDefId::TraitId(_)
+ | GenericDefId::ImplId(_)
+ | GenericDefId::TraitAliasId(_) => return None,
};
match container {