Diffstat (limited to 'compiler/rustc_lint/src/reference_casting.rs')
-rw-r--r--  compiler/rustc_lint/src/reference_casting.rs  138
1 file changed, 81 insertions(+), 57 deletions(-)
diff --git a/compiler/rustc_lint/src/reference_casting.rs b/compiler/rustc_lint/src/reference_casting.rs
index d44691b5e..96290288f 100644
--- a/compiler/rustc_lint/src/reference_casting.rs
+++ b/compiler/rustc_lint/src/reference_casting.rs
@@ -37,59 +37,73 @@ declare_lint_pass!(InvalidReferenceCasting => [INVALID_REFERENCE_CASTING]);
impl<'tcx> LateLintPass<'tcx> for InvalidReferenceCasting {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
- let Some((is_assignment, e)) = is_operation_we_care_about(cx, expr) else {
- return;
- };
-
- let init = cx.expr_or_init(e);
-
- let Some(ty_has_interior_mutability) = is_cast_from_const_to_mut(cx, init) else {
- return;
- };
- let orig_cast = if init.span != e.span { Some(init.span) } else { None };
- let ty_has_interior_mutability = ty_has_interior_mutability.then_some(());
-
- cx.emit_spanned_lint(
- INVALID_REFERENCE_CASTING,
- expr.span,
- if is_assignment {
- InvalidReferenceCastingDiag::AssignToRef { orig_cast, ty_has_interior_mutability }
- } else {
- InvalidReferenceCastingDiag::BorrowAsMut { orig_cast, ty_has_interior_mutability }
- },
- );
+ if let Some((e, pat)) = borrow_or_assign(cx, expr) {
+ if matches!(pat, PatternKind::Borrow { mutbl: Mutability::Mut } | PatternKind::Assign) {
+ let init = cx.expr_or_init(e);
+
+ let Some(ty_has_interior_mutability) = is_cast_from_ref_to_mut_ptr(cx, init) else {
+ return;
+ };
+ let orig_cast = if init.span != e.span { Some(init.span) } else { None };
+ let ty_has_interior_mutability = ty_has_interior_mutability.then_some(());
+
+ cx.emit_spanned_lint(
+ INVALID_REFERENCE_CASTING,
+ expr.span,
+ if pat == PatternKind::Assign {
+ InvalidReferenceCastingDiag::AssignToRef {
+ orig_cast,
+ ty_has_interior_mutability,
+ }
+ } else {
+ InvalidReferenceCastingDiag::BorrowAsMut {
+ orig_cast,
+ ty_has_interior_mutability,
+ }
+ },
+ );
+ }
+ }
}
}
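For context, a minimal sketch (not part of this patch) of the two situations the rewritten check_expr reports, BorrowAsMut and AssignToRef; the function names below are made up for illustration:

    // Reborrowing a `&T` as `&mut T` through raw-pointer casts: BorrowAsMut.
    fn as_mut(r: &i32) -> &mut i32 {
        unsafe { &mut *(r as *const i32 as *mut i32) }
    }

    // Writing through a `*mut` derived from a `&T`: AssignToRef.
    fn write_through(r: &i32) {
        unsafe { *(r as *const i32 as *mut i32) = 1 };
    }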
-fn is_operation_we_care_about<'tcx>(
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum PatternKind {
+ Borrow { mutbl: Mutability },
+ Assign,
+}
+
+fn borrow_or_assign<'tcx>(
cx: &LateContext<'tcx>,
e: &'tcx Expr<'tcx>,
-) -> Option<(bool, &'tcx Expr<'tcx>)> {
- fn deref_assign_or_addr_of<'tcx>(expr: &'tcx Expr<'tcx>) -> Option<(bool, &'tcx Expr<'tcx>)> {
- // &mut <expr>
- let inner = if let ExprKind::AddrOf(_, Mutability::Mut, expr) = expr.kind {
- expr
+) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
+ fn deref_assign_or_addr_of<'tcx>(
+ expr: &'tcx Expr<'tcx>,
+ ) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
+ // &(mut) <expr>
+ let (inner, pat) = if let ExprKind::AddrOf(_, mutbl, expr) = expr.kind {
+ (expr, PatternKind::Borrow { mutbl })
// <expr> = ...
} else if let ExprKind::Assign(expr, _, _) = expr.kind {
- expr
+ (expr, PatternKind::Assign)
// <expr> += ...
} else if let ExprKind::AssignOp(_, expr, _) = expr.kind {
- expr
+ (expr, PatternKind::Assign)
} else {
return None;
};
- if let ExprKind::Unary(UnOp::Deref, e) = &inner.kind {
- Some((!matches!(expr.kind, ExprKind::AddrOf(..)), e))
- } else {
- None
- }
+ // *<inner>
+ let ExprKind::Unary(UnOp::Deref, e) = &inner.kind else {
+ return None;
+ };
+ Some((e, pat))
}
fn ptr_write<'tcx>(
cx: &LateContext<'tcx>,
e: &'tcx Expr<'tcx>,
- ) -> Option<(bool, &'tcx Expr<'tcx>)> {
+ ) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
if let ExprKind::Call(path, [arg_ptr, _arg_val]) = e.kind
&& let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
@@ -98,7 +112,7 @@ fn is_operation_we_care_about<'tcx>(
Some(sym::ptr_write | sym::ptr_write_volatile | sym::ptr_write_unaligned)
)
{
- Some((true, arg_ptr))
+ Some((arg_ptr, PatternKind::Assign))
} else {
None
}
@@ -107,13 +121,10 @@ fn is_operation_we_care_about<'tcx>(
deref_assign_or_addr_of(e).or_else(|| ptr_write(cx, e))
}
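To make the classification concrete, a hedged sketch of the surface forms borrow_or_assign matches and the PatternKind each one maps to (the local names are illustrative):

    fn classification_demo(r: &i32) {
        let mut x = 0i32;
        let p: *mut i32 = &mut x;
        unsafe {
            let _ = &mut *p;  // Borrow { mutbl: Mutability::Mut }
            let _ = &*p;      // Borrow { mutbl: Mutability::Not } (filtered out by check_expr)
            *p = 1;           // Assign (ExprKind::Assign)
            *p += 1;          // Assign (ExprKind::AssignOp)
            // The ptr_write arm treats these calls as Assign on their pointer argument:
            std::ptr::write(r as *const i32 as *mut i32, 1);
            std::ptr::write_volatile(r as *const i32 as *mut i32, 1);
            std::ptr::write_unaligned(r as *const i32 as *mut i32, 1);
        }
    }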
-fn is_cast_from_const_to_mut<'tcx>(
+fn is_cast_from_ref_to_mut_ptr<'tcx>(
cx: &LateContext<'tcx>,
orig_expr: &'tcx Expr<'tcx>,
) -> Option<bool> {
- let mut need_check_freeze = false;
- let mut e = orig_expr;
-
let end_ty = cx.typeck_results().node_type(orig_expr.hir_id);
// Bail out early if the end type is **not** a mutable pointer.
@@ -121,6 +132,28 @@ fn is_cast_from_const_to_mut<'tcx>(
return None;
}
+ let (e, need_check_freeze) = peel_casts(cx, orig_expr);
+
+ let start_ty = cx.typeck_results().node_type(e.hir_id);
+ if let ty::Ref(_, inner_ty, Mutability::Not) = start_ty.kind() {
+ // If an UnsafeCell method is involved, we need to additionally check the
+ // inner type for the presence of the Freeze trait (i.e. it does NOT contain
+ // an UnsafeCell), since in that case we would incorrectly lint on valid casts.
+ //
+ // Except in the presence of non-concrete skeleton types (i.e. generics),
+ // since there is no way to make it safe for arbitrary types.
+ let inner_ty_has_interior_mutability =
+ !inner_ty.is_freeze(cx.tcx, cx.param_env) && inner_ty.has_concrete_skeleton();
+ (!need_check_freeze || !inner_ty_has_interior_mutability)
+ .then_some(inner_ty_has_interior_mutability)
+ } else {
+ None
+ }
+}
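A sketch of the distinction the Freeze check draws, assuming the standard UnsafeCell::raw_get API (function names are illustrative, not from the patch):

    use std::cell::UnsafeCell;

    // Not linted: the pointer comes from UnsafeCell::raw_get and the pointee
    // (UnsafeCell<i32>) is !Freeze, so writing through it is legitimate interior
    // mutability and the function above returns None.
    fn through_cell(c: &UnsafeCell<i32>) {
        unsafe { *UnsafeCell::raw_get(c) = 1 };
    }

    // Linted: no UnsafeCell involved; a plain `&i32` is turned into a `*mut i32`.
    fn plain_ref(r: &i32) {
        unsafe { *(r as *const i32 as *mut i32) = 1 };
    }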
+
+fn peel_casts<'tcx>(cx: &LateContext<'tcx>, mut e: &'tcx Expr<'tcx>) -> (&'tcx Expr<'tcx>, bool) {
+ let mut gone_through_unsafe_cell_raw_get = false;
+
loop {
e = e.peel_blocks();
// <expr> as ...
@@ -145,27 +178,18 @@ fn is_cast_from_const_to_mut<'tcx>(
)
{
if cx.tcx.is_diagnostic_item(sym::unsafe_cell_raw_get, def_id) {
- need_check_freeze = true;
+ gone_through_unsafe_cell_raw_get = true;
}
arg
} else {
- break;
+ let init = cx.expr_or_init(e);
+ if init.hir_id != e.hir_id {
+ init
+ } else {
+ break;
+ }
};
}
- let start_ty = cx.typeck_results().node_type(e.hir_id);
- if let ty::Ref(_, inner_ty, Mutability::Not) = start_ty.kind() {
- // If an UnsafeCell method is involved we need to additionaly check the
- // inner type for the presence of the Freeze trait (ie does NOT contain
- // an UnsafeCell), since in that case we would incorrectly lint on valid casts.
- //
- // We also consider non concrete skeleton types (ie generics)
- // to be an issue since there is no way to make it safe for abitrary types.
- let inner_ty_has_interior_mutability =
- !inner_ty.is_freeze(cx.tcx, cx.param_env) && inner_ty.has_concrete_skeleton();
- (!need_check_freeze || !inner_ty_has_interior_mutability)
- .then_some(inner_ty_has_interior_mutability)
- } else {
- None
- }
+ (e, gone_through_unsafe_cell_raw_get)
}
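The behavioral addition in peel_casts is the expr_or_init fallback: when peeling stops at a plain local, the lint now follows it back to its initializer before giving up. A hedged sketch of the kind of chain this is meant to catch (variable names are made up):

    fn split_across_bindings(r: &i32) {
        let const_ptr = r as *const i32;
        let mut_ptr = const_ptr as *mut i32;
        // Peeling now walks mut_ptr -> const_ptr -> r, so the &i32 origin is
        // still found even though the cast is spread over two bindings.
        unsafe { *mut_ptr = 1 };
    }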