compiler/rustc_const_eval/src/interpret/validity.rs
//! Check the validity invariant of a given value, and tell the user
//! where in the value it got violated.
//! In const context, this goes even further and tries to approximate const safety.
//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
//! to be const-safe.

use std::borrow::Cow;
use std::fmt::{self, Write};
use std::hash::Hash;
use std::mem;
use std::num::NonZero;

use either::{Left, Right};
use hir::def::DefKind;
use rustc_abi::{
    BackendRepr, FieldIdx, FieldsShape, Scalar as ScalarAbi, Size, VariantIdx, Variants,
    WrappingRange,
};
use rustc_ast::Mutability;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_middle::bug;
use rustc_middle::mir::interpret::{
    InterpErrorKind, InvalidMetaKind, Misalignment, Provenance, alloc_range, interp_ok,
};
use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_span::{Symbol, sym};
use tracing::trace;

use super::machine::AllocMap;
use super::{
    AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
    Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
    format_interp_error,
};
use crate::enter_trace_span;

// for the validation errors
#[rustfmt::skip]
use super::InterpErrorKind::UndefinedBehavior as Ub;
use super::InterpErrorKind::Unsupported as Unsup;
use super::UndefinedBehaviorInfo::*;
use super::UnsupportedOpInfo::*;

macro_rules! err_validation_failure {
    ($where:expr,  $msg:expr ) => {{
        let where_ = &$where;
        let path = if !where_.projs.is_empty() {
            let mut path = String::new();
            write_path(&mut path, &where_.projs);
            Some(path)
        } else {
            None
        };

        #[allow(unused)]
        use ValidationErrorKind::*;
        let msg = ValidationErrorKind::from($msg);
        err_ub!(ValidationError {
            orig_ty: where_.orig_ty,
            path,
            ptr_bytes_warning: msg.ptr_bytes_warning(),
            msg: msg.to_string(),
        })
    }};
}

macro_rules! throw_validation_failure {
    ($where:expr, $msg:expr ) => {
        do yeet err_validation_failure!($where, $msg)
    };
}

/// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(x), some_path, {
///     Foo | Bar | Baz => format!("some failure involving {x}"),
/// });
/// ```
///
/// The patterns must be of type `UndefinedBehaviorInfo`.
macro_rules! try_validation {
    ($e:expr, $where:expr,
    $( $( $p:pat_param )|+ => $msg:expr ),+ $(,)?
    ) => {{
        $e.map_err_kind(|e| {
            // We catch the error and turn it into a validation failure. We are okay with
            // allocation here as this can only slow down builds that fail anyway.
            match e {
                $(
                    $($p)|+ => {
                        err_validation_failure!(
                            $where,
                            $msg
                        )
                    }
                ),+,
                e => e,
            }
        })?
    }};
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PtrKind {
    Ref(Mutability),
    Box,
}

impl fmt::Display for PtrKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let str = match self {
            PtrKind::Ref(_) => "reference",
            PtrKind::Box => "box",
        };
        write!(f, "{str}")
    }
}

#[derive(Debug)]
enum ExpectedKind {
    Reference,
    Box,
    RawPtr,
    Bool,
    Char,
    Float,
    Int,
    FnPtr,
    Str,
}

impl fmt::Display for ExpectedKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let str = match self {
            ExpectedKind::Reference => "expected a reference",
            ExpectedKind::Box => "expected a box",
            ExpectedKind::RawPtr => "expected a raw pointer",
            ExpectedKind::Bool => "expected a boolean",
            ExpectedKind::Char => "expected a unicode scalar value",
            ExpectedKind::Float => "expected a floating point number",
            ExpectedKind::Int => "expected an integer",
            ExpectedKind::FnPtr => "expected a function pointer",
            ExpectedKind::Str => "expected a string",
        };
        write!(f, "{str}")
    }
}

impl From<PtrKind> for ExpectedKind {
    fn from(x: PtrKind) -> ExpectedKind {
        match x {
            PtrKind::Box => ExpectedKind::Box,
            PtrKind::Ref(_) => ExpectedKind::Reference,
        }
    }
}

/// Validation errors that can be emitted in more than one place get a variant here so that
/// we format them consistently. Everything else uses the `String` fallback.
#[derive(Debug)]
enum ValidationErrorKind<'tcx> {
    Uninit {
        expected: ExpectedKind,
    },
    PointerAsInt {
        expected: ExpectedKind,
    },
    PartialPointer,
    InvalidMetaWrongTrait {
        /// The vtable that was actually referenced by the wide pointer metadata.
        vtable_dyn_type: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
        /// The vtable that was expected at the point in MIR that it was accessed.
        expected_dyn_type: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
    },
    GeneralError {
        msg: String,
    },
}

impl<'tcx> ValidationErrorKind<'tcx> {
    // We don't do this via `fmt::Display` so that we can do a move in the `GeneralError` case.
    fn to_string(self) -> String {
        use ValidationErrorKind::*;
        match self {
            Uninit { expected } => format!("encountered uninitialized memory, but {expected}"),
            PointerAsInt { expected } => format!("encountered a pointer, but {expected}"),
            PartialPointer => format!("encountered a partial pointer or a mix of pointers"),
            InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type } => format!(
                "wrong trait in wide pointer vtable: expected `{expected_dyn_type}`, but encountered `{vtable_dyn_type}`"
            ),
            GeneralError { msg } => msg,
        }
    }

    fn ptr_bytes_warning(&self) -> bool {
        use ValidationErrorKind::*;
        matches!(self, PointerAsInt { .. } | PartialPointer)
    }
}

impl<'tcx> From<String> for ValidationErrorKind<'tcx> {
    fn from(msg: String) -> Self {
        ValidationErrorKind::GeneralError { msg }
    }
}

fn fmt_range(r: WrappingRange, max_hi: u128) -> String {
    let WrappingRange { start: lo, end: hi } = r;
    assert!(hi <= max_hi);
    if lo > hi {
        format!("less or equal to {hi}, or greater or equal to {lo}")
    } else if lo == hi {
        format!("equal to {lo}")
    } else if lo == 0 {
        assert!(hi < max_hi, "should not be printing if the range covers everything");
        format!("less or equal to {hi}")
    } else if hi == max_hi {
        assert!(lo > 0, "should not be printing if the range covers everything");
        format!("greater or equal to {lo}")
    } else {
        format!("in the range {lo}..={hi}")
    }
}
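
// For illustration (example values chosen arbitrarily, not part of the upstream source):
// with an 8-bit scalar, i.e. `max_hi == 255`, the branches above render as follows:
//   1..=255  -> "greater or equal to 1"
//   0..=10   -> "less or equal to 10"
//   5..=10   -> "in the range 5..=10"
//   7..=7    -> "equal to 7"
//   200..=10 -> "less or equal to 10, or greater or equal to 200" (a wrapping range)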

/// We want to show a nice path to the invalid field for diagnostics,
/// but avoid string operations in the happy case where no error happens.
/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
/// need to later print something for the user.
#[derive(Copy, Clone, Debug)]
pub enum PathElem<'tcx> {
    Field(Symbol),
    Variant(Symbol),
    CoroutineState(VariantIdx),
    CapturedVar(Symbol),
    ArrayElem(usize),
    TupleElem(usize),
    Deref,
    EnumTag,
    CoroutineTag,
    DynDowncast(Ty<'tcx>),
    Vtable,
}

#[derive(Clone, Debug)]
pub struct Path<'tcx> {
    orig_ty: Ty<'tcx>,
    projs: Vec<PathElem<'tcx>>,
}

impl<'tcx> Path<'tcx> {
    fn new(ty: Ty<'tcx>) -> Self {
        Self { orig_ty: ty, projs: vec![] }
    }
}

/// Extra things to check for during validation of CTFE results.
#[derive(Copy, Clone)]
pub enum CtfeValidationMode {
    /// Validation of a `static`
    Static { mutbl: Mutability },
    /// Validation of a promoted.
    Promoted,
    /// Validation of a `const`.
    /// `allow_immutable_unsafe_cell` says whether we allow `UnsafeCell` in immutable memory (which is the
    /// case for the top-level allocation of a `const`, where this is fine because the allocation will be
    /// copied at each use site).
    Const { allow_immutable_unsafe_cell: bool },
}

impl CtfeValidationMode {
    fn allow_immutable_unsafe_cell(self) -> bool {
        match self {
            CtfeValidationMode::Static { .. } => false,
            CtfeValidationMode::Promoted { .. } => false,
            CtfeValidationMode::Const { allow_immutable_unsafe_cell, .. } => {
                allow_immutable_unsafe_cell
            }
        }
    }
}

/// State for tracking recursive validation of references
pub struct RefTracking<T, PATH = ()> {
    seen: FxHashSet<T>,
    todo: Vec<(T, PATH)>,
}

impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH> RefTracking<T, PATH> {
    pub fn empty() -> Self {
        RefTracking { seen: FxHashSet::default(), todo: vec![] }
    }
    pub fn next(&mut self) -> Option<(T, PATH)> {
        self.todo.pop()
    }

    fn track(&mut self, val: T, path: impl FnOnce() -> PATH) {
        if self.seen.insert(val.clone()) {
            trace!("Recursing below ptr {:#?}", val);
            let path = path();
            // Remember to come back to this later.
            self.todo.push((val, path));
        }
    }
}

impl<'tcx, T: Clone + Eq + Hash + std::fmt::Debug> RefTracking<T, Path<'tcx>> {
    pub fn new(val: T, ty: Ty<'tcx>) -> Self {
        let mut ref_tracking_for_consts =
            RefTracking { seen: FxHashSet::default(), todo: vec![(val.clone(), Path::new(ty))] };
        ref_tracking_for_consts.seen.insert(val);
        ref_tracking_for_consts
    }
}

/// Format a path
fn write_path(out: &mut String, path: &[PathElem<'_>]) {
    use self::PathElem::*;

    for elem in path.iter() {
        match elem {
            Field(name) => write!(out, ".{name}"),
            EnumTag => write!(out, ".<enum-tag>"),
            Variant(name) => write!(out, ".<enum-variant({name})>"),
            CoroutineTag => write!(out, ".<coroutine-tag>"),
            CoroutineState(idx) => write!(out, ".<coroutine-state({})>", idx.index()),
            CapturedVar(name) => write!(out, ".<captured-var({name})>"),
            TupleElem(idx) => write!(out, ".{idx}"),
            ArrayElem(idx) => write!(out, "[{idx}]"),
            // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
            // some of the other items here also are not Rust syntax. Actually we can't
            // even use the usual syntax because we are just showing the projections,
            // not the root.
            Deref => write!(out, ".<deref>"),
            DynDowncast(ty) => write!(out, ".<dyn-downcast({ty})>"),
            Vtable => write!(out, ".<vtable>"),
        }
        .unwrap()
    }
}
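
// For illustration (an arbitrarily chosen projection list, not from this file): the path
// `[Field(inner), Deref, ArrayElem(2), EnumTag]` is rendered as
// `.inner.<deref>[2].<enum-tag>`.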

/// Represents a set of `Size` values as a sorted list of ranges.
// These are (offset, length) pairs, and they are sorted and mutually disjoint,
// and never adjacent (i.e. there's always a gap between two of them).
#[derive(Debug, Clone)]
pub struct RangeSet(Vec<(Size, Size)>);

impl RangeSet {
    fn add_range(&mut self, offset: Size, size: Size) {
        if size.bytes() == 0 {
            // No need to track empty ranges.
            return;
        }
        let v = &mut self.0;
        // We scan for a partition point where the left partition is all the elements that end
        // strictly before we start. Those are elements that are too "low" to merge with us.
        let idx =
            v.partition_point(|&(other_offset, other_size)| other_offset + other_size < offset);
        // Now we want to either merge with the first element of the second partition, or insert ourselves before that.
        if let Some(&(other_offset, other_size)) = v.get(idx)
            && offset + size >= other_offset
        {
            // Their end is >= our start (otherwise it would not be in the 2nd partition) and
            // our end is >= their start. This means we can merge the ranges.
            let new_start = other_offset.min(offset);
            let mut new_end = (other_offset + other_size).max(offset + size);
            // We grew to the right, so merge with overlapping/adjacent elements.
            // (We also may have grown to the left, but that can never make us adjacent with
            // anything there since we selected the first such candidate via `partition_point`.)
            let mut scan_right = 1;
            while let Some(&(next_offset, next_size)) = v.get(idx + scan_right)
                && new_end >= next_offset
            {
                // Increase our size to absorb the next element.
                new_end = new_end.max(next_offset + next_size);
                // Look at the next element.
                scan_right += 1;
            }
            // Update the element we grew.
            v[idx] = (new_start, new_end - new_start);
            // Remove the elements we absorbed (if any).
            if scan_right > 1 {
                drop(v.drain((idx + 1)..(idx + scan_right)));
            }
        } else {
            // Insert new element.
            v.insert(idx, (offset, size));
        }
    }
}
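
// For illustration (offsets/sizes in bytes, chosen arbitrarily): starting from an empty
// set, `add_range(0, 4)` gives `[(0, 4)]`, `add_range(8, 2)` gives `[(0, 4), (8, 2)]`,
// and a final `add_range(4, 4)` bridges the gap and merges everything into `[(0, 10)]`.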

struct ValidityVisitor<'rt, 'tcx, M: Machine<'tcx>> {
    /// The `path` may be pushed to, but the part that is present when a function
    /// starts must not be changed! `with_elem` relies on this stack discipline.
    path: Path<'tcx>,
    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>>,
    /// `None` indicates this is not validating for CTFE (but for runtime).
    ctfe_mode: Option<CtfeValidationMode>,
    ecx: &'rt mut InterpCx<'tcx, M>,
    /// Whether provenance should be reset outside of pointers (emulating the effect of a typed
    /// copy).
    reset_provenance_and_padding: bool,
    /// This tracks which byte ranges in this value contain data; the remaining bytes are padding.
    /// The ideal representation here would be pointer-length pairs, but to keep things more compact
    /// we only store a (range) set of offsets -- the base pointer is the same throughout the entire
    /// visit, after all.
    /// If this is `Some`, then `reset_provenance_and_padding` must be true (but not vice versa:
    /// we might not track data vs padding bytes if the operand isn't stored in memory anyway).
    data_bytes: Option<RangeSet>,
    /// True if we are inside of `MaybeDangling`. This disables pointer access checks.
    may_dangle: bool,
}

impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
    fn aggregate_field_path_elem(
        &mut self,
        layout: TyAndLayout<'tcx>,
        field: usize,
        field_ty: Ty<'tcx>,
    ) -> PathElem<'tcx> {
        // First, check if we are projecting to a variant.
        match layout.variants {
            Variants::Multiple { tag_field, .. } => {
                if tag_field.as_usize() == field {
                    return match layout.ty.kind() {
                        ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
                        ty::Coroutine(..) => PathElem::CoroutineTag,
                        _ => bug!("non-variant type {:?}", layout.ty),
                    };
                }
            }
            Variants::Single { .. } | Variants::Empty => {}
        }

        // Now we know we are projecting to a field, so figure out which one.
        match layout.ty.kind() {
            // coroutines, closures, and coroutine-closures all have upvars that may be named.
            ty::Closure(def_id, _) | ty::Coroutine(def_id, _) | ty::CoroutineClosure(def_id, _) => {
                let mut name = None;
                // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
                // https://github.com/rust-lang/project-rfc-2229/issues/46
                if let Some(local_def_id) = def_id.as_local() {
                    let captures = self.ecx.tcx.closure_captures(local_def_id);
                    if let Some(captured_place) = captures.get(field) {
                        // Sometimes the index is beyond the number of upvars (seen
                        // for a coroutine).
                        let var_hir_id = captured_place.get_root_variable();
                        let node = self.ecx.tcx.hir_node(var_hir_id);
                        if let hir::Node::Pat(pat) = node
                            && let hir::PatKind::Binding(_, _, ident, _) = pat.kind
                        {
                            name = Some(ident.name);
                        }
                    }
                }

                PathElem::CapturedVar(name.unwrap_or_else(|| {
                    // Fall back to showing the field index.
                    sym::integer(field)
                }))
            }

            // tuples
            ty::Tuple(_) => PathElem::TupleElem(field),

            // enums
            ty::Adt(def, ..) if def.is_enum() => {
                // we might be projecting *to* a variant, or to a field *in* a variant.
                match layout.variants {
                    Variants::Single { index } => {
                        // Inside a variant
                        PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
                    }
                    Variants::Empty => panic!("there is no field in Variants::Empty types"),
                    Variants::Multiple { .. } => bug!("we handled variants above"),
                }
            }

            // other ADTs
            ty::Adt(def, _) => {
                PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
            }

            // arrays/slices
            ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),

            // dyn traits
            ty::Dynamic(..) => {
                assert_eq!(field, 0);
                PathElem::DynDowncast(field_ty)
            }

            // nothing else has an aggregate layout
            _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
        }
    }

    fn with_elem<R>(
        &mut self,
        elem: PathElem<'tcx>,
        f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
    ) -> InterpResult<'tcx, R> {
        // Remember the old state
        let path_len = self.path.projs.len();
        // Record new element
        self.path.projs.push(elem);
        // Perform operation
        let r = f(self)?;
        // Undo changes
        self.path.projs.truncate(path_len);
        // Done
        interp_ok(r)
    }

    fn read_immediate(
        &self,
        val: &PlaceTy<'tcx, M::Provenance>,
        expected: ExpectedKind,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        interp_ok(try_validation!(
            self.ecx.read_immediate(val),
            self.path,
            Ub(InvalidUninitBytes(_)) =>
                Uninit { expected },
            // The `Unsup` cases can only occur during CTFE
            Unsup(ReadPointerAsInt(_)) =>
                PointerAsInt { expected },
            Unsup(ReadPartialPointer(_)) =>
                PartialPointer,
        ))
    }

    fn read_scalar(
        &self,
        val: &PlaceTy<'tcx, M::Provenance>,
        expected: ExpectedKind,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        interp_ok(self.read_immediate(val, expected)?.to_scalar())
    }

    /// Given a place and a pointer loaded from that place, ensure that the place does
    /// not store any more provenance than the pointer does. IOW, if any provenance
    /// was discarded when loading the pointer, it will also get discarded in-memory.
    fn reset_pointer_provenance(
        &mut self,
        place: &PlaceTy<'tcx, M::Provenance>,
        ptr: &ImmTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        if matches!(ptr.layout.backend_repr, BackendRepr::Scalar(..)) {
            // A thin pointer. If it has provenance, we don't have to do anything.
            // If it does not, ensure we clear the provenance in memory.
            if !matches!(ptr.to_scalar(), Scalar::Ptr(..)) {
                // The loaded pointer has no provenance. Some bytes of its representation still
                // might have provenance, which we have to clear.
                self.ecx.clear_provenance(place)?;
            }
        } else {
            // A wide pointer. This means we have to worry both about the pointer itself and the
            // metadata. We do the lazy thing and just write back the value we got. Just
            // clearing provenance in a targeted manner would be more efficient, but unless this
            // is a perf hotspot it's just not worth the effort.
            self.ecx.write_immediate_no_validate(**ptr, place)?;
        }
        interp_ok(())
    }

    fn check_wide_ptr_meta(
        &mut self,
        meta: MemPlaceMeta<M::Provenance>,
        pointee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx> {
        let tail = self.ecx.tcx.struct_tail_for_codegen(pointee.ty, self.ecx.typing_env);
        match tail.kind() {
            ty::Dynamic(data, _) => {
                let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
                // Make sure it is a genuine vtable pointer for the right trait.
                try_validation!(
                    self.ecx.get_ptr_vtable_ty(vtable, Some(data)),
                    self.path,
                    Ub(DanglingIntPointer{ .. } | InvalidVTablePointer(..)) =>
                        format!("encountered {vtable}, but expected a vtable pointer"),
                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) =>
                        InvalidMetaWrongTrait { expected_dyn_type, vtable_dyn_type },
                );
            }
            ty::Slice(..) | ty::Str => {
                let _len = meta.unwrap_meta().to_target_usize(self.ecx)?;
                // We do not check that `len * elem_size <= isize::MAX`:
                // that is only required for references, and there it falls out of the
                // "dereferenceable" check performed by Stacked Borrows.
            }
            ty::Foreign(..) => {
                // Unsized, but not wide.
            }
            _ => bug!("Unexpected unsized type tail: {:?}", tail),
        }

        interp_ok(())
    }

    /// Check a reference or `Box`.
    ///
    /// `ty` is the actual type of `value`; for a Box, `value` will be just the inner raw pointer.
    fn check_safe_pointer(
        &mut self,
        value: &PlaceTy<'tcx, M::Provenance>,
        ty: Ty<'tcx>,
        ptr_kind: PtrKind,
    ) -> InterpResult<'tcx> {
        let ptr = self.read_immediate(value, ptr_kind.into())?;
        if self.reset_provenance_and_padding {
            // There's no padding in a pointer.
            self.add_data_range_place(value);
            // Resetting provenance is done below, together with retagging, to avoid
            // redundant writes.
        }
        let place = self.ecx.imm_ptr_to_mplace(&ptr)?;
        // Handle wide pointers.
        // Check metadata early, for better diagnostics
        if place.layout.is_unsized() {
            self.check_wide_ptr_meta(place.meta(), place.layout)?;
        }

        // Determine size and alignment of pointee.
        let size_and_align = try_validation!(
            self.ecx.size_and_align_of_val(&place),
            self.path,
            Ub(InvalidMeta(msg)) => format!(
                "encountered invalid {ptr_kind} metadata: {}",
                match msg {
                    InvalidMetaKind::SliceTooBig => "slice is bigger than largest supported object",
                    InvalidMetaKind::TooBig => "total size is bigger than largest supported object",
                }
            )
        );
        let (size, align) = size_and_align
            // for the purpose of validity, consider foreign types to have
            // alignment and size determined by the layout (size will be 0,
            // alignment should take attributes into account).
            .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));

        // If we're not allowed to dangle, make sure this is dereferenceable and retag it for
        // the aliasing model.
        let adjusted_ptr = if !self.may_dangle {
            try_validation!(
                self.ecx.check_ptr_access(
                    place.ptr(),
                    size,
                    CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message
                ),
                self.path,
                Ub(DanglingIntPointer { addr: 0, .. }) =>
                    format!("encountered a null {ptr_kind}"),
                Ub(DanglingIntPointer { addr: i, .. }) =>
                    format!(
                        "encountered a dangling {ptr_kind} ({ptr} has no provenance)",
                        ptr = Pointer::<Option<AllocId>>::without_provenance(i)
                    ),
                Ub(PointerOutOfBounds { .. }) =>
                    format!("encountered a dangling {ptr_kind} (going beyond the bounds of its allocation)"),
                Ub(PointerUseAfterFree(..)) =>
                    format!("encountered a dangling {ptr_kind} (use-after-free)"),
            );
            if self.reset_provenance_and_padding {
                M::retag_ptr_value(self.ecx, &ptr, ty).map_err_kind(|e| match e {
                    Ub(WriteToReadOnly(_)) => {
                        err_validation_failure!(
                            self.path,
                            format!(
                                "encountered {} pointing to read-only memory",
                                if ptr_kind == PtrKind::Box { "box" } else { "mutable reference" },
                            )
                        )
                    }
                    InterpErrorKind::MachineStop(mut machine_err) => {
                        // Enhance the aliasing model error with the current path.
                        if !self.path.projs.is_empty() {
                            let mut path = String::new();
                            write_path(&mut path, &self.path.projs);
                            machine_err.with_validation_path(path);
                        }
                        InterpErrorKind::MachineStop(machine_err)
                    }
                    e => e,
                })?
            } else {
                // We can't retag if we're not resetting provenance.
                None
            }
        } else {
            // Pointer remains unchanged.
            None
        };
        // If the pointer needs adjusting, write back adjusted pointer. This automatically
        // also clears any excess provenance. Otherwise, just clear the provenance.
        if let Some(ptr) = adjusted_ptr {
            self.ecx.write_immediate_no_validate(*ptr, value)?;
        } else if self.reset_provenance_and_padding {
            self.reset_pointer_provenance(value, &ptr)?;
        }

        // Check alignment after dereferenceable (if both are violated, trigger the error above).
        try_validation!(
            self.ecx.check_ptr_align(
                place.ptr(),
                align,
            ),
            self.path,
            Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => format!(
                "encountered an unaligned {ptr_kind} (required {required_bytes} byte alignment but found {found_bytes})",
                required_bytes = required.bytes(),
                found_bytes = has.bytes()
            ),
        );

        // Make sure this is non-null. This is obviously needed when `may_dangle` is set,
        // but even if we did check dereferenceability above that would still allow null
        // pointers if `size` is zero.
        let scalar = Scalar::from_maybe_pointer(place.ptr(), self.ecx);
        if self.ecx.scalar_may_be_null(scalar)? {
            let maybe = !M::Provenance::OFFSET_IS_ADDR && matches!(scalar, Scalar::Ptr(..));
            throw_validation_failure!(
                self.path,
                format!(
                    "encountered a {maybe}null {ptr_kind}",
                    maybe = if maybe { "maybe-" } else { "" }
                )
            )
        }
        // Do not allow references to uninhabited types.
        if place.layout.is_uninhabited() {
            let ty = place.layout.ty;
            throw_validation_failure!(
                self.path,
                format!("encountered a {ptr_kind} pointing to uninhabited type {ty}")
            )
        }

        // Recursive checking (but not inside `MaybeDangling` of course).
        if let Some(ref_tracking) = self.ref_tracking.as_deref_mut()
            && !self.may_dangle
        {
            // Proceed recursively even for ZST, no reason to skip them!
            // `!` is a ZST and we want to validate it.
            if let Some(ctfe_mode) = self.ctfe_mode {
                let mut skip_recursive_check = false;
                // CTFE imposes restrictions on what references can point to.
                if let Ok((alloc_id, _offset, _prov)) =
                    self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
                {
                    // Everything should be already interned.
                    let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
                        if self.ecx.memory.alloc_map.contains_key(&alloc_id) {
                            // This can happen when interning didn't complete due to, e.g.
                            // missing `make_global`. This must mean other errors are already
                            // being reported.
                            self.ecx.tcx.dcx().delayed_bug(
                                "interning did not complete, there should be an error",
                            );
                            return interp_ok(());
                        }
                        // We can't have *any* references to non-existing allocations in const-eval
                        // as the rest of rustc isn't happy with them... so we throw an error, even
                        // though for zero-sized references this isn't really UB.
                        // A potential future alternative would be to resurrect this as a zero-sized allocation
                        // (which codegen will then compile to an aligned dummy pointer anyway).
                        throw_validation_failure!(
                            self.path,
                            format!("encountered a dangling {ptr_kind} (use-after-free)")
                        );
                    };
                    let (size, _align) =
                        global_alloc.size_and_align(*self.ecx.tcx, self.ecx.typing_env);
                    let alloc_actual_mutbl =
                        global_alloc.mutability(*self.ecx.tcx, self.ecx.typing_env);

                    match global_alloc {
                        GlobalAlloc::Static(did) => {
                            let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
                                bug!()
                            };
                            assert!(!self.ecx.tcx.is_thread_local_static(did));
                            assert!(self.ecx.tcx.is_static(did));
                            match ctfe_mode {
                                CtfeValidationMode::Static { .. }
                                | CtfeValidationMode::Promoted { .. } => {
                                    // We skip recursively checking other statics. These statics must be sound by
                                    // themselves, and the only way to get broken statics here is by using
                                    // unsafe code.
                                    // The reason we don't check other statics is twofold. For one, in all
                                    // sound cases, the static was already validated on its own, and second, we
                                    // trigger cycle errors if we try to compute the value of the other static
                                    // and that static refers back to us (potentially through a promoted).
                                    // This could miss some UB, but that's fine.
                                    // We still walk nested allocations, as they are fundamentally part of this validation run.
                                    // This means we will also recurse into nested statics of *other*
                                    // statics, even though we do not recurse into other statics directly.
                                    // That's somewhat inconsistent but harmless.
                                    skip_recursive_check = !nested;
                                }
                                CtfeValidationMode::Const { .. } => {
                                    // If this is mutable memory or an `extern static`, there's no point in checking it -- we'd
                                    // just get errors trying to read the value.
                                    if alloc_actual_mutbl.is_mut()
                                        || self.ecx.tcx.is_foreign_item(did)
                                    {
                                        skip_recursive_check = true;
                                    }
                                }
                            }
                        }
                        _ => (),
                    }

                    // If this allocation has size zero, there is no actual mutability here.
                    if size != Size::ZERO {
                        // Determine whether this pointer expects to be pointing to something mutable.
                        let ptr_expected_mutbl = match ptr_kind {
                            PtrKind::Box => Mutability::Mut,
                            PtrKind::Ref(mutbl) => {
                                // We do not take into account interior mutability here since we cannot know if
                                // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
                                // that in the recursive descent behind this reference (controlled by
                                // `allow_immutable_unsafe_cell`).
                                mutbl
                            }
                        };
                        // Mutable pointer to immutable memory is no good.
                        if ptr_expected_mutbl == Mutability::Mut
                            && alloc_actual_mutbl == Mutability::Not
                        {
                            // This can actually occur with transmutes.
                            throw_validation_failure!(
                                self.path,
                                format!(
                                    "encountered mutable reference or box pointing to read-only memory"
                                )
                            );
                        }
                    }
                }
                // Potentially skip recursive check.
                if skip_recursive_check {
                    return interp_ok(());
                }
            } else {
                // This is not CTFE, so it's Miri with recursive checking.
                // FIXME: should we skip `UnsafeCell` behind shared references? Currently that is
                // not needed since validation reads bypass Stacked Borrows and data race checks,
                // but is that really coherent?
            }
            let path = &self.path;
            ref_tracking.track(place, || {
                // We need to clone the path anyway, make sure it gets created
                // with enough space for the additional `Deref`.
                let mut new_projs = Vec::with_capacity(path.projs.len() + 1);
                new_projs.extend(&path.projs);
                new_projs.push(PathElem::Deref);
                Path { projs: new_projs, orig_ty: path.orig_ty }
            });
        }
        interp_ok(())
    }
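
    // For illustration (a hypothetical input, not exercised here): validating a `&u32`
    // whose bits are all zero (e.g. produced by transmuting `0usize`) fails the
    // `check_ptr_access` call above with `DanglingIntPointer { addr: 0, .. }` and is
    // reported as "encountered a null reference".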

    /// Check if this is a value of primitive type, and if yes check the validity of the value
    /// at that type. Return `true` if the type is indeed primitive.
    ///
    /// Note that not all of these have `FieldsShape::Primitive`, e.g. wide references.
    fn try_visit_primitive(
        &mut self,
        value: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, bool> {
        // Go over all the primitive types
        let ty = value.layout.ty;
        match ty.kind() {
            ty::Bool => {
                let scalar = self.read_scalar(value, ExpectedKind::Bool)?;
                try_validation!(
                    scalar.to_bool(),
                    self.path,
                    Ub(InvalidBool(..)) =>
                        format!("encountered {scalar:x}, but expected a boolean"),
                );
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Char => {
                let scalar = self.read_scalar(value, ExpectedKind::Char)?;
                try_validation!(
                    scalar.to_char(),
                    self.path,
                    Ub(InvalidChar(..)) =>
                        format!("encountered {scalar:x}, but expected a valid unicode scalar value \
                          (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)")
                );
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
                // NOTE: Keep this in sync with the array optimization for int/float
                // types below!
                self.read_scalar(
                    value,
                    if matches!(ty.kind(), ty::Float(..)) {
                        ExpectedKind::Float
                    } else {
                        ExpectedKind::Int
                    },
                )?;
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::RawPtr(..) => {
                let ptr = self.read_immediate(value, ExpectedKind::RawPtr)?;
                if self.reset_provenance_and_padding {
                    self.reset_pointer_provenance(value, &ptr)?;
                    // There's no padding in a pointer.
                    self.add_data_range_place(value);
                }

                let place = self.ecx.imm_ptr_to_mplace(&ptr)?;
                if place.layout.is_unsized() {
                    self.check_wide_ptr_meta(place.meta(), place.layout)?;
                }
                interp_ok(true)
            }
            ty::Ref(_, _ty, mutbl) => {
                self.check_safe_pointer(value, ty, PtrKind::Ref(*mutbl))?;
                interp_ok(true)
            }
            ty::FnPtr(..) => {
                let scalar = self.read_scalar(value, ExpectedKind::FnPtr)?;

                // If we check references recursively, also check that this points to a function.
                if let Some(_) = self.ref_tracking {
                    let ptr = scalar.to_pointer(self.ecx)?;
                    let _fn = try_validation!(
                        self.ecx.get_ptr_fn(ptr),
                        self.path,
                        Ub(DanglingIntPointer{ .. } | InvalidFunctionPointer(..)) =>
                            format!("encountered {ptr}, but expected a function pointer"),
                    );
                    // FIXME: Check if the signature matches
                } else {
                    // Otherwise (for standalone Miri and for `-Zextra-const-ub-checks`),
                    // we still have to check that it is non-null.
                    if self.ecx.scalar_may_be_null(scalar)? {
                        let maybe =
                            !M::Provenance::OFFSET_IS_ADDR && matches!(scalar, Scalar::Ptr(..));
                        throw_validation_failure!(
                            self.path,
                            format!(
                                "encountered a {maybe}null function pointer",
                                maybe = if maybe { "maybe-" } else { "" }
                            )
                        );
                    }
                }
                if self.reset_provenance_and_padding {
                    // Make sure we do not preserve partial provenance. This matches the thin
                    // pointer handling in `deref_pointer`.
                    if matches!(scalar, Scalar::Int(..)) {
                        self.ecx.clear_provenance(value)?;
                    }
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Never => {
                throw_validation_failure!(
                    self.path,
                    format!("encountered a value of the never type `!`")
                )
            }
            ty::Foreign(..) | ty::FnDef(..) => {
                // Nothing to check.
                interp_ok(true)
            }
            ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),
            // The above should be all the primitive types. The rest is compound, we
            // check them by visiting their fields/variants.
            ty::Adt(..)
            | ty::Tuple(..)
            | ty::Array(..)
            | ty::Slice(..)
            | ty::Str
            | ty::Dynamic(..)
            | ty::Closure(..)
            | ty::Pat(..)
            | ty::CoroutineClosure(..)
            | ty::Coroutine(..) => interp_ok(false),
            // Some types only occur during typechecking, they have no layout.
            // We should not see them here and we could not check them anyway.
            ty::Error(_)
            | ty::Infer(..)
            | ty::Placeholder(..)
            | ty::Bound(..)
            | ty::Param(..)
            | ty::Alias(..)
            | ty::CoroutineWitness(..) => bug!("Encountered invalid type {:?}", ty),
        }
    }

    fn visit_scalar(
        &mut self,
        scalar: Scalar<M::Provenance>,
        scalar_layout: ScalarAbi,
    ) -> InterpResult<'tcx> {
        let size = scalar_layout.size(self.ecx);
        let valid_range = scalar_layout.valid_range(self.ecx);
        let WrappingRange { start, end } = valid_range;
        let max_value = size.unsigned_int_max();
        assert!(end <= max_value);
        let bits = match scalar.try_to_scalar_int() {
            Ok(int) => int.to_bits(size),
            Err(_) => {
                // So this is a pointer then, and casting to an int failed.
                // Can only happen during CTFE.
                // We support 2 kinds of ranges here: full range, and excluding zero.
                if start == 1 && end == max_value {
                    // Only null is the niche. So make sure the ptr is NOT null.
                    if self.ecx.scalar_may_be_null(scalar)? {
                        throw_validation_failure!(
                            self.path,
                            format!(
                                "encountered a maybe-null pointer, but expected something that is definitely non-zero"
                            )
                        )
                    } else {
                        return interp_ok(());
                    }
                } else if scalar_layout.is_always_valid(self.ecx) {
                    // Easy. (This is reachable if `enforce_number_validity` is set.)
                    return interp_ok(());
                } else {
                    // Conservatively, we reject, because the pointer *could* have a bad value.
                    throw_validation_failure!(
                        self.path,
                        format!(
                            "encountered a pointer with unknown absolute address, but expected something that is definitely {in_range}",
                            in_range = fmt_range(valid_range, max_value)
                        )
                    )
                }
            }
        };
        // Now compare.
        if valid_range.contains(bits) {
            interp_ok(())
        } else {
            throw_validation_failure!(
                self.path,
                format!(
                    "encountered {bits}, but expected something {in_range}",
                    in_range = fmt_range(valid_range, max_value)
                )
            )
        }
    }
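
    // For illustration (a hypothetical input): for a `NonZero<u8>` field the layout's
    // valid range is `1..=255`, so a transmuted `0u8` lands in the final `else` branch
    // above and is reported as "encountered 0, but expected something greater or equal
    // to 1" (the range text coming from `fmt_range`).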

    fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
        debug_assert!(self.ctfe_mode.is_some());
        if let Some(mplace) = val.as_mplace_or_local().left() {
            if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
                let tcx = *self.ecx.tcx;
                // Everything must be already interned.
                let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.typing_env);
                if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
                    assert_eq!(alloc.mutability, mutbl);
                }
                mutbl.is_mut()
            } else {
                // No memory at all.
                false
            }
        } else {
            // A local variable -- definitely mutable.
            true
        }
    }

    /// Add the given pointer-length pair to the "data" range of this visit.
    fn add_data_range(&mut self, ptr: Pointer<Option<M::Provenance>>, size: Size) {
        if let Some(data_bytes) = self.data_bytes.as_mut() {
            // We only have to store the offset, the rest is the same for all pointers here.
            // The logic is agnostic to whether the offset is relative or absolute as long as
            // it is consistent.
            let (_prov, offset) = ptr.into_raw_parts();
            // Add this.
            data_bytes.add_range(offset, size);
        };
    }

    /// Add the entire given place to the "data" range of this visit.
    fn add_data_range_place(&mut self, place: &PlaceTy<'tcx, M::Provenance>) {
        // Only sized places can be added this way.
        debug_assert!(place.layout.is_sized());
        if let Some(data_bytes) = self.data_bytes.as_mut() {
            let offset = Self::data_range_offset(self.ecx, place);
            data_bytes.add_range(offset, place.layout.size);
        }
    }

    /// Convert a place into the offset it starts at, for the purpose of data_range tracking.
    /// Must only be called if `data_bytes` is `Some(_)`.
    fn data_range_offset(ecx: &InterpCx<'tcx, M>, place: &PlaceTy<'tcx, M::Provenance>) -> Size {
        // The presence of `data_bytes` implies that our place is in memory.
        let ptr = ecx
            .place_to_op(place)
            .expect("place must be in memory")
            .as_mplace_or_imm()
            .expect_left("place must be in memory")
            .ptr();
        let (_prov, offset) = ptr.into_raw_parts();
        offset
    }

    fn reset_padding(&mut self, place: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
        let Some(data_bytes) = self.data_bytes.as_mut() else { return interp_ok(()) };
        // Our value must be in memory, otherwise we would not have set up `data_bytes`.
        let mplace = self.ecx.force_allocation(place)?;
        // Determine starting offset and size.
        let (_prov, start_offset) = mplace.ptr().into_raw_parts();
        let (size, _align) = self
            .ecx
            .size_and_align_of_val(&mplace)?
            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
        // If there is no padding at all, we can skip the rest: check for
        // a single data range covering the entire value.
        if data_bytes.0 == &[(start_offset, size)] {
            return interp_ok(());
        }
        // Get a handle for the allocation. Do this only once, to avoid looking up the same
        // allocation over and over again. (Though to be fair, iterating the value already does
        // exactly that.)
        let Some(mut alloc) = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)? else {
            // A ZST, no padding to clear.
            return interp_ok(());
        };
        // Add a "finalizer" data range at the end, so that the iteration below finds all gaps
        // between ranges.
        data_bytes.0.push((start_offset + size, Size::ZERO));
        // Iterate, and reset gaps.
        let mut padding_cleared_until = start_offset;
        for &(offset, size) in data_bytes.0.iter() {
            assert!(
                offset >= padding_cleared_until,
                "reset_padding on {}: previous field ended at offset {}, next field starts at {} (and has a size of {} bytes)",
                mplace.layout.ty,
                (padding_cleared_until - start_offset).bytes(),
                (offset - start_offset).bytes(),
                size.bytes(),
            );
            if offset > padding_cleared_until {
                // We found padding. Adjust the range to be relative to `alloc`, and make it uninit.
                let padding_start = padding_cleared_until - start_offset;
                let padding_size = offset - padding_cleared_until;
                let range = alloc_range(padding_start, padding_size);
                trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty);
                alloc.write_uninit(range);
            }
            padding_cleared_until = offset + size;
        }
        assert!(padding_cleared_until == start_offset + size);
        interp_ok(())
    }

    /// Computes the data range of this union type:
    /// which bytes are inside a field (i.e., not padding.)
    fn union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, M>,
        layout: TyAndLayout<'tcx>,
    ) -> Cow<'e, RangeSet> {
        assert!(layout.ty.is_union());
        assert!(layout.is_sized(), "there are no unsized unions");
        let layout_cx = LayoutCx::new(*ecx.tcx, ecx.typing_env);
        return M::cached_union_data_range(ecx, layout.ty, || {
            let mut out = RangeSet(Vec::new());
            union_data_range_uncached(&layout_cx, layout, Size::ZERO, &mut out);
            out
        });

        /// Helper for recursive traversal: add data ranges of the given type to `out`.
        fn union_data_range_uncached<'tcx>(
            cx: &LayoutCx<'tcx>,
            layout: TyAndLayout<'tcx>,
            base_offset: Size,
            out: &mut RangeSet,
        ) {
            // If this is a ZST, we don't contain any data. In particular, this helps us to quickly
            // skip over huge arrays of ZST.
            if layout.is_zst() {
                return;
            }
            // Just recursively add all the fields of everything to the output.
            match &layout.fields {
                FieldsShape::Primitive => {
                    out.add_range(base_offset, layout.size);
                }
                &FieldsShape::Union(fields) => {
                    // Currently, all fields start at offset 0 (relative to `base_offset`).
                    for field in 0..fields.get() {
                        let field = layout.field(cx, field);
                        union_data_range_uncached(cx, field, base_offset, out);
                    }
                }
                &FieldsShape::Array { stride, count } => {
                    let elem = layout.field(cx, 0);

                    // Fast-path for large arrays of simple types that do not contain any padding.
                    if elem.backend_repr.is_scalar() {
                        out.add_range(base_offset, elem.size * count);
                    } else {
                        for idx in 0..count {
                            // This repeats the same computation for every array element... but the alternative
                            // is to allocate temporary storage for a dedicated `out` set for the array element,
                            // and replicating that N times. Is that better?
                            union_data_range_uncached(cx, elem, base_offset + idx * stride, out);
                        }
                    }
                }
                FieldsShape::Arbitrary { offsets, .. } => {
                    for (field, &offset) in offsets.iter_enumerated() {
                        let field = layout.field(cx, field.as_usize());
                        union_data_range_uncached(cx, field, base_offset + offset, out);
                    }
                }
            }
            // Don't forget potential other variants.
            match &layout.variants {
                Variants::Single { .. } | Variants::Empty => {
                    // Fully handled above.
                }
                Variants::Multiple { variants, .. } => {
                    for variant in variants.indices() {
                        let variant = layout.for_variant(cx, variant);
                        union_data_range_uncached(cx, variant, base_offset, out);
                    }
                }
            }
        }
    }
}
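
// For illustration (a made-up union, not from this file): for
// `union U { a: u16, b: [u8; 3] }`, `union_data_range` yields `[(0, 3)]` -- the `u16`
// contributes `(0, 2)`, the byte array contributes `(0, 3)`, and the two merge -- so
// with an overall size of 4 bytes, byte 3 is treated as padding.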
=> PathElem::CoroutineState(variant_id),1308            _ => bug!("Unexpected type with variant: {:?}", old_val.layout.ty),1309        };1310        self.with_elem(name, move |this| this.visit_value(new_val))1311    }13121313    #[inline(always)]1314    fn visit_union(1315        &mut self,1316        val: &PlaceTy<'tcx, M::Provenance>,1317        _fields: NonZero<usize>,1318    ) -> InterpResult<'tcx> {1319        // Special check for CTFE validation, preventing `UnsafeCell` inside unions in immutable memory.1320        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {1321            // Unsized unions are currently not a thing, but let's keep this code consistent with1322            // the check in `visit_value`.1323            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);1324            if !zst && !val.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.typing_env) {1325                if !self.in_mutable_memory(val) {1326                    throw_validation_failure!(1327                        self.path,1328                        format!("encountered `UnsafeCell` in read-only memory")1329                    );1330                }1331            }1332        }1333        if self.reset_provenance_and_padding1334            && let Some(data_bytes) = self.data_bytes.as_mut()1335        {1336            let base_offset = Self::data_range_offset(self.ecx, val);1337            // Determine and add data range for this union.1338            let union_data_range = Self::union_data_range(self.ecx, val.layout);1339            for &(offset, size) in union_data_range.0.iter() {1340                data_bytes.add_range(base_offset + offset, size);1341            }1342        }1343        interp_ok(())1344    }13451346    #[inline]1347    fn visit_box(1348        &mut self,1349        box_ty: Ty<'tcx>,1350        val: &PlaceTy<'tcx, M::Provenance>,1351    ) -> InterpResult<'tcx> {1352        self.check_safe_pointer(&val, box_ty, PtrKind::Box)?;1353        interp_ok(())1354    }13551356    #[inline]1357    fn visit_value(&mut self, val: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {1358        trace!("visit_value: {:?}, {:?}", *val, val.layout);13591360        // Check primitive types -- the leaves of our recursive descent.1361        // This is called even for enum discriminants (which are "fields" of their enum),1362        // so for integer-typed discriminants the provenance reset will happen here.1363        // We assume that the Scalar validity range does not restrict these values1364        // any further than `try_visit_primitive` does!1365        if self.try_visit_primitive(val)? {1366            return interp_ok(());1367        }13681369        // Special check preventing `UnsafeCell` in the inner part of constants1370        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {1371            // Exclude ZST values. 
We need to compute the dynamic size/align to properly1372            // handle slices and trait objects.1373            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);1374            if !zst1375                && let Some(def) = val.layout.ty.ty_adt_def()1376                && def.is_unsafe_cell()1377            {1378                if !self.in_mutable_memory(val) {1379                    throw_validation_failure!(1380                        self.path,1381                        format!("encountered `UnsafeCell` in read-only memory")1382                    );1383                }1384            }1385        }13861387        // Recursively walk the value at its type. Apply optimizations for some large types.1388        match val.layout.ty.kind() {1389            ty::Str => {1390                let mplace = val.assert_mem_place(); // strings are unsized and hence never immediate1391                let len = mplace.len(self.ecx)?;1392                let expected = ExpectedKind::Str;1393                try_validation!(1394                    self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),1395                    self.path,1396                    Ub(InvalidUninitBytes(..)) =>1397                        Uninit { expected },1398                    Unsup(ReadPointerAsInt(_)) =>1399                        PointerAsInt { expected },1400                );1401            }1402            ty::Array(tys, ..) | ty::Slice(tys)1403                // This optimization applies for types that can hold arbitrary non-provenance bytes (such as1404                // integer and floating point types).1405                // FIXME(wesleywiser) This logic could be extended further to arbitrary structs or1406                // tuples made up of integer/floating point types or inhabited ZSTs with no padding.1407                if matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))1408                =>1409            {1410                let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };1411                // Optimized handling for arrays of integer/float type.14121413                // This is the length of the array/slice.1414                let len = val.len(self.ecx)?;1415                // This is the element type size.1416                let layout = self.ecx.layout_of(*tys)?;1417                // This is the size in bytes of the whole array. (This checks for overflow.)1418                let size = layout.size * len;1419                // If the size is 0, there is nothing to check.1420                // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)1421                if size == Size::ZERO {1422                    return interp_ok(());1423                }1424                // Now that we definitely have a non-ZST array, we know it lives in memory -- except it may1425                // be an uninitialized local variable, those are also "immediate".1426                let mplace = match val.to_op(self.ecx)?.as_mplace_or_imm() {1427                    Left(mplace) => mplace,1428                    Right(imm) => match *imm {1429                        Immediate::Uninit =>1430                            throw_validation_failure!(1431                                self.path,1432                                Uninit { expected }1433                            ),1434                        Immediate::Scalar(..) | Immediate::ScalarPair(..) 
=>1435                            bug!("arrays/slices can never have Scalar/ScalarPair layout"),1436                    }1437                };14381439                // Optimization: we just check the entire range at once.1440                // NOTE: Keep this in sync with the handling of integer and float1441                // types above, in `visit_primitive`.1442                // No need for an alignment check here, this is not an actual memory access.1443                let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");14441445                alloc.get_bytes_strip_provenance().map_err_kind(|kind| {1446                    // Some error happened, try to provide a more detailed description.1447                    // For some errors we might be able to provide extra information.1448                    // (This custom logic does not fit the `try_validation!` macro.)1449                    match kind {1450                        Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {1451                            // Some byte was uninitialized, determine which1452                            // element that byte belongs to so we can1453                            // provide an index.1454                            let i = usize::try_from(1455                                access.bad.start.bytes() / layout.size.bytes(),1456                            )1457                            .unwrap();1458                            self.path.projs.push(PathElem::ArrayElem(i));14591460                            if matches!(kind, Ub(InvalidUninitBytes(_))) {1461                                err_validation_failure!(self.path, Uninit { expected })1462                            } else {1463                                err_validation_failure!(self.path, PointerAsInt {expected})1464                            }1465                        }14661467                        // Propagate upwards (that will also check for unexpected errors).1468                        err => err,1469                    }1470                })?;14711472                // Don't forget that these are all non-pointer types, and thus do not preserve1473                // provenance.1474                if self.reset_provenance_and_padding {1475                    // We can't share this with above as above, we might be looking at read-only memory.1476                    let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");1477                    alloc.clear_provenance();1478                    // Also, mark this as containing data, not padding.1479                    self.add_data_range(mplace.ptr(), size);1480                }1481            }1482            // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element1483            // of an array and not all of them, because there's only a single value of a specific1484            // ZST type, so either validation fails for all elements or none.1485            ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {1486                // Validate just the first element (if any).1487                if val.len(self.ecx)? 
> 0 {1488                    self.visit_field(val, 0, &self.ecx.project_index(val, 0)?)?;1489                }1490            }1491            ty::Pat(base, pat) => {1492                // First check that the base type is valid1493                self.visit_value(&val.transmute(self.ecx.layout_of(*base)?, self.ecx)?)?;1494                // When you extend this match, make sure to also add tests to1495                // tests/ui/type/pattern_types/validity.rs1496                match **pat {1497                    // Range and non-null patterns are precisely reflected into `valid_range` and thus1498                    // handled fully by `visit_scalar` (called below).1499                    ty::PatternKind::Range { .. } => {},1500                    ty::PatternKind::NotNull => {},15011502                    // FIXME(pattern_types): check that the value is covered by one of the variants.1503                    // For now, we rely on layout computation setting the scalar's `valid_range` to1504                    // match the pattern. However, this cannot always work; the layout may1505                    // pessimistically cover actually illegal ranges and Miri would miss that UB.1506                    // The consolation here is that codegen also will miss that UB, so at least1507                    // we won't see optimizations actually breaking such programs.1508                    ty::PatternKind::Or(_patterns) => {}1509                }1510                // FIXME(pattern_types): handle everything based on the pattern, not on the layout.1511                // it's ok to run scalar validation even if the pattern type is `u8 is 0..=255` and thus1512                // allows uninit values, because that's rare and so not a perf issue.1513                match val.layout.backend_repr {1514                    BackendRepr::Scalar(scalar_layout) => {1515                        if !scalar_layout.is_uninit_valid() {1516                            // There is something to check here.1517                            // We read directly via `ecx` since the read cannot fail -- we already read1518                            // this field above when recursing into the field.1519                            let scalar = self.ecx.read_scalar(val)?;1520                            self.visit_scalar(scalar, scalar_layout)?;1521                        }1522                    }1523                    BackendRepr::ScalarPair(a_layout, b_layout) => {1524                        // We can only proceed if *both* scalars need to be initialized.1525                        // FIXME: find a way to also check ScalarPair when one side can be uninit but1526                        // the other must be init.1527                        if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {1528                            // We read directly via `ecx` since the read cannot fail -- we already read1529                            // this field above when recursing into the field.1530                            let (a, b) = self.ecx.read_immediate(val)?.to_scalar_pair();1531                            self.visit_scalar(a, a_layout)?;1532                            self.visit_scalar(b, b_layout)?;1533                        }1534                    }1535                    BackendRepr::SimdVector { .. } | BackendRepr::SimdScalableVector { .. } => unreachable!(),1536                    BackendRepr::Memory { .. 
} => unreachable!()1537                }1538            }1539            ty::Adt(adt, _) if adt.is_maybe_dangling() => {1540                let old_may_dangle = mem::replace(&mut self.may_dangle, true);15411542                let inner = self.ecx.project_field(val, FieldIdx::ZERO)?;1543                self.visit_value(&inner)?;15441545                self.may_dangle = old_may_dangle;1546            }1547            _ => {1548                // default handler1549                try_validation!(1550                    self.walk_value(val),1551                    self.path,1552                    // It's not great to catch errors here, since we can't give a very good path,1553                    // but it's better than ICEing.1554                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) =>1555                        InvalidMetaWrongTrait { expected_dyn_type, vtable_dyn_type },1556                );1557            }1558        }15591560        // *After* all of this, check further information stored in the layout.1561        // On leaf types like `!` or empty enums, this will raise the error.1562        // This means that for types wrapping such a type, we won't ever get here, but it's1563        // just the simplest way to check for this case.1564        //1565        // FIXME: We could avoid some redundant checks here. For newtypes wrapping1566        // scalars, we do the same check on every "level" (e.g., first we check1567        // the fields of MyNewtype, and then we check MyNewType again).1568        if val.layout.is_uninhabited() {1569            let ty = val.layout.ty;1570            throw_validation_failure!(1571                self.path,1572                format!("encountered a value of uninhabited type `{ty}`")1573            );1574        }1575        if cfg!(debug_assertions) {1576            // Check that we don't miss any new changes to layout computation in our checks above.1577            match val.layout.backend_repr {1578                BackendRepr::Scalar(scalar_layout) => {1579                    if !scalar_layout.is_uninit_valid() {1580                        // There is something to check here.1581                        // We read directly via `ecx` since the read cannot fail -- we already read1582                        // this field above when recursing into the field.1583                        let scalar = self1584                            .ecx1585                            .read_scalar(val)1586                            .expect("the above checks should have fully handled this situation");1587                        self.visit_scalar(scalar, scalar_layout)1588                            .expect("the above checks should have fully handled this situation");1589                    }1590                }1591                BackendRepr::ScalarPair(a_layout, b_layout) => {1592                    // We can only proceed if *both* scalars need to be initialized.1593                    // FIXME: find a way to also check ScalarPair when one side can be uninit but1594                    // the other must be init.1595                    if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {1596                        let (a, b) = self1597                            .ecx1598                            .read_immediate(val)1599                            .expect("the above checks should have fully handled this situation")1600                            .to_scalar_pair();1601                        self.visit_scalar(a, a_layout)1602           
                 .expect("the above checks should have fully handled this situation");1603                        self.visit_scalar(b, b_layout)1604                            .expect("the above checks should have fully handled this situation");1605                    }1606                }1607                BackendRepr::SimdVector { .. } | BackendRepr::SimdScalableVector { .. } => {}1608                BackendRepr::Memory { .. } => {}1609            }1610        }16111612        interp_ok(())1613    }1614}16151616impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {1617    /// The internal core entry point for all validation operations.1618    fn validate_operand_internal(1619        &mut self,1620        val: &PlaceTy<'tcx, M::Provenance>,1621        path: Path<'tcx>,1622        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>>,1623        ctfe_mode: Option<CtfeValidationMode>,1624        reset_provenance_and_padding: bool,1625        start_in_may_dangle: bool,1626    ) -> InterpResult<'tcx> {1627        trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty);16281629        // Run the visitor.1630        self.run_for_validation_mut(|ecx| {1631            let reset_padding = reset_provenance_and_padding && {1632                // Check if `val` is actually stored in memory. If not, padding is not even1633                // represented and we need not reset it.1634                ecx.place_to_op(val)?.as_mplace_or_imm().is_left()1635            };1636            let mut v = ValidityVisitor {1637                path,1638                ref_tracking,1639                ctfe_mode,1640                ecx,1641                reset_provenance_and_padding,1642                data_bytes: reset_padding.then_some(RangeSet(Vec::new())),1643                may_dangle: start_in_may_dangle,1644            };1645            v.visit_value(val)?;1646            v.reset_padding(val)?;1647            interp_ok(())1648        })1649        .map_err_info(|err| {1650            if !matches!(1651                err.kind(),1652                InterpErrorKind::UndefinedBehavior(ValidationError { .. 
})1653                    | InterpErrorKind::InvalidProgram(_)1654                    | InterpErrorKind::Unsupported(_)1655                // We have to also ignore machine-specific errors since we do retagging1656                // during validation.1657                | InterpErrorKind::MachineStop(_)1658            ) {1659                bug!("Unexpected error during validation: {}", format_interp_error(err));1660            }1661            err1662        })1663    }16641665    /// This function checks the data at `val` to be const-valid.1666    /// `val` is assumed to cover valid memory if it is an indirect operand.1667    /// It will error if the bits at the destination do not match the ones described by the layout.1668    ///1669    /// `ref_tracking` is used to record references that we encounter so that they1670    /// can be checked recursively by an outside driving loop.1671    ///1672    /// `constant` controls whether this must satisfy the rules for constants:1673    /// - no pointers to statics.1674    /// - no `UnsafeCell` or non-ZST `&mut`.1675    #[inline(always)]1676    pub(crate) fn const_validate_operand(1677        &mut self,1678        val: &PlaceTy<'tcx, M::Provenance>,1679        path: Path<'tcx>,1680        ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Path<'tcx>>,1681        ctfe_mode: CtfeValidationMode,1682    ) -> InterpResult<'tcx> {1683        self.validate_operand_internal(1684            val,1685            path,1686            Some(ref_tracking),1687            Some(ctfe_mode),1688            /*reset_provenance*/ false,1689            /*start_in_may_dangle*/ false,1690        )1691    }16921693    /// This function checks the data at `val` to be runtime-valid.1694    /// `val` is assumed to cover valid memory if it is an indirect operand.1695    /// It will error if the bits at the destination do not match the ones described by the layout.1696    #[inline(always)]1697    pub fn validate_operand(1698        &mut self,1699        val: &PlaceTy<'tcx, M::Provenance>,1700        recursive: bool,1701        reset_provenance_and_padding: bool,1702    ) -> InterpResult<'tcx> {1703        let _trace = enter_trace_span!(1704            M,1705            "validate_operand",1706            recursive,1707            reset_provenance_and_padding,1708            ?val,1709        );1710        // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's1711        // still correct to not use `ctfe_mode`: that mode is for validation of the final constant1712        // value, it rules out things like `UnsafeCell` in awkward places.1713        if !recursive {1714            return self.validate_operand_internal(1715                val,1716                Path::new(val.layout.ty),1717                None,1718                None,1719                reset_provenance_and_padding,1720                /*start_in_may_dangle*/ false,1721            );1722        }1723        // Do a recursive check.1724        let mut ref_tracking = RefTracking::empty();1725        self.validate_operand_internal(1726            val,1727            Path::new(val.layout.ty),1728            Some(&mut ref_tracking),1729            None,1730            reset_provenance_and_padding,1731            /*start_in_may_dangle*/ false,1732        )?;1733        while let Some((mplace, path)) = ref_tracking.todo.pop() {1734            // Things behind reference do *not* have the provenance reset. 
In fact1735            // we treat the entire thing as being inside MaybeDangling, i.e., references1736            // do not have to be dereferenceable.1737            self.validate_operand_internal(1738                &mplace.into(),1739                path,1740                None, // no further recursion1741                None,1742                /*reset_provenance_and_padding*/ false,1743                /*start_in_may_dangle*/ true,1744            )?;1745        }1746        interp_ok(())1747    }1748}

Code quality findings (27)

Critical: Use of the 'unsafe' keyword bypasses Rust's safety guarantees. It requires careful auditing, a clear justification (FFI, specific optimizations), and minimal scope.
error safety unsafe-block
// unsafe code.
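To illustrate the minimal scope this finding asks for, here is a small sketch (the function and buffer are made up, not taken from this file): the unsafe block covers a single expression, and the invariant it relies on is written down next to it.

    fn first_byte(buf: &[u8]) -> Option<u8> {
        if buf.is_empty() {
            return None;
        }
        // SAFETY: we just checked that `buf` is non-empty, so index 0 is in bounds.
        // Keeping the unsafe block this small keeps the audit surface obvious.
        let byte = unsafe { *buf.get_unchecked(0) };
        Some(byte)
    }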
Warning: '.unwrap()' will panic on None/Err variants. Prefer using pattern matching (match, if let), combinators (map, and_then), or the '?' operator for robust error handling.
warning correctness unwrap-usage
.unwrap()
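As a hedged illustration of the alternatives mentioned here (the function and input are hypothetical, not from this file), the '?' operator or a combinator keeps the failure as a value instead of a panic:

    use std::num::ParseIntError;

    // Propagate the parse failure to the caller with `?`.
    fn parse_port(text: &str) -> Result<u32, ParseIntError> {
        let port = text.trim().parse::<u32>()?;
        Ok(port)
    }

    // Or fall back to a default instead of panicking on bad input.
    fn parse_port_or_default(text: &str) -> u32 {
        text.trim().parse().unwrap_or(8080)
    }

In interpreter code like this file, an '.unwrap()' on an index conversion can also be a deliberate internal assertion; the trade-off is that a violated assumption becomes a panic rather than a structured error.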
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning correctness unchecked-indexing
v[idx] = (new_start, new_end - new_start);
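A small sketch of the `.get_mut` form suggested here, using a hypothetical range vector shaped like the flagged line (the names are illustrative):

    // Write through the index only if it is in bounds; report the miss otherwise.
    // (Assumes `new_end >= new_start`, as the flagged line appears to.)
    fn update_range(v: &mut Vec<(u64, u64)>, idx: usize, new_start: u64, new_end: u64) -> bool {
        match v.get_mut(idx) {
            Some(slot) => {
                *slot = (new_start, new_end - new_start);
                true
            }
            None => false,
        }
    }

Whether this is an improvement depends on context: when the index was just computed from the same vector, the direct `v[idx]` form documents an invariant, and a panic is the honest outcome of breaking it.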
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning correctness unchecked-indexing
PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning correctness unchecked-indexing
PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
Warning: '.expect()' will panic with a custom message on None/Err. While better than unwrap() for debugging, prefer non-panicking error handling in production code (match, if let, ?).
warning correctness expect-usage
.expect("place must be in memory")
Warning: '.expect()' will panic with a custom message on None/Err. While better than unwrap() for debugging, prefer non-panicking error handling in production code (match, if let, ?).
warning correctness expect-usage
let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");
Warning: '.unwrap()' will panic on None/Err variants. Prefer using pattern matching (match, if let), combinators (map, and_then), or the '?' operator for robust error handling.
warning correctness unwrap-usage
.unwrap();
Warning: '.expect()' will panic with a custom message on None/Err. While better than unwrap() for debugging, prefer non-panicking error handling in production code (match, if let, ?).
warning correctness expect-usage
let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");
Warning: '.expect()' will panic with a custom message on None/Err. While better than unwrap() for debugging, prefer non-panicking error handling in production code (match, if let, ?).
warning correctness expect-usage
.expect("the above checks should have fully handled this situation");
Warning: '.expect()' will panic with a custom message on None/Err. While better than unwrap() for debugging, prefer non-panicking error handling in production code (match, if let, ?).
warning correctness expect-usage
.expect("the above checks should have fully handled this situation");
Warning: '.expect()' will panic with a custom message on None/Err. While better than unwrap() for debugging, prefer non-panicking error handling in production code (match, if let, ?).
warning correctness expect-usage
.expect("the above checks should have fully handled this situation")
Warning: '.expect()' will panic with a custom message on None/Err. While better than unwrap() for debugging, prefer non-panicking error handling in production code (match, if let, ?).
warning correctness expect-usage
.expect("the above checks should have fully handled this situation");
Warning: '.expect()' will panic with a custom message on None/Err. While better than unwrap() for debugging, prefer non-panicking error handling in production code (match, if let, ?).
warning correctness expect-usage
.expect("the above checks should have fully handled this situation");
Info: Wildcard imports (`use some::path::*;`) can obscure the origin of names and lead to conflicts. Prefer importing specific items explicitly.
info maintainability wildcard-import
use super::UndefinedBehaviorInfo::*;
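A self-contained sketch of the explicit form; the module and variants below are stand-ins, not this crate's real items:

    mod errors {
        #[derive(Debug)]
        pub enum UndefinedBehaviorInfo {
            InvalidTag,
            UninhabitedEnumVariantRead,
        }
    }

    // Explicit import: the origin of each name stays visible at the use site.
    use self::errors::UndefinedBehaviorInfo::{self, InvalidTag};

    fn describe(info: &UndefinedBehaviorInfo) -> &'static str {
        match info {
            InvalidTag => "invalid enum tag",
            UndefinedBehaviorInfo::UninhabitedEnumVariantRead => "uninhabited variant read",
        }
    }

In this file the globs pull in error-info enums with many variants that the validation patterns match on, a common exception to the rule; the cost is that a reader has to know where a name like `InvalidTag` comes from.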
Info: Wildcard imports (`use some::path::*;`) can obscure the origin of names and lead to conflicts. Prefer importing specific items explicitly.
info maintainability wildcard-import
use super::UnsupportedOpInfo::*;
Info: Usage of `#[allow(...)]` suppresses compiler lints. Ensure the allowance is justified, well-scoped, and ideally temporary. Overuse can hide potential issues.
info maintainability allow-lint
#[allow(unused)]
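A brief sketch of a scoped, justified allowance; the item and reason are illustrative:

    // Scope the allow to the single item that needs it, and say why.
    #[allow(dead_code)] // kept for a planned debug command; remove once it is wired up
    fn dump_state() {
        println!("state dump placeholder");
    }

The narrower the scope and the clearer the justification, the easier it is to notice when the allowance is no longer needed.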
Info: Wildcard imports (`use some::path::*;`) can obscure the origin of names and lead to conflicts. Prefer importing specific items explicitly.
info maintainability wildcard-import
use ValidationErrorKind::*;
Info: Wildcard imports (`use some::path::*;`) can obscure the origin of names and lead to conflicts. Prefer importing specific items explicitly.
info maintainability wildcard-import
use ValidationErrorKind::*;
Info: Wildcard imports (`use some::path::*;`) can obscure the origin of names and lead to conflicts. Prefer importing specific items explicitly.
info maintainability wildcard-import
use ValidationErrorKind::*;
Performance Info: Frequent cloning, especially of Strings, Vecs, or other heap-allocated types inside loops, can be expensive. Consider using references/borrowing where possible.
info performance clone-in-loop
RefTracking { seen: FxHashSet::default(), todo: vec![(val.clone(), Path::new(ty))] };
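A generic sketch of the borrow-instead-of-clone pattern; the data and function are illustrative:

    // Iterating by reference avoids one String clone per element.
    fn total_len(names: &[String]) -> usize {
        let mut total = 0;
        for name in names {
            total += name.len();
        }
        total
    }

The flagged line, by contrast, clones a value once to seed an owned worklist, which is exactly the kind of place where a clone may genuinely be required; the finding is mainly a prompt to confirm the ownership is needed.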
Info: Wildcard imports (`use some::path::*;`) can obscure the origin of names and lead to conflicts. Prefer importing specific items explicitly.
info maintainability wildcard-import
use self::PathElem::*;
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info correctness match-wildcard
match layout.variants {
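A minimal sketch of the exhaustiveness point; the enum is invented for illustration:

    enum Shape {
        Circle { radius: f64 },
        Square { side: f64 },
    }

    // No `_` arm: adding a new Shape variant later becomes a compile error here,
    // instead of silently falling through a wildcard.
    fn area(shape: &Shape) -> f64 {
        match shape {
            Shape::Circle { radius } => std::f64::consts::PI * radius * radius,
            Shape::Square { side } => side * side,
        }
    }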
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info correctness match-wildcard
return match layout.ty.kind() {
Info: `todo!()` or `unimplemented!()` macros indicate incomplete code paths that will panic at runtime if reached. Ensure these are replaced with actual logic before production use.
info correctness todo-unimplemented
ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),
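Where a panic is not acceptable, an unfinished arm can surface a structured error instead; a hedged sketch with invented types:

    #[derive(Debug)]
    enum EvalError {
        Unsupported(&'static str),
    }

    fn eval_kind(kind: &str) -> Result<u32, EvalError> {
        match kind {
            "int" => Ok(0),
            "float" => Ok(1),
            // Formerly a `todo!()`: report the gap instead of panicking.
            _ => Err(EvalError::Unsupported("this kind is not handled yet")),
        }
    }

In compiler-internal code, a `todo!("FIXME(...)")` on an in-progress feature is often intentional: it turns an unimplemented path into a loud ICE rather than silently wrong behavior.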
Performance Info: Calling .push() repeatedly inside a loop without prior capacity reservation can lead to multiple reallocations. Consider using `Vec::with_capacity(n)` or `vec.reserve(n)` if the approximate number of elements is known.
info performance push-without-reserve
data_bytes.0.push((start_offset + size, Size::ZERO));
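A small sketch of reserving up front when the element count is known; the function is illustrative:

    // One allocation instead of repeated growth while pushing n elements.
    fn squares(n: usize) -> Vec<u64> {
        let mut out = Vec::with_capacity(n);
        for i in 0..n {
            out.push((i as u64) * (i as u64));
        }
        out
    }

Whether that applies to the flagged push depends on how often it runs; the advice matters most when pushes repeat many times per call.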
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info correctness match-wildcard
let name = match old_val.layout.ty.kind() {
