//! This module provides a MIR interpreter, which is used in const eval.

use std::{borrow::Cow, cell::RefCell, fmt::Write, iter, mem, ops::Range};

use base_db::{Crate, target::TargetLoadError};
use either::Either;
use hir_def::{
    AdtId, DefWithBodyId, EnumVariantId, ExpressionStoreOwnerId, FunctionId, GeneralConstId,
    HasModule, ItemContainerId, Lookup, StaticId, VariantId,
    expr_store::{Body, HygieneId},
    item_tree::FieldsShape,
    lang_item::LangItems,
    layout::{TagEncoding, Variants},
    resolver::{HasResolver, TypeNs, ValueNs},
    signatures::{
        EnumSignature, FunctionSignature, StaticFlags, StaticSignature, StructFlags,
        StructSignature, TraitSignature,
    },
};
use hir_expand::{InFile, mod_path::path, name::Name};
use intern::sym;
use la_arena::ArenaMap;
use macros::GenericTypeVisitable;
use rustc_abi::{Size, TargetDataLayout};
use rustc_apfloat::{
    Float,
    ieee::{Half as f16, Quad as f128},
};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
    AliasTyKind,
    inherent::{AdtDef, GenericArgs as _, IntoKind, Region as _, SliceLike, Ty as _},
};
use span::FileId;
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;

use crate::{
    CallableDefId, ComplexMemoryMap, InferenceResult, MemoryMap, ParamEnvAndCrate,
    consteval::{self, ConstEvalError, try_const_usize},
    db::{HirDatabase, InternedClosureId},
    display::{ClosureStyle, DisplayTarget, HirDisplay},
    infer::PointerCast,
    layout::{Layout, LayoutError, RustcEnumVariantIdx},
    method_resolution::{is_dyn_method, lookup_impl_const},
    next_solver::{
        AliasTy, Allocation, AllocationData, Const, ConstKind, DbInterner, ErrorGuaranteed,
        GenericArgs, Region, StoredTy, Ty, TyKind, TypingMode, UnevaluatedConst, ValTree,
        infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause},
        obligation_ctxt::ObligationCtxt,
    },
    traits::FnTrait,
    utils::detect_variant_from_bytes,
};

use super::{
    AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan,
    Operand, OperandKind, Place, PlaceElem, ProjectionElem, ProjectionStore, Rvalue, StatementKind,
    TerminatorKind, UnOp, return_slot,
};

mod shim;
#[cfg(test)]
mod tests;

macro_rules! from_bytes {
    ($ty:tt, $value:expr) => {
        ($ty::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $ty).into())),
        }))
    };
    ($apfloat:tt, $bits:tt, $value:expr) => {
        // FIXME(#17451): Switch to builtin `f16` and `f128` once they are stable.
        $apfloat::from_bits($bits::from_le_bytes(match ($value).try_into() {
            Ok(it) => it,
            Err(_) => return Err(MirEvalError::InternalError(stringify!(mismatched size in constructing $apfloat).into())),
        }).into())
    };
}
use from_bytes;
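
// Example usage of `from_bytes!`, as it appears later in this module: the two-argument
// form decodes a little-endian value from a byte slice of exactly the right length
// (`from_bytes!(usize, bytes)`), while the three-argument form goes through the bit
// representation for the `rustc_apfloat` types (`from_bytes!(f16, u16, bytes)`).
// A length mismatch is reported as `MirEvalError::InternalError` instead of panicking.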

macro_rules! not_supported {
    ($it: expr) => {
        return Err($crate::mir::eval::MirEvalError::NotSupported(format!($it)))
    };
}
use not_supported;

#[derive(Debug, Default, Clone, PartialEq, Eq, GenericTypeVisitable)]
pub struct VTableMap<'db> {
    ty_to_id: FxHashMap<Ty<'db>, usize>,
    id_to_ty: Vec<Ty<'db>>,
}

impl<'db> VTableMap<'db> {
    const OFFSET: usize = 1000; // Offset ids so that 0 (null) is never a valid id.

    fn id(&mut self, ty: Ty<'db>) -> usize {
        if let Some(it) = self.ty_to_id.get(&ty) {
            return *it;
        }
        let id = self.id_to_ty.len() + VTableMap::OFFSET;
        self.id_to_ty.push(ty);
        self.ty_to_id.insert(ty, id);
        id
    }

    pub(crate) fn ty(&self, id: usize) -> Result<'db, Ty<'db>> {
        id.checked_sub(VTableMap::OFFSET)
            .and_then(|id| self.id_to_ty.get(id).copied())
            .ok_or(MirEvalError::InvalidVTableId(id))
    }

    fn ty_of_bytes(&self, bytes: &[u8]) -> Result<'db, Ty<'db>> {
        let id = from_bytes!(usize, bytes);
        self.ty(id)
    }

    pub fn shrink_to_fit(&mut self) {
        self.id_to_ty.shrink_to_fit();
        self.ty_to_id.shrink_to_fit();
    }

    fn is_empty(&self) -> bool {
        self.id_to_ty.is_empty() && self.ty_to_id.is_empty()
    }
}

#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct TlsData {
    keys: Vec<u128>,
}

impl TlsData {
    fn create_key(&mut self) -> usize {
        self.keys.push(0);
        self.keys.len() - 1
    }

    fn get_key(&mut self, key: usize) -> Result<'static, u128> {
        let r = self.keys.get(key).ok_or_else(|| {
            MirEvalError::UndefinedBehavior(format!("Getting invalid tls key {key}"))
        })?;
        Ok(*r)
    }

    fn set_key(&mut self, key: usize, value: u128) -> Result<'static, ()> {
        let r = self.keys.get_mut(key).ok_or_else(|| {
            MirEvalError::UndefinedBehavior(format!("Setting invalid tls key {key}"))
        })?;
        *r = value;
        Ok(())
    }
}

struct StackFrame {
    locals: Locals,
    destination: Option<BasicBlockId>,
    prev_stack_ptr: usize,
    span: (MirSpan, DefWithBodyId),
}

#[derive(Clone)]
enum MirOrDynIndex {
    Mir(Arc<MirBody>),
    Dyn(usize),
}

pub struct Evaluator<'db> {
    db: &'db dyn HirDatabase,
    param_env: ParamEnvAndCrate<'db>,
    target_data_layout: Arc<TargetDataLayout>,
    stack: Vec<u8>,
    heap: Vec<u8>,
    code_stack: Vec<StackFrame>,
    /// Stores the global locations of statics. The first time a static is needed, we const
    /// evaluate it and record its location here for reuse.
    static_locations: FxHashMap<StaticId, Address>,
    /// We don't really have function pointers, i.e. pointers to some assembly instructions that we can run. Instead, we
    /// store the type as an interned id in place of function and vtable pointers, and we recover the type at the
    /// time of use.
    vtable_map: VTableMap<'db>,
    thread_local_storage: TlsData,
    random_state: oorandom::Rand64,
    stdout: Vec<u8>,
    stderr: Vec<u8>,
    layout_cache: RefCell<FxHashMap<Ty<'db>, Arc<Layout>>>,
    projected_ty_cache: RefCell<FxHashMap<(Ty<'db>, PlaceElem), Ty<'db>>>,
    not_special_fn_cache: RefCell<FxHashSet<FunctionId>>,
    mir_or_dyn_index_cache: RefCell<FxHashMap<(FunctionId, GenericArgs<'db>), MirOrDynIndex>>,
    /// Constantly dropping and creating `Locals` is very costly. We store
We store195 /// old locals that we normally want to drop here, to reuse their allocations196 /// later.197 unused_locals_store: RefCell<FxHashMap<DefWithBodyId, Vec<Locals>>>,198 cached_ptr_size: usize,199 cached_fn_trait_func: Option<FunctionId>,200 cached_fn_mut_trait_func: Option<FunctionId>,201 cached_fn_once_trait_func: Option<FunctionId>,202 crate_id: Crate,203 // FIXME: This is a workaround, see the comment on `interpret_mir`204 assert_placeholder_ty_is_unused: bool,205 /// A general limit on execution, to prevent non terminating programs from breaking r-a main process206 execution_limit: usize,207 /// An additional limit on stack depth, to prevent stack overflow208 stack_depth_limit: usize,209 /// Maximum count of bytes that heap and stack can grow210 memory_limit: usize,211 infcx: InferCtxt<'db>,212}213214#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]215enum Address {216 Stack(usize),217 Heap(usize),218 Invalid(usize),219}220221use Address::*;222223#[derive(Debug, Clone, Copy)]224struct Interval {225 addr: Address,226 size: usize,227}228229#[derive(Debug, Clone)]230struct IntervalAndTy<'db> {231 interval: Interval,232 ty: Ty<'db>,233}234235impl Interval {236 fn new(addr: Address, size: usize) -> Self {237 Self { addr, size }238 }239240 fn get<'a, 'db>(&self, memory: &'a Evaluator<'db>) -> Result<'db, &'a [u8]> {241 memory.read_memory(self.addr, self.size)242 }243244 fn write_from_bytes<'db>(&self, memory: &mut Evaluator<'db>, bytes: &[u8]) -> Result<'db, ()> {245 memory.write_memory(self.addr, bytes)246 }247248 fn write_from_interval<'db>(249 &self,250 memory: &mut Evaluator<'db>,251 interval: Interval,252 ) -> Result<'db, ()> {253 memory.copy_from_interval(self.addr, interval)254 }255256 fn slice(self, range: Range<usize>) -> Interval {257 Interval { addr: self.addr.offset(range.start), size: range.len() }258 }259}260261impl<'db> IntervalAndTy<'db> {262 fn get<'a>(&self, memory: &'a Evaluator<'db>) -> Result<'db, &'a [u8]> {263 memory.read_memory(self.interval.addr, self.interval.size)264 }265266 fn new(267 addr: Address,268 ty: Ty<'db>,269 evaluator: &Evaluator<'db>,270 locals: &Locals,271 ) -> Result<'db, IntervalAndTy<'db>> {272 let size = evaluator.size_of_sized(ty, locals, "type of interval")?;273 Ok(IntervalAndTy { interval: Interval { addr, size }, ty })274 }275}276277enum IntervalOrOwned {278 Owned(Vec<u8>),279 Borrowed(Interval),280}281282impl From<Interval> for IntervalOrOwned {283 fn from(it: Interval) -> IntervalOrOwned {284 IntervalOrOwned::Borrowed(it)285 }286}287288impl IntervalOrOwned {289 fn get<'a, 'db>(&'a self, memory: &'a Evaluator<'db>) -> Result<'db, &'a [u8]> {290 Ok(match self {291 IntervalOrOwned::Owned(o) => o,292 IntervalOrOwned::Borrowed(b) => b.get(memory)?,293 })294 }295}296297#[cfg(target_pointer_width = "64")]298const STACK_OFFSET: usize = 1 << 60;299#[cfg(target_pointer_width = "64")]300const HEAP_OFFSET: usize = 1 << 59;301302#[cfg(target_pointer_width = "32")]303const STACK_OFFSET: usize = 1 << 30;304#[cfg(target_pointer_width = "32")]305const HEAP_OFFSET: usize = 1 << 29;306307impl Address {308 #[allow(clippy::double_parens)]309 fn from_bytes<'db>(it: &[u8]) -> Result<'db, Self> {310 Ok(Address::from_usize(from_bytes!(usize, it)))311 }312313 fn from_usize(it: usize) -> Self {314 if it > STACK_OFFSET {315 Stack(it - STACK_OFFSET)316 } else if it > HEAP_OFFSET {317 Heap(it - HEAP_OFFSET)318 } else {319 Invalid(it)320 }321 }322323 fn to_bytes(&self) -> [u8; size_of::<usize>()] {324 usize::to_le_bytes(self.to_usize())325 }326327 fn 
to_usize(&self) -> usize {328 match self {329 Stack(it) => *it + STACK_OFFSET,330 Heap(it) => *it + HEAP_OFFSET,331 Invalid(it) => *it,332 }333 }334335 fn map(&self, f: impl FnOnce(usize) -> usize) -> Address {336 match self {337 Stack(it) => Stack(f(*it)),338 Heap(it) => Heap(f(*it)),339 Invalid(it) => Invalid(f(*it)),340 }341 }342343 fn offset(&self, offset: usize) -> Address {344 self.map(|it| it + offset)345 }346}347348#[derive(Clone, PartialEq, Eq)]349pub enum MirEvalError {350 ConstEvalError(String, Box<ConstEvalError>),351 LayoutError(LayoutError, StoredTy),352 TargetDataLayoutNotAvailable(TargetLoadError),353 /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected354 /// then use this type of error.355 UndefinedBehavior(String),356 Panic(String),357 // FIXME: This should be folded into ConstEvalError?358 MirLowerError(FunctionId, MirLowerError),359 MirLowerErrorForClosure(InternedClosureId, MirLowerError),360 TypeIsUnsized(StoredTy, &'static str),361 NotSupported(String),362 InvalidConst,363 InFunction(364 Box<MirEvalError>,365 Vec<(Either<FunctionId, InternedClosureId>, MirSpan, DefWithBodyId)>,366 ),367 ExecutionLimitExceeded,368 StackOverflow,369 /// FIXME: Fold this into InternalError370 InvalidVTableId(usize),371 /// ?372 CoerceUnsizedError(StoredTy),373 /// These should not occur, usually indicates a bug in mir lowering.374 InternalError(Box<str>),375}376377impl MirEvalError {378 pub fn pretty_print(379 &self,380 f: &mut String,381 db: &dyn HirDatabase,382 span_formatter: impl Fn(FileId, TextRange) -> String,383 display_target: DisplayTarget,384 ) -> std::result::Result<(), std::fmt::Error> {385 writeln!(f, "Mir eval error:")?;386 let mut err = self;387 while let MirEvalError::InFunction(e, stack) = err {388 err = e;389 for (func, span, def) in stack.iter().take(30).rev() {390 match func {391 Either::Left(func) => {392 let function_name = FunctionSignature::of(db, *func);393 writeln!(394 f,395 "In function {} ({:?})",396 function_name.name.display(db, display_target.edition),397 func398 )?;399 }400 Either::Right(closure) => {401 writeln!(f, "In {closure:?}")?;402 }403 }404 let source_map = &Body::with_source_map(db, *def).1;405 let span: InFile<SyntaxNodePtr> = match span {406 MirSpan::ExprId(e) => match source_map.expr_syntax(*e) {407 Ok(s) => s.map(|it| it.into()),408 Err(_) => continue,409 },410 MirSpan::PatId(p) => match source_map.pat_syntax(*p) {411 Ok(s) => s.map(|it| it.syntax_node_ptr()),412 Err(_) => continue,413 },414 MirSpan::BindingId(b) => {415 match source_map416 .patterns_for_binding(*b)417 .iter()418 .find_map(|p| source_map.pat_syntax(*p).ok())419 {420 Some(s) => s.map(|it| it.syntax_node_ptr()),421 None => continue,422 }423 }424 MirSpan::SelfParam => match source_map.self_param_syntax() {425 Some(s) => s.map(|it| it.syntax_node_ptr()),426 None => continue,427 },428 MirSpan::Unknown => continue,429 };430 let file_id = span.file_id.original_file(db);431 let text_range = span.value.text_range();432 writeln!(f, "{}", span_formatter(file_id.file_id(db), text_range))?;433 }434 }435 match err {436 MirEvalError::InFunction(..) 
=> unreachable!(),437 MirEvalError::LayoutError(err, ty) => {438 write!(439 f,440 "Layout for type `{}` is not available due {err:?}",441 ty.as_ref()442 .display(db, display_target)443 .with_closure_style(ClosureStyle::ClosureWithId)444 )?;445 }446 MirEvalError::MirLowerError(func, err) => {447 let function_name = FunctionSignature::of(db, *func);448 let self_ = match func.lookup(db).container {449 ItemContainerId::ImplId(impl_id) => Some({450 db.impl_self_ty(impl_id)451 .instantiate_identity()452 .display(db, display_target)453 .to_string()454 }),455 ItemContainerId::TraitId(it) => Some(456 TraitSignature::of(db, it)457 .name458 .display(db, display_target.edition)459 .to_string(),460 ),461 _ => None,462 };463 writeln!(464 f,465 "MIR lowering for function `{}{}{}` ({:?}) failed due:",466 self_.as_deref().unwrap_or_default(),467 if self_.is_some() { "::" } else { "" },468 function_name.name.display(db, display_target.edition),469 func470 )?;471 err.pretty_print(f, db, span_formatter, display_target)?;472 }473 MirEvalError::ConstEvalError(name, err) => {474 MirLowerError::ConstEvalError((**name).into(), err.clone()).pretty_print(475 f,476 db,477 span_formatter,478 display_target,479 )?;480 }481 MirEvalError::UndefinedBehavior(_)482 | MirEvalError::TargetDataLayoutNotAvailable(_)483 | MirEvalError::Panic(_)484 | MirEvalError::MirLowerErrorForClosure(_, _)485 | MirEvalError::TypeIsUnsized(_, _)486 | MirEvalError::NotSupported(_)487 | MirEvalError::InvalidConst488 | MirEvalError::ExecutionLimitExceeded489 | MirEvalError::StackOverflow490 | MirEvalError::CoerceUnsizedError(_)491 | MirEvalError::InternalError(_)492 | MirEvalError::InvalidVTableId(_) => writeln!(f, "{err:?}")?,493 }494 Ok(())495 }496497 pub fn is_panic(&self) -> Option<&str> {498 let mut err = self;499 while let MirEvalError::InFunction(e, _) = err {500 err = e;501 }502 match err {503 MirEvalError::Panic(msg) => Some(msg),504 _ => None,505 }506 }507}508509impl std::fmt::Debug for MirEvalError {510 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {511 match self {512 Self::ConstEvalError(arg0, arg1) => {513 f.debug_tuple("ConstEvalError").field(arg0).field(arg1).finish()514 }515 Self::LayoutError(arg0, arg1) => {516 f.debug_tuple("LayoutError").field(arg0).field(arg1).finish()517 }518 Self::UndefinedBehavior(arg0) => {519 f.debug_tuple("UndefinedBehavior").field(arg0).finish()520 }521 Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"),522 Self::TargetDataLayoutNotAvailable(arg0) => {523 f.debug_tuple("TargetDataLayoutNotAvailable").field(arg0).finish()524 }525 Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. 
{it} should be sized."),526 Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),527 Self::StackOverflow => write!(f, "stack overflow"),528 Self::MirLowerError(arg0, arg1) => {529 f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()530 }531 Self::MirLowerErrorForClosure(arg0, arg1) => {532 f.debug_tuple("MirLowerError").field(arg0).field(arg1).finish()533 }534 Self::CoerceUnsizedError(arg0) => {535 f.debug_tuple("CoerceUnsizedError").field(arg0).finish()536 }537 Self::InternalError(arg0) => f.debug_tuple("InternalError").field(arg0).finish(),538 Self::InvalidVTableId(arg0) => f.debug_tuple("InvalidVTableId").field(arg0).finish(),539 Self::NotSupported(arg0) => f.debug_tuple("NotSupported").field(arg0).finish(),540 Self::InvalidConst => f.write_str("InvalidConst"),541 Self::InFunction(e, stack) => {542 f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish()543 }544 }545 }546}547548type Result<'db, T> = std::result::Result<T, MirEvalError>;549550#[derive(Debug, Default)]551struct DropFlags {552 need_drop: FxHashSet<Place>,553}554555impl DropFlags {556 fn add_place(&mut self, p: Place, store: &ProjectionStore) {557 if p.iterate_over_parents(store).any(|it| self.need_drop.contains(&it)) {558 return;559 }560 self.need_drop.retain(|it| !p.is_parent(it, store));561 self.need_drop.insert(p);562 }563564 fn remove_place(&mut self, p: &Place, store: &ProjectionStore) -> bool {565 // FIXME: replace parents with parts566 if let Some(parent) = p.iterate_over_parents(store).find(|it| self.need_drop.contains(it)) {567 self.need_drop.remove(&parent);568 return true;569 }570 self.need_drop.remove(p)571 }572573 fn clear(&mut self) {574 self.need_drop.clear();575 }576}577578#[derive(Debug)]579struct Locals {580 ptr: ArenaMap<LocalId, Interval>,581 body: Arc<MirBody>,582 drop_flags: DropFlags,583}584585pub struct MirOutput {586 stdout: Vec<u8>,587 stderr: Vec<u8>,588}589590impl MirOutput {591 pub fn stdout(&self) -> Cow<'_, str> {592 String::from_utf8_lossy(&self.stdout)593 }594 pub fn stderr(&self) -> Cow<'_, str> {595 String::from_utf8_lossy(&self.stderr)596 }597}598599pub fn interpret_mir<'db>(600 db: &'db dyn HirDatabase,601 body: Arc<MirBody>,602 // FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now603 // they share their body with their parent, so in MIR lowering we have locals of the parent body, which604 // might have placeholders. With this argument, we (wrongly) assume that every placeholder type has605 // a zero size, hoping that they are all outside of our current body. 
Even without a fix for #7434, we can606 // (and probably should) do better here, for example by excluding bindings outside of the target expression.607 assert_placeholder_ty_is_unused: bool,608 trait_env: Option<ParamEnvAndCrate<'db>>,609) -> Result<'db, (Result<'db, Allocation<'db>>, MirOutput)> {610 let ty = body.locals[return_slot()].ty.as_ref();611 let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused, trait_env)?;612 let it: Result<'db, Allocation<'db>> = (|| {613 if evaluator.ptr_size() != size_of::<usize>() {614 not_supported!("targets with different pointer size from host");615 }616 let interval = evaluator.interpret_mir(body.clone(), None.into_iter())?;617 let bytes = interval.get(&evaluator)?;618 let mut memory_map = evaluator.create_memory_map(619 bytes,620 ty,621 &Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() },622 )?;623 let bytes = Box::from(bytes);624 let memory_map = if memory_map.memory.is_empty() && evaluator.vtable_map.is_empty() {625 MemoryMap::Empty626 } else {627 memory_map.vtable = mem::take(&mut evaluator.vtable_map);628 memory_map.vtable.shrink_to_fit();629 MemoryMap::Complex(Box::new(memory_map))630 };631 Ok(Allocation::new(AllocationData { ty, memory: bytes, memory_map }))632 })();633 Ok((it, MirOutput { stdout: evaluator.stdout, stderr: evaluator.stderr }))634}635636#[cfg(test)]637const EXECUTION_LIMIT: usize = 100_000;638#[cfg(not(test))]639const EXECUTION_LIMIT: usize = 10_000_000;640641impl<'db> Evaluator<'db> {642 pub fn new(643 db: &'db dyn HirDatabase,644 owner: DefWithBodyId,645 assert_placeholder_ty_is_unused: bool,646 trait_env: Option<ParamEnvAndCrate<'db>>,647 ) -> Result<'db, Evaluator<'db>> {648 let module = owner.module(db);649 let crate_id = module.krate(db);650 let target_data_layout = match db.target_data_layout(crate_id) {651 Ok(target_data_layout) => target_data_layout,652 Err(e) => return Err(MirEvalError::TargetDataLayoutNotAvailable(e)),653 };654 let cached_ptr_size = target_data_layout.pointer_size().bytes_usize();655 let interner = DbInterner::new_with(db, crate_id);656 let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);657 let lang_items = interner.lang_items();658 Ok(Evaluator {659 target_data_layout,660 stack: vec![0],661 heap: vec![0],662 code_stack: vec![],663 vtable_map: VTableMap::default(),664 thread_local_storage: TlsData::default(),665 static_locations: Default::default(),666 db,667 random_state: oorandom::Rand64::new(0),668 param_env: trait_env.unwrap_or_else(|| ParamEnvAndCrate {669 param_env: db.trait_environment(ExpressionStoreOwnerId::from(owner)),670 krate: crate_id,671 }),672 crate_id,673 stdout: vec![],674 stderr: vec![],675 assert_placeholder_ty_is_unused,676 stack_depth_limit: 100,677 execution_limit: EXECUTION_LIMIT,678 memory_limit: 1_000_000_000, // 2GB, 1GB for stack and 1GB for heap679 layout_cache: RefCell::new(Default::default()),680 projected_ty_cache: RefCell::new(Default::default()),681 not_special_fn_cache: RefCell::new(Default::default()),682 mir_or_dyn_index_cache: RefCell::new(Default::default()),683 unused_locals_store: RefCell::new(Default::default()),684 cached_ptr_size,685 cached_fn_trait_func: lang_items686 .Fn687 .and_then(|x| x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call))),688 cached_fn_mut_trait_func: lang_items.FnMut.and_then(|x| {689 x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_mut))690 }),691 cached_fn_once_trait_func: lang_items.FnOnce.and_then(|x| {692 
x.trait_items(db).method_by_name(&Name::new_symbol_root(sym::call_once))693 }),694 infcx,695 })696 }697698 #[inline]699 fn interner(&self) -> DbInterner<'db> {700 self.infcx.interner701 }702703 #[inline]704 fn lang_items(&self) -> &'db LangItems {705 self.infcx.interner.lang_items()706 }707708 fn place_addr(&self, p: &Place, locals: &Locals) -> Result<'db, Address> {709 Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)710 }711712 fn place_interval(&self, p: &Place, locals: &Locals) -> Result<'db, Interval> {713 let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;714 Ok(Interval {715 addr: place_addr_and_ty.0,716 size: self.size_of_sized(717 place_addr_and_ty.1,718 locals,719 "Type of place that we need its interval",720 )?,721 })722 }723724 fn ptr_size(&self) -> usize {725 self.cached_ptr_size726 }727728 fn projected_ty(&self, ty: Ty<'db>, proj: PlaceElem) -> Ty<'db> {729 let pair = (ty, proj);730 if let Some(r) = self.projected_ty_cache.borrow().get(&pair) {731 return *r;732 }733 let (ty, proj) = pair;734 let r = proj.projected_ty(&self.infcx, self.param_env.param_env, ty, self.crate_id);735 self.projected_ty_cache.borrow_mut().insert((ty, proj), r);736 r737 }738739 fn place_addr_and_ty_and_metadata<'a>(740 &'a self,741 p: &Place,742 locals: &'a Locals,743 ) -> Result<'db, (Address, Ty<'db>, Option<IntervalOrOwned>)> {744 let mut addr = locals.ptr[p.local].addr;745 let mut ty: Ty<'db> = locals.body.locals[p.local].ty.as_ref();746 let mut metadata: Option<IntervalOrOwned> = None; // locals are always sized747 for proj in p.projection.lookup(&locals.body.projection_store) {748 let prev_ty = ty;749 ty = self.projected_ty(ty, proj.clone());750 match proj {751 ProjectionElem::Deref => {752 metadata = if self.size_align_of(ty, locals)?.is_none() {753 Some(754 Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() }755 .into(),756 )757 } else {758 None759 };760 let it = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);761 addr = Address::from_usize(it);762 }763 ProjectionElem::Index(op) => {764 let offset = from_bytes!(765 usize,766 self.read_memory(locals.ptr[*op].addr, self.ptr_size())?767 );768 metadata = None; // Result of index is always sized769 let ty_size =770 self.size_of_sized(ty, locals, "array inner type should be sized")?;771 addr = addr.offset(ty_size * offset);772 }773 &ProjectionElem::ConstantIndex { from_end, offset } => {774 let offset = if from_end {775 let len = match prev_ty.kind() {776 TyKind::Array(_, c) => match try_const_usize(self.db, c) {777 Some(it) => it as u64,778 None => {779 not_supported!("indexing array with unknown const from end")780 }781 },782 TyKind::Slice(_) => match metadata {783 Some(it) => from_bytes!(u64, it.get(self)?),784 None => not_supported!("slice place without metadata"),785 },786 _ => not_supported!("bad type for const index"),787 };788 (len - offset - 1) as usize789 } else {790 offset as usize791 };792 metadata = None; // Result of index is always sized793 let ty_size =794 self.size_of_sized(ty, locals, "array inner type should be sized")?;795 addr = addr.offset(ty_size * offset);796 }797 &ProjectionElem::Subslice { from, to } => {798 let inner_ty = match ty.kind() {799 TyKind::Array(inner, _) | TyKind::Slice(inner) => inner,800 _ => Ty::new_error(self.interner(), ErrorGuaranteed),801 };802 metadata = match metadata {803 Some(it) => {804 let prev_len = from_bytes!(u64, it.get(self)?);805 Some(IntervalOrOwned::Owned(806 (prev_len - from - to).to_le_bytes().to_vec(),807 ))808 }809 None => None,810 
};811 let ty_size =812 self.size_of_sized(inner_ty, locals, "array inner type should be sized")?;813 addr = addr.offset(ty_size * (from as usize));814 }815 &ProjectionElem::ClosureField(f) => {816 let layout = self.layout(prev_ty)?;817 let offset = layout.fields.offset(f).bytes_usize();818 addr = addr.offset(offset);819 metadata = None;820 }821 ProjectionElem::Field(Either::Right(f)) => {822 let layout = self.layout(prev_ty)?;823 let offset = layout.fields.offset(f.index as usize).bytes_usize();824 addr = addr.offset(offset);825 metadata = None; // tuple field is always sized FIXME: This is wrong, the tail can be unsized826 }827 ProjectionElem::Field(Either::Left(f)) => {828 let layout = self.layout(prev_ty)?;829 let variant_layout = match &layout.variants {830 Variants::Single { .. } | Variants::Empty => &layout,831 Variants::Multiple { variants, .. } => {832 &variants[match f.parent {833 hir_def::VariantId::EnumVariantId(it) => {834 RustcEnumVariantIdx(it.lookup(self.db).index as usize)835 }836 _ => {837 return Err(MirEvalError::InternalError(838 "mismatched layout".into(),839 ));840 }841 }]842 }843 };844 let offset = variant_layout845 .fields846 .offset(u32::from(f.local_id.into_raw()) as usize)847 .bytes_usize();848 addr = addr.offset(offset);849 // Unsized field metadata is equal to the metadata of the struct850 if self.size_align_of(ty, locals)?.is_some() {851 metadata = None;852 }853 }854 ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),855 }856 }857 Ok((addr, ty, metadata))858 }859860 fn layout(&self, ty: Ty<'db>) -> Result<'db, Arc<Layout>> {861 if let Some(x) = self.layout_cache.borrow().get(&ty) {862 return Ok(x.clone());863 }864 let r = self865 .db866 .layout_of_ty(ty.store(), self.param_env.store())867 .map_err(|e| MirEvalError::LayoutError(e, ty.store()))?;868 self.layout_cache.borrow_mut().insert(ty, r.clone());869 Ok(r)870 }871872 fn layout_adt(&self, adt: AdtId, subst: GenericArgs<'db>) -> Result<'db, Arc<Layout>> {873 self.layout(Ty::new_adt(self.interner(), adt, subst))874 }875876 fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<'db, Ty<'db>> {877 Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)878 }879880 fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<'db, Ty<'db>> {881 Ok(match &o.kind {882 OperandKind::Copy(p) | OperandKind::Move(p) => self.place_ty(p, locals)?,883 OperandKind::Constant { konst: _, ty } => ty.as_ref(),884 OperandKind::Allocation { allocation } => allocation.as_ref().ty,885 &OperandKind::Static(s) => {886 let ty = InferenceResult::of(self.db, DefWithBodyId::from(s))887 .expr_ty(Body::of(self.db, s.into()).root_expr());888 Ty::new_ref(889 self.interner(),890 Region::new_static(self.interner()),891 ty,892 Mutability::Not,893 )894 }895 })896 }897898 fn operand_ty_and_eval(899 &mut self,900 o: &Operand,901 locals: &mut Locals,902 ) -> Result<'db, IntervalAndTy<'db>> {903 Ok(IntervalAndTy {904 interval: self.eval_operand(o, locals)?,905 ty: self.operand_ty(o, locals)?,906 })907 }908909 fn interpret_mir(910 &mut self,911 body: Arc<MirBody>,912 args: impl Iterator<Item = IntervalOrOwned>,913 ) -> Result<'db, Interval> {914 if let Some(it) = self.stack_depth_limit.checked_sub(1) {915 self.stack_depth_limit = it;916 } else {917 return Err(MirEvalError::StackOverflow);918 }919 let mut current_block_idx = body.start_block;920 let (mut locals, prev_stack_ptr) = self.create_locals_for_body(&body, None)?;921 self.fill_locals_for_body(&body, &mut locals, args)?;922 let prev_code_stack = mem::take(&mut 
self.code_stack);923 let span = (MirSpan::Unknown, body.owner);924 self.code_stack.push(StackFrame { locals, destination: None, prev_stack_ptr, span });925 'stack: loop {926 let Some(mut my_stack_frame) = self.code_stack.pop() else {927 not_supported!("missing stack frame");928 };929 let e = (|| {930 let locals = &mut my_stack_frame.locals;931 let body = locals.body.clone();932 loop {933 let current_block = &body.basic_blocks[current_block_idx];934 if let Some(it) = self.execution_limit.checked_sub(1) {935 self.execution_limit = it;936 } else {937 return Err(MirEvalError::ExecutionLimitExceeded);938 }939 for statement in ¤t_block.statements {940 match &statement.kind {941 StatementKind::Assign(l, r) => {942 let addr = self.place_addr(l, locals)?;943 let result = self.eval_rvalue(r, locals)?;944 self.copy_from_interval_or_owned(addr, result)?;945 locals.drop_flags.add_place(*l, &locals.body.projection_store);946 }947 StatementKind::Deinit(_) => not_supported!("de-init statement"),948 StatementKind::StorageLive(_)949 | StatementKind::FakeRead(_)950 | StatementKind::StorageDead(_)951 | StatementKind::Nop => (),952 }953 }954 let Some(terminator) = current_block.terminator.as_ref() else {955 not_supported!("block without terminator");956 };957 match &terminator.kind {958 TerminatorKind::Goto { target } => {959 current_block_idx = *target;960 }961 TerminatorKind::Call {962 func,963 args,964 destination,965 target,966 cleanup: _,967 from_hir_call: _,968 } => {969 let destination_interval = self.place_interval(destination, locals)?;970 let fn_ty = self.operand_ty(func, locals)?;971 let args = args972 .iter()973 .map(|it| self.operand_ty_and_eval(it, locals))974 .collect::<Result<'db, Vec<_>>>()?;975 let stack_frame = match fn_ty.kind() {976 TyKind::FnPtr(..) 
=> {977 let bytes = self.eval_operand(func, locals)?;978 self.exec_fn_pointer(979 bytes,980 destination_interval,981 &args,982 locals,983 *target,984 terminator.span,985 )?986 }987 TyKind::FnDef(def, generic_args) => self.exec_fn_def(988 def.0,989 generic_args,990 destination_interval,991 &args,992 locals,993 *target,994 terminator.span,995 )?,996 it => not_supported!("unknown function type {it:?}"),997 };998 locals999 .drop_flags1000 .add_place(*destination, &locals.body.projection_store);1001 if let Some(stack_frame) = stack_frame {1002 self.code_stack.push(my_stack_frame);1003 current_block_idx = stack_frame.locals.body.start_block;1004 self.code_stack.push(stack_frame);1005 return Ok(None);1006 } else {1007 current_block_idx =1008 target.ok_or(MirEvalError::UndefinedBehavior(1009 "Diverging function returned".to_owned(),1010 ))?;1011 }1012 }1013 TerminatorKind::SwitchInt { discr, targets } => {1014 let val = u128::from_le_bytes(pad16(1015 self.eval_operand(discr, locals)?.get(self)?,1016 false,1017 ));1018 current_block_idx = targets.target_for_value(val);1019 }1020 TerminatorKind::Return => {1021 break;1022 }1023 TerminatorKind::Unreachable => {1024 return Err(MirEvalError::UndefinedBehavior(1025 "unreachable executed".to_owned(),1026 ));1027 }1028 TerminatorKind::Drop { place, target, unwind: _ } => {1029 self.drop_place(place, locals, terminator.span)?;1030 current_block_idx = *target;1031 }1032 _ => not_supported!("unknown terminator"),1033 }1034 }1035 Ok(Some(my_stack_frame))1036 })();1037 let my_stack_frame = match e {1038 Ok(None) => continue 'stack,1039 Ok(Some(x)) => x,1040 Err(e) => {1041 let my_code_stack = mem::replace(&mut self.code_stack, prev_code_stack);1042 let mut error_stack = vec![];1043 for frame in my_code_stack.into_iter().rev() {1044 if let DefWithBodyId::FunctionId(f) = frame.locals.body.owner {1045 error_stack.push((Either::Left(f), frame.span.0, frame.span.1));1046 }1047 }1048 return Err(MirEvalError::InFunction(Box::new(e), error_stack));1049 }1050 };1051 let return_interval = my_stack_frame.locals.ptr[return_slot()];1052 self.unused_locals_store1053 .borrow_mut()1054 .entry(my_stack_frame.locals.body.owner)1055 .or_default()1056 .push(my_stack_frame.locals);1057 match my_stack_frame.destination {1058 None => {1059 self.code_stack = prev_code_stack;1060 self.stack_depth_limit += 1;1061 return Ok(return_interval);1062 }1063 Some(bb) => {1064 // We don't support const promotion, so we can't truncate the stack yet.1065 let _ = my_stack_frame.prev_stack_ptr;1066 // self.stack.truncate(my_stack_frame.prev_stack_ptr);1067 current_block_idx = bb;1068 }1069 }1070 }1071 }10721073 fn fill_locals_for_body(1074 &mut self,1075 body: &MirBody,1076 locals: &mut Locals,1077 args: impl Iterator<Item = IntervalOrOwned>,1078 ) -> Result<'db, ()> {1079 let mut remain_args = body.param_locals.len();1080 for ((l, interval), value) in locals.ptr.iter().skip(1).zip(args) {1081 locals.drop_flags.add_place(l.into(), &locals.body.projection_store);1082 match value {1083 IntervalOrOwned::Owned(value) => interval.write_from_bytes(self, &value)?,1084 IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?,1085 }1086 if remain_args == 0 {1087 return Err(MirEvalError::InternalError("too many arguments".into()));1088 }1089 remain_args -= 1;1090 }1091 if remain_args > 0 {1092 return Err(MirEvalError::InternalError("too few arguments".into()));1093 }1094 Ok(())1095 }10961097 fn create_locals_for_body(1098 &mut self,1099 body: &Arc<MirBody>,1100 destination: 
Option<Interval>,1101 ) -> Result<'db, (Locals, usize)> {1102 let mut locals =1103 match self.unused_locals_store.borrow_mut().entry(body.owner).or_default().pop() {1104 None => Locals {1105 ptr: ArenaMap::new(),1106 body: body.clone(),1107 drop_flags: DropFlags::default(),1108 },1109 Some(mut l) => {1110 l.drop_flags.clear();1111 l.body = body.clone();1112 l1113 }1114 };1115 let stack_size = {1116 let mut stack_ptr = self.stack.len();1117 for (id, it) in body.locals.iter() {1118 if id == return_slot()1119 && let Some(destination) = destination1120 {1121 locals.ptr.insert(id, destination);1122 continue;1123 }1124 let (size, align) = self.size_align_of_sized(1125 it.ty.as_ref(),1126 &locals,1127 "no unsized local in extending stack",1128 )?;1129 while !stack_ptr.is_multiple_of(align) {1130 stack_ptr += 1;1131 }1132 let my_ptr = stack_ptr;1133 stack_ptr += size;1134 locals.ptr.insert(id, Interval { addr: Stack(my_ptr), size });1135 }1136 stack_ptr - self.stack.len()1137 };1138 let prev_stack_pointer = self.stack.len();1139 if stack_size > self.memory_limit {1140 return Err(MirEvalError::Panic(format!(1141 "Stack overflow. Tried to grow stack to {stack_size} bytes"1142 )));1143 }1144 self.stack.extend(std::iter::repeat_n(0, stack_size));1145 Ok((locals, prev_stack_pointer))1146 }11471148 fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<'db, IntervalOrOwned> {1149 use IntervalOrOwned::*;1150 Ok(match r {1151 Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?),1152 Rvalue::Ref(_, p) => {1153 let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;1154 let mut r = addr.to_bytes().to_vec();1155 if let Some(metadata) = metadata {1156 r.extend(metadata.get(self)?);1157 }1158 Owned(r)1159 }1160 Rvalue::Len(p) => {1161 let (_, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;1162 match metadata {1163 Some(m) => m,1164 None => {1165 return Err(MirEvalError::InternalError(1166 "type without metadata is used for Rvalue::Len".into(),1167 ));1168 }1169 }1170 }1171 Rvalue::UnaryOp(op, val) => {1172 let mut c = self.eval_operand(val, locals)?.get(self)?;1173 let mut ty = self.operand_ty(val, locals)?;1174 while let TyKind::Ref(_, z, _) = ty.kind() {1175 ty = z;1176 let size = self.size_of_sized(ty, locals, "operand of unary op")?;1177 c = self.read_memory(Address::from_bytes(c)?, size)?;1178 }1179 if let TyKind::Float(f) = ty.kind() {1180 match f {1181 rustc_type_ir::FloatTy::F16 => {1182 let c = -from_bytes!(f16, u16, c);1183 Owned(u16::try_from(c.to_bits()).unwrap().to_le_bytes().into())1184 }1185 rustc_type_ir::FloatTy::F32 => {1186 let c = -from_bytes!(f32, c);1187 Owned(c.to_le_bytes().into())1188 }1189 rustc_type_ir::FloatTy::F64 => {1190 let c = -from_bytes!(f64, c);1191 Owned(c.to_le_bytes().into())1192 }1193 rustc_type_ir::FloatTy::F128 => {1194 let c = -from_bytes!(f128, u128, c);1195 Owned(c.to_bits().to_le_bytes().into())1196 }1197 }1198 } else {1199 let mut c = c.to_vec();1200 if matches!(ty.kind(), TyKind::Bool) {1201 c[0] = 1 - c[0];1202 } else {1203 match op {1204 UnOp::Not => c.iter_mut().for_each(|it| *it = !*it),1205 UnOp::Neg => {1206 c.iter_mut().for_each(|it| *it = !*it);1207 for k in c.iter_mut() {1208 let o;1209 (*k, o) = k.overflowing_add(1);1210 if !o {1211 break;1212 }1213 }1214 }1215 }1216 }1217 Owned(c)1218 }1219 }1220 Rvalue::CheckedBinaryOp(op, lhs, rhs) => 'binary_op: {1221 let lc = self.eval_operand(lhs, locals)?;1222 let rc = self.eval_operand(rhs, locals)?;1223 let mut lc = lc.get(self)?;1224 let mut rc = 
rc.get(self)?;1225 let mut ty = self.operand_ty(lhs, locals)?;1226 while let TyKind::Ref(_, z, _) = ty.kind() {1227 ty = z;1228 let size = if ty.is_str() {1229 if *op != BinOp::Eq {1230 never!("Only eq is builtin for `str`");1231 }1232 let ls = from_bytes!(usize, &lc[self.ptr_size()..self.ptr_size() * 2]);1233 let rs = from_bytes!(usize, &rc[self.ptr_size()..self.ptr_size() * 2]);1234 if ls != rs {1235 break 'binary_op Owned(vec![0]);1236 }1237 lc = &lc[..self.ptr_size()];1238 rc = &rc[..self.ptr_size()];1239 lc = self.read_memory(Address::from_bytes(lc)?, ls)?;1240 rc = self.read_memory(Address::from_bytes(rc)?, ls)?;1241 break 'binary_op Owned(vec![u8::from(lc == rc)]);1242 } else {1243 self.size_of_sized(ty, locals, "operand of binary op")?1244 };1245 lc = self.read_memory(Address::from_bytes(lc)?, size)?;1246 rc = self.read_memory(Address::from_bytes(rc)?, size)?;1247 }1248 if let TyKind::Float(f) = ty.kind() {1249 match f {1250 rustc_type_ir::FloatTy::F16 => {1251 let l = from_bytes!(f16, u16, lc);1252 let r = from_bytes!(f16, u16, rc);1253 match op {1254 BinOp::Ge1255 | BinOp::Gt1256 | BinOp::Le1257 | BinOp::Lt1258 | BinOp::Eq1259 | BinOp::Ne => {1260 let r = op.run_compare(l, r) as u8;1261 Owned(vec![r])1262 }1263 BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {1264 let r = match op {1265 BinOp::Add => l + r,1266 BinOp::Sub => l - r,1267 BinOp::Mul => l * r,1268 BinOp::Div => l / r,1269 _ => unreachable!(),1270 };1271 Owned(1272 u16::try_from(r.value.to_bits())1273 .unwrap()1274 .to_le_bytes()1275 .into(),1276 )1277 }1278 it => not_supported!(1279 "invalid binop {it:?} on floating point operators"1280 ),1281 }1282 }1283 rustc_type_ir::FloatTy::F32 => {1284 let l = from_bytes!(f32, lc);1285 let r = from_bytes!(f32, rc);1286 match op {1287 BinOp::Ge1288 | BinOp::Gt1289 | BinOp::Le1290 | BinOp::Lt1291 | BinOp::Eq1292 | BinOp::Ne => {1293 let r = op.run_compare(l, r) as u8;1294 Owned(vec![r])1295 }1296 BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {1297 let r = match op {1298 BinOp::Add => l + r,1299 BinOp::Sub => l - r,1300 BinOp::Mul => l * r,1301 BinOp::Div => l / r,1302 _ => unreachable!(),1303 };1304 Owned(r.to_le_bytes().into())1305 }1306 it => not_supported!(1307 "invalid binop {it:?} on floating point operators"1308 ),1309 }1310 }1311 rustc_type_ir::FloatTy::F64 => {1312 let l = from_bytes!(f64, lc);1313 let r = from_bytes!(f64, rc);1314 match op {1315 BinOp::Ge1316 | BinOp::Gt1317 | BinOp::Le1318 | BinOp::Lt1319 | BinOp::Eq1320 | BinOp::Ne => {1321 let r = op.run_compare(l, r) as u8;1322 Owned(vec![r])1323 }1324 BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {1325 let r = match op {1326 BinOp::Add => l + r,1327 BinOp::Sub => l - r,1328 BinOp::Mul => l * r,1329 BinOp::Div => l / r,1330 _ => unreachable!(),1331 };1332 Owned(r.to_le_bytes().into())1333 }1334 it => not_supported!(1335 "invalid binop {it:?} on floating point operators"1336 ),1337 }1338 }1339 rustc_type_ir::FloatTy::F128 => {1340 let l = from_bytes!(f128, u128, lc);1341 let r = from_bytes!(f128, u128, rc);1342 match op {1343 BinOp::Ge1344 | BinOp::Gt1345 | BinOp::Le1346 | BinOp::Lt1347 | BinOp::Eq1348 | BinOp::Ne => {1349 let r = op.run_compare(l, r) as u8;1350 Owned(vec![r])1351 }1352 BinOp::Add | BinOp::Sub | BinOp::Mul | BinOp::Div => {1353 let r = match op {1354 BinOp::Add => l + r,1355 BinOp::Sub => l - r,1356 BinOp::Mul => l * r,1357 BinOp::Div => l / r,1358 _ => unreachable!(),1359 };1360 Owned(r.value.to_bits().to_le_bytes().into())1361 }1362 it => not_supported!(1363 "invalid binop {it:?} 
on floating point operators"1364 ),1365 }1366 }1367 }1368 } else {1369 let is_signed = matches!(ty.kind(), TyKind::Int(_));1370 let l128 = IntValue::from_bytes(lc, is_signed);1371 let r128 = IntValue::from_bytes(rc, is_signed);1372 match op {1373 BinOp::Ge | BinOp::Gt | BinOp::Le | BinOp::Lt | BinOp::Eq | BinOp::Ne => {1374 let r = op.run_compare(l128, r128) as u8;1375 Owned(vec![r])1376 }1377 BinOp::BitAnd1378 | BinOp::BitOr1379 | BinOp::BitXor1380 | BinOp::Add1381 | BinOp::Mul1382 | BinOp::Div1383 | BinOp::Rem1384 | BinOp::Sub => {1385 let r = match op {1386 BinOp::Add => l128.checked_add(r128).ok_or_else(|| {1387 MirEvalError::Panic(format!("Overflow in {op:?}"))1388 })?,1389 BinOp::Mul => l128.checked_mul(r128).ok_or_else(|| {1390 MirEvalError::Panic(format!("Overflow in {op:?}"))1391 })?,1392 BinOp::Div => l128.checked_div(r128).ok_or_else(|| {1393 MirEvalError::Panic(format!("Overflow in {op:?}"))1394 })?,1395 BinOp::Rem => l128.checked_rem(r128).ok_or_else(|| {1396 MirEvalError::Panic(format!("Overflow in {op:?}"))1397 })?,1398 BinOp::Sub => l128.checked_sub(r128).ok_or_else(|| {1399 MirEvalError::Panic(format!("Overflow in {op:?}"))1400 })?,1401 BinOp::BitAnd => l128 & r128,1402 BinOp::BitOr => l128 | r128,1403 BinOp::BitXor => l128 ^ r128,1404 _ => unreachable!(),1405 };1406 Owned(r.to_bytes())1407 }1408 BinOp::Shl | BinOp::Shr => {1409 let r = 'b: {1410 if let Some(shift_amount) = r128.as_u32() {1411 let r = match op {1412 BinOp::Shl => l128.checked_shl(shift_amount),1413 BinOp::Shr => l128.checked_shr(shift_amount),1414 _ => unreachable!(),1415 };1416 if shift_amount as usize >= lc.len() * 8 {1417 return Err(MirEvalError::Panic(format!(1418 "Overflow in {op:?}"1419 )));1420 }1421 if let Some(r) = r {1422 break 'b r;1423 }1424 };1425 return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));1426 };1427 Owned(r.to_bytes())1428 }1429 BinOp::Offset => not_supported!("offset binop"),1430 }1431 }1432 }1433 Rvalue::Discriminant(p) => {1434 let ty = self.place_ty(p, locals)?;1435 let bytes = self.eval_place(p, locals)?.get(self)?;1436 let result = self.compute_discriminant(ty, bytes)?;1437 Owned(result.to_le_bytes().to_vec())1438 }1439 Rvalue::Repeat(it, len) => {1440 let len = match try_const_usize(self.db, len.as_ref()) {1441 Some(it) => it as usize,1442 None => not_supported!("non evaluatable array len in repeat Rvalue"),1443 };1444 let val = self.eval_operand(it, locals)?.get(self)?;1445 let size = len * val.len();1446 Owned(val.iter().copied().cycle().take(size).collect())1447 }1448 Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),1449 Rvalue::ShallowInitBoxWithAlloc(ty) => {1450 let Some((size, align)) = self.size_align_of(ty.as_ref(), locals)? 
else {1451 not_supported!("unsized box initialization");1452 };1453 let addr = self.heap_allocate(size, align)?;1454 Owned(addr.to_bytes().to_vec())1455 }1456 Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),1457 Rvalue::Aggregate(kind, values) => {1458 let values = values1459 .iter()1460 .map(|it| self.eval_operand(it, locals))1461 .collect::<Result<'db, Vec<_>>>()?;1462 match kind {1463 AggregateKind::Array(_) => {1464 let mut r = vec![];1465 for it in values {1466 let value = it.get(self)?;1467 r.extend(value);1468 }1469 Owned(r)1470 }1471 AggregateKind::Tuple(ty) => {1472 let layout = self.layout(ty.as_ref())?;1473 Owned(self.construct_with_layout(1474 layout.size.bytes_usize(),1475 &layout,1476 None,1477 values.iter().map(|&it| it.into()),1478 )?)1479 }1480 AggregateKind::Union(it, f) => {1481 let layout =1482 self.layout_adt((*it).into(), GenericArgs::empty(self.interner()))?;1483 let offset = layout1484 .fields1485 .offset(u32::from(f.local_id.into_raw()) as usize)1486 .bytes_usize();1487 let op = values[0].get(self)?;1488 let mut result = vec![0; layout.size.bytes_usize()];1489 result[offset..offset + op.len()].copy_from_slice(op);1490 Owned(result)1491 }1492 AggregateKind::Adt(it, subst) => {1493 let (size, variant_layout, tag) =1494 self.layout_of_variant(*it, subst.as_ref(), locals)?;1495 Owned(self.construct_with_layout(1496 size,1497 &variant_layout,1498 tag,1499 values.iter().map(|&it| it.into()),1500 )?)1501 }1502 AggregateKind::Closure(ty) => {1503 let layout = self.layout(ty.as_ref())?;1504 Owned(self.construct_with_layout(1505 layout.size.bytes_usize(),1506 &layout,1507 None,1508 values.iter().map(|&it| it.into()),1509 )?)1510 }1511 }1512 }1513 Rvalue::Cast(kind, operand, target_ty) => match kind {1514 CastKind::PointerCoercion(cast) => match cast {1515 PointerCast::ReifyFnPointer | PointerCast::ClosureFnPointer(_) => {1516 let current_ty = self.operand_ty(operand, locals)?;1517 if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) = current_ty.kind() {1518 let id = self.vtable_map.id(current_ty);1519 let ptr_size = self.ptr_size();1520 Owned(id.to_le_bytes()[0..ptr_size].to_vec())1521 } else {1522 not_supported!(1523 "creating a fn pointer from a non FnDef or Closure type"1524 );1525 }1526 }1527 PointerCast::Unsize => {1528 let current_ty = self.operand_ty(operand, locals)?;1529 let addr = self.eval_operand(operand, locals)?;1530 self.coerce_unsized(addr, current_ty, target_ty.as_ref())?1531 }1532 PointerCast::MutToConstPointer | PointerCast::UnsafeFnPointer => {1533 // This is no-op1534 Borrowed(self.eval_operand(operand, locals)?)1535 }1536 PointerCast::ArrayToPointer => {1537 // We should remove the metadata part if the current type is slice1538 Borrowed(self.eval_operand(operand, locals)?.slice(0..self.ptr_size()))1539 }1540 },1541 CastKind::DynStar => not_supported!("dyn star cast"),1542 CastKind::IntToInt1543 | CastKind::PtrToPtr1544 | CastKind::PointerExposeAddress1545 | CastKind::PointerFromExposedAddress => {1546 let current_ty = self.operand_ty(operand, locals)?;1547 let is_signed = matches!(current_ty.kind(), TyKind::Int(_));1548 let current = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);1549 let dest_size = self.size_of_sized(1550 target_ty.as_ref(),1551 locals,1552 "destination of int to int cast",1553 )?;1554 Owned(current[0..dest_size].to_vec())1555 }1556 CastKind::FloatToInt => {1557 let ty = self.operand_ty(operand, locals)?;1558 let TyKind::Float(ty) = ty.kind() else {1559 not_supported!("invalid float to int cast");1560 
};1561 let value = self.eval_operand(operand, locals)?.get(self)?;1562 let value = match ty {1563 rustc_type_ir::FloatTy::F32 => {1564 let value = value.try_into().unwrap();1565 f32::from_le_bytes(value) as f641566 }1567 rustc_type_ir::FloatTy::F64 => {1568 let value = value.try_into().unwrap();1569 f64::from_le_bytes(value)1570 }1571 rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {1572 not_supported!("unstable floating point type f16 and f128");1573 }1574 };1575 let is_signed = matches!(target_ty.as_ref().kind(), TyKind::Int(_));1576 let dest_size = self.size_of_sized(1577 target_ty.as_ref(),1578 locals,1579 "destination of float to int cast",1580 )?;1581 let dest_bits = dest_size * 8;1582 let (max, min) = if dest_bits == 128 {1583 (i128::MAX, i128::MIN)1584 } else if is_signed {1585 let max = 1i128 << (dest_bits - 1);1586 (max - 1, -max)1587 } else {1588 (1i128 << dest_bits, 0)1589 };1590 let value = (value as i128).min(max).max(min);1591 let result = value.to_le_bytes();1592 Owned(result[0..dest_size].to_vec())1593 }1594 CastKind::FloatToFloat => {1595 let ty = self.operand_ty(operand, locals)?;1596 let TyKind::Float(ty) = ty.kind() else {1597 not_supported!("invalid float to int cast");1598 };1599 let value = self.eval_operand(operand, locals)?.get(self)?;1600 let value = match ty {1601 rustc_type_ir::FloatTy::F32 => {1602 let value = value.try_into().unwrap();1603 f32::from_le_bytes(value) as f641604 }1605 rustc_type_ir::FloatTy::F64 => {1606 let value = value.try_into().unwrap();1607 f64::from_le_bytes(value)1608 }1609 rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {1610 not_supported!("unstable floating point type f16 and f128");1611 }1612 };1613 let TyKind::Float(target_ty) = target_ty.as_ref().kind() else {1614 not_supported!("invalid float to float cast");1615 };1616 match target_ty {1617 rustc_type_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),1618 rustc_type_ir::FloatTy::F64 => Owned(value.to_le_bytes().to_vec()),1619 rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {1620 not_supported!("unstable floating point type f16 and f128");1621 }1622 }1623 }1624 CastKind::IntToFloat => {1625 let current_ty = self.operand_ty(operand, locals)?;1626 let is_signed = matches!(current_ty.kind(), TyKind::Int(_));1627 let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);1628 let value = i128::from_le_bytes(value);1629 let TyKind::Float(target_ty) = target_ty.as_ref().kind() else {1630 not_supported!("invalid int to float cast");1631 };1632 match target_ty {1633 rustc_type_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),1634 rustc_type_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),1635 rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => {1636 not_supported!("unstable floating point type f16 and f128");1637 }1638 }1639 }1640 CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),1641 },1642 Rvalue::ThreadLocalRef(n)1643 | Rvalue::AddressOf(n)1644 | Rvalue::BinaryOp(n)1645 | Rvalue::NullaryOp(n) => match *n {},1646 })1647 }16481649 fn compute_discriminant(&self, ty: Ty<'db>, bytes: &[u8]) -> Result<'db, i128> {1650 let layout = self.layout(ty)?;1651 let TyKind::Adt(adt_def, _) = ty.kind() else {1652 return Ok(0);1653 };1654 let AdtId::EnumId(e) = adt_def.def_id().0 else {1655 return Ok(0);1656 };1657 match &layout.variants {1658 Variants::Empty => unreachable!(),1659 Variants::Single { index } => {1660 let r =1661 
self.const_eval_discriminant(e.enum_variants(self.db).variants[index.0].0)?;1662 Ok(r)1663 }1664 Variants::Multiple { tag, tag_encoding, variants, .. } => {1665 let size = tag.size(&*self.target_data_layout).bytes_usize();1666 let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field1667 let is_signed = tag.is_signed();1668 match tag_encoding {1669 TagEncoding::Direct => {1670 let tag = &bytes[offset..offset + size];1671 Ok(i128::from_le_bytes(pad16(tag, is_signed)))1672 }1673 TagEncoding::Niche { untagged_variant, niche_start, .. } => {1674 let tag = &bytes[offset..offset + size];1675 let candidate_tag = i128::from_le_bytes(pad16(tag, is_signed))1676 .wrapping_sub(*niche_start as i128)1677 as usize;1678 let idx = variants1679 .iter_enumerated()1680 .map(|(it, _)| it)1681 .filter(|it| it != untagged_variant)1682 .nth(candidate_tag)1683 .unwrap_or(*untagged_variant)1684 .0;1685 let result =1686 self.const_eval_discriminant(e.enum_variants(self.db).variants[idx].0)?;1687 Ok(result)1688 }1689 }1690 }1691 }1692 }16931694 fn coerce_unsized_look_through_fields<T>(1695 &self,1696 ty: Ty<'db>,1697 goal: impl Fn(TyKind<'db>) -> Option<T>,1698 ) -> Result<'db, T> {1699 let kind = ty.kind();1700 if let Some(it) = goal(kind) {1701 return Ok(it);1702 }1703 if let TyKind::Adt(adt_ef, subst) = kind1704 && let AdtId::StructId(struct_id) = adt_ef.def_id().01705 {1706 let field_types = self.db.field_types(struct_id.into());1707 if let Some(ty) =1708 field_types.iter().last().map(|it| it.1.get().instantiate(self.interner(), subst))1709 {1710 return self.coerce_unsized_look_through_fields(ty, goal);1711 }1712 }1713 Err(MirEvalError::CoerceUnsizedError(ty.store()))1714 }17151716 fn coerce_unsized(1717 &mut self,1718 addr: Interval,1719 current_ty: Ty<'db>,1720 target_ty: Ty<'db>,1721 ) -> Result<'db, IntervalOrOwned> {1722 fn for_ptr<'db>(it: TyKind<'db>) -> Option<Ty<'db>> {1723 match it {1724 TyKind::RawPtr(ty, _) | TyKind::Ref(_, ty, _) => Some(ty),1725 _ => None,1726 }1727 }1728 let target_ty = self.coerce_unsized_look_through_fields(target_ty, for_ptr)?;1729 let current_ty = self.coerce_unsized_look_through_fields(current_ty, for_ptr)?;17301731 self.unsizing_ptr_from_addr(target_ty, current_ty, addr)1732 }17331734 /// Adds metadata to the address and create the fat pointer result of the unsizing operation.1735 fn unsizing_ptr_from_addr(1736 &mut self,1737 target_ty: Ty<'db>,1738 current_ty: Ty<'db>,1739 addr: Interval,1740 ) -> Result<'db, IntervalOrOwned> {1741 use IntervalOrOwned::*;1742 Ok(match &target_ty.kind() {1743 TyKind::Slice(_) => match ¤t_ty.kind() {1744 TyKind::Array(_, size) => {1745 let len = match try_const_usize(self.db, *size) {1746 None => {1747 not_supported!("unevaluatble len of array in coerce unsized")1748 }1749 Some(it) => it as usize,1750 };1751 let mut r = Vec::with_capacity(16);1752 let addr = addr.get(self)?;1753 r.extend(addr.iter().copied());1754 r.extend(len.to_le_bytes());1755 Owned(r)1756 }1757 t => {1758 not_supported!("slice unsizing from non array type {t:?}")1759 }1760 },1761 TyKind::Dynamic(..) 
=> {1762 let vtable = self.vtable_map.id(current_ty);1763 let mut r = Vec::with_capacity(16);1764 let addr = addr.get(self)?;1765 r.extend(addr.iter().copied());1766 r.extend(vtable.to_le_bytes());1767 Owned(r)1768 }1769 TyKind::Adt(adt_def, target_subst) => match ¤t_ty.kind() {1770 TyKind::Adt(current_adt_def, current_subst) => {1771 let id = adt_def.def_id().0;1772 let current_id = current_adt_def.def_id().0;1773 if id != current_id {1774 not_supported!("unsizing struct with different type");1775 }1776 let id = match id {1777 AdtId::StructId(s) => s,1778 AdtId::UnionId(_) => not_supported!("unsizing unions"),1779 AdtId::EnumId(_) => not_supported!("unsizing enums"),1780 };1781 let Some((last_field, _)) = id.fields(self.db).fields().iter().next_back()1782 else {1783 not_supported!("unsizing struct without field");1784 };1785 let target_last_field = self.db.field_types(id.into())[last_field]1786 .get()1787 .instantiate(self.interner(), target_subst);1788 let current_last_field = self.db.field_types(id.into())[last_field]1789 .get()1790 .instantiate(self.interner(), current_subst);1791 return self.unsizing_ptr_from_addr(1792 target_last_field,1793 current_last_field,1794 addr,1795 );1796 }1797 _ => not_supported!("unsizing struct with non adt type"),1798 },1799 _ => not_supported!("unknown unsized cast"),1800 })1801 }18021803 fn layout_of_variant(1804 &mut self,1805 it: VariantId,1806 subst: GenericArgs<'db>,1807 locals: &Locals,1808 ) -> Result<'db, (usize, Arc<Layout>, Option<(usize, usize, i128)>)> {1809 let adt = it.adt_id(self.db);1810 if let DefWithBodyId::VariantId(f) = locals.body.owner1811 && let VariantId::EnumVariantId(it) = it1812 && let AdtId::EnumId(e) = adt1813 && f.lookup(self.db).parent == e1814 {1815 // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and1816 // infinite sized type errors) we use a dummy layout1817 let i = self.const_eval_discriminant(it)?;1818 return Ok((16, self.layout(Ty::new_empty_tuple(self.interner()))?, Some((0, 16, i))));1819 }1820 let layout = self.layout_adt(adt, subst)?;1821 Ok(match &layout.variants {1822 Variants::Single { .. } | Variants::Empty => (layout.size.bytes_usize(), layout, None),1823 Variants::Multiple { variants, tag, tag_encoding, .. 
} => {1824 let enum_variant_id = match it {1825 VariantId::EnumVariantId(it) => it,1826 _ => not_supported!("multi variant layout for non-enums"),1827 };1828 let mut discriminant = self.const_eval_discriminant(enum_variant_id)?;1829 let lookup = enum_variant_id.lookup(self.db);1830 let rustc_enum_variant_idx = RustcEnumVariantIdx(lookup.index as usize);1831 let variant_layout = variants[rustc_enum_variant_idx].clone();1832 let have_tag = match tag_encoding {1833 TagEncoding::Direct => true,1834 TagEncoding::Niche { untagged_variant, niche_variants: _, niche_start } => {1835 if *untagged_variant == rustc_enum_variant_idx {1836 false1837 } else {1838 discriminant = (variants1839 .iter_enumerated()1840 .filter(|(it, _)| it != untagged_variant)1841 .position(|(it, _)| it == rustc_enum_variant_idx)1842 .unwrap() as i128)1843 .wrapping_add(*niche_start as i128);1844 true1845 }1846 }1847 };1848 (1849 layout.size.bytes_usize(),1850 Arc::new(variant_layout),1851 if have_tag {1852 Some((1853 layout.fields.offset(0).bytes_usize(),1854 tag.size(&*self.target_data_layout).bytes_usize(),1855 discriminant,1856 ))1857 } else {1858 None1859 },1860 )1861 }1862 })1863 }18641865 fn construct_with_layout(1866 &mut self,1867 size: usize, // Not necessarily equal to variant_layout.size1868 variant_layout: &Layout,1869 tag: Option<(usize, usize, i128)>,1870 values: impl Iterator<Item = IntervalOrOwned>,1871 ) -> Result<'db, Vec<u8>> {1872 let mut result = vec![0; size];1873 if let Some((offset, size, value)) = tag {1874 match result.get_mut(offset..offset + size) {1875 Some(it) => it.copy_from_slice(&value.to_le_bytes()[0..size]),1876 None => {1877 return Err(MirEvalError::InternalError(1878 format!(1879 "encoded tag ({offset}, {size}, {value}) is out of bounds 0..{size}"1880 )1881 .into(),1882 ));1883 }1884 }1885 }1886 for (i, op) in values.enumerate() {1887 let offset = variant_layout.fields.offset(i).bytes_usize();1888 let op = op.get(self)?;1889 match result.get_mut(offset..offset + op.len()) {1890 Some(it) => it.copy_from_slice(op),1891 None => {1892 return Err(MirEvalError::InternalError(1893 format!("field offset ({offset}) is out of bounds 0..{size}").into(),1894 ));1895 }1896 }1897 }1898 Ok(result)1899 }19001901 fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<'db, Interval> {1902 Ok(match &it.kind {1903 OperandKind::Copy(p) | OperandKind::Move(p) => {1904 locals.drop_flags.remove_place(p, &locals.body.projection_store);1905 self.eval_place(p, locals)?1906 }1907 OperandKind::Static(st) => {1908 let addr = self.eval_static(*st, locals)?;1909 Interval::new(addr, self.ptr_size())1910 }1911 OperandKind::Constant { konst, .. 
} => {1912 self.allocate_const_in_heap(locals, konst.as_ref())?1913 }1914 OperandKind::Allocation { allocation } => {1915 self.allocate_allocation_in_heap(locals, allocation.as_ref())?1916 }1917 })1918 }19191920 fn allocate_valtree_in_heap(1921 &mut self,1922 ty: Ty<'db>,1923 valtree: ValTree<'db>,1924 ) -> Result<'db, Interval> {1925 match ty.kind() {1926 TyKind::Bool => {1927 let value = valtree.inner().to_leaf().try_to_bool().unwrap();1928 let addr = self.heap_allocate(1, 1)?;1929 self.write_memory(addr, &[u8::from(value)])?;1930 Ok(Interval::new(addr, 1))1931 }1932 TyKind::Char => {1933 let value = valtree.inner().to_leaf().to_u32();1934 let addr = self.heap_allocate(4, 4)?;1935 self.write_memory(addr, &value.to_le_bytes())?;1936 Ok(Interval::new(addr, 4))1937 }1938 TyKind::Int(int_ty) => {1939 let size = int_ty.bit_width().unwrap_or(self.ptr_size() as u64);1940 let value = valtree.inner().to_leaf().to_int(Size::from_bytes(size));1941 let addr = self.heap_allocate(size as usize, size as usize)?;1942 self.write_memory(addr, &value.to_le_bytes()[..size as usize])?;1943 Ok(Interval::new(addr, size as usize))1944 }1945 TyKind::Uint(uint_ty) => {1946 let size = uint_ty.bit_width().unwrap_or(self.ptr_size() as u64);1947 let value = valtree.inner().to_leaf().to_uint(Size::from_bytes(size));1948 let addr = self.heap_allocate(size as usize, size as usize)?;1949 self.write_memory(addr, &value.to_le_bytes()[..size as usize])?;1950 Ok(Interval::new(addr, size as usize))1951 }1952 TyKind::Float(float_ty) => {1953 let size = float_ty.bit_width();1954 let value = valtree.inner().to_leaf().to_uint(Size::from_bytes(size));1955 let addr = self.heap_allocate(size as usize, size as usize)?;1956 self.write_memory(addr, &value.to_le_bytes()[..size as usize])?;1957 Ok(Interval::new(addr, size as usize))1958 }1959 TyKind::RawPtr(..) => {1960 let size = self.ptr_size();1961 let value = valtree.inner().to_leaf().to_uint(Size::from_bytes(size));1962 let addr = self.heap_allocate(size, size)?;1963 self.write_memory(addr, &value.to_le_bytes()[..size])?;1964 Ok(Interval::new(addr, size))1965 }1966 TyKind::Ref(_, inner_ty, _) => match inner_ty.kind() {1967 TyKind::Str => {1968 let bytes = valtree1969 .inner()1970 .to_branch()1971 .iter()1972 .map(|konst| match konst.kind() {1973 ConstKind::Value(value) => Ok(value.value.inner().to_leaf().to_u8()),1974 _ => not_supported!("unsupported const"),1975 })1976 .collect::<Result<'_, Vec<_>>>()?;1977 let bytes_addr = self.heap_allocate(bytes.len(), 1)?;1978 self.write_memory(bytes_addr, &bytes)?;1979 let ref_addr = self.heap_allocate(self.ptr_size() * 2, self.ptr_size())?;1980 self.write_memory(ref_addr, &bytes_addr.to_bytes())?;1981 let mut len = [0; 16];1982 len[..size_of::<usize>()].copy_from_slice(&bytes.len().to_le_bytes());1983 self.write_memory(ref_addr.offset(self.ptr_size()), &len[..self.ptr_size()])?;1984 Ok(Interval::new(ref_addr, self.ptr_size() * 2))1985 }1986 TyKind::Slice(inner_ty) => {1987 let item_layout = self.layout(inner_ty)?;1988 let items = valtree1989 .inner()1990 .to_branch()1991 .iter()1992 .map(|konst| match konst.kind() {1993 ConstKind::Value(value) => {1994 self.allocate_valtree_in_heap(value.ty, value.value)1995 }1996 _ => not_supported!("unsupported const"),1997 })1998 .collect::<Result<'_, Vec<_>>>()?;1999 let items_addr = self.heap_allocate(2000 items.len() * (item_layout.size.bits() as usize),