ctfe interpreter: extend provenance so that it can track whether a pointer is immutable

Ralf Jung 2023-11-25 18:41:53 +01:00
parent 85a4bd8f58
commit cb86303342
38 changed files with 261 additions and 145 deletions


@@ -126,7 +126,8 @@ pub(crate) fn codegen_const_value<'tcx>(
}
}
Scalar::Ptr(ptr, _size) => {
let (alloc_id, offset) = ptr.into_parts(); // we know the `offset` is relative
let (prov, offset) = ptr.into_parts(); // we know the `offset` is relative
let alloc_id = prov.alloc_id();
let base_addr = match fx.tcx.global_alloc(alloc_id) {
GlobalAlloc::Memory(alloc) => {
let data_id = data_id_for_alloc_id(
@@ -374,7 +375,8 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(0..alloc.len()).to_vec();
data.define(bytes.into_boxed_slice());
for &(offset, alloc_id) in alloc.provenance().ptrs().iter() {
for &(offset, prov) in alloc.provenance().ptrs().iter() {
let alloc_id = prov.alloc_id();
let addend = {
let endianness = tcx.data_layout.endian;
let offset = offset.bytes() as usize;

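The same mechanical change repeats across all the codegen backends below: pointer provenance used to *be* the `AllocId`, and is now a `CtfeProvenance` that wraps one, so every consumer splits the pointer and then calls `.alloc_id()`. A minimal standalone sketch of that consumer-side pattern, using stand-in types rather than rustc's real ones:

use std::num::NonZeroU64;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct AllocId(pub NonZeroU64);

// Stand-in for the new provenance type: an `AllocId` with the high bit
// reserved for the "immutable" flag (see the pointer hunks further down).
#[derive(Copy, Clone, Debug)]
pub struct CtfeProvenance(NonZeroU64);

const IMMUTABLE_MASK: u64 = 1 << 63;

impl CtfeProvenance {
    pub fn alloc_id(self) -> AllocId {
        // Mask out the flag bit to recover the plain allocation id.
        AllocId(NonZeroU64::new(self.0.get() & !IMMUTABLE_MASK).unwrap())
    }
}

fn main() {
    let prov = CtfeProvenance(NonZeroU64::new(42).unwrap());
    // Before: `let (alloc_id, offset) = ptr.into_parts();`
    // After:  `let (prov, offset) = ptr.into_parts();`
    //         `let alloc_id = prov.alloc_id();`
    assert_eq!(prov.alloc_id().0.get(), 42);
}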

@@ -199,7 +199,8 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
}
}
Scalar::Ptr(ptr, _size) => {
let (alloc_id, offset) = ptr.into_parts();
let (prov, offset) = ptr.into_parts(); // we know the `offset` is relative
let alloc_id = prov.alloc_id();
let base_addr =
match self.tcx.global_alloc(alloc_id) {
GlobalAlloc::Memory(alloc) => {


@@ -285,7 +285,8 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
let pointer_size = dl.pointer_size.bytes() as usize;
let mut next_offset = 0;
for &(offset, alloc_id) in alloc.provenance().ptrs().iter() {
for &(offset, prov) in alloc.provenance().ptrs().iter() {
let alloc_id = prov.alloc_id();
let offset = offset.bytes();
assert_eq!(offset as usize as u64, offset);
let offset = offset as usize;
@@ -313,7 +314,7 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
llvals.push(cx.scalar_to_backend(
InterpScalar::from_pointer(
interpret::Pointer::new(alloc_id, Size::from_bytes(ptr_offset)),
interpret::Pointer::new(prov, Size::from_bytes(ptr_offset)),
&cx.tcx,
),
abi::Scalar::Initialized { value: Primitive::Pointer(address_space), valid_range: WrappingRange::full(dl.pointer_size) },


@@ -246,8 +246,8 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
}
}
Scalar::Ptr(ptr, _size) => {
let (alloc_id, offset) = ptr.into_parts();
let (base_addr, base_addr_space) = match self.tcx.global_alloc(alloc_id) {
let (prov, offset) = ptr.into_parts();
let (base_addr, base_addr_space) = match self.tcx.global_alloc(prov.alloc_id()) {
GlobalAlloc::Memory(alloc) => {
let init = const_alloc_to_llvm(self, alloc);
let alloc = alloc.inner();


@@ -72,7 +72,7 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
}
let mut next_offset = 0;
for &(offset, alloc_id) in alloc.provenance().ptrs().iter() {
for &(offset, prov) in alloc.provenance().ptrs().iter() {
let offset = offset.bytes();
assert_eq!(offset as usize as u64, offset);
let offset = offset as usize;
@@ -92,13 +92,10 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: ConstAllocation<
.expect("const_alloc_to_llvm: could not read relocation pointer")
as u64;
let address_space = cx.tcx.global_alloc(alloc_id).address_space(cx);
let address_space = cx.tcx.global_alloc(prov.alloc_id()).address_space(cx);
llvals.push(cx.scalar_to_backend(
InterpScalar::from_pointer(
Pointer::new(alloc_id, Size::from_bytes(ptr_offset)),
&cx.tcx,
),
InterpScalar::from_pointer(Pointer::new(prov, Size::from_bytes(ptr_offset)), &cx.tcx),
Scalar::Initialized {
value: Primitive::Pointer(address_space),
valid_range: WrappingRange::full(dl.pointer_size),


@@ -105,7 +105,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
};
let a = Scalar::from_pointer(
Pointer::new(bx.tcx().reserve_and_set_memory_alloc(data), Size::ZERO),
Pointer::new(bx.tcx().reserve_and_set_memory_alloc(data).into(), Size::ZERO),
&bx.tcx(),
);
let a_llval = bx.scalar_to_backend(


@@ -155,8 +155,8 @@ pub(super) fn op_to_const<'tcx>(
match immediate {
Left(ref mplace) => {
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let (alloc_id, offset) = mplace.ptr().into_parts();
let alloc_id = alloc_id.expect("cannot have `fake` place fot non-ZST type");
let (prov, offset) = mplace.ptr().into_parts();
let alloc_id = prov.expect("cannot have `fake` place for non-ZST type").alloc_id();
ConstValue::Indirect { alloc_id, offset }
}
// see comment on `let force_as_immediate` above
@@ -178,8 +178,8 @@ pub(super) fn op_to_const<'tcx>(
);
let msg = "`op_to_const` on an immediate scalar pair must only be used on slice references to the beginning of an actual allocation";
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let (alloc_id, offset) = a.to_pointer(ecx).expect(msg).into_parts();
let alloc_id = alloc_id.expect(msg);
let (prov, offset) = a.to_pointer(ecx).expect(msg).into_parts();
let alloc_id = prov.expect(msg).alloc_id();
let data = ecx.tcx.global_alloc(alloc_id).unwrap_memory();
assert!(offset == abi::Size::ZERO, "{}", msg);
let meta = b.to_target_usize(ecx).expect(msg);
@@ -353,7 +353,7 @@ pub fn eval_in_interpreter<'mir, 'tcx>(
let validation =
const_validate_mplace(&ecx, &mplace, is_static, cid.promoted.is_some());
let alloc_id = mplace.ptr().provenance.unwrap();
let alloc_id = mplace.ptr().provenance.unwrap().alloc_id();
// Validation failed, report an error.
if let Err(error) = validation {


@@ -49,7 +49,7 @@ pub struct CompileTimeInterpreter<'mir, 'tcx> {
pub(super) num_evaluated_steps: usize,
/// The virtual call stack.
pub(super) stack: Vec<Frame<'mir, 'tcx, AllocId, ()>>,
pub(super) stack: Vec<Frame<'mir, 'tcx>>,
/// We need to make sure consts never point to anything mutable, even recursively. That is
/// relied on for pattern matching on consts with references.
@@ -638,10 +638,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
}
#[inline(always)]
fn expose_ptr(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_ptr: Pointer<AllocId>,
) -> InterpResult<'tcx> {
fn expose_ptr(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx> {
// This is only reachable with -Zunleash-the-miri-inside-of-you.
throw_unsup_format!("exposing pointers is not possible at compile-time")
}


@@ -7,7 +7,9 @@ use hir::CRATE_HIR_ID;
use rustc_hir::{self as hir, def_id::DefId, definitions::DefPathData};
use rustc_index::IndexVec;
use rustc_middle::mir;
use rustc_middle::mir::interpret::{ErrorHandled, InvalidMetaKind, ReportedErrorInfo};
use rustc_middle::mir::interpret::{
CtfeProvenance, ErrorHandled, InvalidMetaKind, ReportedErrorInfo,
};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::layout::{
self, FnAbiError, FnAbiOfHelpers, FnAbiRequest, LayoutError, LayoutOf, LayoutOfHelpers,
@@ -20,9 +22,9 @@ use rustc_span::Span;
use rustc_target::abi::{call::FnAbi, Align, HasDataLayout, Size, TargetDataLayout};
use super::{
AllocId, GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace,
MemPlaceMeta, Memory, MemoryKind, OpTy, Operand, Place, PlaceTy, Pointer, PointerArithmetic,
Projectable, Provenance, Scalar, StackPopJump,
GlobalId, Immediate, InterpErrorInfo, InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta,
Memory, MemoryKind, OpTy, Operand, Place, PlaceTy, Pointer, PointerArithmetic, Projectable,
Provenance, Scalar, StackPopJump,
};
use crate::errors;
use crate::util;
@@ -84,7 +86,7 @@ impl Drop for SpanGuard {
}
/// A stack frame.
pub struct Frame<'mir, 'tcx, Prov: Provenance = AllocId, Extra = ()> {
pub struct Frame<'mir, 'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> {
////////////////////////////////////////////////////////////////////////////////
// Function and callsite information
////////////////////////////////////////////////////////////////////////////////
@@ -156,7 +158,7 @@ pub enum StackPopCleanup {
/// State of a local variable including a memoized layout
#[derive(Clone)]
pub struct LocalState<'tcx, Prov: Provenance = AllocId> {
pub struct LocalState<'tcx, Prov: Provenance = CtfeProvenance> {
value: LocalValue<Prov>,
/// Don't modify if `Some`, this is only used to prevent computing the layout twice.
/// Avoids computing the layout of locals that are never actually initialized.
@@ -177,7 +179,7 @@ impl<Prov: Provenance> std::fmt::Debug for LocalState<'_, Prov> {
/// This does not store the type of the local; the type is given by `body.local_decls` and can never
/// change, so by not storing here we avoid having to maintain that as an invariant.
#[derive(Copy, Clone, Debug)] // Miri debug-prints these
pub(super) enum LocalValue<Prov: Provenance = AllocId> {
pub(super) enum LocalValue<Prov: Provenance = CtfeProvenance> {
/// This local is not currently alive, and cannot be used at all.
Dead,
/// A normal, live local.


@@ -18,7 +18,7 @@ use super::validity::RefTracking;
use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::mir::interpret::{CtfeProvenance, InterpResult};
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};
use rustc_ast::Mutability;
@@ -34,7 +34,7 @@ pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
'mir,
'tcx,
MemoryKind = T,
Provenance = AllocId,
Provenance = CtfeProvenance,
ExtraFnVal = !,
FrameExtra = (),
AllocExtra = (),
@@ -135,7 +135,7 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
alloc.mutability = Mutability::Not;
};
// link the alloc id to the actual allocation
leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, alloc_id)| alloc_id));
leftover_allocations.extend(alloc.provenance().ptrs().iter().map(|&(_, prov)| prov.alloc_id()));
let alloc = tcx.mk_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
None
@@ -178,10 +178,10 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
{
let ptr = mplace.meta().unwrap_meta().to_pointer(&tcx)?;
if let Some(alloc_id) = ptr.provenance {
if let Some(prov) = ptr.provenance {
// Explicitly choose const mode here, since vtables are immutable, even
// if the reference of the fat pointer is mutable.
self.intern_shallow(alloc_id, InternMode::Const, None);
self.intern_shallow(prov.alloc_id(), InternMode::Const, None);
} else {
// Validation will error (with a better message) on an invalid vtable pointer.
// Let validation show the error message, but make sure it *does* error.
@@ -191,7 +191,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
}
// Check if we have encountered this pointer+layout combination before.
// Only recurse for allocation-backed pointers.
if let Some(alloc_id) = mplace.ptr().provenance {
if let Some(prov) = mplace.ptr().provenance {
// Compute the mode with which we intern this. Our goal here is to make as many
// statics as we can immutable so they can be placed in read-only memory by LLVM.
let ref_mode = match self.mode {
@@ -234,7 +234,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
InternMode::Const
}
};
match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
match self.intern_shallow(prov.alloc_id(), ref_mode, Some(referenced_ty)) {
// No need to recurse, these are interned already and statics may have
// cycles, so we don't want to recurse there
Some(IsStaticOrFn) => {}
@@ -353,7 +353,7 @@ pub fn intern_const_alloc_recursive<
leftover_allocations,
// The outermost allocation must exist, because we allocated it with
// `Memory::allocate`.
ret.ptr().provenance.unwrap(),
ret.ptr().provenance.unwrap().alloc_id(),
base_intern_mode,
Some(ret.layout.ty),
);
@@ -431,7 +431,8 @@ pub fn intern_const_alloc_recursive<
}
let alloc = tcx.mk_const_alloc(alloc);
tcx.set_alloc_id_memory(alloc_id, alloc);
for &(_, alloc_id) in alloc.inner().provenance().ptrs().iter() {
for &(_, prov) in alloc.inner().provenance().ptrs().iter() {
let alloc_id = prov.alloc_id();
if leftover_allocations.insert(alloc_id) {
todo.push(alloc_id);
}
@@ -503,10 +504,11 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
// `allocate` picks a fresh AllocId that we will associate with its data below.
let dest = self.allocate(layout, MemoryKind::Stack)?;
f(self, &dest.clone().into())?;
let mut alloc = self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap()).unwrap().1;
let mut alloc =
self.memory.alloc_map.remove(&dest.ptr().provenance.unwrap().alloc_id()).unwrap().1;
alloc.mutability = Mutability::Not;
let alloc = self.tcx.mk_const_alloc(alloc);
let alloc_id = dest.ptr().provenance.unwrap(); // this was just allocated, it must have provenance
let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
self.tcx.set_alloc_id_memory(alloc_id, alloc);
Ok(alloc_id)
}


@@ -16,8 +16,9 @@ use rustc_target::abi::{Align, Size};
use rustc_target::spec::abi::Abi as CallAbi;
use super::{
AllocBytes, AllocId, AllocKind, AllocRange, Allocation, ConstAllocation, FnArg, Frame, ImmTy,
InterpCx, InterpResult, MPlaceTy, MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance,
AllocBytes, AllocId, AllocKind, AllocRange, Allocation, ConstAllocation, CtfeProvenance, FnArg,
Frame, ImmTy, InterpCx, InterpResult, MPlaceTy, MemoryKind, Misalignment, OpTy, PlaceTy,
Pointer, Provenance,
};
/// Data returned by Machine::stack_pop,
@@ -513,8 +514,8 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
/// (CTFE and ConstProp) use the same instance. Here, we share that code.
pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
type Provenance = AllocId;
type ProvenanceExtra = ();
type Provenance = CtfeProvenance;
type ProvenanceExtra = (); // FIXME extract the "immutable" bool?
type ExtraFnVal = !;
@@ -567,14 +568,14 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
def_id: DefId,
) -> InterpResult<$tcx, Pointer> {
// Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
Ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id), Size::ZERO))
Ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id).into(), Size::ZERO))
}
#[inline(always)]
fn adjust_alloc_base_pointer(
_ecx: &InterpCx<$mir, $tcx, Self>,
ptr: Pointer<AllocId>,
) -> InterpResult<$tcx, Pointer<AllocId>> {
ptr: Pointer<CtfeProvenance>,
) -> InterpResult<$tcx, Pointer<CtfeProvenance>> {
Ok(ptr)
}
@@ -582,7 +583,7 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
fn ptr_from_addr_cast(
_ecx: &InterpCx<$mir, $tcx, Self>,
addr: u64,
) -> InterpResult<$tcx, Pointer<Option<AllocId>>> {
) -> InterpResult<$tcx, Pointer<Option<CtfeProvenance>>> {
// Allow these casts, but make the pointer not dereferenceable.
// (I.e., they behave like transmutation.)
// This is correct because no pointers can ever be exposed in compile-time evaluation.
@@ -592,10 +593,10 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
#[inline(always)]
fn ptr_get_alloc(
_ecx: &InterpCx<$mir, $tcx, Self>,
ptr: Pointer<AllocId>,
ptr: Pointer<CtfeProvenance>,
) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
// We know `offset` is relative to the allocation, so we can use `into_parts`.
let (alloc_id, offset) = ptr.into_parts();
Some((alloc_id, offset, ()))
let (prov, offset) = ptr.into_parts();
Some((prov.alloc_id(), offset, ()))
}
}

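Note that `ProvenanceExtra` stays `()`: the new `ptr_get_alloc` masks the flag away, so compile-time machines never observe the immutable bit yet (hence the `FIXME` above about extracting the bool). A small stand-in sketch of that shape, not rustc's actual signature:

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocId(u64);
#[derive(Copy, Clone, Debug)]
struct CtfeProvenance(u64); // stand-in: high bit is the immutable flag
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct Size(u64);

const IMMUTABLE_MASK: u64 = 1 << 63;

// Shape of the new `ptr_get_alloc`: the offset stays relative, the flag bit
// is masked off, and the `()` "extra" carries no information.
fn ptr_get_alloc(prov: CtfeProvenance, offset: Size) -> Option<(AllocId, Size, ())> {
    Some((AllocId(prov.0 & !IMMUTABLE_MASK), offset, ()))
}

fn main() {
    let tagged = CtfeProvenance(3 | IMMUTABLE_MASK);
    assert_eq!(ptr_get_alloc(tagged, Size(0)), Some((AllocId(3), Size(0), ())));
}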

@@ -22,8 +22,8 @@ use crate::fluent_generated as fluent;
use super::{
alloc_range, AllocBytes, AllocId, AllocMap, AllocRange, Allocation, CheckAlignMsg,
CheckInAllocMsg, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak, Misalignment, Pointer,
PointerArithmetic, Provenance, Scalar,
CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
Misalignment, Pointer, PointerArithmetic, Provenance, Scalar,
};
#[derive(Debug, PartialEq, Copy, Clone)]
@ -159,9 +159,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
#[inline]
pub fn global_base_pointer(
&self,
ptr: Pointer<AllocId>,
ptr: Pointer<CtfeProvenance>,
) -> InterpResult<'tcx, Pointer<M::Provenance>> {
let alloc_id = ptr.provenance;
let alloc_id = ptr.provenance.alloc_id();
// We need to handle `extern static`.
match self.tcx.try_get_global_alloc(alloc_id) {
Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {


@@ -13,9 +13,9 @@ use rustc_middle::{mir, ty};
use rustc_target::abi::{self, Abi, HasDataLayout, Size};
use super::{
alloc_range, from_known_layout, mir_assign_valid_types, AllocId, Frame, InterpCx, InterpResult,
MPlaceTy, Machine, MemPlace, MemPlaceMeta, OffsetMode, PlaceTy, Pointer, Projectable,
Provenance, Scalar,
alloc_range, from_known_layout, mir_assign_valid_types, CtfeProvenance, Frame, InterpCx,
InterpResult, MPlaceTy, Machine, MemPlace, MemPlaceMeta, OffsetMode, PlaceTy, Pointer,
Projectable, Provenance, Scalar,
};
/// An `Immediate` represents a single immediate self-contained Rust value.
@@ -26,7 +26,7 @@ use super::{
/// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
/// defined on `Immediate`, and do not have to work with a `Place`.
#[derive(Copy, Clone, Debug)]
pub enum Immediate<Prov: Provenance = AllocId> {
pub enum Immediate<Prov: Provenance = CtfeProvenance> {
/// A single scalar value (must have *initialized* `Scalar` ABI).
Scalar(Scalar<Prov>),
/// A pair of two scalar value (must have `ScalarPair` ABI where both fields are
@@ -98,7 +98,7 @@ impl<Prov: Provenance> Immediate<Prov> {
// ScalarPair needs a type to interpret, so we often have an immediate and a type together
// as input for binary and cast operations.
#[derive(Clone)]
pub struct ImmTy<'tcx, Prov: Provenance = AllocId> {
pub struct ImmTy<'tcx, Prov: Provenance = CtfeProvenance> {
imm: Immediate<Prov>,
pub layout: TyAndLayout<'tcx>,
}
@@ -334,13 +334,13 @@ impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for ImmTy<'tcx, Prov> {
/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug)]
pub(super) enum Operand<Prov: Provenance = AllocId> {
pub(super) enum Operand<Prov: Provenance = CtfeProvenance> {
Immediate(Immediate<Prov>),
Indirect(MemPlace<Prov>),
}
#[derive(Clone)]
pub struct OpTy<'tcx, Prov: Provenance = AllocId> {
pub struct OpTy<'tcx, Prov: Provenance = CtfeProvenance> {
op: Operand<Prov>, // Keep this private; it helps enforce invariants.
pub layout: TyAndLayout<'tcx>,
}
@@ -750,17 +750,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?;
let imm = match val_val {
mir::ConstValue::Indirect { alloc_id, offset } => {
// We rely on mutability being set correctly in that allocation to prevent writes
// where none should happen.
let ptr = self.global_base_pointer(Pointer::new(alloc_id, offset))?;
// This is const data, no mutation allowed.
let ptr = self.global_base_pointer(Pointer::new(
CtfeProvenance::from(alloc_id).as_immutable(),
offset,
))?;
return Ok(self.ptr_to_mplace(ptr.into(), layout).into());
}
mir::ConstValue::Scalar(x) => adjust_scalar(x)?.into(),
mir::ConstValue::ZeroSized => Immediate::Uninit,
mir::ConstValue::Slice { data, meta } => {
// We rely on mutability being set correctly in `data` to prevent writes
// where none should happen.
let ptr = Pointer::new(self.tcx.reserve_and_set_memory_alloc(data), Size::ZERO);
// This is const data, no mutation allowed.
let alloc_id = self.tcx.reserve_and_set_memory_alloc(data);
let ptr = Pointer::new(CtfeProvenance::from(alloc_id).as_immutable(), Size::ZERO);
Immediate::new_slice(self.global_base_pointer(ptr)?.into(), meta, self)
}
};

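This hunk is where the new bit actually gets set: pointers to const data are tagged with `as_immutable()` at creation, instead of relying only on the mutability of the allocation they point to. A hypothetical sketch of the kind of write-time check such a tag enables; the commit itself only introduces the flag, and leaves any enforcement to the machine:

#[derive(Copy, Clone, Debug)]
struct CtfeProvenance(u64); // stand-in: high bit is the immutable flag

const IMMUTABLE_MASK: u64 = 1 << 63;

impl CtfeProvenance {
    fn as_immutable(self) -> Self {
        CtfeProvenance(self.0 | IMMUTABLE_MASK)
    }
    fn immutable(self) -> bool {
        self.0 & IMMUTABLE_MASK != 0
    }
}

// Hypothetical write-time check a machine could perform with the new bit.
fn check_write(prov: CtfeProvenance) -> Result<(), &'static str> {
    if prov.immutable() { Err("write through an immutable pointer") } else { Ok(()) }
}

fn main() {
    let prov = CtfeProvenance(1).as_immutable(); // like the const-data path above
    assert!(check_write(prov).is_err());
}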

@@ -14,14 +14,14 @@ use rustc_middle::ty::Ty;
use rustc_target::abi::{Abi, Align, HasDataLayout, Size};
use super::{
alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckAlignMsg, ImmTy,
Immediate, InterpCx, InterpResult, Machine, MemoryKind, Misalignment, OffsetMode, OpTy,
alloc_range, mir_assign_valid_types, AllocRef, AllocRefMut, CheckAlignMsg, CtfeProvenance,
ImmTy, Immediate, InterpCx, InterpResult, Machine, MemoryKind, Misalignment, OffsetMode, OpTy,
Operand, Pointer, PointerArithmetic, Projectable, Provenance, Readable, Scalar,
};
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
/// Information required for the sound usage of a `MemPlace`.
pub enum MemPlaceMeta<Prov: Provenance = AllocId> {
pub enum MemPlaceMeta<Prov: Provenance = CtfeProvenance> {
/// The unsized payload (e.g. length for slices or vtable pointer for trait objects).
Meta(Scalar<Prov>),
/// `Sized` types or unsized `extern type`
@@ -49,7 +49,7 @@ impl<Prov: Provenance> MemPlaceMeta<Prov> {
}
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
pub(super) struct MemPlace<Prov: Provenance = AllocId> {
pub(super) struct MemPlace<Prov: Provenance = CtfeProvenance> {
/// The pointer can be a pure integer, with the `None` provenance.
pub ptr: Pointer<Option<Prov>>,
/// Metadata for unsized places. Interpretation is up to the type.
@@ -100,7 +100,7 @@ impl<Prov: Provenance> MemPlace<Prov> {
/// A MemPlace with its layout. Constructing it is only possible in this module.
#[derive(Clone, Hash, Eq, PartialEq)]
pub struct MPlaceTy<'tcx, Prov: Provenance = AllocId> {
pub struct MPlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
mplace: MemPlace<Prov>,
pub layout: TyAndLayout<'tcx>,
}
@@ -179,7 +179,7 @@ impl<'tcx, Prov: Provenance> Projectable<'tcx, Prov> for MPlaceTy<'tcx, Prov> {
}
#[derive(Copy, Clone, Debug)]
pub(super) enum Place<Prov: Provenance = AllocId> {
pub(super) enum Place<Prov: Provenance = CtfeProvenance> {
/// A place referring to a value allocated in the `Memory` system.
Ptr(MemPlace<Prov>),
@@ -195,7 +195,7 @@ pub(super) enum Place<Prov: Provenance = AllocId> {
}
#[derive(Clone)]
pub struct PlaceTy<'tcx, Prov: Provenance = AllocId> {
pub struct PlaceTy<'tcx, Prov: Provenance = CtfeProvenance> {
place: Place<Prov>, // Keep this private; it helps enforce invariants.
pub layout: TyAndLayout<'tcx>,
}


@@ -18,14 +18,14 @@ use rustc_target::abi::{
use rustc_target::spec::abi::Abi;
use super::{
AllocId, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, Projectable,
Provenance, Scalar, StackPopCleanup,
CtfeProvenance, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy,
Projectable, Provenance, Scalar, StackPopCleanup,
};
use crate::fluent_generated as fluent;
/// An argment passed to a function.
#[derive(Clone, Debug)]
pub enum FnArg<'tcx, Prov: Provenance = AllocId> {
pub enum FnArg<'tcx, Prov: Provenance = CtfeProvenance> {
/// Pass a copy of the given operand.
Copy(OpTy<'tcx, Prov>),
/// Allow for the argument to be passed in-place: destroy the value originally stored at that place and


@@ -75,7 +75,7 @@ static_assert_size!(ConstValue<'_>, 24);
impl<'tcx> ConstValue<'tcx> {
#[inline]
pub fn try_to_scalar(&self) -> Option<Scalar<AllocId>> {
pub fn try_to_scalar(&self) -> Option<Scalar> {
match *self {
ConstValue::Indirect { .. } | ConstValue::Slice { .. } | ConstValue::ZeroSized => None,
ConstValue::Scalar(val) => Some(val),
@@ -161,8 +161,8 @@ impl<'tcx> ConstValue<'tcx> {
return Some(&[]);
}
// Non-empty slice, must have memory. We know this is a relative pointer.
let (inner_alloc_id, offset) = ptr.into_parts();
let data = tcx.global_alloc(inner_alloc_id?).unwrap_memory();
let (inner_prov, offset) = ptr.into_parts();
let data = tcx.global_alloc(inner_prov?.alloc_id()).unwrap_memory();
(data, offset.bytes(), offset.bytes() + len)
}
};


@@ -18,9 +18,9 @@ use rustc_span::DUMMY_SP;
use rustc_target::abi::{Align, HasDataLayout, Size};
use super::{
read_target_uint, write_target_uint, AllocId, BadBytesAccess, InterpError, InterpResult,
Pointer, PointerArithmetic, Provenance, ResourceExhaustionInfo, Scalar, ScalarSizeMismatch,
UndefinedBehaviorInfo, UnsupportedOpInfo,
read_target_uint, write_target_uint, AllocId, BadBytesAccess, CtfeProvenance, InterpError,
InterpResult, Pointer, PointerArithmetic, Provenance, ResourceExhaustionInfo, Scalar,
ScalarSizeMismatch, UndefinedBehaviorInfo, UnsupportedOpInfo,
};
use crate::ty;
use init_mask::*;
@@ -63,7 +63,7 @@ impl AllocBytes for Box<[u8]> {
// hashed. (see the `Hash` impl below for more details), so the impl is not derived.
#[derive(Clone, Eq, PartialEq, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct Allocation<Prov: Provenance = AllocId, Extra = (), Bytes = Box<[u8]>> {
pub struct Allocation<Prov: Provenance = CtfeProvenance, Extra = (), Bytes = Box<[u8]>> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer.
bytes: Bytes,
@@ -336,14 +336,14 @@ impl<Prov: Provenance, Bytes: AllocBytes> Allocation<Prov, (), Bytes> {
}
}
impl<Bytes: AllocBytes> Allocation<AllocId, (), Bytes> {
/// Adjust allocation from the ones in tcx to a custom Machine instance
/// with a different Provenance and Extra type.
impl<Bytes: AllocBytes> Allocation<CtfeProvenance, (), Bytes> {
/// Adjust allocation from the ones in `tcx` to a custom Machine instance
/// with a different `Provenance` and `Extra` type.
pub fn adjust_from_tcx<Prov: Provenance, Extra, Err>(
self,
cx: &impl HasDataLayout,
extra: Extra,
mut adjust_ptr: impl FnMut(Pointer<AllocId>) -> Result<Pointer<Prov>, Err>,
mut adjust_ptr: impl FnMut(Pointer<CtfeProvenance>) -> Result<Pointer<Prov>, Err>,
) -> Result<Allocation<Prov, Extra, Bytes>, Err> {
let mut bytes = self.bytes;
// Adjust provenance of pointers stored in this allocation.


@@ -6,14 +6,14 @@ use std::cmp;
use rustc_data_structures::sorted_map::SortedMap;
use rustc_target::abi::{HasDataLayout, Size};
use super::{alloc_range, AllocError, AllocId, AllocRange, AllocResult, Provenance};
use super::{alloc_range, AllocError, AllocRange, AllocResult, CtfeProvenance, Provenance};
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
/// Stores the provenance information of pointers stored in memory.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable)]
pub struct ProvenanceMap<Prov = AllocId> {
/// Provenance in this map applies from the given offset for an entire pointer-size worth of
pub struct ProvenanceMap<Prov = CtfeProvenance> {
/// `Provenance` in this map applies from the given offset for an entire pointer-size worth of
/// bytes. Two entries in this map are always at least a pointer size apart.
ptrs: SortedMap<Size, Prov>,
/// Provenance in this map only applies to the given single byte.
@@ -22,18 +22,19 @@ pub struct ProvenanceMap<Prov = AllocId> {
bytes: Option<Box<SortedMap<Size, Prov>>>,
}
// These impls are generic over `Prov` since `CtfeProvenance` is only decodable/encodable
// for some particular `D`/`S`.
impl<D: Decoder, Prov: Provenance + Decodable<D>> Decodable<D> for ProvenanceMap<Prov> {
fn decode(d: &mut D) -> Self {
assert!(!Prov::OFFSET_IS_ADDR); // only `AllocId` is ever serialized
assert!(!Prov::OFFSET_IS_ADDR); // only `CtfeProvenance` is ever serialized
Self { ptrs: Decodable::decode(d), bytes: None }
}
}
impl<S: Encoder, Prov: Provenance + Encodable<S>> Encodable<S> for ProvenanceMap<Prov> {
fn encode(&self, s: &mut S) {
let Self { ptrs, bytes } = self;
assert!(!Prov::OFFSET_IS_ADDR); // only `AllocId` is ever serialized
debug_assert!(bytes.is_none());
assert!(!Prov::OFFSET_IS_ADDR); // only `CtfeProvenance` is ever serialized
debug_assert!(bytes.is_none()); // without `OFFSET_IS_ADDR`, this is always empty
ptrs.encode(s)
}
}
@@ -54,10 +55,10 @@ impl ProvenanceMap {
/// Give access to the ptr-sized provenances (which can also be thought of as relocations, and
/// indeed that is how codegen treats them).
///
/// Only exposed with `AllocId` provenance, since it panics if there is bytewise provenance.
/// Only exposed with `CtfeProvenance` provenance, since it panics if there is bytewise provenance.
#[inline]
pub fn ptrs(&self) -> &SortedMap<Size, AllocId> {
debug_assert!(self.bytes.is_none()); // `AllocId::OFFSET_IS_ADDR` is false so this cannot fail
pub fn ptrs(&self) -> &SortedMap<Size, CtfeProvenance> {
debug_assert!(self.bytes.is_none()); // `CtfeProvenance::OFFSET_IS_ADDR` is false so this cannot fail
&self.ptrs
}
}


@@ -290,7 +290,7 @@ macro_rules! impl_into_diagnostic_arg_through_debug {
// These types have nice `Debug` output so we can just use them in diagnostics.
impl_into_diagnostic_arg_through_debug! {
AllocId,
Pointer,
Pointer<AllocId>,
AllocRange,
}
@@ -323,7 +323,7 @@ pub enum UndefinedBehaviorInfo<'tcx> {
/// Invalid metadata in a wide pointer
InvalidMeta(InvalidMetaKind),
/// Reading a C string that does not end within its allocation.
UnterminatedCString(Pointer),
UnterminatedCString(Pointer<AllocId>),
/// Using a pointer after it got freed.
PointerUseAfterFree(AllocId, CheckInAllocMsg),
/// Used a pointer outside the bounds it is valid for.
@@ -350,11 +350,11 @@ pub enum UndefinedBehaviorInfo<'tcx> {
/// Using a non-character `u32` as character.
InvalidChar(u32),
/// The tag of an enum does not encode an actual discriminant.
InvalidTag(Scalar),
InvalidTag(Scalar<AllocId>),
/// Using a pointer-not-to-a-function as function pointer.
InvalidFunctionPointer(Pointer),
InvalidFunctionPointer(Pointer<AllocId>),
/// Using a pointer-not-to-a-vtable as vtable pointer.
InvalidVTablePointer(Pointer),
InvalidVTablePointer(Pointer<AllocId>),
/// Using a string that is not valid UTF-8,
InvalidStr(std::str::Utf8Error),
/// Using uninitialized data where it is not allowed.


@@ -157,7 +157,7 @@ pub use self::allocation::{
InitChunk, InitChunkIter,
};
pub use self::pointer::{Pointer, PointerArithmetic, Provenance};
pub use self::pointer::{CtfeProvenance, Pointer, PointerArithmetic, Provenance};
/// Uniquely identifies one of the following:
/// - A constant


@@ -3,7 +3,7 @@ use super::{AllocId, InterpResult};
use rustc_macros::HashStable;
use rustc_target::abi::{HasDataLayout, Size};
use std::fmt;
use std::{fmt, num::NonZeroU64};
////////////////////////////////////////////////////////////////////////////////
// Pointer arithmetic
@@ -141,6 +141,78 @@ pub trait Provenance: Copy + fmt::Debug + 'static {
fn join(left: Option<Self>, right: Option<Self>) -> Option<Self>;
}
/// The type of provenance in the compile-time interpreter.
/// This is a packed representation of an `AllocId` and an `immutable: bool`.
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct CtfeProvenance(NonZeroU64);
impl From<AllocId> for CtfeProvenance {
fn from(value: AllocId) -> Self {
let prov = CtfeProvenance(value.0);
assert!(!prov.immutable(), "`AllocId` with the highest bit set cannot be used in CTFE");
prov
}
}
impl fmt::Debug for CtfeProvenance {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME print "immutable" bit
self.alloc_id().fmt(f)
}
}
const IMMUTABLE_MASK: u64 = 1 << 63; // the highest bit
impl CtfeProvenance {
/// Returns the `AllocId` of this provenance.
#[inline(always)]
pub fn alloc_id(self) -> AllocId {
AllocId(NonZeroU64::new(self.0.get() & !IMMUTABLE_MASK).unwrap())
}
/// Returns whether this provenance is immutable.
#[inline]
pub fn immutable(self) -> bool {
self.0.get() & IMMUTABLE_MASK != 0
}
/// Returns an immutable version of this provenance.
#[inline]
pub fn as_immutable(self) -> Self {
CtfeProvenance(self.0 | IMMUTABLE_MASK)
}
}
impl Provenance for CtfeProvenance {
// With the `AllocId` as provenance, the `offset` is interpreted *relative to the allocation*,
// so ptr-to-int casts are not possible (since we do not know the global physical offset).
const OFFSET_IS_ADDR: bool = false;
fn fmt(ptr: &Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME print "immutable" bit
// Forward `alternate` flag to `alloc_id` printing.
if f.alternate() {
write!(f, "{:#?}", ptr.provenance.alloc_id())?;
} else {
write!(f, "{:?}", ptr.provenance.alloc_id())?;
}
// Print offset only if it is non-zero.
if ptr.offset.bytes() > 0 {
write!(f, "+{:#x}", ptr.offset.bytes())?;
}
Ok(())
}
fn get_alloc_id(self) -> Option<AllocId> {
Some(self.alloc_id())
}
fn join(_left: Option<Self>, _right: Option<Self>) -> Option<Self> {
panic!("merging provenance is not supported when `OFFSET_IS_ADDR` is false")
}
}
// We also need this impl so that one can debug-print `Pointer<AllocId>`
impl Provenance for AllocId {
// With the `AllocId` as provenance, the `offset` is interpreted *relative to the allocation*,
// so ptr-to-int casts are not possible (since we do not know the global physical offset).
@@ -174,7 +246,7 @@ impl Provenance for AllocId {
/// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
#[derive(Copy, Clone, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub struct Pointer<Prov = AllocId> {
pub struct Pointer<Prov = CtfeProvenance> {
pub(super) offset: Size, // kept private to avoid accidental misinterpretation (meaning depends on `Prov` type)
pub provenance: Prov,
}
@@ -182,7 +254,7 @@ pub struct Pointer<Prov = AllocId> {
static_assert_size!(Pointer, 16);
// `Option<Prov>` pointers are also passed around quite a bit
// (but not stored in permanent machine state).
static_assert_size!(Pointer<Option<AllocId>>, 16);
static_assert_size!(Pointer<Option<CtfeProvenance>>, 16);
// We want the `Debug` output to be readable as it is used by `derive(Debug)` for
// all the Miri types.
@ -215,7 +287,7 @@ impl<Prov: Provenance> fmt::Display for Pointer<Option<Prov>> {
impl From<AllocId> for Pointer {
#[inline(always)]
fn from(alloc_id: AllocId) -> Self {
Pointer::new(alloc_id, Size::ZERO)
Pointer::new(alloc_id.into(), Size::ZERO)
}
}

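Putting the pieces of the new type together, the packed representation round-trips as follows. A standalone restatement of the scheme above, with a stand-in `AllocId` and the same high-bit mask:

use std::num::NonZeroU64;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocId(NonZeroU64);

#[derive(Copy, Clone, Debug)]
struct CtfeProvenance(NonZeroU64);

const IMMUTABLE_MASK: u64 = 1 << 63; // the highest bit

impl CtfeProvenance {
    fn alloc_id(self) -> AllocId {
        // Safe to unwrap: real ids have the flag bit clear, so masking
        // leaves the original nonzero value intact.
        AllocId(NonZeroU64::new(self.0.get() & !IMMUTABLE_MASK).unwrap())
    }
    fn immutable(self) -> bool {
        self.0.get() & IMMUTABLE_MASK != 0
    }
    fn as_immutable(self) -> Self {
        CtfeProvenance(self.0 | IMMUTABLE_MASK) // `NonZeroU64 | u64` stays nonzero
    }
}

fn main() {
    let id = AllocId(NonZeroU64::new(42).unwrap());
    let prov = CtfeProvenance(id.0); // as in the `From<AllocId>` impl above
    assert!(!prov.immutable()); // plain ids have the bit clear
    let frozen = prov.as_immutable();
    assert!(frozen.immutable());
    assert_eq!(frozen.alloc_id(), id); // the flag never changes the target allocation
}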

@@ -11,7 +11,10 @@ use rustc_target::abi::{HasDataLayout, Size};
use crate::ty::ScalarInt;
use super::{AllocId, InterpResult, Pointer, PointerArithmetic, Provenance, ScalarSizeMismatch};
use super::{
AllocId, CtfeProvenance, InterpResult, Pointer, PointerArithmetic, Provenance,
ScalarSizeMismatch,
};
/// A `Scalar` represents an immediate, primitive value existing outside of a
/// `memory::Allocation`. It is in many ways like a small chunk of an `Allocation`, up to 16 bytes in
@@ -22,7 +25,7 @@ use super::{AllocId, InterpResult, Pointer, PointerArithmetic, Provenance, Scala
/// Do *not* match on a `Scalar`! Use the various `to_*` methods instead.
#[derive(Clone, Copy, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub enum Scalar<Prov = AllocId> {
pub enum Scalar<Prov = CtfeProvenance> {
/// The raw bytes of a simple value.
Int(ScalarInt),
@@ -267,6 +270,9 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
/// Will perform ptr-to-int casts if needed and possible.
/// If that fails, we know the offset is relative, so we return an "erased" Scalar
/// (which is useful for error messages but not much else).
///
/// The error type is `AllocId`, not `CtfeProvenance`, since `AllocId` is the "minimal"
/// component all provenance types must have.
#[inline]
pub fn try_to_int(self) -> Result<ScalarInt, Scalar<AllocId>> {
match self {

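Many hunks in this commit only change a *default* type parameter: plain `Scalar` and `Pointer` now mean the `CtfeProvenance` variants, which is why the error types earlier had to spell out `Scalar<AllocId>` and `Pointer<AllocId>` explicitly. A toy illustration of that shift, with stand-in types:

#[derive(Copy, Clone, Debug)]
struct AllocId(u64);
#[derive(Copy, Clone, Debug)]
struct CtfeProvenance(u64);

// The default provenance parameter changed from `AllocId` to `CtfeProvenance`.
#[allow(dead_code)]
#[derive(Debug)]
enum Scalar<Prov = CtfeProvenance> {
    Int(u128),
    Ptr(Prov, u8),
}

fn main() {
    // Plain `Scalar` now means `Scalar<CtfeProvenance>`...
    let a: Scalar = Scalar::Ptr(CtfeProvenance(7), 8);
    // ...so code that really wants a bare `AllocId` must spell it out.
    let b: Scalar<AllocId> = Scalar::Ptr(AllocId(7), 8);
    println!("{a:?} / {b:?}");
}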

@@ -1337,13 +1337,13 @@ pub fn write_allocations<'tcx>(
fn alloc_ids_from_alloc(
alloc: ConstAllocation<'_>,
) -> impl DoubleEndedIterator<Item = AllocId> + '_ {
alloc.inner().provenance().ptrs().values().copied()
alloc.inner().provenance().ptrs().values().map(|p| p.alloc_id())
}
fn alloc_ids_from_const_val(val: ConstValue<'_>) -> impl Iterator<Item = AllocId> + '_ {
match val {
ConstValue::Scalar(interpret::Scalar::Ptr(ptr, _)) => {
Either::Left(std::iter::once(ptr.provenance))
Either::Left(std::iter::once(ptr.provenance.alloc_id()))
}
ConstValue::Scalar(interpret::Scalar::Int { .. }) => Either::Right(std::iter::empty()),
ConstValue::ZeroSized => Either::Right(std::iter::empty()),


@@ -381,7 +381,7 @@ impl<'tcx> Operand<'tcx> {
impl<'tcx> ConstOperand<'tcx> {
pub fn check_static_ptr(&self, tcx: TyCtxt<'_>) -> Option<DefId> {
match self.const_.try_to_scalar() {
Some(Scalar::Ptr(ptr, _size)) => match tcx.global_alloc(ptr.provenance) {
Some(Scalar::Ptr(ptr, _size)) => match tcx.global_alloc(ptr.provenance.alloc_id()) {
GlobalAlloc::Static(def_id) => {
assert!(!tcx.is_thread_local_static(def_id));
Some(def_id)


@@ -264,6 +264,7 @@ trivial! {
rustc_middle::middle::stability::DeprecationEntry,
rustc_middle::mir::ConstQualifs,
rustc_middle::mir::interpret::AllocId,
rustc_middle::mir::interpret::CtfeProvenance,
rustc_middle::mir::interpret::ErrorHandled,
rustc_middle::mir::interpret::LitToConstError,
rustc_middle::thir::ExprId,


@@ -8,6 +8,7 @@
use crate::arena::ArenaAllocatable;
use crate::infer::canonical::{CanonicalVarInfo, CanonicalVarInfos};
use crate::mir::interpret::CtfeProvenance;
use crate::mir::{
self,
interpret::{AllocId, ConstAllocation},
@@ -164,6 +165,13 @@ impl<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>> Encodable<E> for AllocId {
}
}
impl<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>> Encodable<E> for CtfeProvenance {
fn encode(&self, e: &mut E) {
self.alloc_id().encode(e);
self.immutable().encode(e);
}
}
impl<'tcx, E: TyEncoder<I = TyCtxt<'tcx>>> Encodable<E> for ty::ParamEnv<'tcx> {
fn encode(&self, e: &mut E) {
self.caller_bounds().encode(e);
@@ -295,6 +303,15 @@ impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> Decodable<D> for AllocId {
}
}
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> Decodable<D> for CtfeProvenance {
fn decode(decoder: &mut D) -> Self {
let alloc_id: AllocId = Decodable::decode(decoder);
let prov = CtfeProvenance::from(alloc_id);
let immutable: bool = Decodable::decode(decoder);
if immutable { prov.as_immutable() } else { prov }
}
}
impl<'tcx, D: TyDecoder<I = TyCtxt<'tcx>>> Decodable<D> for ty::SymbolName<'tcx> {
fn decode(decoder: &mut D) -> Self {
ty::SymbolName::new(decoder.interner(), decoder.read_str())

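Serialization sidesteps the packed word entirely: encode the `AllocId` and the `immutable` flag as separate fields, then re-pack on decode. A self-contained mock of that round trip, using a plain byte buffer in place of rustc's `TyEncoder`/`TyDecoder`:

use std::num::NonZeroU64;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocId(NonZeroU64);
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct CtfeProvenance(NonZeroU64);

const IMMUTABLE_MASK: u64 = 1 << 63;

impl CtfeProvenance {
    fn alloc_id(self) -> AllocId {
        AllocId(NonZeroU64::new(self.0.get() & !IMMUTABLE_MASK).unwrap())
    }
    fn immutable(self) -> bool {
        self.0.get() & IMMUTABLE_MASK != 0
    }
    fn as_immutable(self) -> Self {
        CtfeProvenance(self.0 | IMMUTABLE_MASK)
    }
}

// Mirrors the `Encodable` impl above: the id first, then the flag.
fn encode(prov: CtfeProvenance, out: &mut Vec<u8>) {
    out.extend_from_slice(&prov.alloc_id().0.get().to_le_bytes());
    out.push(prov.immutable() as u8);
}

// Mirrors the `Decodable` impl above: rebuild the id, then reapply the flag.
fn decode(buf: &[u8]) -> CtfeProvenance {
    let id = u64::from_le_bytes(buf[..8].try_into().unwrap());
    let prov = CtfeProvenance(NonZeroU64::new(id).unwrap());
    if buf[8] != 0 { prov.as_immutable() } else { prov }
}

fn main() {
    let prov = CtfeProvenance(NonZeroU64::new(7).unwrap()).as_immutable();
    let mut buf = Vec::new();
    encode(prov, &mut buf);
    assert_eq!(decode(&buf), prov); // the immutable bit survives the round trip
}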

@@ -1,5 +1,5 @@
use crate::middle::resolve_bound_vars as rbv;
use crate::mir::interpret::{AllocId, ErrorHandled, LitToConstInput, Scalar};
use crate::mir::interpret::{ErrorHandled, LitToConstInput, Scalar};
use crate::ty::{self, GenericArgs, ParamEnv, ParamEnvAnd, Ty, TyCtxt, TypeVisitableExt};
use rustc_data_structures::intern::Interned;
use rustc_error_messages::MultiSpan;
@@ -413,7 +413,7 @@ impl<'tcx> Const<'tcx> {
}
#[inline]
pub fn try_to_scalar(self) -> Option<Scalar<AllocId>> {
pub fn try_to_scalar(self) -> Option<Scalar> {
self.try_to_valtree()?.try_to_scalar()
}


@@ -1,5 +1,5 @@
use super::ScalarInt;
use crate::mir::interpret::{AllocId, Scalar};
use crate::mir::interpret::Scalar;
use crate::ty::{self, Ty, TyCtxt};
use rustc_macros::{HashStable, TyDecodable, TyEncodable};
@@ -67,7 +67,7 @@ impl<'tcx> ValTree<'tcx> {
Self::Leaf(i)
}
pub fn try_to_scalar(self) -> Option<Scalar<AllocId>> {
pub fn try_to_scalar(self) -> Option<Scalar> {
self.try_to_scalar_int().map(Scalar::Int)
}


@@ -84,6 +84,14 @@ impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::AllocId {
}
}
// CtfeProvenance is an AllocId and a bool.
impl<'a> HashStable<StableHashingContext<'a>> for mir::interpret::CtfeProvenance {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
self.alloc_id().hash_stable(hcx, hasher);
self.immutable().hash_stable(hcx, hasher);
}
}
impl<'a> ToStableHashKey<StableHashingContext<'a>> for region::Scope {
type KeyType = region::Scope;


@@ -1410,14 +1410,14 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
) -> Result<(), PrintError> {
define_scoped_cx!(self);
let (alloc_id, offset) = ptr.into_parts();
let (prov, offset) = ptr.into_parts();
match ty.kind() {
// Byte strings (&[u8; N])
ty::Ref(_, inner, _) => {
if let ty::Array(elem, len) = inner.kind() {
if let ty::Uint(ty::UintTy::U8) = elem.kind() {
if let ty::ConstKind::Value(ty::ValTree::Leaf(int)) = len.kind() {
match self.tcx().try_get_global_alloc(alloc_id) {
match self.tcx().try_get_global_alloc(prov.alloc_id()) {
Some(GlobalAlloc::Memory(alloc)) => {
let len = int.assert_bits(self.tcx().data_layout.pointer_size);
let range =
@@ -1447,7 +1447,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
// FIXME: We should probably have a helper method to share code with the "Byte strings"
// printing above (which also has to handle pointers to all sorts of things).
if let Some(GlobalAlloc::Function(instance)) =
self.tcx().try_get_global_alloc(alloc_id)
self.tcx().try_get_global_alloc(prov.alloc_id())
{
self.typed_value(
|this| this.print_value_path(instance.def_id(), instance.args),


@@ -440,8 +440,9 @@ TrivialTypeTraversalAndLiftImpls! {
crate::ty::ClosureKind,
crate::ty::ParamConst,
crate::ty::ParamTy,
interpret::Scalar,
interpret::AllocId,
interpret::CtfeProvenance,
interpret::Scalar,
rustc_target::abi::Size,
}


@@ -240,10 +240,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
}
#[inline(always)]
fn expose_ptr(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_ptr: Pointer<AllocId>,
) -> InterpResult<'tcx> {
fn expose_ptr(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx> {
throw_machine_stop_str!("exposing pointers isn't supported in ConstProp")
}


@@ -388,7 +388,9 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
self.ecx.copy_op(op, &field_dest, /*allow_transmute*/ false).ok()?;
}
self.ecx.write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest).ok()?;
self.ecx.alloc_mark_immutable(dest.ptr().provenance.unwrap()).ok()?;
self.ecx
.alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
.ok()?;
dest.into()
} else {
return None;
@@ -928,7 +930,8 @@ fn op_to_prop_const<'tcx>(
}
let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
let (alloc_id, offset) = pointer.into_parts();
let (prov, offset) = pointer.into_parts();
let alloc_id = prov.alloc_id();
intern_const_alloc_for_constprop(ecx, alloc_id).ok()?;
if matches!(ecx.tcx.global_alloc(alloc_id), GlobalAlloc::Memory(_)) {
// `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything


@@ -385,8 +385,8 @@ fn collect_items_rec<'tcx>(
recursion_depth_reset = None;
if let Ok(alloc) = tcx.eval_static_initializer(def_id) {
for &id in alloc.inner().provenance().ptrs().values() {
collect_alloc(tcx, id, &mut used_items);
for &prov in alloc.inner().provenance().ptrs().values() {
collect_alloc(tcx, prov.alloc_id(), &mut used_items);
}
}
@@ -1363,9 +1363,9 @@ fn collect_alloc<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIt
}
GlobalAlloc::Memory(alloc) => {
trace!("collecting {:?} with {:#?}", alloc_id, alloc);
for &inner in alloc.inner().provenance().ptrs().values() {
for &prov in alloc.inner().provenance().ptrs().values() {
rustc_data_structures::stack::ensure_sufficient_stack(|| {
collect_alloc(tcx, inner, output);
collect_alloc(tcx, prov.alloc_id(), output);
});
}
}
@@ -1440,12 +1440,12 @@ fn collect_const_value<'tcx>(
) {
match value {
mir::ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => {
collect_alloc(tcx, ptr.provenance, output)
collect_alloc(tcx, ptr.provenance.alloc_id(), output)
}
mir::ConstValue::Indirect { alloc_id, .. } => collect_alloc(tcx, alloc_id, output),
mir::ConstValue::Slice { data, meta: _ } => {
for &id in data.inner().provenance().ptrs().values() {
collect_alloc(tcx, id, output);
for &prov in data.inner().provenance().ptrs().values() {
collect_alloc(tcx, prov.alloc_id(), output);
}
}
_ => {}


@@ -47,7 +47,7 @@ pub fn new_allocation<'tcx>(
}
ConstValue::Slice { data, meta } => {
let alloc_id = tables.tcx.reserve_and_set_memory_alloc(data);
let ptr = Pointer::new(alloc_id, rustc_target::abi::Size::ZERO);
let ptr = Pointer::new(alloc_id.into(), rustc_target::abi::Size::ZERO);
let scalar_ptr = rustc_middle::mir::interpret::Scalar::from_pointer(ptr, &tables.tcx);
let scalar_meta =
rustc_middle::mir::interpret::Scalar::from_target_usize(meta, &tables.tcx);
@@ -112,7 +112,10 @@ pub(super) fn allocation_filter<'tcx>(
.iter()
.filter(|a| a.0 >= alloc_range.start && a.0 <= alloc_range.end())
{
ptrs.push((offset.bytes_usize() - alloc_range.start.bytes_usize(), tables.prov(*prov)));
ptrs.push((
offset.bytes_usize() - alloc_range.start.bytes_usize(),
tables.prov(prov.alloc_id()),
));
}
Allocation {
bytes: bytes,


@@ -44,7 +44,7 @@ pub enum TerminationInfo {
},
DataRace {
involves_non_atomic: bool,
ptr: Pointer,
ptr: Pointer<AllocId>,
op1: RacingOp,
op2: RacingOp,
extra: Option<&'static str>,


@@ -256,12 +256,13 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Convert a relative (tcx) pointer to a Miri pointer.
fn ptr_from_rel_ptr(
&self,
ptr: Pointer<AllocId>,
ptr: Pointer<CtfeProvenance>,
tag: BorTag,
) -> InterpResult<'tcx, Pointer<Provenance>> {
let ecx = self.eval_context_ref();
let (alloc_id, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
let (prov, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
let alloc_id = prov.alloc_id();
let base_addr = ecx.addr_from_alloc_id(alloc_id)?;
// Add offset with the right kind of pointer-overflowing arithmetic.


@@ -1175,11 +1175,11 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for MiriMachine<'mir, 'tcx> {
fn adjust_alloc_base_pointer(
ecx: &MiriInterpCx<'mir, 'tcx>,
ptr: Pointer<AllocId>,
ptr: Pointer<CtfeProvenance>,
) -> InterpResult<'tcx, Pointer<Provenance>> {
let alloc_id = ptr.provenance.alloc_id();
if cfg!(debug_assertions) {
// The machine promises to never call us on thread-local or extern statics.
let alloc_id = ptr.provenance;
match ecx.tcx.try_get_global_alloc(alloc_id) {
Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
panic!("adjust_alloc_base_pointer called on thread-local static")
@@ -1190,8 +1190,9 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for MiriMachine<'mir, 'tcx> {
_ => {}
}
}
// FIXME: can we somehow preserve the immutability of `ptr`?
let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
borrow_tracker.borrow_mut().base_ptr_tag(ptr.provenance, &ecx.machine)
borrow_tracker.borrow_mut().base_ptr_tag(alloc_id, &ecx.machine)
} else {
// Value does not matter, SB is disabled
BorTag::default()