use undef for uninitialized bytes in constants

Erik Desjardins 2021-03-31 00:06:01 -04:00
parent 7b0e554ee2
commit 4d635fdf63
7 changed files with 202 additions and 21 deletions

compiler/rustc_codegen_llvm/src/consts.rs

@@ -11,7 +11,8 @@ use rustc_codegen_ssa::traits::*;
use rustc_hir::def_id::DefId;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
use rustc_middle::mir::interpret::{
-read_target_uint, Allocation, ErrorHandled, GlobalAlloc, Pointer, Scalar as InterpScalar,
+read_target_uint, Allocation, ErrorHandled, GlobalAlloc, InitChunk, Pointer,
+Scalar as InterpScalar,
};
use rustc_middle::mir::mono::MonoItem;
use rustc_middle::ty::{self, Instance, Ty};
@@ -19,6 +20,7 @@ use rustc_middle::{bug, span_bug};
use rustc_target::abi::{
AddressSpace, Align, HasDataLayout, LayoutOf, Primitive, Scalar, Size, WrappingRange,
};
use std::ops::Range;
use tracing::debug;
pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll Value {
@@ -26,6 +28,53 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
let dl = cx.data_layout();
let pointer_size = dl.pointer_size.bytes() as usize;
// Note: this function may call `inspect_with_uninit_and_ptr_outside_interpreter`,
// so `range` must be within the bounds of `alloc` and not within a relocation.
fn append_chunks_of_init_and_uninit_bytes<'ll, 'a, 'b>(
llvals: &mut Vec<&'ll Value>,
cx: &'a CodegenCx<'ll, 'b>,
alloc: &'a Allocation,
range: Range<usize>,
) {
/// Allocations larger than this will only be codegen'd as entirely initialized or entirely undef.
/// This avoids compile time regressions when an alloc would have many chunks,
/// e.g. for `[(u64, u8); N]`, which has undef padding in each element.
const MAX_PARTIALLY_UNDEF_SIZE: usize = 1024;
let mut chunks = alloc
.init_mask()
.range_as_init_chunks(Size::from_bytes(range.start), Size::from_bytes(range.end));
let chunk_to_llval = move |chunk| match chunk {
InitChunk::Init(range) => {
let range = (range.start.bytes() as usize)..(range.end.bytes() as usize);
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
cx.const_bytes(bytes)
}
InitChunk::Uninit(range) => {
let len = range.end.bytes() - range.start.bytes();
cx.const_undef(cx.type_array(cx.type_i8(), len))
}
};
if range.len() > MAX_PARTIALLY_UNDEF_SIZE {
let llval = match (chunks.next(), chunks.next()) {
(Some(chunk), None) => {
// exactly one chunk, either fully init or fully uninit
chunk_to_llval(chunk)
}
_ => {
// partially uninit
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
cx.const_bytes(bytes)
}
};
llvals.push(llval);
} else {
llvals.extend(chunks.map(chunk_to_llval));
}
}
let mut next_offset = 0;
for &(offset, alloc_id) in alloc.relocations().iter() {
let offset = offset.bytes();
@@ -34,12 +83,8 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
if offset > next_offset {
// This `inspect` is okay since we have checked that it is not within a relocation, it
// is within the bounds of the allocation, and it doesn't affect interpreter execution
-// (we inspect the result after interpreter execution). Any undef byte is replaced with
-// some arbitrary byte value.
-//
-// FIXME: relay undef bytes to codegen as undef const bytes
-let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(next_offset..offset);
-llvals.push(cx.const_bytes(bytes));
+// (we inspect the result after interpreter execution).
+append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, next_offset..offset);
}
let ptr_offset = read_target_uint(
dl.endian,
@@ -70,12 +115,8 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
let range = next_offset..alloc.len();
// This `inspect` is okay since we have checked that it is after all relocations, it is
// within the bounds of the allocation, and it doesn't affect interpreter execution (we
-// inspect the result after interpreter execution). Any undef byte is replaced with some
-// arbitrary byte value.
-//
-// FIXME: relay undef bytes to codegen as undef const bytes
-let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(range);
-llvals.push(cx.const_bytes(bytes));
+// inspect the result after interpreter execution).
+append_chunks_of_init_and_uninit_bytes(&mut llvals, cx, alloc, range);
}
cx.const_struct(&llvals, true)
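
A minimal standalone sketch of the same chunk-splitting logic, using a plain Vec<bool> in place of rustc's InitMask; the names `Chunk` and `chunks_of` are illustrative only, not rustc APIs:

use std::ops::Range;

#[derive(Debug, PartialEq)]
enum Chunk {
    Init(Range<usize>),
    Uninit(Range<usize>),
}

/// Split a byte-level init mask into maximal runs of identical bits,
/// mirroring what `InitChunkIter` does over an `InitMask`.
fn chunks_of(mask: &[bool]) -> Vec<Chunk> {
    let mut out = Vec::new();
    let mut start = 0;
    while start < mask.len() {
        let is_init = mask[start];
        // Find where the current run ends, as in `InitChunkIter::next`.
        let end = (start..mask.len()).find(|&i| mask[i] != is_init).unwrap_or(mask.len());
        out.push(if is_init { Chunk::Init(start..end) } else { Chunk::Uninit(start..end) });
        start = end;
    }
    out
}

fn main() {
    // Two elements of a `[(u64, u8); N]`-style layout: each element is 9
    // initialized bytes followed by 7 bytes of undef padding, so every
    // element contributes one Init and one Uninit chunk. This is why huge
    // arrays of such pairs are cut off by MAX_PARTIALLY_UNDEF_SIZE above.
    let mut mask = Vec::new();
    for _ in 0..2 {
        mask.extend(std::iter::repeat(true).take(9));
        mask.extend(std::iter::repeat(false).take(7));
    }
    assert_eq!(
        chunks_of(&mask),
        vec![
            Chunk::Init(0..9),
            Chunk::Uninit(9..16),
            Chunk::Init(16..25),
            Chunk::Uninit(25..32),
        ]
    );
}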

compiler/rustc_middle/src/mir/interpret/allocation.rs

@@ -721,20 +721,24 @@ impl InitMask {
}
// FIXME(oli-obk): optimize this for allocations larger than a block.
-let idx = (start.bytes()..end.bytes()).map(Size::from_bytes).find(|&i| !self.get(i));
+let idx = (start..end).find(|&i| !self.get(i));
match idx {
Some(idx) => {
-let uninit_end = (idx.bytes()..end.bytes())
-.map(Size::from_bytes)
-.find(|&i| self.get(i))
-.unwrap_or(end);
+let uninit_end = (idx..end).find(|&i| self.get(i)).unwrap_or(end);
Err(idx..uninit_end)
}
None => Ok(()),
}
}
/// Returns an iterator, yielding a range of byte indexes for each contiguous region
/// of initialized or uninitialized bytes inside the range `start..end` (end-exclusive).
#[inline]
pub fn range_as_init_chunks(&self, start: Size, end: Size) -> InitChunkIter<'_> {
InitChunkIter::new(self, start, end)
}
pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
let len = self.len;
if end > len {
@@ -827,6 +831,49 @@ impl InitMask {
}
}
/// Yields [`InitChunk`]s. See [`InitMask::range_as_init_chunks`].
pub struct InitChunkIter<'a> {
init_mask: &'a InitMask,
/// The current byte index into `init_mask`.
start: Size,
/// The end byte index into `init_mask`.
end: Size,
}
/// A contiguous chunk of initialized or uninitialized memory.
pub enum InitChunk {
Init(Range<Size>),
Uninit(Range<Size>),
}
impl<'a> InitChunkIter<'a> {
fn new(init_mask: &'a InitMask, start: Size, end: Size) -> Self {
assert!(start <= end);
assert!(end <= init_mask.len);
Self { init_mask, start, end }
}
}
impl<'a> Iterator for InitChunkIter<'a> {
type Item = InitChunk;
fn next(&mut self) -> Option<Self::Item> {
if self.start >= self.end {
return None;
}
let is_init = self.init_mask.get(self.start);
// FIXME(oli-obk): optimize this for allocations larger than a block.
let end_of_chunk =
(self.start..self.end).find(|&i| self.init_mask.get(i) != is_init).unwrap_or(self.end);
let range = self.start..end_of_chunk;
self.start = end_of_chunk;
Some(if is_init { InitChunk::Init(range) } else { InitChunk::Uninit(range) })
}
}
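
As a usage sketch: assuming InitMask::new(size, state) constructs a mask of the given size with every byte's init bit set to `state` (as it is used elsewhere in this module), the iterator tiles the requested range with alternating, non-overlapping chunks:

// Hypothetical illustration, not part of this commit:
let mut mask = InitMask::new(Size::from_bytes(8), true);
// Mark bytes 4..6 as uninitialized.
mask.set_range(Size::from_bytes(4), Size::from_bytes(6), false);
// Yields Init(0..4), Uninit(4..6), Init(6..8), which together cover
// exactly the requested range.
let chunks: Vec<InitChunk> = mask.range_as_init_chunks(Size::ZERO, Size::from_bytes(8)).collect();
assert_eq!(chunks.len(), 3);
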
#[inline]
fn bit_index(bits: Size) -> (usize, usize) {
let bits = bits.bytes();

compiler/rustc_middle/src/mir/interpret/mod.rs

@@ -125,7 +125,9 @@ pub use self::error::{
pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar, ScalarMaybeUninit};
-pub use self::allocation::{alloc_range, AllocRange, Allocation, InitMask, Relocations};
+pub use self::allocation::{
+alloc_range, AllocRange, Allocation, InitChunk, InitChunkIter, InitMask, Relocations,
+};
pub use self::pointer::{Pointer, PointerArithmetic, Provenance};

compiler/rustc_target/src/abi/mod.rs

@@ -5,6 +5,7 @@ use crate::spec::Target;
use std::convert::{TryFrom, TryInto};
use std::fmt;
use std::iter::Step;
use std::num::NonZeroUsize;
use std::ops::{Add, AddAssign, Deref, Mul, Range, RangeInclusive, Sub};
use std::str::FromStr;
@@ -440,6 +441,43 @@ impl AddAssign for Size {
}
}
impl Step for Size {
#[inline]
fn steps_between(start: &Self, end: &Self) -> Option<usize> {
u64::steps_between(&start.bytes(), &end.bytes())
}
#[inline]
fn forward_checked(start: Self, count: usize) -> Option<Self> {
u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
}
#[inline]
fn forward(start: Self, count: usize) -> Self {
Self::from_bytes(u64::forward(start.bytes(), count))
}
#[inline]
unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
Self::from_bytes(u64::forward_unchecked(start.bytes(), count))
}
#[inline]
fn backward_checked(start: Self, count: usize) -> Option<Self> {
u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
}
#[inline]
fn backward(start: Self, count: usize) -> Self {
Self::from_bytes(u64::backward(start.bytes(), count))
}
#[inline]
unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
Self::from_bytes(u64::backward_unchecked(start.bytes(), count))
}
}
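
Implementing Step (an unstable trait, hence the feature gates added to rustc_target's lib.rs below) is what makes Range<Size> directly iterable as an Iterator<Item = Size>; that is what the (start..end).find(...) simplifications in allocation.rs rely on. A small sketch of what this unlocks:

// Inside rustc_target, a Size range now iterates without any
// `.bytes()`/`Size::from_bytes` round-trip at the call site:
let sum: u64 = (Size::from_bytes(0)..Size::from_bytes(4)).map(|s| s.bytes()).sum();
assert_eq!(sum, 0 + 1 + 2 + 3);
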
/// Alignment of a type in bytes (always a power of two).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]

compiler/rustc_target/src/lib.rs

@@ -14,6 +14,8 @@
#![feature(associated_type_bounds)]
#![feature(exhaustive_patterns)]
#![feature(min_specialization)]
#![feature(step_trait)]
#![feature(unchecked_math)]
use std::path::{Path, PathBuf};

src/test/codegen/consts.rs

@@ -43,7 +43,7 @@ pub fn inline_enum_const() -> E<i8, i16> {
#[no_mangle]
pub fn low_align_const() -> E<i16, [i16; 3]> {
// Check that low_align_const and high_align_const use the same constant
-// CHECK: memcpy.p0i8.p0i8.i{{(32|64)}}(i8* align 2 %1, i8* align 2 getelementptr inbounds (<{ [8 x i8] }>, <{ [8 x i8] }>* [[LOW_HIGH]], i32 0, i32 0, i32 0), i{{(32|64)}} 8, i1 false)
+// CHECK: memcpy.p0i8.p0i8.i{{(32|64)}}(i8* align 2 %1, i8* align 2 getelementptr inbounds (<{ [4 x i8], [4 x i8] }>, <{ [4 x i8], [4 x i8] }>* [[LOW_HIGH]], i32 0, i32 0, i32 0), i{{(32|64)}} 8, i1 false)
*&E::A(0)
}
@@ -51,6 +51,6 @@ pub fn low_align_const() -> E<i16, [i16; 3]> {
#[no_mangle]
pub fn high_align_const() -> E<i16, i32> {
// Check that low_align_const and high_align_const use the same constant
-// CHECK: memcpy.p0i8.p0i8.i{{(32|64)}}(i8* align 4 %1, i8* align 4 getelementptr inbounds (<{ [8 x i8] }>, <{ [8 x i8] }>* [[LOW_HIGH]], i32 0, i32 0, i32 0), i{{(32|64)}} 8, i1 false)
+// CHECK: memcpy.p0i8.p0i8.i{{(32|64)}}(i8* align 4 %1, i8* align 4 getelementptr inbounds (<{ [4 x i8], [4 x i8] }>, <{ [4 x i8], [4 x i8] }>* [[LOW_HIGH]], i32 0, i32 0, i32 0), i{{(32|64)}} 8, i1 false)
*&E::A(0)
}

src/test/codegen/uninit-consts.rs

@@ -0,0 +1,51 @@
// compile-flags: -C no-prepopulate-passes
// Check that we use undef (and not zero) for uninitialized bytes in constants.
#![crate_type = "lib"]
use std::mem::MaybeUninit;
pub struct PartiallyUninit {
x: u32,
y: MaybeUninit<[u8; 10]>
}
// CHECK: [[FULLY_UNINIT:@[0-9]+]] = private unnamed_addr constant <{ [10 x i8] }> undef
// CHECK: [[PARTIALLY_UNINIT:@[0-9]+]] = private unnamed_addr constant <{ [4 x i8], [12 x i8] }> <{ [4 x i8] c"\EF\BE\AD\DE", [12 x i8] undef }>, align 4
// CHECK: [[FULLY_UNINIT_HUGE:@[0-9]+]] = private unnamed_addr constant <{ [16384 x i8] }> undef
// This shouldn't contain undef, since generating huge partially undef constants is expensive.
// CHECK: [[UNINIT_PADDING_HUGE:@[0-9]+]] = private unnamed_addr constant <{ [32768 x i8] }> <{ [32768 x i8] c"{{.+}}" }>, align 4
// CHECK-LABEL: @fully_uninit
#[no_mangle]
pub const fn fully_uninit() -> MaybeUninit<[u8; 10]> {
const M: MaybeUninit<[u8; 10]> = MaybeUninit::uninit();
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i{{(32|64)}}(i8* align 1 %1, i8* align 1 getelementptr inbounds (<{ [10 x i8] }>, <{ [10 x i8] }>* [[FULLY_UNINIT]], i32 0, i32 0, i32 0), i{{(32|64)}} 10, i1 false)
M
}
// CHECK-LABEL: @partially_uninit
#[no_mangle]
pub const fn partially_uninit() -> PartiallyUninit {
const X: PartiallyUninit = PartiallyUninit { x: 0xdeadbeef, y: MaybeUninit::uninit() };
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i{{(32|64)}}(i8* align 4 %1, i8* align 4 getelementptr inbounds (<{ [4 x i8], [12 x i8] }>, <{ [4 x i8], [12 x i8] }>* [[PARTIALLY_UNINIT]], i32 0, i32 0, i32 0), i{{(32|64)}} 16, i1 false)
X
}
// CHECK-LABEL: @fully_uninit_huge
#[no_mangle]
pub const fn fully_uninit_huge() -> MaybeUninit<[u32; 4096]> {
const F: MaybeUninit<[u32; 4096]> = MaybeUninit::uninit();
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i{{(32|64)}}(i8* align 4 %1, i8* align 4 getelementptr inbounds (<{ [16384 x i8] }>, <{ [16384 x i8] }>* [[FULLY_UNINIT_HUGE]], i32 0, i32 0, i32 0), i{{(32|64)}} 16384, i1 false)
F
}
// CHECK-LABEL: @uninit_padding_huge
#[no_mangle]
pub const fn uninit_padding_huge() -> [(u32, u8); 4096] {
const X: [(u32, u8); 4096] = [(123, 45); 4096];
// CHECK: call void @llvm.memcpy.p0i8.p0i8.i{{(32|64)}}(i8* align 4 %1, i8* align 4 getelementptr inbounds (<{ [32768 x i8] }>, <{ [32768 x i8] }>* [[UNINIT_PADDING_HUGE]], i32 0, i32 0, i32 0), i{{(32|64)}} 32768, i1 false)
X
}