Auto merge of #2548 - RalfJung:remove-tls-diagnostics-hack, r=RalfJung

avoid thread-local var indirection for non-halting diagnostics

This hack (routing non-halting diagnostics through a thread-local variable) used to be necessary because Stacked Borrows did not have access to enough parts of the machine. That got fixed a while ago, so now we can just emit diagnostics directly, which is a lot more reliable.
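To make the shape of the change concrete, here is a minimal before/after sketch of the two emission paths, using simplified stand-in types rather than Miri's actual `NonHaltingDiagnostic`, `MiriMachine`, and `report_msg` machinery: the old path pushes diagnostics into a thread-local buffer that the interpreter loop drains after each step, while the new path hands them straight to the machine, which now carries enough state to build the report on the spot.

```rust
use std::cell::RefCell;

// Simplified stand-ins for Miri's real types (for illustration only).
#[derive(Debug)]
enum NonHaltingDiagnostic {
    Int2Ptr { details: bool },
    FreedAlloc(u64),
}

struct Machine {
    // In Miri, the machine now carries `tcx` and the thread stacks, so it can
    // generate a stacktrace and emit the report immediately.
    name: &'static str,
}

// Old pattern: indirection through a thread-local buffer, drained later.
thread_local! {
    static DIAGNOSTICS: RefCell<Vec<NonHaltingDiagnostic>> = RefCell::new(Vec::new());
}

fn register_diagnostic(e: NonHaltingDiagnostic) {
    DIAGNOSTICS.with(|d| d.borrow_mut().push(e));
}

fn process_diagnostics(machine: &Machine) {
    DIAGNOSTICS.with(|d| {
        for e in d.borrow_mut().drain(..) {
            println!("[{}] (deferred) {:?}", machine.name, e);
        }
    });
}

// New pattern: the machine emits the diagnostic on the spot.
impl Machine {
    fn emit_diagnostic(&self, e: NonHaltingDiagnostic) {
        println!("[{}] {:?}", self.name, e);
    }
}

fn main() {
    let machine = Machine { name: "miri" };

    // Before: two steps, and correctness depends on the interpreter loop
    // remembering to call `process_diagnostics` with an up-to-date stack.
    register_diagnostic(NonHaltingDiagnostic::FreedAlloc(42));
    process_diagnostics(&machine);

    // After: one call, no thread-local state to keep in sync.
    machine.emit_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: true });
}
```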

Needs https://github.com/rust-lang/rust/pull/101985
Fixes https://github.com/rust-lang/miri/issues/2538
bors 2022-09-20 06:42:05 +00:00
commit d9ad25ee4b
47 changed files with 359 additions and 407 deletions

View File

@ -1 +1 @@
2019147c5642c08cdb9ad4cacd97dd1fa4ffa701
acb8934fd57b3c2740c4abac0a5728c2c9b1423b

View File

@ -438,8 +438,8 @@ impl MemoryCellClocks {
}
/// Evaluation context extensions.
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriInterpCxExt<'mir, 'tcx> {
/// Atomic variant of read_scalar_at_offset.
fn read_scalar_at_offset_atomic(
&self,
@ -940,8 +940,8 @@ impl VClockAlloc {
}
}
impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {}
trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for MiriInterpCx<'mir, 'tcx> {}
trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriInterpCxExt<'mir, 'tcx> {
/// Temporarily allow data-races to occur. This should only be used in
/// one of these cases:
/// - One of the appropriate `validate_atomic` functions will be called to
@ -950,7 +950,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
/// cannot be accessed by the interpreted program.
/// - Execution of the interpreted program has halted.
#[inline]
fn allow_data_races_ref<R>(&self, op: impl FnOnce(&MiriEvalContext<'mir, 'tcx>) -> R) -> R {
fn allow_data_races_ref<R>(&self, op: impl FnOnce(&MiriInterpCx<'mir, 'tcx>) -> R) -> R {
let this = self.eval_context_ref();
if let Some(data_race) = &this.machine.data_race {
let old = data_race.ongoing_action_data_race_free.replace(true);
@ -969,7 +969,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
#[inline]
fn allow_data_races_mut<R>(
&mut self,
op: impl FnOnce(&mut MiriEvalContext<'mir, 'tcx>) -> R,
op: impl FnOnce(&mut MiriInterpCx<'mir, 'tcx>) -> R,
) -> R {
let this = self.eval_context_mut();
if let Some(data_race) = &this.machine.data_race {

View File

@ -159,8 +159,8 @@ pub(crate) struct SynchronizationState {
}
// Private extension trait for local helper methods
impl<'mir, 'tcx: 'mir> EvalContextExtPriv<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
trait EvalContextExtPriv<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExtPriv<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
trait EvalContextExtPriv<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Take a reader out of the queue waiting for the lock.
/// Returns `true` if some thread got the rwlock.
#[inline]
@ -208,8 +208,8 @@ trait EvalContextExtPriv<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
// cases, the function calls are infallible and it is the client's (shim
// implementation's) responsibility to detect and deal with erroneous
// situations.
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
#[inline]
/// Create state for a new mutex.
fn mutex_create(&mut self) -> MutexId {
@ -222,7 +222,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
/// otherwise returns the value from the closure
fn mutex_get_or_create<F>(&mut self, existing: F) -> InterpResult<'tcx, MutexId>
where
F: FnOnce(&mut MiriEvalContext<'mir, 'tcx>, MutexId) -> InterpResult<'tcx, Option<MutexId>>,
F: FnOnce(&mut MiriInterpCx<'mir, 'tcx>, MutexId) -> InterpResult<'tcx, Option<MutexId>>,
{
let this = self.eval_context_mut();
let next_index = this.machine.threads.sync.mutexes.next_index();
@ -322,10 +322,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
/// otherwise returns the value from the closure
fn rwlock_get_or_create<F>(&mut self, existing: F) -> InterpResult<'tcx, RwLockId>
where
F: FnOnce(
&mut MiriEvalContext<'mir, 'tcx>,
RwLockId,
) -> InterpResult<'tcx, Option<RwLockId>>,
F: FnOnce(&mut MiriInterpCx<'mir, 'tcx>, RwLockId) -> InterpResult<'tcx, Option<RwLockId>>,
{
let this = self.eval_context_mut();
let next_index = this.machine.threads.sync.rwlocks.next_index();
@ -492,7 +489,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
fn condvar_get_or_create<F>(&mut self, existing: F) -> InterpResult<'tcx, CondvarId>
where
F: FnOnce(
&mut MiriEvalContext<'mir, 'tcx>,
&mut MiriInterpCx<'mir, 'tcx>,
CondvarId,
) -> InterpResult<'tcx, Option<CondvarId>>,
{

View File

@ -32,8 +32,9 @@ pub enum SchedulingAction {
/// Timeout callbacks can be created by synchronization primitives to tell the
/// scheduler that they should be called once some period of time passes.
type TimeoutCallback<'mir, 'tcx> =
Box<dyn FnOnce(&mut InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>) -> InterpResult<'tcx> + 'tcx>;
type TimeoutCallback<'mir, 'tcx> = Box<
dyn FnOnce(&mut InterpCx<'mir, 'tcx, MiriMachine<'mir, 'tcx>>) -> InterpResult<'tcx> + 'tcx,
>;
/// A thread identifier.
#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq, Hash)]
@ -253,7 +254,7 @@ impl<'mir, 'tcx> Default for ThreadManager<'mir, 'tcx> {
}
impl<'mir, 'tcx: 'mir> ThreadManager<'mir, 'tcx> {
pub(crate) fn init(ecx: &mut MiriEvalContext<'mir, 'tcx>) {
pub(crate) fn init(ecx: &mut MiriInterpCx<'mir, 'tcx>) {
if ecx.tcx.sess.target.os.as_ref() != "windows" {
// The main thread can *not* be joined on, except on Windows.
ecx.machine.threads.threads[ThreadId::new(0)].join_status = ThreadJoinStatus::Detached;
@ -628,8 +629,8 @@ impl<'mir, 'tcx: 'mir> ThreadManager<'mir, 'tcx> {
}
// Public interface to thread management.
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Get a thread-specific allocation id for the given thread-local static.
/// If needed, allocate a new one.
fn get_or_create_thread_local_alloc(

View File

@ -456,9 +456,9 @@ impl StoreElement {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub(super) trait EvalContextExt<'mir, 'tcx: 'mir>:
crate::MiriEvalContextExt<'mir, 'tcx>
crate::MiriInterpCxExt<'mir, 'tcx>
{
// If weak memory emulation is enabled, check if this atomic op imperfectly overlaps with a previous
// atomic read or write. If it does, then we require it to be ordered (non-racy) with all previous atomic
@ -502,7 +502,7 @@ pub(super) trait EvalContextExt<'mir, 'tcx: 'mir>:
let (alloc_id, base_offset, ..) = this.ptr_get_alloc_id(place.ptr)?;
if let (
crate::AllocExtra { weak_memory: Some(alloc_buffers), .. },
crate::Evaluator { data_race: Some(global), threads, .. },
crate::MiriMachine { data_race: Some(global), threads, .. },
) = this.get_alloc_extra_mut(alloc_id)?
{
if atomic == AtomicRwOrd::SeqCst {
@ -544,7 +544,7 @@ pub(super) trait EvalContextExt<'mir, 'tcx: 'mir>:
validate,
)?;
if global.track_outdated_loads && recency == LoadRecency::Outdated {
register_diagnostic(NonHaltingDiagnostic::WeakMemoryOutdatedLoad);
this.emit_diagnostic(NonHaltingDiagnostic::WeakMemoryOutdatedLoad);
}
return Ok(loaded);
@ -567,7 +567,7 @@ pub(super) trait EvalContextExt<'mir, 'tcx: 'mir>:
let (alloc_id, base_offset, ..) = this.ptr_get_alloc_id(dest.ptr)?;
if let (
crate::AllocExtra { weak_memory: Some(alloc_buffers), .. },
crate::Evaluator { data_race: Some(global), threads, .. },
crate::MiriMachine { data_race: Some(global), threads, .. },
) = this.get_alloc_extra_mut(alloc_id)?
{
if atomic == AtomicWriteOrd::SeqCst {

View File

@ -1,11 +1,9 @@
use std::cell::RefCell;
use std::fmt;
use std::num::NonZeroU64;
use log::trace;
use rustc_middle::ty;
use rustc_span::{source_map::DUMMY_SP, Span, SpanData, Symbol};
use rustc_span::{source_map::DUMMY_SP, SpanData, Symbol};
use rustc_target::abi::{Align, Size};
use crate::stacked_borrows::{diagnostics::TagHistory, AccessKind};
@ -89,15 +87,15 @@ enum DiagLevel {
/// Attempts to prune a stacktrace to omit the Rust runtime, and returns a bool indicating if any
/// frames were pruned. If the stacktrace does not have any local frames, we conclude that it must
/// be pointing to a problem in the Rust runtime itself, and do not prune it at all.
fn prune_stacktrace<'mir, 'tcx>(
ecx: &InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>,
fn prune_stacktrace<'tcx>(
mut stacktrace: Vec<FrameInfo<'tcx>>,
machine: &MiriMachine<'_, 'tcx>,
) -> (Vec<FrameInfo<'tcx>>, bool) {
match ecx.machine.backtrace_style {
match machine.backtrace_style {
BacktraceStyle::Off => {
// Remove all frames marked with `caller_location` -- that attribute indicates we
// usually want to point at the caller, not them.
stacktrace.retain(|frame| !frame.instance.def.requires_caller_location(*ecx.tcx));
stacktrace.retain(|frame| !frame.instance.def.requires_caller_location(machine.tcx));
// Retain one frame so that we can print a span for the error itself
stacktrace.truncate(1);
(stacktrace, false)
@ -107,11 +105,12 @@ fn prune_stacktrace<'mir, 'tcx>(
// Only prune frames if there is at least one local frame. This check ensures that if
// we get a backtrace that never makes it to the user code because it has detected a
// bug in the Rust runtime, we don't prune away every frame.
let has_local_frame = stacktrace.iter().any(|frame| ecx.machine.is_local(frame));
let has_local_frame = stacktrace.iter().any(|frame| machine.is_local(frame));
if has_local_frame {
// Remove all frames marked with `caller_location` -- that attribute indicates we
// usually want to point at the caller, not them.
stacktrace.retain(|frame| !frame.instance.def.requires_caller_location(*ecx.tcx));
stacktrace
.retain(|frame| !frame.instance.def.requires_caller_location(machine.tcx));
// This is part of the logic that `std` uses to select the relevant part of a
// backtrace. But here, we only look for __rust_begin_short_backtrace, not
@ -121,7 +120,7 @@ fn prune_stacktrace<'mir, 'tcx>(
.into_iter()
.take_while(|frame| {
let def_id = frame.instance.def_id();
let path = ecx.tcx.tcx.def_path_str(def_id);
let path = machine.tcx.def_path_str(def_id);
!path.contains("__rust_begin_short_backtrace")
})
.collect::<Vec<_>>();
@ -132,7 +131,7 @@ fn prune_stacktrace<'mir, 'tcx>(
// This len check ensures that we don't somehow remove every frame, as doing so breaks
// the primary error message.
while stacktrace.len() > 1
&& stacktrace.last().map_or(false, |frame| !ecx.machine.is_local(frame))
&& stacktrace.last().map_or(false, |frame| !machine.is_local(frame))
{
stacktrace.pop();
}
@ -146,7 +145,7 @@ fn prune_stacktrace<'mir, 'tcx>(
/// Emit a custom diagnostic without going through the miri-engine machinery
pub fn report_error<'tcx, 'mir>(
ecx: &InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>,
ecx: &InterpCx<'mir, 'tcx, MiriMachine<'mir, 'tcx>>,
e: InterpErrorInfo<'tcx>,
) -> Option<i64> {
use InterpError::*;
@ -256,17 +255,17 @@ pub fn report_error<'tcx, 'mir>(
};
let stacktrace = ecx.generate_stacktrace();
let (stacktrace, was_pruned) = prune_stacktrace(ecx, stacktrace);
let (stacktrace, was_pruned) = prune_stacktrace(stacktrace, &ecx.machine);
e.print_backtrace();
msg.insert(0, e.to_string());
report_msg(
ecx,
DiagLevel::Error,
&if let Some(title) = title { format!("{}: {}", title, msg[0]) } else { msg[0].clone() },
msg,
vec![],
helps,
&stacktrace,
&ecx.machine,
);
// Include a note like `std` does when we omit frames from a backtrace
@ -306,17 +305,17 @@ pub fn report_error<'tcx, 'mir>(
/// We want to present a multi-line span message for some errors. Diagnostics do not support this
/// directly, so we pass the lines as a `Vec<String>` and display each line after the first with an
/// additional `span_label` or `note` call.
fn report_msg<'mir, 'tcx>(
ecx: &InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>,
fn report_msg<'tcx>(
diag_level: DiagLevel,
title: &str,
span_msg: Vec<String>,
notes: Vec<(Option<SpanData>, String)>,
helps: Vec<(Option<SpanData>, String)>,
stacktrace: &[FrameInfo<'tcx>],
machine: &MiriMachine<'_, 'tcx>,
) {
let span = stacktrace.first().map_or(DUMMY_SP, |fi| fi.span);
let sess = ecx.tcx.sess;
let sess = machine.tcx.sess;
let mut err = match diag_level {
DiagLevel::Error => sess.struct_span_err(span, title).forget_guarantee(),
DiagLevel::Warning => sess.struct_span_warn(span, title),
@ -357,7 +356,7 @@ fn report_msg<'mir, 'tcx>(
}
// Add backtrace
for (idx, frame_info) in stacktrace.iter().enumerate() {
let is_local = ecx.machine.is_local(frame_info);
let is_local = machine.is_local(frame_info);
// No span for non-local frames and the first frame (which is the error site).
if is_local && idx > 0 {
err.span_note(frame_info.span, &frame_info.to_string());
@ -369,164 +368,115 @@ fn report_msg<'mir, 'tcx>(
err.emit();
}
thread_local! {
static DIAGNOSTICS: RefCell<Vec<NonHaltingDiagnostic>> = RefCell::new(Vec::new());
}
impl<'mir, 'tcx> MiriMachine<'mir, 'tcx> {
pub fn emit_diagnostic(&self, e: NonHaltingDiagnostic) {
use NonHaltingDiagnostic::*;
/// Schedule a diagnostic for emitting. This function works even if you have no `InterpCx` available.
/// The diagnostic will be emitted after the current interpreter step is finished.
pub fn register_diagnostic(e: NonHaltingDiagnostic) {
DIAGNOSTICS.with(|diagnostics| diagnostics.borrow_mut().push(e));
}
let stacktrace =
MiriInterpCx::generate_stacktrace_from_stack(self.threads.active_thread_stack());
let (stacktrace, _was_pruned) = prune_stacktrace(stacktrace, self);
/// Remember enough about the topmost frame so that we can restore the stack
/// after a step was taken.
pub struct TopFrameInfo<'tcx> {
stack_size: usize,
instance: Option<ty::Instance<'tcx>>,
span: Span,
}
let (title, diag_level) = match e {
RejectedIsolatedOp(_) => ("operation rejected by isolation", DiagLevel::Warning),
Int2Ptr { .. } => ("integer-to-pointer cast", DiagLevel::Warning),
CreatedPointerTag(..)
| PoppedPointerTag(..)
| CreatedCallId(..)
| CreatedAlloc(..)
| FreedAlloc(..)
| ProgressReport { .. }
| WeakMemoryOutdatedLoad => ("tracking was triggered", DiagLevel::Note),
};
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
fn preprocess_diagnostics(&self) -> TopFrameInfo<'tcx> {
// Ensure we have no lingering diagnostics.
DIAGNOSTICS.with(|diagnostics| assert!(diagnostics.borrow().is_empty()));
let this = self.eval_context_ref();
if this.active_thread_stack().is_empty() {
// Diagnostics can happen even with the empty stack (e.g. deallocation of thread-local statics).
return TopFrameInfo { stack_size: 0, instance: None, span: DUMMY_SP };
}
let frame = this.frame();
TopFrameInfo {
stack_size: this.active_thread_stack().len(),
instance: Some(frame.instance),
span: frame.current_span(),
}
}
/// Emit all diagnostics that were registered with `register_diagnostic`
fn process_diagnostics(&self, info: TopFrameInfo<'tcx>) {
let this = self.eval_context_ref();
DIAGNOSTICS.with(|diagnostics| {
let mut diagnostics = diagnostics.borrow_mut();
if diagnostics.is_empty() {
return;
}
// We need to fix up the stack trace, because the machine has already
// stepped to the next statement.
let mut stacktrace = this.generate_stacktrace();
// Remove newly pushed frames.
while stacktrace.len() > info.stack_size {
stacktrace.remove(0);
}
// Add popped frame back.
if stacktrace.len() < info.stack_size {
assert!(
stacktrace.len() == info.stack_size - 1,
"we should never pop more than one frame at once"
);
let frame_info = FrameInfo {
instance: info.instance.unwrap(),
span: info.span,
lint_root: None,
};
stacktrace.insert(0, frame_info);
} else if let Some(instance) = info.instance {
// Adjust topmost frame.
stacktrace[0].span = info.span;
assert_eq!(
stacktrace[0].instance, instance,
"we should not pop and push a frame in one step"
);
}
let (stacktrace, _was_pruned) = prune_stacktrace(this, stacktrace);
// Show diagnostics.
for e in diagnostics.drain(..) {
use NonHaltingDiagnostic::*;
let (title, diag_level) = match e {
RejectedIsolatedOp(_) =>
("operation rejected by isolation", DiagLevel::Warning),
Int2Ptr { .. } => ("integer-to-pointer cast", DiagLevel::Warning),
CreatedPointerTag(..)
| PoppedPointerTag(..)
| CreatedCallId(..)
| CreatedAlloc(..)
| FreedAlloc(..)
| ProgressReport { .. }
| WeakMemoryOutdatedLoad =>
("tracking was triggered", DiagLevel::Note),
};
let msg = match e {
CreatedPointerTag(tag, None) =>
format!("created tag {tag:?}"),
CreatedPointerTag(tag, Some((alloc_id, range))) =>
format!("created tag {tag:?} at {alloc_id:?}{range:?}"),
PoppedPointerTag(item, tag) =>
match tag {
None =>
format!(
"popped tracked tag for item {item:?} due to deallocation",
),
Some((tag, access)) => {
format!(
"popped tracked tag for item {item:?} due to {access:?} access for {tag:?}",
)
}
},
CreatedCallId(id) =>
format!("function call with id {id}"),
CreatedAlloc(AllocId(id), size, align, kind) =>
let msg = match e {
CreatedPointerTag(tag, None) => format!("created tag {tag:?}"),
CreatedPointerTag(tag, Some((alloc_id, range))) =>
format!("created tag {tag:?} at {alloc_id:?}{range:?}"),
PoppedPointerTag(item, tag) =>
match tag {
None => format!("popped tracked tag for item {item:?} due to deallocation",),
Some((tag, access)) => {
format!(
"created {kind} allocation of {size} bytes (alignment {align} bytes) with id {id}",
size = size.bytes(),
align = align.bytes(),
),
FreedAlloc(AllocId(id)) =>
format!("freed allocation with id {id}"),
RejectedIsolatedOp(ref op) =>
format!("{op} was made to return an error due to isolation"),
ProgressReport { .. } =>
format!("progress report: current operation being executed is here"),
Int2Ptr { .. } =>
format!("integer-to-pointer cast"),
WeakMemoryOutdatedLoad =>
format!("weak memory emulation: outdated value returned from load"),
};
let notes = match e {
ProgressReport { block_count } => {
// It is important that each progress report is slightly different, since
// identical diagnostics are being deduplicated.
vec![
(None, format!("so far, {block_count} basic blocks have been executed")),
]
"popped tracked tag for item {item:?} due to {access:?} access for {tag:?}",
)
}
_ => vec![],
};
},
CreatedCallId(id) => format!("function call with id {id}"),
CreatedAlloc(AllocId(id), size, align, kind) =>
format!(
"created {kind} allocation of {size} bytes (alignment {align} bytes) with id {id}",
size = size.bytes(),
align = align.bytes(),
),
FreedAlloc(AllocId(id)) => format!("freed allocation with id {id}"),
RejectedIsolatedOp(ref op) =>
format!("{op} was made to return an error due to isolation"),
ProgressReport { .. } =>
format!("progress report: current operation being executed is here"),
Int2Ptr { .. } => format!("integer-to-pointer cast"),
WeakMemoryOutdatedLoad =>
format!("weak memory emulation: outdated value returned from load"),
};
let helps = match e {
Int2Ptr { details: true } =>
vec![
(None, format!("This program is using integer-to-pointer casts or (equivalently) `ptr::from_exposed_addr`,")),
(None, format!("which means that Miri might miss pointer bugs in this program.")),
(None, format!("See https://doc.rust-lang.org/nightly/std/ptr/fn.from_exposed_addr.html for more details on that operation.")),
(None, format!("To ensure that Miri does not miss bugs in your program, use Strict Provenance APIs (https://doc.rust-lang.org/nightly/std/ptr/index.html#strict-provenance, https://crates.io/crates/sptr) instead.")),
(None, format!("You can then pass the `-Zmiri-strict-provenance` flag to Miri, to ensure you are not relying on `from_exposed_addr` semantics.")),
(None, format!("Alternatively, the `-Zmiri-permissive-provenance` flag disables this warning.")),
],
_ => vec![],
};
report_msg(this, diag_level, title, vec![msg], notes, helps, &stacktrace);
let notes = match e {
ProgressReport { block_count } => {
// It is important that each progress report is slightly different, since
// identical diagnostics are being deduplicated.
vec![(None, format!("so far, {block_count} basic blocks have been executed"))]
}
});
_ => vec![],
};
let helps = match e {
Int2Ptr { details: true } =>
vec![
(
None,
format!(
"This program is using integer-to-pointer casts or (equivalently) `ptr::from_exposed_addr`,"
),
),
(
None,
format!("which means that Miri might miss pointer bugs in this program."),
),
(
None,
format!(
"See https://doc.rust-lang.org/nightly/std/ptr/fn.from_exposed_addr.html for more details on that operation."
),
),
(
None,
format!(
"To ensure that Miri does not miss bugs in your program, use Strict Provenance APIs (https://doc.rust-lang.org/nightly/std/ptr/index.html#strict-provenance, https://crates.io/crates/sptr) instead."
),
),
(
None,
format!(
"You can then pass the `-Zmiri-strict-provenance` flag to Miri, to ensure you are not relying on `from_exposed_addr` semantics."
),
),
(
None,
format!(
"Alternatively, the `-Zmiri-permissive-provenance` flag disables this warning."
),
),
],
_ => vec![],
};
report_msg(diag_level, title, vec![msg], notes, helps, &stacktrace, self);
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn emit_diagnostic(&self, e: NonHaltingDiagnostic) {
let this = self.eval_context_ref();
this.machine.emit_diagnostic(e);
}
/// We had a panic in Miri itself, try to print something useful.
@ -538,13 +488,13 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
let this = self.eval_context_ref();
let stacktrace = this.generate_stacktrace();
report_msg(
this,
DiagLevel::Note,
"the place in the program where the ICE was triggered",
vec![],
vec![],
vec![],
&stacktrace,
&this.machine,
);
}
}

View File

@ -180,23 +180,19 @@ pub fn create_ecx<'mir, 'tcx: 'mir>(
entry_id: DefId,
entry_type: EntryFnType,
config: &MiriConfig,
) -> InterpResult<'tcx, (InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>, MPlaceTy<'tcx, Provenance>)> {
) -> InterpResult<'tcx, (InterpCx<'mir, 'tcx, MiriMachine<'mir, 'tcx>>, MPlaceTy<'tcx, Provenance>)>
{
let param_env = ty::ParamEnv::reveal_all();
let layout_cx = LayoutCx { tcx, param_env };
let mut ecx = InterpCx::new(
tcx,
rustc_span::source_map::DUMMY_SP,
param_env,
Evaluator::new(config, layout_cx),
MiriMachine::new(config, layout_cx),
);
// Capture the current interpreter stack state (which should be empty) so that we can emit
// allocation-tracking and tag-tracking diagnostics for allocations which are part of the
// early runtime setup.
let info = ecx.preprocess_diagnostics();
// Some parts of initialization require a full `InterpCx`.
Evaluator::late_init(&mut ecx, config)?;
MiriMachine::late_init(&mut ecx, config)?;
// Make sure we have MIR. We check MIR for some stable monomorphic function in libcore.
let sentinel = ecx.try_resolve_path(&["core", "ascii", "escape_default"]);
@ -324,10 +320,6 @@ pub fn create_ecx<'mir, 'tcx: 'mir>(
}
}
// Emit any diagnostics related to the setup process for the runtime, so that when the
// interpreter loop starts there are no unprocessed diagnostics.
ecx.process_diagnostics(info);
Ok((ecx, ret_place))
}
@ -356,7 +348,6 @@ pub fn eval_entry<'tcx>(
let res: thread::Result<InterpResult<'_, i64>> = panic::catch_unwind(AssertUnwindSafe(|| {
// Main loop.
loop {
let info = ecx.preprocess_diagnostics();
match ecx.schedule()? {
SchedulingAction::ExecuteStep => {
assert!(ecx.step()?, "a terminated thread was scheduled for execution");
@ -374,7 +365,6 @@ pub fn eval_entry<'tcx>(
break;
}
}
ecx.process_diagnostics(info);
}
let return_code = ecx.read_scalar(&ret_place.into())?.to_machine_isize(&ecx)?;
Ok(return_code)

View File

@ -21,7 +21,7 @@ use rand::RngCore;
use crate::*;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
// This mapping should match `decode_error_kind` in
// <https://github.com/rust-lang/rust/blob/master/library/std/src/sys/unix/mod.rs>.
@ -96,7 +96,7 @@ fn try_resolve_did<'tcx>(tcx: TyCtxt<'tcx>, path: &[&str]) -> Option<DefId> {
)
}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Gets an instance for a path; fails gracefully if the path does not exist.
fn try_resolve_path(&self, path: &[&str]) -> Option<ty::Instance<'tcx>> {
let did = try_resolve_did(self.eval_context_ref().tcx.tcx, path)?;
@ -391,11 +391,11 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
where
F: FnMut(&MPlaceTy<'tcx, Provenance>) -> InterpResult<'tcx>,
{
ecx: &'ecx MiriEvalContext<'mir, 'tcx>,
ecx: &'ecx MiriInterpCx<'mir, 'tcx>,
unsafe_cell_action: F,
}
impl<'ecx, 'mir, 'tcx: 'mir, F> ValueVisitor<'mir, 'tcx, Evaluator<'mir, 'tcx>>
impl<'ecx, 'mir, 'tcx: 'mir, F> ValueVisitor<'mir, 'tcx, MiriMachine<'mir, 'tcx>>
for UnsafeCellVisitor<'ecx, 'mir, 'tcx, F>
where
F: FnMut(&MPlaceTy<'tcx, Provenance>) -> InterpResult<'tcx>,
@ -403,7 +403,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
type V = MPlaceTy<'tcx, Provenance>;
#[inline(always)]
fn ecx(&self) -> &MiriEvalContext<'mir, 'tcx> {
fn ecx(&self) -> &MiriInterpCx<'mir, 'tcx> {
self.ecx
}
@ -508,7 +508,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
Ok(())
}
RejectOpWith::Warning => {
register_diagnostic(NonHaltingDiagnostic::RejectedIsolatedOp(op_name.to_string()));
this.emit_diagnostic(NonHaltingDiagnostic::RejectedIsolatedOp(op_name.to_string()));
Ok(())
}
RejectOpWith::NoWarning => Ok(()), // no warning
@ -883,9 +883,9 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
}
}
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
pub fn current_span(&self, tcx: TyCtxt<'tcx>) -> CurrentSpan<'_, 'mir, 'tcx> {
CurrentSpan { current_frame_idx: None, machine: self, tcx }
impl<'mir, 'tcx> MiriMachine<'mir, 'tcx> {
pub fn current_span(&self) -> CurrentSpan<'_, 'mir, 'tcx> {
CurrentSpan { current_frame_idx: None, machine: self }
}
}
@ -896,11 +896,14 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
#[derive(Clone)]
pub struct CurrentSpan<'a, 'mir, 'tcx> {
current_frame_idx: Option<usize>,
tcx: TyCtxt<'tcx>,
machine: &'a Evaluator<'mir, 'tcx>,
machine: &'a MiriMachine<'mir, 'tcx>,
}
impl<'a, 'mir: 'a, 'tcx: 'a + 'mir> CurrentSpan<'a, 'mir, 'tcx> {
pub fn machine(&self) -> &'a MiriMachine<'mir, 'tcx> {
self.machine
}
/// Get the current span, skipping non-local frames.
/// This function is backed by a cache, and can be assumed to be very fast.
pub fn get(&mut self) -> Span {
@ -916,7 +919,7 @@ impl<'a, 'mir: 'a, 'tcx: 'a + 'mir> CurrentSpan<'a, 'mir, 'tcx> {
Self::frame_span(self.machine, idx.wrapping_sub(1))
}
fn frame_span(machine: &Evaluator<'_, '_>, idx: usize) -> Span {
fn frame_span(machine: &MiriMachine<'_, '_>, idx: usize) -> Span {
machine
.threads
.active_thread_stack()
@ -928,13 +931,13 @@ impl<'a, 'mir: 'a, 'tcx: 'a + 'mir> CurrentSpan<'a, 'mir, 'tcx> {
fn current_frame_idx(&mut self) -> usize {
*self
.current_frame_idx
.get_or_insert_with(|| Self::compute_current_frame_index(self.tcx, self.machine))
.get_or_insert_with(|| Self::compute_current_frame_index(self.machine))
}
// Find the position of the inner-most frame which is part of the crate being
// compiled/executed, part of the Cargo workspace, and is also not #[track_caller].
#[inline(never)]
fn compute_current_frame_index(tcx: TyCtxt<'_>, machine: &Evaluator<'_, '_>) -> usize {
fn compute_current_frame_index(machine: &MiriMachine<'_, '_>) -> usize {
machine
.threads
.active_thread_stack()
@ -944,7 +947,7 @@ impl<'a, 'mir: 'a, 'tcx: 'a + 'mir> CurrentSpan<'a, 'mir, 'tcx> {
.find_map(|(idx, frame)| {
let def_id = frame.instance.def_id();
if (def_id.is_local() || machine.local_crates.contains(&def_id.krate))
&& !frame.instance.def.requires_caller_location(tcx)
&& !frame.instance.def.requires_caller_location(machine.tcx)
{
Some(idx)
} else {

View File

@ -59,7 +59,7 @@ impl GlobalStateInner {
impl<'mir, 'tcx> GlobalStateInner {
// Returns the exposed `AllocId` that corresponds to the specified addr,
// or `None` if the addr is out of bounds
fn alloc_id_from_addr(ecx: &MiriEvalContext<'mir, 'tcx>, addr: u64) -> Option<AllocId> {
fn alloc_id_from_addr(ecx: &MiriInterpCx<'mir, 'tcx>, addr: u64) -> Option<AllocId> {
let global_state = ecx.machine.intptrcast.borrow();
assert!(global_state.provenance_mode != ProvenanceMode::Strict);
@ -97,7 +97,7 @@ impl<'mir, 'tcx> GlobalStateInner {
}
pub fn expose_ptr(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
alloc_id: AllocId,
sb: SbTag,
) -> InterpResult<'tcx> {
@ -114,7 +114,7 @@ impl<'mir, 'tcx> GlobalStateInner {
}
pub fn ptr_from_addr_transmute(
_ecx: &MiriEvalContext<'mir, 'tcx>,
_ecx: &MiriInterpCx<'mir, 'tcx>,
addr: u64,
) -> Pointer<Option<Provenance>> {
trace!("Transmuting {:#x} to a pointer", addr);
@ -124,7 +124,7 @@ impl<'mir, 'tcx> GlobalStateInner {
}
pub fn ptr_from_addr_cast(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
addr: u64,
) -> InterpResult<'tcx, Pointer<Option<Provenance>>> {
trace!("Casting {:#x} to a pointer", addr);
@ -142,7 +142,7 @@ impl<'mir, 'tcx> GlobalStateInner {
let first = past_warnings.is_empty();
if past_warnings.insert(ecx.cur_span()) {
// Newly inserted, so first time we see this span.
register_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: first });
ecx.emit_diagnostic(NonHaltingDiagnostic::Int2Ptr { details: first });
}
});
}
@ -156,7 +156,7 @@ impl<'mir, 'tcx> GlobalStateInner {
Ok(Pointer::new(Some(Provenance::Wildcard), Size::from_bytes(addr)))
}
fn alloc_base_addr(ecx: &MiriEvalContext<'mir, 'tcx>, alloc_id: AllocId) -> u64 {
fn alloc_base_addr(ecx: &MiriInterpCx<'mir, 'tcx>, alloc_id: AllocId) -> u64 {
let mut global_state = ecx.machine.intptrcast.borrow_mut();
let global_state = &mut *global_state;
@ -202,7 +202,7 @@ impl<'mir, 'tcx> GlobalStateInner {
}
/// Convert a relative (tcx) pointer to an absolute address.
pub fn rel_ptr_to_addr(ecx: &MiriEvalContext<'mir, 'tcx>, ptr: Pointer<AllocId>) -> u64 {
pub fn rel_ptr_to_addr(ecx: &MiriInterpCx<'mir, 'tcx>, ptr: Pointer<AllocId>) -> u64 {
let (alloc_id, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
let base_addr = GlobalStateInner::alloc_base_addr(ecx, alloc_id);
@ -214,7 +214,7 @@ impl<'mir, 'tcx> GlobalStateInner {
/// When a pointer is used for a memory access, this computes where in which allocation the
/// access is going.
pub fn abs_ptr_to_rel(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
ptr: Pointer<Provenance>,
) -> Option<(AllocId, Size)> {
let (tag, addr) = ptr.into_parts(); // addr is absolute (Tag provenance)

View File

@ -94,8 +94,8 @@ pub use crate::concurrency::{
},
};
pub use crate::diagnostics::{
register_diagnostic, report_error, EvalContextExt as DiagnosticsEvalContextExt,
NonHaltingDiagnostic, TerminationInfo,
report_error, EvalContextExt as DiagnosticsEvalContextExt, NonHaltingDiagnostic,
TerminationInfo,
};
pub use crate::eval::{
create_ecx, eval_entry, AlignmentCheck, BacktraceStyle, IsolatedOp, MiriConfig, RejectOpWith,
@ -103,8 +103,8 @@ pub use crate::eval::{
pub use crate::helpers::{CurrentSpan, EvalContextExt as HelpersEvalContextExt};
pub use crate::intptrcast::ProvenanceMode;
pub use crate::machine::{
AllocExtra, Evaluator, FrameData, MiriEvalContext, MiriEvalContextExt, MiriMemoryKind,
Provenance, ProvenanceExtra, NUM_CPUS, PAGE_SIZE, STACK_ADDR, STACK_SIZE,
AllocExtra, FrameData, MiriInterpCx, MiriInterpCxExt, MiriMachine, MiriMemoryKind, Provenance,
ProvenanceExtra, NUM_CPUS, PAGE_SIZE, STACK_ADDR, STACK_SIZE,
};
pub use crate::mono_hash_map::MonoHashMap;
pub use crate::operator::EvalContextExt as OperatorEvalContextExt;

View File

@ -291,9 +291,17 @@ impl<'mir, 'tcx: 'mir> PrimitiveLayouts<'tcx> {
}
/// The machine itself.
pub struct Evaluator<'mir, 'tcx> {
pub struct MiriMachine<'mir, 'tcx> {
// We carry a copy of the global `TyCtxt` for convenience, so methods taking just `&MiriMachine` have `tcx` access.
pub tcx: TyCtxt<'tcx>,
/// Stacked Borrows global data.
pub stacked_borrows: Option<stacked_borrows::GlobalState>,
/// Data race detector global data.
pub data_race: Option<data_race::GlobalState>,
/// Ptr-int-cast module global data.
pub intptrcast: intptrcast::GlobalState,
/// Environment variables set by `setenv`.
@ -400,7 +408,7 @@ pub struct Evaluator<'mir, 'tcx> {
pub(crate) since_gc: u32,
}
impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
impl<'mir, 'tcx> MiriMachine<'mir, 'tcx> {
pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>) -> Self {
let target_triple = &layout_cx.tcx.sess.opts.target_triple.to_string();
let local_crates = helpers::get_local_crates(layout_cx.tcx);
@ -418,7 +426,8 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
))
});
let data_race = config.data_race_detector.then(|| data_race::GlobalState::new(config));
Evaluator {
MiriMachine {
tcx: layout_cx.tcx,
stacked_borrows,
data_race,
intptrcast: RefCell::new(intptrcast::GlobalStateInner::new(config)),
@ -479,17 +488,17 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
}
pub(crate) fn late_init(
this: &mut MiriEvalContext<'mir, 'tcx>,
this: &mut MiriInterpCx<'mir, 'tcx>,
config: &MiriConfig,
) -> InterpResult<'tcx> {
EnvVars::init(this, config)?;
Evaluator::init_extern_statics(this)?;
MiriMachine::init_extern_statics(this)?;
ThreadManager::init(this);
Ok(())
}
fn add_extern_static(
this: &mut MiriEvalContext<'mir, 'tcx>,
this: &mut MiriInterpCx<'mir, 'tcx>,
name: &str,
ptr: Pointer<Option<Provenance>>,
) {
@ -499,7 +508,7 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
}
fn alloc_extern_static(
this: &mut MiriEvalContext<'mir, 'tcx>,
this: &mut MiriInterpCx<'mir, 'tcx>,
name: &str,
val: ImmTy<'tcx, Provenance>,
) -> InterpResult<'tcx> {
@ -510,7 +519,7 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
}
/// Sets up the "extern statics" for this machine.
fn init_extern_statics(this: &mut MiriEvalContext<'mir, 'tcx>) -> InterpResult<'tcx> {
fn init_extern_statics(this: &mut MiriInterpCx<'mir, 'tcx>) -> InterpResult<'tcx> {
match this.tcx.sess.target.os.as_ref() {
"linux" => {
// "environ"
@ -576,26 +585,26 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
}
/// A rustc InterpCx for Miri.
pub type MiriEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>;
pub type MiriInterpCx<'mir, 'tcx> = InterpCx<'mir, 'tcx, MiriMachine<'mir, 'tcx>>;
/// A little trait that's useful to be inherited by extension traits.
pub trait MiriEvalContextExt<'mir, 'tcx> {
fn eval_context_ref<'a>(&'a self) -> &'a MiriEvalContext<'mir, 'tcx>;
fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriEvalContext<'mir, 'tcx>;
pub trait MiriInterpCxExt<'mir, 'tcx> {
fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'mir, 'tcx>;
fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'mir, 'tcx>;
}
impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {
impl<'mir, 'tcx> MiriInterpCxExt<'mir, 'tcx> for MiriInterpCx<'mir, 'tcx> {
#[inline(always)]
fn eval_context_ref(&self) -> &MiriEvalContext<'mir, 'tcx> {
fn eval_context_ref(&self) -> &MiriInterpCx<'mir, 'tcx> {
self
}
#[inline(always)]
fn eval_context_mut(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'mir, 'tcx> {
self
}
}
/// Machine hook implementations.
impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
impl<'mir, 'tcx> Machine<'mir, 'tcx> for MiriMachine<'mir, 'tcx> {
type MemoryKind = MiriMemoryKind;
type ExtraFnVal = Dlsym;
@ -615,33 +624,33 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
const PANIC_ON_ALLOC_FAIL: bool = false;
#[inline(always)]
fn enforce_alignment(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
fn enforce_alignment(ecx: &MiriInterpCx<'mir, 'tcx>) -> bool {
ecx.machine.check_alignment != AlignmentCheck::None
}
#[inline(always)]
fn use_addr_for_alignment_check(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
fn use_addr_for_alignment_check(ecx: &MiriInterpCx<'mir, 'tcx>) -> bool {
ecx.machine.check_alignment == AlignmentCheck::Int
}
#[inline(always)]
fn enforce_validity(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
fn enforce_validity(ecx: &MiriInterpCx<'mir, 'tcx>) -> bool {
ecx.machine.validate
}
#[inline(always)]
fn enforce_abi(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
fn enforce_abi(ecx: &MiriInterpCx<'mir, 'tcx>) -> bool {
ecx.machine.enforce_abi
}
#[inline(always)]
fn checked_binop_checks_overflow(ecx: &MiriEvalContext<'mir, 'tcx>) -> bool {
fn checked_binop_checks_overflow(ecx: &MiriInterpCx<'mir, 'tcx>) -> bool {
ecx.tcx.sess.overflow_checks()
}
#[inline(always)]
fn find_mir_or_eval_fn(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
instance: ty::Instance<'tcx>,
abi: Abi,
args: &[OpTy<'tcx, Provenance>],
@ -654,7 +663,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
#[inline(always)]
fn call_extra_fn(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
fn_val: Dlsym,
abi: Abi,
args: &[OpTy<'tcx, Provenance>],
@ -667,7 +676,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
#[inline(always)]
fn call_intrinsic(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Provenance>],
dest: &PlaceTy<'tcx, Provenance>,
@ -679,7 +688,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
#[inline(always)]
fn assert_panic(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
msg: &mir::AssertMessage<'tcx>,
unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx> {
@ -687,13 +696,13 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
}
#[inline(always)]
fn abort(_ecx: &mut MiriEvalContext<'mir, 'tcx>, msg: String) -> InterpResult<'tcx, !> {
fn abort(_ecx: &mut MiriInterpCx<'mir, 'tcx>, msg: String) -> InterpResult<'tcx, !> {
throw_machine_stop!(TerminationInfo::Abort(msg))
}
#[inline(always)]
fn binary_ptr_op(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
bin_op: mir::BinOp,
left: &ImmTy<'tcx, Provenance>,
right: &ImmTy<'tcx, Provenance>,
@ -702,14 +711,14 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
}
fn thread_local_static_base_pointer(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Provenance>> {
ecx.get_or_create_thread_local_alloc(def_id)
}
fn extern_static_base_pointer(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Provenance>> {
let link_name = ecx.item_link_name(def_id);
@ -748,14 +757,14 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
}
fn adjust_allocation<'b>(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
id: AllocId,
alloc: Cow<'b, Allocation>,
kind: Option<MemoryKind<Self::MemoryKind>>,
) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra>>> {
let kind = kind.expect("we set our STATIC_KIND so this cannot be None");
if ecx.machine.tracked_alloc_ids.contains(&id) {
register_diagnostic(NonHaltingDiagnostic::CreatedAlloc(
ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(
id,
alloc.size(),
alloc.align,
@ -770,7 +779,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
alloc.size(),
stacked_borrows,
kind,
ecx.machine.current_span(*ecx.tcx),
ecx.machine.current_span(),
)
});
let race_alloc = ecx.machine.data_race.as_ref().map(|data_race| {
@ -795,7 +804,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
}
fn adjust_alloc_base_pointer(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
ptr: Pointer<AllocId>,
) -> Pointer<Provenance> {
if cfg!(debug_assertions) {
@ -813,7 +822,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
}
let absolute_addr = intptrcast::GlobalStateInner::rel_ptr_to_addr(ecx, ptr);
let sb_tag = if let Some(stacked_borrows) = &ecx.machine.stacked_borrows {
stacked_borrows.borrow_mut().base_ptr_tag(ptr.provenance)
stacked_borrows.borrow_mut().base_ptr_tag(ptr.provenance, &ecx.machine)
} else {
// Value does not matter, SB is disabled
SbTag::default()
@ -826,7 +835,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
#[inline(always)]
fn ptr_from_addr_cast(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
addr: u64,
) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>> {
intptrcast::GlobalStateInner::ptr_from_addr_cast(ecx, addr)
@ -850,7 +859,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
/// Convert a pointer with provenance into an allocation-offset pair,
/// or a `None` with an absolute address if that conversion is not possible.
fn ptr_get_alloc(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
ptr: Pointer<Self::Provenance>,
) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
let rel = intptrcast::GlobalStateInner::abs_ptr_to_rel(ecx, ptr);
@ -866,7 +875,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
#[inline(always)]
fn before_memory_read(
tcx: TyCtxt<'tcx>,
_tcx: TyCtxt<'tcx>,
machine: &Self,
alloc_extra: &AllocExtra,
(alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
@ -886,7 +895,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
prov_extra,
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(tcx),
machine.current_span(),
&machine.threads,
)?;
}
@ -898,7 +907,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
#[inline(always)]
fn before_memory_write(
tcx: TyCtxt<'tcx>,
_tcx: TyCtxt<'tcx>,
machine: &mut Self,
alloc_extra: &mut AllocExtra,
(alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
@ -918,7 +927,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
prov_extra,
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(tcx),
machine.current_span(),
&machine.threads,
)?;
}
@ -930,14 +939,14 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
#[inline(always)]
fn before_memory_deallocation(
tcx: TyCtxt<'tcx>,
_tcx: TyCtxt<'tcx>,
machine: &mut Self,
alloc_extra: &mut AllocExtra,
(alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
range: AllocRange,
) -> InterpResult<'tcx> {
if machine.tracked_alloc_ids.contains(&alloc_id) {
register_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
}
if let Some(data_race) = &mut alloc_extra.data_race {
data_race.deallocate(
@ -953,7 +962,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
prove_extra,
range,
machine.stacked_borrows.as_ref().unwrap(),
machine.current_span(tcx),
machine.current_span(),
&machine.threads,
)
} else {
@ -993,7 +1002,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
let stacked_borrows = ecx.machine.stacked_borrows.as_ref();
let extra = FrameData {
stacked_borrows: stacked_borrows.map(|sb| sb.borrow_mut().new_frame()),
stacked_borrows: stacked_borrows.map(|sb| sb.borrow_mut().new_frame(&ecx.machine)),
catch_unwind: None,
timing,
};
@ -1018,7 +1027,7 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
// Possibly report our progress.
if let Some(report_progress) = ecx.machine.report_progress {
if ecx.machine.basic_block_count % u64::from(report_progress) == 0 {
register_diagnostic(NonHaltingDiagnostic::ProgressReport {
ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
block_count: ecx.machine.basic_block_count,
});
}
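A recurring idiom throughout this diff is the pair of traits defined above in machine.rs: `MiriInterpCxExt` hands out the concrete `MiriInterpCx`, and each module's `EvalContextExt` adds default methods on top of it, so a one-line `impl EvalContextExt for MiriInterpCx {}` is all a module needs to make its helpers callable on the interpreter. As a rough, self-contained sketch with toy types (not Miri's actual definitions):

```rust
// Toy stand-ins for InterpCx/MiriMachine, just to show the trait wiring.
struct Machine {
    steps: u64,
}
struct InterpCx {
    machine: Machine,
}

// Mirrors `MiriInterpCxExt`: lets extension traits reach the concrete context.
trait InterpCxExt {
    fn eval_context_ref(&self) -> &InterpCx;
    fn eval_context_mut(&mut self) -> &mut InterpCx;
}

impl InterpCxExt for InterpCx {
    fn eval_context_ref(&self) -> &InterpCx {
        self
    }
    fn eval_context_mut(&mut self) -> &mut InterpCx {
        self
    }
}

// Mirrors a per-module `EvalContextExt`: default methods that the interpreter
// context gets for free via the blanket-style impl below.
trait EvalContextExt: InterpCxExt {
    fn bump_steps(&mut self) -> u64 {
        let this = self.eval_context_mut();
        this.machine.steps += 1;
        this.machine.steps
    }
}
impl EvalContextExt for InterpCx {}

fn main() {
    let mut ecx = InterpCx { machine: Machine { steps: 0 } };
    // The method defined in the extension trait is callable directly on the context.
    assert_eq!(ecx.bump_steps(), 1);
    assert_eq!(ecx.bump_steps(), 2);
}
```

This wiring is also why the diagnostics change is mostly mechanical: every former `register_diagnostic(...)` call site already had either a `&MiriMachine` or something implementing the extension traits in scope, so it can call `emit_diagnostic` directly.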

View File

@ -14,7 +14,7 @@ pub trait EvalContextExt<'tcx> {
) -> InterpResult<'tcx, (Scalar<Provenance>, bool, Ty<'tcx>)>;
}
impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriInterpCx<'mir, 'tcx> {
fn binary_ptr_op(
&self,
bin_op: mir::BinOp,

View File

@ -5,8 +5,8 @@ use rustc_middle::ty::{self, Instance};
use rustc_span::{BytePos, Loc, Symbol};
use rustc_target::{abi::Size, spec::abi::Abi};
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn handle_miri_backtrace_size(
&mut self,
abi: Abi,

View File

@ -27,8 +27,8 @@ impl Dlsym {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn call_dlsym(
&mut self,
dlsym: Dlsym,

View File

@ -38,7 +38,7 @@ pub struct EnvVars<'tcx> {
impl<'tcx> EnvVars<'tcx> {
pub(crate) fn init<'mir>(
ecx: &mut InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>,
ecx: &mut InterpCx<'mir, 'tcx, MiriMachine<'mir, 'tcx>>,
config: &MiriConfig,
) -> InterpResult<'tcx> {
let target_os = ecx.tcx.sess.target.os.as_ref();
@ -74,7 +74,7 @@ impl<'tcx> EnvVars<'tcx> {
}
pub(crate) fn cleanup<'mir>(
ecx: &mut InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>,
ecx: &mut InterpCx<'mir, 'tcx, MiriMachine<'mir, 'tcx>>,
) -> InterpResult<'tcx> {
// Deallocate individual env vars.
let env_vars = mem::take(&mut ecx.machine.env_vars.map);
@ -92,7 +92,7 @@ impl<'tcx> EnvVars<'tcx> {
fn alloc_env_var_as_c_str<'mir, 'tcx>(
name: &OsStr,
value: &OsStr,
ecx: &mut InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>,
ecx: &mut InterpCx<'mir, 'tcx, MiriMachine<'mir, 'tcx>>,
) -> InterpResult<'tcx, Pointer<Option<Provenance>>> {
let mut name_osstring = name.to_os_string();
name_osstring.push("=");
@ -103,7 +103,7 @@ fn alloc_env_var_as_c_str<'mir, 'tcx>(
fn alloc_env_var_as_wide_str<'mir, 'tcx>(
name: &OsStr,
value: &OsStr,
ecx: &mut InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>,
ecx: &mut InterpCx<'mir, 'tcx, MiriMachine<'mir, 'tcx>>,
) -> InterpResult<'tcx, Pointer<Option<Provenance>>> {
let mut name_osstring = name.to_os_string();
name_osstring.push("=");
@ -111,8 +111,8 @@ fn alloc_env_var_as_wide_str<'mir, 'tcx>(
ecx.alloc_os_str_as_wide_str(name_osstring.as_os_str(), MiriMemoryKind::Runtime.into())
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn getenv(
&mut self,
name_op: &OpTy<'tcx, Provenance>,

View File

@ -7,9 +7,9 @@ use rustc_target::abi::HasDataLayout;
use crate::*;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Extract the scalar value from the result of reading a scalar from the machine,
/// and convert it to a `CArg`.
fn scalar_to_carg(

View File

@ -38,8 +38,8 @@ pub enum EmulateByNameResult<'mir, 'tcx> {
NotSupported,
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Returns the minimum alignment for the target architecture for allocations of the given size.
fn min_align(&self, size: u64, kind: MiriMemoryKind) -> Align {
let this = self.eval_context_ref();
@ -334,7 +334,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
fn emulate_allocator(
&mut self,
symbol: Symbol,
default: impl FnOnce(&mut MiriEvalContext<'mir, 'tcx>) -> InterpResult<'tcx>,
default: impl FnOnce(&mut MiriInterpCx<'mir, 'tcx>) -> InterpResult<'tcx>,
) -> InterpResult<'tcx, EmulateByNameResult<'mir, 'tcx>> {
let this = self.eval_context_mut();

View File

@ -11,8 +11,8 @@ pub enum AtomicOp {
Min,
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Calls the atomic intrinsic `intrinsic`; the `atomic_` prefix has already been removed.
fn emulate_atomic_intrinsic(
&mut self,
@ -119,8 +119,8 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
}
}
impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx> {}
trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for MiriInterpCx<'mir, 'tcx> {}
trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriInterpCxExt<'mir, 'tcx> {
fn atomic_load(
&mut self,
args: &[OpTy<'tcx, Provenance>],

View File

@ -18,8 +18,8 @@ use atomic::EvalContextExt as _;
use helpers::check_arg_count;
use simd::EvalContextExt as _;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn call_intrinsic(
&mut self,
instance: ty::Instance<'tcx>,

View File

@ -6,8 +6,8 @@ use rustc_target::abi::{Endian, HasDataLayout, Size};
use crate::*;
use helpers::check_arg_count;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Calls the simd intrinsic `intrinsic`; the `simd_` prefix has already been removed.
fn emulate_simd_intrinsic(
&mut self,

View File

@ -24,8 +24,8 @@ use rustc_target::spec::abi::Abi;
use crate::*;
use helpers::check_arg_count;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn find_mir_or_eval_fn(
&mut self,
instance: ty::Instance<'tcx>,

View File

@ -46,8 +46,8 @@ pub fn bytes_to_os_str<'a, 'tcx>(bytes: &'a [u8]) -> InterpResult<'tcx, &'a OsSt
Ok(OsStr::new(s))
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Helper function to read an OsString from a null-terminated sequence of bytes, which is what
/// the Unix APIs usually handle.
fn read_os_str_from_c_str<'a>(

View File

@ -35,8 +35,8 @@ pub struct CatchUnwindData<'tcx> {
ret: mir::BasicBlock,
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Handles the special `miri_start_panic` intrinsic, which is called
/// by libpanic_unwind to delegate the actual unwinding process to Miri.
fn handle_miri_start_panic(

View File

@ -8,8 +8,8 @@ pub fn system_time_to_duration<'tcx>(time: &SystemTime) -> InterpResult<'tcx, Du
.map_err(|_| err_unsup_format!("times before the Unix epoch are not supported").into())
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn clock_gettime(
&mut self,
clk_id_op: &OpTy<'tcx, Provenance>,

View File

@ -241,8 +241,8 @@ impl<'tcx> TlsData<'tcx> {
}
}
impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Schedule TLS destructors for Windows.
/// On windows, TLS destructors are managed by std.
fn schedule_windows_tls_dtors(&mut self) -> InterpResult<'tcx> {
@ -346,8 +346,8 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Schedule an active thread's TLS destructor to run on the active thread.
/// Note that this function does not run the destructors itself, it just
/// schedules them one by one each time it is called and reenables the

View File

@ -21,8 +21,8 @@ impl Dlsym {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn call_dlsym(
&mut self,
dlsym: Dlsym,

View File

@ -4,9 +4,9 @@ use rustc_target::spec::abi::Abi;
use crate::*;
use shims::foreign_items::EmulateByNameResult;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn emulate_foreign_item_by_name(
&mut self,
link_name: Symbol,

View File

@ -29,8 +29,8 @@ impl Dlsym {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn call_dlsym(
&mut self,
dlsym: Dlsym,

View File

@ -13,8 +13,8 @@ use shims::unix::fs::EvalContextExt as _;
use shims::unix::sync::EvalContextExt as _;
use shims::unix::thread::EvalContextExt as _;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn emulate_foreign_item_by_name(
&mut self,
link_name: Symbol,
@ -228,7 +228,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
// FIXME: Which of these are POSIX, and which are GNU/Linux?
// At least the names seem to all also exist on macOS.
let sysconfs: &[(&str, fn(&MiriEvalContext<'_, '_>) -> Scalar<Provenance>)] = &[
let sysconfs: &[(&str, fn(&MiriInterpCx<'_, '_>) -> Scalar<Provenance>)] = &[
("_SC_PAGESIZE", |this| Scalar::from_int(PAGE_SIZE, this.pointer_size())),
("_SC_NPROCESSORS_CONF", |this| Scalar::from_int(NUM_CPUS, this.pointer_size())),
("_SC_NPROCESSORS_ONLN", |this| Scalar::from_int(NUM_CPUS, this.pointer_size())),

View File

@ -14,8 +14,8 @@ impl Dlsym {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn call_dlsym(
&mut self,
dlsym: Dlsym,

View File

@ -5,9 +5,9 @@ use crate::*;
use shims::foreign_items::EmulateByNameResult;
use shims::unix::thread::EvalContextExt as _;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn emulate_foreign_item_by_name(
&mut self,
link_name: Symbol,

View File

@ -304,8 +304,8 @@ impl FileHandler {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExtPrivate<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
trait EvalContextExtPrivate<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExtPrivate<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
trait EvalContextExtPrivate<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn macos_stat_write_buf(
&mut self,
metadata: FileMetadata,
@ -478,8 +478,8 @@ fn maybe_sync_file(
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn open(&mut self, args: &[OpTy<'tcx, Provenance>]) -> InterpResult<'tcx, i32> {
if args.len() < 2 {
throw_ub_format!(
@ -1892,7 +1892,7 @@ struct FileMetadata {
impl FileMetadata {
fn from_path<'tcx, 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
path: &Path,
follow_symlink: bool,
) -> InterpResult<'tcx, Option<FileMetadata>> {
@ -1903,7 +1903,7 @@ impl FileMetadata {
}
fn from_fd<'tcx, 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
fd: i32,
) -> InterpResult<'tcx, Option<FileMetadata>> {
let option = ecx.machine.file_handler.handles.get(&fd);
@ -1917,7 +1917,7 @@ impl FileMetadata {
}
fn from_meta<'tcx, 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
metadata: Result<std::fs::Metadata, std::io::Error>,
) -> InterpResult<'tcx, Option<FileMetadata>> {
let metadata = match metadata {

View File

@ -18,8 +18,8 @@ impl Dlsym {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn call_dlsym(
&mut self,
dlsym: Dlsym,

View File

@ -8,8 +8,8 @@ use shims::unix::linux::sync::futex;
use shims::unix::sync::EvalContextExt as _;
use shims::unix::thread::EvalContextExt as _;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn emulate_foreign_item_by_name(
&mut self,
link_name: Symbol,
@ -167,7 +167,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
// Shims the linux `getrandom` syscall.
fn getrandom<'tcx>(
this: &mut MiriEvalContext<'_, 'tcx>,
this: &mut MiriInterpCx<'_, 'tcx>,
ptr: &OpTy<'tcx, Provenance>,
len: &OpTy<'tcx, Provenance>,
flags: &OpTy<'tcx, Provenance>,

View File

@ -6,7 +6,7 @@ use std::time::SystemTime;
/// Implementation of the SYS_futex syscall.
/// `args` is the arguments *after* the syscall number.
pub fn futex<'tcx>(
this: &mut MiriEvalContext<'_, 'tcx>,
this: &mut MiriInterpCx<'_, 'tcx>,
args: &[OpTy<'tcx, Provenance>],
dest: &PlaceTy<'tcx, Provenance>,
) -> InterpResult<'tcx> {

View File

@ -22,8 +22,8 @@ impl Dlsym {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn call_dlsym(
&mut self,
dlsym: Dlsym,

View File

@ -6,8 +6,8 @@ use shims::foreign_items::EmulateByNameResult;
use shims::unix::fs::EvalContextExt as _;
use shims::unix::thread::EvalContextExt as _;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn emulate_foreign_item_by_name(
&mut self,
link_name: Symbol,

View File

@ -20,14 +20,14 @@ use crate::*;
const PTHREAD_MUTEX_NORMAL_FLAG: i32 = 0x8000000;
fn is_mutex_kind_default<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
kind: Scalar<Provenance>,
) -> InterpResult<'tcx, bool> {
Ok(kind == ecx.eval_libc("PTHREAD_MUTEX_DEFAULT")?)
}
fn is_mutex_kind_normal<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
kind: Scalar<Provenance>,
) -> InterpResult<'tcx, bool> {
let kind = kind.to_i32()?;
@ -36,14 +36,14 @@ fn is_mutex_kind_normal<'mir, 'tcx: 'mir>(
}
fn mutexattr_get_kind<'mir, 'tcx: 'mir>(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
attr_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, Scalar<Provenance>> {
ecx.read_scalar_at_offset(attr_op, 0, ecx.machine.layouts.i32)
}
fn mutexattr_set_kind<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
attr_op: &OpTy<'tcx, Provenance>,
kind: impl Into<Scalar<Provenance>>,
) -> InterpResult<'tcx, ()> {
@ -60,7 +60,7 @@ fn mutexattr_set_kind<'mir, 'tcx: 'mir>(
// (the kind has to be at its offset for compatibility with static initializer macros)
fn mutex_get_kind<'mir, 'tcx: 'mir>(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
mutex_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, Scalar<Provenance>> {
let offset = if ecx.pointer_size().bytes() == 8 { 16 } else { 12 };
@ -73,7 +73,7 @@ fn mutex_get_kind<'mir, 'tcx: 'mir>(
}
fn mutex_set_kind<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
mutex_op: &OpTy<'tcx, Provenance>,
kind: impl Into<Scalar<Provenance>>,
) -> InterpResult<'tcx, ()> {
@ -88,14 +88,14 @@ fn mutex_set_kind<'mir, 'tcx: 'mir>(
}
fn mutex_get_id<'mir, 'tcx: 'mir>(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
mutex_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, Scalar<Provenance>> {
ecx.read_scalar_at_offset_atomic(mutex_op, 4, ecx.machine.layouts.u32, AtomicReadOrd::Relaxed)
}
fn mutex_set_id<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
mutex_op: &OpTy<'tcx, Provenance>,
id: impl Into<Scalar<Provenance>>,
) -> InterpResult<'tcx, ()> {
@ -109,7 +109,7 @@ fn mutex_set_id<'mir, 'tcx: 'mir>(
}
fn mutex_get_or_create_id<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
mutex_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, MutexId> {
let value_place = ecx.deref_operand_and_offset(mutex_op, 4, ecx.machine.layouts.u32)?;
@ -143,14 +143,14 @@ fn mutex_get_or_create_id<'mir, 'tcx: 'mir>(
// bytes 4-7: rwlock id as u32 or 0 if id is not assigned yet.
fn rwlock_get_id<'mir, 'tcx: 'mir>(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
rwlock_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, Scalar<Provenance>> {
ecx.read_scalar_at_offset_atomic(rwlock_op, 4, ecx.machine.layouts.u32, AtomicReadOrd::Relaxed)
}
fn rwlock_get_or_create_id<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
rwlock_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, RwLockId> {
let value_place = ecx.deref_operand_and_offset(rwlock_op, 4, ecx.machine.layouts.u32)?;
@ -183,14 +183,14 @@ fn rwlock_get_or_create_id<'mir, 'tcx: 'mir>(
// (e.g. CLOCK_REALTIME).
fn condattr_get_clock_id<'mir, 'tcx: 'mir>(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
attr_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, Scalar<Provenance>> {
ecx.read_scalar_at_offset(attr_op, 0, ecx.machine.layouts.i32)
}
fn condattr_set_clock_id<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
attr_op: &OpTy<'tcx, Provenance>,
clock_id: impl Into<Scalar<Provenance>>,
) -> InterpResult<'tcx, ()> {
@ -212,14 +212,14 @@ fn condattr_set_clock_id<'mir, 'tcx: 'mir>(
// bytes 8-11: the clock id constant as i32
fn cond_get_id<'mir, 'tcx: 'mir>(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
cond_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, Scalar<Provenance>> {
ecx.read_scalar_at_offset_atomic(cond_op, 4, ecx.machine.layouts.u32, AtomicReadOrd::Relaxed)
}
fn cond_set_id<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
cond_op: &OpTy<'tcx, Provenance>,
id: impl Into<Scalar<Provenance>>,
) -> InterpResult<'tcx, ()> {
@ -233,7 +233,7 @@ fn cond_set_id<'mir, 'tcx: 'mir>(
}
fn cond_get_or_create_id<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
cond_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, CondvarId> {
let value_place = ecx.deref_operand_and_offset(cond_op, 4, ecx.machine.layouts.u32)?;
@ -260,14 +260,14 @@ fn cond_get_or_create_id<'mir, 'tcx: 'mir>(
}
fn cond_get_clock_id<'mir, 'tcx: 'mir>(
ecx: &MiriEvalContext<'mir, 'tcx>,
ecx: &MiriInterpCx<'mir, 'tcx>,
cond_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, Scalar<Provenance>> {
ecx.read_scalar_at_offset(cond_op, 8, ecx.machine.layouts.i32)
}
fn cond_set_clock_id<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
cond_op: &OpTy<'tcx, Provenance>,
clock_id: impl Into<Scalar<Provenance>>,
) -> InterpResult<'tcx, ()> {
@ -282,7 +282,7 @@ fn cond_set_clock_id<'mir, 'tcx: 'mir>(
/// Try to reacquire the mutex associated with the condition variable after we
/// were signaled.
fn reacquire_cond_mutex<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
thread: ThreadId,
mutex: MutexId,
) -> InterpResult<'tcx> {
@ -299,7 +299,7 @@ fn reacquire_cond_mutex<'mir, 'tcx: 'mir>(
/// Reacquire the conditional variable and remove the timeout callback if any
/// was registered.
fn post_cond_signal<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
thread: ThreadId,
mutex: MutexId,
) -> InterpResult<'tcx> {
@ -313,7 +313,7 @@ fn post_cond_signal<'mir, 'tcx: 'mir>(
/// Release the mutex associated with the condition variable because we are
/// entering the waiting state.
fn release_cond_mutex_and_block<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
active_thread: ThreadId,
mutex: MutexId,
) -> InterpResult<'tcx> {
@ -328,8 +328,8 @@ fn release_cond_mutex_and_block<'mir, 'tcx: 'mir>(
Ok(())
}
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn pthread_mutexattr_init(
&mut self,
attr_op: &OpTy<'tcx, Provenance>,
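The helpers above implement the layout comments literally: the mutex/rwlock/condvar id occupies bytes 4..8 of the pthread object (0 meaning not assigned yet), the condvar clock id sits in bytes 8..12, and everything is accessed through the *_at_offset calls. Here is a standalone sketch of that byte-offset convention; the buffer size and the little-endian encoding are simplifying assumptions, since the real code reads scalars with the target's layout.

// Sketch of the "field at a fixed byte offset" convention; little-endian and the
// 56-byte buffer are assumptions for the example only.
fn read_u32_at(buf: &[u8], offset: usize) -> u32 {
    u32::from_le_bytes(buf[offset..offset + 4].try_into().unwrap())
}

fn write_u32_at(buf: &mut [u8], offset: usize, value: u32) {
    buf[offset..offset + 4].copy_from_slice(&value.to_le_bytes());
}

fn main() {
    // Zero-initialized rwlock-sized buffer: bytes 4..8 hold the id, 0 = not assigned yet.
    let mut rwlock = [0u8; 56];
    assert_eq!(read_u32_at(&rwlock, 4), 0);
    write_u32_at(&mut rwlock, 4, 7); // lazily assign an id on first use
    assert_eq!(read_u32_at(&rwlock, 4), 7);
}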

View File

@ -2,8 +2,8 @@ use crate::*;
use rustc_middle::ty::layout::LayoutOf;
use rustc_target::spec::abi::Abi;
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn pthread_create(
&mut self,
thread: &OpTy<'tcx, Provenance>,

View File

@ -27,8 +27,8 @@ impl Dlsym {
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn call_dlsym(
&mut self,
dlsym: Dlsym,

View File

@ -12,8 +12,8 @@ use shims::windows::thread::EvalContextExt as _;
use smallvec::SmallVec;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn emulate_foreign_item_by_name(
&mut self,
link_name: Symbol,

View File

@ -145,10 +145,10 @@ impl Handle {
}
}
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
#[allow(non_snake_case)]
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn invalid_handle(&mut self, function_name: &str) -> InterpResult<'tcx, !> {
throw_machine_stop!(TerminationInfo::Abort(format!(
"invalid handle passed to `{function_name}`"

View File

@ -4,7 +4,7 @@ use crate::*;
// We use the first 4 bytes to store the RwLockId.
fn srwlock_get_or_create_id<'mir, 'tcx: 'mir>(
ecx: &mut MiriEvalContext<'mir, 'tcx>,
ecx: &mut MiriInterpCx<'mir, 'tcx>,
lock_op: &OpTy<'tcx, Provenance>,
) -> InterpResult<'tcx, RwLockId> {
let value_place = ecx.deref_operand_and_offset(lock_op, 0, ecx.machine.layouts.u32)?;
@ -30,8 +30,8 @@ fn srwlock_get_or_create_id<'mir, 'tcx: 'mir>(
})
}
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
#[allow(non_snake_case)]
fn AcquireSRWLockExclusive(&mut self, lock_op: &OpTy<'tcx, Provenance>) -> InterpResult<'tcx> {
let this = self.eval_context_mut();

View File

@ -4,10 +4,10 @@ use rustc_target::spec::abi::Abi;
use crate::*;
use shims::windows::handle::{EvalContextExt as _, Handle, PseudoHandle};
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
#[allow(non_snake_case)]
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn CreateThread(
&mut self,
security_op: &OpTy<'tcx, Provenance>,

View File

@ -471,7 +471,9 @@ impl<'span, 'history, 'ecx, 'mir, 'tcx> DiagnosticCx<'span, 'history, 'ecx, 'mir
Some((orig_tag, kind))
}
};
register_diagnostic(NonHaltingDiagnostic::PoppedPointerTag(*item, summary));
self.current_span
.machine()
.emit_diagnostic(NonHaltingDiagnostic::PoppedPointerTag(*item, summary));
}
}
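The diagnostics hunk above replaces the free function register_diagnostic(...) with a call on the machine reference the diagnostic context already carries (current_span.machine().emit_diagnostic(...)). Below is a self-contained sketch contrasting the two shapes; the types and the queue-based emit_diagnostic body are illustrative only, not Miri's actual implementation.

use std::cell::RefCell;

#[derive(Debug)]
enum NonHaltingDiagnostic {
    CreatedCallId(u64),
}

// Old shape: a process-wide side channel, sketched here as a thread-local queue.
thread_local! {
    static DIAGNOSTICS: RefCell<Vec<NonHaltingDiagnostic>> = RefCell::new(Vec::new());
}
fn register_diagnostic(d: NonHaltingDiagnostic) {
    DIAGNOSTICS.with(|q| q.borrow_mut().push(d));
}

// New shape: the machine state that is already in scope handles the diagnostic itself.
struct Machine {
    diagnostics: RefCell<Vec<NonHaltingDiagnostic>>,
}
impl Machine {
    fn emit_diagnostic(&self, d: NonHaltingDiagnostic) {
        self.diagnostics.borrow_mut().push(d);
    }
}

fn main() {
    register_diagnostic(NonHaltingDiagnostic::CreatedCallId(1)); // old: indirect
    let machine = Machine { diagnostics: RefCell::new(Vec::new()) };
    machine.emit_diagnostic(NonHaltingDiagnostic::CreatedCallId(2)); // new: direct
    println!("{:?}", machine.diagnostics.borrow());
}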

View File

@ -178,11 +178,11 @@ impl GlobalStateInner {
id
}
pub fn new_frame(&mut self) -> FrameExtra {
pub fn new_frame(&mut self, machine: &MiriMachine<'_, '_>) -> FrameExtra {
let call_id = self.next_call_id;
trace!("new_frame: Assigning call ID {}", call_id);
if self.tracked_call_ids.contains(&call_id) {
register_diagnostic(NonHaltingDiagnostic::CreatedCallId(call_id));
machine.emit_diagnostic(NonHaltingDiagnostic::CreatedCallId(call_id));
}
self.next_call_id = NonZeroU64::new(call_id.get() + 1).unwrap();
FrameExtra { call_id, protected_tags: SmallVec::new() }
@ -199,11 +199,11 @@ impl GlobalStateInner {
}
}
pub fn base_ptr_tag(&mut self, id: AllocId) -> SbTag {
pub fn base_ptr_tag(&mut self, id: AllocId, machine: &MiriMachine<'_, '_>) -> SbTag {
self.base_ptr_tags.get(&id).copied().unwrap_or_else(|| {
let tag = self.new_ptr();
if self.tracked_pointer_tags.contains(&tag) {
register_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(tag.0, None));
machine.emit_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(tag.0, None));
}
trace!("New allocation {:?} has base tag {:?}", id, tag);
self.base_ptr_tags.try_insert(id, tag).unwrap();
@ -572,9 +572,10 @@ impl Stacks {
// not through a pointer). That is, whenever we directly write to a local, this will pop
// everything else off the stack, invalidating all previous pointers,
// and in particular, *all* raw pointers.
MemoryKind::Stack => (extra.base_ptr_tag(id), Permission::Unique),
MemoryKind::Stack =>
(extra.base_ptr_tag(id, current_span.machine()), Permission::Unique),
// Everything else is shared by default.
_ => (extra.base_ptr_tag(id), Permission::SharedReadWrite),
_ => (extra.base_ptr_tag(id, current_span.machine()), Permission::SharedReadWrite),
};
Stacks::new(size, perm, base_tag, id, &mut current_span)
}
@ -651,10 +652,10 @@ impl Stacks {
/// Retagging/reborrowing. There is some policy in here, such as which permissions
/// to grant for which references, and when to add protectors.
impl<'mir: 'ecx, 'tcx: 'mir, 'ecx> EvalContextPrivExt<'mir, 'tcx, 'ecx>
for crate::MiriEvalContext<'mir, 'tcx>
for crate::MiriInterpCx<'mir, 'tcx>
{
}
trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriEvalContextExt<'mir, 'tcx> {
trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'mir, 'tcx> {
/// Returns the `AllocId` the reborrow was done in, if some actual borrow stack manipulation
/// happened.
fn reborrow(
@ -669,12 +670,12 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriEvalContextEx
let this = self.eval_context_mut();
// It is crucial that this gets called on all code paths, to ensure we track tag creation.
let log_creation = |this: &MiriEvalContext<'mir, 'tcx>,
let log_creation = |this: &MiriInterpCx<'mir, 'tcx>,
loc: Option<(AllocId, Size, ProvenanceExtra)>| // alloc_id, base_offset, orig_tag
-> InterpResult<'tcx> {
let global = this.machine.stacked_borrows.as_ref().unwrap().borrow();
if global.tracked_pointer_tags.contains(&new_tag) {
register_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(
this.emit_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(
new_tag.0,
loc.map(|(alloc_id, base_offset, _)| (alloc_id, alloc_range(base_offset, size))),
));
@ -688,7 +689,7 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriEvalContextEx
let (_size, _align, alloc_kind) = this.get_alloc_info(alloc_id);
match alloc_kind {
AllocKind::LiveData => {
let current_span = &mut this.machine.current_span(*this.tcx);
let current_span = &mut this.machine.current_span();
// This should have alloc_extra data, but `get_alloc_extra` can still fail
// if converting this alloc_id from a global to a local one
// uncovers a non-supported `extern static`.
@ -805,7 +806,7 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriEvalContextEx
.expect("we should have Stacked Borrows data")
.borrow_mut();
// FIXME: can't share this with the current_span inside log_creation
let mut current_span = this.machine.current_span(*this.tcx);
let mut current_span = this.machine.current_span();
this.visit_freeze_sensitive(place, size, |mut range, frozen| {
// Adjust range.
range.start += base_offset;
@ -843,7 +844,6 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriEvalContextEx
// Here we can avoid `borrow()` calls because we have mutable references.
// Note that this asserts that the allocation is mutable -- but since we are creating a
// mutable pointer, that seems reasonable.
let tcx = *this.tcx;
let (alloc_extra, machine) = this.get_alloc_extra_mut(alloc_id)?;
let mut stacked_borrows = alloc_extra
.stacked_borrows
@ -854,7 +854,7 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriEvalContextEx
let range = alloc_range(base_offset, size);
let mut global = machine.stacked_borrows.as_ref().unwrap().borrow_mut();
// FIXME: can't share this with the current_span inside log_creation
let current_span = &mut machine.current_span(tcx);
let current_span = &mut machine.current_span();
let dcx = DiagnosticCxBuilder::retag(
current_span,
&machine.threads,
@ -920,8 +920,8 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriEvalContextEx
}
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn retag(&mut self, kind: RetagKind, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
let retag_fields = this.machine.stacked_borrows.as_mut().unwrap().get_mut().retag_fields;
@ -957,7 +957,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
// The actual visitor.
struct RetagVisitor<'ecx, 'mir, 'tcx> {
ecx: &'ecx mut MiriEvalContext<'mir, 'tcx>,
ecx: &'ecx mut MiriInterpCx<'mir, 'tcx>,
kind: RetagKind,
retag_cause: RetagCause,
retag_fields: bool,
@ -977,13 +977,13 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
Ok(())
}
}
impl<'ecx, 'mir, 'tcx> MutValueVisitor<'mir, 'tcx, Evaluator<'mir, 'tcx>>
impl<'ecx, 'mir, 'tcx> MutValueVisitor<'mir, 'tcx, MiriMachine<'mir, 'tcx>>
for RetagVisitor<'ecx, 'mir, 'tcx>
{
type V = PlaceTy<'tcx, Provenance>;
#[inline(always)]
fn ecx(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
fn ecx(&mut self) -> &mut MiriInterpCx<'mir, 'tcx> {
self.ecx
}
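In the stacked-borrows hunks above, new_frame and base_ptr_tag now take a &MiriMachine so they can emit their CreatedCallId/CreatedPointerTag diagnostics through it, and current_span() no longer needs a tcx argument. The heart of base_ptr_tag is a get-or-create over a per-allocation table; here is a self-contained analogue where u64 ids, a closure standing in for the machine's diagnostic sink, and an if-let in place of unwrap_or_else/try_insert are all simplifications.

use std::collections::{HashMap, HashSet};

// Analogue of base_ptr_tag: look up the cached base tag for an allocation id,
// or mint a fresh one, report it if tracked, and cache it.
struct GlobalState {
    next_tag: u64,
    base_ptr_tags: HashMap<u64, u64>, // allocation id -> base tag
    tracked_pointer_tags: HashSet<u64>,
}

impl GlobalState {
    fn base_ptr_tag(&mut self, id: u64, mut emit: impl FnMut(String)) -> u64 {
        if let Some(&tag) = self.base_ptr_tags.get(&id) {
            return tag;
        }
        let tag = self.next_tag;
        self.next_tag += 1;
        if self.tracked_pointer_tags.contains(&tag) {
            // The real code emits NonHaltingDiagnostic::CreatedPointerTag via the machine.
            emit(format!("created tracked base tag {tag} for allocation {id}"));
        }
        self.base_ptr_tags.insert(id, tag);
        tag
    }
}

fn main() {
    let mut gs = GlobalState {
        next_tag: 0,
        base_ptr_tags: HashMap::new(),
        tracked_pointer_tags: HashSet::from([0]),
    };
    let t1 = gs.base_ptr_tag(42, |msg| println!("{msg}"));
    let t2 = gs.base_ptr_tag(42, |msg| println!("{msg}")); // cached: same tag, no second report
    assert_eq!(t1, t2);
}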

View File

@ -1,8 +1,8 @@
use crate::*;
use rustc_data_structures::fx::FxHashSet;
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriInterpCxExt<'mir, 'tcx> {
fn garbage_collect_tags(&mut self) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
// No reason to do anything at all if stacked borrows is off.