Add `UnwindAction::Terminate`

Gary Guo 2022-10-10 22:40:40 +01:00
parent 5e6ed132fa
commit 0a5dac3062
16 changed files with 126 additions and 121 deletions


@@ -1663,8 +1663,12 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
self.assert_iscleanup(body, ctxt, unwind, true);
}
UnwindAction::Continue => (),
UnwindAction::Unreachable => (),
UnwindAction::Continue => {
if is_cleanup {
span_mirbug!(self, ctxt, "unwind on cleanup block")
}
}
UnwindAction::Unreachable | UnwindAction::Terminate => (),
}
}


@@ -170,22 +170,17 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
let unwind_block = match unwind {
mir::UnwindAction::Cleanup(cleanup) => Some(self.llbb_with_cleanup(fx, cleanup)),
_ if fx.mir[self.bb].is_cleanup
&& fn_abi.can_unwind
&& !base::wants_msvc_seh(fx.cx.tcx().sess) =>
{
// Exceptions must not propagate out of the execution of a cleanup (doing so
// can cause undefined behaviour). We insert a double unwind guard for
// functions that can potentially unwind to protect against this.
//
// This is not necessary for SEH which does not use successive unwinding
// like Itanium EH. EH frames in SEH are different from normal function
// frames and SEH will abort automatically if an exception tries to
// propagate out from cleanup.
Some(fx.double_unwind_guard())
}
mir::UnwindAction::Continue => None,
mir::UnwindAction::Unreachable => None,
mir::UnwindAction::Terminate => {
if fx.mir[self.bb].is_cleanup && base::wants_msvc_seh(fx.cx.tcx().sess) {
// SEH will abort automatically if an exception tries to
// propagate out from cleanup.
None
} else {
Some(fx.terminate_block())
}
}
};
if let Some(unwind_block) = unwind_block {
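The concern in the comment above, an exception escaping the execution of a cleanup, is easiest to see from the user side. Below is a small standalone program (not part of this diff) that hits exactly that situation; the runtime has to terminate the process rather than let the second panic continue past the cleanup:

struct Guard;

impl Drop for Guard {
    fn drop(&mut self) {
        // This drop runs as cleanup while the first panic is still unwinding.
        // A second panic must not propagate out of the cleanup path, so the
        // process is terminated instead.
        panic!("panic while unwinding");
    }
}

fn main() {
    let _g = Guard;
    panic!("first panic"); // starts unwinding, which runs Guard::drop
}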
@@ -253,7 +248,14 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
instance: Instance<'_>,
mergeable_succ: bool,
) -> MergingSucc {
if let mir::UnwindAction::Cleanup(cleanup) = unwind {
let unwind_target = match unwind {
mir::UnwindAction::Cleanup(cleanup) => Some(self.llbb_with_cleanup(fx, cleanup)),
mir::UnwindAction::Terminate => Some(fx.terminate_block()),
mir::UnwindAction::Continue => None,
mir::UnwindAction::Unreachable => None,
};
if let Some(cleanup) = unwind_target {
let ret_llbb = if let Some(target) = destination {
fx.llbb(target)
} else {
@@ -266,7 +268,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
options,
line_spans,
instance,
Some((ret_llbb, self.llbb_with_cleanup(fx, cleanup), self.funclet(fx))),
Some((ret_llbb, cleanup, self.funclet(fx))),
);
MergingSucc::False
} else {
@@ -1551,62 +1553,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
fn landing_pad_for_uncached(&mut self, bb: mir::BasicBlock) -> Bx::BasicBlock {
let llbb = self.llbb(bb);
if base::wants_msvc_seh(self.cx.sess()) {
let funclet;
let ret_llbb;
match self.mir[bb].terminator.as_ref().map(|t| &t.kind) {
// This is a basic block that we're aborting the program for,
// notably in an `extern` function. These basic blocks are inserted
// so that we assert that `extern` functions do indeed not panic,
// and if they do we abort the process.
//
// On MSVC these are tricky though (where we're doing funclets). If
// we were to do a cleanuppad (like below) the normal functions like
// `longjmp` would trigger the abort logic, terminating the
// program. Instead we insert the equivalent of `catch(...)` for C++
// which magically doesn't trigger when `longjmp` flies over this
// frame.
//
// Lots more discussion can be found on #48251 but this codegen is
// modeled after clang's for:
//
// try {
// foo();
// } catch (...) {
// bar();
// }
Some(&mir::TerminatorKind::Abort) => {
let cs_llbb =
Bx::append_block(self.cx, self.llfn, &format!("cs_funclet{:?}", bb));
let cp_llbb =
Bx::append_block(self.cx, self.llfn, &format!("cp_funclet{:?}", bb));
ret_llbb = cs_llbb;
let mut cs_bx = Bx::build(self.cx, cs_llbb);
let cs = cs_bx.catch_switch(None, None, &[cp_llbb]);
// The "null" here is actually a RTTI type descriptor for the
// C++ personality function, but `catch (...)` has no type so
// it's null. The 64 here is actually a bitfield which
// represents that this is a catch-all block.
let mut cp_bx = Bx::build(self.cx, cp_llbb);
let null = cp_bx.const_null(
cp_bx.type_i8p_ext(cp_bx.cx().data_layout().instruction_address_space),
);
let sixty_four = cp_bx.const_i32(64);
funclet = cp_bx.catch_pad(cs, &[null, sixty_four, null]);
cp_bx.br(llbb);
}
_ => {
let cleanup_llbb =
Bx::append_block(self.cx, self.llfn, &format!("funclet_{:?}", bb));
ret_llbb = cleanup_llbb;
let mut cleanup_bx = Bx::build(self.cx, cleanup_llbb);
funclet = cleanup_bx.cleanup_pad(None, &[]);
cleanup_bx.br(llbb);
}
}
let cleanup_bb = Bx::append_block(self.cx, self.llfn, &format!("funclet_{:?}", bb));
let mut cleanup_bx = Bx::build(self.cx, cleanup_bb);
let funclet = cleanup_bx.cleanup_pad(None, &[]);
cleanup_bx.br(llbb);
self.funclets[bb] = Some(funclet);
ret_llbb
cleanup_bb
} else {
let cleanup_llbb = Bx::append_block(self.cx, self.llfn, "cleanup");
let mut cleanup_bx = Bx::build(self.cx, cleanup_llbb);
@@ -1633,26 +1585,68 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
})
}
fn double_unwind_guard(&mut self) -> Bx::BasicBlock {
self.double_unwind_guard.unwrap_or_else(|| {
assert!(!base::wants_msvc_seh(self.cx.sess()));
fn terminate_block(&mut self) -> Bx::BasicBlock {
self.terminate_block.unwrap_or_else(|| {
let funclet;
let llbb;
let mut bx;
if base::wants_msvc_seh(self.cx.sess()) {
// This is a basic block that we're aborting the program for,
// notably in an `extern` function. These basic blocks are inserted
// so that we assert that `extern` functions do indeed not panic,
// and if they do we abort the process.
//
// On MSVC these are tricky though (where we're doing funclets). If
// we were to do a cleanuppad (like below) the normal functions like
// `longjmp` would trigger the abort logic, terminating the
// program. Instead we insert the equivalent of `catch(...)` for C++
// which magically doesn't trigger when `longjmp` flies over this
// frame.
//
// Lots more discussion can be found on #48251 but this codegen is
// modeled after clang's for:
//
// try {
// foo();
// } catch (...) {
// bar();
// }
llbb = Bx::append_block(self.cx, self.llfn, "cs_terminate");
let cp_llbb = Bx::append_block(self.cx, self.llfn, "cp_terminate");
let mut cs_bx = Bx::build(self.cx, llbb);
let cs = cs_bx.catch_switch(None, None, &[llbb]);
// The "null" here is actually a RTTI type descriptor for the
// C++ personality function, but `catch (...)` has no type so
// it's null. The 64 here is actually a bitfield which
// represents that this is a catch-all block.
bx = Bx::build(self.cx, cp_llbb);
let null =
bx.const_null(bx.type_i8p_ext(bx.cx().data_layout().instruction_address_space));
let sixty_four = bx.const_i32(64);
funclet = Some(bx.catch_pad(cs, &[null, sixty_four, null]));
} else {
llbb = Bx::append_block(self.cx, self.llfn, "terminate");
bx = Bx::build(self.cx, llbb);
let llpersonality = self.cx.eh_personality();
bx.cleanup_landing_pad(llpersonality);
funclet = None;
}
let llbb = Bx::append_block(self.cx, self.llfn, "abort");
let mut bx = Bx::build(self.cx, llbb);
self.set_debug_loc(&mut bx, mir::SourceInfo::outermost(self.mir.span));
let llpersonality = self.cx.eh_personality();
bx.cleanup_landing_pad(llpersonality);
let (fn_abi, fn_ptr) = common::build_langcall(&bx, None, LangItem::PanicCannotUnwind);
let fn_ty = bx.fn_decl_backend_type(&fn_abi);
let llret = bx.call(fn_ty, Some(&fn_abi), fn_ptr, &[], None);
let llret = bx.call(fn_ty, Some(&fn_abi), fn_ptr, &[], funclet.as_ref());
bx.do_not_inline(llret);
bx.unreachable();
self.double_unwind_guard = Some(llbb);
self.terminate_block = Some(llbb);
llbb
})
}
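At the source level, the terminate block built here is what an unwind reaches when it tries to leave a frame that is not allowed to unwind. A hedged illustration in ordinary user code (not part of this diff); the abort itself comes from the `PanicCannotUnwind` lang item called at the end of the block:

extern "C" fn ffi_entry() {
    // The "C" ABI is not allowed to unwind, so the unwind edge of this call
    // is routed to the terminate block, which calls the `PanicCannotUnwind`
    // lang item and aborts instead of letting the panic cross the boundary.
    may_panic();
}

fn may_panic() {
    panic!("boom");
}

fn main() {
    ffi_entry(); // aborts once the panic reaches the non-unwindable frame
}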


@@ -74,7 +74,7 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
unreachable_block: Option<Bx::BasicBlock>,
/// Cached double unwind guarding block
double_unwind_guard: Option<Bx::BasicBlock>,
terminate_block: Option<Bx::BasicBlock>,
/// The location where each MIR arg/var/tmp/ret is stored. This is
/// usually an `PlaceRef` representing an alloca, but not always:
@@ -189,7 +189,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
personality_slot: None,
cached_llbbs,
unreachable_block: None,
double_unwind_guard: None,
terminate_block: None,
cleanup_kinds,
landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),


@@ -736,6 +736,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
mir::UnwindAction::Unreachable => {
throw_ub_format!("unwinding past a stack frame that does not allow unwinding")
}
mir::UnwindAction::Terminate => {
M::abort(self, "panic in a function that cannot unwind".to_owned())?;
}
};
Ok(())
}


@@ -14,6 +14,7 @@ pub struct MirPatch<'tcx> {
resume_block: Option<BasicBlock>,
// Only for unreachable in cleanup path.
unreachable_block: Option<BasicBlock>,
terminate_block: Option<BasicBlock>,
body_span: Span,
next_local: usize,
}
@@ -28,6 +29,7 @@ impl<'tcx> MirPatch<'tcx> {
next_local: body.local_decls.len(),
resume_block: None,
unreachable_block: None,
terminate_block: None,
body_span: body.span,
};


@@ -763,6 +763,10 @@ pub enum UnwindAction {
Continue,
/// Triggers undefined behavior if unwind happens.
Unreachable,
/// Terminates the execution if unwind happens.
///
/// Depending on the platform and situation this may cause a non-unwindable panic or abort.
Terminate,
/// Cleanups to be done.
Cleanup(BasicBlock),
}
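A consumer of MIR now has four unwind behaviors to handle. A minimal, self-contained sketch of the dispatch, using local stand-in types instead of rustc's (the block index and the handler bodies are illustrative only):

#[allow(dead_code)]
enum UnwindAction {
    Continue,       // propagate the unwind to the caller
    Unreachable,    // unwinding here would be undefined behavior
    Terminate,      // stop execution: non-unwindable panic or abort
    Cleanup(usize), // run the cleanup block with this index first
}

fn on_unwind(action: UnwindAction) {
    match action {
        UnwindAction::Continue => println!("resume unwinding into the caller"),
        UnwindAction::Unreachable => unreachable!("this edge is never taken"),
        UnwindAction::Terminate => std::process::abort(),
        UnwindAction::Cleanup(bb) => println!("jump to cleanup block bb{bb}"),
    }
}

fn main() {
    on_unwind(UnwindAction::Cleanup(3));
    on_unwind(UnwindAction::Continue);
}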


@@ -274,6 +274,7 @@ impl<'tcx> Debug for TerminatorKind<'tcx> {
// Not needed or included in successors
None | Some(UnwindAction::Continue) | Some(UnwindAction::Cleanup(_)) => None,
Some(UnwindAction::Unreachable) => Some("unwind unreachable"),
Some(UnwindAction::Terminate) => Some("unwind terminate"),
};
match (successor_count, unwind) {


@@ -469,7 +469,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
} else {
Some(destination_block)
},
unwind: UnwindAction::Continue,
unwind: if options.contains(InlineAsmOptions::MAY_UNWIND) {
UnwindAction::Continue
} else {
UnwindAction::Unreachable
},
},
);
if options.contains(InlineAsmOptions::MAY_UNWIND) {
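The practical effect shows up in how inline assembly is written: only `asm!` blocks marked `may_unwind` keep an unwind edge out of the block. A hedged x86-64 sketch (nightly-only: it needs the unstable `asm_unwind` feature, and `extern "C-unwind"` was also still feature-gated when this commit landed; the called symbol is purely illustrative):

#![feature(asm_unwind)]
use std::arch::asm;

extern "C-unwind" fn may_panic() {
    panic!("unwinds out of the asm block");
}

fn main() {
    unsafe {
        // With `options(may_unwind)` the MIR terminator keeps
        // `UnwindAction::Continue`; without the option its unwind edge is
        // now `UnwindAction::Unreachable`.
        asm!(
            "call {f}",
            f = sym may_panic,
            clobber_abi("C"),
            options(may_unwind),
        );
    }
}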


@@ -369,7 +369,7 @@ impl DropTree {
let terminator = TerminatorKind::Drop {
target: blocks[drop_data.1].unwrap(),
// The caller will handle this if needed.
unwind: UnwindAction::Continue,
unwind: UnwindAction::Terminate,
place: drop_data.0.local.into(),
};
cfg.terminate(block, drop_data.0.source_info, terminator);


@@ -80,7 +80,7 @@ impl Unwind {
fn into_action(self) -> UnwindAction {
match self {
Unwind::To(bb) => UnwindAction::Cleanup(bb),
Unwind::InCleanup => UnwindAction::Continue,
Unwind::InCleanup => UnwindAction::Terminate,
}
}
@@ -946,7 +946,7 @@ where
args,
destination: unit_temp,
target: Some(target),
unwind: UnwindAction::Unreachable,
unwind: UnwindAction::Terminate,
from_hir_call: false,
fn_span: self.source_info.span,
}; // FIXME(#43234)


@@ -34,11 +34,6 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls {
return;
}
// This pass only runs on functions which themselves cannot unwind,
// forcibly changing the body of the function to structurally provide
// this guarantee by aborting on an unwind. If this function can unwind,
// then there's nothing to do because it already should work correctly.
//
// Here we test for this function itself whether its ABI allows
// unwinding or not.
let body_ty = tcx.type_of(def_id).skip_binder();
@@ -107,26 +102,9 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls {
}
}
// For call instructions which need to be terminated, we insert a
// singular basic block which simply terminates, and then configure the
// `cleanup` attribute for all calls we found to this basic block we
// insert which means that any unwinding that happens in the functions
// will force an abort of the process.
if !calls_to_terminate.is_empty() {
let bb = BasicBlockData {
statements: Vec::new(),
is_cleanup: true,
terminator: Some(Terminator {
source_info: SourceInfo::outermost(body.span),
kind: TerminatorKind::Abort,
}),
};
let abort_bb = body.basic_blocks_mut().push(bb);
for bb in calls_to_terminate {
let cleanup = body.basic_blocks_mut()[bb].terminator_mut().unwind_mut().unwrap();
*cleanup = UnwindAction::Cleanup(abort_bb);
}
for id in calls_to_terminate {
let cleanup = body.basic_blocks_mut()[id].terminator_mut().unwind_mut().unwrap();
*cleanup = UnwindAction::Terminate;
}
for id in cleanups_to_remove {
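The pass no longer appends a shared `Abort` cleanup block and re-points every offending call at it; it just tags each such call's unwind edge as `UnwindAction::Terminate`. A hedged, self-contained sketch of the per-call rule it implements (function name and shape are illustrative, the real pass walks MIR terminators and uses `unwind_mut`): inside a body whose ABI cannot unwind, any call that itself can unwind is made to terminate.

#[derive(Clone, Copy, Debug, PartialEq)]
enum UnwindAction {
    Continue,
    Terminate,
}

// Illustrative decision rule: callers that must not unwind turn the unwind
// edges of their unwinding callees into `Terminate`; everything else is kept.
fn rewrite_unwind(
    body_can_unwind: bool,
    call_can_unwind: bool,
    current: UnwindAction,
) -> UnwindAction {
    if !body_can_unwind && call_can_unwind { UnwindAction::Terminate } else { current }
}

fn main() {
    // A panicking call inside an `extern "C"` body now aborts on unwind.
    assert_eq!(rewrite_unwind(false, true, UnwindAction::Continue), UnwindAction::Terminate);
    // Inside an unwindable (e.g. Rust ABI) body nothing changes.
    assert_eq!(rewrite_unwind(true, true, UnwindAction::Continue), UnwindAction::Continue);
}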


@@ -417,7 +417,9 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
UnwindAction::Unreachable => {
Unwind::To(self.patch.unreachable_block())
}
UnwindAction::Terminate => {
Unwind::To(self.patch.terminate_block())
}
}
};
elaborate_drop(
@@ -558,7 +560,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
if let TerminatorKind::Call {
destination,
target: Some(_),
unwind: UnwindAction::Continue,
unwind: UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate,
..
} = data.terminator().kind
{


@@ -1064,6 +1064,7 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
UnwindAction::Cleanup(tgt) => tgt,
UnwindAction::Continue => elaborator.patch.resume_block(),
UnwindAction::Unreachable => elaborator.patch.unreachable_block(),
UnwindAction::Terminate => elaborator.patch.terminate_block(),
})
};
elaborate_drop(


@@ -1017,15 +1017,15 @@ impl Integrator<'_, '_> {
fn map_unwind(&self, unwind: UnwindAction) -> UnwindAction {
if self.in_cleanup_block {
match unwind {
UnwindAction::Cleanup(_) => {
UnwindAction::Cleanup(_) | UnwindAction::Continue => {
bug!("cleanup on cleanup block");
}
UnwindAction::Continue | UnwindAction::Unreachable => return unwind,
UnwindAction::Unreachable | UnwindAction::Terminate => return unwind,
}
}
match unwind {
UnwindAction::Unreachable => unwind,
UnwindAction::Unreachable | UnwindAction::Terminate => unwind,
UnwindAction::Cleanup(target) => UnwindAction::Cleanup(self.map_block(target)),
// Add an unwind edge to the original call's cleanup block
UnwindAction::Continue => self.cleanup_block,
@@ -1141,7 +1141,10 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
terminator.kind = TerminatorKind::Goto { target: tgt };
}
UnwindAction::Continue => (),
UnwindAction::Unreachable => {
UnwindAction::Unreachable | UnwindAction::Terminate => {
// If the action is terminate, then we would have marked all our
// call-sites as `UnwindAction::Terminate`, and no cleanup blocks
// would ever be executed.
terminator.kind = TerminatorKind::Unreachable;
}
},


@@ -54,7 +54,6 @@ fn lower_slice_len_call<'tcx>(
args,
destination,
target: Some(bb),
unwind: UnwindAction::Unreachable,
from_hir_call: true,
..
} => {


@@ -872,6 +872,16 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
| mir::TerminatorKind::FalseUnwind { .. } => bug!(),
}
if let Some(mir::UnwindAction::Terminate) = terminator.unwind() {
let instance = Instance::mono(
tcx,
tcx.require_lang_item(LangItem::PanicCannotUnwind, Some(source)),
);
if should_codegen_locally(tcx, &instance) {
self.output.push(create_fn_mono_item(tcx, instance, source));
}
}
self.super_terminator(terminator, location);
}
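The collector has to pull the `PanicCannotUnwind` lang item into the codegen unit whenever a terminator can terminate on unwind, because the terminate block emitted by codegen calls that item. As a behavioral stand-in only (the name, signature, and body below are assumptions based on the lang-item name and the interpreter message earlier in this commit, not code from it):

// What the symbol forced in here roughly does at runtime: report the
// situation and abort without starting another unwind.
fn panic_cannot_unwind_stub() -> ! {
    eprintln!("panic in a function that cannot unwind");
    std::process::abort()
}

fn main() {
    panic_cannot_unwind_stub();
}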