[CaptureTracking] Handle capturing of launder.invariant.group

Summary:
launder.invariant.group has the same capture rules as
bitcast, gep, etc.: the original value is not captured
if the returned pointer is not captured.

With this patch, we mark about 40% more functions as noalias when compiling with -fstrict-vtable-pointers:
1778 functions vs. 1078 previously (a 39.37% difference relative to the new total).

Reviewers: sanjoy, davide, nlewycky, majnemer, mehdi_amini

Subscribers: JDevlieghere, llvm-commits

Differential Revision: https://reviews.llvm.org/D32673

llvm-svn: 331587
This commit is contained in:
Piotr Padlewski 2018-05-05 10:23:27 +00:00
parent c2ad096845
commit e9832dfdf3
2 changed files with 41 additions and 22 deletions

View File

@ -215,18 +215,22 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
SmallVector<const Use *, Threshold> Worklist;
SmallSet<const Use *, Threshold> Visited;
int Count = 0;
for (const Use &U : V->uses()) {
// If there are lots of uses, conservatively say that the value
// is captured to avoid taking too much compile time.
if (Count++ >= Threshold)
return Tracker->tooManyUses();
if (!Tracker->shouldExplore(&U)) continue;
Visited.insert(&U);
Worklist.push_back(&U);
}
auto AddUses = [&](const Value *V) {
int Count = 0;
for (const Use &U : V->uses()) {
// If there are lots of uses, conservatively say that the value
// is captured to avoid taking too much compile time.
if (Count++ >= Threshold)
return Tracker->tooManyUses();
if (!Visited.insert(&U).second)
continue;
if (!Tracker->shouldExplore(&U))
continue;
Worklist.push_back(&U);
}
};
AddUses(V);
while (!Worklist.empty()) {
const Use *U = Worklist.pop_back_val();
@ -243,6 +247,13 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
if (CS.onlyReadsMemory() && CS.doesNotThrow() && I->getType()->isVoidTy())
break;
// launder.invariant.group only captures pointer by returning it,
// so the pointer wasn't captured if returned pointer is not captured.
if (CS.getIntrinsicID() == Intrinsic::launder_invariant_group) {
AddUses(I);
break;
}
// Volatile operations effectively capture the memory location that they
// load and store to.
if (auto *MI = dyn_cast<MemIntrinsic>(I))
@ -313,17 +324,7 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
case Instruction::Select:
case Instruction::AddrSpaceCast:
// The original value is not captured via this if the new value isn't.
Count = 0;
for (Use &UU : I->uses()) {
// If there are lots of uses, conservatively say that the value
// is captured to avoid taking too much compile time.
if (Count++ >= Threshold)
return Tracker->tooManyUses();
if (Visited.insert(&UU).second)
if (Tracker->shouldExplore(&UU))
Worklist.push_back(&UU);
}
AddUses(I);
break;
case Instruction::ICmp: {
// Don't count comparisons of a no-alias return value against null as

View File

@ -220,3 +220,21 @@ entry:
store volatile i32 0, i32* %gep, align 4
ret void
}
; The laundered pointer %b is only stored *through* (as a store address),
; never stored or returned itself, so the pointer does not escape and the
; analysis should mark %p nocapture.
; NOTE(review): this CHECK omits the "define void @" prefix that the
; captureLaunder CHECK uses; FileCheck substring matching still makes it
; match the define line, but the two directives could be made consistent.
; CHECK: nocaptureLaunder(i8* nocapture %p)
define void @nocaptureLaunder(i8* %p) {
entry:
%b = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
store i8 42, i8* %b
ret void
}
; Global used as an escape sink for the negative test below.
@g2 = global i8* null
; Here the laundered pointer %b is itself stored to a global, which escapes
; it — and since launder.invariant.group forwards its argument, %p is
; captured too, so no nocapture attribute may be inferred.
; CHECK: define void @captureLaunder(i8* %p)
define void @captureLaunder(i8* %p) {
%b = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
store i8* %b, i8** @g2
ret void
}
declare i8* @llvm.launder.invariant.group.p0i8(i8*)