If a GEP index simply advances by multiples of a type of zero size, then replace the index with zero.
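For example (taken from the @bar test added below), in

    %g = getelementptr {{}, [0 x {[0 x i8]}]}* %p, i64 %n, i32 1, i64 %n, i32 0, i64 %n

the first %n advances by multiples of the zero-size struct {{}, [0 x {[0 x i8]}]} and the
second %n by multiples of the zero-size element type {[0 x i8]}, so both are replaced with
zero:

    %g = getelementptr {{}, [0 x {[0 x i8]}]}* %p, i64 0, i32 1, i64 0, i32 0, i64 %n

The last index is over i8, which has nonzero size, so it is left alone.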

llvm-svn: 119974
commit c133c54426 (parent 77d11d054c)
Author: Duncan Sands
Date:   2010-11-22 16:32:50 +00:00
3 changed files with 35 additions and 16 deletions


@@ -714,7 +714,7 @@ Value *llvm::SimplifyGEPInst(Value *const *Ops, unsigned NumOps,
     // getelementptr P, N -> P if P points to a type of zero size.
     if (TD) {
       const Type *Ty = PtrTy->getElementType();
-      if (Ty->isSized() && !TD->getTypeAllocSize(Ty))
+      if (Ty->isSized() && TD->getTypeAllocSize(Ty) == 0)
         return Ops[0];
     }
   }


@@ -523,25 +523,35 @@ Instruction *InstCombiner::visitGetElementPtrInst(GetElementPtrInst &GEP) {
   Value *PtrOp = GEP.getOperand(0);
 
-  // Eliminate unneeded casts for indices.
+  // Eliminate unneeded casts for indices, and replace indices which displace
+  // by multiples of a zero size type with zero.
   if (TD) {
     bool MadeChange = false;
-    unsigned PtrSize = TD->getPointerSizeInBits();
+    const Type *IntPtrTy = TD->getIntPtrType(GEP.getContext());
 
     gep_type_iterator GTI = gep_type_begin(GEP);
     for (User::op_iterator I = GEP.op_begin() + 1, E = GEP.op_end();
         I != E; ++I, ++GTI) {
-      if (!isa<SequentialType>(*GTI)) continue;
+      // Skip indices into struct types.
+      const SequentialType *SeqTy = dyn_cast<SequentialType>(*GTI);
+      if (!SeqTy) continue;
 
-      // If we are using a wider index than needed for this platform, shrink it
-      // to what we need. If narrower, sign-extend it to what we need. This
-      // explicit cast can make subsequent optimizations more obvious.
-      unsigned OpBits = cast<IntegerType>((*I)->getType())->getBitWidth();
-      if (OpBits == PtrSize)
-        continue;
+      // If the element type has zero size then any index over it is equivalent
+      // to an index of zero, so replace it with zero if it is not zero already.
+      if (SeqTy->getElementType()->isSized() &&
+          TD->getTypeAllocSize(SeqTy->getElementType()) == 0)
+        if (!isa<Constant>(*I) || !cast<Constant>(*I)->isNullValue()) {
+          *I = Constant::getNullValue(IntPtrTy);
+          MadeChange = true;
+        }
 
-      *I = Builder->CreateIntCast(*I, TD->getIntPtrType(GEP.getContext()),true);
-      MadeChange = true;
+      if ((*I)->getType() != IntPtrTy) {
+        // If we are using a wider index than needed for this platform, shrink
+        // it to what we need. If narrower, sign-extend it to what we need.
+        // This explicit cast can make subsequent optimizations more obvious.
+        *I = Builder->CreateIntCast(*I, IntPtrTy, true);
+        MadeChange = true;
+      }
     }
 
     if (MadeChange) return &GEP;
   }
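To illustrate the cast canonicalization handled by the final if above: with the 64-bit
datalayout used in the test below, an i32 index over an i8 array is sign-extended to the
pointer-width integer type. A sketch (%idx.ext is a hypothetical name for the cast that
CreateIntCast inserts):

    %g = getelementptr [10 x i8]* %p, i64 0, i32 %idx

becomes

    %idx.ext = sext i32 %idx to i64
    %g = getelementptr [10 x i8]* %p, i64 0, i64 %idx.ext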


@@ -1,8 +1,17 @@
-; RUN: opt < %s -instcombine -S | not grep getelementptr
+; RUN: opt < %s -instcombine -S | FileCheck %s
 
 target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128"
 
 define {}* @foo({}* %x, i32 %n) {
+; CHECK: @foo
+; CHECK-NOT: getelementptr
   %p = getelementptr {}* %x, i32 %n
   ret {}* %p
 }
+
+define i8* @bar(i64 %n, {{}, [0 x {[0 x i8]}]}* %p) {
+; CHECK: @bar
+  %g = getelementptr {{}, [0 x {[0 x i8]}]}* %p, i64 %n, i32 1, i64 %n, i32 0, i64 %n
+; CHECK: %p, i64 0, i32 1, i64 0, i32 0, i64 %n
+  ret i8* %g
+}