Chris and Evan noticed that this check was completely fubared. I was
checking that there was a load from a global instead of a load from the
stub for a global, which is the one that's safe to hoist.
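To make the distinction concrete, here is a minimal C++ sketch (hypothetical
names such as copyViaStub and StubSlot; not LLVM code or compiler output) of
the two loads involved when a global is reached through a stub: the load of
the stub entry itself is constant for the whole function, while the load of
the global's contents is not.

void copyViaStub(char *F, int N, volatile char *const *StubSlot) {
  // Assume *StubSlot holds the address of the global G from the example below.
  volatile char *P = *StubSlot; // load of the stub entry: loop invariant, safe to hoist
  for (; N > 0; --N)
    F[N] = P[N];                // load of the global's contents: volatile, must stay in the loop
}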

Consider this program:

volatile char G[100];
void B(char *F, int N) {
  for (; N > 0; --N)
    F[N] = G[N];
}

In static mode, we shouldn't be hoisting the load from G; it should stay
inside the loop body (LBB1_2), as it does in the output below:

$ llc -relocation-model=static -o - a.bc -march=x86 -machine-licm

LBB1_1: # bb.preheader
        leal    -1(%eax), %edx
        testl   %edx, %edx
        movl    $1, %edx
        cmovns  %eax, %edx
        xorl    %esi, %esi
LBB1_2: # bb
        movb    _G(%eax), %bl
        movb    %bl, (%ecx,%eax)

llvm-svn: 45626
Bill Wendling 2008-01-05 09:18:04 +00:00
parent ee61d14bf6
commit be984cf10b
1 changed file with 4 additions and 1 deletion

@@ -171,12 +171,15 @@ bool X86InstrInfo::isReallySideEffectFree(MachineInstr *MI) const {
   case X86::MOV32rm:
     if (MI->getOperand(1).isRegister()) {
       unsigned Reg = MI->getOperand(1).getReg();
+      const X86Subtarget &ST = TM.getSubtarget<X86Subtarget>();
       // Loads from global addresses which aren't redefined in the function are
       // side effect free.
       if (Reg != 0 && MRegisterInfo::isVirtualRegister(Reg) &&
           MI->getOperand(2).isImm() && MI->getOperand(3).isReg() &&
-          MI->getOperand(4).isGlobal() && MI->getOperand(2).getImm() == 1 &&
+          MI->getOperand(4).isGlobal() &&
+          ST.GVRequiresExtraLoad(MI->getOperand(4).getGlobal(), TM, false) &&
+          MI->getOperand(2).getImm() == 1 &&
           MI->getOperand(3).getReg() == 0)
         return true;
     }
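
For reference, the operand indices the check inspects follow the x86
memory-operand layout of the time: operand 0 is the destination register,
and operands 1 through 4 are the base register, scale, index register, and
displacement. The following is a small self-contained C++ sketch of the
corrected predicate; AddrInfo and isHoistableGlobalLoad are hypothetical
names, not LLVM API.

// A minimal sketch of the corrected check, not LLVM code.
#include <cstdint>

struct AddrInfo {
  unsigned BaseReg;          // operand 1: must be a register (e.g. the PIC base)
  bool     BaseIsVirtual;    //   and specifically a virtual register
  int64_t  Scale;            // operand 2: must be 1
  unsigned IndexReg;         // operand 3: must be 0 (no index register)
  bool     DispIsGlobal;     // operand 4: displacement references a global
  bool     GVNeedsExtraLoad; // X86Subtarget::GVRequiresExtraLoad(GV, TM, false)
};

// True only for a load of the stub entry for a global, which is constant
// for the whole function and therefore safe for machine LICM to hoist.
// A direct load from the global itself (as in the static relocation model)
// fails the GVNeedsExtraLoad test and stays inside the loop.
bool isHoistableGlobalLoad(const AddrInfo &A) {
  return A.BaseReg != 0 && A.BaseIsVirtual &&
         A.Scale == 1 && A.IndexReg == 0 &&
         A.DispIsGlobal && A.GVNeedsExtraLoad;
}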