Completely disallow partial copies in adjustCopiesBackFrom().

Partial copies can show up even when CoalescerPair.isPartial() returns
false. For example:

   %vreg24:dsub_0<def> = COPY %vreg31:dsub_0; QPR:%vreg24,%vreg31

Such a partial-to-partial copy (both the destination and the source carry a
sub-register index) is not sufficient for the transformation
adjustCopiesBackFrom() needs to perform.
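
The transformation is therefore only attempted when the value is defined by a
full copy, which the patch below checks with MachineInstr::isFullCopy(). That
predicate is essentially "a COPY whose operands carry no sub-register index";
a minimal sketch for illustration (the free-standing helper name here is not
LLVM API, the real predicate lives on MachineInstr):

   // Sketch (assumes llvm/CodeGen/MachineInstr.h): a COPY counts as "full"
   // only when neither the destination (operand 0) nor the source
   // (operand 1) carries a sub-register index such as dsub_0 above.
   static bool isFullCopySketch(const llvm::MachineInstr &MI) {
     return MI.isCopy() &&
            !MI.getOperand(0).getSubReg() &&
            !MI.getOperand(1).getSubReg();
   }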

llvm-svn: 166944
Jakob Stoklund Olesen 2012-10-29 17:51:52 +00:00
parent 0de4a1e4ae
commit 9a06696a77
2 changed files with 30 additions and 1 deletion

@@ -430,7 +430,8 @@ bool RegisterCoalescer::adjustCopiesBackFrom(const CoalescerPair &CP,
   // If AValNo is defined as a copy from IntB, we can potentially process this.
   // Get the instruction that defines this value number.
   MachineInstr *ACopyMI = LIS->getInstructionFromIndex(AValNo->def);
-  if (!CP.isCoalescable(ACopyMI))
+  // Don't allow any partial copies, even if isCoalescable() allows them.
+  if (!CP.isCoalescable(ACopyMI) || !ACopyMI->isFullCopy())
     return false;
 
   // Get the LiveRange in IntB that this value number starts with.

@@ -289,3 +289,31 @@ bb:
   %tmp18 = insertvalue %struct.wombat.5 %tmp17, <4 x float> undef, 3, 0
   ret %struct.wombat.5 %tmp18
 }
+
+; CHECK: adjustCopiesBackFrom
+; The shuffle in if.else3 must be preserved even though adjustCopiesBackFrom
+; is tempted to remove it.
+; CHECK: %if.else3
+; CHECK: vorr d
+define internal void @adjustCopiesBackFrom(<2 x i64>* noalias nocapture sret %agg.result, <2 x i64> %in) {
+entry:
+  %0 = extractelement <2 x i64> %in, i32 0
+  %cmp = icmp slt i64 %0, 1
+  %.in = select i1 %cmp, <2 x i64> <i64 0, i64 undef>, <2 x i64> %in
+  %1 = extractelement <2 x i64> %in, i32 1
+  %cmp1 = icmp slt i64 %1, 1
+  br i1 %cmp1, label %if.then2, label %if.else3
+
+if.then2:                                         ; preds = %entry
+  %2 = insertelement <2 x i64> %.in, i64 0, i32 1
+  br label %if.end4
+
+if.else3:                                         ; preds = %entry
+  %3 = shufflevector <2 x i64> %.in, <2 x i64> %in, <2 x i32> <i32 0, i32 3>
+  br label %if.end4
+
+if.end4:                                          ; preds = %if.else3, %if.then2
+  %result.2 = phi <2 x i64> [ %2, %if.then2 ], [ %3, %if.else3 ]
+  store <2 x i64> %result.2, <2 x i64>* %agg.result, align 128
+  ret void
+}