[X86] When lowering unsigned v2i64 setcc without SSE42, flip the sign bits in the v2i64 type then bitcast to v4i32.

This may give DAG combine slightly better opportunities to simplify against the operations feeding the setcc. It also matches the type the xors would eventually be promoted to anyway, so it saves a legalization step.
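
For reference, the lowering rests on the usual identity that XORing both operands with the sign bit turns an unsigned comparison into a signed one. A minimal standalone C++ sketch of that identity per 32-bit lane (illustrative only, not code from this commit):

// Sketch: an unsigned compare equals a signed compare after flipping the
// sign bits of both operands (what the PXOR with 0x80000000 per lane does
// before the signed PCMPGTD in the vector lowering).
#include <cassert>
#include <cstdint>

static bool unsignedLtViaSigned(uint32_t A, uint32_t B) {
  int32_t SA = static_cast<int32_t>(A ^ 0x80000000U);
  int32_t SB = static_cast<int32_t>(B ^ 0x80000000U);
  return SA < SB; // signed compare, like PCMPGTD
}

int main() {
  const uint32_t Vals[] = {0U, 1U, 0x7FFFFFFFU, 0x80000000U, 0xFFFFFFFFU};
  for (uint32_t A : Vals)
    for (uint32_t B : Vals)
      assert(unsignedLtViaSigned(A, B) == (A < B)); // A < B is unsigned here
  return 0;
}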

Almost all of the test changes are because our constant pool entry is now v2i64 instead of v4i32 on 64-bit targets. On 32-bit targets, getConstant should be emitting a v4i32 build_vector and a v4i32->v2i64 bitcast.
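
To see why only the printed constants change, note that the old v4i32 build_vector and the new v2i64 constant are the same 128 bits on a little-endian target, so the bitcast is free. A small C++ sketch of the equivalence (illustrative only, not code from this commit; assumes a little-endian host, as on x86):

// Sketch: the old v4i32 {Sign, Zero, Sign, Zero} build_vector and the new
// v2i64 splat of 0x0000000080000000 are the same bytes on little-endian x86.
#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  const uint32_t V4[4] = {0x80000000U, 0U, 0x80000000U, 0U}; // old form
  uint64_t V2[2];
  std::memcpy(V2, V4, sizeof(V2)); // the bitcast: reinterpret the same bytes
  assert(V2[0] == 0x0000000080000000ULL); // decimal 2147483648, as in the tests
  assert(V2[1] == V2[0]);                 // i.e. [2147483648,2147483648]
  return 0;
}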

There are a couple test cases where it appears we now combine a bitwise not with one of these xors, which caused a new constant vector to be generated. This prevented a constant pool entry from being shared. But if that's an issue we're concerned about, it seems we need to address it another way than just relying on a bitcast to hide it.
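
For reference, the combine in question is the identity not(x ^ C) == x ^ ~C: folding the bitwise not into the sign-flip xor replaces the 0x8000000080000000 constant with its complement 0x7FFFFFFF7FFFFFFF, which is the 9223372034707292159 constant that shows up in the unsigned saturation tests below. A minimal C++ sketch (illustrative only, not code from this commit):

// Sketch of the combine: NOT(x ^ C) == x ^ ~C, so folding the bitwise not
// into the sign-flip xor materializes a second, unshared constant.
#include <cassert>
#include <cstdint>

int main() {
  const uint64_t SignC = 0x8000000080000000ULL; // 9223372039002259456
  const uint64_t X = 0x0123456789ABCDEFULL;     // arbitrary test value
  assert(~(X ^ SignC) == (X ^ ~SignC));
  assert(~SignC == 0x7FFFFFFF7FFFFFFFULL);      // 9223372034707292159 in the tests
  return 0;
}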

This came about from experiments I've been trying with pushing the promotion of and/or/xor to vXi64 later than LegalizeVectorOps, where it happens today. We run LegalizeVectorOps in bottom-up order, so the and/or/xor are promoted before their users are legalized. The bitcasts added for the promotion act as a barrier to computeKnownBits if we try to use it during vector legalization of a later operation. So by moving the promotion out, we can hopefully get better results from computeKnownBits/computeNumSignBits, like in LowerTruncate on AVX512. I've also looked at running LegalizeVectorOps in a top-down order like LegalizeDAG, but that's showing some other issues.

llvm-svn: 344071
Craig Topper 2018-10-09 19:05:50 +00:00
parent e54d9525ad
commit f6d8400869
28 changed files with 266 additions and 272 deletions

View File

@ -19051,23 +19051,21 @@ static SDValue LowerVSETCC(SDValue Op, const X86Subtarget &Subtarget,
  if (Opc == X86ISD::PCMPGT && !Subtarget.hasSSE42()) {
    assert(Subtarget.hasSSE2() && "Don't know how to lower!");
-   // First cast everything to the right type.
-   Op0 = DAG.getBitcast(MVT::v4i32, Op0);
-   Op1 = DAG.getBitcast(MVT::v4i32, Op1);
    // Since SSE has no unsigned integer comparisons, we need to flip the sign
    // bits of the inputs before performing those operations. The lower
    // compare is always unsigned.
    SDValue SB;
    if (FlipSigns) {
-     SB = DAG.getConstant(0x80000000U, dl, MVT::v4i32);
+     SB = DAG.getConstant(0x8000000080000000ULL, dl, MVT::v2i64);
    } else {
-     SDValue Sign = DAG.getConstant(0x80000000U, dl, MVT::i32);
-     SDValue Zero = DAG.getConstant(0x00000000U, dl, MVT::i32);
-     SB = DAG.getBuildVector(MVT::v4i32, dl, {Sign, Zero, Sign, Zero});
+     SB = DAG.getConstant(0x0000000080000000ULL, dl, MVT::v2i64);
    }
-   Op0 = DAG.getNode(ISD::XOR, dl, MVT::v4i32, Op0, SB);
-   Op1 = DAG.getNode(ISD::XOR, dl, MVT::v4i32, Op1, SB);
+   Op0 = DAG.getNode(ISD::XOR, dl, MVT::v2i64, Op0, SB);
+   Op1 = DAG.getNode(ISD::XOR, dl, MVT::v2i64, Op1, SB);
+   // Cast everything to the right type.
+   Op0 = DAG.getBitcast(MVT::v4i32, Op0);
+   Op1 = DAG.getBitcast(MVT::v4i32, Op1);
    // Emulate PCMPGTQ with (hi1 > hi2) | ((hi1 == hi2) & (lo1 > lo2))
    SDValue GT = DAG.getNode(X86ISD::PCMPGT, dl, MVT::v4i32, Op0, Op1);
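
For context, the emulation comment above corresponds to the standard way of building a 64-bit signed greater-than out of 32-bit compares: a signed compare of the high halves plus an unsigned compare of the low halves. A scalar C++ sketch of the same formula (illustrative only; in the vector version, the sign-flip xor is what makes the low-half compare unsigned):

// Sketch of the PCMPGTQ emulation:
// (hi1 > hi2) | ((hi1 == hi2) & (lo1 > lo2))
#include <cassert>
#include <cstdint>

static bool sgt64(int64_t A, int64_t B) {
  int32_t Hi1 = static_cast<int32_t>(static_cast<uint64_t>(A) >> 32);
  int32_t Hi2 = static_cast<int32_t>(static_cast<uint64_t>(B) >> 32);
  uint32_t Lo1 = static_cast<uint32_t>(A); // low halves compared unsigned
  uint32_t Lo2 = static_cast<uint32_t>(B);
  return (Hi1 > Hi2) || (Hi1 == Hi2 && Lo1 > Lo2);
}

int main() {
  const int64_t Vals[] = {INT64_MIN, -1, 0, 1, 0x100000000LL, INT64_MAX};
  for (int64_t A : Vals)
    for (int64_t B : Vals)
      assert(sgt64(A, B) == (A > B));
  return 0;
}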

View File

@ -213,7 +213,7 @@ define i2 @v2i8(<2 x i8> %a, <2 x i8> %b, <2 x i8> %c, <2 x i8> %d) {
; SSE2-SSSE3-NEXT: psrad $24, %xmm1
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
; SSE2-SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm4, %xmm1
; SSE2-SSSE3-NEXT: pxor %xmm4, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, %xmm5
@ -366,7 +366,7 @@ define i2 @v2i16(<2 x i16> %a, <2 x i16> %b, <2 x i16> %c, <2 x i16> %d) {
; SSE2-SSSE3-NEXT: psrad $16, %xmm1
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
; SSE2-SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm4, %xmm1
; SSE2-SSSE3-NEXT: pxor %xmm4, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, %xmm5
@ -511,7 +511,7 @@ define i2 @v2i32(<2 x i32> %a, <2 x i32> %b, <2 x i32> %c, <2 x i32> %d) {
; SSE2-SSSE3-NEXT: psrad $31, %xmm1
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
; SSE2-SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm1, %xmm0
; SSE2-SSSE3-NEXT: pxor %xmm1, %xmm3
; SSE2-SSSE3-NEXT: movdqa %xmm3, %xmm5
@ -620,7 +620,7 @@ define i2 @v2i32(<2 x i32> %a, <2 x i32> %b, <2 x i32> %c, <2 x i32> %d) {
define i2 @v2i64(<2 x i64> %a, <2 x i64> %b, <2 x i64> %c, <2 x i64> %d) {
; SSE2-SSSE3-LABEL: v2i64:
; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm4, %xmm1
; SSE2-SSSE3-NEXT: pxor %xmm4, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, %xmm5

View File

@ -9,7 +9,7 @@
define i4 @v4i64(<4 x i64> %a, <4 x i64> %b, <4 x i64> %c, <4 x i64> %d) {
; SSE2-SSSE3-LABEL: v4i64:
; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm8, %xmm3
; SSE2-SSSE3-NEXT: pxor %xmm8, %xmm1
; SSE2-SSSE3-NEXT: movdqa %xmm1, %xmm9

View File

@ -163,7 +163,7 @@ define i2 @v2i8(<2 x i8> %a, <2 x i8> %b) {
; SSE2-SSSE3-NEXT: psrad $24, %xmm1
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
; SSE2-SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm2, %xmm1
; SSE2-SSSE3-NEXT: pxor %xmm2, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, %xmm2
@ -255,7 +255,7 @@ define i2 @v2i16(<2 x i16> %a, <2 x i16> %b) {
; SSE2-SSSE3-NEXT: psrad $16, %xmm1
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
; SSE2-SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm2, %xmm1
; SSE2-SSSE3-NEXT: pxor %xmm2, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, %xmm2
@ -343,7 +343,7 @@ define i2 @v2i32(<2 x i32> %a, <2 x i32> %b) {
; SSE2-SSSE3-NEXT: psrad $31, %xmm1
; SSE2-SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,3,2,3]
; SSE2-SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm1, %xmm0
; SSE2-SSSE3-NEXT: pxor %xmm1, %xmm2
; SSE2-SSSE3-NEXT: movdqa %xmm2, %xmm1
@ -413,7 +413,7 @@ define i2 @v2i32(<2 x i32> %a, <2 x i32> %b) {
define i2 @v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-SSSE3-LABEL: v2i64:
; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm2, %xmm1
; SSE2-SSSE3-NEXT: pxor %xmm2, %xmm0
; SSE2-SSSE3-NEXT: movdqa %xmm0, %xmm2

View File

@ -210,7 +210,7 @@ define i32 @v32i8(<32 x i8> %a, <32 x i8> %b) {
define i4 @v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-SSSE3-LABEL: v4i64:
; SSE2-SSSE3: # %bb.0:
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-SSSE3-NEXT: pxor %xmm4, %xmm3
; SSE2-SSSE3-NEXT: pxor %xmm4, %xmm1
; SSE2-SSSE3-NEXT: movdqa %xmm1, %xmm5

View File

@ -59,7 +59,7 @@ define i64 @test_reduce_v2i64(<2 x i64> %a0) {
; X64-SSE2-LABEL: test_reduce_v2i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
; X64-SSE2-NEXT: pxor %xmm2, %xmm3
; X64-SSE2-NEXT: pxor %xmm1, %xmm2
@ -490,7 +490,7 @@ define i64 @test_reduce_v4i64(<4 x i64> %a0) {
;
; X64-SSE2-LABEL: test_reduce_v4i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; X64-SSE2-NEXT: movdqa %xmm1, %xmm3
; X64-SSE2-NEXT: pxor %xmm2, %xmm3
; X64-SSE2-NEXT: movdqa %xmm0, %xmm4
@ -1176,7 +1176,7 @@ define i64 @test_reduce_v8i64(<8 x i64> %a0) {
;
; X64-SSE2-LABEL: test_reduce_v8i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; X64-SSE2-NEXT: movdqa %xmm2, %xmm5
; X64-SSE2-NEXT: pxor %xmm4, %xmm5
; X64-SSE2-NEXT: movdqa %xmm0, %xmm6

View File

@ -60,7 +60,7 @@ define i64 @test_reduce_v2i64(<2 x i64> %a0) {
; X64-SSE2-LABEL: test_reduce_v2i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
; X64-SSE2-NEXT: pxor %xmm2, %xmm3
; X64-SSE2-NEXT: pxor %xmm1, %xmm2
@ -493,7 +493,7 @@ define i64 @test_reduce_v4i64(<4 x i64> %a0) {
;
; X64-SSE2-LABEL: test_reduce_v4i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
; X64-SSE2-NEXT: pxor %xmm2, %xmm3
; X64-SSE2-NEXT: movdqa %xmm1, %xmm4
@ -1180,7 +1180,7 @@ define i64 @test_reduce_v8i64(<8 x i64> %a0) {
;
; X64-SSE2-LABEL: test_reduce_v8i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; X64-SSE2-NEXT: movdqa %xmm1, %xmm5
; X64-SSE2-NEXT: pxor %xmm4, %xmm5
; X64-SSE2-NEXT: movdqa %xmm3, %xmm6

View File

@ -65,7 +65,7 @@ define i64 @test_reduce_v2i64(<2 x i64> %a0) {
; X64-SSE2-LABEL: test_reduce_v2i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
; X64-SSE2-NEXT: pxor %xmm2, %xmm3
; X64-SSE2-NEXT: pxor %xmm1, %xmm2
@ -563,7 +563,7 @@ define i64 @test_reduce_v4i64(<4 x i64> %a0) {
;
; X64-SSE2-LABEL: test_reduce_v4i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; X64-SSE2-NEXT: movdqa %xmm1, %xmm3
; X64-SSE2-NEXT: pxor %xmm2, %xmm3
; X64-SSE2-NEXT: movdqa %xmm0, %xmm4
@ -1294,7 +1294,7 @@ define i64 @test_reduce_v8i64(<8 x i64> %a0) {
;
; X64-SSE2-LABEL: test_reduce_v8i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; X64-SSE2-NEXT: movdqa %xmm2, %xmm5
; X64-SSE2-NEXT: pxor %xmm4, %xmm5
; X64-SSE2-NEXT: movdqa %xmm0, %xmm6

View File

@ -66,7 +66,7 @@ define i64 @test_reduce_v2i64(<2 x i64> %a0) {
; X64-SSE2-LABEL: test_reduce_v2i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
; X64-SSE2-NEXT: pxor %xmm2, %xmm3
; X64-SSE2-NEXT: pxor %xmm1, %xmm2
@ -501,7 +501,7 @@ define i64 @test_reduce_v4i64(<4 x i64> %a0) {
;
; X64-SSE2-LABEL: test_reduce_v4i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; X64-SSE2-NEXT: movdqa %xmm0, %xmm3
; X64-SSE2-NEXT: pxor %xmm2, %xmm3
; X64-SSE2-NEXT: movdqa %xmm1, %xmm4
@ -1196,7 +1196,7 @@ define i64 @test_reduce_v8i64(<8 x i64> %a0) {
;
; X64-SSE2-LABEL: test_reduce_v8i64:
; X64-SSE2: ## %bb.0:
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; X64-SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; X64-SSE2-NEXT: movdqa %xmm1, %xmm5
; X64-SSE2-NEXT: pxor %xmm4, %xmm5
; X64-SSE2-NEXT: movdqa %xmm3, %xmm6

View File

@ -191,7 +191,7 @@ define <2 x double> @clamp_sitofp_2i64_2f64(<2 x i64> %a) nounwind {
;
; X64-SSE-LABEL: clamp_sitofp_2i64_2f64:
; X64-SSE: # %bb.0:
; X64-SSE-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,0,2147483648,0]
; X64-SSE-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648]
; X64-SSE-NEXT: movdqa %xmm0, %xmm2
; X64-SSE-NEXT: pxor %xmm1, %xmm2
; X64-SSE-NEXT: movdqa {{.*#+}} xmm3 = [18446744071562067713,18446744071562067713]

View File

@ -837,7 +837,7 @@ define i1 @allzeros_v16i32_sign(<16 x i32> %arg) {
define i1 @allones_v4i64_sign(<4 x i64> %arg) {
; SSE2-LABEL: allones_v4i64_sign:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: movdqa %xmm2, %xmm3
; SSE2-NEXT: pcmpgtd %xmm1, %xmm3
@ -888,7 +888,7 @@ define i1 @allones_v4i64_sign(<4 x i64> %arg) {
define i1 @allzeros_v4i64_sign(<4 x i64> %arg) {
; SSE2-LABEL: allzeros_v4i64_sign:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: movdqa %xmm2, %xmm3
; SSE2-NEXT: pcmpgtd %xmm1, %xmm3
@ -938,7 +938,7 @@ define i1 @allzeros_v4i64_sign(<4 x i64> %arg) {
define i1 @allones_v8i64_sign(<8 x i64> %arg) {
; SSE2-LABEL: allones_v8i64_sign:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm4, %xmm3
; SSE2-NEXT: movdqa %xmm4, %xmm5
; SSE2-NEXT: pcmpgtd %xmm3, %xmm5
@ -1031,7 +1031,7 @@ define i1 @allones_v8i64_sign(<8 x i64> %arg) {
define i1 @allzeros_v8i64_sign(<8 x i64> %arg) {
; SSE2-LABEL: allzeros_v8i64_sign:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm4, %xmm3
; SSE2-NEXT: movdqa %xmm4, %xmm5
; SSE2-NEXT: pcmpgtd %xmm3, %xmm5
@ -3839,7 +3839,7 @@ define i32 @movmskps(<4 x float> %x) {
define i32 @movmskpd256(<4 x double> %x) {
; SSE2-LABEL: movmskpd256:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: movdqa %xmm2, %xmm3
; SSE2-NEXT: pcmpgtd %xmm1, %xmm3

View File

@ -1538,7 +1538,7 @@ define <8 x i16> @psubus_8i64_max(<8 x i16> %x, <8 x i64> %y) nounwind {
; SSE2-NEXT: movdqa %xmm0, %xmm9
; SSE2-NEXT: punpckldq {{.*#+}} xmm9 = xmm9[0],xmm5[0],xmm9[1],xmm5[1]
; SSE2-NEXT: punpckhdq {{.*#+}} xmm0 = xmm0[2],xmm5[2],xmm0[3],xmm5[3]
; SSE2-NEXT: movdqa {{.*#+}} xmm11 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm11 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm6
; SSE2-NEXT: pxor %xmm11, %xmm6
; SSE2-NEXT: movdqa %xmm0, %xmm7
@ -1617,7 +1617,7 @@ define <8 x i16> @psubus_8i64_max(<8 x i16> %x, <8 x i64> %y) nounwind {
;
; SSSE3-LABEL: psubus_8i64_max:
; SSSE3: # %bb.0: # %vector.ph
; SSSE3-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSSE3-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSSE3-NEXT: movdqa %xmm2, %xmm7
; SSSE3-NEXT: pxor %xmm5, %xmm7
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002324991,9223372039002324991]
@ -1688,7 +1688,7 @@ define <8 x i16> @psubus_8i64_max(<8 x i16> %x, <8 x i64> %y) nounwind {
; SSE41-LABEL: psubus_8i64_max:
; SSE41: # %bb.0: # %vector.ph
; SSE41-NEXT: movdqa %xmm0, %xmm10
; SSE41-NEXT: movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm4, %xmm0
; SSE41-NEXT: pxor %xmm6, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002324991,9223372039002324991]

View File

@ -542,7 +542,7 @@ define <4 x i32> @unsigned_sat_constant_v4i32_using_cmp_notval(<4 x i32> %x) {
define <2 x i64> @unsigned_sat_constant_v2i64_using_min(<2 x i64> %x) {
; SSE2-LABEL: unsigned_sat_constant_v2i64_using_min:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: pxor %xmm0, %xmm1
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372034707292117,9223372034707292117]
; SSE2-NEXT: movdqa %xmm2, %xmm3
@ -563,7 +563,7 @@ define <2 x i64> @unsigned_sat_constant_v2i64_using_min(<2 x i64> %x) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: movapd {{.*#+}} xmm2 = [18446744073709551573,18446744073709551573]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm1, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [9223372034707292117,9223372034707292117]
; SSE41-NEXT: movdqa %xmm3, %xmm4
@ -589,7 +589,7 @@ define <2 x i64> @unsigned_sat_constant_v2i64_using_cmp_sum(<2 x i64> %x) {
; ANY: # %bb.0:
; ANY-NEXT: movdqa {{.*#+}} xmm1 = [42,42]
; ANY-NEXT: paddq %xmm0, %xmm1
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: pxor %xmm1, %xmm2
; ANY-NEXT: movdqa %xmm0, %xmm3
@ -841,22 +841,21 @@ define <4 x i32> @unsigned_sat_variable_v4i32_using_cmp_notval(<4 x i32> %x, <4
define <2 x i64> @unsigned_sat_variable_v2i64_using_min(<2 x i64> %x, <2 x i64> %y) {
; SSE2-LABEL: unsigned_sat_variable_v2i64_using_min:
; SSE2: # %bb.0:
-; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
-; SSE2-NEXT: pcmpeqd %xmm3, %xmm3
-; SSE2-NEXT: movdqa %xmm0, %xmm4
-; SSE2-NEXT: pxor %xmm2, %xmm4
-; SSE2-NEXT: pxor %xmm3, %xmm2
-; SSE2-NEXT: pxor %xmm1, %xmm2
-; SSE2-NEXT: movdqa %xmm2, %xmm5
-; SSE2-NEXT: pcmpgtd %xmm4, %xmm5
+; SSE2-NEXT: pcmpeqd %xmm2, %xmm2
+; SSE2-NEXT: movdqa {{.*#+}} xmm3 = [9223372039002259456,9223372039002259456]
+; SSE2-NEXT: pxor %xmm0, %xmm3
+; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372034707292159,9223372034707292159]
+; SSE2-NEXT: pxor %xmm1, %xmm4
+; SSE2-NEXT: movdqa %xmm4, %xmm5
+; SSE2-NEXT: pcmpgtd %xmm3, %xmm5
; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
-; SSE2-NEXT: pcmpeqd %xmm4, %xmm2
-; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
-; SSE2-NEXT: pand %xmm6, %xmm2
+; SSE2-NEXT: pcmpeqd %xmm3, %xmm4
+; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
+; SSE2-NEXT: pand %xmm6, %xmm3
; SSE2-NEXT: pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
-; SSE2-NEXT: por %xmm2, %xmm4
+; SSE2-NEXT: por %xmm3, %xmm4
; SSE2-NEXT: pand %xmm4, %xmm0
-; SSE2-NEXT: pxor %xmm3, %xmm4
+; SSE2-NEXT: pxor %xmm2, %xmm4
; SSE2-NEXT: movdqa %xmm1, %xmm2
; SSE2-NEXT: pandn %xmm4, %xmm2
; SSE2-NEXT: por %xmm2, %xmm0
@ -868,15 +867,15 @@ define <2 x i64> @unsigned_sat_variable_v2i64_using_min(<2 x i64> %x, <2 x i64>
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: pcmpeqd %xmm3, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm3
-; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
-; SSE41-NEXT: movdqa %xmm2, %xmm4
-; SSE41-NEXT: pxor %xmm0, %xmm4
-; SSE41-NEXT: pxor %xmm3, %xmm0
-; SSE41-NEXT: movdqa %xmm0, %xmm5
-; SSE41-NEXT: pcmpgtd %xmm4, %xmm5
+; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
+; SSE41-NEXT: pxor %xmm2, %xmm0
+; SSE41-NEXT: movdqa {{.*#+}} xmm4 = [9223372034707292159,9223372034707292159]
+; SSE41-NEXT: pxor %xmm1, %xmm4
+; SSE41-NEXT: movdqa %xmm4, %xmm5
+; SSE41-NEXT: pcmpgtd %xmm0, %xmm5
; SSE41-NEXT: pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
-; SSE41-NEXT: pcmpeqd %xmm4, %xmm0
-; SSE41-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,3,3]
+; SSE41-NEXT: pcmpeqd %xmm0, %xmm4
+; SSE41-NEXT: pshufd {{.*#+}} xmm4 = xmm4[1,1,3,3]
; SSE41-NEXT: pand %xmm6, %xmm4
; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm5[1,1,3,3]
; SSE41-NEXT: por %xmm4, %xmm0
@ -895,7 +894,7 @@ define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_sum(<2 x i64> %x, <2 x i
; ANY-LABEL: unsigned_sat_variable_v2i64_using_cmp_sum:
; ANY: # %bb.0:
; ANY-NEXT: paddq %xmm0, %xmm1
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; ANY-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; ANY-NEXT: pxor %xmm2, %xmm0
; ANY-NEXT: pxor %xmm1, %xmm2
; ANY-NEXT: movdqa %xmm0, %xmm3
@ -917,22 +916,19 @@ define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_sum(<2 x i64> %x, <2 x i
define <2 x i64> @unsigned_sat_variable_v2i64_using_cmp_notval(<2 x i64> %x, <2 x i64> %y) {
; ANY-LABEL: unsigned_sat_variable_v2i64_using_cmp_notval:
; ANY: # %bb.0:
-; ANY-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
-; ANY-NEXT: pcmpeqd %xmm3, %xmm3
-; ANY-NEXT: movdqa %xmm0, %xmm4
-; ANY-NEXT: paddq %xmm1, %xmm4
-; ANY-NEXT: pxor %xmm2, %xmm0
-; ANY-NEXT: pxor %xmm2, %xmm3
-; ANY-NEXT: pxor %xmm1, %xmm3
-; ANY-NEXT: movdqa %xmm0, %xmm1
-; ANY-NEXT: pcmpgtd %xmm3, %xmm1
-; ANY-NEXT: pshufd {{.*#+}} xmm2 = xmm1[0,0,2,2]
-; ANY-NEXT: pcmpeqd %xmm0, %xmm3
-; ANY-NEXT: pshufd {{.*#+}} xmm3 = xmm3[1,1,3,3]
-; ANY-NEXT: pand %xmm2, %xmm3
-; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
-; ANY-NEXT: por %xmm4, %xmm0
-; ANY-NEXT: por %xmm3, %xmm0
+; ANY-NEXT: movdqa %xmm0, %xmm2
+; ANY-NEXT: paddq %xmm1, %xmm2
+; ANY-NEXT: pxor {{.*}}(%rip), %xmm1
+; ANY-NEXT: pxor {{.*}}(%rip), %xmm0
+; ANY-NEXT: movdqa %xmm0, %xmm3
+; ANY-NEXT: pcmpgtd %xmm1, %xmm3
+; ANY-NEXT: pshufd {{.*#+}} xmm4 = xmm3[0,0,2,2]
+; ANY-NEXT: pcmpeqd %xmm1, %xmm0
+; ANY-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
+; ANY-NEXT: pand %xmm4, %xmm1
+; ANY-NEXT: pshufd {{.*#+}} xmm0 = xmm3[1,1,3,3]
+; ANY-NEXT: por %xmm2, %xmm0
+; ANY-NEXT: por %xmm1, %xmm0
; ANY-NEXT: retq
%noty = xor <2 x i64> %y, <i64 -1, i64 -1>
%a = add <2 x i64> %x, %y

View File

@ -288,7 +288,7 @@ define <16 x i8> @ne_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
define <2 x i64> @ge_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
; SSE2-LABEL: ge_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm2
@ -305,7 +305,7 @@ define <2 x i64> @ge_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
;
; SSE41-LABEL: ge_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: movdqa %xmm1, %xmm2
@ -482,7 +482,7 @@ define <16 x i8> @ge_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
define <2 x i64> @gt_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
; SSE2-LABEL: gt_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm2
@ -497,7 +497,7 @@ define <2 x i64> @gt_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
;
; SSE41-LABEL: gt_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: movdqa %xmm0, %xmm2
@ -596,7 +596,7 @@ define <16 x i8> @gt_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
define <2 x i64> @le_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
; SSE2-LABEL: le_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm2
@ -613,7 +613,7 @@ define <2 x i64> @le_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
;
; SSE41-LABEL: le_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: movdqa %xmm0, %xmm2
@ -790,7 +790,7 @@ define <16 x i8> @le_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
define <2 x i64> @lt_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
; SSE2-LABEL: lt_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm2
@ -805,7 +805,7 @@ define <2 x i64> @lt_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
;
; SSE41-LABEL: lt_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: movdqa %xmm1, %xmm2

View File

@ -288,7 +288,7 @@ define <16 x i8> @ne_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
define <2 x i64> @ge_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
; SSE2-LABEL: ge_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm2
@ -305,7 +305,7 @@ define <2 x i64> @ge_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
;
; SSE41-LABEL: ge_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: movdqa %xmm1, %xmm2
@ -470,7 +470,7 @@ define <16 x i8> @ge_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
define <2 x i64> @gt_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
; SSE2-LABEL: gt_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm2
@ -485,7 +485,7 @@ define <2 x i64> @gt_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
;
; SSE41-LABEL: gt_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: movdqa %xmm0, %xmm2
@ -710,7 +710,7 @@ define <16 x i8> @gt_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
define <2 x i64> @le_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
; SSE2-LABEL: le_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm2
@ -727,7 +727,7 @@ define <2 x i64> @le_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
;
; SSE41-LABEL: le_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: movdqa %xmm0, %xmm2
@ -892,7 +892,7 @@ define <16 x i8> @le_v16i8(<16 x i8> %a, <16 x i8> %b) nounwind {
define <2 x i64> @lt_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
; SSE2-LABEL: lt_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm2
@ -907,7 +907,7 @@ define <2 x i64> @lt_v2i64(<2 x i64> %a, <2 x i64> %b) nounwind {
;
; SSE41-LABEL: lt_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm2, %xmm1
; SSE41-NEXT: movdqa %xmm1, %xmm2

View File

@ -14,7 +14,7 @@
define <2 x i64> @max_gt_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: max_gt_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -34,7 +34,7 @@ define <2 x i64> @max_gt_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE41-LABEL: max_gt_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm0
@ -86,7 +86,7 @@ define <2 x i64> @max_gt_v2i64(<2 x i64> %a, <2 x i64> %b) {
define <4 x i64> @max_gt_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: max_gt_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -121,7 +121,7 @@ define <4 x i64> @max_gt_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE41-LABEL: max_gt_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm4, %xmm6
@ -407,7 +407,7 @@ define <32 x i8> @max_gt_v32i8(<32 x i8> %a, <32 x i8> %b) {
define <2 x i64> @max_ge_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: max_ge_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -427,7 +427,7 @@ define <2 x i64> @max_ge_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE41-LABEL: max_ge_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm0
@ -479,7 +479,7 @@ define <2 x i64> @max_ge_v2i64(<2 x i64> %a, <2 x i64> %b) {
define <4 x i64> @max_ge_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: max_ge_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -514,7 +514,7 @@ define <4 x i64> @max_ge_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE41-LABEL: max_ge_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm4, %xmm6
@ -800,7 +800,7 @@ define <32 x i8> @max_ge_v32i8(<32 x i8> %a, <32 x i8> %b) {
define <2 x i64> @min_lt_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: min_lt_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -820,7 +820,7 @@ define <2 x i64> @min_lt_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE41-LABEL: min_lt_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm0
@ -873,7 +873,7 @@ define <2 x i64> @min_lt_v2i64(<2 x i64> %a, <2 x i64> %b) {
define <4 x i64> @min_lt_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: min_lt_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -908,7 +908,7 @@ define <4 x i64> @min_lt_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE41-LABEL: min_lt_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm2, %xmm6
; SSE41-NEXT: pxor %xmm5, %xmm6
@ -1188,7 +1188,7 @@ define <32 x i8> @min_lt_v32i8(<32 x i8> %a, <32 x i8> %b) {
define <2 x i64> @min_le_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: min_le_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -1208,7 +1208,7 @@ define <2 x i64> @min_le_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE41-LABEL: min_le_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm0
@ -1261,7 +1261,7 @@ define <2 x i64> @min_le_v2i64(<2 x i64> %a, <2 x i64> %b) {
define <4 x i64> @min_le_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: min_le_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -1296,7 +1296,7 @@ define <4 x i64> @min_le_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE41-LABEL: min_le_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm2, %xmm6
; SSE41-NEXT: pxor %xmm5, %xmm6

View File

@ -14,7 +14,7 @@
define <2 x i64> @max_gt_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: max_gt_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -34,7 +34,7 @@ define <2 x i64> @max_gt_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE41-LABEL: max_gt_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm0
@ -96,7 +96,7 @@ define <2 x i64> @max_gt_v2i64(<2 x i64> %a, <2 x i64> %b) {
define <4 x i64> @max_gt_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: max_gt_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -131,7 +131,7 @@ define <4 x i64> @max_gt_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE41-LABEL: max_gt_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm4, %xmm6
@ -435,7 +435,7 @@ define <32 x i8> @max_gt_v32i8(<32 x i8> %a, <32 x i8> %b) {
define <2 x i64> @max_ge_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: max_ge_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -455,7 +455,7 @@ define <2 x i64> @max_ge_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE41-LABEL: max_ge_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm0
@ -517,7 +517,7 @@ define <2 x i64> @max_ge_v2i64(<2 x i64> %a, <2 x i64> %b) {
define <4 x i64> @max_ge_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: max_ge_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -552,7 +552,7 @@ define <4 x i64> @max_ge_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE41-LABEL: max_ge_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm4, %xmm6
@ -856,7 +856,7 @@ define <32 x i8> @max_ge_v32i8(<32 x i8> %a, <32 x i8> %b) {
define <2 x i64> @min_lt_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: min_lt_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -876,7 +876,7 @@ define <2 x i64> @min_lt_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE41-LABEL: min_lt_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm2, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm0
@ -938,7 +938,7 @@ define <2 x i64> @min_lt_v2i64(<2 x i64> %a, <2 x i64> %b) {
define <4 x i64> @min_lt_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: min_lt_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -973,7 +973,7 @@ define <4 x i64> @min_lt_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE41-LABEL: min_lt_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm2, %xmm6
; SSE41-NEXT: pxor %xmm5, %xmm6
@ -1276,7 +1276,7 @@ define <32 x i8> @min_lt_v32i8(<32 x i8> %a, <32 x i8> %b) {
define <2 x i64> @min_le_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: min_le_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -1296,7 +1296,7 @@ define <2 x i64> @min_le_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE41-LABEL: min_le_v2i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm2, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm1, %xmm0
@ -1358,7 +1358,7 @@ define <2 x i64> @min_le_v2i64(<2 x i64> %a, <2 x i64> %b) {
define <4 x i64> @min_le_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: min_le_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -1393,7 +1393,7 @@ define <4 x i64> @min_le_v4i64(<4 x i64> %a, <4 x i64> %b) {
; SSE41-LABEL: min_le_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm2, %xmm6
; SSE41-NEXT: pxor %xmm5, %xmm6

View File

@ -44,7 +44,7 @@ define <4 x i1> @test_cmp_v4f32(<4 x float> %a0, <4 x float> %a1) nounwind {
define <2 x i1> @test_cmp_v2i64(<2 x i64> %a0, <2 x i64> %a1) nounwind {
; SSE2-LABEL: test_cmp_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm2
@ -191,7 +191,7 @@ define <8 x i1> @test_cmp_v8f32(<8 x float> %a0, <8 x float> %a1) nounwind {
define <4 x i1> @test_cmp_v4i64(<4 x i64> %a0, <4 x i64> %a1) nounwind {
; SSE2-LABEL: test_cmp_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm4, %xmm3
; SSE2-NEXT: pxor %xmm4, %xmm1
; SSE2-NEXT: movdqa %xmm1, %xmm5
@ -751,7 +751,7 @@ define <16 x i1> @test_cmp_v16f32(<16 x float> %a0, <16 x float> %a1) nounwind {
define <8 x i1> @test_cmp_v8i64(<8 x i64> %a0, <8 x i64> %a1) nounwind {
; SSE2-LABEL: test_cmp_v8i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm8, %xmm7
; SSE2-NEXT: pxor %xmm8, %xmm3
; SSE2-NEXT: movdqa %xmm3, %xmm9
@ -2757,7 +2757,7 @@ define <32 x i1> @test_cmp_v32f32(<32 x float> %a0, <32 x float> %a1) nounwind {
define <16 x i1> @test_cmp_v16i64(<16 x i64> %a0, <16 x i64> %a1) nounwind {
; SSE2-LABEL: test_cmp_v16i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm8, %xmm7
; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
@ -6999,7 +6999,7 @@ define <32 x i1> @test_cmp_v32i64(<32 x i64> %a0, <32 x i64> %a1) nounwind {
; SSE2-LABEL: test_cmp_v32i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movq %rdi, %rax
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm8, %xmm1
; SSE2-NEXT: movdqa {{[0-9]+}}(%rsp), %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9

View File

@ -409,7 +409,7 @@ define <8 x i32> @cmpgt_zext_v8i32(<8 x i32> %a, <8 x i32> %b) {
define <2 x i64> @cmpgt_zext_v2i64(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: cmpgt_zext_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: pxor %xmm2, %xmm1
; SSE2-NEXT: pxor %xmm2, %xmm0
; SSE2-NEXT: movdqa %xmm0, %xmm2

View File

@ -14,7 +14,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
; SSE2-LABEL: test_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -36,7 +36,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm0
@ -82,7 +82,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
define i64 @test_v4i64(<4 x i64> %a0) {
; SSE2-LABEL: test_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa %xmm0, %xmm4
@ -119,7 +119,7 @@ define i64 @test_v4i64(<4 x i64> %a0) {
; SSE41-LABEL: test_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm3, %xmm0
; SSE41-NEXT: movdqa %xmm2, %xmm4
@ -202,7 +202,7 @@ define i64 @test_v4i64(<4 x i64> %a0) {
define i64 @test_v8i64(<8 x i64> %a0) {
; SSE2-LABEL: test_v8i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -269,7 +269,7 @@ define i64 @test_v8i64(<8 x i64> %a0) {
; SSE41-LABEL: test_v8i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm4, %xmm6
@ -377,7 +377,7 @@ define i64 @test_v8i64(<8 x i64> %a0) {
define i64 @test_v16i64(<16 x i64> %a0) {
; SSE2-LABEL: test_v16i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm5, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm1, %xmm10
@ -504,7 +504,7 @@ define i64 @test_v16i64(<16 x i64> %a0) {
; SSE41-LABEL: test_v16i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm5, %xmm10
; SSE41-NEXT: pxor %xmm9, %xmm10
; SSE41-NEXT: movdqa %xmm1, %xmm0

View File

@ -14,7 +14,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
; SSE2-LABEL: test_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -36,7 +36,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm0
@ -82,7 +82,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
define i64 @test_v4i64(<4 x i64> %a0) {
; SSE2-LABEL: test_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa %xmm1, %xmm4
@ -119,7 +119,7 @@ define i64 @test_v4i64(<4 x i64> %a0) {
; SSE41-LABEL: test_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm3, %xmm0
; SSE41-NEXT: movdqa %xmm1, %xmm4
; SSE41-NEXT: pxor %xmm3, %xmm4
@ -201,7 +201,7 @@ define i64 @test_v4i64(<4 x i64> %a0) {
define i64 @test_v8i64(<8 x i64> %a0) {
; SSE2-LABEL: test_v8i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm3, %xmm6
@ -268,7 +268,7 @@ define i64 @test_v8i64(<8 x i64> %a0) {
; SSE41-LABEL: test_v8i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm3, %xmm6
@ -376,7 +376,7 @@ define i64 @test_v8i64(<8 x i64> %a0) {
define i64 @test_v16i64(<16 x i64> %a0) {
; SSE2-LABEL: test_v16i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm6, %xmm10
@ -503,7 +503,7 @@ define i64 @test_v16i64(<16 x i64> %a0) {
; SSE41-LABEL: test_v16i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm10
; SSE41-NEXT: pxor %xmm9, %xmm10
; SSE41-NEXT: movdqa %xmm6, %xmm0

View File

@ -14,7 +14,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
; SSE2-LABEL: test_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -36,7 +36,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm0
@ -85,7 +85,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
define i64 @test_v4i64(<4 x i64> %a0) {
; SSE2-LABEL: test_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa %xmm0, %xmm4
@ -122,7 +122,7 @@ define i64 @test_v4i64(<4 x i64> %a0) {
; SSE41-LABEL: test_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm3, %xmm0
; SSE41-NEXT: movdqa %xmm2, %xmm4
@ -215,7 +215,7 @@ define i64 @test_v4i64(<4 x i64> %a0) {
define i64 @test_v8i64(<8 x i64> %a0) {
; SSE2-LABEL: test_v8i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -282,7 +282,7 @@ define i64 @test_v8i64(<8 x i64> %a0) {
; SSE41-LABEL: test_v8i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm4, %xmm6
@ -406,7 +406,7 @@ define i64 @test_v8i64(<8 x i64> %a0) {
define i64 @test_v16i64(<16 x i64> %a0) {
; SSE2-LABEL: test_v16i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm5, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm1, %xmm10
@ -533,7 +533,7 @@ define i64 @test_v16i64(<16 x i64> %a0) {
; SSE41-LABEL: test_v16i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm5, %xmm10
; SSE41-NEXT: pxor %xmm9, %xmm10
; SSE41-NEXT: movdqa %xmm1, %xmm0

View File

@ -14,7 +14,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
; SSE2-LABEL: test_v2i64:
; SSE2: # %bb.0:
; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -36,7 +36,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm1
; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: pxor %xmm2, %xmm0
@ -85,7 +85,7 @@ define i64 @test_v2i64(<2 x i64> %a0) {
define i64 @test_v4i64(<4 x i64> %a0) {
; SSE2-LABEL: test_v4i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa %xmm1, %xmm4
@ -122,7 +122,7 @@ define i64 @test_v4i64(<4 x i64> %a0) {
; SSE41-LABEL: test_v4i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm3, %xmm0
; SSE41-NEXT: movdqa %xmm1, %xmm4
; SSE41-NEXT: pxor %xmm3, %xmm4
@ -214,7 +214,7 @@ define i64 @test_v4i64(<4 x i64> %a0) {
define i64 @test_v8i64(<8 x i64> %a0) {
; SSE2-LABEL: test_v8i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm3, %xmm6
@ -281,7 +281,7 @@ define i64 @test_v8i64(<8 x i64> %a0) {
; SSE41-LABEL: test_v8i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa %xmm3, %xmm6
@ -405,7 +405,7 @@ define i64 @test_v8i64(<8 x i64> %a0) {
define i64 @test_v16i64(<16 x i64> %a0) {
; SSE2-LABEL: test_v16i64:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm6, %xmm10
@ -532,7 +532,7 @@ define i64 @test_v16i64(<16 x i64> %a0) {
; SSE41-LABEL: test_v16i64:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm2, %xmm10
; SSE41-NEXT: pxor %xmm9, %xmm10
; SSE41-NEXT: movdqa %xmm6, %xmm0

@ -18,7 +18,7 @@ define <4 x i32> @trunc_packus_v4i64_v4i32(<4 x i64> %a0) {
; SSE2-LABEL: trunc_packus_v4i64_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa {{.*#+}} xmm5 = [2147483647,2147483647]
@ -74,7 +74,7 @@ define <4 x i32> @trunc_packus_v4i64_v4i32(<4 x i64> %a0) {
; SSSE3-LABEL: trunc_packus_v4i64_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pxor %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm5 = [2147483647,2147483647]
@ -131,7 +131,7 @@ define <4 x i32> @trunc_packus_v4i64_v4i32(<4 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movapd {{.*#+}} xmm4 = [4294967295,4294967295]
; SSE41-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm8, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm6 = [2147483647,2147483647]
; SSE41-NEXT: movdqa %xmm6, %xmm5
@ -282,7 +282,7 @@ define <8 x i32> @trunc_packus_v8i64_v8i32(<8 x i64> %a0) {
; SSE2-LABEL: trunc_packus_v8i64_v8i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm10, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [2147483647,2147483647]
@ -387,7 +387,7 @@ define <8 x i32> @trunc_packus_v8i64_v8i32(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_packus_v8i64_v8i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm0, %xmm5
; SSSE3-NEXT: pxor %xmm10, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [2147483647,2147483647]
@ -493,7 +493,7 @@ define <8 x i32> @trunc_packus_v8i64_v8i32(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movapd {{.*#+}} xmm7 = [4294967295,4294967295]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm10, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm11 = [2147483647,2147483647]
; SSE41-NEXT: movdqa %xmm11, %xmm6
@ -683,7 +683,7 @@ define <8 x i16> @trunc_packus_v8i64_v8i16(<8 x i64> %a0) {
; SSE2-LABEL: trunc_packus_v8i64_v8i16:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [65535,65535]
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm5
; SSE2-NEXT: pxor %xmm10, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [2147549183,2147549183]
@ -797,7 +797,7 @@ define <8 x i16> @trunc_packus_v8i64_v8i16(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_packus_v8i64_v8i16:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [65535,65535]
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm1, %xmm5
; SSSE3-NEXT: pxor %xmm10, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [2147549183,2147549183]
@ -912,7 +912,7 @@ define <8 x i16> @trunc_packus_v8i64_v8i16(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm9
; SSE41-NEXT: movapd {{.*#+}} xmm7 = [65535,65535]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm10, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm11 = [2147549183,2147549183]
@ -1339,7 +1339,7 @@ define <8 x i8> @trunc_packus_v8i64_v8i8(<8 x i64> %a0) {
; SSE2-LABEL: trunc_packus_v8i64_v8i8:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm10, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [2147483903,2147483903]
@ -1445,7 +1445,7 @@ define <8 x i8> @trunc_packus_v8i64_v8i8(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_packus_v8i64_v8i8:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm2, %xmm5
; SSSE3-NEXT: pxor %xmm10, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [2147483903,2147483903]
@ -1552,7 +1552,7 @@ define <8 x i8> @trunc_packus_v8i64_v8i8(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm9
; SSE41-NEXT: movapd {{.*#+}} xmm7 = [255,255]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm10, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm11 = [2147483903,2147483903]
@ -1725,7 +1725,7 @@ define void @trunc_packus_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSE2-LABEL: trunc_packus_v8i64_v8i8_store:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm3, %xmm4
; SSE2-NEXT: pxor %xmm10, %xmm4
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [2147483903,2147483903]
@ -1837,7 +1837,7 @@ define void @trunc_packus_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSSE3-LABEL: trunc_packus_v8i64_v8i8_store:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm3, %xmm4
; SSSE3-NEXT: pxor %xmm10, %xmm4
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [2147483903,2147483903]
@ -1950,7 +1950,7 @@ define void @trunc_packus_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm9
; SSE41-NEXT: movapd {{.*#+}} xmm8 = [255,255]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm3, %xmm0
; SSE41-NEXT: pxor %xmm10, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483903,2147483903]
@ -2141,7 +2141,7 @@ define <16 x i8> @trunc_packus_v16i64_v16i8(<16 x i64> %a0) {
; SSE2-LABEL: trunc_packus_v16i64_v16i8:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [255,255]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm6, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa {{.*#+}} xmm11 = [2147483903,2147483903]
@ -2347,7 +2347,7 @@ define <16 x i8> @trunc_packus_v16i64_v16i8(<16 x i64> %a0) {
; SSSE3-LABEL: trunc_packus_v16i64_v16i8:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [255,255]
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm6, %xmm9
; SSSE3-NEXT: pxor %xmm8, %xmm9
; SSSE3-NEXT: movdqa {{.*#+}} xmm11 = [2147483903,2147483903]
@ -2554,7 +2554,7 @@ define <16 x i8> @trunc_packus_v16i64_v16i8(<16 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movapd {{.*#+}} xmm11 = [255,255]
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm6, %xmm0
; SSE41-NEXT: pxor %xmm9, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm12 = [2147483903,2147483903]

@ -18,7 +18,7 @@ define <4 x i32> @trunc_ssat_v4i64_v4i32(<4 x i64> %a0) {
; SSE2-LABEL: trunc_ssat_v4i64_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483647,2147483647]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa {{.*#+}} xmm5 = [4294967295,4294967295]
@ -79,7 +79,7 @@ define <4 x i32> @trunc_ssat_v4i64_v4i32(<4 x i64> %a0) {
; SSSE3-LABEL: trunc_ssat_v4i64_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [2147483647,2147483647]
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pxor %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm5 = [4294967295,4294967295]
@ -141,7 +141,7 @@ define <4 x i32> @trunc_ssat_v4i64_v4i32(<4 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movapd {{.*#+}} xmm4 = [2147483647,2147483647]
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm3, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
; SSE41-NEXT: movdqa %xmm8, %xmm5
@ -290,7 +290,7 @@ define <8 x i32> @trunc_ssat_v8i64_v8i32(<8 x i64> %a0) {
; SSE2-LABEL: trunc_ssat_v8i64_v8i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483647,2147483647]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [4294967295,4294967295]
@ -404,7 +404,7 @@ define <8 x i32> @trunc_ssat_v8i64_v8i32(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_ssat_v8i64_v8i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [2147483647,2147483647]
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm0, %xmm5
; SSSE3-NEXT: pxor %xmm4, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [4294967295,4294967295]
@ -519,7 +519,7 @@ define <8 x i32> @trunc_ssat_v8i64_v8i32(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm9
; SSE41-NEXT: movapd {{.*#+}} xmm7 = [2147483647,2147483647]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [4294967295,4294967295]
; SSE41-NEXT: movdqa %xmm10, %xmm6
@ -710,7 +710,7 @@ define <8 x i16> @trunc_ssat_v8i64_v8i16(<8 x i64> %a0) {
; SSE2-LABEL: trunc_ssat_v8i64_v8i16:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [32767,32767]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [2147516415,2147516415]
@ -825,7 +825,7 @@ define <8 x i16> @trunc_ssat_v8i64_v8i16(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_ssat_v8i64_v8i16:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [32767,32767]
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm2, %xmm5
; SSSE3-NEXT: pxor %xmm4, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [2147516415,2147516415]
@ -941,7 +941,7 @@ define <8 x i16> @trunc_ssat_v8i64_v8i16(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm10
; SSE41-NEXT: movapd {{.*#+}} xmm7 = [32767,32767]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm11 = [2147516415,2147516415]
@ -1214,7 +1214,7 @@ define <8 x i8> @trunc_ssat_v8i64_v8i8(<8 x i64> %a0) {
; SSE2-LABEL: trunc_ssat_v8i64_v8i8:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [127,127]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [2147483775,2147483775]
@ -1329,7 +1329,7 @@ define <8 x i8> @trunc_ssat_v8i64_v8i8(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_ssat_v8i64_v8i8:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [127,127]
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm2, %xmm5
; SSSE3-NEXT: pxor %xmm4, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [2147483775,2147483775]
@ -1445,7 +1445,7 @@ define <8 x i8> @trunc_ssat_v8i64_v8i8(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm10
; SSE41-NEXT: movapd {{.*#+}} xmm7 = [127,127]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm2, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm11 = [2147483775,2147483775]
@ -1620,7 +1620,7 @@ define void @trunc_ssat_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSE2-LABEL: trunc_ssat_v8i64_v8i8_store:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [127,127]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm3, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [2147483775,2147483775]
@ -1742,7 +1742,7 @@ define void @trunc_ssat_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSSE3-LABEL: trunc_ssat_v8i64_v8i8_store:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [127,127]
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm3, %xmm5
; SSSE3-NEXT: pxor %xmm4, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [2147483775,2147483775]
@ -1865,7 +1865,7 @@ define void @trunc_ssat_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movapd {{.*#+}} xmm7 = [127,127]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm3, %xmm0
; SSE41-NEXT: pxor %xmm5, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483775,2147483775]
@ -2058,7 +2058,7 @@ define <16 x i8> @trunc_ssat_v16i64_v16i8(<16 x i64> %a0) {
; SSE2-LABEL: trunc_ssat_v16i64_v16i8:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [127,127]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm6, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa {{.*#+}} xmm11 = [2147483775,2147483775]
@ -2281,7 +2281,7 @@ define <16 x i8> @trunc_ssat_v16i64_v16i8(<16 x i64> %a0) {
; SSSE3-LABEL: trunc_ssat_v16i64_v16i8:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [127,127]
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSSE3-NEXT: movdqa %xmm6, %xmm9
; SSSE3-NEXT: pxor %xmm8, %xmm9
; SSSE3-NEXT: movdqa {{.*#+}} xmm11 = [2147483775,2147483775]
@ -2505,7 +2505,7 @@ define <16 x i8> @trunc_ssat_v16i64_v16i8(<16 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movapd {{.*#+}} xmm11 = [127,127]
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,0,2147483648,0]
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,2147483648]
; SSE41-NEXT: movdqa %xmm6, %xmm0
; SSE41-NEXT: pxor %xmm9, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm12 = [2147483775,2147483775]

@ -17,7 +17,7 @@
define <4 x i32> @trunc_usat_v4i64_v4i32(<4 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v4i64_v4i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259455,9223372039002259455]
@ -49,7 +49,7 @@ define <4 x i32> @trunc_usat_v4i64_v4i32(<4 x i64> %a0) {
;
; SSSE3-LABEL: trunc_usat_v4i64_v4i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSSE3-NEXT: movdqa %xmm0, %xmm3
; SSSE3-NEXT: pxor %xmm2, %xmm3
; SSSE3-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259455,9223372039002259455]
@ -82,7 +82,7 @@ define <4 x i32> @trunc_usat_v4i64_v4i32(<4 x i64> %a0) {
; SSE41-LABEL: trunc_usat_v4i64_v4i32:
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm2
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm2, %xmm3
; SSE41-NEXT: pxor %xmm0, %xmm3
; SSE41-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259455,9223372039002259455]
@ -208,7 +208,7 @@ define <8 x i32> @trunc_usat_v8i64_v8i32(<8 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v8i64_v8i32:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
; SSE2-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm3, %xmm7
; SSE2-NEXT: pxor %xmm5, %xmm7
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259455,9223372039002259455]
@ -269,7 +269,7 @@ define <8 x i32> @trunc_usat_v8i64_v8i32(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_usat_v8i64_v8i32:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [4294967295,4294967295]
; SSSE3-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSSE3-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSSE3-NEXT: movdqa %xmm3, %xmm7
; SSSE3-NEXT: pxor %xmm5, %xmm7
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259455,9223372039002259455]
@ -331,7 +331,7 @@ define <8 x i32> @trunc_usat_v8i64_v8i32(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movapd {{.*#+}} xmm6 = [4294967295,4294967295]
; SSE41-NEXT: movdqa {{.*#+}} xmm7 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm7 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm3, %xmm0
; SSE41-NEXT: pxor %xmm7, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259455,9223372039002259455]
@ -464,7 +464,7 @@ define <8 x i16> @trunc_usat_v8i64_v8i16(<8 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v8i64_v8i16:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [65535,65535]
; SSE2-NEXT: movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm6, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002324991,9223372039002324991]
@ -533,7 +533,7 @@ define <8 x i16> @trunc_usat_v8i64_v8i16(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_usat_v8i64_v8i16:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [65535,65535]
; SSSE3-NEXT: movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSSE3-NEXT: movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
; SSSE3-NEXT: movdqa %xmm2, %xmm5
; SSSE3-NEXT: pxor %xmm6, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002324991,9223372039002324991]
@ -603,7 +603,7 @@ define <8 x i16> @trunc_usat_v8i64_v8i16(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movapd {{.*#+}} xmm5 = [65535,65535]
; SSE41-NEXT: movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm6, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002324991,9223372039002324991]
@ -965,7 +965,7 @@ define <8 x i8> @trunc_usat_v8i64_v8i8(<8 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v8i64_v8i8:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSE2-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm7
; SSE2-NEXT: pxor %xmm5, %xmm7
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
@ -1026,7 +1026,7 @@ define <8 x i8> @trunc_usat_v8i64_v8i8(<8 x i64> %a0) {
; SSSE3-LABEL: trunc_usat_v8i64_v8i8:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSSE3-NEXT: movdqa {{.*#+}} xmm5 = [2147483648,2147483648,2147483648,2147483648]
; SSSE3-NEXT: movdqa {{.*#+}} xmm5 = [9223372039002259456,9223372039002259456]
; SSSE3-NEXT: movdqa %xmm1, %xmm7
; SSSE3-NEXT: pxor %xmm5, %xmm7
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
@ -1088,7 +1088,7 @@ define <8 x i8> @trunc_usat_v8i64_v8i8(<8 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movapd {{.*#+}} xmm5 = [255,255]
; SSE41-NEXT: movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm6, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
@ -1203,7 +1203,7 @@ define void @trunc_usat_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSE2-LABEL: trunc_usat_v8i64_v8i8_store:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSE2-NEXT: movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm6, %xmm5
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
@ -1270,7 +1270,7 @@ define void @trunc_usat_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSSE3-LABEL: trunc_usat_v8i64_v8i8_store:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSSE3-NEXT: movdqa {{.*#+}} xmm6 = [2147483648,2147483648,2147483648,2147483648]
; SSSE3-NEXT: movdqa {{.*#+}} xmm6 = [9223372039002259456,9223372039002259456]
; SSSE3-NEXT: movdqa %xmm0, %xmm5
; SSSE3-NEXT: pxor %xmm6, %xmm5
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
@ -1338,7 +1338,7 @@ define void @trunc_usat_v8i64_v8i8_store(<8 x i64> %a0, <8 x i8> *%p1) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm4
; SSE41-NEXT: movapd {{.*#+}} xmm8 = [255,255]
; SSE41-NEXT: movdqa {{.*#+}} xmm7 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm7 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: pxor %xmm7, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259711,9223372039002259711]
; SSE41-NEXT: movdqa %xmm9, %xmm6
@ -1471,7 +1471,7 @@ define <16 x i8> @trunc_usat_v16i64_v16i8(<16 x i64> %a0) {
; SSE2-LABEL: trunc_usat_v16i64_v16i8:
; SSE2: # %bb.0:
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm11
; SSE2-NEXT: pxor %xmm9, %xmm11
; SSE2-NEXT: movdqa {{.*#+}} xmm10 = [9223372039002259711,9223372039002259711]
@ -1588,7 +1588,7 @@ define <16 x i8> @trunc_usat_v16i64_v16i8(<16 x i64> %a0) {
; SSSE3-LABEL: trunc_usat_v16i64_v16i8:
; SSSE3: # %bb.0:
; SSSE3-NEXT: movdqa {{.*#+}} xmm8 = [255,255]
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [2147483648,2147483648,2147483648,2147483648]
; SSSE3-NEXT: movdqa {{.*#+}} xmm9 = [9223372039002259456,9223372039002259456]
; SSSE3-NEXT: movdqa %xmm1, %xmm11
; SSSE3-NEXT: pxor %xmm9, %xmm11
; SSSE3-NEXT: movdqa {{.*#+}} xmm10 = [9223372039002259711,9223372039002259711]
@ -1706,7 +1706,7 @@ define <16 x i8> @trunc_usat_v16i64_v16i8(<16 x i64> %a0) {
; SSE41: # %bb.0:
; SSE41-NEXT: movdqa %xmm0, %xmm8
; SSE41-NEXT: movapd {{.*#+}} xmm9 = [255,255]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [2147483648,2147483648,2147483648,2147483648]
; SSE41-NEXT: movdqa {{.*#+}} xmm10 = [9223372039002259456,9223372039002259456]
; SSE41-NEXT: movdqa %xmm1, %xmm0
; SSE41-NEXT: pxor %xmm10, %xmm0
; SSE41-NEXT: movdqa {{.*#+}} xmm11 = [9223372039002259711,9223372039002259711]

@ -13,7 +13,7 @@ define <2 x i1> @ugt_v2i64(<2 x i64> %x, <2 x i64> %y) {
; SSE: # %bb.0:
; SSE-NEXT: psrlq $1, %xmm0
; SSE-NEXT: psrlq $1, %xmm1
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE-NEXT: pxor %xmm2, %xmm1
; SSE-NEXT: pxor %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, %xmm2
@ -43,7 +43,7 @@ define <2 x i1> @ult_v2i64(<2 x i64> %x, <2 x i64> %y) {
; SSE: # %bb.0:
; SSE-NEXT: psrlq $1, %xmm0
; SSE-NEXT: psrlq $1, %xmm1
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE-NEXT: pxor %xmm2, %xmm0
; SSE-NEXT: pxor %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, %xmm2
@ -73,7 +73,7 @@ define <2 x i1> @uge_v2i64(<2 x i64> %x, <2 x i64> %y) {
; SSE: # %bb.0:
; SSE-NEXT: psrlq $1, %xmm0
; SSE-NEXT: psrlq $1, %xmm1
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE-NEXT: pxor %xmm2, %xmm0
; SSE-NEXT: pxor %xmm2, %xmm1
; SSE-NEXT: movdqa %xmm1, %xmm2
@ -107,7 +107,7 @@ define <2 x i1> @ule_v2i64(<2 x i64> %x, <2 x i64> %y) {
; SSE: # %bb.0:
; SSE-NEXT: psrlq $1, %xmm0
; SSE-NEXT: psrlq $1, %xmm1
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE-NEXT: pxor %xmm2, %xmm1
; SSE-NEXT: pxor %xmm2, %xmm0
; SSE-NEXT: movdqa %xmm0, %xmm2

@ -4471,7 +4471,7 @@ entry:
define <8 x i64> @test121(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test121:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm4, %xmm10
@ -4592,7 +4592,7 @@ entry:
define <8 x i64> @test122(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test122:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm4, %xmm10
@ -4713,7 +4713,7 @@ entry:
define <8 x i64> @test123(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test123:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm4, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm0, %xmm10
@ -4833,7 +4833,7 @@ entry:
define <8 x i64> @test124(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test124:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm4, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm0, %xmm10
@ -4953,7 +4953,7 @@ entry:
define <8 x i64> @test125(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test125:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm4, %xmm10
@ -5103,7 +5103,7 @@ entry:
define <8 x i64> @test126(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test126:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm4, %xmm10
@ -5253,7 +5253,7 @@ entry:
define <8 x i64> @test127(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test127:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm4, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm0, %xmm10
@ -5402,7 +5402,7 @@ entry:
define <8 x i64> @test128(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test128:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm4, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm0, %xmm10
@ -6929,7 +6929,7 @@ entry:
define <8 x i64> @test153(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test153:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm4, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm0, %xmm10
@ -7049,7 +7049,7 @@ entry:
define <8 x i64> @test154(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test154:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm4, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm0, %xmm10
@ -7169,7 +7169,7 @@ entry:
define <8 x i64> @test155(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test155:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm4, %xmm10
@ -7290,7 +7290,7 @@ entry:
define <8 x i64> @test156(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test156:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm4, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm0, %xmm10
@ -7439,7 +7439,7 @@ entry:
define <8 x i64> @test159(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test159:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm4, %xmm10
@ -7589,7 +7589,7 @@ entry:
define <8 x i64> @test160(<8 x i64> %a, <8 x i64> %b) {
; SSE2-LABEL: test160:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm8 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm9
; SSE2-NEXT: pxor %xmm8, %xmm9
; SSE2-NEXT: movdqa %xmm4, %xmm10
@ -7739,7 +7739,7 @@ entry:
define <4 x i64> @test161(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test161:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -7813,7 +7813,7 @@ entry:
define <4 x i64> @test162(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test162:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -7887,7 +7887,7 @@ entry:
define <4 x i64> @test163(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test163:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -7960,7 +7960,7 @@ entry:
define <4 x i64> @test164(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test164:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -8033,7 +8033,7 @@ entry:
define <4 x i64> @test165(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test165:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -8122,7 +8122,7 @@ entry:
define <4 x i64> @test166(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test166:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -8211,7 +8211,7 @@ entry:
define <4 x i64> @test167(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test167:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -8299,7 +8299,7 @@ entry:
define <4 x i64> @test168(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test168:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -8387,7 +8387,7 @@ entry:
define <4 x i64> @test169(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test169:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -8460,7 +8460,7 @@ entry:
define <4 x i64> @test170(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test170:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -8533,7 +8533,7 @@ entry:
define <4 x i64> @test171(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test171:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -8607,7 +8607,7 @@ entry:
define <4 x i64> @test172(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test172:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -8681,7 +8681,7 @@ entry:
define <4 x i64> @test173(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test173:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -8769,7 +8769,7 @@ entry:
define <4 x i64> @test174(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test174:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm2, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm0, %xmm6
@ -8857,7 +8857,7 @@ entry:
define <4 x i64> @test175(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test175:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -8946,7 +8946,7 @@ entry:
define <4 x i64> @test176(<4 x i64> %a, <4 x i64> %b) {
; SSE2-LABEL: test176:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm4 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm5
; SSE2-NEXT: pxor %xmm4, %xmm5
; SSE2-NEXT: movdqa %xmm2, %xmm6
@ -9035,7 +9035,7 @@ entry:
define <2 x i64> @test177(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test177:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -9086,7 +9086,7 @@ entry:
define <2 x i64> @test178(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test178:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -9137,7 +9137,7 @@ entry:
define <2 x i64> @test179(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test179:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -9187,7 +9187,7 @@ entry:
define <2 x i64> @test180(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test180:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -9237,7 +9237,7 @@ entry:
define <2 x i64> @test181(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test181:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -9297,7 +9297,7 @@ entry:
define <2 x i64> @test182(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test182:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -9357,7 +9357,7 @@ entry:
define <2 x i64> @test183(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test183:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -9417,7 +9417,7 @@ entry:
define <2 x i64> @test184(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test184:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -9477,7 +9477,7 @@ entry:
define <2 x i64> @test185(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test185:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -9527,7 +9527,7 @@ entry:
define <2 x i64> @test186(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test186:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -9577,7 +9577,7 @@ entry:
define <2 x i64> @test187(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test187:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -9628,7 +9628,7 @@ entry:
define <2 x i64> @test188(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test188:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,0,2147483648,0]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -9679,7 +9679,7 @@ entry:
define <2 x i64> @test189(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test189:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -9739,7 +9739,7 @@ entry:
define <2 x i64> @test190(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test190:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm1, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm0, %xmm2
@ -9799,7 +9799,7 @@ entry:
define <2 x i64> @test191(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test191:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2
@ -9859,7 +9859,7 @@ entry:
define <2 x i64> @test192(<2 x i64> %a, <2 x i64> %b) {
; SSE2-LABEL: test192:
; SSE2: # %bb.0: # %entry
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [2147483648,2147483648,2147483648,2147483648]
; SSE2-NEXT: movdqa {{.*#+}} xmm2 = [9223372039002259456,9223372039002259456]
; SSE2-NEXT: movdqa %xmm0, %xmm3
; SSE2-NEXT: pxor %xmm2, %xmm3
; SSE2-NEXT: pxor %xmm1, %xmm2