[X86] For the lzcnt/tzcnt intrinsics, use the ctlz/cttz intrinsics with the zero_undef flag set to false.

Previously we used a select and the zero_undef=true intrinsic. At -O2 this pattern gets optimized to zero_undef=false, but at -O0 that optimization doesn't happen, leaving a compare and cmov wrapped around the tzcnt/lzcnt instruction.

By using the zero_undef=false intrinsic directly without the select, we can improve the -O0 codegen to just an lzcnt/tzcnt instruction.
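
As a rough illustration of the -O0 difference, consider a caller of _mm_tzcnt_32 (a hand-written sketch: the wrapper function name is made up, a BMI-enabled build such as -mbmi is assumed, and the IR in the comments approximates the two patterns rather than quoting compiler output):

#include <immintrin.h>

unsigned int count_trailing(unsigned int x) {
  /* Old pattern, from the header's ternary, at -O0 (approximate IR):
   *   %nz = icmp ne i32 %x, 0
   *   %tz = call i32 @llvm.cttz.i32(i32 %x, i1 true)
   *   %r  = select i1 %nz, i32 %tz, i32 32
   * which isel turns into tzcnt plus a compare and cmov.
   *
   * New pattern, via __builtin_ia32_tzcnt_u32 (approximate IR):
   *   %r = call i32 @llvm.cttz.i32(i32 %x, i1 false)
   * which lowers to a single tzcnt instruction. */
  return _mm_tzcnt_32(x);
}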

Differential Revision: https://reviews.llvm.org/D52392

llvm-svn: 343126
Craig Topper 2018-09-26 17:01:44 +00:00
parent 344475fce5
commit fb5d9f2849
7 changed files with 43 additions and 33 deletions

include/clang/Basic/BuiltinsX86.def

@@ -727,8 +727,14 @@ TARGET_BUILTIN(__builtin_ia32_subborrow_u32, "UcUcUiUiUi*", "n", "")
 TARGET_BUILTIN(__builtin_ia32_rdseed16_step, "UiUs*", "n", "rdseed")
 TARGET_BUILTIN(__builtin_ia32_rdseed32_step, "UiUi*", "n", "rdseed")
+// LZCNT
+TARGET_BUILTIN(__builtin_ia32_lzcnt_u16, "UsUs", "nc", "lzcnt")
+TARGET_BUILTIN(__builtin_ia32_lzcnt_u32, "UiUi", "nc", "lzcnt")
 // BMI
 TARGET_BUILTIN(__builtin_ia32_bextr_u32, "UiUiUi", "nc", "bmi")
+TARGET_BUILTIN(__builtin_ia32_tzcnt_u16, "UsUs", "nc", "")
+TARGET_BUILTIN(__builtin_ia32_tzcnt_u32, "UiUi", "nc", "")
 // BMI2
 TARGET_BUILTIN(__builtin_ia32_bzhi_si, "UiUiUi", "nc", "bmi2")

include/clang/Basic/BuiltinsX86_64.def

@@ -81,7 +81,9 @@ TARGET_BUILTIN(__builtin_ia32_addcarry_u64, "UcUcULLiULLiULLi*", "n", "")
 TARGET_BUILTIN(__builtin_ia32_subborrow_u64, "UcUcULLiULLiULLi*", "n", "")
 TARGET_BUILTIN(__builtin_ia32_rdrand64_step, "UiULLi*", "n", "rdrnd")
 TARGET_BUILTIN(__builtin_ia32_rdseed64_step, "UiULLi*", "n", "rdseed")
+TARGET_BUILTIN(__builtin_ia32_lzcnt_u64, "ULLiULLi", "nc", "lzcnt")
 TARGET_BUILTIN(__builtin_ia32_bextr_u64, "ULLiULLiULLi", "nc", "bmi")
+TARGET_BUILTIN(__builtin_ia32_tzcnt_u64, "ULLiULLi", "nc", "")
 TARGET_BUILTIN(__builtin_ia32_bzhi_di, "ULLiULLiULLi", "nc", "bmi2")
 TARGET_BUILTIN(__builtin_ia32_pdep_di, "ULLiULLiULLi", "nc", "bmi2")
 TARGET_BUILTIN(__builtin_ia32_pext_di, "ULLiULLiULLi", "nc", "bmi2")

lib/CodeGen/CGBuiltin.cpp

@@ -9164,6 +9164,18 @@ Value *CodeGenFunction::EmitX86BuiltinExpr(unsigned BuiltinID,
                                       Ops[0]);
     return Builder.CreateExtractValue(Call, 0);
   }
+  case X86::BI__builtin_ia32_lzcnt_u16:
+  case X86::BI__builtin_ia32_lzcnt_u32:
+  case X86::BI__builtin_ia32_lzcnt_u64: {
+    Value *F = CGM.getIntrinsic(Intrinsic::ctlz, Ops[0]->getType());
+    return Builder.CreateCall(F, {Ops[0], Builder.getInt1(false)});
+  }
+  case X86::BI__builtin_ia32_tzcnt_u16:
+  case X86::BI__builtin_ia32_tzcnt_u32:
+  case X86::BI__builtin_ia32_tzcnt_u64: {
+    Value *F = CGM.getIntrinsic(Intrinsic::cttz, Ops[0]->getType());
+    return Builder.CreateCall(F, {Ops[0], Builder.getInt1(false)});
+  }
   case X86::BI__builtin_ia32_undef128:
   case X86::BI__builtin_ia32_undef256:
   case X86::BI__builtin_ia32_undef512:

lib/Headers/bmiintrin.h

@@ -62,7 +62,7 @@
 static __inline__ unsigned short __RELAXED_FN_ATTRS
 __tzcnt_u16(unsigned short __X)
 {
-  return __X ? __builtin_ctzs(__X) : 16;
+  return __builtin_ia32_tzcnt_u16(__X);
 }
 /// Performs a bitwise AND of the second operand with the one's
@@ -196,7 +196,7 @@ __blsr_u32(unsigned int __X)
 static __inline__ unsigned int __RELAXED_FN_ATTRS
 __tzcnt_u32(unsigned int __X)
 {
-  return __X ? __builtin_ctz(__X) : 32;
+  return __builtin_ia32_tzcnt_u32(__X);
 }
 /// Counts the number of trailing zero bits in the operand.
@@ -212,7 +212,7 @@ __tzcnt_u32(unsigned int __X)
 static __inline__ int __RELAXED_FN_ATTRS
 _mm_tzcnt_32(unsigned int __X)
 {
-  return __X ? __builtin_ctz(__X) : 32;
+  return __builtin_ia32_tzcnt_u32(__X);
 }
 #ifdef __x86_64__
@@ -359,7 +359,7 @@ __blsr_u64(unsigned long long __X)
 static __inline__ unsigned long long __RELAXED_FN_ATTRS
 __tzcnt_u64(unsigned long long __X)
 {
-  return __X ? __builtin_ctzll(__X) : 64;
+  return __builtin_ia32_tzcnt_u64(__X);
 }
 /// Counts the number of trailing zero bits in the operand.
@@ -375,7 +375,7 @@ __tzcnt_u64(unsigned long long __X)
 static __inline__ long long __RELAXED_FN_ATTRS
 _mm_tzcnt_64(unsigned long long __X)
 {
-  return __X ? __builtin_ctzll(__X) : 64;
+  return __builtin_ia32_tzcnt_u64(__X);
 }
 #endif /* __x86_64__ */

lib/Headers/lzcntintrin.h

@@ -44,7 +44,7 @@
 static __inline__ unsigned short __DEFAULT_FN_ATTRS
 __lzcnt16(unsigned short __X)
 {
-  return __X ? __builtin_clzs(__X) : 16;
+  return __builtin_ia32_lzcnt_u16(__X);
 }
 /// Counts the number of leading zero bits in the operand.
@@ -61,7 +61,7 @@ __lzcnt16(unsigned short __X)
 static __inline__ unsigned int __DEFAULT_FN_ATTRS
 __lzcnt32(unsigned int __X)
 {
-  return __X ? __builtin_clz(__X) : 32;
+  return __builtin_ia32_lzcnt_u32(__X);
 }
 /// Counts the number of leading zero bits in the operand.
@@ -78,7 +78,7 @@ __lzcnt32(unsigned int __X)
 static __inline__ unsigned int __DEFAULT_FN_ATTRS
 _lzcnt_u32(unsigned int __X)
 {
-  return __X ? __builtin_clz(__X) : 32;
+  return __builtin_ia32_lzcnt_u32(__X);
 }
 #ifdef __x86_64__
@@ -96,7 +96,7 @@ _lzcnt_u32(unsigned int __X)
 static __inline__ unsigned long long __DEFAULT_FN_ATTRS
 __lzcnt64(unsigned long long __X)
 {
-  return __X ? __builtin_clzll(__X) : 64;
+  return __builtin_ia32_lzcnt_u64(__X);
 }
 /// Counts the number of leading zero bits in the operand.
@@ -113,7 +113,7 @@ __lzcnt64(unsigned long long __X)
 static __inline__ unsigned long long __DEFAULT_FN_ATTRS
 _lzcnt_u64(unsigned long long __X)
 {
-  return __X ? __builtin_clzll(__X) : 64;
+  return __builtin_ia32_lzcnt_u64(__X);
 }
 #endif

test/CodeGen/bmi-builtins.c

@@ -15,9 +15,7 @@
 unsigned short test__tzcnt_u16(unsigned short __X) {
   // CHECK-LABEL: test__tzcnt_u16
-  // CHECK: zext i16 %{{.*}} to i32
-  // CHECK: icmp ne i32 %{{.*}}, 0
-  // CHECK: i16 @llvm.cttz.i16(i16 %{{.*}}, i1 true)
+  // CHECK: i16 @llvm.cttz.i16(i16 %{{.*}}, i1 false)
   return __tzcnt_u16(__X);
 }
@@ -57,15 +55,13 @@ unsigned int test__blsr_u32(unsigned int __X) {
 unsigned int test__tzcnt_u32(unsigned int __X) {
   // CHECK-LABEL: test__tzcnt_u32
-  // CHECK: icmp ne i32 %{{.*}}, 0
-  // CHECK: i32 @llvm.cttz.i32(i32 %{{.*}}, i1 true)
+  // CHECK: i32 @llvm.cttz.i32(i32 %{{.*}}, i1 false)
   return __tzcnt_u32(__X);
 }
 int test_mm_tzcnt_32(unsigned int __X) {
   // CHECK-LABEL: test_mm_tzcnt_32
-  // CHECK: icmp ne i32 %{{.*}}, 0
-  // CHECK: i32 @llvm.cttz.i32(i32 %{{.*}}, i1 true)
+  // CHECK: i32 @llvm.cttz.i32(i32 %{{.*}}, i1 false)
   return _mm_tzcnt_32(__X);
 }
@@ -105,15 +101,13 @@ unsigned long long test__blsr_u64(unsigned long long __X) {
 unsigned long long test__tzcnt_u64(unsigned long long __X) {
   // CHECK-LABEL: test__tzcnt_u64
-  // CHECK: icmp ne i64 %{{.*}}, 0
-  // CHECK: i64 @llvm.cttz.i64(i64 %{{.*}}, i1 true)
+  // CHECK: i64 @llvm.cttz.i64(i64 %{{.*}}, i1 false)
   return __tzcnt_u64(__X);
 }
 long long test_mm_tzcnt_64(unsigned long long __X) {
   // CHECK-LABEL: test_mm_tzcnt_64
-  // CHECK: icmp ne i64 %{{.*}}, 0
-  // CHECK: i64 @llvm.cttz.i64(i64 %{{.*}}, i1 true)
+  // CHECK: i64 @llvm.cttz.i64(i64 %{{.*}}, i1 false)
   return _mm_tzcnt_64(__X);
 }
@@ -121,9 +115,7 @@ long long test_mm_tzcnt_64(unsigned long long __X) {
 unsigned short test_tzcnt_u16(unsigned short __X) {
   // CHECK-LABEL: test_tzcnt_u16
-  // CHECK: zext i16 %{{.*}} to i32
-  // CHECK: icmp ne i32 %{{.*}}, 0
-  // CHECK: i16 @llvm.cttz.i16(i16 %{{.*}}, i1 true)
+  // CHECK: i16 @llvm.cttz.i16(i16 %{{.*}}, i1 false)
   return _tzcnt_u16(__X);
 }
@@ -168,8 +160,7 @@ unsigned int test_blsr_u32(unsigned int __X) {
 unsigned int test_tzcnt_u32(unsigned int __X) {
   // CHECK-LABEL: test_tzcnt_u32
-  // CHECK: icmp ne i32 %{{.*}}, 0
-  // CHECK: i32 @llvm.cttz.i32(i32 %{{.*}}, i1 true)
+  // CHECK: i32 @llvm.cttz.i32(i32 %{{.*}}, i1 false)
   return _tzcnt_u32(__X);
 }
@@ -215,7 +206,6 @@ unsigned long long test_blsr_u64(unsigned long long __X) {
 unsigned long long test_tzcnt_u64(unsigned long long __X) {
   // CHECK-LABEL: test_tzcnt_u64
-  // CHECK: icmp ne i64 %{{.*}}, 0
-  // CHECK: i64 @llvm.cttz.i64(i64 %{{.*}}, i1 true)
+  // CHECK: i64 @llvm.cttz.i64(i64 %{{.*}}, i1 false)
   return _tzcnt_u64(__X);
 }

test/CodeGen/lzcnt-builtins.c

@@ -5,30 +5,30 @@
 unsigned short test__lzcnt16(unsigned short __X)
 {
-  // CHECK: @llvm.ctlz.i16
+  // CHECK: @llvm.ctlz.i16(i16 %{{.*}}, i1 false)
   return __lzcnt16(__X);
 }
 unsigned int test_lzcnt32(unsigned int __X)
 {
-  // CHECK: @llvm.ctlz.i32
+  // CHECK: @llvm.ctlz.i32(i32 %{{.*}}, i1 false)
   return __lzcnt32(__X);
 }
 unsigned long long test__lzcnt64(unsigned long long __X)
 {
-  // CHECK: @llvm.ctlz.i64
+  // CHECK: @llvm.ctlz.i64(i64 %{{.*}}, i1 false)
   return __lzcnt64(__X);
 }
 unsigned int test_lzcnt_u32(unsigned int __X)
 {
-  // CHECK: @llvm.ctlz.i32
+  // CHECK: @llvm.ctlz.i32(i32 %{{.*}}, i1 false)
   return _lzcnt_u32(__X);
 }
 unsigned long long test__lzcnt_u64(unsigned long long __X)
 {
-  // CHECK: @llvm.ctlz.i64
+  // CHECK: @llvm.ctlz.i64(i64 %{{.*}}, i1 false)
   return _lzcnt_u64(__X);
 }