Micro-optimize this function a bit. This shrinks the generated code some, and
allows the routine to be inlined into common callers. The various bits that hit
this code in their hotpath seem slightly lower on the profile, but I can't
really measure a performance improvement as everything seems to still be
bottlenecked on likely cache misses. =/

llvm-svn: 159648
Chandler Carruth 2012-07-03 07:16:13 +00:00
parent 85c938f44f
commit 9f0e4a2f18
1 changed file with 6 additions and 4 deletions

@@ -442,11 +442,10 @@ private:
   template<typename LookupKeyT>
   bool LookupBucketFor(const LookupKeyT &Val,
                        const BucketT *&FoundBucket) const {
-    unsigned BucketNo = getHashValue(Val);
-    unsigned ProbeAmt = 1;
     const BucketT *BucketsPtr = getBuckets();
+    const unsigned NumBuckets = getNumBuckets();
 
-    if (getNumBuckets() == 0) {
+    if (NumBuckets == 0) {
       FoundBucket = 0;
       return false;
     }
@@ -459,8 +458,10 @@ private:
            !KeyInfoT::isEqual(Val, TombstoneKey) &&
            "Empty/Tombstone value shouldn't be inserted into map!");
 
+    unsigned BucketNo = getHashValue(Val) & (NumBuckets-1);
+    unsigned ProbeAmt = 1;
     while (1) {
-      const BucketT *ThisBucket = BucketsPtr + (BucketNo & (getNumBuckets()-1));
+      const BucketT *ThisBucket = BucketsPtr + BucketNo;
       // Found Val's bucket? If so, return it.
       if (KeyInfoT::isEqual(Val, ThisBucket->first)) {
         FoundBucket = ThisBucket;
@@ -485,6 +486,7 @@ private:
 
       // Otherwise, it's a hash collision or a tombstone, continue quadratic
       // probing.
       BucketNo += ProbeAmt++;
+      BucketNo &= (NumBuckets-1);
     }
   }
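
For reference, the shape of the lookup loop after this change, as a minimal
standalone sketch rather than the header code itself: the bucket count is read
once, the hash is masked down to a bucket index once before the loop, and each
quadratic-probe step only adds and masks. The names here (Bucket, findBucket,
EmptyKey) are made up for the example, tombstone handling is omitted, and it
assumes what the masking relies on: a power-of-two bucket count and at least
one empty bucket so the probe terminates.

#include <cassert>
#include <vector>

struct Bucket {
  int Key;   // EmptyKey marks an unused slot in this toy example.
  int Value;
};

const int EmptyKey = -1;

// Quadratic probing with the bucket count cached and the mask folded into the
// probe update, mirroring the structure introduced by the diff above.
const Bucket *findBucket(const std::vector<Bucket> &Buckets, int Key,
                         unsigned Hash) {
  const unsigned NumBuckets = static_cast<unsigned>(Buckets.size());
  if (NumBuckets == 0)
    return 0;
  assert((NumBuckets & (NumBuckets - 1)) == 0 && "bucket count must be 2^k");

  unsigned BucketNo = Hash & (NumBuckets - 1);  // Reduce the hash once.
  unsigned ProbeAmt = 1;
  while (1) {
    const Bucket *ThisBucket = &Buckets[BucketNo];
    if (ThisBucket->Key == Key)
      return ThisBucket;   // Found the key.
    if (ThisBucket->Key == EmptyKey)
      return 0;            // Hit an empty slot: the key is absent.
    // Hash collision: continue quadratic probing, keeping the index in range.
    BucketNo += ProbeAmt++;
    BucketNo &= (NumBuckets - 1);
  }
}

The effect of the commit is visible in the loop body: BucketNo stays
pre-masked, so the common probe iteration is an add, a mask, and a compare,
with no reload of the bucket count each time around.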