[asan] further speedup use-after-return: simplify deallocation of fake frames. ~ 20% speedup.

llvm-svn: 190852
Kostya Serebryany 2013-09-17 07:42:54 +00:00
parent ac3e8eb9f0
commit 2f5c2be6bd
3 changed files with 15 additions and 21 deletions

View File

@@ -63,25 +63,13 @@ FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
     FakeFrame *res = reinterpret_cast<FakeFrame *>(
         GetFrame(stack_size_log, class_id, pos));
     res->real_stack = real_stack;
+    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
     return res;
   }
   CHECK(0 && "Failed to allocate a fake stack frame");
   return 0;
 }
-ALWAYS_INLINE USED
-void FakeStack::Deallocate(FakeFrame *ff, uptr stack_size_log, uptr class_id,
-                           uptr real_stack) {
-  u8 *base = GetFrame(stack_size_log, class_id, 0);
-  u8 *cur = reinterpret_cast<u8 *>(ff);
-  CHECK_LE(base, cur);
-  CHECK_LT(cur, base + (1UL << stack_size_log));
-  uptr pos = (cur - base) >> (kMinStackFrameSizeLog + class_id);
-  u8 *flags = GetFlags(stack_size_log, class_id);
-  CHECK_EQ(flags[pos], 1);
-  flags[pos] = 0;
-}
 uptr FakeStack::AddrIsInFakeStack(uptr ptr) {
   uptr stack_size_log = this->stack_size_log();
   uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
@@ -161,9 +149,7 @@ ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size, uptr real_stack) {
 ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size, uptr real_stack) {
   if (ptr == real_stack)
     return;
-  FakeStack *fs = GetFakeStackFast(); // Must not be 0.
-  FakeFrame *ff = reinterpret_cast<FakeFrame *>(ptr);
-  fs->Deallocate(ff, fs->stack_size_log(), class_id, real_stack);
+  FakeStack::Deallocate(ptr, class_id);
   SetShadow(ptr, size, class_id, kMagic8);
 }

View File

@@ -133,9 +133,10 @@ class FakeStack {
   // Allocate the fake frame.
   FakeFrame *Allocate(uptr stack_size_log, uptr class_id, uptr real_stack);
-  // Deallocate the fake frame.
-  void Deallocate(FakeFrame *ff, uptr stack_size_log, uptr class_id,
-                  uptr real_stack);
+  // Deallocate the fake frame: read the saved flag address and write 0 there.
+  static void Deallocate(uptr x, uptr class_id) {
+    **SavedFlagPtr(x, class_id) = 0;
+  }
   // Poison the entire FakeStack's shadow with the magic value.
   void PoisonAll(u8 magic);
@@ -148,6 +149,13 @@
     return 1UL << (class_id + kMinStackFrameSizeLog);
   }
+  // The fake frame is guaranteed to have a right redzone.
+  // We use the last word of that redzone to store the address of the flag
+  // that corresponds to the current frame to make faster deallocation.
+  static u8 **SavedFlagPtr(uptr x, uptr class_id) {
+    return reinterpret_cast<u8 **>(x + BytesInSizeClass(class_id) - sizeof(x));
+  }
   uptr stack_size_log() const { return stack_size_log_; }
   void HandleNoReturn();

View File

@@ -128,7 +128,7 @@ TEST(FakeStack, Allocate) {
     }
     for (std::map<FakeFrame *, uptr>::iterator it = s.begin(); it != s.end();
          ++it) {
-      fs->Deallocate(it->first, stack_size_log, it->second, 0);
+      fs->Deallocate(reinterpret_cast<uptr>(it->first), it->second);
     }
   }
   fs->Destroy();
@@ -141,7 +141,7 @@ static void RecursiveFunction(FakeStack *fs, int depth) {
     RecursiveFunction(fs, depth - 1);
     RecursiveFunction(fs, depth - 1);
   }
-  fs->Deallocate(ff, fs->stack_size_log(), class_id, 0);
+  fs->Deallocate(reinterpret_cast<uptr>(ff), class_id);
 }
 TEST(FakeStack, RecursiveStressTest) {