[scudo][standalone] Fix Secondary bug w/ freelist
Summary: cferris@ found an issue due to the new Secondary free list behavior and unfortunately it's completely my fault. The issue is twofold: - I lost track of the (major) fact that the Combined assumes that all chunks returned by the Secondary are zero'd out appropriately when dealing with `ZeroContents`. With the introduction of the freelist, it's no longer the case as there can be a small portion of memory between the header and the next page boundary that is left untouched (the rest is zero'd via release). So the next time that block is returned, it's not fully zero'd out. - There was no test that would exercise that behavior :( There are several ways to fix this, the one I chose makes the most sense to me: we pass `ZeroContents` to the Secondary's `allocate` and it zeroes out the block if requested and it's coming from the freelist. This prevents an extraneous `memset` in case the block comes from `map`. Another possibility could have been to `memset` in `deallocate`, but it's probably overzealous as all secondary blocks don't need to be zero'd out. Add a test that would have found the issue prior to the fix. Reviewers: morehouse, hctim, cferris, pcc, eugenis, vitalybuka Subscribers: #sanitizers, llvm-commits Tags: #sanitizers, #llvm Differential Revision: https://reviews.llvm.org/D69675
This commit is contained in:
parent
b6429cdd65
commit
c7bc3db23c
|
@ -161,6 +161,7 @@ public:
|
|||
uptr Alignment = MinAlignment,
|
||||
bool ZeroContents = false) {
|
||||
initThreadMaybe();
|
||||
ZeroContents = ZeroContents || Options.ZeroContents;
|
||||
|
||||
if (UNLIKELY(Alignment > MaxAlignment)) {
|
||||
if (Options.MayReturnNull)
|
||||
|
@ -200,7 +201,8 @@ public:
|
|||
TSD->unlock();
|
||||
} else {
|
||||
ClassId = 0;
|
||||
Block = Secondary.allocate(NeededSize, Alignment, &BlockEnd);
|
||||
Block =
|
||||
Secondary.allocate(NeededSize, Alignment, &BlockEnd, ZeroContents);
|
||||
}
|
||||
|
||||
if (UNLIKELY(!Block)) {
|
||||
|
@ -212,7 +214,7 @@ public:
|
|||
// We only need to zero the contents for Primary backed allocations. This
|
||||
// condition is not necessarily unlikely, but since memset is costly, we
|
||||
// might as well mark it as such.
|
||||
if (UNLIKELY((ZeroContents || Options.ZeroContents) && ClassId))
|
||||
if (UNLIKELY(ZeroContents && ClassId))
|
||||
memset(Block, 0, PrimaryT::getSizeByClassId(ClassId));
|
||||
|
||||
Chunk::UnpackedHeader Header = {};
|
||||
|
|
|
@ -60,7 +60,8 @@ public:
|
|||
initLinkerInitialized(S);
|
||||
}
|
||||
|
||||
void *allocate(uptr Size, uptr AlignmentHint = 0, uptr *BlockEnd = nullptr);
|
||||
void *allocate(uptr Size, uptr AlignmentHint = 0, uptr *BlockEnd = nullptr,
|
||||
bool ZeroContents = false);
|
||||
|
||||
void deallocate(void *Ptr);
|
||||
|
||||
|
@ -111,7 +112,8 @@ private:
|
|||
// (pending rounding and headers).
|
||||
template <uptr MaxFreeListSize>
|
||||
void *MapAllocator<MaxFreeListSize>::allocate(uptr Size, uptr AlignmentHint,
|
||||
uptr *BlockEnd) {
|
||||
uptr *BlockEnd,
|
||||
bool ZeroContents) {
|
||||
DCHECK_GT(Size, AlignmentHint);
|
||||
const uptr PageSize = getPageSizeCached();
|
||||
const uptr RoundedSize =
|
||||
|
@ -133,8 +135,11 @@ void *MapAllocator<MaxFreeListSize>::allocate(uptr Size, uptr AlignmentHint,
|
|||
Stats.add(StatAllocated, FreeBlockSize);
|
||||
if (BlockEnd)
|
||||
*BlockEnd = H.BlockEnd;
|
||||
return reinterpret_cast<void *>(reinterpret_cast<uptr>(&H) +
|
||||
LargeBlock::getHeaderSize());
|
||||
void *Ptr = reinterpret_cast<void *>(reinterpret_cast<uptr>(&H) +
|
||||
LargeBlock::getHeaderSize());
|
||||
if (ZeroContents)
|
||||
memset(Ptr, 0, H.BlockEnd - reinterpret_cast<uptr>(Ptr));
|
||||
return Ptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -65,6 +65,20 @@ template <class Config> static void testAllocator() {
|
|||
}
|
||||
Allocator->releaseToOS();
|
||||
|
||||
// Ensure that specifying ZeroContents returns a zero'd out block.
|
||||
for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
|
||||
for (scudo::uptr Delta = 0U; Delta <= 4U; Delta++) {
|
||||
const scudo::uptr Size = (1U << SizeLog) + Delta * 128U;
|
||||
void *P = Allocator->allocate(Size, Origin, 1U << MinAlignLog, true);
|
||||
EXPECT_NE(P, nullptr);
|
||||
for (scudo::uptr I = 0; I < Size; I++)
|
||||
EXPECT_EQ((reinterpret_cast<char *>(P))[I], 0);
|
||||
memset(P, 0xaa, Size);
|
||||
Allocator->deallocate(P, Origin, Size);
|
||||
}
|
||||
}
|
||||
Allocator->releaseToOS();
|
||||
|
||||
// Verify that a chunk will end up being reused, at some point.
|
||||
const scudo::uptr NeedleSize = 1024U;
|
||||
void *NeedleP = Allocator->allocate(NeedleSize, Origin);
|
||||
|
|
Loading…
Reference in New Issue