[sanitizer] Add delta compression stack depot

Compresses blocks by a factor of about 4x; packing takes about 10 ms per 8 MiB block.
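For intuition, a minimal standalone sketch of the scheme follows (not the sanitizer's code; encode_sleb128, delta_compress, and the sample frame values are illustrative only): neighboring words in a stack-trace block are usually numerically close, so their signed deltas are small, and SLEB128 spends only one or two bytes on small values.

#include <cstdint>
#include <cstdio>
#include <vector>

// Append one SLEB128-encoded signed value to `out`.
static void encode_sleb128(int64_t v, std::vector<uint8_t> &out) {
  for (bool more = true; more;) {
    uint8_t byte = v & 0x7f;
    v >>= 7;  // assumes arithmetic right shift for negative values
    more = !((v == 0 && !(byte & 0x40)) || (v == -1 && (byte & 0x40)));
    if (more) byte |= 0x80;
    out.push_back(byte);
  }
}

// Delta compression: emit each word as the SLEB128 encoding of its
// distance from the previous word.
static std::vector<uint8_t> delta_compress(const std::vector<uint64_t> &words) {
  std::vector<uint8_t> out;
  uint64_t prev = 0;
  for (uint64_t w : words) {
    encode_sleb128(static_cast<int64_t>(w - prev), out);
    prev = w;
  }
  return out;
}

int main() {
  // Hypothetical PCs: close together, as frames of one trace tend to be.
  std::vector<uint64_t> frames = {0x4005d0, 0x4011e8, 0x401204, 0x40230c};
  std::vector<uint8_t> packed = delta_compress(frames);
  std::printf("%zu words -> %zu bytes\n", frames.size(), packed.size());
  return 0;
}

The deltas are signed (hence SLEB128 rather than ULEB128) because return addresses can step backwards as well as forwards between frames.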

Depends on D114498.

Reviewed By: morehouse

Differential Revision: https://reviews.llvm.org/D114503
Vitaly Buka 2021-11-21 21:54:08 -08:00
parent 3a4d373ec2
commit 1d8f295759
3 changed files with 127 additions and 11 deletions

compiler-rt/lib/sanitizer_common/sanitizer_stack_store.cpp

@@ -10,6 +10,8 @@
 #include "sanitizer_atomic.h"
 #include "sanitizer_common.h"
 #include "sanitizer_internal_defs.h"
+#include "sanitizer_leb128.h"
 #include "sanitizer_stacktrace.h"

 namespace __sanitizer {
@@ -128,6 +130,37 @@ uptr *StackStore::BlockInfo::GetOrCreate() {
return Create();
}
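+// Delta-encode a block of pointer-sized words: store each word as the
+// SLEB128-encoded difference from its predecessor, so the small gaps
+// between neighboring PCs take only a byte or two each.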
+static u8 *CompressDelta(const uptr *from, const uptr *from_end, u8 *to,
+                         u8 *to_end) {
+  uptr prev = 0;
+  for (; from < from_end; ++from) {
+    sptr diff = *from - prev;
+    to = EncodeSLEB128(diff, to, to_end);
+    prev += diff;
+  }
+  return to;
+}
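+
+// Inverse of CompressDelta: decode SLEB128 deltas and accumulate them
+// back into absolute values until the output block is full.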
+static uptr *UncompressDelta(const u8 *from, const u8 *from_end, uptr *to,
+                             uptr *to_end) {
+  uptr prev = 0;
+  for (; to < to_end; ++to) {
+    sptr diff;
+    from = DecodeSLEB128<sptr>(from, from_end, &diff);
+    prev += diff;
+    *to = prev;
+  }
+  return to;
+}
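+
+// Header stored at the start of every packed block; `size` is the total
+// packed size in bytes, counted from the start of the header.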
+namespace {
+struct PackedHeader {
+  uptr size;
+  StackStore::Compression type;
+  u8 data[];
+};
+}  // namespace
uptr *StackStore::BlockInfo::GetOrUnpack() {
SpinMutexLock l(&mtx_);
switch (state) {
@@ -140,10 +173,34 @@ uptr *StackStore::BlockInfo::GetOrUnpack() {
break;
}
-  uptr *ptr = Get();
+  u8 *ptr = reinterpret_cast<u8 *>(Get());
CHECK_NE(nullptr, ptr);
-  // Fake unpacking.
-  for (uptr i = 0; i < kBlockSizeFrames; ++i) ptr[i] = ~ptr[i];
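+  // Read the header to learn how large the packed block is and which
+  // compression was used, then decompress into a fresh mapping.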
+  const PackedHeader *header = reinterpret_cast<const PackedHeader *>(ptr);
+  CHECK_LE(header->size, kBlockSizeBytes);
+  CHECK_GE(header->size, sizeof(PackedHeader));
+  uptr packed_size_aligned = RoundUpTo(header->size, GetPageSizeCached());
+  uptr *unpacked = reinterpret_cast<uptr *>(
+      MmapNoReserveOrDie(kBlockSizeBytes, "StackStoreUnpack"));
+  uptr *unpacked_end;
+  switch (header->type) {
+    case Compression::Delta:
+      unpacked_end = UncompressDelta(header->data, ptr + header->size, unpacked,
+                                     unpacked + kBlockSizeFrames);
+      break;
+    default:
+      UNREACHABLE("Unexpected type");
+      break;
+  }
+  CHECK_EQ(kBlockSizeFrames, unpacked_end - unpacked);
+  MprotectReadOnly(reinterpret_cast<uptr>(unpacked), kBlockSizeBytes);
+  atomic_store(&data_, reinterpret_cast<uptr>(unpacked), memory_order_release);
+  UnmapOrDie(ptr, packed_size_aligned);
state = State::Unpacked;
return Get();
}
@@ -165,17 +222,56 @@ uptr StackStore::BlockInfo::Pack(Compression type) {
if (!ptr || !Stored(0))
return 0;
-  // Fake packing.
-  for (uptr i = 0; i < kBlockSizeFrames; ++i) ptr[i] = ~ptr[i];
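+  // Compress into a separate mapping so the original block can be kept
+  // as-is if packing turns out not to pay off.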
+  u8 *packed = reinterpret_cast<u8 *>(
+      MmapNoReserveOrDie(kBlockSizeBytes, "StackStorePack"));
+  PackedHeader *header = reinterpret_cast<PackedHeader *>(packed);
+  u8 *alloc_end = packed + kBlockSizeBytes;
+  u8 *packed_end = nullptr;
+  switch (type) {
+    case Compression::Delta:
+      packed_end =
+          CompressDelta(ptr, ptr + kBlockSizeFrames, header->data, alloc_end);
+      break;
+    default:
+      UNREACHABLE("Unexpected type");
+      break;
+  }
+  header->type = type;
+  header->size = packed_end - packed;
+  VPrintf(1, "Packed block of %zu KiB to %zu KiB\n", kBlockSizeBytes >> 10,
+          header->size >> 10);
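+  // Keep the block unpacked unless packing saves at least 1/8 of it.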
+  if (kBlockSizeBytes - header->size < kBlockSizeBytes / 8) {
+    VPrintf(1, "Undo and keep block unpacked\n");
+    MprotectReadOnly(reinterpret_cast<uptr>(ptr), kBlockSizeBytes);
+    UnmapOrDie(packed, kBlockSizeBytes);
+    state = State::Unpacked;
+    return 0;
+  }
+  uptr packed_size_aligned = RoundUpTo(header->size, GetPageSizeCached());
+  UnmapOrDie(packed + packed_size_aligned,
+             kBlockSizeBytes - packed_size_aligned);
+  MprotectReadOnly(reinterpret_cast<uptr>(packed), packed_size_aligned);
+  atomic_store(&data_, reinterpret_cast<uptr>(packed), memory_order_release);
+  UnmapOrDie(ptr, kBlockSizeBytes);
   state = State::Packed;
-  return kBlockSizeBytes - kBlockSizeBytes / 10;
+  return kBlockSizeBytes - packed_size_aligned;
}
uptr StackStore::BlockInfo::Allocated() const {
SpinMutexLock l(&mtx_);
switch (state) {
-    case State::Packed:
-      return kBlockSizeBytes / 10;
+    case State::Packed: {
+      const PackedHeader *ptr = reinterpret_cast<const PackedHeader *>(Get());
+      CHECK_NE(nullptr, ptr);
+      return RoundUpTo(ptr->size, GetPageSizeCached());
+    }
case State::Unpacked:
case State::Storing:
return kBlockSizeBytes;
@@ -184,7 +280,7 @@ uptr StackStore::BlockInfo::Allocated() const {
void StackStore::BlockInfo::TestOnlyUnmap() {
if (uptr *ptr = Get())
-    UnmapOrDie(ptr, StackStore::kBlockSizeBytes);
+    UnmapOrDie(ptr, kBlockSizeBytes);
}
bool StackStore::BlockInfo::Stored(uptr n) {

compiler-rt/lib/sanitizer_common/sanitizer_stack_store.h

@@ -25,7 +25,7 @@ class StackStore {
public:
enum class Compression : u8 {
None = 0,
-    Test,
+    Delta,
};
constexpr StackStore() = default;

compiler-rt/lib/sanitizer_common/tests/sanitizer_stack_store_test.cpp

@@ -135,7 +135,8 @@ struct StackStorePackTest : public StackStoreTest,
INSTANTIATE_TEST_SUITE_P(
PackUnpacks, StackStorePackTest,
::testing::ValuesIn({
-        StackStorePackTest::ParamType(StackStore::Compression::Test, 4),
+        StackStorePackTest::ParamType(StackStore::Compression::Delta,
+                                      FIRST_32_SECOND_64(2, 6)),
}));
TEST_P(StackStorePackTest, PackUnpack) {
@@ -175,4 +176,23 @@ TEST_P(StackStorePackTest, PackUnpack) {
EXPECT_EQ(0u, CountPackedBlocks());
}
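+// Fill blocks with hash-generated, incompressible frames and verify that
+// Pack() declines to keep them packed.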
+TEST_P(StackStorePackTest, Failed) {
+  MurMur2Hash64Builder h(0);
+  StackStore::Compression type = GetParam().first;
+  std::vector<uptr> frames(200);
+  for (uptr i = 0; i < kBlockSizeFrames * 4 / frames.size(); ++i) {
+    for (uptr& f : frames) {
+      h.add(1);
+      // Make it difficult to pack.
+      f = h.get();
+    }
+    uptr pack = 0;
+    store_.Store(StackTrace(frames.data(), frames.size()), &pack);
+    if (pack)
+      EXPECT_EQ(0u, store_.Pack(type));
+  }
+  EXPECT_EQ(0u, CountPackedBlocks());
+}
} // namespace __sanitizer