[asan] Add ForEachChunk() to sanitizer allocators. Patch by Sergey Matveev

llvm-svn: 177147
This commit is contained in:
Kostya Serebryany 2013-03-15 11:39:41 +00:00
parent cdd46d9ccc
commit b941a2fca4
2 changed files with 146 additions and 0 deletions

View File

@@ -433,6 +433,24 @@ class SizeClassAllocator64 {
}
}
// Iterate over existing chunks. May include chunks that are not currently
// allocated to the user (e.g. freed).
// The caller is expected to call ForceLock() before calling this function.
template<typename Callable>
void ForEachChunk(const Callable &callback) {
for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
RegionInfo *region = GetRegionInfo(class_id);
uptr chunk_size = SizeClassMap::Size(class_id);
uptr region_beg = kSpaceBeg + class_id * kRegionSize;
for (uptr p = region_beg;
p < region_beg + region->allocated_user;
p += chunk_size) {
// Too slow: CHECK_EQ((void *)p, GetBlockBegin((void *)p));
callback((void *)p);
}
}
}
typedef SizeClassMap SizeClassMapT;
static const uptr kNumClasses = SizeClassMap::kNumClasses;
static const uptr kNumClassesRounded = SizeClassMap::kNumClassesRounded;
@@ -681,6 +699,25 @@ class SizeClassAllocator32 {
}
}
// Iterate over existing chunks. May include chunks that are not currently
// allocated to the user (e.g. freed).
// The caller is expected to call ForceLock() before calling this function.
template<typename Callable>
void ForEachChunk(const Callable &callback) {
for (uptr region = 0; region < kNumPossibleRegions; region++)
if (state_->possible_regions[region]) {
uptr chunk_size = SizeClassMap::Size(state_->possible_regions[region]);
uptr max_chunks_in_region = kRegionSize / (chunk_size + kMetadataSize);
uptr region_beg = region * kRegionSize;
for (uptr p = region_beg;
p < region_beg + max_chunks_in_region * chunk_size;
p += chunk_size) {
// Too slow: CHECK_EQ((void *)p, GetBlockBegin((void *)p));
callback((void *)p);
}
}
}
void PrintStats() {
}
@@ -1005,6 +1042,15 @@ class LargeMmapAllocator {
mutex_.Unlock();
}
// Iterate over existing chunks. May include chunks that are not currently
// allocated to the user (e.g. freed).
// The caller is expected to call ForceLock() before calling this function.
template<typename Callable>
void ForEachChunk(const Callable &callback) {
for (uptr i = 0; i < n_chunks_; i++)
callback(GetUser(chunks_[i]));
}
private:
static const int kMaxNumChunks = 1 << FIRST_32_SECOND_64(15, 18);
struct Header {
@@ -1168,6 +1214,15 @@ class CombinedAllocator {
primary_.ForceUnlock();
}
// Iterate over existing chunks. May include chunks that are not currently
// allocated to the user (e.g. freed).
// The caller is expected to call ForceLock() before calling this function.
// Delegates to both underlying allocators: primary first, then secondary,
// so the callback sees chunks of both in that order.
template<typename Callable>
void ForEachChunk(const Callable &callback) {
primary_.ForEachChunk(callback);
secondary_.ForEachChunk(callback);
}
private:
PrimaryAllocator primary_;
SecondaryAllocator secondary_;

View File

@@ -22,6 +22,7 @@
#include <pthread.h>
#include <algorithm>
#include <vector>
#include <set>
// Too slow for debug build
#if TSAN_DEBUG == 0
@@ -565,4 +566,94 @@ TEST(Allocator, ScopedBuffer) {
}
}
// Functor handed to ForEachChunk(): records every reported chunk pointer
// into an externally owned std::set so tests can verify coverage.
class IterationTestCallback {
 public:
  explicit IterationTestCallback(std::set<void *> *chunks)
      : chunks_(chunks) {}
  // Invoked once per chunk; duplicates collapse in the set.
  void operator()(void *chunk) const { chunks_->insert(chunk); }

 private:
  std::set<void *> *chunks_;  // Not owned.
};
template <class Allocator>
void TestSizeClassAllocatorIteration() {
Allocator *a = new Allocator;
a->Init();
SizeClassAllocatorLocalCache<Allocator> cache;
memset(&cache, 0, sizeof(cache));
cache.Init(0);
static const uptr sizes[] = {1, 16, 30, 40, 100, 1000, 10000,
50000, 60000, 100000, 120000, 300000, 500000, 1000000, 2000000};
std::vector<void *> allocated;
// Allocate a bunch of chunks.
for (uptr s = 0; s < ARRAY_SIZE(sizes); s++) {
uptr size = sizes[s];
if (!a->CanAllocate(size, 1)) continue;
// printf("s = %ld\n", size);
uptr n_iter = std::max((uptr)6, 80000 / size);
// fprintf(stderr, "size: %ld iter: %ld\n", size, n_iter);
for (uptr j = 0; j < n_iter; j++) {
uptr class_id0 = Allocator::SizeClassMapT::ClassID(size);
void *x = cache.Allocate(a, class_id0);
allocated.push_back(x);
}
}
std::set<void *> reported_chunks;
IterationTestCallback callback(&reported_chunks);
a->ForceLock();
a->template ForEachChunk<IterationTestCallback>(callback);
a->ForceUnlock();
for (uptr i = 0; i < allocated.size(); i++) {
// Don't use EXPECT_NE. Reporting the first mismatch is enough.
ASSERT_NE(reported_chunks.find(allocated[i]), reported_chunks.end());
}
a->TestOnlyUnmap();
delete a;
}
#if SANITIZER_WORDSIZE == 64
// The 64-bit primary allocator only exists on 64-bit targets.
TEST(SanitizerCommon, SizeClassAllocator64Iteration) {
TestSizeClassAllocatorIteration<Allocator64>();
}
#endif
// ForEachChunk() on the 32-bit compact primary allocator must report all
// allocated chunks.
TEST(SanitizerCommon, SizeClassAllocator32Iteration) {
TestSizeClassAllocatorIteration<Allocator32Compact>();
}
// ForEachChunk() on the secondary (mmap-based) allocator must report every
// chunk that was allocated through it.
TEST(SanitizerCommon, LargeMmapAllocatorIteration) {
  LargeMmapAllocator<> a;
  a.Init();
  AllocatorStats stats;
  stats.Init();

  static const int kNumAllocs = 1000;
  static const uptr kSize = 40;
  char *allocated[kNumAllocs];
  // Allocate a batch of identically-sized chunks.
  for (int i = 0; i < kNumAllocs; i++)
    allocated[i] = (char *)a.Allocate(&stats, kSize, 1);

  // Collect everything ForEachChunk reports.
  std::set<void *> reported_chunks;
  IterationTestCallback callback(&reported_chunks);
  a.ForceLock();
  a.ForEachChunk<IterationTestCallback>(callback);
  a.ForceUnlock();

  for (uptr i = 0; i < kNumAllocs; i++) {
    // Don't use EXPECT_NE. Reporting the first mismatch is enough.
    ASSERT_NE(reported_chunks.find(allocated[i]), reported_chunks.end());
  }
}
#endif // #if TSAN_DEBUG==0