[sanitizer] simplify the allocator's SizeClassMap: do not require an extra template parameter; instead, compute it in flight. No functionality change

llvm-svn: 176853
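In the old scheme every instantiation carried a hand-picked fourth template parameter (e.g. FIRST_32_SECOND_64(25, 28) in the diff below): the first size class whose chunks are large enough to hold their own TransferBatch. This commit derives that property per class instead. Here is a self-contained sketch of the idea; only SizeClassRequiresSeparateTransferBatch() mirrors the predicate added by the commit, while Size() and MaxCached() are simplified stand-ins with made-up geometry, not the real SizeClassMap math:

  // toy_size_class_map.cc -- minimal sketch, compiles standalone.
  #include <cstdio>

  typedef unsigned long uptr;

  template <uptr kMaxNumCached, uptr kMaxBytesCached>
  struct ToySizeClassMap {
    struct TransferBatch {
      TransferBatch *next;
      uptr count;
      void *batch[kMaxNumCached];
    };
    // Made-up geometry: class c holds chunks of 16 << c bytes.
    static uptr Size(uptr class_id) { return 16UL << class_id; }
    // Cache at most kMaxNumCached chunks and kMaxBytesCached bytes per class.
    static uptr MaxCached(uptr class_id) {
      uptr by_bytes = kMaxBytesCached / Size(class_id);
      return by_bytes < kMaxNumCached ? by_bytes : kMaxNumCached;
    }
    // The threshold computed "in flight": unused batch[] slots are deducted,
    // so the question is whether the effective header fits in one chunk.
    static bool SizeClassRequiresSeparateTransferBatch(uptr class_id) {
      return Size(class_id) < sizeof(TransferBatch) -
          sizeof(uptr) * (kMaxNumCached - MaxCached(class_id));
    }
  };

  int main() {
    typedef ToySizeClassMap<256, 1UL << 16> Map;
    for (uptr c = 0; c <= 10; c++)
      printf("chunk size %6lu -> separate batch: %s\n", Map::Size(c),
             Map::SizeClassRequiresSeparateTransferBatch(c) ? "yes" : "no");
    return 0;
  }

The table it prints flips from "yes" to "no" once a chunk outgrows the (shrinking) batch header, which is exactly the boundary kMinBatchClass used to mark by hand.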
Kostya Serebryany 2013-03-12 07:01:27 +00:00
parent a7aedfa5db
commit ba56c8d4f9
1 changed file with 15 additions and 20 deletions


@@ -80,8 +80,7 @@ namespace __sanitizer {
 //
 // c32 => s: 1024 diff: +64 06% l 10 cached: 64 65536; id 32
-template <uptr kMaxSizeLog, uptr kMaxNumCachedT, uptr kMaxBytesCachedLog,
-          uptr kMinBatchClassT>
+template <uptr kMaxSizeLog, uptr kMaxNumCachedT, uptr kMaxBytesCachedLog>
 class SizeClassMap {
   static const uptr kMinSizeLog = 4;
   static const uptr kMidSizeLog = kMinSizeLog + 4;
@@ -93,13 +92,15 @@ class SizeClassMap {
  public:
   static const uptr kMaxNumCached = kMaxNumCachedT;
+  // We transfer chunks between central and thread-local free lists in batches.
+  // For small size classes we allocate batches separately.
+  // For large size classes we use one of the chunks to store the batch.
   struct TransferBatch {
     TransferBatch *next;
     uptr count;
     void *batch[kMaxNumCached];
   };
-  static const uptr kMinBatchClass = kMinBatchClassT;
   static const uptr kMaxSize = 1 << kMaxSizeLog;
   static const uptr kNumClasses =
       kMidClass + ((kMaxSizeLog - kMidSizeLog) << S) + 1;
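For scale, plug the DefaultSizeClassMap parameters from this diff into the struct on a 64-bit target (uptr and void* are 8 bytes, kMaxNumCached = 256):

  sizeof(TransferBatch) = 8 (next) + 8 (count) + 256 * 8 (batch)
                        = 2064 bytes

so a full batch header dwarfs a 16-byte chunk but fits easily inside a large one; that is the split the new comment describes.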
@@ -154,6 +155,11 @@ class SizeClassMap {
     Printf("Total cached: %zd\n", total_cached);
   }
 
+  static bool SizeClassRequiresSeparateTransferBatch(uptr class_id) {
+    return Size(class_id) < sizeof(TransferBatch) -
+        sizeof(uptr) * (kMaxNumCached - MaxCached(class_id));
+  }
+
   static void Validate() {
     for (uptr c = 1; c < kNumClasses; c++) {
       // Printf("Validate: c%zd\n", c);
@@ -176,23 +182,12 @@ class SizeClassMap {
       if (c > 0)
         CHECK_LT(Size(c-1), s);
     }
-
-    // TransferBatch for kMinBatchClass must fit into the block itself.
-    const uptr batch_size = sizeof(TransferBatch)
-        - sizeof(void*)  // NOLINT
-            * (kMaxNumCached - MaxCached(kMinBatchClass));
-    CHECK_LE(batch_size, Size(kMinBatchClass));
-    // TransferBatch for kMinBatchClass-1 must not fit into the block itself.
-    const uptr batch_size1 = sizeof(TransferBatch)
-        - sizeof(void*)  // NOLINT
-            * (kMaxNumCached - MaxCached(kMinBatchClass - 1));
-    CHECK_GT(batch_size1, Size(kMinBatchClass - 1));
   }
 };
 
-typedef SizeClassMap<17, 256, 16, FIRST_32_SECOND_64(25, 28)>
+typedef SizeClassMap<17, 256, 16>
     DefaultSizeClassMap;
-typedef SizeClassMap<17, 64, 14, FIRST_32_SECOND_64(17, 20)>
+typedef SizeClassMap<17, 64, 14>
     CompactSizeClassMap;
 
 template<class SizeClassAllocator> struct SizeClassAllocatorLocalCache;
@@ -526,7 +521,7 @@ class SizeClassAllocator64 {
       Die();
     }
     for (;;) {
-      if (class_id < SizeClassMap::kMinBatchClass)
+      if (SizeClassMap::SizeClassRequiresSeparateTransferBatch(class_id))
         b = (Batch*)c->Allocate(this, SizeClassMap::ClassID(sizeof(Batch)));
       else
         b = (Batch*)(region_beg + beg_idx);
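This call site and the ones below all gate on the same predicate: small classes allocate a Batch out of whatever size class fits sizeof(Batch), large classes reinterpret a chunk's own memory as the batch header. A minimal standalone sketch of the in-place case, with plain malloc standing in for the allocator's region:

  #include <cstdio>
  #include <cstdlib>

  typedef unsigned long uptr;
  struct Batch { Batch *next; uptr count; void *batch[64]; };  // 528 bytes on 64-bit

  int main() {
    void *chunk = malloc(1024);   // a "large" chunk, bigger than sizeof(Batch)
    Batch *b = (Batch *)chunk;    // the chunk doubles as its own batch header,
    b->next = 0;                  // mirroring b = (Batch*)(region_beg + beg_idx)
    b->count = 0;
    printf("batch header at %p, inside the chunk it manages\n", (void *)b);
    free(chunk);
    return 0;
  }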
@@ -738,7 +733,7 @@ class SizeClassAllocator32 {
     Batch *b = 0;
     for (uptr i = reg; i < reg + n_chunks * size; i += size) {
       if (b == 0) {
-        if (class_id < SizeClassMap::kMinBatchClass)
+        if (SizeClassMap::SizeClassRequiresSeparateTransferBatch(class_id))
           b = (Batch*)c->Allocate(this, SizeClassMap::ClassID(sizeof(Batch)));
         else
           b = (Batch*)i;
@@ -846,7 +841,7 @@ struct SizeClassAllocatorLocalCache {
     for (uptr i = 0; i < b->count; i++)
       c->batch[i] = b->batch[i];
     c->count = b->count;
-    if (class_id < SizeClassMap::kMinBatchClass)
+    if (SizeClassMap::SizeClassRequiresSeparateTransferBatch(class_id))
       Deallocate(allocator, SizeClassMap::ClassID(sizeof(Batch)), b);
   }
@@ -854,7 +849,7 @@ struct SizeClassAllocatorLocalCache {
     InitCache();
     PerClass *c = &per_class_[class_id];
     Batch *b;
-    if (class_id < SizeClassMap::kMinBatchClass)
+    if (SizeClassMap::SizeClassRequiresSeparateTransferBatch(class_id))
       b = (Batch*)Allocate(allocator, SizeClassMap::ClassID(sizeof(Batch)));
     else
       b = (Batch*)c->batch[0];