[sanitizer] partially un-revert r267094: Allow the sanitizer allocator to use a non-fixed address range. An allocator with a non-fixed address range will be attack-resistant. NFC for the sanitizers at this point.

llvm-svn: 267252
Kostya Serebryany 2016-04-22 23:35:00 +00:00
parent c2eb17db37
commit 58bc64432c
1 changed file with 39 additions and 19 deletions
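Editor's note: "non-fixed" here means the allocator's reservation is made at an address chosen by the kernel rather than at a hard-coded constant, so the heap base is no longer predictable to an attacker. A minimal illustration of that difference in terms of raw mmap on Linux (illustrative only; the allocator itself goes through the sanitizer MmapNoAccess wrapper shown in the diff below):

    #include <sys/mman.h>
    #include <cstddef>

    // Reserve at a hard-coded address: the base is a constant an exploit can rely on.
    void *ReserveFixed(void *fixed_addr, size_t size) {
      return mmap(fixed_addr, size, PROT_NONE,
                  MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE | MAP_FIXED, -1, 0);
    }

    // Let the kernel pick the address (ASLR applies): the base varies per run.
    void *ReserveDynamic(size_t size) {
      return mmap(nullptr, size, PROT_NONE,
                  MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
    }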


@@ -297,9 +297,10 @@ typedef void (*ForEachChunkCallback)(uptr chunk, void *arg);
 // SizeClassAllocator64 -- allocator for 64-bit address space.
 //
-// Space: a portion of address space of kSpaceSize bytes starting at
-// a fixed address (kSpaceBeg). Both constants are powers of two and
-// kSpaceBeg is kSpaceSize-aligned.
+// Space: a portion of address space of kSpaceSize bytes starting at SpaceBeg.
+// If kSpaceBeg is ~0 then SpaceBeg is chosen dynamically by mmap.
+// Otherwise SpaceBeg=kSpaceBeg (fixed address).
+// kSpaceSize is a power of two.
 // At the beginning the entire space is mprotect-ed, then small parts of it
 // are mapped on demand.
 //
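The updated comment means the choice between a fixed and a dynamic base is now made per instantiation. A sketch of the two configurations, assuming the template's existing parameter order <kSpaceBeg, kSpaceSize, kMetadataSize, SizeClassMap>; the addresses, sizes, and typedef names are placeholders, not values any sanitizer actually uses:

    // Fixed base: kSpaceBeg is a concrete, kSpaceSize-aligned constant.
    typedef SizeClassAllocator64<0x700000000000ULL, 0x100000000000ULL, 16,
                                 DefaultSizeClassMap> FixedBaseAllocator64;

    // Dynamic base: kSpaceBeg == ~0, so SpaceBeg is chosen by mmap in Init().
    typedef SizeClassAllocator64<~(uptr)0, 0x100000000000ULL, 16,
                                 DefaultSizeClassMap> DynamicBaseAllocator64;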
@@ -322,9 +323,15 @@ class SizeClassAllocator64 {
   typedef SizeClassAllocatorLocalCache<ThisT> AllocatorCache;
 
   void Init() {
-    CHECK_EQ(kSpaceBeg,
-             reinterpret_cast<uptr>(MmapNoAccess(kSpaceBeg, kSpaceSize)));
-    MapWithCallback(kSpaceEnd, AdditionalSize());
+    if (kUsingConstantSpaceBeg) {
+      CHECK_EQ(kSpaceBeg,
+               reinterpret_cast<uptr>(MmapNoAccess(kSpaceBeg, kSpaceSize)));
+    } else {
+      NonConstSpaceBeg = reinterpret_cast<uptr>(
+          MmapNoAccess(0, kSpaceSize + AdditionalSize()));
+      CHECK_NE(NonConstSpaceBeg, ~(uptr)0);
+    }
+    MapWithCallback(SpaceEnd(), AdditionalSize());
   }
 
   void MapWithCallback(uptr beg, uptr size) {
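A note on why the final MapWithCallback(SpaceEnd(), AdditionalSize()) call is unconditional: in both configurations the RegionInfo metadata lives directly after the space (GetRegionInfo() further down reads it at SpaceBeg() + kSpaceSize), and in the dynamic branch the single MmapNoAccess(0, kSpaceSize + AdditionalSize()) call reserves that tail together with the space itself. A tiny editor's sketch of the layout both branches rely on (stand-in types, not the sanitizer's):

    typedef unsigned long long u64;
    // Per-class user memory starts at a fixed offset within the space.
    u64 RegionBegin(u64 space_beg, u64 region_size, u64 class_id) {
      return space_beg + class_id * region_size;
    }
    // The RegionInfo array starts right after the space, i.e. at SpaceEnd().
    u64 MetadataBegin(u64 space_beg, u64 space_size) {
      return space_beg + space_size;
    }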
@@ -360,12 +367,18 @@ class SizeClassAllocator64 {
     region->n_freed += b->count;
   }
 
-  static bool PointerIsMine(const void *p) {
-    return reinterpret_cast<uptr>(p) / kSpaceSize == kSpaceBeg / kSpaceSize;
+  bool PointerIsMine(const void *p) {
+    uptr P = reinterpret_cast<uptr>(p);
+    if (kUsingConstantSpaceBeg && (kSpaceBeg % kSpaceSize) == 0)
+      return P / kSpaceSize == kSpaceBeg / kSpaceSize;
+    return P >= SpaceBeg() && P < SpaceEnd();
   }
 
-  static uptr GetSizeClass(const void *p) {
-    return (reinterpret_cast<uptr>(p) / kRegionSize) % kNumClassesRounded;
+  uptr GetSizeClass(const void *p) {
+    if (kUsingConstantSpaceBeg && (kSpaceBeg % kSpaceSize) == 0)
+      return ((reinterpret_cast<uptr>(p)) / kRegionSize) % kNumClassesRounded;
+    return ((reinterpret_cast<uptr>(p) - SpaceBeg()) / kRegionSize) %
+           kNumClassesRounded;
   }
 
   void *GetBlockBegin(const void *p) {
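The fast path kept for the fixed configuration relies on kSpaceBeg being kSpaceSize-aligned, so a single division identifies the space; a run-time-chosen base has no such alignment guarantee, hence the explicit range check. A worked example with illustrative values (editor's sketch, not ASan's real layout):

    #include <cassert>
    #include <cstdint>

    int main() {
      const uint64_t kSpaceSize = 0x100000000000ULL;  // 2^44
      const uint64_t kSpaceBeg  = 0x700000000000ULL;  // 7 * 2^44, kSpaceSize-aligned
      // Inside the space: same quotient as kSpaceBeg.
      assert(0x7000ABCD1234ULL / kSpaceSize == kSpaceBeg / kSpaceSize);
      // One byte below the space: different quotient.
      assert(0x6FFFFFFFFFFFULL / kSpaceSize != kSpaceBeg / kSpaceSize);
      return 0;
    }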
@@ -383,7 +396,7 @@ class SizeClassAllocator64 {
     return nullptr;
   }
 
-  static uptr GetActuallyAllocatedSize(void *p) {
+  uptr GetActuallyAllocatedSize(void *p) {
     CHECK(PointerIsMine(p));
     return SizeClassMap::Size(GetSizeClass(p));
   }
@@ -394,8 +407,9 @@ class SizeClassAllocator64 {
     uptr class_id = GetSizeClass(p);
     uptr size = SizeClassMap::Size(class_id);
     uptr chunk_idx = GetChunkIdx(reinterpret_cast<uptr>(p), size);
-    return reinterpret_cast<void*>(kSpaceBeg + (kRegionSize * (class_id + 1)) -
-                                   (1 + chunk_idx) * kMetadataSize);
+    return reinterpret_cast<void *>(SpaceBeg() +
+                                    (kRegionSize * (class_id + 1)) -
+                                    (1 + chunk_idx) * kMetadataSize);
   }
 
   uptr TotalMemoryUsed() {
@@ -407,7 +421,7 @@ class SizeClassAllocator64 {
   // Test-only.
   void TestOnlyUnmap() {
-    UnmapWithCallback(kSpaceBeg, kSpaceSize + AdditionalSize());
+    UnmapWithCallback(SpaceBeg(), kSpaceSize + AdditionalSize());
   }
 
   void PrintStats() {
@@ -455,7 +469,7 @@ class SizeClassAllocator64 {
     for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
       RegionInfo *region = GetRegionInfo(class_id);
       uptr chunk_size = SizeClassMap::Size(class_id);
-      uptr region_beg = kSpaceBeg + class_id * kRegionSize;
+      uptr region_beg = SpaceBeg() + class_id * kRegionSize;
       for (uptr chunk = region_beg;
            chunk < region_beg + region->allocated_user;
            chunk += chunk_size) {
@@ -476,8 +490,13 @@ class SizeClassAllocator64 {
  private:
   static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;
-  static const uptr kSpaceEnd = kSpaceBeg + kSpaceSize;
-  COMPILER_CHECK(kSpaceBeg % kSpaceSize == 0);
+  static const bool kUsingConstantSpaceBeg = kSpaceBeg != ~(uptr)0;
+  uptr NonConstSpaceBeg;
+  uptr SpaceBeg() const {
+    return kUsingConstantSpaceBeg ? kSpaceBeg : NonConstSpaceBeg;
+  }
+  uptr SpaceEnd() const { return SpaceBeg() + kSpaceSize; }
   // kRegionSize must be >= 2^32.
   COMPILER_CHECK((kRegionSize) >= (1ULL << (SANITIZER_WORDSIZE / 2)));
   // Populate the free list with at most this number of bytes at once
@@ -501,7 +520,8 @@ class SizeClassAllocator64 {
   RegionInfo *GetRegionInfo(uptr class_id) {
     CHECK_LT(class_id, kNumClasses);
-    RegionInfo *regions = reinterpret_cast<RegionInfo*>(kSpaceBeg + kSpaceSize);
+    RegionInfo *regions =
+        reinterpret_cast<RegionInfo *>(SpaceBeg() + kSpaceSize);
     return &regions[class_id];
   }
@@ -524,7 +544,7 @@ class SizeClassAllocator64 {
     uptr count = size < kPopulateSize ? SizeClassMap::MaxCached(class_id) : 1;
     uptr beg_idx = region->allocated_user;
     uptr end_idx = beg_idx + count * size;
-    uptr region_beg = kSpaceBeg + kRegionSize * class_id;
+    uptr region_beg = SpaceBeg() + kRegionSize * class_id;
     if (end_idx + size > region->mapped_user) {
       // Do the mmap for the user memory.
       uptr map_size = kUserMapSize;