[sanitizer] add CombinedAllocator::InitIfLinkerInitialized and use it in lsan: speeds up lsan start-up time by ~25%

llvm-svn: 224469
This commit is contained in:
Kostya Serebryany 2014-12-17 23:06:36 +00:00
parent c9d1c27d48
commit 20f3a52249
2 changed files with 25 additions and 7 deletions

View File

@@ -47,7 +47,7 @@ static Allocator allocator;
 static THREADLOCAL AllocatorCache cache;

 void InitializeAllocator() {
-  allocator.Init(common_flags()->allocator_may_return_null);
+  allocator.InitIfLinkerInitialized(common_flags()->allocator_may_return_null);
 }

 void AllocatorThreadFinish() {

View File

@@ -211,6 +211,7 @@ class AllocatorStats {
   void Init() {
     internal_memset(this, 0, sizeof(*this));
   }
+  void InitIfLinkerInitialized() {}

   void Add(AllocatorStat i, uptr v) {
     v += atomic_load(&stats_[i], memory_order_relaxed);
@@ -240,11 +241,14 @@ class AllocatorStats {
 // Global stats, used for aggregation and querying.
 class AllocatorGlobalStats : public AllocatorStats {
  public:
-  void Init() {
-    internal_memset(this, 0, sizeof(*this));
+  void InitIfLinkerInitialized() {
     next_ = this;
     prev_ = this;
   }
+  void Init() {
+    internal_memset(this, 0, sizeof(*this));
+    InitIfLinkerInitialized();
+  }

   void Register(AllocatorStats *s) {
     SpinMutexLock l(&mu_);
@@ -1002,12 +1006,16 @@ struct SizeClassAllocatorLocalCache {
 template <class MapUnmapCallback = NoOpMapUnmapCallback>
 class LargeMmapAllocator {
  public:
-  void Init(bool may_return_null) {
-    internal_memset(this, 0, sizeof(*this));
+  void InitIfLinkerInitialized(bool may_return_null) {
     page_size_ = GetPageSizeCached();
     atomic_store(&may_return_null_, may_return_null, memory_order_relaxed);
   }
+  void Init(bool may_return_null) {
+    internal_memset(this, 0, sizeof(*this));
+    InitIfLinkerInitialized(may_return_null);
+  }

   void *Allocate(AllocatorStats *stat, uptr size, uptr alignment) {
     CHECK(IsPowerOfTwo(alignment));
     uptr map_size = RoundUpMapSize(size);
@@ -1253,11 +1261,21 @@ template <class PrimaryAllocator, class AllocatorCache,
           class SecondaryAllocator>  // NOLINT
 class CombinedAllocator {
  public:
-  void Init(bool may_return_null) {
+  void InitCommon(bool may_return_null) {
     primary_.Init();
+    atomic_store(&may_return_null_, may_return_null, memory_order_relaxed);
+  }
+  void InitIfLinkerInitialized(bool may_return_null) {
+    secondary_.InitIfLinkerInitialized(may_return_null);
+    stats_.InitIfLinkerInitialized();
+    InitCommon(may_return_null);
+  }
+  void Init(bool may_return_null) {
     secondary_.Init(may_return_null);
     stats_.Init();
-    atomic_store(&may_return_null_, may_return_null, memory_order_relaxed);
+    InitCommon(may_return_null);
   }

   void *Allocate(AllocatorCache *cache, uptr size, uptr alignment,