Index: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_local_cache.h
===================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_local_cache.h
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_local_cache.h
@@ -45,8 +45,8 @@
   void *Allocate(SizeClassAllocator *allocator, uptr class_id) {
     CHECK_NE(class_id, 0UL);
     CHECK_LT(class_id, kNumClasses);
-    stats_.Add(AllocatorStatAllocated, Allocator::ClassIdToSize(class_id));
     PerClass *c = &per_class_[class_id];
+    stats_.Add(AllocatorStatAllocated, c->class_size);
     if (UNLIKELY(c->count == 0))
       Refill(c, allocator, class_id);
     CHECK_GT(c->count, 0);
@@ -62,8 +62,8 @@
     // If the first allocator call on a new thread is a deallocation, then
     // max_count will be zero, leading to check failure.
     InitCache();
-    stats_.Sub(AllocatorStatAllocated, Allocator::ClassIdToSize(class_id));
     PerClass *c = &per_class_[class_id];
+    stats_.Sub(AllocatorStatAllocated, c->class_size);
     CHECK_NE(c->max_count, 0UL);
     if (UNLIKELY(c->count == c->max_count))
       Drain(c, allocator, class_id, c->max_count / 2);
@@ -85,6 +85,7 @@
   struct PerClass {
     u32 count;
     u32 max_count;
+    uptr class_size;
     CompactPtrT chunks[2 * SizeClassMap::kMaxNumCachedHint];
   };
   PerClass per_class_[kNumClasses];
@@ -96,6 +97,7 @@
     for (uptr i = 0; i < kNumClasses; i++) {
       PerClass *c = &per_class_[i];
       c->max_count = 2 * SizeClassMap::MaxCachedHint(i);
+      c->class_size = Allocator::ClassIdToSize(i);
     }
   }
@@ -141,8 +143,8 @@
   void *Allocate(SizeClassAllocator *allocator, uptr class_id) {
     CHECK_NE(class_id, 0UL);
     CHECK_LT(class_id, kNumClasses);
-    stats_.Add(AllocatorStatAllocated, Allocator::ClassIdToSize(class_id));
     PerClass *c = &per_class_[class_id];
+    stats_.Add(AllocatorStatAllocated, c->class_size);
     if (UNLIKELY(c->count == 0))
       Refill(allocator, class_id);
     void *res = c->batch[--c->count];
@@ -156,8 +158,8 @@
     // If the first allocator call on a new thread is a deallocation, then
     // max_count will be zero, leading to check failure.
     InitCache();
-    stats_.Sub(AllocatorStatAllocated, Allocator::ClassIdToSize(class_id));
     PerClass *c = &per_class_[class_id];
+    stats_.Sub(AllocatorStatAllocated, c->class_size);
     CHECK_NE(c->max_count, 0UL);
     if (UNLIKELY(c->count == c->max_count))
       Drain(allocator, class_id);
@@ -177,6 +179,7 @@
   struct PerClass {
     uptr count;
     uptr max_count;
+    uptr class_size;
     void *batch[2 * TransferBatch::kMaxNumCached];
   };
   PerClass per_class_[kNumClasses];
@@ -188,6 +191,7 @@
     for (uptr i = 0; i < kNumClasses; i++) {
       PerClass *c = &per_class_[i];
       c->max_count = 2 * TransferBatch::MaxCached(i);
+      c->class_size = Allocator::ClassIdToSize(i);
     }
   }
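
Note: the patch above moves the ClassIdToSize() computation off the hot Allocate()/Deallocate() paths and into InitCache(), caching the result in the new PerClass::class_size field that the stats update can read from the per-class record it already loads. The standalone C++ sketch below illustrates that pattern only; the types and names (ToyCache, ClassIdToSize, OnAllocate) are illustrative stand-ins, not the sanitizer_common API.

  #include <cstdint>
  #include <cstdio>

  using uptr = uintptr_t;

  constexpr uptr kNumClasses = 8;

  // Stand-in for Allocator::ClassIdToSize(): some non-trivial mapping
  // from class id to chunk size.
  static uptr ClassIdToSize(uptr class_id) {
    return class_id == 0 ? 0 : (uptr(1) << (3 + class_id));
  }

  struct ToyCache {
    struct PerClass {
      uptr count;
      uptr max_count;
      uptr class_size;  // cached ClassIdToSize(i), filled in once by InitCache()
    };
    PerClass per_class_[kNumClasses] = {};
    uptr allocated_bytes = 0;

    void InitCache() {
      if (per_class_[1].max_count) return;  // already initialized
      for (uptr i = 0; i < kNumClasses; i++) {
        PerClass *c = &per_class_[i];
        c->max_count = 2 * (i + 4);        // placeholder for MaxCachedHint(i)
        c->class_size = ClassIdToSize(i);  // pay for the computation once
      }
    }

    void OnAllocate(uptr class_id) {
      PerClass *c = &per_class_[class_id];
      allocated_bytes += c->class_size;  // no ClassIdToSize() on the hot path
    }

    void OnDeallocate(uptr class_id) {
      PerClass *c = &per_class_[class_id];
      allocated_bytes -= c->class_size;
    }
  };

  int main() {
    ToyCache cache;
    cache.InitCache();
    cache.OnAllocate(3);
    cache.OnAllocate(5);
    cache.OnDeallocate(3);
    std::printf("allocated bytes in flight: %zu\n", (size_t)cache.allocated_bytes);
    return 0;
  }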