Index: lib/sanitizer_common/sanitizer_allocator_secondary.h
===================================================================
--- lib/sanitizer_common/sanitizer_allocator_secondary.h
+++ lib/sanitizer_common/sanitizer_allocator_secondary.h
@@ -64,11 +64,20 @@
 typedef LargeMmapAllocatorPtrArrayDynamic DefaultLargeMmapAllocatorPtrArray;
 #endif
 
+// No-op implementation of `ObjectMap`.
+struct LocalObjectMap {
+  template <typename T>
+  static T *Map(T *target_address, uptr num_elements = 1) {
+    return target_address;
+  }
+};
+
 // This class can (de)allocate only large chunks of memory using mmap/unmap.
 // The main purpose of this allocator is to cover large and rare allocation
 // sizes not covered by more efficient allocators (e.g. SizeClassAllocator64).
 template <class MapUnmapCallback = NoOpMapUnmapCallback,
-          class PtrArrayT = DefaultLargeMmapAllocatorPtrArray>
+          class PtrArrayT = DefaultLargeMmapAllocatorPtrArray,
+          class ObjectMap = LocalObjectMap>
 class LargeMmapAllocator {
  public:
   void InitLinkerInitialized() {
@@ -202,9 +211,10 @@
 
   void EnsureSortedChunks() {
     if (chunks_sorted_) return;
-    Sort(reinterpret_cast<uptr *>(chunks_), n_chunks_);
+    Header **chunks = ObjectMap::Map(chunks_, n_chunks_);
+    Sort(reinterpret_cast<uptr *>(chunks), n_chunks_);
     for (uptr i = 0; i < n_chunks_; i++)
-      chunks_[i]->chunk_idx = i;
+      ObjectMap::Map(chunks[i])->chunk_idx = i;
     chunks_sorted_ = true;
   }
 
@@ -272,12 +282,13 @@
   // The allocator must be locked when calling this function.
   void ForEachChunk(ForEachChunkCallback callback, void *arg) {
     EnsureSortedChunks();  // Avoid doing the sort while iterating.
+    Header **chunks = ObjectMap::Map(chunks_, n_chunks_);
     for (uptr i = 0; i < n_chunks_; i++) {
-      auto t = chunks_[i];
+      Header *t = chunks[i];
       callback(reinterpret_cast<uptr>(GetUser(t)), arg);
       // Consistency check: verify that the array did not change.
-      CHECK_EQ(chunks_[i], t);
-      CHECK_EQ(chunks_[i]->chunk_idx, i);
+      CHECK_EQ(chunks[i], t);
+      CHECK_EQ(ObjectMap::Map(chunks[i])->chunk_idx, i);
     }
   }
 
@@ -316,4 +327,3 @@
   } stats;
   StaticSpinMutex mutex_;
 };
-