diff --git a/compiler-rt/lib/asan/asan_allocator.cpp b/compiler-rt/lib/asan/asan_allocator.cpp
--- a/compiler-rt/lib/asan/asan_allocator.cpp
+++ b/compiler-rt/lib/asan/asan_allocator.cpp
@@ -89,9 +89,7 @@
 class ChunkHeader {
  public:
   atomic_uint8_t chunk_state;
-  u8 from_memalign : 1;
   u8 alloc_type : 2;
-  u8 rz_log : 3;
   u8 lsan_tag : 2;
 
   // align < 8 -> 0
@@ -161,12 +159,6 @@
 class AsanChunk : public ChunkBase {
  public:
   uptr Beg() { return reinterpret_cast<uptr>(this) + kChunkHeaderSize; }
-
-  void *AllocBeg() {
-    if (from_memalign)
-      return get_allocator().GetBlockBegin(reinterpret_cast<void *>(this));
-    return reinterpret_cast<void *>(Beg() - RZLog2Size(rz_log));
-  }
 };
 
 struct QuarantineCallback {
@@ -185,7 +177,7 @@
     PoisonShadow(m->Beg(),
                  RoundUpTo(m->UsedSize(), SHADOW_GRANULARITY),
                  kAsanHeapLeftRedzoneMagic);
-    void *p = reinterpret_cast<void *>(m->AllocBeg());
+    void *p = get_allocator().GetBlockBegin(m);
     if (p != m) {
       uptr *alloc_magic = reinterpret_cast<uptr *>(p);
       CHECK_EQ(alloc_magic[0], kAllocBegMagic);
@@ -541,8 +533,7 @@
     uptr alloc_beg = reinterpret_cast<uptr>(allocated);
     uptr alloc_end = alloc_beg + needed_size;
-    uptr beg_plus_redzone = alloc_beg + rz_size;
-    uptr user_beg = beg_plus_redzone;
+    uptr user_beg = alloc_beg + rz_size;
     if (!IsAligned(user_beg, alignment))
       user_beg = RoundUpTo(user_beg, alignment);
     uptr user_end = user_beg + size;
@@ -550,8 +541,6 @@
     uptr chunk_beg = user_beg - kChunkHeaderSize;
     AsanChunk *m = reinterpret_cast<AsanChunk *>(chunk_beg);
     m->alloc_type = alloc_type;
-    m->rz_log = rz_log;
-    m->from_memalign = user_beg != beg_plus_redzone;
     if (alloc_beg != chunk_beg) {
       CHECK_LE(alloc_beg + 2 * sizeof(uptr), chunk_beg);
       reinterpret_cast<uptr *>(alloc_beg)[0] = kAllocBegMagic;