diff --git a/compiler-rt/lib/asan/asan_allocator.cpp b/compiler-rt/lib/asan/asan_allocator.cpp
--- a/compiler-rt/lib/asan/asan_allocator.cpp
+++ b/compiler-rt/lib/asan/asan_allocator.cpp
@@ -16,6 +16,7 @@
 #include "asan_allocator.h"
 
+#include "asan_internal.h"
 #include "asan_mapping.h"
 #include "asan_poisoning.h"
 #include "asan_report.h"
@@ -24,6 +25,7 @@
 #include "lsan/lsan_common.h"
 #include "sanitizer_common/sanitizer_allocator_checks.h"
 #include "sanitizer_common/sanitizer_allocator_interface.h"
+#include "sanitizer_common/sanitizer_common.h"
 #include "sanitizer_common/sanitizer_errno.h"
 #include "sanitizer_common/sanitizer_flags.h"
 #include "sanitizer_common/sanitizer_internal_defs.h"
@@ -196,6 +198,24 @@
         stack_(stack) {
   }
 
+  void PreQuarantine(AsanChunk *m) {
+    Flags &fl = *flags();
+
+    if (fl.max_free_fill_size > 0) {
+      // We have to skip the chunk header, it contains free_context_id.
+      uptr scribble_start = (uptr)m + kChunkHeaderSize + kChunkHeader2Size;
+      if (m->UsedSize() >= kChunkHeader2Size) {  // Skip Header2 in user area.
+        uptr size_to_fill = m->UsedSize() - kChunkHeader2Size;
+        size_to_fill = Min(size_to_fill, (uptr)fl.max_free_fill_size);
+        REAL(memset)((void *)scribble_start, fl.free_fill_byte, size_to_fill);
+      }
+    }
+
+    // Poison the region.
+    PoisonShadow(m->Beg(), RoundUpTo(m->UsedSize(), ASAN_SHADOW_GRANULARITY),
+                 kAsanHeapFreeMagic);
+  }
+
   void Recycle(AsanChunk *m) {
     void *p = get_allocator().GetBlockBegin(m);
     if (p != m) {
@@ -221,6 +241,12 @@
     get_allocator().Deallocate(cache_, p);
   }
 
+  void RecyclePassThrough(AsanChunk *m) {
+    // TODO: We don't need all these here.
+    PreQuarantine(m);
+    Recycle(m);
+  }
+
   void *Allocate(uptr size) {
     void *res = get_allocator().Allocate(cache_, size, 1);
     // TODO(alekseys): Consider making quarantine OOM-friendly.
@@ -639,21 +665,6 @@
     AsanThread *t = GetCurrentThread();
     m->SetFreeContext(t ? t->tid() : 0, StackDepotPut(*stack));
 
-    Flags &fl = *flags();
-    if (fl.max_free_fill_size > 0) {
-      // We have to skip the chunk header, it contains free_context_id.
-      uptr scribble_start = (uptr)m + kChunkHeaderSize + kChunkHeader2Size;
-      if (m->UsedSize() >= kChunkHeader2Size) {  // Skip Header2 in user area.
-        uptr size_to_fill = m->UsedSize() - kChunkHeader2Size;
-        size_to_fill = Min(size_to_fill, (uptr)fl.max_free_fill_size);
-        REAL(memset)((void *)scribble_start, fl.free_fill_byte, size_to_fill);
-      }
-    }
-
-    // Poison the region.
-    PoisonShadow(m->Beg(), RoundUpTo(m->UsedSize(), ASAN_SHADOW_GRANULARITY),
-                 kAsanHeapFreeMagic);
-
     // Push into quarantine.
     if (t) {
       AsanThreadLocalMallocStorage *ms = &t->malloc_storage();
@@ -711,6 +722,12 @@
     thread_stats.freed += m->UsedSize();
 
     QuarantineChunk(m, ptr, stack);
+    // if (allocator.FromPrimary(ptr)) {
+    //   QuarantineChunk(m, ptr, stack);
+    //   return;
+    // }
+
+    // allocator.Deallocate(nullptr, allocator.GetBlockBegin(ptr));
   }
 
   void *Reallocate(void *old_ptr, uptr new_size, BufferedStackTrace *stack) {
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_quarantine.h b/compiler-rt/lib/sanitizer_common/sanitizer_quarantine.h
--- a/compiler-rt/lib/sanitizer_common/sanitizer_quarantine.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_quarantine.h
@@ -68,10 +68,6 @@
 COMPILER_CHECK(sizeof(QuarantineBatch) <= (1 << 13));  // 8Kb.
 
-// The callback interface is:
-// void Callback::Recycle(Node *ptr);
-// void *cb.Allocate(uptr size);
-// void  cb.Deallocate(void *ptr);
 template <typename Callback, typename Node>
 class Quarantine {
  public:
@@ -100,10 +96,11 @@
   void Put(Cache *c, Callback cb, Node *ptr, uptr size) {
     uptr max_cache_size = GetMaxCacheSize();
     if (max_cache_size && size <= GetMaxSize()) {
+      cb.PreQuarantine(ptr);
       c->Enqueue(cb, ptr, size);
     } else {
       // GetMaxCacheSize() == 0 only when GetMaxSize() == 0 (see Init).
-      cb.Recycle(ptr);
+      cb.RecyclePassThrough(ptr);
     }
     // Check cache size anyway to accommodate for runtime cache_size change.
     if (c->Size() > max_cache_size)
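After this change, a Quarantine<Callback, Node> instance expects its Callback to provide PreQuarantine and RecyclePassThrough in addition to Recycle, Allocate, and Deallocate: poisoning happens before a chunk is parked, and the pass-through path both poisons and recycles when the quarantine is disabled. The stand-alone sketch below mirrors only the dispatch logic of Quarantine::Put; the DemoCallback, DemoQuarantine, and Chunk types are illustrative stand-ins, not the sanitizer's real classes, and enqueueing itself is not modeled.

// Minimal sketch of the new callback contract, assuming illustrative types.
#include <cstdio>

struct Chunk {
  unsigned size;
};

struct DemoCallback {
  // Poison/scribble the chunk before it is parked in the quarantine.
  void PreQuarantine(Chunk *c) const { std::printf("pre-quarantine %u bytes\n", c->size); }
  // Return the block to the underlying allocator once it leaves the quarantine.
  void Recycle(Chunk *c) const { std::printf("recycle %u bytes\n", c->size); }
  // Quarantine is bypassed: poison and recycle the chunk in one step.
  void RecyclePassThrough(Chunk *c) const {
    PreQuarantine(c);
    Recycle(c);
  }
};

// Mirrors the dispatch in Quarantine::Put; the cache/batch machinery is omitted.
template <typename Callback, typename Node>
struct DemoQuarantine {
  unsigned max_size;  // 0 means the quarantine is disabled.

  void Put(Callback cb, Node *ptr, unsigned size) {
    if (max_size && size <= max_size) {
      cb.PreQuarantine(ptr);  // Poison now; the chunk is recycled later.
    } else {
      cb.RecyclePassThrough(ptr);  // No quarantine: poison and recycle immediately.
    }
  }
};

int main() {
  DemoQuarantine<DemoCallback, Chunk> q{256};
  Chunk small{64}, large{1024};
  q.Put(DemoCallback{}, &small, small.size);  // parked in the quarantine
  q.Put(DemoCallback{}, &large, large.size);  // too big: pass-through path
}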