diff --git a/compiler-rt/lib/asan/asan_allocator.cpp b/compiler-rt/lib/asan/asan_allocator.cpp
--- a/compiler-rt/lib/asan/asan_allocator.cpp
+++ b/compiler-rt/lib/asan/asan_allocator.cpp
@@ -1169,15 +1169,22 @@
   __asan::get_allocator().ForEachChunk(callback, arg);
 }
 
-IgnoreObjectResult IgnoreObjectLocked(const void *p) {
+static ALWAYS_INLINE __asan::AsanChunk *LookUpValidChunk(const void *p) {
   uptr addr = reinterpret_cast<uptr>(p);
   __asan::AsanChunk *m = __asan::instance.GetAsanChunkByAddr(addr);
   if (!m ||
       (atomic_load(&m->chunk_state, memory_order_acquire) !=
        __asan::CHUNK_ALLOCATED) ||
       !m->AddrIsInside(addr)) {
-    return kIgnoreObjectInvalid;
+    return nullptr;
   }
+  return m;
+}
+
+IgnoreObjectResult IgnoreObjectLocked(const void *p) {
+  __asan::AsanChunk *m = LookUpValidChunk(p);
+  if (!m)
+    return kIgnoreObjectInvalid;
   if (m->lsan_tag == kIgnored)
     return kIgnoreObjectAlreadyIgnored;
   m->lsan_tag = __lsan::kIgnored;
diff --git a/compiler-rt/lib/lsan/lsan_allocator.cpp b/compiler-rt/lib/lsan/lsan_allocator.cpp
--- a/compiler-rt/lib/lsan/lsan_allocator.cpp
+++ b/compiler-rt/lib/lsan/lsan_allocator.cpp
@@ -295,19 +295,26 @@
   allocator.ForEachChunk(callback, arg);
 }
 
-IgnoreObjectResult IgnoreObjectLocked(const void *p) {
+static ALWAYS_INLINE ChunkMetadata *LookUpValidChunk(const void *p) {
   void *chunk = allocator.GetBlockBegin(p);
-  if (!chunk || p < chunk) return kIgnoreObjectInvalid;
+  if (!chunk || p < chunk)
+    return nullptr;
   ChunkMetadata *m = Metadata(chunk);
   CHECK(m);
   if (m->allocated && (uptr)p < (uptr)chunk + m->requested_size) {
-    if (m->tag == kIgnored)
-      return kIgnoreObjectAlreadyIgnored;
-    m->tag = kIgnored;
-    return kIgnoreObjectSuccess;
-  } else {
-    return kIgnoreObjectInvalid;
+    return m;
   }
+  return nullptr;
+}
+
+IgnoreObjectResult IgnoreObjectLocked(const void *p) {
+  ChunkMetadata *m = LookUpValidChunk(p);
+  if (!m)
+    return kIgnoreObjectInvalid;
+  if (m->tag == kIgnored)
+    return kIgnoreObjectAlreadyIgnored;
+  m->tag = kIgnored;
+  return kIgnoreObjectSuccess;
 }
 
 } // namespace __lsan