diff --git a/compiler-rt/lib/hwasan/hwasan_allocator.h b/compiler-rt/lib/hwasan/hwasan_allocator.h
--- a/compiler-rt/lib/hwasan/hwasan_allocator.h
+++ b/compiler-rt/lib/hwasan/hwasan_allocator.h
@@ -96,6 +96,8 @@
   u32 GetAllocStackId() const;
   bool FromSmallHeap() const;
   bool AddrIsInside(uptr addr) const;
+  inline void SetLsanTag(__lsan::ChunkTag tag);
+  inline __lsan::ChunkTag GetLsanTag() const;

 private:
  friend class __lsan::LsanMetadata;
@@ -104,6 +106,7 @@
 };

 HwasanChunkView FindHeapChunkByAddress(uptr address);
+HwasanChunkView FindHeapChunkByAddressFastLocked(uptr address);

 // Information about one (de)allocation that happened in the past.
 // These are recorded in a thread-local ring buffer.
diff --git a/compiler-rt/lib/hwasan/hwasan_allocator.cpp b/compiler-rt/lib/hwasan/hwasan_allocator.cpp
--- a/compiler-rt/lib/hwasan/hwasan_allocator.cpp
+++ b/compiler-rt/lib/hwasan/hwasan_allocator.cpp
@@ -75,6 +75,15 @@
   return (addr >= Beg()) && (addr < Beg() + UsedSize());
 }

+inline void HwasanChunkView::SetLsanTag(__lsan::ChunkTag tag) {
+  CHECK(metadata_);
+  metadata_->SetLsanTag(tag);
+}
+inline __lsan::ChunkTag HwasanChunkView::GetLsanTag() const {
+  CHECK(metadata_);
+  return metadata_->GetLsanTag();
+}
+
 inline void Metadata::SetAllocated(u32 stack, u64 size) {
   Thread *t = GetCurrentThread();
   u64 context = t ? t->unique_id() : kMainTid;
@@ -106,8 +115,6 @@
   return atomic_load(&alloc_context_id, memory_order_relaxed);
 }

-static const uptr kChunkHeaderSize = sizeof(HwasanChunkView);
-
 void GetAllocatorStats(AllocatorStatCounters s) {
   allocator.GetStats(s);
 }
@@ -236,6 +243,10 @@

   Metadata *meta =
       reinterpret_cast<Metadata *>(allocator.GetMetaData(allocated));
+#if CAN_SANITIZE_LEAKS
+  meta->SetLsanTag(__lsan::DisabledInThisThread() ? __lsan::kIgnored
+                                                  : __lsan::kDirectlyLeaked);
+#endif
   meta->SetAllocated(StackDepotPut(*stack), orig_size);
   RunMallocHooks(user_ptr, size);
   return user_ptr;
@@ -386,6 +397,16 @@
   return HwasanChunkView(reinterpret_cast<uptr>(block), metadata);
 }

+HwasanChunkView FindHeapChunkByAddressFastLocked(uptr address) {
+  void *block =
+      allocator.GetBlockBeginFastLocked(reinterpret_cast<void *>(address));
+  if (!block)
+    return HwasanChunkView();
+  Metadata *metadata =
+      reinterpret_cast<Metadata *>(allocator.GetMetaData(block));
+  return HwasanChunkView(reinterpret_cast<uptr>(block), metadata);
+}
+
 static uptr AllocationSize(const void *tagged_ptr) {
   const void *untagged_ptr = UntagPtr(tagged_ptr);
   if (!untagged_ptr) return 0;
@@ -501,8 +522,9 @@

 uptr PointsIntoChunk(void *p) {
   uptr addr = reinterpret_cast<uptr>(p);
-  __hwasan::HwasanChunkView view = __hwasan::FindHeapChunkByAddress(addr);
-  if (!view.IsAllocated())
+  __hwasan::HwasanChunkView view =
+      __hwasan::FindHeapChunkByAddressFastLocked(addr);
+  if (!view.IsAllocated())
     return 0;
   uptr chunk = view.Beg();
   if (view.AddrIsInside(addr))
@@ -513,12 +535,11 @@
 }

 uptr GetUserBegin(uptr chunk) {
-  return __hwasan::FindHeapChunkByAddress(chunk).Beg();
+  return __hwasan::FindHeapChunkByAddressFastLocked(chunk).Beg();
 }

 LsanMetadata::LsanMetadata(uptr chunk) {
-  metadata_ = chunk ? reinterpret_cast<__hwasan::Metadata *>(
-                          chunk - __hwasan::kChunkHeaderSize)
+  metadata_ = chunk ? __hwasan::allocator.GetMetaData((void *)chunk)
                     : nullptr;
 }

@@ -553,6 +574,18 @@
   __hwasan::allocator.ForEachChunk(callback, arg);
 }

+IgnoreObjectResult IgnoreObjectLocked(const void *p) {
+  uptr addr = reinterpret_cast<uptr>(p);
+  __hwasan::HwasanChunkView view = __hwasan::FindHeapChunkByAddressFastLocked(addr);
+  if (!view.IsAllocated() || !view.AddrIsInside(addr)) {
+    return kIgnoreObjectInvalid;
+  }
+  if (view.GetLsanTag() == kIgnored)
+    return kIgnoreObjectAlreadyIgnored;
+  view.SetLsanTag(kIgnored);
+  return kIgnoreObjectSuccess;
+}
+
 }  // namespace __lsan

 using namespace __hwasan;
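
For reviewers, a minimal usage sketch of what this plumbing enables, driven through the public LSan interface (`<sanitizer/lsan_interface.h>`); it assumes a build with both HWASan and leak checking enabled. An allocation made under `__lsan_disable()` hits the `__lsan::DisabledInThisThread()` branch added to `HwasanAllocate` and is tagged `kIgnored` at allocation time, while `__lsan_ignore_object()` reaches the new `IgnoreObjectLocked` and retags a live chunk:

```cpp
// Sketch only: exercises the new tagging paths via the public LSan API.
#include <sanitizer/lsan_interface.h>
#include <stdlib.h>

int main() {
  // Tagged kDirectlyLeaked on allocation; retagged kIgnored by
  // __lsan_ignore_object() -> IgnoreObjectLocked() above.
  void *explicitly_ignored = malloc(32);
  __lsan_ignore_object(explicitly_ignored);

  // While leak checking is disabled, DisabledInThisThread() is true,
  // so this chunk is tagged kIgnored directly in HwasanAllocate().
  __lsan_disable();
  void *ignored_at_alloc = malloc(64);
  __lsan_enable();

  // Neither pointer should be reported by the leak check at exit.
  (void)explicitly_ignored;
  (void)ignored_at_alloc;
  return 0;
}
```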