diff --git a/compiler-rt/lib/asan/asan_allocator.cpp b/compiler-rt/lib/asan/asan_allocator.cpp
--- a/compiler-rt/lib/asan/asan_allocator.cpp
+++ b/compiler-rt/lib/asan/asan_allocator.cpp
@@ -1094,6 +1094,8 @@
 }
 
 uptr GetUserBegin(uptr chunk) {
+  // FIXME: All usecases provide chunk address, GetAsanChunkByAddrFastLocked is
+  // not needed.
   __asan::AsanChunk *m = __asan::instance.GetAsanChunkByAddrFastLocked(chunk);
   return m ? m->Beg() : 0;
 }
diff --git a/compiler-rt/lib/hwasan/CMakeLists.txt b/compiler-rt/lib/hwasan/CMakeLists.txt
--- a/compiler-rt/lib/hwasan/CMakeLists.txt
+++ b/compiler-rt/lib/hwasan/CMakeLists.txt
@@ -164,6 +164,7 @@
             RTSanitizerCommonLibc
             RTSanitizerCommonCoverage
             RTSanitizerCommonSymbolizer
+            RTLSanCommon
             RTUbsan
     CFLAGS ${hwasan_rtl_flags}
     PARENT_TARGET hwasan)
@@ -200,6 +201,7 @@
             RTSanitizerCommonLibc
             RTSanitizerCommonCoverage
             RTSanitizerCommonSymbolizer
+            RTLSanCommon
             RTUbsan
             RTUbsan_cxx
             # The only purpose of RTHWAsan_dynamic_version_script_dummy is to
diff --git a/compiler-rt/lib/hwasan/hwasan_allocator.cpp b/compiler-rt/lib/hwasan/hwasan_allocator.cpp
--- a/compiler-rt/lib/hwasan/hwasan_allocator.cpp
+++ b/compiler-rt/lib/hwasan/hwasan_allocator.cpp
@@ -236,6 +236,10 @@
   Metadata *meta =
       reinterpret_cast<Metadata *>(allocator.GetMetaData(allocated));
+#if CAN_SANITIZE_LEAKS
+  meta->SetLsanTag(__lsan::DisabledInThisThread() ? __lsan::kIgnored
+                                                  : __lsan::kDirectlyLeaked);
+#endif
   meta->SetAllocated(StackDepotPut(*stack), orig_size);
   RunMallocHooks(user_ptr, size);
   return user_ptr;
@@ -386,6 +390,16 @@
   return HwasanChunkView(reinterpret_cast<uptr>(block), metadata);
 }
 
+static inline HwasanChunkView FindHeapChunkByAddressFastLocked(uptr address) {
+  void *block =
+      allocator.GetBlockBeginFastLocked(reinterpret_cast<void *>(address));
+  if (!block)
+    return HwasanChunkView();
+  Metadata *metadata =
+      reinterpret_cast<Metadata *>(allocator.GetMetaData(block));
+  return HwasanChunkView(reinterpret_cast<uptr>(block), metadata);
+}
+
 static uptr AllocationSize(const void *tagged_ptr) {
   const void *untagged_ptr = UntagPtr(tagged_ptr);
   if (!untagged_ptr) return 0;
@@ -501,8 +515,9 @@
 
 uptr PointsIntoChunk(void *p) {
   uptr addr = reinterpret_cast<uptr>(p);
-  __hwasan::HwasanChunkView view = __hwasan::FindHeapChunkByAddress(addr);
-  if (!view.IsAllocated())
+  __hwasan::HwasanChunkView view =
+      __hwasan::FindHeapChunkByAddressFastLocked(addr);
+  if (!view.IsAllocated())
     return 0;
   uptr chunk = view.Beg();
   if (view.AddrIsInside(addr))
@@ -513,13 +528,17 @@
 }
 
 uptr GetUserBegin(uptr chunk) {
-  return __hwasan::FindHeapChunkByAddress(chunk).Beg();
+  // FIXME: All usecases provide chunk address, FindHeapChunkByAddressFastLocked
+  // is not needed.
+  return __hwasan::FindHeapChunkByAddressFastLocked(chunk).Beg();
 }
 
 LsanMetadata::LsanMetadata(uptr chunk) {
-  metadata_ = chunk ? reinterpret_cast<__hwasan::Metadata *>(
-                          chunk - __hwasan::kChunkHeaderSize)
-                    : nullptr;
+  // FIXME: All usecases provide chunk address, GetMetaData is
+  // not needed.
+  metadata_ =
+      chunk ? __hwasan::allocator.GetMetaData(reinterpret_cast<void *>(chunk))
+            : nullptr;
 }
 
 bool LsanMetadata::allocated() const {
@@ -553,6 +572,24 @@
   __hwasan::allocator.ForEachChunk(callback, arg);
 }
 
+IgnoreObjectResult IgnoreObjectLocked(const void *p) {
+  void *block =
+      __hwasan::allocator.GetBlockBeginFastLocked(const_cast<void *>(p));
+  if (!block)
+    return kIgnoreObjectInvalid;
+  __hwasan::Metadata *metadata = reinterpret_cast<__hwasan::Metadata *>(
+      __hwasan::allocator.GetMetaData(block));
+  uptr addr = reinterpret_cast<uptr>(p);
+  __hwasan::HwasanChunkView view(reinterpret_cast<uptr>(block), metadata);
+  if (!view.IsAllocated() || !view.AddrIsInside(addr)) {
+    return kIgnoreObjectInvalid;
+  }
+  if (metadata->GetLsanTag() == kIgnored)
+    return kIgnoreObjectAlreadyIgnored;
+  metadata->SetLsanTag(kIgnored);
+  return kIgnoreObjectSuccess;
+}
+
 }  // namespace __lsan
 
 using namespace __hwasan;