diff --git a/compiler-rt/lib/asan/asan_allocator.cpp b/compiler-rt/lib/asan/asan_allocator.cpp
--- a/compiler-rt/lib/asan/asan_allocator.cpp
+++ b/compiler-rt/lib/asan/asan_allocator.cpp
@@ -1153,33 +1153,6 @@
   return kIgnoreObjectSuccess;
 }
 
-void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs) {
-  // Look for the arg pointer of threads that have been created or are running.
-  // This is necessary to prevent false positive leaks due to the AsanThread
-  // holding the only live reference to a heap object. This can happen because
-  // the `pthread_create()` interceptor doesn't wait for the child thread to
-  // start before returning and thus loosing the the only live reference to the
-  // heap object on the stack.
-
-  __asan::AsanThreadContext *atctx =
-      reinterpret_cast<__asan::AsanThreadContext *>(tctx);
-  __asan::AsanThread *asan_thread = atctx->thread;
-
-  // Note ThreadStatusRunning is required because there is a small window where
-  // the thread status switches to `ThreadStatusRunning` but the `arg` pointer
-  // still isn't on the stack yet.
-  if (atctx->status != ThreadStatusCreated &&
-      atctx->status != ThreadStatusRunning)
-    return;
-
-  uptr thread_arg = reinterpret_cast<uptr>(asan_thread->get_arg());
-  if (!thread_arg)
-    return;
-
-  auto ptrsVec = reinterpret_cast<InternalMmapVector<uptr> *>(ptrs);
-  ptrsVec->push_back(thread_arg);
-}
-
 }  // namespace __lsan
 
 // ---------------------- Interface ---------------- {{{1
diff --git a/compiler-rt/lib/asan/asan_thread.cpp b/compiler-rt/lib/asan/asan_thread.cpp
--- a/compiler-rt/lib/asan/asan_thread.cpp
+++ b/compiler-rt/lib/asan/asan_thread.cpp
@@ -518,9 +518,44 @@
   fake_stack->ForEachFakeFrame(callback, arg);
 }
 
-void RunCallbackForEachThreadLocked(__sanitizer::ThreadRegistry::ThreadCallback cb,
-                                    void *arg) {
-  GetAsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(cb, arg);
+void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) {
+  GetAsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(
+      [](ThreadContextBase *tctx, void *ptrs) {
+        // Look for the arg pointer of threads that have been created or are
+        // running. This is necessary to prevent false positive leaks due to the
+        // AsanThread holding the only live reference to a heap object. This
+        // can happen because the `pthread_create()` interceptor doesn't wait
+        // for the child thread to start before returning and thus losing the
+        // only live reference to the heap object on the stack.
+
+        __asan::AsanThreadContext *atctx =
+            static_cast<__asan::AsanThreadContext *>(tctx);
+
+        // Note ThreadStatusRunning is required because there is a small window
+        // where the thread status switches to `ThreadStatusRunning` but the
+        // `arg` pointer still isn't on the stack yet.
+        if (atctx->status != ThreadStatusCreated &&
+            atctx->status != ThreadStatusRunning)
+          return;
+
+        uptr thread_arg = reinterpret_cast<uptr>(atctx->thread->get_arg());
+        if (!thread_arg)
+          return;
+
+        auto ptrsVec = reinterpret_cast<InternalMmapVector<uptr> *>(ptrs);
+        ptrsVec->push_back(thread_arg);
+      },
+      ptrs);
+}
+
+void GetRunningThreadsLocked(InternalMmapVector<tid_t> *threads) {
+  GetAsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(
+      [](ThreadContextBase *tctx, void *threads) {
+        if (tctx->status == ThreadStatusRunning)
+          reinterpret_cast<InternalMmapVector<tid_t> *>(threads)->push_back(
+              tctx->os_id);
+      },
+      threads);
 }
 
 void FinishThreadLocked(u32 tid) {
diff --git a/compiler-rt/lib/hwasan/hwasan_thread.cpp b/compiler-rt/lib/hwasan/hwasan_thread.cpp
--- a/compiler-rt/lib/hwasan/hwasan_thread.cpp
+++ b/compiler-rt/lib/hwasan/hwasan_thread.cpp
@@ -198,6 +198,6 @@
                             void *arg) {}
 
 void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) {}
-void ReportUnsuspendedThreadsLocked(InternalMmapVector<tid_t> *threads) {}
+void GetRunningThreadsLocked(InternalMmapVector<tid_t> *threads) {}
 
 }  // namespace __lsan
diff --git a/compiler-rt/lib/lsan/lsan_allocator.cpp b/compiler-rt/lib/lsan/lsan_allocator.cpp
--- a/compiler-rt/lib/lsan/lsan_allocator.cpp
+++ b/compiler-rt/lib/lsan/lsan_allocator.cpp
@@ -319,7 +319,7 @@
   }
 }
 
-void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs) {
+void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) {
   // This function can be used to treat memory reachable from `tctx` as live.
   // This is useful for threads that have been created but not yet started.
 }
diff --git a/compiler-rt/lib/lsan/lsan_common.h b/compiler-rt/lib/lsan/lsan_common.h
--- a/compiler-rt/lib/lsan/lsan_common.h
+++ b/compiler-rt/lib/lsan/lsan_common.h
@@ -105,9 +105,8 @@
 void GetAllThreadAllocatorCachesLocked(InternalMmapVector<uptr> *caches);
 void ForEachExtraStackRange(tid_t os_id, RangeIteratorCallback callback,
                             void *arg);
-
-void RunCallbackForEachThreadLocked(__sanitizer::ThreadRegistry::ThreadCallback cb,
-                                    void *arg);
+void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs);
+void GetRunningThreadsLocked(InternalMmapVector<tid_t> *threads);
 
 //// --------------------------------------------------------------------------
 //// Allocator prototypes.
@@ -146,8 +145,6 @@
 // Helper for __lsan_ignore_object().
 IgnoreObjectResult IgnoreObjectLocked(const void *p);
 
-void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs);
-
 // The rest of the LSan interface which is implemented by library.
 
 struct ScopedStopTheWorldLock {
diff --git a/compiler-rt/lib/lsan/lsan_common.cpp b/compiler-rt/lib/lsan/lsan_common.cpp
--- a/compiler-rt/lib/lsan/lsan_common.cpp
+++ b/compiler-rt/lib/lsan/lsan_common.cpp
@@ -371,7 +371,7 @@
 
 static void ProcessThreadRegistry(Frontier *frontier) {
   InternalMmapVector<uptr> ptrs;
-  RunCallbackForEachThreadLocked(GetAdditionalThreadContextPtrs, &ptrs);
+  GetAdditionalThreadContextPtrsLocked(&ptrs);
 
   for (uptr i = 0; i < ptrs.size(); ++i) {
     void *ptr = reinterpret_cast<void *>(ptrs[i]);
@@ -668,18 +668,6 @@
   Printf("%s\n\n", line);
 }
 
-static void ReportIfNotSuspended(ThreadContextBase *tctx, void *arg) {
-  const InternalMmapVector<tid_t> &suspended_threads =
-      *(const InternalMmapVector<tid_t> *)arg;
-  if (tctx->status == ThreadStatusRunning) {
-    uptr i = InternalLowerBound(suspended_threads, tctx->os_id);
-    if (i >= suspended_threads.size() || suspended_threads[i] != tctx->os_id)
-      Report(
-          "Running thread %llu was not suspended. False leaks are possible.\n",
-          tctx->os_id);
-  }
-}
-
 # if SANITIZER_FUCHSIA
 
 // Fuchsia provides a libc interface that guarantees all threads are
@@ -696,7 +684,16 @@
 
   Sort(threads.data(), threads.size());
 
-  RunCallbackForEachThreadLocked(&ReportIfNotSuspended, &threads);
+  InternalMmapVector<tid_t> unsuspended;
+  GetRunningThreadsLocked(&unsuspended);
+
+  for (auto os_id : unsuspended) {
+    uptr i = InternalLowerBound(threads, os_id);
+    if (i >= threads.size() || threads[i] != os_id)
+      Report(
+          "Running thread %zu was not suspended. False leaks are possible.\n",
+          os_id);
+  }
 }
 
 # endif  // !SANITIZER_FUCHSIA
diff --git a/compiler-rt/lib/lsan/lsan_common_fuchsia.cpp b/compiler-rt/lib/lsan/lsan_common_fuchsia.cpp
--- a/compiler-rt/lib/lsan/lsan_common_fuchsia.cpp
+++ b/compiler-rt/lib/lsan/lsan_common_fuchsia.cpp
@@ -12,6 +12,7 @@
 //===---------------------------------------------------------------------===//
 
 #include "lsan_common.h"
+#include "lsan_thread.h"
 #include "sanitizer_common/sanitizer_platform.h"
 
 #if CAN_SANITIZE_LEAKS && SANITIZER_FUCHSIA
@@ -146,7 +147,7 @@
   // just for the allocator cache, and to call ForEachExtraStackRange,
   // which ASan needs.
   if (flags()->use_stacks) {
-    RunCallbackForEachThreadLocked(
+    GetLsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(
         [](ThreadContextBase *tctx, void *arg) {
           ForEachExtraStackRange(tctx->os_id, ForEachExtraStackRangeCb,
                                  arg);
diff --git a/compiler-rt/lib/lsan/lsan_fuchsia.cpp b/compiler-rt/lib/lsan/lsan_fuchsia.cpp
--- a/compiler-rt/lib/lsan/lsan_fuchsia.cpp
+++ b/compiler-rt/lib/lsan/lsan_fuchsia.cpp
@@ -68,7 +68,7 @@
 }
 
 void GetAllThreadAllocatorCachesLocked(InternalMmapVector<uptr> *caches) {
-  RunCallbackForEachThreadLocked(
+  GetLsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(
       [](ThreadContextBase *tctx, void *arg) {
         auto ctx = static_cast<ThreadContext *>(tctx);
         static_cast<InternalMmapVector<uptr> *>(arg)->push_back(ctx->cache_begin());
diff --git a/compiler-rt/lib/lsan/lsan_thread.cpp b/compiler-rt/lib/lsan/lsan_thread.cpp
--- a/compiler-rt/lib/lsan/lsan_thread.cpp
+++ b/compiler-rt/lib/lsan/lsan_thread.cpp
@@ -87,9 +87,15 @@
   return thread_registry;
 }
 
-void RunCallbackForEachThreadLocked(
-    __sanitizer::ThreadRegistry::ThreadCallback cb, void *arg) {
-  GetLsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(cb, arg);
+void GetRunningThreadsLocked(InternalMmapVector<tid_t> *threads) {
+  GetLsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(
+      [](ThreadContextBase *tctx, void *threads) {
+        if (tctx->status == ThreadStatusRunning) {
+          reinterpret_cast<InternalMmapVector<tid_t> *>(threads)->push_back(
+              tctx->os_id);
+        }
+      },
+      threads);
 }
 
 }  // namespace __lsan