diff --git a/compiler-rt/lib/asan/asan_allocator.cpp b/compiler-rt/lib/asan/asan_allocator.cpp --- a/compiler-rt/lib/asan/asan_allocator.cpp +++ b/compiler-rt/lib/asan/asan_allocator.cpp @@ -1153,33 +1153,6 @@ return kIgnoreObjectSuccess; } -void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs) { - // Look for the arg pointer of threads that have been created or are running. - // This is necessary to prevent false positive leaks due to the AsanThread - // holding the only live reference to a heap object. This can happen because - // the `pthread_create()` interceptor doesn't wait for the child thread to - // start before returning and thus loosing the the only live reference to the - // heap object on the stack. - - __asan::AsanThreadContext *atctx = - reinterpret_cast<__asan::AsanThreadContext *>(tctx); - __asan::AsanThread *asan_thread = atctx->thread; - - // Note ThreadStatusRunning is required because there is a small window where - // the thread status switches to `ThreadStatusRunning` but the `arg` pointer - // still isn't on the stack yet. 
- if (atctx->status != ThreadStatusCreated && - atctx->status != ThreadStatusRunning) - return; - - uptr thread_arg = reinterpret_cast<uptr>(asan_thread->get_arg()); - if (!thread_arg) - return; - - auto ptrsVec = reinterpret_cast<InternalMmapVector<uptr> *>(ptrs); - ptrsVec->push_back(thread_arg); -} - } // namespace __lsan // ---------------------- Interface ---------------- {{{1 diff --git a/compiler-rt/lib/asan/asan_thread.cpp b/compiler-rt/lib/asan/asan_thread.cpp --- a/compiler-rt/lib/asan/asan_thread.cpp +++ b/compiler-rt/lib/asan/asan_thread.cpp @@ -518,9 +518,41 @@ fake_stack->ForEachFakeFrame(callback, arg); } -void RunCallbackForEachThreadLocked(__sanitizer::ThreadRegistry::ThreadCallback cb, - void *arg) { - GetAsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(cb, arg); +static void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs) { + // Look for the arg pointer of threads that have been created or are running. + // This is necessary to prevent false positive leaks due to the AsanThread + // holding the only live reference to a heap object. This can happen because + // the `pthread_create()` interceptor doesn't wait for the child thread to + // start before returning and thus losing the only live reference to the + // heap object on the stack. + + __asan::AsanThreadContext *atctx = + reinterpret_cast<__asan::AsanThreadContext *>(tctx); + __asan::AsanThread *asan_thread = atctx->thread; + + // Note ThreadStatusRunning is required because there is a small window where + // the thread status switches to `ThreadStatusRunning` but the `arg` pointer + // still isn't on the stack yet. 
+ if (atctx->status != ThreadStatusCreated && + atctx->status != ThreadStatusRunning) + return; + + uptr thread_arg = reinterpret_cast<uptr>(asan_thread->get_arg()); + if (!thread_arg) + return; + + auto ptrsVec = reinterpret_cast<InternalMmapVector<uptr> *>(ptrs); + ptrsVec->push_back(thread_arg); +} + +void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) { + GetAsanThreadRegistryLocked()->RunCallbackForEachThreadLocked( + GetAdditionalThreadContextPtrs, ptrs); +} + +void ReportUnsuspendedThreadsLocked(InternalMmapVector<tid_t> *threads) { + GetAsanThreadRegistryLocked()->RunCallbackForEachThreadLocked( + &ReportIfNotSuspended, threads); } void FinishThreadLocked(u32 tid) { diff --git a/compiler-rt/lib/lsan/lsan_allocator.cpp b/compiler-rt/lib/lsan/lsan_allocator.cpp --- a/compiler-rt/lib/lsan/lsan_allocator.cpp +++ b/compiler-rt/lib/lsan/lsan_allocator.cpp @@ -319,7 +319,7 @@ } } -void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs) { +void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) { // This function can be used to treat memory reachable from `tctx` as live. // This is useful for threads that have been created but not yet started. diff --git a/compiler-rt/lib/lsan/lsan_common.h b/compiler-rt/lib/lsan/lsan_common.h --- a/compiler-rt/lib/lsan/lsan_common.h +++ b/compiler-rt/lib/lsan/lsan_common.h @@ -105,9 +105,8 @@ void GetAllThreadAllocatorCachesLocked(InternalMmapVector<uptr> *caches); void ForEachExtraStackRange(tid_t os_id, RangeIteratorCallback callback, void *arg); - -void RunCallbackForEachThreadLocked(__sanitizer::ThreadRegistry::ThreadCallback cb, - void *arg); +void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs); +void ReportUnsuspendedThreadsLocked(InternalMmapVector<tid_t> *threads); //// -------------------------------------------------------------------------- //// Allocator prototypes. @@ -146,8 +145,6 @@ // Helper for __lsan_ignore_object(). 
IgnoreObjectResult IgnoreObjectLocked(const void *p); -void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs); - // The rest of the LSan interface which is implemented by library. struct ScopedStopTheWorldLock { @@ -269,6 +266,7 @@ void DoRecoverableLeakCheckVoid(); void DisableCounterUnderflow(); bool DisabledInThisThread(); +void ReportIfNotSuspended(ThreadContextBase *tctx, void *arg); // Used to implement __lsan::ScopedDisabler. void DisableInThisThread(); diff --git a/compiler-rt/lib/lsan/lsan_common.cpp b/compiler-rt/lib/lsan/lsan_common.cpp --- a/compiler-rt/lib/lsan/lsan_common.cpp +++ b/compiler-rt/lib/lsan/lsan_common.cpp @@ -371,7 +371,7 @@ static void ProcessThreadRegistry(Frontier *frontier) { InternalMmapVector<uptr> ptrs; - RunCallbackForEachThreadLocked(GetAdditionalThreadContextPtrs, &ptrs); + GetAdditionalThreadContextPtrsLocked(&ptrs); for (uptr i = 0; i < ptrs.size(); ++i) { void *ptr = reinterpret_cast<void *>(ptrs[i]); @@ -668,7 +668,7 @@ Printf("%s\n\n", line); } -static void ReportIfNotSuspended(ThreadContextBase *tctx, void *arg) { +void ReportIfNotSuspended(ThreadContextBase *tctx, void *arg) { const InternalMmapVector<tid_t> &suspended_threads = *(const InternalMmapVector<tid_t> *)arg; if (tctx->status == ThreadStatusRunning) { @@ -695,8 +695,7 @@ threads[i] = suspended_threads.GetThreadID(i); Sort(threads.data(), threads.size()); - - RunCallbackForEachThreadLocked(&ReportIfNotSuspended, &threads); + ReportUnsuspendedThreadsLocked(&threads); } # endif // !SANITIZER_FUCHSIA diff --git a/compiler-rt/lib/lsan/lsan_fuchsia.cpp b/compiler-rt/lib/lsan/lsan_fuchsia.cpp --- a/compiler-rt/lib/lsan/lsan_fuchsia.cpp +++ b/compiler-rt/lib/lsan/lsan_fuchsia.cpp @@ -68,7 +68,7 @@ } void GetAllThreadAllocatorCachesLocked(InternalMmapVector<uptr> *caches) { - RunCallbackForEachThreadLocked( + GetLsanThreadRegistryLocked()->RunCallbackForEachThreadLocked( [](ThreadContextBase *tctx, void *arg) { auto ctx = static_cast<ThreadContext *>(tctx); 
static_cast<InternalMmapVector<uptr> *>(arg)->push_back(ctx->cache_begin()); diff --git a/compiler-rt/lib/lsan/lsan_thread.cpp b/compiler-rt/lib/lsan/lsan_thread.cpp --- a/compiler-rt/lib/lsan/lsan_thread.cpp +++ b/compiler-rt/lib/lsan/lsan_thread.cpp @@ -87,9 +87,9 @@ return thread_registry; } -void RunCallbackForEachThreadLocked( - __sanitizer::ThreadRegistry::ThreadCallback cb, void *arg) { - GetLsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(cb, arg); +void ReportUnsuspendedThreadsLocked(InternalMmapVector<tid_t> *threads) { + GetLsanThreadRegistryLocked()->RunCallbackForEachThreadLocked( + &ReportIfNotSuspended, threads); } } // namespace __lsan