diff --git a/compiler-rt/lib/asan/asan_allocator.cpp b/compiler-rt/lib/asan/asan_allocator.cpp --- a/compiler-rt/lib/asan/asan_allocator.cpp +++ b/compiler-rt/lib/asan/asan_allocator.cpp @@ -1153,7 +1153,8 @@ return kIgnoreObjectSuccess; } -void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs) { +static inline void GetAdditionalThreadContextPtrs( + __asan::AsanThreadContext *atctx, void *ptrs) { // Look for the arg pointer of threads that have been created or are running. // This is necessary to prevent false positive leaks due to the AsanThread // holding the only live reference to a heap object. This can happen because @@ -1161,8 +1162,6 @@ // start before returning and thus loosing the the only live reference to the // heap object on the stack. - __asan::AsanThreadContext *atctx = - reinterpret_cast<__asan::AsanThreadContext *>(tctx); __asan::AsanThread *asan_thread = atctx->thread; // Note ThreadStatusRunning is required because there is a small window where @@ -1180,6 +1179,11 @@ ptrsVec->push_back(thread_arg); } +void GetAdditionalThreadContextPtrs(void *context, void *arg) { + GetAdditionalThreadContextPtrs( + static_cast<__asan::AsanThreadContext *>(context), arg); +} + } // namespace __lsan // ---------------------- Interface ---------------- {{{1 diff --git a/compiler-rt/lib/asan/asan_thread.cpp b/compiler-rt/lib/asan/asan_thread.cpp --- a/compiler-rt/lib/asan/asan_thread.cpp +++ b/compiler-rt/lib/asan/asan_thread.cpp @@ -518,11 +518,27 @@ fake_stack->ForEachFakeFrame(callback, arg); } -void RunCallbackForEachThreadLocked(__sanitizer::ThreadRegistry::ThreadCallback cb, - void *arg) { +void RunCallbackForEachThreadLocked(LsanThreadCallback cb, void *arg) { GetAsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(cb, arg); } +static inline void ReportIfNotSuspended(ThreadContextBase *tctx, void *threads) { + const InternalMmapVector<tid_t> &suspended_threads = + *(const InternalMmapVector<tid_t> *)threads; + if (tctx->status == 
ThreadStatusRunning) { + uptr i = InternalLowerBound(suspended_threads, tctx->os_id); + if (i >= suspended_threads.size() || suspended_threads[i] != tctx->os_id) + Report( + "Running thread %llu was not suspended. False leaks are possible.\n", + tctx->os_id); + } +} + +void ReportIfNotSuspended(void *context, void *arg) { + ReportIfNotSuspended(static_cast<ThreadContextBase *>(context), arg); +} + + void FinishThreadLocked(u32 tid) { GetAsanThreadRegistryLocked()->FinishThread(tid); } diff --git a/compiler-rt/lib/hwasan/hwasan_thread.cpp b/compiler-rt/lib/hwasan/hwasan_thread.cpp --- a/compiler-rt/lib/hwasan/hwasan_thread.cpp +++ b/compiler-rt/lib/hwasan/hwasan_thread.cpp @@ -197,7 +197,7 @@ void ForEachExtraStackRange(tid_t os_id, RangeIteratorCallback callback, void *arg) {} -void GetAdditionalThreadContextPtrsLocked(InternalMmapVector<uptr> *ptrs) {} -void ReportUnsuspendedThreadsLocked(InternalMmapVector<tid_t> *threads) {} +void GetAdditionalThreadContextPtrs(void *context, void *arg) {} +void ReportIfNotSuspended(void *context, void *arg) {} } // namespace __lsan diff --git a/compiler-rt/lib/lsan/lsan_allocator.cpp b/compiler-rt/lib/lsan/lsan_allocator.cpp --- a/compiler-rt/lib/lsan/lsan_allocator.cpp +++ b/compiler-rt/lib/lsan/lsan_allocator.cpp @@ -319,7 +319,7 @@ } } -void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs) { +void GetAdditionalThreadContextPtrs(void *context, void *args) { // This function can be used to treat memory reachable from `tctx` as live. // This is useful for threads that have been created but not yet started. 
diff --git a/compiler-rt/lib/lsan/lsan_common.h b/compiler-rt/lib/lsan/lsan_common.h --- a/compiler-rt/lib/lsan/lsan_common.h +++ b/compiler-rt/lib/lsan/lsan_common.h @@ -106,8 +106,11 @@ void ForEachExtraStackRange(tid_t os_id, RangeIteratorCallback callback, void *arg); -void RunCallbackForEachThreadLocked(__sanitizer::ThreadRegistry::ThreadCallback cb, - void *arg); +typedef void (*LsanThreadCallback)(void *context, void *arg); +void RunCallbackForEachThreadLocked(LsanThreadCallback cb, void *arg); +void GetAdditionalThreadContextPtrs(void *context, void *ptrs); +void ReportIfNotSuspended(void *context, void *arg); + //// -------------------------------------------------------------------------- //// Allocator prototypes. @@ -146,8 +149,6 @@ // Helper for __lsan_ignore_object(). IgnoreObjectResult IgnoreObjectLocked(const void *p); -void GetAdditionalThreadContextPtrs(ThreadContextBase *tctx, void *ptrs); - // The rest of the LSan interface which is implemented by library. struct ScopedStopTheWorldLock { diff --git a/compiler-rt/lib/lsan/lsan_common.cpp b/compiler-rt/lib/lsan/lsan_common.cpp --- a/compiler-rt/lib/lsan/lsan_common.cpp +++ b/compiler-rt/lib/lsan/lsan_common.cpp @@ -668,18 +668,6 @@ Printf("%s\n\n", line); } -static void ReportIfNotSuspended(ThreadContextBase *tctx, void *arg) { - const InternalMmapVector<tid_t> &suspended_threads = - *(const InternalMmapVector<tid_t> *)arg; - if (tctx->status == ThreadStatusRunning) { - uptr i = InternalLowerBound(suspended_threads, tctx->os_id); - if (i >= suspended_threads.size() || suspended_threads[i] != tctx->os_id) - Report( - "Running thread %llu was not suspended. 
False leaks are possible.\n", - tctx->os_id); - } -} - # if SANITIZER_FUCHSIA // Fuchsia provides a libc interface that guarantees all threads are @@ -696,7 +684,7 @@ Sort(threads.data(), threads.size()); - RunCallbackForEachThreadLocked(&ReportIfNotSuspended, &threads); + RunCallbackForEachThreadLocked(ReportIfNotSuspended, &threads); } # endif // !SANITIZER_FUCHSIA diff --git a/compiler-rt/lib/lsan/lsan_common_fuchsia.cpp b/compiler-rt/lib/lsan/lsan_common_fuchsia.cpp --- a/compiler-rt/lib/lsan/lsan_common_fuchsia.cpp +++ b/compiler-rt/lib/lsan/lsan_common_fuchsia.cpp @@ -147,7 +147,8 @@ // which ASan needs. if (flags()->use_stacks) { RunCallbackForEachThreadLocked( - [](ThreadContextBase *tctx, void *arg) { + [](ThreadContextBase *context, void *arg) { + auto tctx = static_cast(context); ForEachExtraStackRange(tctx->os_id, ForEachExtraStackRangeCb, arg); }, diff --git a/compiler-rt/lib/lsan/lsan_fuchsia.cpp b/compiler-rt/lib/lsan/lsan_fuchsia.cpp --- a/compiler-rt/lib/lsan/lsan_fuchsia.cpp +++ b/compiler-rt/lib/lsan/lsan_fuchsia.cpp @@ -69,7 +69,7 @@ void GetAllThreadAllocatorCachesLocked(InternalMmapVector *caches) { RunCallbackForEachThreadLocked( - [](ThreadContextBase *tctx, void *arg) { + [](void *tctx, void *arg) { auto ctx = static_cast(tctx); static_cast(arg)->push_back(ctx->cache_begin()); }, diff --git a/compiler-rt/lib/lsan/lsan_thread.cpp b/compiler-rt/lib/lsan/lsan_thread.cpp --- a/compiler-rt/lib/lsan/lsan_thread.cpp +++ b/compiler-rt/lib/lsan/lsan_thread.cpp @@ -92,4 +92,16 @@ GetLsanThreadRegistryLocked()->RunCallbackForEachThreadLocked(cb, arg); } +void ReportIfNotSuspended(ThreadContextBase *tctx, void *arg) { + const InternalMmapVector &suspended_threads = + *(const InternalMmapVector *)arg; + if (tctx->status == ThreadStatusRunning) { + uptr i = InternalLowerBound(suspended_threads, tctx->os_id); + if (i >= suspended_threads.size() || suspended_threads[i] != tctx->os_id) + Report( + "Running thread %llu was not suspended. 
False leaks are possible.\n", + tctx->os_id); + } +} + } // namespace __lsan