diff --git a/compiler-rt/lib/lsan/lsan_common.h b/compiler-rt/lib/lsan/lsan_common.h
--- a/compiler-rt/lib/lsan/lsan_common.h
+++ b/compiler-rt/lib/lsan/lsan_common.h
@@ -103,8 +103,9 @@
                   ChunkTag tag);
   void ReportTopLeaks(uptr max_leaks);
   void PrintSummary();
-  void ApplySuppressions();
+  uptr ApplySuppressions();
   uptr UnsuppressedLeakCount();
+  uptr IndirectUnsuppressedLeakCount();
 
  private:
   void PrintReportForLeak(uptr index);
diff --git a/compiler-rt/lib/lsan/lsan_common.cpp b/compiler-rt/lib/lsan/lsan_common.cpp
--- a/compiler-rt/lib/lsan/lsan_common.cpp
+++ b/compiler-rt/lib/lsan/lsan_common.cpp
@@ -68,9 +68,11 @@ class LeakSuppressionContext {
   bool parsed = false;
   SuppressionContext context;
+  bool suppressed_stacks_sorted = true;
+  InternalMmapVector<u32> suppressed_stacks;
 
   Suppression *GetSuppressionForAddr(uptr addr);
-  void LazyParse();
+  void LazyInit();
 
  public:
   LeakSuppressionContext(const char *supprression_types[],
@@ -78,6 +80,14 @@
       : context(supprression_types, suppression_types_num) {}
 
   Suppression *GetSuppressionForStack(u32 stack_trace_id);
+
+  const InternalMmapVector<u32> &GetSortedSuppressedStacks() {
+    if (!suppressed_stacks_sorted) {
+      suppressed_stacks_sorted = true;
+      SortAndDedup(suppressed_stacks);
+    }
+    return suppressed_stacks;
+  }
   void PrintMatchedSuppressions();
 };
@@ -105,7 +115,7 @@
   LeakSuppressionContext(kSuppressionTypes, ARRAY_SIZE(kSuppressionTypes));
 }
 
-void LeakSuppressionContext::LazyParse() {
+void LeakSuppressionContext::LazyInit() {
   if (!parsed) {
     parsed = true;
     context.ParseFromFile(flags()->suppressions);
@@ -412,6 +422,24 @@
   }
 }
 
+static void IgnoredSuppressedCb(uptr chunk, void *arg) {
+  CHECK(arg);
+  chunk = GetUserBegin(chunk);
+  LsanMetadata m(chunk);
+  if (!m.allocated() || m.tag() == kIgnored)
+    return;
+
+  const InternalMmapVector<u32> &suppressed =
+      *static_cast<const InternalMmapVector<u32> *>(arg);
+  uptr idx = InternalLowerBound(suppressed, m.stack_trace_id());
+  if (idx >= suppressed.size() || m.stack_trace_id() != suppressed[idx])
+    return;
+
+  LOG_POINTERS("Suppressed: chunk %p-%p of size %zu.\n", chunk,
+               chunk + m.requested_size(), m.requested_size());
+  m.set_tag(kIgnored);
+}
+
 // ForEachChunk callback. If chunk is marked as ignored, adds its address to
 // frontier.
 static void CollectIgnoredCb(uptr chunk, void *arg) {
@@ -495,6 +523,12 @@
 // Sets the appropriate tag on each chunk.
 static void ClassifyAllChunks(SuspendedThreadsList const &suspended_threads,
                               Frontier *frontier) {
+  const InternalMmapVector<u32> &suppressed_stacks =
+      GetSuppressionContext()->GetSortedSuppressedStacks();
+  if (!suppressed_stacks.empty()) {
+    ForEachChunk(IgnoredSuppressedCb,
+                 const_cast<InternalMmapVector<u32> *>(&suppressed_stacks));
+  }
   ForEachChunk(CollectIgnoredCb, frontier);
   ProcessGlobalRegions(frontier);
   ProcessThreads(suspended_threads, frontier);
@@ -643,21 +677,41 @@
 static bool CheckForLeaks() {
   if (&__lsan_is_turned_off && __lsan_is_turned_off())
     return false;
-  EnsureMainThreadIDIsCorrect();
-  CheckForLeaksParam param;
-  LockStuffAndStopTheWorld(CheckForLeaksCallback, &param);
+  // Inside LockStuffAndStopTheWorld we can't run symbolizer, so we can't match
+  // suppressions. However if a stack id was previously suppressed, it should be
+  // suppressed in future checks as well.
+  for (int i = 0;; ++i) {
+    EnsureMainThreadIDIsCorrect();
+    CheckForLeaksParam param;
+    LockStuffAndStopTheWorld(CheckForLeaksCallback, &param);
+    if (!param.success) {
+      Report("LeakSanitizer has encountered a fatal error.\n");
+      Report(
+          "HINT: For debugging, try setting environment variable "
+          "LSAN_OPTIONS=verbosity=1:log_threads=1\n");
+      Report(
+          "HINT: LeakSanitizer does not work under ptrace (strace, gdb, "
+          "etc)\n");
+      Die();
+    }
+    // No new suppression stacks, so rerun will not help and we can report.
+    if (!param.leak_report.ApplySuppressions())
+      return PrintResults(param.leak_report);
 
-  if (!param.success) {
-    Report("LeakSanitizer has encountered a fatal error.\n");
-    Report(
-        "HINT: For debugging, try setting environment variable "
-        "LSAN_OPTIONS=verbosity=1:log_threads=1\n");
-    Report(
-        "HINT: LeakSanitizer does not work under ptrace (strace, gdb, etc)\n");
-    Die();
+    // No indirect leaks to report, so we are done here.
+    if (!param.leak_report.IndirectUnsuppressedLeakCount())
+      return PrintResults(param.leak_report);
+
+    if (i >= 8) {
+      Report("WARNING: LeakSanitizer gave up on indirect leaks suppression.\n");
+      return PrintResults(param.leak_report);
+    }
+
+    // We found a new previously unseen suppressed call stack. Rerun to make
+    // sure it does not hold indirect leaks.
+    VReport(1, "Rerun with %zu suppressed stacks.\n",
+            GetSuppressionContext()->GetSortedSuppressedStacks().size());
   }
-  param.leak_report.ApplySuppressions();
-  return PrintResults(param.leak_report);
 }
 
 static bool has_reported_leaks = false;
@@ -703,13 +757,16 @@
 Suppression *LeakSuppressionContext::GetSuppressionForStack(
     u32 stack_trace_id) {
-  LazyParse();
+  LazyInit();
   StackTrace stack = StackDepotGet(stack_trace_id);
   for (uptr i = 0; i < stack.size; i++) {
     Suppression *s = GetSuppressionForAddr(
         StackTrace::GetPreviousInstructionPc(stack.trace[i]));
-    if (s)
+    if (s) {
+      suppressed_stacks_sorted = false;
+      suppressed_stacks.push_back(stack_trace_id);
       return s;
+    }
   }
   return nullptr;
 }
@@ -820,8 +877,9 @@
   ReportErrorSummary(summary.data());
 }
 
-void LeakReport::ApplySuppressions() {
+uptr LeakReport::ApplySuppressions() {
   LeakSuppressionContext *suppressions = GetSuppressionContext();
+  uptr new_suppressions = 0;
   for (uptr i = 0; i < leaks_.size(); i++) {
     Suppression *s =
         suppressions->GetSuppressionForStack(leaks_[i].stack_trace_id);
@@ -830,8 +888,10 @@
       atomic_store_relaxed(&s->hit_count, atomic_load_relaxed(&s->hit_count) +
                                               leaks_[i].hit_count);
       leaks_[i].is_suppressed =
true;
+      ++new_suppressions;
     }
   }
+  return new_suppressions;
 }
 
 uptr LeakReport::UnsuppressedLeakCount() {
@@ -841,6 +901,14 @@
   return result;
 }
 
+uptr LeakReport::IndirectUnsuppressedLeakCount() {
+  uptr result = 0;
+  for (uptr i = 0; i < leaks_.size(); i++)
+    if (!leaks_[i].is_suppressed && !leaks_[i].is_directly_leaked)
+      result++;
+  return result;
+}
+
 }  // namespace __lsan
 #else  // CAN_SANITIZE_LEAKS
 namespace __lsan {
diff --git a/compiler-rt/test/lsan/TestCases/suppressions_file.cpp b/compiler-rt/test/lsan/TestCases/suppressions_file.cpp
--- a/compiler-rt/test/lsan/TestCases/suppressions_file.cpp
+++ b/compiler-rt/test/lsan/TestCases/suppressions_file.cpp
@@ -17,15 +17,14 @@
 #include <stdio.h>
 #include <stdlib.h>
 
-void* LSanTestLeakingFunc() {
+void *LSanTestLeakingFunc() {
   void *p = malloc(666);
   fprintf(stderr, "Test alloc: %p.\n", p);
   return p;
 }
 
 void LSanTestUnsuppressedLeakingFunc() {
-  void** p = (void**)LSanTestLeakingFunc();
-  // FIXME: This must be suppressed as well.
+  void **p = (void **)LSanTestLeakingFunc();
   *p = malloc(777);
   fprintf(stderr, "Test alloc: %p.\n", *p);
 }
@@ -38,6 +37,6 @@
 }
 // CHECK: Suppressions used:
 // CHECK: 1 666 *LSanTestLeakingFunc*
-// CHECK: SUMMARY: {{(Leak|Address)}}Sanitizer: 2114 byte(s) leaked in 2 allocation(s)
+// CHECK: SUMMARY: {{(Leak|Address)}}Sanitizer: 1337 byte(s) leaked in 1 allocation(s)
 // NOSUPP: SUMMARY: {{(Leak|Address)}}Sanitizer: 2780 byte(s) leaked in 3 allocation(s).