Index: lib/asan/asan_allocator.h
===================================================================
--- lib/asan/asan_allocator.h
+++ lib/asan/asan_allocator.h
@@ -196,7 +196,7 @@
 int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
                         BufferedStackTrace *stack);
-uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp);
+uptr asan_malloc_usable_size(const void *ptr, uptr bp);
 
 uptr asan_mz_size(const void *ptr);
 void asan_mz_force_lock();
Index: lib/asan/asan_allocator.cc
===================================================================
--- lib/asan/asan_allocator.cc
+++ lib/asan/asan_allocator.cc
@@ -762,27 +762,33 @@
 void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
                     AllocType alloc_type) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   return instance.Allocate(size, alignment, stack, alloc_type, true);
 }
 
 void asan_free(void *ptr, BufferedStackTrace *stack, AllocType alloc_type) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   instance.Deallocate(ptr, 0, stack, alloc_type);
 }
 
 void asan_sized_free(void *ptr, uptr size, BufferedStackTrace *stack,
                      AllocType alloc_type) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   instance.Deallocate(ptr, size, stack, alloc_type);
 }
 
 void *asan_malloc(uptr size, BufferedStackTrace *stack) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   return instance.Allocate(size, 8, stack, FROM_MALLOC, true);
 }
 
 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   return instance.Calloc(nmemb, size, stack);
 }
 
 void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   if (!p)
     return instance.Allocate(size, 8, stack, FROM_MALLOC, true);
   if (size == 0) {
@@ -793,10 +799,12 @@
 }
 
 void *asan_valloc(uptr size, BufferedStackTrace *stack) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   return instance.Allocate(size, GetPageSizeCached(), stack, FROM_MALLOC, true);
 }
 
 void *asan_pvalloc(uptr size, BufferedStackTrace *stack) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   uptr PageSize = GetPageSizeCached();
   size = RoundUpTo(size, PageSize);
   if (size == 0) {
@@ -808,16 +816,18 @@
 int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
                         BufferedStackTrace *stack) {
+  if (stack->size) stack->trace_buffer[0] = GET_CALLER_PC();
   void *ptr = instance.Allocate(size, alignment, stack, FROM_MALLOC, true);
   CHECK(IsAligned((uptr)ptr, alignment));
   *memptr = ptr;
   return 0;
 }
 
-uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp) {
+uptr asan_malloc_usable_size(const void *ptr, uptr bp) {
   if (!ptr) return 0;
   uptr usable_size = instance.AllocationSize(reinterpret_cast<uptr>(ptr));
   if (flags()->check_malloc_usable_size && (usable_size == 0)) {
+    uptr pc = GET_CALLER_PC();
     GET_STACK_TRACE_FATAL(pc, bp);
     ReportMallocUsableSizeNotOwned((uptr)ptr, &stack);
   }
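
Note (illustration, not part of the patch): the allocator entry points above overwrite trace_buffer[0] with GET_CALLER_PC() so that the top frame of an allocation stack points at the user call site instead of the interceptor. A minimal sketch of that mechanism, assuming GET_CALLER_PC() expands to __builtin_return_address(0) as in sanitizer_stacktrace.h; the names below are illustrative only:

#include <cstdint>
#include <cstdio>

// Illustrative stand-in for compiler-rt's GET_CALLER_PC() macro.
#define CALLER_PC() ((uintptr_t)__builtin_return_address(0))

// Must stay out of line: if this were inlined into main(), the builtin
// would report main()'s caller instead of the call site below.
__attribute__((noinline)) static uintptr_t RecordAllocationSite() {
  return CALLER_PC();
}

int main() {
  // Prints an address inside main(), just past the call below -- exactly
  // the frame an allocation report should show on top.
  printf("allocation site pc: 0x%zx\n", (size_t)RecordAllocationSite());
  return 0;
}
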
Index: lib/asan/asan_interceptors.cc
===================================================================
--- lib/asan/asan_interceptors.cc
+++ lib/asan/asan_interceptors.cc
@@ -50,35 +50,43 @@
   const char *interceptor_name;
 };
 
+NOINLINE
+static void ReportGenericErrorInCaller(uptr bp, uptr sp, uptr addr,
+                                       bool is_write, uptr access_size) {
+  uptr pc = GET_CALLER_PC();
+  ReportGenericError(pc, bp, sp, addr, is_write, access_size, 0, false);
+}
+
 // We implement ACCESS_MEMORY_RANGE, ASAN_READ_RANGE,
 // and ASAN_WRITE_RANGE as macro instead of function so
 // that no extra frames are created, and stack trace contains
 // relevant information only.
 // We check all shadow bytes.
-#define ACCESS_MEMORY_RANGE(ctx, offset, size, isWrite) do {            \
-    uptr __offset = (uptr)(offset);                                     \
-    uptr __size = (uptr)(size);                                         \
-    uptr __bad = 0;                                                     \
-    if (__offset > __offset + __size) {                                 \
-      GET_STACK_TRACE_FATAL_HERE;                                       \
-      ReportStringFunctionSizeOverflow(__offset, __size, &stack);       \
-    }                                                                   \
-    if (!QuickCheckForUnpoisonedRegion(__offset, __size) &&             \
-        (__bad = __asan_region_is_poisoned(__offset, __size))) {        \
-      AsanInterceptorContext *_ctx = (AsanInterceptorContext *)ctx;     \
-      bool suppressed = false;                                          \
-      if (_ctx) {                                                       \
-        suppressed = IsInterceptorSuppressed(_ctx->interceptor_name);   \
-        if (!suppressed && HaveStackTraceBasedSuppressions()) {         \
-          GET_STACK_TRACE_FATAL_HERE;                                   \
-          suppressed = IsStackTraceSuppressed(&stack);                  \
-        }                                                               \
-      }                                                                 \
-      if (!suppressed) {                                                \
-        GET_CURRENT_PC_BP_SP;                                           \
-        ReportGenericError(pc, bp, sp, __bad, isWrite, __size, 0, false);\
-      }                                                                 \
-    }                                                                   \
+#define ACCESS_MEMORY_RANGE(ctx, offset, size, isWrite)                 \
+  do {                                                                  \
+    uptr __offset = (uptr)(offset);                                     \
+    uptr __size = (uptr)(size);                                         \
+    uptr __bad = 0;                                                     \
+    if (__offset > __offset + __size) {                                 \
+      GET_STACK_TRACE_FATAL_HERE;                                       \
+      ReportStringFunctionSizeOverflow(__offset, __size, &stack);       \
+    }                                                                   \
+    if (!QuickCheckForUnpoisonedRegion(__offset, __size) &&             \
+        (__bad = __asan_region_is_poisoned(__offset, __size))) {        \
+      AsanInterceptorContext *_ctx = (AsanInterceptorContext *)ctx;     \
+      bool suppressed = false;                                          \
+      if (_ctx) {                                                       \
+        suppressed = IsInterceptorSuppressed(_ctx->interceptor_name);   \
+        if (!suppressed && HaveStackTraceBasedSuppressions()) {         \
+          GET_STACK_TRACE_FATAL_HERE;                                   \
+          suppressed = IsStackTraceSuppressed(&stack);                  \
+        }                                                               \
+      }                                                                 \
+      if (!suppressed) {                                                \
+        GET_CURRENT_BP_SP;                                              \
+        ReportGenericErrorInCaller(bp, sp, __bad, isWrite, __size);     \
+      }                                                                 \
+    }                                                                   \
   } while (0)
 
 // memcpy is called during __asan_init() from the internals of printf(...).
Index: lib/asan/asan_malloc_linux.cc
===================================================================
--- lib/asan/asan_malloc_linux.cc
+++ lib/asan/asan_malloc_linux.cc
@@ -109,9 +109,9 @@
 }
 
 INTERCEPTOR(uptr, malloc_usable_size, void *ptr) {
-  GET_CURRENT_PC_BP_SP;
+  GET_CURRENT_BP_SP;
   (void)sp;
-  return asan_malloc_usable_size(ptr, pc, bp);
+  return asan_malloc_usable_size(ptr, bp);
 }
 
 // We avoid including malloc.h for portability reasons.
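
Note (illustration, not part of the patch): ACCESS_MEMORY_RANGE now reports through the NOINLINE helper ReportGenericErrorInCaller, which recovers the pc from its own return address, so the reported pc lies inside the interceptor that expanded the macro. A rough sketch of that pattern under the same __builtin_return_address(0) assumption; the names are illustrative, not the compiler-rt API:

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Out-of-line reporting helper, analogous in shape to
// ReportGenericErrorInCaller: the pc it reports is its return address,
// i.e. a pc inside whatever function expanded the checking macro.
__attribute__((noinline))
static void ReportBadRange(uintptr_t addr, size_t access_size) {
  uintptr_t pc = (uintptr_t)__builtin_return_address(0);
  printf("bad access of %zu bytes at 0x%zx, reported at pc 0x%zx\n",
         access_size, (size_t)addr, (size_t)pc);
}

// Stands in for an intercepted routine that range-checks its argument.
static void checked_copy(const void *p, size_t n) {
  if (p == nullptr && n != 0)  // pretend this is the poisoned-range check
    ReportBadRange((uintptr_t)p, n);
}

int main() {
  checked_copy(nullptr, 8);
  return 0;
}
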
Index: lib/asan/asan_malloc_win.cc
===================================================================
--- lib/asan/asan_malloc_win.cc
+++ lib/asan/asan_malloc_win.cc
@@ -131,9 +131,9 @@
 ALLOCATION_FUNCTION_ATTRIBUTE
 size_t _msize(const void *ptr) {
-  GET_CURRENT_PC_BP_SP;
+  GET_CURRENT_BP_SP;
   (void)sp;
-  return asan_malloc_usable_size(ptr, pc, bp);
+  return asan_malloc_usable_size(ptr, bp);
 }
 
 ALLOCATION_FUNCTION_ATTRIBUTE
@@ -200,9 +200,9 @@
 INTERCEPTOR_WINAPI(SIZE_T, HeapSize, HANDLE hHeap, DWORD dwFlags,
                    LPCVOID lpMem) {
   CHECK(dwFlags == 0 && "unsupported heap flags");
-  GET_CURRENT_PC_BP_SP;
+  GET_CURRENT_BP_SP;
   (void)sp;
-  return asan_malloc_usable_size(lpMem, pc, bp);
+  return asan_malloc_usable_size(lpMem, bp);
 }
 
 namespace __asan {
Index: lib/asan/asan_poisoning.cc
===================================================================
--- lib/asan/asan_poisoning.cc
+++ lib/asan/asan_poisoning.cc
@@ -207,18 +207,24 @@
   return 0;
 }
 
-#define CHECK_SMALL_REGION(p, size, isWrite)                      \
-  do {                                                            \
-    uptr __p = reinterpret_cast<uptr>(p);                         \
-    uptr __size = size;                                           \
-    if (UNLIKELY(__asan::AddressIsPoisoned(__p) ||                \
-                 __asan::AddressIsPoisoned(__p + __size - 1))) {  \
-      GET_CURRENT_PC_BP_SP;                                       \
-      uptr __bad = __asan_region_is_poisoned(__p, __size);        \
-      __asan_report_error(pc, bp, sp, __bad, isWrite, __size, 0); \
-    }                                                             \
-  } while (false);                                                \
-
+NOINLINE
+static void report_error(uptr bp, uptr sp, uptr addr, bool is_write,
+                         uptr size) {
+  uptr pc = GET_CALLER_PC();
+  __asan_report_error(pc, bp, sp, addr, is_write, size, 0);
+}
+
+#define CHECK_SMALL_REGION(p, size, isWrite)                      \
+  do {                                                            \
+    uptr __p = reinterpret_cast<uptr>(p);                         \
+    uptr __size = size;                                           \
+    if (UNLIKELY(__asan::AddressIsPoisoned(__p) ||                \
+                 __asan::AddressIsPoisoned(__p + __size - 1))) {  \
+      GET_CURRENT_BP_SP;                                          \
+      uptr __bad = __asan_region_is_poisoned(__p, __size);        \
+      report_error(bp, sp, __bad, isWrite, __size);               \
+    }                                                             \
+  } while (false);
 
 extern "C" SANITIZER_INTERFACE_ATTRIBUTE
 u16 __sanitizer_unaligned_load16(const uu16 *p) {
Index: lib/asan/asan_stack.h
===================================================================
--- lib/asan/asan_stack.h
+++ lib/asan/asan_stack.h
@@ -60,6 +60,10 @@
 #endif  // SANITIZER_WINDOWS
 }
 
+void GetStackTraceWithPcBpAndContext(BufferedStackTrace *stack, uptr max_depth,
+                                     uptr bp, void *context,
+                                     bool fast);
+
 }  // namespace __asan
 
 // NOTE: A Rule of thumb is to retrieve stack trace in the interceptors
@@ -78,7 +82,6 @@
     }                                                           \
   } else {                                                      \
     GetStackTraceWithPcBpAndContext(&stack, max_size,           \
-                                    StackTrace::GetCurrentPc(), \
                                     GET_CURRENT_FRAME(), 0, fast); \
   }
Index: lib/asan/asan_stack.cc
===================================================================
--- lib/asan/asan_stack.cc
+++ lib/asan/asan_stack.cc
@@ -27,6 +27,13 @@
   return atomic_load(&malloc_context_size, memory_order_acquire);
 }
 
+NOINLINE
+void GetStackTraceWithPcBpAndContext(BufferedStackTrace *stack, uptr max_depth,
+                                     uptr bp, void *context, bool fast) {
+  GetStackTraceWithPcBpAndContext(stack, max_depth, GET_CALLER_PC(), bp,
+                                  context, fast);
+}
+
 }  // namespace __asan
 
 // ------------------ Interface -------------- {{{1
Index: lib/asan/tests/asan_noinst_test.cc
===================================================================
--- lib/asan/tests/asan_noinst_test.cc
+++ lib/asan/tests/asan_noinst_test.cc
@@ -81,7 +81,7 @@
     size_t alignment = 1 << (my_rand_r(&seed) % 10 + 1);
     char *ptr = (char*)__asan::asan_memalign(alignment, size, &stack2,
                                              __asan::FROM_MALLOC);
-    EXPECT_EQ(size, __asan::asan_malloc_usable_size(ptr, 0, 0));
+    EXPECT_EQ(size, __asan::asan_malloc_usable_size(ptr, 0));
     vec.push_back(ptr);
     ptr[0] = 0;
     ptr[size-1] = 0;
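
Note (illustration, not part of the patch): the new overload in asan_stack.cc is a thin NOINLINE forwarding shim; it drops the explicit pc parameter and substitutes its own return address, which is a pc inside the GET_STACK_TRACE expansion that called it. A sketch of that shape only; the names are illustrative, not the compiler-rt API:

#include <cstdint>
#include <cstdio>

// The "real" unwinder entry point still takes an explicit pc.
static void Unwind(uintptr_t pc, uintptr_t bp) {
  printf("unwinding from pc=0x%zx bp=0x%zx\n", (size_t)pc, (size_t)bp);
}

// NOINLINE forwarding overload: callers that used to pass
// StackTrace::GetCurrentPc() now omit the pc and get the call-site
// address filled in here from the return address.
__attribute__((noinline))
static void Unwind(uintptr_t bp) {
  Unwind((uintptr_t)__builtin_return_address(0), bp);
}

int main() {
  Unwind((uintptr_t)__builtin_frame_address(0));
  return 0;
}
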
Index: lib/sanitizer_common/sanitizer_stacktrace.h
===================================================================
--- lib/sanitizer_common/sanitizer_stacktrace.h
+++ lib/sanitizer_common/sanitizer_stacktrace.h
@@ -129,11 +129,8 @@
   uptr bp = GET_CURRENT_FRAME();              \
   uptr pc = GET_CALLER_PC();
 
-// Use this macro if you want to print stack trace with the current
-// function in the top frame.
-#define GET_CURRENT_PC_BP_SP \
+#define GET_CURRENT_BP_SP \
   uptr bp = GET_CURRENT_FRAME();              \
-  uptr pc = StackTrace::GetCurrentPc();       \
   uptr local_stack;                           \
   uptr sp = (uptr)&local_stack
Index: lib/sanitizer_common/sanitizer_stacktrace.cc
===================================================================
--- lib/sanitizer_common/sanitizer_stacktrace.cc
+++ lib/sanitizer_common/sanitizer_stacktrace.cc
@@ -106,10 +106,6 @@
   }
 }
 
-static bool MatchPc(uptr cur_pc, uptr trace_pc, uptr threshold) {
-  return cur_pc - trace_pc <= threshold || trace_pc - cur_pc <= threshold;
-}
-
 void BufferedStackTrace::PopStackFrames(uptr count) {
   CHECK_LT(count, size);
   size -= count;
@@ -119,13 +115,15 @@
 }
 
 uptr BufferedStackTrace::LocatePcInTrace(uptr pc) {
-  // Use threshold to find PC in stack trace, as PC we want to unwind from may
-  // slightly differ from return address in the actual unwinded stack trace.
-  const int kPcThreshold = 350;
+  uptr min_diff = -1;
   for (uptr i = 0; i < size; ++i) {
-    if (MatchPc(pc, trace[i], kPcThreshold))
-      return i;
+    if (trace[i] == pc) return i;
+    uptr diff = trace[i] < pc ? pc - trace[i] : trace[i] - pc;
+    min_diff = min_diff > diff ? diff : min_diff;
   }
+  static const int kPcThreshold = 150;
+  // Check that no caller still relies on the old threshold-based matching.
+  CHECK_GT(min_diff, kPcThreshold);
   return 0;
 }
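
Note (illustration, not part of the patch): with the pc line removed, GET_CURRENT_BP_SP only captures a frame pointer and an approximate stack pointer; the pc handed to the unwinder is now an exact return address, which is why LocatePcInTrace can demand an exact match and merely CHECK that nothing still depends on the old 350-byte threshold. A rough model of what the macro captures, assuming GET_CURRENT_FRAME() maps to __builtin_frame_address(0); names are illustrative:

#include <cstdint>
#include <cstdio>

// Rough model of GET_CURRENT_BP_SP after this patch: the frame pointer
// comes from a compiler builtin and the stack pointer is approximated by
// the address of a fresh local variable.
#define CURRENT_BP_SP(bp, sp)                              \
  uintptr_t bp = (uintptr_t)__builtin_frame_address(0);    \
  uintptr_t local_stack_marker;                            \
  uintptr_t sp = (uintptr_t)&local_stack_marker

int main() {
  CURRENT_BP_SP(bp, sp);
  // bp/sp seed the unwinder; the pc is supplied separately, now taken
  // from a caller's return address rather than from the current function.
  printf("bp=0x%zx sp=0x%zx\n", (size_t)bp, (size_t)sp);
  return 0;
}
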