diff --git a/compiler-rt/lib/hwasan/hwasan_allocator.h b/compiler-rt/lib/hwasan/hwasan_allocator.h
--- a/compiler-rt/lib/hwasan/hwasan_allocator.h
+++ b/compiler-rt/lib/hwasan/hwasan_allocator.h
@@ -13,13 +13,15 @@
 #ifndef HWASAN_ALLOCATOR_H
 #define HWASAN_ALLOCATOR_H
 
+#include "hwasan.h"
+#include "hwasan_interface_internal.h"
+#include "hwasan_poisoning.h"
 #include "sanitizer_common/sanitizer_allocator.h"
 #include "sanitizer_common/sanitizer_allocator_checks.h"
 #include "sanitizer_common/sanitizer_allocator_interface.h"
 #include "sanitizer_common/sanitizer_allocator_report.h"
 #include "sanitizer_common/sanitizer_common.h"
 #include "sanitizer_common/sanitizer_ring_buffer.h"
-#include "hwasan_poisoning.h"
 
 #if !defined(__aarch64__) && !defined(__x86_64__)
 #error Unsupported platform
@@ -102,6 +104,11 @@
 
 void GetAllocatorStats(AllocatorStatCounters s);
 
+inline bool InTaggableRegion(uptr addr) {
+  // TODO: specialize for x86 once we use aliasing mode in the allocator.
+  return true;
+}
+
 }  // namespace __hwasan
 
 #endif  // HWASAN_ALLOCATOR_H
diff --git a/compiler-rt/lib/hwasan/hwasan_allocator.cpp b/compiler-rt/lib/hwasan/hwasan_allocator.cpp
--- a/compiler-rt/lib/hwasan/hwasan_allocator.cpp
+++ b/compiler-rt/lib/hwasan/hwasan_allocator.cpp
@@ -29,8 +29,8 @@
 static SpinMutex fallback_mutex;
 static atomic_uint8_t hwasan_allocator_tagging_enabled;
 
-static const tag_t kFallbackAllocTag = 0xBB;
-static const tag_t kFallbackFreeTag = 0xBC;
+static constexpr tag_t kFallbackAllocTag = 0xBB & kTagMask;
+static constexpr tag_t kFallbackFreeTag = 0xBC;
 
 enum RightAlignMode {
   kRightAlignNever,
@@ -149,7 +149,8 @@
   // false. When tag_in_malloc = false and tag_in_free = true malloc needs to
   // retag to 0.
   if ((flags()->tag_in_malloc || flags()->tag_in_free) &&
-      atomic_load_relaxed(&hwasan_allocator_tagging_enabled)) {
+      atomic_load_relaxed(&hwasan_allocator_tagging_enabled) &&
+      InTaggableRegion(reinterpret_cast<uptr>(user_ptr))) {
     if (flags()->tag_in_malloc && malloc_bisect(stack, orig_size)) {
       tag_t tag = t ? t->GenerateRandomTag() : kFallbackAllocTag;
       uptr tag_size = orig_size ? orig_size : 1;
@@ -175,6 +176,8 @@
 static bool PointerAndMemoryTagsMatch(void *tagged_ptr) {
   CHECK(tagged_ptr);
   uptr tagged_uptr = reinterpret_cast<uptr>(tagged_ptr);
+  if (!InTaggableRegion(tagged_uptr))
+    return true;
   tag_t mem_tag = *reinterpret_cast<tag_t *>(
       MemToShadow(reinterpret_cast<uptr>(UntagPtr(tagged_ptr))));
   return PossiblyShortTagMatches(mem_tag, tagged_uptr, 1);
@@ -187,7 +190,9 @@
   if (!PointerAndMemoryTagsMatch(tagged_ptr))
     ReportInvalidFree(stack, reinterpret_cast<uptr>(tagged_ptr));
 
-  void *untagged_ptr = UntagPtr(tagged_ptr);
+  void *untagged_ptr = InTaggableRegion(reinterpret_cast<uptr>(tagged_ptr))
+                           ? UntagPtr(tagged_ptr)
+                           : tagged_ptr;
   void *aligned_ptr = reinterpret_cast<void *>(
       RoundDownTo(reinterpret_cast<uptr>(untagged_ptr), kShadowAlignment));
   Metadata *meta =
@@ -220,9 +225,13 @@
     internal_memset(aligned_ptr, flags()->free_fill_byte, fill_size);
   }
   if (flags()->tag_in_free && malloc_bisect(stack, 0) &&
-      atomic_load_relaxed(&hwasan_allocator_tagging_enabled))
+      atomic_load_relaxed(&hwasan_allocator_tagging_enabled) &&
+      InTaggableRegion(reinterpret_cast<uptr>(tagged_ptr))) {
+    // Always store full 8-bit tags on free to maximize UAF detection.
+    tag_t tag = t ? t->GenerateRandomTag(/*num_bits=*/8) : kFallbackFreeTag;
     TagMemoryAligned(reinterpret_cast<uptr>(aligned_ptr), TaggedSize(orig_size),
-                     t ? t->GenerateRandomTag() : kFallbackFreeTag);
+                     tag);
+  }
   if (t) {
     allocator.Deallocate(t->allocator_cache(), aligned_ptr);
     if (auto *ha = t->heap_allocations())
diff --git a/compiler-rt/lib/hwasan/hwasan_checks.h b/compiler-rt/lib/hwasan/hwasan_checks.h
--- a/compiler-rt/lib/hwasan/hwasan_checks.h
+++ b/compiler-rt/lib/hwasan/hwasan_checks.h
@@ -13,6 +13,7 @@
 #ifndef HWASAN_CHECKS_H
 #define HWASAN_CHECKS_H
 
+#include "hwasan_allocator.h"
 #include "hwasan_mapping.h"
 #include "sanitizer_common/sanitizer_common.h"
 
@@ -81,6 +82,8 @@
 
 template <ErrorAction EA, AccessType AT, unsigned LogSize>
 __attribute__((always_inline, nodebug)) static void CheckAddress(uptr p) {
+  if (!InTaggableRegion(p))
+    return;
   uptr ptr_raw = p & ~kAddressTagMask;
   tag_t mem_tag = *(tag_t *)MemToShadow(ptr_raw);
   if (UNLIKELY(!PossiblyShortTagMatches(mem_tag, p, 1 << LogSize))) {
@@ -94,7 +97,7 @@
 template <ErrorAction EA, AccessType AT>
 __attribute__((always_inline, nodebug)) static void CheckAddressSized(uptr p,
                                                                       uptr sz) {
-  if (sz == 0)
+  if (sz == 0 || !InTaggableRegion(p))
     return;
   tag_t ptr_tag = GetTagFromPointer(p);
   uptr ptr_raw = p & ~kAddressTagMask;
diff --git a/compiler-rt/lib/hwasan/hwasan_linux.cpp b/compiler-rt/lib/hwasan/hwasan_linux.cpp
--- a/compiler-rt/lib/hwasan/hwasan_linux.cpp
+++ b/compiler-rt/lib/hwasan/hwasan_linux.cpp
@@ -222,7 +222,9 @@
 }
 
 bool MemIsApp(uptr p) {
+#if !defined(__x86_64__)  // Aliasing-mode base tags are non-zero.
   CHECK(GetTagFromPointer(p) == 0);
+#endif
   return p >= kHighMemStart || (p >= kLowMemStart && p <= kLowMemEnd);
 }
 