diff --git a/compiler-rt/lib/hwasan/hwasan_allocator.h b/compiler-rt/lib/hwasan/hwasan_allocator.h --- a/compiler-rt/lib/hwasan/hwasan_allocator.h +++ b/compiler-rt/lib/hwasan/hwasan_allocator.h @@ -13,13 +13,15 @@ #ifndef HWASAN_ALLOCATOR_H #define HWASAN_ALLOCATOR_H +#include "hwasan.h" +#include "hwasan_interface_internal.h" +#include "hwasan_poisoning.h" #include "sanitizer_common/sanitizer_allocator.h" #include "sanitizer_common/sanitizer_allocator_checks.h" #include "sanitizer_common/sanitizer_allocator_interface.h" #include "sanitizer_common/sanitizer_allocator_report.h" #include "sanitizer_common/sanitizer_common.h" #include "sanitizer_common/sanitizer_ring_buffer.h" -#include "hwasan_poisoning.h" #if !defined(__aarch64__) && !defined(__x86_64__) #error Unsupported platform @@ -102,6 +104,11 @@ void GetAllocatorStats(AllocatorStatCounters s); +inline bool InTaggableRegion(uptr addr) { + // TODO: specialize for x86 once we use aliasing mode in the allocator. + return true; +} + } // namespace __hwasan #endif // HWASAN_ALLOCATOR_H diff --git a/compiler-rt/lib/hwasan/hwasan_allocator.cpp b/compiler-rt/lib/hwasan/hwasan_allocator.cpp --- a/compiler-rt/lib/hwasan/hwasan_allocator.cpp +++ b/compiler-rt/lib/hwasan/hwasan_allocator.cpp @@ -100,6 +100,14 @@ return new_size; } +static inline tag_t BaseTag(void *untagged_ptr) { +#if defined(__x86_64__) + return GetTagFromPointer(reinterpret_cast<uptr>(untagged_ptr)); +#else + return 0; +#endif +} + static void *HwasanAllocate(StackTrace *stack, uptr orig_size, uptr alignment, bool zeroise) { if (orig_size > kMaxAllowedMallocSize) { @@ -145,13 +153,15 @@ } void *user_ptr = allocated; + tag_t tag = BaseTag(user_ptr); // Tagging can only be skipped when both tag_in_malloc and tag_in_free are // false. When tag_in_malloc = false and tag_in_free = true malloc needs to // retag to 0. 
if ((flags()->tag_in_malloc || flags()->tag_in_free) && - atomic_load_relaxed(&hwasan_allocator_tagging_enabled)) { + atomic_load_relaxed(&hwasan_allocator_tagging_enabled) && + InTaggableRegion(reinterpret_cast<uptr>(user_ptr))) { if (flags()->tag_in_malloc && malloc_bisect(stack, orig_size)) { - tag_t tag = t ? t->GenerateRandomTag() : kFallbackAllocTag; + tag |= t ? t->GenerateRandomTag() : kFallbackAllocTag; uptr tag_size = orig_size ? orig_size : 1; uptr full_granule_size = RoundDownTo(tag_size, kShadowAlignment); user_ptr = @@ -164,9 +174,16 @@ short_granule[kShadowAlignment - 1] = tag; } } else { - user_ptr = (void *)TagMemoryAligned((uptr)user_ptr, size, 0); + user_ptr = (void *)TagMemoryAligned((uptr)user_ptr, size, tag); } } +#if defined(__x86_64__) + else { + // In aliasing mode our base tag is non-zero, so we must tag the shadow to + // avoid mismatches. + TagMemoryAligned(reinterpret_cast<uptr>(user_ptr), size, tag); + } +#endif HWASAN_MALLOC_HOOK(user_ptr, size); return user_ptr; @@ -175,6 +192,8 @@ static bool PointerAndMemoryTagsMatch(void *tagged_ptr) { CHECK(tagged_ptr); uptr tagged_uptr = reinterpret_cast<uptr>(tagged_ptr); + if (!InTaggableRegion(tagged_uptr)) + return true; tag_t mem_tag = *reinterpret_cast<tag_t *>( MemToShadow(reinterpret_cast<uptr>(UntagPtr(tagged_ptr)))); return PossiblyShortTagMatches(mem_tag, tagged_uptr, 1); @@ -187,7 +206,9 @@ if (!PointerAndMemoryTagsMatch(tagged_ptr)) ReportInvalidFree(stack, reinterpret_cast<uptr>(tagged_ptr)); - void *untagged_ptr = UntagPtr(tagged_ptr); + void *untagged_ptr = InTaggableRegion(reinterpret_cast<uptr>(tagged_ptr)) + ? 
UntagPtr(tagged_ptr) + : tagged_ptr; void *aligned_ptr = reinterpret_cast<void *>( RoundDownTo(reinterpret_cast<uptr>(untagged_ptr), kShadowAlignment)); Metadata *meta = @@ -220,9 +241,13 @@ internal_memset(aligned_ptr, flags()->free_fill_byte, fill_size); } if (flags()->tag_in_free && malloc_bisect(stack, 0) && - atomic_load_relaxed(&hwasan_allocator_tagging_enabled)) + atomic_load_relaxed(&hwasan_allocator_tagging_enabled) && + InTaggableRegion(reinterpret_cast<uptr>(tagged_ptr))) { + tag_t tag = BaseTag(untagged_ptr); + tag |= t ? t->GenerateRandomTag() : kFallbackFreeTag; TagMemoryAligned(reinterpret_cast<uptr>(aligned_ptr), TaggedSize(orig_size), - t ? t->GenerateRandomTag() : kFallbackFreeTag); + tag); + } if (t) { allocator.Deallocate(t->allocator_cache(), aligned_ptr); if (auto *ha = t->heap_allocations()) diff --git a/compiler-rt/lib/hwasan/hwasan_checks.h b/compiler-rt/lib/hwasan/hwasan_checks.h --- a/compiler-rt/lib/hwasan/hwasan_checks.h +++ b/compiler-rt/lib/hwasan/hwasan_checks.h @@ -13,6 +13,7 @@ #ifndef HWASAN_CHECKS_H #define HWASAN_CHECKS_H +#include "hwasan_allocator.h" #include "hwasan_mapping.h" #include "sanitizer_common/sanitizer_common.h" @@ -81,6 +82,8 @@ template <ErrorAction EA, AccessType AT, unsigned LogSize> __attribute__((always_inline, nodebug)) static void CheckAddress(uptr p) { + if (!InTaggableRegion(p)) + return; uptr ptr_raw = p & kAddressUntagMask; tag_t mem_tag = *(tag_t *)MemToShadow(ptr_raw); if (UNLIKELY(!PossiblyShortTagMatches(mem_tag, p, 1 << LogSize))) { @@ -94,7 +97,7 @@ template <ErrorAction EA, AccessType AT> __attribute__((always_inline, nodebug)) static void CheckAddressSized(uptr p, uptr sz) { - if (sz == 0) + if (sz == 0 || !InTaggableRegion(p)) return; tag_t ptr_tag = GetTagFromPointer(p); uptr ptr_raw = p & kAddressUntagMask; diff --git a/compiler-rt/lib/hwasan/hwasan_linux.cpp b/compiler-rt/lib/hwasan/hwasan_linux.cpp --- a/compiler-rt/lib/hwasan/hwasan_linux.cpp +++ b/compiler-rt/lib/hwasan/hwasan_linux.cpp @@ -222,7 +222,9 @@ } bool MemIsApp(uptr p) { +#if !defined(__x86_64__) // Aliasing-mode base tags are
non-zero. CHECK(GetTagFromPointer(p) == 0); +#endif return p >= kHighMemStart || (p >= kLowMemStart && p <= kLowMemEnd); }