diff --git a/compiler-rt/lib/scudo/standalone/combined.h b/compiler-rt/lib/scudo/standalone/combined.h
--- a/compiler-rt/lib/scudo/standalone/combined.h
+++ b/compiler-rt/lib/scudo/standalone/combined.h
@@ -983,7 +983,9 @@
     if (UNLIKELY(NewHeader.ClassId && useMemoryTagging())) {
       u8 PrevTag = extractTag(loadTag(reinterpret_cast<uptr>(Ptr)));
       uptr TaggedBegin, TaggedEnd;
-      setRandomTag(Ptr, Size, &TaggedBegin, &TaggedEnd);
+      // Exclude the previous tag so that immediate use after free is detected
+      // 100% of the time.
+      setRandomTag(Ptr, Size, 1UL << PrevTag, &TaggedBegin, &TaggedEnd);
       storeDeallocationStackMaybe(Ptr, PrevTag);
     }
     // If the quarantine is disabled, the actual size of a chunk is 0 or larger
diff --git a/compiler-rt/lib/scudo/standalone/memtag.h b/compiler-rt/lib/scudo/standalone/memtag.h
--- a/compiler-rt/lib/scudo/standalone/memtag.h
+++ b/compiler-rt/lib/scudo/standalone/memtag.h
@@ -93,8 +93,8 @@
   }
 };
 
-inline void setRandomTag(void *Ptr, uptr Size, uptr *TaggedBegin,
-                         uptr *TaggedEnd) {
+inline void setRandomTag(void *Ptr, uptr Size, uptr ExcludeMask,
+                         uptr *TaggedBegin, uptr *TaggedEnd) {
   void *End;
   __asm__ __volatile__(
       R"(
@@ -102,7 +102,7 @@
 
     // Set a random tag for Ptr in TaggedPtr. This needs to happen even if
     // Size = 0 so that TaggedPtr ends up pointing at a valid address.
-    irg %[TaggedPtr], %[Ptr]
+    irg %[TaggedPtr], %[Ptr], %[ExcludeMask]
     mov %[Cur], %[TaggedPtr]
 
     // Skip the loop if Size = 0. We don't want to do any tagging in this case.
@@ -120,9 +120,9 @@
 
   2:
   )"
-      : [ TaggedPtr ] "=&r"(*TaggedBegin), [ Cur ] "=&r"(*TaggedEnd),
-        [ End ] "=&r"(End)
-      : [ Ptr ] "r"(Ptr), [ Size ] "r"(Size)
+      :
+      [TaggedPtr] "=&r"(*TaggedBegin), [Cur] "=&r"(*TaggedEnd), [End] "=&r"(End)
+      : [Ptr] "r"(Ptr), [Size] "r"(Size), [ExcludeMask] "r"(ExcludeMask)
       : "memory");
 }
 
@@ -138,7 +138,7 @@
       : "memory");
 
   uptr TaggedBegin, TaggedEnd;
-  setRandomTag(Ptr, Size, &TaggedBegin, &TaggedEnd);
+  setRandomTag(Ptr, Size, 0, &TaggedBegin, &TaggedEnd);
 
   // Finally, set the tag of the granule past the end of the allocation to 0,
   // to catch linear overflows even if a previous larger allocation used the
@@ -225,10 +225,11 @@
   ScopedDisableMemoryTagChecks() {}
 };
 
-inline void setRandomTag(void *Ptr, uptr Size, uptr *TaggedBegin,
-                         uptr *TaggedEnd) {
+inline void setRandomTag(void *Ptr, uptr Size, uptr ExcludeMask,
+                         uptr *TaggedBegin, uptr *TaggedEnd) {
   (void)Ptr;
   (void)Size;
+  (void)ExcludeMask;
   (void)TaggedBegin;
   (void)TaggedEnd;
   UNREACHABLE("memory tagging not supported");
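
Note: the sketch below is a minimal, non-authoritative C++ model of the exclude-mask semantics that the MTE IRG instruction provides in hardware; it is not the actual instruction and the helper names (nextRandom, pickTagExcluding) are hypothetical. It only illustrates why passing 1UL << PrevTag on deallocation guarantees the retagged memory receives a different tag, so an immediate use after free through the stale pointer mismatches 16/16 times instead of 15/16.

// sketch_exclude_mask.cpp -- illustrative only, assumes 4-bit tags as on AArch64 MTE.
#include <cassert>
#include <cstdint>

namespace {

uint64_t RngState = 0x123456789abcdef0ULL;

// xorshift64 PRNG as a stand-in for the hardware random tag generator.
uint64_t nextRandom() {
  RngState ^= RngState << 13;
  RngState ^= RngState >> 7;
  RngState ^= RngState << 17;
  return RngState;
}

// Pick a 4-bit tag whose bit is clear in ExcludeMask (only the low 16 bits
// are meaningful). The caller must not exclude all 16 tags, matching the
// constraint on IRG's exclude set.
uint8_t pickTagExcluding(uint64_t ExcludeMask) {
  for (;;) {
    uint8_t Tag = nextRandom() & 0xf;
    if (!(ExcludeMask & (1ULL << Tag)))
      return Tag;
  }
}

} // namespace

int main() {
  const uint8_t PrevTag = 0x7; // tag the allocation carried before free()
  // With ExcludeMask = 1UL << PrevTag the new tag can never equal the old
  // one, so a dangling pointer still carrying PrevTag always faults.
  for (int I = 0; I != 1000; ++I)
    assert(pickTagExcluding(1ULL << PrevTag) != PrevTag);
  // With ExcludeMask = 0 (the allocation path above) any of the 16 tags may
  // be chosen, which is why the mask is only applied on deallocation.
  return 0;
}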