diff --git a/compiler-rt/lib/tsan/rtl/tsan_platform.h b/compiler-rt/lib/tsan/rtl/tsan_platform.h
--- a/compiler-rt/lib/tsan/rtl/tsan_platform.h
+++ b/compiler-rt/lib/tsan/rtl/tsan_platform.h
@@ -595,6 +595,48 @@
 extern uptr vmaSize;
 #endif
 
+template <typename Func>
+ALWAYS_INLINE auto SelectMapping(uptr arg) {
+#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
+  switch (vmaSize) {
+    case 39:
+      return Func::template Apply<Mapping39>(arg);
+    case 42:
+      return Func::template Apply<Mapping42>(arg);
+    case 48:
+      return Func::template Apply<Mapping48>(arg);
+  }
+  DCHECK(0);
+  return 0;
+#elif defined(__powerpc64__)
+  switch (vmaSize) {
+# if !SANITIZER_GO
+    case 44:
+      return Func::template Apply<Mapping44>(arg);
+# endif
+    case 46:
+      return Func::template Apply<Mapping46>(arg);
+    case 47:
+      return Func::template Apply<Mapping47>(arg);
+  }
+  DCHECK(0);
+  return 0;
+#elif defined(__mips64)
+  switch (vmaSize) {
+# if !SANITIZER_GO
+    case 40:
+      return Func::template Apply<Mapping40>(arg);
+# else
+    case 47:
+      return Func::template Apply<Mapping47>(arg);
+# endif
+  }
+  DCHECK(0);
+  return 0;
+#else
+  return Func::template Apply<Mapping>(arg);
+#endif
+}
 
 enum MappingType {
   MAPPING_LO_APP_BEG,
@@ -618,9 +660,10 @@
   MAPPING_VDSO_BEG,
 };
 
-template <typename Mapping, int Type>
-uptr MappingImpl(void) {
-  switch (Type) {
+struct MappingField {
+  template <typename Mapping>
+  static uptr Apply(uptr type) {
+    switch (type) {
 #if !SANITIZER_GO
     case MAPPING_LO_APP_BEG: return Mapping::kLoAppMemBeg;
     case MAPPING_LO_APP_END: return Mapping::kLoAppMemEnd;
@@ -643,98 +686,56 @@
     case MAPPING_META_SHADOW_END: return Mapping::kMetaShadowEnd;
     case MAPPING_TRACE_BEG: return Mapping::kTraceMemBeg;
     case MAPPING_TRACE_END: return Mapping::kTraceMemEnd;
+    }
+    DCHECK(0);
+    return 0;
   }
-}
-
-template <int Type>
-uptr MappingArchImpl(void) {
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return MappingImpl<Mapping39, Type>();
-    case 42: return MappingImpl<Mapping42, Type>();
-    case 48: return MappingImpl<Mapping48, Type>();
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return MappingImpl<Mapping44, Type>();
-#endif
-    case 46: return MappingImpl<Mapping46, Type>();
-    case 47: return MappingImpl<Mapping47, Type>();
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return MappingImpl<Mapping40, Type>();
-#else
-    case 47: return MappingImpl<Mapping47, Type>();
-#endif
-  }
-  DCHECK(0);
-  return 0;
-#else
-  return MappingImpl<Mapping, Type>();
-#endif
-}
+};
 
 #if !SANITIZER_GO
 ALWAYS_INLINE
 uptr LoAppMemBeg(void) {
-  return MappingArchImpl<MAPPING_LO_APP_BEG>();
+  return SelectMapping<MappingField>(MAPPING_LO_APP_BEG);
 }
 ALWAYS_INLINE
 uptr LoAppMemEnd(void) {
-  return MappingArchImpl<MAPPING_LO_APP_END>();
+  return SelectMapping<MappingField>(MAPPING_LO_APP_END);
 }
 
 #ifdef TSAN_MID_APP_RANGE
 ALWAYS_INLINE
 uptr MidAppMemBeg(void) {
-  return MappingArchImpl<MAPPING_MID_APP_BEG>();
+  return SelectMapping<MappingField>(MAPPING_MID_APP_BEG);
}
 ALWAYS_INLINE
 uptr MidAppMemEnd(void) {
-  return MappingArchImpl<MAPPING_MID_APP_END>();
+  return SelectMapping<MappingField>(MAPPING_MID_APP_END);
 }
 #endif
 
 ALWAYS_INLINE
-uptr HeapMemBeg(void) {
-  return MappingArchImpl<MAPPING_HEAP_BEG>();
-}
+uptr HeapMemBeg(void) { return SelectMapping<MappingField>(MAPPING_HEAP_BEG); }
 ALWAYS_INLINE
-uptr HeapMemEnd(void) {
-  return MappingArchImpl<MAPPING_HEAP_END>();
-}
+uptr HeapMemEnd(void) { return SelectMapping<MappingField>(MAPPING_HEAP_END); }
 
 ALWAYS_INLINE
 uptr HiAppMemBeg(void) {
-  return MappingArchImpl<MAPPING_HI_APP_BEG>();
+  return SelectMapping<MappingField>(MAPPING_HI_APP_BEG);
 }
 ALWAYS_INLINE
 uptr HiAppMemEnd(void) {
-  return MappingArchImpl<MAPPING_HI_APP_END>();
+  return SelectMapping<MappingField>(MAPPING_HI_APP_END);
 }
 
 ALWAYS_INLINE
-uptr VdsoBeg(void) {
-  return MappingArchImpl<MAPPING_VDSO_BEG>();
-}
+uptr VdsoBeg(void) { return SelectMapping<MappingField>(MAPPING_VDSO_BEG); }
 
 #else
 
 ALWAYS_INLINE
-uptr AppMemBeg(void) {
-  return MappingArchImpl<MAPPING_APP_BEG>();
-}
+uptr AppMemBeg(void) { return SelectMapping<MappingField>(MAPPING_APP_BEG); }
 ALWAYS_INLINE
-uptr AppMemEnd(void) {
-  return MappingArchImpl<MAPPING_APP_END>();
-}
+uptr AppMemEnd(void) { return SelectMapping<MappingField>(MAPPING_APP_END); }
 
 #endif
 
@@ -772,35 +773,31 @@
 }
 
 ALWAYS_INLINE
-uptr ShadowBeg(void) {
-  return MappingArchImpl<MAPPING_SHADOW_BEG>();
-}
+uptr ShadowBeg(void) { return SelectMapping<MappingField>(MAPPING_SHADOW_BEG); }
 ALWAYS_INLINE
-uptr ShadowEnd(void) {
-  return MappingArchImpl<MAPPING_SHADOW_END>();
-}
+uptr ShadowEnd(void) { return SelectMapping<MappingField>(MAPPING_SHADOW_END); }
 
 ALWAYS_INLINE
 uptr MetaShadowBeg(void) {
-  return MappingArchImpl<MAPPING_META_SHADOW_BEG>();
+  return SelectMapping<MappingField>(MAPPING_META_SHADOW_BEG);
 }
 ALWAYS_INLINE
 uptr MetaShadowEnd(void) {
-  return MappingArchImpl<MAPPING_META_SHADOW_END>();
+  return SelectMapping<MappingField>(MAPPING_META_SHADOW_END);
 }
 
 ALWAYS_INLINE
 uptr TraceMemBeg(void) {
-  return MappingArchImpl<MAPPING_TRACE_BEG>();
+  return SelectMapping<MappingField>(MAPPING_TRACE_BEG);
 }
 ALWAYS_INLINE
 uptr TraceMemEnd(void) {
-  return MappingArchImpl<MAPPING_TRACE_END>();
+  return SelectMapping<MappingField>(MAPPING_TRACE_END);
 }
 
-
-template <typename Mapping>
-bool IsAppMemImpl(uptr mem) {
+struct IsAppMemImpl {
+  template <typename Mapping>
+  static bool Apply(uptr mem) {
 #if !SANITIZER_GO
   return (mem >= Mapping::kHeapMemBeg && mem < Mapping::kHeapMemEnd) ||
 # ifdef TSAN_MID_APP_RANGE
@@ -811,184 +808,63 @@
 #else
   return mem >= Mapping::kAppMemBeg && mem < Mapping::kAppMemEnd;
 #endif
-}
-
-ALWAYS_INLINE
-bool IsAppMem(uptr mem) {
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return IsAppMemImpl<Mapping39>(mem);
-    case 42: return IsAppMemImpl<Mapping42>(mem);
-    case 48: return IsAppMemImpl<Mapping48>(mem);
   }
-  DCHECK(0);
-  return false;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return IsAppMemImpl<Mapping44>(mem);
-#endif
-    case 46: return IsAppMemImpl<Mapping46>(mem);
-    case 47: return IsAppMemImpl<Mapping47>(mem);
-  }
-  DCHECK(0);
-  return false;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return IsAppMemImpl<Mapping40>(mem);
-#else
-    case 47: return IsAppMemImpl<Mapping47>(mem);
-#endif
-  }
-  DCHECK(0);
-  return false;
-#else
-  return IsAppMemImpl<Mapping>(mem);
-#endif
-}
+};
+ALWAYS_INLINE
+bool IsAppMem(uptr mem) { return SelectMapping<IsAppMemImpl>(mem); }
 
-template <typename Mapping>
-bool IsShadowMemImpl(uptr mem) {
-  return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
-}
+struct IsShadowMemImpl {
+  template <typename Mapping>
+  static bool Apply(uptr mem) {
+    return mem >= Mapping::kShadowBeg && mem <= Mapping::kShadowEnd;
+  }
+};
 
 ALWAYS_INLINE
 bool IsShadowMem(RawShadow *p) {
-  uptr mem = reinterpret_cast<uptr>(p);
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return IsShadowMemImpl<Mapping39>(mem);
-    case 42: return IsShadowMemImpl<Mapping42>(mem);
-    case 48: return IsShadowMemImpl<Mapping48>(mem);
-  }
-  DCHECK(0);
-  return false;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return IsShadowMemImpl<Mapping44>(mem);
-#endif
-    case 46: return IsShadowMemImpl<Mapping46>(mem);
-    case 47: return IsShadowMemImpl<Mapping47>(mem);
-  }
-  DCHECK(0);
-  return false;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return IsShadowMemImpl<Mapping40>(mem);
-#else
-    case 47: return IsShadowMemImpl<Mapping47>(mem);
-#endif
-  }
-  DCHECK(0);
-  return false;
-#else
-  return IsShadowMemImpl<Mapping>(mem);
-#endif
+  return SelectMapping<IsShadowMemImpl>(reinterpret_cast<uptr>(p));
 }
 
-template <typename Mapping>
-bool IsMetaMemImpl(uptr mem) {
-  return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
-}
+struct IsMetaMemImpl {
+  template <typename Mapping>
+  static bool Apply(uptr mem) {
+    return mem >= Mapping::kMetaShadowBeg && mem <= Mapping::kMetaShadowEnd;
+  }
+};
 
 ALWAYS_INLINE
 bool IsMetaMem(const u32 *p) {
-  uptr mem = reinterpret_cast<uptr>(p);
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return IsMetaMemImpl<Mapping39>(mem);
-    case 42: return IsMetaMemImpl<Mapping42>(mem);
-    case 48: return IsMetaMemImpl<Mapping48>(mem);
-  }
-  DCHECK(0);
-  return false;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return IsMetaMemImpl<Mapping44>(mem);
-#endif
-    case 46: return IsMetaMemImpl<Mapping46>(mem);
-    case 47: return IsMetaMemImpl<Mapping47>(mem);
-  }
-  DCHECK(0);
-  return false;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return IsMetaMemImpl<Mapping40>(mem);
-#else
-    case 47: return IsMetaMemImpl<Mapping47>(mem);
-#endif
-  }
-  DCHECK(0);
-  return false;
-#else
-  return IsMetaMemImpl<Mapping>(mem);
-#endif
+  return SelectMapping<IsMetaMemImpl>(reinterpret_cast<uptr>(p));
 }
 
-template <typename Mapping>
-uptr MemToShadowRaw(uptr x) {
-  DCHECK(IsAppMem(x));
+struct MemToShadowImpl {
+  template <typename Mapping>
+  static uptr Apply(uptr x) {
+    DCHECK(IsAppMemImpl::Apply<Mapping>(x));
 #if !SANITIZER_GO
-  return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1)))
-      ^ Mapping::kAppMemXor) * kShadowCnt;
+    return (((x) & ~(Mapping::kAppMemMsk | (kShadowCell - 1))) ^
+            Mapping::kAppMemXor) *
+           kShadowCnt;
 #else
 # ifndef SANITIZER_WINDOWS
-  return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
+    return ((x & ~(kShadowCell - 1)) * kShadowCnt) | Mapping::kShadowBeg;
 # else
-  return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
+    return ((x & ~(kShadowCell - 1)) * kShadowCnt) + Mapping::kShadowBeg;
 # endif
 #endif
-}
-
-template <typename Mapping>
-RawShadow *MemToShadowImpl(uptr x) {
-  return reinterpret_cast<RawShadow *>(MemToShadowRaw<Mapping>(x));
-}
+  }
+};
 
 ALWAYS_INLINE
 RawShadow *MemToShadow(uptr x) {
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return MemToShadowImpl<Mapping39>(x);
-    case 42: return MemToShadowImpl<Mapping42>(x);
-    case 48: return MemToShadowImpl<Mapping48>(x);
-  }
-  DCHECK(0);
-  return nullptr;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return MemToShadowImpl<Mapping44>(x);
-#endif
-    case 46: return MemToShadowImpl<Mapping46>(x);
-    case 47: return MemToShadowImpl<Mapping47>(x);
-  }
-  DCHECK(0);
-  return nullptr;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return MemToShadowImpl<Mapping40>(x);
-#else
-    case 47: return MemToShadowImpl<Mapping47>(x);
-#endif
-  }
-  DCHECK(0);
-  return nullptr;
-#else
-  return MemToShadowImpl<Mapping>(x);
-#endif
+  return reinterpret_cast<RawShadow *>(SelectMapping<MemToShadowImpl>(x));
 }
 
-template <typename Mapping>
-u32 *MemToMetaImpl(uptr x) {
-  DCHECK(IsAppMem(x));
+struct MemToMetaImpl {
+  template <typename Mapping>
+  static u32 *Apply(uptr x) {
+    DCHECK(IsAppMemImpl::Apply<Mapping>(x));
 #if !SANITIZER_GO
   return (u32*)(((((x) & ~(Mapping::kAppMemMsk | (kMetaShadowCell - 1)))) /
       kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
@@ -1001,47 +877,16 @@
       kMetaShadowCell * kMetaShadowSize) + Mapping::kMetaShadowBeg);
 # endif
 #endif
-}
+  }
+};
 
 ALWAYS_INLINE
-u32 *MemToMeta(uptr x) {
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return MemToMetaImpl<Mapping39>(x);
-    case 42: return MemToMetaImpl<Mapping42>(x);
-    case 48: return MemToMetaImpl<Mapping48>(x);
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return MemToMetaImpl<Mapping44>(x);
-#endif
-    case 46: return MemToMetaImpl<Mapping46>(x);
-    case 47: return MemToMetaImpl<Mapping47>(x);
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return MemToMetaImpl<Mapping40>(x);
-#else
-    case 47: return MemToMetaImpl<Mapping47>(x);
-#endif
-  }
-  DCHECK(0);
-  return 0;
-#else
-  return MemToMetaImpl<Mapping>(x);
-#endif
-}
+u32 *MemToMeta(uptr x) { return SelectMapping<MemToMetaImpl>(x); }
 
-template <typename Mapping>
-uptr ShadowToMemImpl(RawShadow *s) {
-  DCHECK(IsShadowMem(s));
-  uptr sp = reinterpret_cast<uptr>(s);
+struct ShadowToMemImpl {
+  template <typename Mapping>
+  static uptr Apply(uptr sp) {
+    DCHECK(IsShadowMemImpl::Apply<Mapping>(sp));
 #if !SANITIZER_GO
   // The shadow mapping is non-linear and we've lost some bits, so we don't have
   // an easy way to restore the original app address. But the mapping is a
@@ -1050,13 +895,13 @@
   // same address.
   uptr p = (sp / kShadowCnt) ^ Mapping::kAppMemXor;
   if (p >= Mapping::kLoAppMemBeg && p < Mapping::kLoAppMemEnd &&
-      MemToShadow(p) == s)
+      MemToShadowImpl::Apply<Mapping>(p) == sp)
     return p;
 # ifdef TSAN_MID_APP_RANGE
   p = ((sp / kShadowCnt) ^ Mapping::kAppMemXor) +
       (Mapping::kMidAppMemBeg & Mapping::kAppMemMsk);
   if (p >= Mapping::kMidAppMemBeg && p < Mapping::kMidAppMemEnd &&
-      MemToShadow(p) == s)
+      MemToShadowImpl::Apply<Mapping>(p) == sp)
     return p;
 # endif
   return ((sp / kShadowCnt) ^ Mapping::kAppMemXor) | Mapping::kAppMemMsk;
@@ -1067,41 +912,12 @@
   return (sp - Mapping::kShadowBeg) / kShadowCnt;
 # endif // SANITIZER_WINDOWS
 #endif
-}
+  }
+};
 
 ALWAYS_INLINE
 uptr ShadowToMem(RawShadow *s) {
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return ShadowToMemImpl<Mapping39>(s);
-    case 42: return ShadowToMemImpl<Mapping42>(s);
-    case 48: return ShadowToMemImpl<Mapping48>(s);
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return ShadowToMemImpl<Mapping44>(s);
-#endif
-    case 46: return ShadowToMemImpl<Mapping46>(s);
-    case 47: return ShadowToMemImpl<Mapping47>(s);
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return ShadowToMemImpl<Mapping40>(s);
-#else
-    case 47: return ShadowToMemImpl<Mapping47>(s);
-#endif
-  }
-  DCHECK(0);
-  return 0;
-#else
-  return ShadowToMemImpl<Mapping>(s);
-#endif
+  return SelectMapping<ShadowToMemImpl>(reinterpret_cast<uptr>(s));
 }
 
 // The additional page is to catch shadow stack overflow as paging fault.
@@ -1109,90 +925,31 @@
 const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
     + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1);
 
-template <typename Mapping>
-uptr GetThreadTraceImpl(int tid) {
-  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
-  DCHECK_LT(p, Mapping::kTraceMemEnd);
-  return p;
-}
+struct GetThreadTraceImpl {
+  template <typename Mapping>
+  static uptr Apply(uptr tid) {
+    uptr p = Mapping::kTraceMemBeg + tid * kTotalTraceSize;
+    DCHECK_LT(p, Mapping::kTraceMemEnd);
+    return p;
+  }
+};
 
 ALWAYS_INLINE
-uptr GetThreadTrace(int tid) {
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return GetThreadTraceImpl<Mapping39>(tid);
-    case 42: return GetThreadTraceImpl<Mapping42>(tid);
-    case 48: return GetThreadTraceImpl<Mapping48>(tid);
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return GetThreadTraceImpl<Mapping44>(tid);
-#endif
-    case 46: return GetThreadTraceImpl<Mapping46>(tid);
-    case 47: return GetThreadTraceImpl<Mapping47>(tid);
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return GetThreadTraceImpl<Mapping40>(tid);
-#else
-    case 47: return GetThreadTraceImpl<Mapping47>(tid);
-#endif
+uptr GetThreadTrace(int tid) { return SelectMapping<GetThreadTraceImpl>(tid); }
+
+struct GetThreadTraceHeaderImpl {
+  template <typename Mapping>
+  static uptr Apply(uptr tid) {
+    uptr p = Mapping::kTraceMemBeg + tid * kTotalTraceSize +
+             kTraceSize * sizeof(Event);
+    DCHECK_LT(p, Mapping::kTraceMemEnd);
+    return p;
   }
-  DCHECK(0);
-  return 0;
-#else
-  return GetThreadTraceImpl<Mapping>(tid);
-#endif
-}
-
-
-template <typename Mapping>
-uptr GetThreadTraceHeaderImpl(int tid) {
-  uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
-      + kTraceSize * sizeof(Event);
-  DCHECK_LT(p, Mapping::kTraceMemEnd);
-  return p;
-}
+};
 
 ALWAYS_INLINE
 uptr GetThreadTraceHeader(int tid) {
-#if defined(__aarch64__) && !defined(__APPLE__) && !SANITIZER_GO
-  switch (vmaSize) {
-    case 39: return GetThreadTraceHeaderImpl<Mapping39>(tid);
-    case 42: return GetThreadTraceHeaderImpl<Mapping42>(tid);
-    case 48: return GetThreadTraceHeaderImpl<Mapping48>(tid);
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__powerpc64__)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 44: return GetThreadTraceHeaderImpl<Mapping44>(tid);
-#endif
-    case 46: return GetThreadTraceHeaderImpl<Mapping46>(tid);
-    case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
-  }
-  DCHECK(0);
-  return 0;
-#elif defined(__mips64)
-  switch (vmaSize) {
-#if !SANITIZER_GO
-    case 40: return GetThreadTraceHeaderImpl<Mapping40>(tid);
-#else
-    case 47: return GetThreadTraceHeaderImpl<Mapping47>(tid);
-#endif
-  }
-  DCHECK(0);
-  return 0;
-#else
-  return GetThreadTraceHeaderImpl<Mapping>(tid);
-#endif
+  return SelectMapping<GetThreadTraceHeaderImpl>(tid);
 }
 
 void InitializePlatform();
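The diff is easier to follow once the dispatch pattern it introduces is seen in isolation. Below is a standalone sketch (not part of the patch) of the same scheme: each per-mapping operation becomes a struct with a static templated `Apply<Mapping>` method, and a single `SelectMapping<Func>` switch translates a runtime value (tsan's `vmaSize`) into a concrete mapping type. The names `MappingA`, `MappingB`, `ShadowBegPlus`, `vma_size`, and all constants below are invented for illustration only; in the patch the functors are `MappingField`, `IsAppMemImpl`, `MemToShadowImpl`, and so on, and the switch covers the real `Mapping39`/`Mapping42`/`Mapping44`/... layouts.

```cpp
#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

// Two made-up address-space layouts standing in for Mapping39, Mapping42, etc.
struct MappingA {
  static constexpr uptr kShadowBeg = 0x010000000000ull;
};
struct MappingB {
  static constexpr uptr kShadowBeg = 0x100000000000ull;
};

// Runtime-detected layout selector, analogous to tsan's global `vmaSize`.
static int vma_size = 39;

// Single dispatch point: maps the runtime value to a concrete Mapping type and
// forwards the argument to Func::Apply<Mapping>.
template <typename Func>
auto SelectMapping(uptr arg) {
  switch (vma_size) {
    case 39:
      return Func::template Apply<MappingA>(arg);
    case 42:
      return Func::template Apply<MappingB>(arg);
  }
  // Value-initialized fallback keeps the deduced return type consistent.
  return decltype(Func::template Apply<MappingA>(arg)){};
}

// One operation written once, as a functor with a static templated Apply,
// instead of once per per-architecture/per-VMA-size switch.
struct ShadowBegPlus {
  template <typename Mapping>
  static uptr Apply(uptr offset) {
    return Mapping::kShadowBeg + offset;
  }
};

int main() {
  vma_size = 39;
  std::printf("39-bit layout: 0x%llx\n",
              (unsigned long long)SelectMapping<ShadowBegPlus>(0x10));
  vma_size = 42;
  std::printf("42-bit layout: 0x%llx\n",
              (unsigned long long)SelectMapping<ShadowBegPlus>(0x10));
  return 0;
}
```

The payoff visible in the patch is that the per-architecture `switch (vmaSize)` now exists in exactly one place: helpers such as `IsAppMem`, `MemToShadow`, and `GetThreadTrace` shrink to one-line calls through `SelectMapping` instead of each repeating the aarch64/powerpc64/mips64 cases.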