Index: lib/tsan/rtl/tsan_platform.h
===================================================================
--- lib/tsan/rtl/tsan_platform.h
+++ lib/tsan/rtl/tsan_platform.h
@@ -91,6 +91,43 @@
   static const uptr kVdsoBeg = 0xfffff00000ull;
 };
 #elif defined(__aarch64__)
+#if SANITIZER_ANDROID
+/*
+C/C++ on android/aarch64 (39-bit VMA)
+On Android, the executable is loaded as a shared library, so it is
+mapped near TASK_SIZE*2/3. The vdso is not guaranteed to be mapped
+above the stack, so it is disabled.
+2000 0000 00 - 4000 0000 00: shadow memory
+4000 0000 00 - 5000 0000 00: metainfo
+5000 0000 00 - 5500 0000 00: -
+5500 0000 00 - 5700 0000 00: main binary
+5700 0000 00 - 6000 0000 00: -
+6000 0000 00 - 6200 0000 00: traces
+6200 0000 00 - 7d00 0000 00: -
+7d00 0000 00 - 7e00 0000 00: heap
+7e00 0000 00 - 7fff ffff ff: modules and main thread stack
+*/
+struct Mapping {
+  static const uptr kShadowBeg     = 0x2000000000ull;
+  static const uptr kShadowEnd     = 0x4000000000ull;
+  static const uptr kMetaShadowBeg = 0x4000000000ull;
+  static const uptr kMetaShadowEnd = 0x5000000000ull;
+  static const uptr kLoAppMemBeg   = 0x5500000000ull;
+  static const uptr kLoAppMemEnd   = 0x5700000000ull;
+  static const uptr kTraceMemBeg   = 0x6000000000ull;
+  static const uptr kTraceMemEnd   = 0x6200000000ull;
+  static const uptr kHeapMemBeg    = 0x7d00000000ull;
+  static const uptr kHeapMemEnd    = 0x7e00000000ull;
+  static const uptr kHiAppMemBeg   = 0x7e00000000ull;
+  static const uptr kHiAppMemEnd   = 0x8000000000ull;
+  static const uptr kVdsoBeg       = 0x8000000000ull;
+  static const uptr kLoAppMemAdd   = 0x0300000000ull;
+  static const uptr kLoAppMemMsk   = 0x5000000000ull;
+  static const uptr kHiAppMemMsk   = 0x7000000000ull;
+};
+
+#else
+
 // AArch64 supports multiple VMA which leads to multiple address transformation
 // functions.  To support these multiple VMAS transformations and mappings TSAN
 // runtime for AArch64 uses an external memory read (vmaSize) to select which
@@ -161,7 +198,8 @@
 
 // Indicates the runtime will define the memory regions at runtime.
 #define TSAN_RUNTIME_VMA 1
-#endif
+#endif  // SANITIZER_ANDROID
+#endif  // defined(__aarch64__)
 
 #elif defined(SANITIZER_GO) && !SANITIZER_WINDOWS
@@ -268,7 +306,7 @@
 
 template<int Type>
 uptr MappingArchImpl(void) {
-#ifdef __aarch64__
+#if defined(__aarch64__) && !SANITIZER_ANDROID
   if (vmaSize == 39)
     return MappingImpl<Mapping39, Type>();
   else
@@ -393,7 +431,7 @@
 
 ALWAYS_INLINE
 bool IsAppMem(uptr mem) {
-#ifdef __aarch64__
+#if defined(__aarch64__) && !SANITIZER_ANDROID
   if (vmaSize == 39)
     return IsAppMemImpl<Mapping39>(mem);
   else
@@ -412,7 +450,7 @@
 
 ALWAYS_INLINE
 bool IsShadowMem(uptr mem) {
-#ifdef __aarch64__
+#if defined(__aarch64__) && !SANITIZER_ANDROID
   if (vmaSize == 39)
     return IsShadowMemImpl<Mapping39>(mem);
   else
@@ -431,7 +469,7 @@
 
 ALWAYS_INLINE
 bool IsMetaMem(uptr mem) {
-#ifdef __aarch64__
+#if defined(__aarch64__) && !SANITIZER_ANDROID
   if (vmaSize == 39)
     return IsMetaMemImpl<Mapping39>(mem);
   else
@@ -454,14 +492,28 @@
 #endif
 }
 
+template<typename Mapping>
+uptr MemToShadowAndroidImpl(uptr x) {
+  DCHECK(IsAppMem(x));
+  if (x <= Mapping::kLoAppMemEnd)
+    return ((x & ~(Mapping::kLoAppMemMsk | (kShadowCell - 1)))
+        + Mapping::kLoAppMemAdd) * kShadowCnt;
+  else
+    return ((x & ~(Mapping::kHiAppMemMsk | (kShadowCell - 1)))) * kShadowCnt;
+}
+
 ALWAYS_INLINE
 uptr MemToShadow(uptr x) {
 #ifdef __aarch64__
+#if SANITIZER_ANDROID
+  return MemToShadowAndroidImpl<Mapping>(x);
+#else
   if (vmaSize == 39)
     return MemToShadowImpl<Mapping39>(x);
   else
     return MemToShadowImpl<Mapping42>(x);
   DCHECK(0);
+#endif  // SANITIZER_ANDROID
 #else
   return MemToShadowImpl<Mapping>(x);
 #endif
@@ -481,14 +533,30 @@
 #endif
 }
 
+template<typename Mapping>
+u32 *MemToMetaAndroidImpl(uptr x) {
+  DCHECK(IsAppMem(x));
+  if (x <= Mapping::kLoAppMemEnd)
+    return (u32*)((((x & ~(Mapping::kLoAppMemMsk | (kMetaShadowCell - 1)))
+        + Mapping::kLoAppMemAdd) / kMetaShadowCell * kMetaShadowSize)
+        | Mapping::kMetaShadowBeg);
+  else
+    return (u32*)(((x & ~(Mapping::kHiAppMemMsk | (kMetaShadowCell - 1)))
+        / kMetaShadowCell * kMetaShadowSize) | Mapping::kMetaShadowBeg);
+}
+
 ALWAYS_INLINE
 u32 *MemToMeta(uptr x) {
 #ifdef __aarch64__
+#if SANITIZER_ANDROID
+  return MemToMetaAndroidImpl<Mapping>(x);
+#else
   if (vmaSize == 39)
     return MemToMetaImpl<Mapping39>(x);
   else
     return MemToMetaImpl<Mapping42>(x);
   DCHECK(0);
+#endif  // SANITIZER_ANDROID
 #else
   return MemToMetaImpl<Mapping>(x);
 #endif
@@ -514,14 +582,27 @@
 #endif
 }
 
+template<typename Mapping>
+uptr ShadowToMemAndroidImpl(uptr s) {
+  DCHECK(IsShadowMem(s));
+  if (s <= MemToShadow(Mapping::kLoAppMemEnd))
+    return ((s / kShadowCnt) - Mapping::kLoAppMemAdd) | Mapping::kLoAppMemMsk;
+  else
+    return ((s / kShadowCnt)) | Mapping::kHiAppMemMsk;
+}
+
 ALWAYS_INLINE
 uptr ShadowToMem(uptr s) {
 #ifdef __aarch64__
+#if SANITIZER_ANDROID
+  return ShadowToMemAndroidImpl<Mapping>(s);
+#else
   if (vmaSize == 39)
     return ShadowToMemImpl<Mapping39>(s);
   else
     return ShadowToMemImpl<Mapping42>(s);
   DCHECK(0);
+#endif  // SANITIZER_ANDROID
 #else
   return ShadowToMemImpl<Mapping>(s);
 #endif
@@ -543,7 +624,7 @@
 
 ALWAYS_INLINE
 uptr GetThreadTrace(int tid) {
-#ifdef __aarch64__
+#if defined(__aarch64__) && !SANITIZER_ANDROID
   if (vmaSize == 39)
     return GetThreadTraceImpl<Mapping39>(tid);
   else
@@ -565,7 +646,7 @@
 
 ALWAYS_INLINE
 uptr GetThreadTraceHeader(int tid) {
-#ifdef __aarch64__
+#if defined(__aarch64__) && !SANITIZER_ANDROID
   if (vmaSize == 39)
     return GetThreadTraceHeaderImpl<Mapping39>(tid);
   else
Index: lib/tsan/rtl/tsan_platform_posix.cc
===================================================================
--- lib/tsan/rtl/tsan_platform_posix.cc
+++ lib/tsan/rtl/tsan_platform_posix.cc
@@ -47,6 +47,10 @@
   const uptr kMadviseRangeBeg  = 0xff00000000ull;
   const uptr kMadviseRangeSize = 0x0100000000ull;
 #elif defined(__aarch64__)
+#if SANITIZER_ANDROID
+  const uptr kMadviseRangeBeg  = 0x7d00000000ull;
+  const uptr kMadviseRangeSize = 0x0300000000ull;
+#else
   uptr kMadviseRangeBeg = 0;
   uptr kMadviseRangeSize = 0;
   if (vmaSize == 39) {
@@ -58,6 +62,7 @@
   } else {
     DCHECK(0);
   }
+#endif  // SANITIZER_ANDROID
 #endif
   NoHugePagesInRegion(MemToShadow(kMadviseRangeBeg),
                       kMadviseRangeSize * kShadowMultiplier);
@@ -118,9 +123,14 @@
     Die();
   }
 
+#if defined(__aarch64__) && SANITIZER_ANDROID
+  ProtectRange(MetaShadowEnd(), LoAppMemBeg());
+  ProtectRange(LoAppMemEnd(), TraceMemBeg());
+#else
   ProtectRange(LoAppMemEnd(), ShadowBeg());
-  ProtectRange(ShadowEnd(), MetaShadowBeg());
   ProtectRange(MetaShadowEnd(), TraceMemBeg());
+#endif
+  ProtectRange(ShadowEnd(), MetaShadowBeg());
   // Memory for traces is mapped lazily in MapThreadTrace.
   // Protect the whole range for now, so that user does not map something here.
   ProtectRange(TraceMemBeg(), TraceMemEnd());
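
For reference, the following standalone sketch (not part of the patch) replays the arithmetic of MemToShadowAndroidImpl and ShadowToMemAndroidImpl with the constants from the Android Mapping struct above. It assumes kShadowCell = 8 and kShadowCnt = 4, matching their values in tsan_defs.h; all other names are local to the sketch. It checks that one cell-aligned address from each app region (main binary, heap, modules/stack) lands inside [kShadowBeg, kShadowEnd) and that ShadowToMem inverts MemToShadow:

  // Standalone sanity check for the Android/AArch64 shadow mapping.
  #include <cassert>
  #include <cstdint>
  #include <cstdio>

  typedef uint64_t uptr;

  static const uptr kShadowCell  = 8;  // app bytes per shadow cell (assumed)
  static const uptr kShadowCnt   = 4;  // shadow slots per cell (assumed)

  // Constants copied from the Android Mapping struct in the patch.
  static const uptr kShadowBeg   = 0x2000000000ull;
  static const uptr kShadowEnd   = 0x4000000000ull;
  static const uptr kLoAppMemBeg = 0x5500000000ull;
  static const uptr kLoAppMemEnd = 0x5700000000ull;
  static const uptr kHeapMemBeg  = 0x7d00000000ull;
  static const uptr kHiAppMemEnd = 0x8000000000ull;
  static const uptr kLoAppMemAdd = 0x0300000000ull;
  static const uptr kLoAppMemMsk = 0x5000000000ull;
  static const uptr kHiAppMemMsk = 0x7000000000ull;

  // Same arithmetic as MemToShadowAndroidImpl: strip the region prefix bits,
  // round down to a cell boundary, rebase the low range, scale by kShadowCnt.
  uptr MemToShadow(uptr x) {
    if (x <= kLoAppMemEnd)
      return ((x & ~(kLoAppMemMsk | (kShadowCell - 1))) + kLoAppMemAdd)
          * kShadowCnt;
    return (x & ~(kHiAppMemMsk | (kShadowCell - 1))) * kShadowCnt;
  }

  // Same arithmetic as ShadowToMemAndroidImpl: undo the scaling and the
  // rebasing, then OR the region prefix back in.
  uptr ShadowToMem(uptr s) {
    if (s <= MemToShadow(kLoAppMemEnd))
      return ((s / kShadowCnt) - kLoAppMemAdd) | kLoAppMemMsk;
    return (s / kShadowCnt) | kHiAppMemMsk;
  }

  int main() {
    // One cell-aligned address from each app region.
    const uptr samples[] = { kLoAppMemBeg, kLoAppMemEnd - 8, kHeapMemBeg,
                             kHiAppMemEnd - 8 };
    for (uptr a : samples) {
      uptr s = MemToShadow(a);
      assert(s >= kShadowBeg && s < kShadowEnd);  // lands in shadow memory
      assert(ShadowToMem(s) == a);                // round trip is exact
      printf("app 0x%010llx -> shadow 0x%010llx\n",
             (unsigned long long)a, (unsigned long long)s);
    }
    return 0;
  }

The plain OR in ShadowToMem suffices as an inverse because MemToShadow clears exactly the kLoAppMemMsk (resp. kHiAppMemMsk) bits, so the two functions round-trip exactly on cell-aligned app addresses, which is also why ShadowToMemAndroidImpl in the patch needs no explicit heap case: the heap and the high app range share the 0x7000000000 prefix.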