Index: lib/tsan/rtl/tsan_platform.h =================================================================== --- lib/tsan/rtl/tsan_platform.h +++ lib/tsan/rtl/tsan_platform.h @@ -87,7 +87,12 @@ const uptr kAppMemXor = 0x0400000000ull; const uptr kVdsoBeg = 0xfffff00000ull; #elif defined(__aarch64__) -# if SANITIZER_AARCH64_VMA == 39 +// AArch64 supports multiple VMA which leads to multiple address transformation +// functions. To support these multiple transformations and mappings the TSAN +// runtime for AArch64 uses indirect calls for the memory to/from shadow, +// traces, etc segments. Although slower, it makes the same instrumented binary +// run on multiple kernels. + /* C/C++ on linux/aarch64 (39-bit VMA) 0000 4000 00 - 0200 0000 00: main binary @@ -99,22 +104,22 @@ 7d00 0000 00 - 7e00 0000 00: heap 7e00 0000 00 - 7fff ffff ff: modules and main thread stack */ -const uptr kLoAppMemBeg = 0x0000400000ull; -const uptr kLoAppMemEnd = 0x0200000000ull; -const uptr kShadowBeg = 0x2000000000ull; -const uptr kShadowEnd = 0x4000000000ull; -const uptr kMetaShadowBeg = 0x4000000000ull; -const uptr kMetaShadowEnd = 0x5000000000ull; -const uptr kTraceMemBeg = 0x6000000000ull; -const uptr kTraceMemEnd = 0x6200000000ull; -const uptr kHeapMemBeg = 0x7d00000000ull; -const uptr kHeapMemEnd = 0x7e00000000ull; -const uptr kHiAppMemBeg = 0x7e00000000ull; -const uptr kHiAppMemEnd = 0x7fffffffffull; -const uptr kAppMemMsk = 0x7800000000ull; -const uptr kAppMemXor = 0x0800000000ull; -const uptr kVdsoBeg = 0x7f00000000ull; -# elif SANITIZER_AARCH64_VMA == 42 +const uptr kLoAppMemBeg39 = 0x0000400000ull; +const uptr kLoAppMemEnd39 = 0x0200000000ull; +const uptr kShadowBeg39 = 0x2000000000ull; +const uptr kShadowEnd39 = 0x4000000000ull; +const uptr kMetaShadowBeg39 = 0x4000000000ull; +const uptr kMetaShadowEnd39 = 0x5000000000ull; +const uptr kTraceMemBeg39 = 0x6000000000ull; +const uptr kTraceMemEnd39 = 0x6200000000ull; +const uptr kHeapMemBeg39 = 0x7d00000000ull; +const uptr kHeapMemEnd39 = 
0x7e00000000ull; +const uptr kHiAppMemBeg39 = 0x7e00000000ull; +const uptr kHiAppMemEnd39 = 0x7fffffffffull; +const uptr kAppMemMsk39 = 0x7800000000ull; +const uptr kAppMemXor39 = 0x0800000000ull; +const uptr kVdsoBeg39 = 0x7f00000000ull; + /* C/C++ on linux/aarch64 (42-bit VMA) 00000 4000 00 - 01000 0000 00: main binary @@ -127,26 +132,55 @@ 36240 0000 00 - 3e000 0000 00: - 3e000 0000 00 - 3f000 0000 00: heap 3c000 0000 00 - 3ff00 0000 00: - -3ff00 0000 00 - 3ffff f000 00: modules and main thread stack +3ff00 0000 00 - 3ffff ffff ff: modules and main thread stack */ -const uptr kLoAppMemBeg = 0x00000400000ull; -const uptr kLoAppMemEnd = 0x01000000000ull; -const uptr kShadowBeg = 0x10000000000ull; -const uptr kShadowEnd = 0x20000000000ull; -const uptr kMetaShadowBeg = 0x26000000000ull; -const uptr kMetaShadowEnd = 0x28000000000ull; -const uptr kTraceMemBeg = 0x36200000000ull; -const uptr kTraceMemEnd = 0x36400000000ull; -const uptr kHeapMemBeg = 0x3e000000000ull; -const uptr kHeapMemEnd = 0x3f000000000ull; -const uptr kHiAppMemBeg = 0x3ff00000000ull; -const uptr kHiAppMemEnd = 0x3fffff00000ull; -const uptr kAppMemMsk = 0x3c000000000ull; -const uptr kAppMemXor = 0x04000000000ull; -const uptr kVdsoBeg = 0x37f00000000ull; -# endif +const uptr kLoAppMemBeg42 = 0x00000400000ull; +const uptr kLoAppMemEnd42 = 0x01000000000ull; +const uptr kShadowBeg42 = 0x10000000000ull; +const uptr kShadowEnd42 = 0x20000000000ull; +const uptr kMetaShadowBeg42 = 0x26000000000ull; +const uptr kMetaShadowEnd42 = 0x28000000000ull; +const uptr kTraceMemBeg42 = 0x36200000000ull; +const uptr kTraceMemEnd42 = 0x36400000000ull; +const uptr kHeapMemBeg42 = 0x3e000000000ull; +const uptr kHeapMemEnd42 = 0x3f000000000ull; +const uptr kHiAppMemBeg42 = 0x3ff00000000ull; +const uptr kHiAppMemEnd42 = 0x3ffffffffffull; +const uptr kAppMemMsk42 = 0x3c000000000ull; +const uptr kAppMemXor42 = 0x04000000000ull; +const uptr kVdsoBeg42 = 0x37f00000000ull; + +extern uptr kLoAppMemBeg; +extern uptr kLoAppMemEnd; 
+extern uptr kShadowBeg; +extern uptr kShadowEnd; +extern uptr kMetaShadowBeg; +extern uptr kMetaShadowEnd; +extern uptr kTraceMemBeg; +extern uptr kTraceMemEnd; +extern uptr kHeapMemBeg; +extern uptr kHeapMemEnd; +extern uptr kHiAppMemBeg; +extern uptr kHiAppMemEnd; +extern uptr kAppMemMsk; +extern uptr kAppMemXor; +extern uptr kVdsoBeg; + +#define __TSAN_NOINLINE_FUNCTIONS 1 #endif +#ifdef __TSAN_NOINLINE_FUNCTIONS +// For non-inline functions indirect calls are used to provide different +// mapping strategies depending on the runtime (for instance, on AArch64 +// with multiple VMA different mappings are used). +extern bool (*IsAppMem)(uptr mem); +extern bool (*IsShadowMem)(uptr mem); +extern bool (*IsMetaMem)(uptr mem); +extern uptr (*MemToShadow)(uptr x); +extern u32* (*MemToMeta)(uptr x); +extern uptr (*ShadowToMem)(uptr s); +extern uptr *UserRegions; +#else ALWAYS_INLINE bool IsAppMem(uptr mem) { return (mem >= kHeapMemBeg && mem < kHeapMemEnd) || @@ -192,6 +226,7 @@ kHiAppMemBeg, kHiAppMemEnd, kHeapMemBeg, kHeapMemEnd, }; +#endif #elif defined(SANITIZER_GO) && !SANITIZER_WINDOWS @@ -326,6 +361,10 @@ const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace) + (64 << 10) + (64 << 10) - 1) & ~((64 << 10) - 1); +#ifdef __TSAN_NOINLINE_FUNCTIONS +extern uptr (*GetThreadTrace)(int tid); +extern uptr (*GetThreadTraceHeader)(int tid); +#else uptr ALWAYS_INLINE GetThreadTrace(int tid) { uptr p = kTraceMemBeg + (uptr)tid * kTotalTraceSize; DCHECK_LT(p, kTraceMemEnd); @@ -338,8 +377,10 @@ DCHECK_LT(p, kTraceMemEnd); return p; } +#endif void InitializePlatform(); +void InitializePlatformSpecificModules(); void FlushShadowMemory(); void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive); Index: lib/tsan/rtl/tsan_platform_linux.cc =================================================================== --- lib/tsan/rtl/tsan_platform_linux.cc +++ lib/tsan/rtl/tsan_platform_linux.cc @@ -67,6 +67,125 @@ static uptr g_data_start; static uptr 
g_data_end; +#if !defined(SANITIZER_GO) && defined(__aarch64__) +uptr kLoAppMemBeg; +uptr kLoAppMemEnd; +uptr kShadowBeg; +uptr kShadowEnd; +uptr kMetaShadowBeg; +uptr kMetaShadowEnd; +uptr kTraceMemBeg; +uptr kTraceMemEnd; +uptr kHeapMemBeg; +uptr kHeapMemEnd; +uptr kHiAppMemBeg; +uptr kHiAppMemEnd; +uptr kAppMemMsk; +uptr kAppMemXor; +uptr kVdsoBeg; + +bool (*IsAppMem)(uptr mem); +bool (*IsShadowMem)(uptr mem); +bool (*IsMetaMem)(uptr mem); +uptr (*MemToShadow)(uptr x); +u32* (*MemToMeta)(uptr x); +uptr (*ShadowToMem)(uptr s); +uptr *UserRegions; + +#define __DECLARE_IS_APP_MEM(__vma) \ +bool IsAppMem##__vma(uptr mem) { \ + return (mem >= kHeapMemBeg##__vma && mem < kHeapMemEnd##__vma) || \ + (mem >= kLoAppMemBeg##__vma && mem < kLoAppMemEnd##__vma) || \ + (mem >= kHiAppMemBeg##__vma && mem < kHiAppMemEnd##__vma); \ +} + +__DECLARE_IS_APP_MEM(39) +__DECLARE_IS_APP_MEM(42) + +#define __DECLARE_IS_SHADOW_MEM(__vma) \ +bool IsShadowMem##__vma(uptr mem) { \ + return mem >= kShadowBeg##__vma && mem <= kShadowEnd##__vma; \ +} + +__DECLARE_IS_SHADOW_MEM(39) +__DECLARE_IS_SHADOW_MEM(42) + +#define __DECLARE_IS_META_MEM(__vma) \ +bool IsMetaMem##__vma(uptr mem) { \ + return mem >= kMetaShadowBeg##__vma && mem <= kMetaShadowEnd##__vma; \ +} + +__DECLARE_IS_META_MEM(39) +__DECLARE_IS_META_MEM(42) + +#define __DECLARE_MEM_TO_SHADOW(__vma) \ +uptr MemToShadow##__vma(uptr x) { \ + DCHECK(IsAppMem##__vma(x)); \ + return (((x) & ~(kAppMemMsk##__vma | (kShadowCell - 1))) \ + ^ kAppMemXor##__vma) * kShadowCnt; \ +} + +__DECLARE_MEM_TO_SHADOW(39) +__DECLARE_MEM_TO_SHADOW(42) + +#define __DECLARE_MEM_TO_META(__vma) \ +u32 *MemToMeta##__vma(uptr x) { \ + DCHECK(IsAppMem##__vma(x)); \ + return (u32*)(((((x) & ~(kAppMemMsk##__vma | (kMetaShadowCell - 1))) \ + ^ kAppMemXor##__vma) / kMetaShadowCell * kMetaShadowSize) \ + | kMetaShadowBeg##__vma); \ +} + +__DECLARE_MEM_TO_META(39) +__DECLARE_MEM_TO_META(42) + +#define __DECLARE_SHADOW_TO_MEM(__vma) \ +uptr ShadowToMem##__vma(uptr s) { \ 
+ CHECK(IsShadowMem##__vma(s)); \ + if (s >= MemToShadow##__vma(kLoAppMemBeg##__vma) \ + && s <= MemToShadow##__vma(kLoAppMemEnd##__vma - 1)) \ + return (s / kShadowCnt) ^ kAppMemXor##__vma; \ + return ((s / kShadowCnt) ^ kAppMemXor##__vma) | kAppMemMsk##__vma; \ +} + +__DECLARE_SHADOW_TO_MEM(39) +__DECLARE_SHADOW_TO_MEM(42) + +#define __DECLARE_USER_REGIONS(__vma) \ +static USED uptr UserRegions##__vma[] = { \ + kLoAppMemBeg##__vma, kLoAppMemEnd##__vma, \ + kHiAppMemBeg##__vma, kHiAppMemEnd##__vma, \ + kHeapMemBeg##__vma, kHeapMemEnd##__vma, \ +}; + +__DECLARE_USER_REGIONS(39) +__DECLARE_USER_REGIONS(42) + +uptr (*GetThreadTrace)(int tid); +uptr (*GetThreadTraceHeader)(int tid); + +#define __DECLARE_GET_THREAD_TRACE(__vma) \ +uptr GetThreadTrace##__vma(int tid) { \ + uptr p = kTraceMemBeg##__vma + (uptr)tid * kTotalTraceSize; \ + DCHECK_LT(p, kTraceMemEnd##__vma); \ + return p; \ +} + +__DECLARE_GET_THREAD_TRACE(39) +__DECLARE_GET_THREAD_TRACE(42) + +#define __DECLARE_GET_THREAD_TRACE_HEADER(__vma) \ +uptr GetThreadTraceHeader##__vma(int tid) { \ + uptr p = kTraceMemBeg##__vma + (uptr)tid * kTotalTraceSize \ + + kTraceSize * sizeof(Event); \ + DCHECK_LT(p, kTraceMemEnd##__vma); \ + return p; \ +} + +__DECLARE_GET_THREAD_TRACE_HEADER(39) +__DECLARE_GET_THREAD_TRACE_HEADER(42) +#endif + enum { MemTotal = 0, MemShadow = 1, @@ -222,8 +341,17 @@ const uptr kMadviseRangeBeg = 0xff00000000ull; const uptr kMadviseRangeSize = 0x0100000000ull; #elif defined(__aarch64__) - const uptr kMadviseRangeBeg = 0x7e00000000ull; - const uptr kMadviseRangeSize = 0x0100000000ull; + unsigned runtimeVMA = + (MostSignificantSetBitIndex(GET_CURRENT_FRAME()) + 1); + uptr kMadviseRangeBeg = 0; + uptr kMadviseRangeSize = 0; + if (runtimeVMA == 39) { + kMadviseRangeBeg = 0x7e00000000ull; + kMadviseRangeSize = 0x0100000000ull; + } else if (runtimeVMA == 42) { + kMadviseRangeBeg = 0x3ff00000000ull; + kMadviseRangeSize = 0x0100000000ull; + } #endif NoHugePagesInRegion(MemToShadow(kMadviseRangeBeg), 
kMadviseRangeSize * kShadowMultiplier); @@ -322,6 +450,53 @@ } #endif // #ifndef SANITIZER_GO +void InitializePlatformSpecificModules() { +#if !defined(SANITIZER_GO) && defined(__aarch64__) + +# define __INITIALIZE_MAPPING_VALUES(__vma) \ + kLoAppMemBeg = kLoAppMemBeg##__vma; \ + kLoAppMemEnd = kLoAppMemEnd##__vma; \ + kShadowBeg = kShadowBeg##__vma; \ + kShadowEnd = kShadowEnd##__vma; \ + kMetaShadowBeg = kMetaShadowBeg##__vma; \ + kMetaShadowEnd = kMetaShadowEnd##__vma; \ + kTraceMemBeg = kTraceMemBeg##__vma; \ + kTraceMemEnd = kTraceMemEnd##__vma; \ + kHeapMemBeg = kHeapMemBeg##__vma; \ + kHeapMemEnd = kHeapMemEnd##__vma; \ + kHiAppMemBeg = kHiAppMemBeg##__vma; \ + kHiAppMemEnd = kHiAppMemEnd##__vma; \ + kAppMemMsk = kAppMemMsk##__vma; \ + kAppMemXor = kAppMemXor##__vma; \ + kVdsoBeg = kVdsoBeg##__vma; + +# define __INITIALIZE_MAPPING_FUNCTIONS(__vma) \ + IsAppMem = IsAppMem##__vma; \ + IsShadowMem = IsShadowMem##__vma; \ + IsMetaMem = IsMetaMem##__vma; \ + MemToShadow = MemToShadow##__vma; \ + MemToMeta = MemToMeta##__vma; \ + ShadowToMem = ShadowToMem##__vma; \ + UserRegions = UserRegions##__vma; \ + GetThreadTrace = GetThreadTrace##__vma; \ + GetThreadTraceHeader = GetThreadTraceHeader##__vma; + + unsigned runtimeVMA = + (MostSignificantSetBitIndex(GET_CURRENT_FRAME()) + 1); + if (runtimeVMA == 39) { + __INITIALIZE_MAPPING_VALUES(39); + __INITIALIZE_MAPPING_FUNCTIONS(39); + } else if (runtimeVMA == 42) { + __INITIALIZE_MAPPING_VALUES(42); + __INITIALIZE_MAPPING_FUNCTIONS(42); + } else { + Printf("FATAL: ThreadSanitizer: unsupported VMA range\n"); + Printf("FATAL: Found %d - Supported 39 and 42\n", runtimeVMA); + Die(); + } +#endif +} + void InitializePlatform() { DisableCoreDumperIfNecessary(); Index: lib/tsan/rtl/tsan_platform_mac.cc =================================================================== --- lib/tsan/rtl/tsan_platform_mac.cc +++ lib/tsan/rtl/tsan_platform_mac.cc @@ -71,6 +71,9 @@ } #endif +void InitializePlatformSpecificModules() { +} + void 
InitializePlatform() { DisableCoreDumperIfNecessary(); } Index: lib/tsan/rtl/tsan_platform_windows.cc =================================================================== --- lib/tsan/rtl/tsan_platform_windows.cc +++ lib/tsan/rtl/tsan_platform_windows.cc @@ -31,6 +31,9 @@ void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive) { } +void InitializePlatformSpecificModules() { +} + void InitializePlatform() { } Index: lib/tsan/rtl/tsan_rtl.cc =================================================================== --- lib/tsan/rtl/tsan_rtl.cc +++ lib/tsan/rtl/tsan_rtl.cc @@ -322,7 +322,7 @@ const char *options = GetEnv(kTsanOptionsEnv); CacheBinaryName(); InitializeFlags(&ctx->flags, options); - CheckVMASize(); + InitializePlatformSpecificModules(); #ifndef SANITIZER_GO InitializeAllocator(); #endif