diff --git a/llvm/include/llvm/Transforms/Instrumentation/AddressSanitizer.h b/llvm/include/llvm/Transforms/Instrumentation/AddressSanitizer.h
--- a/llvm/include/llvm/Transforms/Instrumentation/AddressSanitizer.h
+++ b/llvm/include/llvm/Transforms/Instrumentation/AddressSanitizer.h
@@ -37,7 +37,8 @@
 public:
   AddressSanitizerPass(const AddressSanitizerOptions &Options,
                        bool UseGlobalGC = true, bool UseOdrIndicator = true,
-                       AsanDtorKind DestructorKind = AsanDtorKind::Global);
+                       AsanDtorKind DestructorKind = AsanDtorKind::Global,
+                       AsanCtorKind ConstructorKind = AsanCtorKind::Global);
   PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM);
   void printPipeline(raw_ostream &OS,
                      function_ref<StringRef(StringRef)> MapClassName2PassName);
@@ -48,6 +49,7 @@
   bool UseGlobalGC;
   bool UseOdrIndicator;
   AsanDtorKind DestructorKind;
+  AsanCtorKind ConstructorKind;
 };
 
 struct ASanAccessInfo {
diff --git a/llvm/include/llvm/Transforms/Instrumentation/AddressSanitizerOptions.h b/llvm/include/llvm/Transforms/Instrumentation/AddressSanitizerOptions.h
--- a/llvm/include/llvm/Transforms/Instrumentation/AddressSanitizerOptions.h
+++ b/llvm/include/llvm/Transforms/Instrumentation/AddressSanitizerOptions.h
@@ -19,6 +19,9 @@
   Invalid, ///< Not a valid destructor Kind.
 };
 
+/// Types of ASan module constructors supported
+enum class AsanCtorKind { None, Global };
+
 /// Mode of ASan detect stack use after return
 enum class AsanDetectStackUseAfterReturnMode {
   Never, ///< Never detect stack use after return.
diff --git a/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp b/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
--- a/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
+++ b/llvm/lib/Transforms/Instrumentation/AddressSanitizer.cpp
@@ -97,7 +97,7 @@
 static const uint64_t kDefaultShadowOffset64 = 1ULL << 44;
 static const uint64_t kDynamicShadowSentinel =
     std::numeric_limits<uint64_t>::max();
-static const uint64_t kSmallX86_64ShadowOffsetBase = 0x7FFFFFFF;  // < 2G.
+static const uint64_t kSmallX86_64ShadowOffsetBase = 0x7FFFFFFF; // < 2G.
 static const uint64_t kSmallX86_64ShadowOffsetAlignMask = ~0xFFFULL;
 static const uint64_t kLinuxKasan_ShadowOffset64 = 0xdffffc0000000000;
 static const uint64_t kPPC64_ShadowOffset64 = 1ULL << 44;
@@ -122,8 +122,8 @@
 // The shadow memory space is dynamically allocated.
 static const uint64_t kWindowsShadowOffset64 = kDynamicShadowSentinel;
 
-static const size_t kMinStackMallocSize = 1 << 6;   // 64B
-static const size_t kMaxStackMallocSize = 1 << 16;  // 64K
+static const size_t kMinStackMallocSize = 1 << 6;  // 64B
+static const size_t kMaxStackMallocSize = 1 << 16; // 64K
 static const uintptr_t kCurrentStackFrameMagic = 0x41B58AB3;
 static const uintptr_t kRetiredStackFrameMagic = 0x45E0360E;
@@ -149,8 +149,7 @@
 const char kAsanHandleNoReturnName[] = "__asan_handle_no_return";
 static const int kMaxAsanStackMallocSizeClass = 10;
 const char kAsanStackMallocNameTemplate[] = "__asan_stack_malloc_";
-const char kAsanStackMallocAlwaysNameTemplate[] =
-    "__asan_stack_malloc_always_";
+const char kAsanStackMallocAlwaysNameTemplate[] = "__asan_stack_malloc_always_";
 const char kAsanStackFreeNameTemplate[] = "__asan_stack_free_";
 const char kAsanGenPrefix[] = "___asan_gen_";
 const char kODRGenPrefix[] = "__odr_asan_gen_";
@@ -190,28 +189,30 @@
 // Command-line flags.
 
-static cl::opt<bool> ClEnableKasan(
-    "asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"),
-    cl::Hidden, cl::init(false));
+static cl::opt<bool>
+    ClEnableKasan("asan-kernel",
+                  cl::desc("Enable KernelAddressSanitizer instrumentation"),
+                  cl::Hidden, cl::init(false));
 
-static cl::opt<bool> ClRecover(
-    "asan-recover",
-    cl::desc("Enable recovery mode (continue-after-error)."),
-    cl::Hidden, cl::init(false));
+static cl::opt<bool>
+    ClRecover("asan-recover",
+              cl::desc("Enable recovery mode (continue-after-error)."),
+              cl::Hidden, cl::init(false));
 
 static cl::opt<bool> ClInsertVersionCheck(
     "asan-guard-against-version-mismatch",
-    cl::desc("Guard against compiler/runtime version mismatch."),
-    cl::Hidden, cl::init(true));
+    cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden,
+    cl::init(true));
 
 // This flag may need to be replaced with -f[no-]asan-reads.
 static cl::opt<bool> ClInstrumentReads("asan-instrument-reads",
                                        cl::desc("instrument read instructions"),
                                        cl::Hidden, cl::init(true));
 
-static cl::opt<bool> ClInstrumentWrites(
-    "asan-instrument-writes", cl::desc("instrument write instructions"),
-    cl::Hidden, cl::init(true));
+static cl::opt<bool>
+    ClInstrumentWrites("asan-instrument-writes",
+                       cl::desc("instrument write instructions"), cl::Hidden,
+                       cl::init(true));
 
 static cl::opt<bool> ClUseStackSafety("asan-use-stack-safety", cl::Hidden,
                                       cl::init(false),
@@ -285,8 +286,8 @@
 static cl::opt<bool> ClRedzoneByvalArgs("asan-redzone-byval-args",
                                         cl::desc("Create redzones for byval "
                                                  "arguments (extra copy "
-                                                 "required)"), cl::Hidden,
-                                        cl::init(true));
+                                                 "required)"),
+                                        cl::Hidden, cl::init(true));
 
 static cl::opt<bool> ClUseAfterScope("asan-use-after-scope",
                                      cl::desc("Check stack-use-after-scope"),
                                      cl::Hidden, cl::init(false));
@@ -323,16 +324,15 @@
 static cl::opt<int> ClInstrumentationWithCallsThreshold(
     "asan-instrumentation-with-call-threshold",
-    cl::desc(
-        "If the function being instrumented contains more than "
-        "this number of memory accesses, use callbacks instead of "
-        "inline checks (-1 means never use callbacks)."),
+    cl::desc("If the function being instrumented contains more than "
+             "this number of memory accesses, use callbacks instead of "
+             "inline checks (-1 means never use callbacks)."),
     cl::Hidden, cl::init(7000));
 
-static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
-    "asan-memory-access-callback-prefix",
-    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
-    cl::init("__asan_"));
+static cl::opt<std::string>
+    ClMemoryAccessCallbackPrefix("asan-memory-access-callback-prefix",
+                                 cl::desc("Prefix for memory access callbacks"),
+                                 cl::Hidden, cl::init("__asan_"));
 
 static cl::opt<bool> ClKasanMemIntrinCallbackPrefix(
     "asan-kernel-mem-intrinsic-prefix",
@@ -344,11 +344,17 @@
                                        cl::desc("instrument dynamic allocas"),
                                        cl::Hidden, cl::init(true));
 
-static cl::opt<bool> ClSkipPromotableAllocas(
-    "asan-skip-promotable-allocas",
-    cl::desc("Do not instrument promotable allocas"), cl::Hidden,
-    cl::init(true));
-
+static cl::opt<bool>
+    ClSkipPromotableAllocas("asan-skip-promotable-allocas",
+                            cl::desc("Do not instrument promotable allocas"),
+                            cl::Hidden, cl::init(true));
+
+static cl::opt<AsanCtorKind> ClConstructorKind(
+    "asan-constructor-kind", cl::desc("Sets the ASan constructor kind"),
+    cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"),
+               clEnumValN(AsanCtorKind::Global, "global",
+                          "Use global constructors")),
+    cl::init(AsanCtorKind::Global), cl::Hidden);
 // These flags allow to change the shadow mapping.
 // The shadow mapping looks like
 //    Shadow = (Mem >> scale) + offset
@@ -372,27 +378,29 @@
                                          cl::desc("Optimize callbacks"),
                                          cl::Hidden, cl::init(false));
 
-static cl::opt<bool> ClOptSameTemp(
-    "asan-opt-same-temp", cl::desc("Instrument the same temp just once"),
-    cl::Hidden, cl::init(true));
+static cl::opt<bool>
+    ClOptSameTemp("asan-opt-same-temp",
+                  cl::desc("Instrument the same temp just once"), cl::Hidden,
+                  cl::init(true));
 
 static cl::opt<bool> ClOptGlobals("asan-opt-globals",
                                   cl::desc("Don't instrument scalar globals"),
                                   cl::Hidden, cl::init(true));
 
-static cl::opt<bool> ClOptStack(
-    "asan-opt-stack", cl::desc("Don't instrument scalar stack variables"),
-    cl::Hidden, cl::init(false));
+static cl::opt<bool>
+    ClOptStack("asan-opt-stack",
+               cl::desc("Don't instrument scalar stack variables"), cl::Hidden,
+               cl::init(false));
 
 static cl::opt<bool> ClDynamicAllocaStack(
     "asan-stack-dynamic-alloca",
     cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden,
     cl::init(true));
 
-static cl::opt<uint32_t> ClForceExperiment(
-    "asan-force-experiment",
-    cl::desc("Force optimization experiment (for testing)"), cl::Hidden,
-    cl::init(0));
+static cl::opt<uint32_t>
+    ClForceExperiment("asan-force-experiment",
+                      cl::desc("Force optimization experiment (for testing)"),
+                      cl::Hidden, cl::init(0));
 
 static cl::opt<bool>
     ClUsePrivateAlias("asan-use-private-alias",
@@ -518,7 +526,7 @@
       Mapping.Offset = kEmscriptenShadowOffset;
     else
       Mapping.Offset = kDefaultShadowOffset32;
-  } else {  // LongSize == 64
+  } else { // LongSize == 64
     // Fuchsia is always PIE, which means that the beginning of the address
     // space is always available.
     if (IsFuchsia)
@@ -528,7 +536,7 @@
     else if (IsSystemZ)
       Mapping.Offset = kSystemZ_ShadowOffset64;
     else if (IsFreeBSD && IsAArch64)
-        Mapping.Offset = kFreeBSDAArch64_ShadowOffset64;
+      Mapping.Offset = kFreeBSDAArch64_ShadowOffset64;
     else if (IsFreeBSD && !IsMIPS64) {
       if (IsKasan)
         Mapping.Offset = kFreeBSDKasan_ShadowOffset64;
@@ -668,8 +676,7 @@
     ArraySize = CI->getZExtValue();
   }
   Type *Ty = AI.getAllocatedType();
-  uint64_t SizeInBytes =
-      AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
+  uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
   return SizeInBytes * ArraySize;
 }
@@ -772,7 +779,8 @@
   ModuleAddressSanitizer(Module &M, bool CompileKernel = false,
                          bool Recover = false, bool UseGlobalsGC = true,
                          bool UseOdrIndicator = true,
-                         AsanDtorKind DestructorKind = AsanDtorKind::Global)
+                         AsanDtorKind DestructorKind = AsanDtorKind::Global,
+                         AsanCtorKind ConstructorKind = AsanCtorKind::Global)
       : CompileKernel(ClEnableKasan.getNumOccurrences() > 0 ? ClEnableKasan
                                                             : CompileKernel),
         Recover(ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover),
@@ -792,7 +800,7 @@
         // ClWithComdat and ClUseGlobalsGC unless the frontend says it's ok to
         // do globals-gc.
         UseCtorComdat(UseGlobalsGC && ClWithComdat && !this->CompileKernel),
-        DestructorKind(DestructorKind) {
+        DestructorKind(DestructorKind), ConstructorKind(ConstructorKind) {
     C = &(M.getContext());
     int LongSize = M.getDataLayout().getPointerSizeInBits();
     IntptrTy = Type::getIntNTy(*C, LongSize);
@@ -850,6 +858,7 @@
   bool UseOdrIndicator;
   bool UseCtorComdat;
   AsanDtorKind DestructorKind;
+  AsanCtorKind ConstructorKind;
   Type *IntptrTy;
   LLVMContext *C;
   Triple TargetTriple;
@@ -930,9 +939,11 @@
     copyArgsPassedByValToAllocas();
 
     // Collect alloca, ret, lifetime instructions etc.
-    for (BasicBlock *BB : depth_first(&F.getEntryBlock())) visit(*BB);
+    for (BasicBlock *BB : depth_first(&F.getEntryBlock()))
+      visit(*BB);
 
-    if (AllocaVec.empty() && DynamicAllocaVec.empty()) return false;
+    if (AllocaVec.empty() && DynamicAllocaVec.empty())
+      return false;
 
     initializeCallbacks(*F.getParent());
@@ -980,7 +991,9 @@
   void visitResumeInst(ResumeInst &RI) { RetVec.push_back(&RI); }
 
   /// Collect all CatchReturnInst instructions.
-  void visitCleanupReturnInst(CleanupReturnInst &CRI) { RetVec.push_back(&CRI); }
+  void visitCleanupReturnInst(CleanupReturnInst &CRI) {
+    RetVec.push_back(&CRI);
+  }
 
   void unpoisonDynamicAllocasBeforeInst(Instruction *InstBefore,
                                         Value *SavedStack) {
@@ -1052,8 +1065,10 @@
   /// errors.
   void visitIntrinsicInst(IntrinsicInst &II) {
     Intrinsic::ID ID = II.getIntrinsicID();
-    if (ID == Intrinsic::stackrestore) StackRestoreVec.push_back(&II);
-    if (ID == Intrinsic::localescape) LocalEscapeCall = &II;
+    if (ID == Intrinsic::stackrestore)
+      StackRestoreVec.push_back(&II);
+    if (ID == Intrinsic::localescape)
+      LocalEscapeCall = &II;
     if (!ASan.UseAfterScope)
       return;
     if (!II.isLifetimeStartOrEnd())
@@ -1061,7 +1076,8 @@
     // Found lifetime intrinsic, add ASan instrumentation if necessary.
     auto *Size = cast<ConstantInt>(II.getArgOperand(0));
     // If size argument is undefined, don't do anything.
-    if (Size->isMinusOne()) return;
+    if (Size->isMinusOne())
+      return;
     // Check that size doesn't saturate uint64_t and can
     // be stored in IntptrTy.
     const uint64_t SizeValue = Size->getValue().getLimitedValue();
@@ -1131,15 +1147,17 @@
 
 AddressSanitizerPass::AddressSanitizerPass(
     const AddressSanitizerOptions &Options, bool UseGlobalGC,
-    bool UseOdrIndicator, AsanDtorKind DestructorKind)
+    bool UseOdrIndicator, AsanDtorKind DestructorKind,
+    AsanCtorKind ConstructorKind)
     : Options(Options), UseGlobalGC(UseGlobalGC),
-      UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind) {}
+      UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
+      ConstructorKind(ClConstructorKind) {}
 
 PreservedAnalyses AddressSanitizerPass::run(Module &M,
                                             ModuleAnalysisManager &MAM) {
-  ModuleAddressSanitizer ModuleSanitizer(M, Options.CompileKernel,
-                                         Options.Recover, UseGlobalGC,
-                                         UseOdrIndicator, DestructorKind);
+  ModuleAddressSanitizer ModuleSanitizer(
+      M, Options.CompileKernel, Options.Recover, UseGlobalGC, UseOdrIndicator,
+      DestructorKind, ConstructorKind);
   bool Modified = false;
   auto &FAM = MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
   const StackSafetyGlobalInfo *const SSGI =
@@ -1199,7 +1217,8 @@
 Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
   // Shadow >> scale
   Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
-  if (Mapping.Offset == 0) return Shadow;
+  if (Mapping.Offset == 0)
+    return Shadow;
   // (Shadow >> scale) | offset
   Value *ShadowBase;
   if (LocalDynamicShadow)
@@ -1641,7 +1660,7 @@
     CrashTerm = SplitBlockAndInsertIfThen(Cmp2, CheckTerm, false);
   } else {
     BasicBlock *CrashBlock =
-      BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
+        BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
     CrashTerm = new UnreachableInst(*C, CrashBlock);
     BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
     ReplaceInstWithInst(CheckTerm, NewTerm);
@@ -1708,12 +1727,14 @@
     return;
 
   for (Use &OP : CA->operands()) {
-    if (isa<ConstantAggregateZero>(OP)) continue;
+    if (isa<ConstantAggregateZero>(OP))
+      continue;
     ConstantStruct *CS = cast<ConstantStruct>(OP);
 
     // Must have a function or null ptr.
     if (Function *F = dyn_cast<Function>(CS->getOperand(1))) {
-      if (F->getName() == kAsanModuleCtorName) continue;
+      if (F->getName() == kAsanModuleCtorName)
+        continue;
       auto *Priority = cast<ConstantInt>(CS->getOperand(0));
       // Don't instrument CTORs that will run before asan.module_ctor.
       if (Priority->getLimitedValue() <= GetCtorAndDtorPriority(TargetTriple))
@@ -1747,19 +1768,24 @@
   if (G->hasSanitizerMetadata() && G->getSanitizerMetadata().NoAddress)
     return false;
 
-  if (!Ty->isSized()) return false;
-  if (!G->hasInitializer()) return false;
+  if (!Ty->isSized())
+    return false;
+  if (!G->hasInitializer())
+    return false;
   // Globals in address space 1 and 4 are supported for AMDGPU.
   if (G->getAddressSpace() &&
       !(TargetTriple.isAMDGPU() && !isUnsupportedAMDGPUAddrspace(G)))
     return false;
-  if (GlobalWasGeneratedByCompiler(G)) return false; // Our own globals.
+  if (GlobalWasGeneratedByCompiler(G))
+    return false; // Our own globals.
   // Two problems with thread-locals:
   //   - The address of the main thread's copy can't be computed at link-time.
   //   - Need to poison all copies, not just the main thread's one.
-  if (G->isThreadLocal()) return false;
+  if (G->isThreadLocal())
+    return false;
   // For now, just ignore this Global if the alignment is large.
-  if (G->getAlign() && *G->getAlign() > getMinRedzoneSizeForGlobal()) return false;
+  if (G->getAlign() && *G->getAlign() > getMinRedzoneSizeForGlobal())
+    return false;
 
   // For non-COFF targets, only instrument globals known to be defined by this
   // TU.
@@ -1798,7 +1824,8 @@
   StringRef Section = G->getSection();
 
   // Globals from llvm.metadata aren't emitted, do not instrument them.
-  if (Section == "llvm.metadata") return false;
+  if (Section == "llvm.metadata")
+    return false;
   // Do not instrument globals from special LLVM sections.
   if (Section.contains("__llvm") || Section.contains("__LLVM"))
     return false;
@@ -1900,9 +1927,12 @@
 
 StringRef ModuleAddressSanitizer::getGlobalMetadataSection() const {
   switch (TargetTriple.getObjectFormat()) {
-  case Triple::COFF:  return ".ASAN$GL";
-  case Triple::ELF:   return "asan_globals";
-  case Triple::MachO: return "__DATA,__asan_globals,regular";
+  case Triple::COFF:
+    return ".ASAN$GL";
+  case Triple::ELF:
+    return "asan_globals";
+  case Triple::MachO:
+    return "__DATA,__asan_globals,regular";
   case Triple::Wasm:
   case Triple::GOFF:
   case Triple::SPIRV:
@@ -1991,9 +2021,10 @@
   auto Linkage = TargetTriple.isOSBinFormatMachO()
                      ? GlobalVariable::InternalLinkage
                      : GlobalVariable::PrivateLinkage;
-  GlobalVariable *Metadata = new GlobalVariable(
-      M, Initializer->getType(), false, Linkage, Initializer,
-      Twine("__asan_global_") + GlobalValue::dropLLVMManglingEscape(OriginalName));
+  GlobalVariable *Metadata =
+      new GlobalVariable(M, Initializer->getType(), false, Linkage, Initializer,
+                         Twine("__asan_global_") +
+                             GlobalValue::dropLLVMManglingEscape(OriginalName));
   Metadata->setSection(getGlobalMetadataSection());
   return Metadata;
 }
@@ -2095,10 +2126,11 @@
   StopELFMetadata->setVisibility(GlobalVariable::HiddenVisibility);
 
   // Create a call to register the globals with the runtime.
-  IRB.CreateCall(AsanRegisterElfGlobals,
-                 {IRB.CreatePointerCast(RegisteredFlag, IntptrTy),
-                  IRB.CreatePointerCast(StartELFMetadata, IntptrTy),
-                  IRB.CreatePointerCast(StopELFMetadata, IntptrTy)});
+  if (ConstructorKind == AsanCtorKind::Global)
+    IRB.CreateCall(AsanRegisterElfGlobals,
+                   {IRB.CreatePointerCast(RegisteredFlag, IntptrTy),
+                    IRB.CreatePointerCast(StartELFMetadata, IntptrTy),
+                    IRB.CreatePointerCast(StopELFMetadata, IntptrTy)});
 
   // We also need to unregister globals at the end, e.g., when a shared library
   // gets closed.
@@ -2158,8 +2190,9 @@
       ConstantInt::get(IntptrTy, 0), kAsanGlobalsRegisteredFlagName);
   RegisteredFlag->setVisibility(GlobalVariable::HiddenVisibility);
 
-  IRB.CreateCall(AsanRegisterImageGlobals,
-                 {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)});
+  if (ConstructorKind == AsanCtorKind::Global)
+    IRB.CreateCall(AsanRegisterImageGlobals,
+                   {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)});
 
   // We also need to unregister globals at the end, e.g., when a shared library
   // gets closed.
@@ -2187,9 +2220,10 @@
   if (Mapping.Scale > 3)
     AllGlobals->setAlignment(Align(1ULL << Mapping.Scale));
 
-  IRB.CreateCall(AsanRegisterGlobals,
-                 {IRB.CreatePointerCast(AllGlobals, IntptrTy),
-                  ConstantInt::get(IntptrTy, N)});
+  if (ConstructorKind == AsanCtorKind::Global)
+    IRB.CreateCall(AsanRegisterGlobals,
+                   {IRB.CreatePointerCast(AllGlobals, IntptrTy),
+                    ConstantInt::get(IntptrTy, N)});
 
   // We also need to unregister globals at the end, e.g., when a shared library
   // gets closed.
@@ -2321,9 +2355,9 @@
     Constant *ODRIndicator = ConstantExpr::getNullValue(IRB.getInt8PtrTy());
     GlobalValue *InstrumentedGlobal = NewGlobal;
 
-    bool CanUsePrivateAliases =
-        TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
-        TargetTriple.isOSBinFormatWasm();
+    bool CanUsePrivateAliases = TargetTriple.isOSBinFormatELF() ||
+                                TargetTriple.isOSBinFormatMachO() ||
+                                TargetTriple.isOSBinFormatWasm();
     if (CanUsePrivateAliases && UsePrivateAlias) {
       // Create local alias for NewGlobal to avoid crash on ODR between
       // instrumented and non-instrumented libraries.
@@ -2375,7 +2409,8 @@
   SmallVector<GlobalValue *, 16> GlobalsToAddToUsedList;
   for (size_t i = 0; i < n; i++) {
     GlobalVariable *G = NewGlobals[i];
-    if (G->getName().empty()) continue;
+    if (G->getName().empty())
+      continue;
     GlobalsToAddToUsedList.push_back(G);
   }
   appendToCompilerUsed(M, ArrayRef<GlobalValue *>(GlobalsToAddToUsedList));
@@ -2443,23 +2478,30 @@
 
   // Create a module constructor. A destructor is created lazily because not all
   // platforms, and not all modules need it.
-  if (CompileKernel) {
-    // The kernel always builds with its own runtime, and therefore does not
-    // need the init and version check calls.
-    AsanCtorFunction = createSanitizerCtor(M, kAsanModuleCtorName);
-  } else {
-    std::string AsanVersion = std::to_string(GetAsanVersion(M));
-    std::string VersionCheckName =
-        ClInsertVersionCheck ? (kAsanVersionCheckNamePrefix + AsanVersion) : "";
-    std::tie(AsanCtorFunction, std::ignore) =
-        createSanitizerCtorAndInitFunctions(M, kAsanModuleCtorName,
-                                            kAsanInitName, /*InitArgTypes=*/{},
-                                            /*InitArgs=*/{}, VersionCheckName);
+  if (ConstructorKind == AsanCtorKind::Global) {
+    if (CompileKernel) {
+      // The kernel always builds with its own runtime, and therefore does not
+      // need the init and version check calls.
+      AsanCtorFunction = createSanitizerCtor(M, kAsanModuleCtorName);
+    } else {
+      std::string AsanVersion = std::to_string(GetAsanVersion(M));
+      std::string VersionCheckName =
+          ClInsertVersionCheck ? (kAsanVersionCheckNamePrefix + AsanVersion)
+                               : "";
+      std::tie(AsanCtorFunction, std::ignore) =
+          createSanitizerCtorAndInitFunctions(
+              M, kAsanModuleCtorName, kAsanInitName, /*InitArgTypes=*/{},
+              /*InitArgs=*/{}, VersionCheckName);
+    }
   }
 
   bool CtorComdat = true;
   if (ClGlobals) {
-    IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
+    assert(AsanCtorFunction || ConstructorKind == AsanCtorKind::None);
+    IRBuilder<> IRB =
+        AsanCtorFunction
+            ? IRBuilder<>(AsanCtorFunction->getEntryBlock().getTerminator())
+            : IRBuilder<>(*C);
     InstrumentGlobals(IRB, M, &CtorComdat);
   }
 
@@ -2470,13 +2512,15 @@
   // (2) target is ELF.
   if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
     AsanCtorFunction->setComdat(M.getOrInsertComdat(kAsanModuleCtorName));
-    appendToGlobalCtors(M, AsanCtorFunction, Priority, AsanCtorFunction);
+    if (AsanCtorFunction)
+      appendToGlobalCtors(M, AsanCtorFunction, Priority, AsanCtorFunction);
     if (AsanDtorFunction) {
       AsanDtorFunction->setComdat(M.getOrInsertComdat(kAsanModuleDtorName));
       appendToGlobalDtors(M, AsanDtorFunction, Priority, AsanDtorFunction);
     }
   } else {
-    appendToGlobalCtors(M, AsanCtorFunction, Priority);
+    if (AsanCtorFunction)
+      appendToGlobalCtors(M, AsanCtorFunction, Priority);
     if (AsanDtorFunction)
       appendToGlobalDtors(M, AsanDtorFunction, Priority);
   }
@@ -2484,7 +2528,8 @@
   return true;
 }
 
-void AddressSanitizer::initializeCallbacks(Module &M, const TargetLibraryInfo *TLI) {
+void AddressSanitizer::initializeCallbacks(Module &M,
+                                           const TargetLibraryInfo *TLI) {
   IRBuilder<> IRB(*C);
   // Create __asan_report* callbacks.
   // IsWrite, TypeSize and Exp are encoded in the function name.
@@ -2618,7 +2663,8 @@
 
   // Try to get the declaration of llvm.localescape. If it's not in the module,
   // we can exit early.
-  if (!F.getParent()->getFunction("llvm.localescape")) return;
+  if (!F.getParent()->getFunction("llvm.localescape"))
+    return;
 
   // Look for a call to llvm.localescape call in the entry block. It can't be in
   // any other block.
@@ -2649,9 +2695,12 @@
                                          const TargetLibraryInfo *TLI) {
   if (F.empty())
     return false;
-  if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) return false;
-  if (!ClDebugFunc.empty() && ClDebugFunc == F.getName()) return false;
-  if (F.getName().startswith("__asan_")) return false;
+  if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage)
+    return false;
+  if (!ClDebugFunc.empty() && ClDebugFunc == F.getName())
+    return false;
+  if (F.getName().startswith("__asan_"))
+    return false;
 
   bool FunctionModified = false;
 
@@ -2662,7 +2711,8 @@
     FunctionModified = true;
 
   // Leave if the function doesn't need instrumentation.
-  if (!F.hasFnAttribute(Attribute::SanitizeAddress)) return FunctionModified;
+  if (!F.hasFnAttribute(Attribute::SanitizeAddress))
+    return FunctionModified;
   if (F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
     return FunctionModified;
 
@@ -2694,7 +2744,8 @@
       TempsToInstrument.clear();
       int NumInsnsPerBB = 0;
       for (auto &Inst : BB) {
-        if (LooksLikeCodeInBug11395(&Inst)) return false;
+        if (LooksLikeCodeInBug11395(&Inst))
+          return false;
         // Skip instructions inserted by another instrumentation.
         if (Inst.hasMetadata(LLVMContext::MD_nosanitize))
           continue;
@@ -2738,7 +2789,8 @@
         if (CallInst *CI = dyn_cast<CallInst>(&Inst))
          maybeMarkSanitizerLibraryCallNoBuiltin(CI, TLI);
       }
-      if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB) break;
+      if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB)
+        break;
     }
   }
 
@@ -2792,9 +2844,11 @@
 // with large assembly blobs (32-bit only), otherwise reg alloc may crash.
 // FIXME: remove once the bug 11395 is fixed.
 bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
-  if (LongSize != 32) return false;
+  if (LongSize != 32)
+    return false;
   CallInst *CI = dyn_cast<CallInst>(I);
-  if (!CI || !CI->isInlineAsm()) return false;
+  if (!CI || !CI->isInlineAsm())
+    return false;
   if (CI->arg_size() <= 5)
     return false;
   // We have inline assembly with quite a few arguments.
@@ -2936,7 +2990,8 @@
   assert(LocalStackSize <= kMaxStackMallocSize);
   uint64_t MaxSize = kMinStackMallocSize;
   for (int i = 0;; i++, MaxSize *= 2)
-    if (LocalStackSize <= MaxSize) return i;
+    if (LocalStackSize <= MaxSize)
+      return i;
   llvm_unreachable("impossible LocalStackSize");
 }
 
@@ -2993,7 +3048,8 @@
     assert(Alloca->isStaticAlloca());
   }
   assert((ClRealignStack & (ClRealignStack - 1)) == 0);
-  uint64_t FrameAlignment = std::max(L.FrameAlignment, uint64_t(ClRealignStack));
+  uint64_t FrameAlignment =
+      std::max(L.FrameAlignment, uint64_t(ClRealignStack));
   Alloca->setAlignment(Align(FrameAlignment));
   return IRB.CreatePointerCast(Alloca, IntptrTy);
 }
@@ -3120,7 +3176,8 @@
     ArgInitInst->moveBefore(InsBefore);
 
   // If we have a call to llvm.localescape, keep it in the entry block.
-  if (LocalEscapeCall) LocalEscapeCall->moveBefore(InsBefore);
+  if (LocalEscapeCall)
+    LocalEscapeCall->moveBefore(InsBefore);
 
   SmallVector<ASanStackVariableDescription, 16> SVD;
   SVD.reserve(AllocaVec.size());
@@ -3377,9 +3434,9 @@
   // For now just insert the call to ASan runtime.
   Value *AddrArg = IRB.CreatePointerCast(V, IntptrTy);
   Value *SizeArg = ConstantInt::get(IntptrTy, Size);
-  IRB.CreateCall(
-      DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
-      {AddrArg, SizeArg});
+  IRB.CreateCall(DoPoison ? AsanPoisonStackMemoryFunc
+                          : AsanUnpoisonStackMemoryFunc,
+                 {AddrArg, SizeArg});
 }
 
 // Handling llvm.lifetime intrinsics for a given %alloca:
@@ -3460,7 +3517,8 @@
 bool AddressSanitizer::isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis,
                                     Value *Addr, uint64_t TypeSize) const {
   SizeOffsetType SizeOffset = ObjSizeVis.compute(Addr);
-  if (!ObjSizeVis.bothKnown(SizeOffset)) return false;
+  if (!ObjSizeVis.bothKnown(SizeOffset))
+    return false;
   uint64_t Size = SizeOffset.first.getZExtValue();
   int64_t Offset = SizeOffset.second.getSExtValue();
   // Three checks are required to ensure safety:
diff --git a/llvm/test/Instrumentation/AddressSanitizer/no-global-ctors.ll b/llvm/test/Instrumentation/AddressSanitizer/no-global-ctors.ll
new file mode 100644
--- /dev/null
+++ b/llvm/test/Instrumentation/AddressSanitizer/no-global-ctors.ll
@@ -0,0 +1,17 @@
+; Check default behaviour still emits ctors
+; RUN: opt < %s -passes=asan -S | \
+; RUN:   FileCheck -check-prefix=CHECK-DEFAULT %s
+; CHECK-DEFAULT: llvm.global_ctor{{.+}}asan.module_ctor
+; CHECK-DEFAULT: define internal void @asan.module_ctor
+
+; Check with ctor emission disabled
+; RUN: opt < %s -passes=asan \
+; RUN:   -asan-constructor-kind=none -S | \
+; RUN:   FileCheck %s
+; CHECK-NOT: llvm.global_ctor{{.+}}asan.module_ctor
+; CHECK-NOT: define internal void @asan.module_ctor
+
+target datalayout = "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
+target triple = "x86_64-apple-macosx11.0.0"
+
+@foo = dso_local global i32 0, align 4