Diffstat (limited to 'lib/Transforms/Instrumentation/AddressSanitizer.cpp')
-rw-r--r-- | lib/Transforms/Instrumentation/AddressSanitizer.cpp | 384 |
1 file changed, 296 insertions, 88 deletions
diff --git a/lib/Transforms/Instrumentation/AddressSanitizer.cpp b/lib/Transforms/Instrumentation/AddressSanitizer.cpp index a9df5e5898ae3..43d1b377f858a 100644 --- a/lib/Transforms/Instrumentation/AddressSanitizer.cpp +++ b/lib/Transforms/Instrumentation/AddressSanitizer.cpp @@ -13,14 +13,11 @@ // //===----------------------------------------------------------------------===// -#include "llvm/Transforms/Instrumentation.h" #include "llvm/ADT/ArrayRef.h" #include "llvm/ADT/DenseMap.h" -#include "llvm/ADT/DenseSet.h" #include "llvm/ADT/DepthFirstIterator.h" #include "llvm/ADT/SetVector.h" #include "llvm/ADT/SmallSet.h" -#include "llvm/ADT/SmallString.h" #include "llvm/ADT/SmallVector.h" #include "llvm/ADT/Statistic.h" #include "llvm/ADT/StringExtras.h" @@ -48,6 +45,7 @@ #include "llvm/Support/Endian.h" #include "llvm/Support/SwapByteOrder.h" #include "llvm/Support/raw_ostream.h" +#include "llvm/Transforms/Instrumentation.h" #include "llvm/Transforms/Scalar.h" #include "llvm/Transforms/Utils/ASanStackFrameLayout.h" #include "llvm/Transforms/Utils/BasicBlockUtils.h" @@ -65,17 +63,23 @@ using namespace llvm; static const uint64_t kDefaultShadowScale = 3; static const uint64_t kDefaultShadowOffset32 = 1ULL << 29; -static const uint64_t kIOSShadowOffset32 = 1ULL << 30; static const uint64_t kDefaultShadowOffset64 = 1ULL << 44; +static const uint64_t kIOSShadowOffset32 = 1ULL << 30; +static const uint64_t kIOSShadowOffset64 = 0x120200000; +static const uint64_t kIOSSimShadowOffset32 = 1ULL << 30; +static const uint64_t kIOSSimShadowOffset64 = kDefaultShadowOffset64; static const uint64_t kSmallX86_64ShadowOffset = 0x7FFF8000; // < 2G. static const uint64_t kLinuxKasan_ShadowOffset64 = 0xdffffc0000000000; static const uint64_t kPPC64_ShadowOffset64 = 1ULL << 41; +static const uint64_t kSystemZ_ShadowOffset64 = 1ULL << 52; static const uint64_t kMIPS32_ShadowOffset32 = 0x0aaa0000; static const uint64_t kMIPS64_ShadowOffset64 = 1ULL << 37; static const uint64_t kAArch64_ShadowOffset64 = 1ULL << 36; static const uint64_t kFreeBSD_ShadowOffset32 = 1ULL << 30; static const uint64_t kFreeBSD_ShadowOffset64 = 1ULL << 46; static const uint64_t kWindowsShadowOffset32 = 3ULL << 28; +// TODO(wwchrome): Experimental for asan Win64, may change. +static const uint64_t kWindowsShadowOffset64 = 0x1ULL << 45; // 32TB. 
static const size_t kMinStackMallocSize = 1 << 6; // 64B static const size_t kMaxStackMallocSize = 1 << 16; // 64K @@ -89,11 +93,15 @@ static const char *const kAsanReportErrorTemplate = "__asan_report_"; static const char *const kAsanRegisterGlobalsName = "__asan_register_globals"; static const char *const kAsanUnregisterGlobalsName = "__asan_unregister_globals"; +static const char *const kAsanRegisterImageGlobalsName = + "__asan_register_image_globals"; +static const char *const kAsanUnregisterImageGlobalsName = + "__asan_unregister_image_globals"; static const char *const kAsanPoisonGlobalsName = "__asan_before_dynamic_init"; static const char *const kAsanUnpoisonGlobalsName = "__asan_after_dynamic_init"; static const char *const kAsanInitName = "__asan_init"; static const char *const kAsanVersionCheckName = - "__asan_version_mismatch_check_v6"; + "__asan_version_mismatch_check_v8"; static const char *const kAsanPtrCmp = "__sanitizer_ptr_cmp"; static const char *const kAsanPtrSub = "__sanitizer_ptr_sub"; static const char *const kAsanHandleNoReturnName = "__asan_handle_no_return"; @@ -101,13 +109,16 @@ static const int kMaxAsanStackMallocSizeClass = 10; static const char *const kAsanStackMallocNameTemplate = "__asan_stack_malloc_"; static const char *const kAsanStackFreeNameTemplate = "__asan_stack_free_"; static const char *const kAsanGenPrefix = "__asan_gen_"; +static const char *const kODRGenPrefix = "__odr_asan_gen_"; static const char *const kSanCovGenPrefix = "__sancov_gen_"; static const char *const kAsanPoisonStackMemoryName = "__asan_poison_stack_memory"; static const char *const kAsanUnpoisonStackMemoryName = "__asan_unpoison_stack_memory"; +static const char *const kAsanGlobalsRegisteredFlagName = + "__asan_globals_registered"; -static const char *const kAsanOptionDetectUAR = +static const char *const kAsanOptionDetectUseAfterReturn = "__asan_option_detect_stack_use_after_return"; static const char *const kAsanAllocaPoison = "__asan_alloca_poison"; @@ -154,8 +165,11 @@ static cl::opt<int> ClMaxInsnsToInstrumentPerBB( static cl::opt<bool> ClStack("asan-stack", cl::desc("Handle stack memory"), cl::Hidden, cl::init(true)); static cl::opt<bool> ClUseAfterReturn("asan-use-after-return", - cl::desc("Check return-after-free"), + cl::desc("Check stack-use-after-return"), cl::Hidden, cl::init(true)); +static cl::opt<bool> ClUseAfterScope("asan-use-after-scope", + cl::desc("Check stack-use-after-scope"), + cl::Hidden, cl::init(false)); // This flag may need to be replaced with -f[no]asan-globals. static cl::opt<bool> ClGlobals("asan-globals", cl::desc("Handle global objects"), cl::Hidden, @@ -192,10 +206,14 @@ static cl::opt<bool> ClSkipPromotableAllocas( // These flags allow to change the shadow mapping. // The shadow mapping looks like -// Shadow = (Mem >> scale) + (1 << offset_log) +// Shadow = (Mem >> scale) + offset static cl::opt<int> ClMappingScale("asan-mapping-scale", cl::desc("scale of asan shadow mapping"), cl::Hidden, cl::init(0)); +static cl::opt<unsigned long long> ClMappingOffset( + "asan-mapping-offset", + cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"), cl::Hidden, + cl::init(0)); // Optimization flags. Not user visible, used mostly for testing // and benchmarking the tool. 
@@ -211,11 +229,6 @@ static cl::opt<bool> ClOptStack( "asan-opt-stack", cl::desc("Don't instrument scalar stack variables"), cl::Hidden, cl::init(false)); -static cl::opt<bool> ClCheckLifetime( - "asan-check-lifetime", - cl::desc("Use llvm.lifetime intrinsics to insert extra checks"), cl::Hidden, - cl::init(false)); - static cl::opt<bool> ClDynamicAllocaStack( "asan-stack-dynamic-alloca", cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden, @@ -226,6 +239,19 @@ static cl::opt<uint32_t> ClForceExperiment( cl::desc("Force optimization experiment (for testing)"), cl::Hidden, cl::init(0)); +static cl::opt<bool> + ClUsePrivateAliasForGlobals("asan-use-private-alias", + cl::desc("Use private aliases for global" + " variables"), + cl::Hidden, cl::init(false)); + +static cl::opt<bool> + ClUseMachOGlobalsSection("asan-globals-live-support", + cl::desc("Use linker features to support dead " + "code stripping of globals " + "(Mach-O only)"), + cl::Hidden, cl::init(false)); + // Debug flags. static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, cl::init(0)); @@ -334,11 +360,13 @@ struct ShadowMapping { static ShadowMapping getShadowMapping(Triple &TargetTriple, int LongSize, bool IsKasan) { bool IsAndroid = TargetTriple.isAndroid(); - bool IsIOS = TargetTriple.isiOS(); + bool IsIOS = TargetTriple.isiOS() || TargetTriple.isWatchOS(); bool IsFreeBSD = TargetTriple.isOSFreeBSD(); bool IsLinux = TargetTriple.isOSLinux(); bool IsPPC64 = TargetTriple.getArch() == llvm::Triple::ppc64 || TargetTriple.getArch() == llvm::Triple::ppc64le; + bool IsSystemZ = TargetTriple.getArch() == llvm::Triple::systemz; + bool IsX86 = TargetTriple.getArch() == llvm::Triple::x86; bool IsX86_64 = TargetTriple.getArch() == llvm::Triple::x86_64; bool IsMIPS32 = TargetTriple.getArch() == llvm::Triple::mips || TargetTriple.getArch() == llvm::Triple::mipsel; @@ -359,7 +387,8 @@ static ShadowMapping getShadowMapping(Triple &TargetTriple, int LongSize, else if (IsFreeBSD) Mapping.Offset = kFreeBSD_ShadowOffset32; else if (IsIOS) - Mapping.Offset = kIOSShadowOffset32; + // If we're targeting iOS and x86, the binary is built for iOS simulator. + Mapping.Offset = IsX86 ? kIOSSimShadowOffset32 : kIOSShadowOffset32; else if (IsWindows) Mapping.Offset = kWindowsShadowOffset32; else @@ -367,6 +396,8 @@ static ShadowMapping getShadowMapping(Triple &TargetTriple, int LongSize, } else { // LongSize == 64 if (IsPPC64) Mapping.Offset = kPPC64_ShadowOffset64; + else if (IsSystemZ) + Mapping.Offset = kSystemZ_ShadowOffset64; else if (IsFreeBSD) Mapping.Offset = kFreeBSD_ShadowOffset64; else if (IsLinux && IsX86_64) { @@ -374,8 +405,13 @@ static ShadowMapping getShadowMapping(Triple &TargetTriple, int LongSize, Mapping.Offset = kLinuxKasan_ShadowOffset64; else Mapping.Offset = kSmallX86_64ShadowOffset; + } else if (IsWindows && IsX86_64) { + Mapping.Offset = kWindowsShadowOffset64; } else if (IsMIPS64) Mapping.Offset = kMIPS64_ShadowOffset64; + else if (IsIOS) + // If we're targeting iOS and x86, the binary is built for iOS simulator. + Mapping.Offset = IsX86_64 ? 
kIOSSimShadowOffset64 : kIOSShadowOffset64; else if (IsAArch64) Mapping.Offset = kAArch64_ShadowOffset64; else @@ -383,14 +419,20 @@ static ShadowMapping getShadowMapping(Triple &TargetTriple, int LongSize, } Mapping.Scale = kDefaultShadowScale; - if (ClMappingScale) { + if (ClMappingScale.getNumOccurrences() > 0) { Mapping.Scale = ClMappingScale; } + if (ClMappingOffset.getNumOccurrences() > 0) { + Mapping.Offset = ClMappingOffset; + } + // OR-ing shadow offset if more efficient (at least on x86) if the offset // is a power of two, but on ppc64 we have to use add since the shadow - // offset is not necessary 1/8-th of the address space. - Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 + // offset is not necessary 1/8-th of the address space. On SystemZ, + // we could OR the constant in a single instruction, but it's more + // efficient to load it once and use indexed addressing. + Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !(Mapping.Offset & (Mapping.Offset - 1)); return Mapping; @@ -404,9 +446,11 @@ static size_t RedzoneSizeForScale(int MappingScale) { /// AddressSanitizer: instrument the code in module to find memory bugs. struct AddressSanitizer : public FunctionPass { - explicit AddressSanitizer(bool CompileKernel = false, bool Recover = false) + explicit AddressSanitizer(bool CompileKernel = false, bool Recover = false, + bool UseAfterScope = false) : FunctionPass(ID), CompileKernel(CompileKernel || ClEnableKasan), - Recover(Recover || ClRecover) { + Recover(Recover || ClRecover), + UseAfterScope(UseAfterScope || ClUseAfterScope) { initializeAddressSanitizerPass(*PassRegistry::getPassRegistry()); } const char *getPassName() const override { @@ -417,19 +461,20 @@ struct AddressSanitizer : public FunctionPass { AU.addRequired<TargetLibraryInfoWrapperPass>(); } uint64_t getAllocaSizeInBytes(AllocaInst *AI) const { + uint64_t ArraySize = 1; + if (AI->isArrayAllocation()) { + ConstantInt *CI = dyn_cast<ConstantInt>(AI->getArraySize()); + assert(CI && "non-constant array size"); + ArraySize = CI->getZExtValue(); + } Type *Ty = AI->getAllocatedType(); uint64_t SizeInBytes = AI->getModule()->getDataLayout().getTypeAllocSize(Ty); - return SizeInBytes; + return SizeInBytes * ArraySize; } /// Check if we want (and can) handle this alloca. bool isInterestingAlloca(AllocaInst &AI); - // Check if we have dynamic alloca. - bool isDynamicAlloca(AllocaInst &AI) const { - return AI.isArrayAllocation() || !AI.isStaticAlloca(); - } - /// If it is an interesting memory access, return the PointerOperand /// and set IsWrite/Alignment. Otherwise return nullptr. 
Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite, @@ -483,6 +528,7 @@ struct AddressSanitizer : public FunctionPass { int LongSize; bool CompileKernel; bool Recover; + bool UseAfterScope; Type *IntptrTy; ShadowMapping Mapping; DominatorTree *DT; @@ -519,6 +565,7 @@ class AddressSanitizerModule : public ModulePass { bool InstrumentGlobals(IRBuilder<> &IRB, Module &M); bool ShouldInstrumentGlobal(GlobalVariable *G); + bool ShouldUseMachOGlobalsSection() const; void poisonOneInitializer(Function &GlobalInit, GlobalValue *ModuleName); void createInitializerPoisonCalls(Module &M, GlobalValue *ModuleName); size_t MinRedzoneSizeForGlobal() const { @@ -536,6 +583,8 @@ class AddressSanitizerModule : public ModulePass { Function *AsanUnpoisonGlobals; Function *AsanRegisterGlobals; Function *AsanUnregisterGlobals; + Function *AsanRegisterImageGlobals; + Function *AsanUnregisterImageGlobals; }; // Stack poisoning does not play well with exception handling. @@ -680,7 +729,7 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> { } StackAlignment = std::max(StackAlignment, AI.getAlignment()); - if (ASan.isDynamicAlloca(AI)) + if (!AI.isStaticAlloca()) DynamicAllocaVec.push_back(&AI); else AllocaVec.push_back(&AI); @@ -692,7 +741,8 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> { Intrinsic::ID ID = II.getIntrinsicID(); if (ID == Intrinsic::stackrestore) StackRestoreVec.push_back(&II); if (ID == Intrinsic::localescape) LocalEscapeCall = &II; - if (!ClCheckLifetime) return; + if (!ASan.UseAfterScope) + return; if (ID != Intrinsic::lifetime_start && ID != Intrinsic::lifetime_end) return; // Found lifetime intrinsic, add ASan instrumentation if necessary. @@ -707,7 +757,8 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> { return; // Find alloca instruction that corresponds to llvm.lifetime argument. AllocaInst *AI = findAllocaForValue(II.getArgOperand(1)); - if (!AI) return; + if (!AI || !ASan.isInterestingAlloca(*AI)) + return; bool DoPoison = (ID == Intrinsic::lifetime_end); AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison}; AllocaPoisonCallVec.push_back(APC); @@ -760,9 +811,10 @@ INITIALIZE_PASS_END( "AddressSanitizer: detects use-after-free and out-of-bounds bugs.", false, false) FunctionPass *llvm::createAddressSanitizerFunctionPass(bool CompileKernel, - bool Recover) { + bool Recover, + bool UseAfterScope) { assert(!CompileKernel || Recover); - return new AddressSanitizer(CompileKernel, Recover); + return new AddressSanitizer(CompileKernel, Recover, UseAfterScope); } char AddressSanitizerModule::ID = 0; @@ -792,7 +844,7 @@ static GlobalVariable *createPrivateGlobalForString(Module &M, StringRef Str, GlobalVariable *GV = new GlobalVariable(M, StrConst->getType(), true, GlobalValue::PrivateLinkage, StrConst, kAsanGenPrefix); - if (AllowMerging) GV->setUnnamedAddr(true); + if (AllowMerging) GV->setUnnamedAddr(GlobalValue::UnnamedAddr::Global); GV->setAlignment(1); // Strings may not be merged w/o setting align 1. 
return GV; } @@ -809,13 +861,23 @@ static GlobalVariable *createPrivateGlobalForSourceLoc(Module &M, auto GV = new GlobalVariable(M, LocStruct->getType(), true, GlobalValue::PrivateLinkage, LocStruct, kAsanGenPrefix); - GV->setUnnamedAddr(true); + GV->setUnnamedAddr(GlobalValue::UnnamedAddr::Global); return GV; } -static bool GlobalWasGeneratedByAsan(GlobalVariable *G) { - return G->getName().find(kAsanGenPrefix) == 0 || - G->getName().find(kSanCovGenPrefix) == 0; +/// \brief Check if \p G has been created by a trusted compiler pass. +static bool GlobalWasGeneratedByCompiler(GlobalVariable *G) { + // Do not instrument asan globals. + if (G->getName().startswith(kAsanGenPrefix) || + G->getName().startswith(kSanCovGenPrefix) || + G->getName().startswith(kODRGenPrefix)) + return true; + + // Do not instrument gcov counter arrays. + if (G->getName() == "__llvm_gcov_ctr") + return true; + + return false; } Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) { @@ -858,7 +920,7 @@ bool AddressSanitizer::isInterestingAlloca(AllocaInst &AI) { bool IsInteresting = (AI.getAllocatedType()->isSized() && // alloca() may be called with 0 size, ignore it. - getAllocaSizeInBytes(&AI) > 0 && + ((!AI.isStaticAlloca()) || getAllocaSizeInBytes(&AI) > 0) && // We are only interested in allocas not promotable to registers. // Promotable allocas are common under -O0. (!ClSkipPromotableAllocas || !isAllocaPromotable(&AI)) && @@ -907,6 +969,14 @@ Value *AddressSanitizer::isInterestingMemoryAccess(Instruction *I, PtrOperand = XCHG->getPointerOperand(); } + // Do not instrument acesses from different address spaces; we cannot deal + // with them. + if (PtrOperand) { + Type *PtrTy = cast<PointerType>(PtrOperand->getType()->getScalarType()); + if (PtrTy->getPointerAddressSpace() != 0) + return nullptr; + } + // Treat memory accesses to promotable allocas as non-interesting since they // will not cause memory violations. This greatly speeds up the instrumented // executable at -O0. @@ -948,9 +1018,9 @@ void AddressSanitizer::instrumentPointerComparisonOrSubtraction( IRBuilder<> IRB(I); Function *F = isa<ICmpInst>(I) ? 
AsanPtrCmpFunction : AsanPtrSubFunction; Value *Param[2] = {I->getOperand(0), I->getOperand(1)}; - for (int i = 0; i < 2; i++) { - if (Param[i]->getType()->isPointerTy()) - Param[i] = IRB.CreatePointerCast(Param[i], IntptrTy); + for (Value *&i : Param) { + if (i->getType()->isPointerTy()) + i = IRB.CreatePointerCast(i, IntptrTy); } IRB.CreateCall(F, Param); } @@ -1048,7 +1118,7 @@ Instruction *AddressSanitizer::generateCrashCode(Instruction *InsertBefore, Value *AddressSanitizer::createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong, Value *ShadowValue, uint32_t TypeSize) { - size_t Granularity = 1 << Mapping.Scale; + size_t Granularity = static_cast<size_t>(1) << Mapping.Scale; // Addr & (Granularity - 1) Value *LastAccessedByte = IRB.CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1)); @@ -1091,7 +1161,7 @@ void AddressSanitizer::instrumentAddress(Instruction *OrigIns, IRB.CreateLoad(IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy)); Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal); - size_t Granularity = 1 << Mapping.Scale; + size_t Granularity = 1ULL << Mapping.Scale; TerminatorInst *CrashTerm = nullptr; if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) { @@ -1184,13 +1254,13 @@ void AddressSanitizerModule::createInitializerPoisonCalls( } bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) { - Type *Ty = cast<PointerType>(G->getType())->getElementType(); + Type *Ty = G->getValueType(); DEBUG(dbgs() << "GLOBAL: " << *G << "\n"); if (GlobalsMD.get(G).IsBlacklisted) return false; if (!Ty->isSized()) return false; if (!G->hasInitializer()) return false; - if (GlobalWasGeneratedByAsan(G)) return false; // Our own global. + if (GlobalWasGeneratedByCompiler(G)) return false; // Our own globals. // Touch only those globals that will not be defined in other modules. // Don't handle ODR linkage types and COMDATs since other modules may be built // without ASan. @@ -1207,12 +1277,12 @@ bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) { if (G->getAlignment() > MinRedzoneSizeForGlobal()) return false; if (G->hasSection()) { - StringRef Section(G->getSection()); + StringRef Section = G->getSection(); // Globals from llvm.metadata aren't emitted, do not instrument them. if (Section == "llvm.metadata") return false; // Do not instrument globals from special LLVM sections. - if (Section.find("__llvm") != StringRef::npos) return false; + if (Section.find("__llvm") != StringRef::npos || Section.find("__LLVM") != StringRef::npos) return false; // Do not instrument function pointers to initialization and termination // routines: dynamic linker will not properly handle redzones. @@ -1271,8 +1341,29 @@ bool AddressSanitizerModule::ShouldInstrumentGlobal(GlobalVariable *G) { return true; } +// On Mach-O platforms, we emit global metadata in a separate section of the +// binary in order to allow the linker to properly dead strip. This is only +// supported on recent versions of ld64. 
+bool AddressSanitizerModule::ShouldUseMachOGlobalsSection() const { + if (!ClUseMachOGlobalsSection) + return false; + + if (!TargetTriple.isOSBinFormatMachO()) + return false; + + if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11)) + return true; + if (TargetTriple.isiOS() /* or tvOS */ && !TargetTriple.isOSVersionLT(9)) + return true; + if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2)) + return true; + + return false; +} + void AddressSanitizerModule::initializeCallbacks(Module &M) { IRBuilder<> IRB(*C); + // Declare our poisoning and unpoisoning functions. AsanPoisonGlobals = checkSanitizerInterfaceFunction(M.getOrInsertFunction( kAsanPoisonGlobalsName, IRB.getVoidTy(), IntptrTy, nullptr)); @@ -1280,6 +1371,7 @@ void AddressSanitizerModule::initializeCallbacks(Module &M) { AsanUnpoisonGlobals = checkSanitizerInterfaceFunction(M.getOrInsertFunction( kAsanUnpoisonGlobalsName, IRB.getVoidTy(), nullptr)); AsanUnpoisonGlobals->setLinkage(Function::ExternalLinkage); + // Declare functions that register/unregister globals. AsanRegisterGlobals = checkSanitizerInterfaceFunction(M.getOrInsertFunction( kAsanRegisterGlobalsName, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr)); @@ -1288,6 +1380,18 @@ void AddressSanitizerModule::initializeCallbacks(Module &M) { M.getOrInsertFunction(kAsanUnregisterGlobalsName, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr)); AsanUnregisterGlobals->setLinkage(Function::ExternalLinkage); + + // Declare the functions that find globals in a shared object and then invoke + // the (un)register function on them. + AsanRegisterImageGlobals = checkSanitizerInterfaceFunction( + M.getOrInsertFunction(kAsanRegisterImageGlobalsName, + IRB.getVoidTy(), IntptrTy, nullptr)); + AsanRegisterImageGlobals->setLinkage(Function::ExternalLinkage); + + AsanUnregisterImageGlobals = checkSanitizerInterfaceFunction( + M.getOrInsertFunction(kAsanUnregisterImageGlobalsName, + IRB.getVoidTy(), IntptrTy, nullptr)); + AsanUnregisterImageGlobals->setLinkage(Function::ExternalLinkage); } // This function replaces all global variables with new variables that have @@ -1313,10 +1417,11 @@ bool AddressSanitizerModule::InstrumentGlobals(IRBuilder<> &IRB, Module &M) { // const char *module_name; // size_t has_dynamic_init; // void *source_location; + // size_t odr_indicator; // We initialize an array of such structures and pass it to a run-time call. StructType *GlobalStructTy = StructType::get(IntptrTy, IntptrTy, IntptrTy, IntptrTy, IntptrTy, - IntptrTy, IntptrTy, nullptr); + IntptrTy, IntptrTy, IntptrTy, nullptr); SmallVector<Constant *, 16> Initializers(n); bool HasDynamicallyInitializedGlobals = false; @@ -1332,14 +1437,14 @@ bool AddressSanitizerModule::InstrumentGlobals(IRBuilder<> &IRB, Module &M) { GlobalVariable *G = GlobalsToChange[i]; auto MD = GlobalsMD.get(G); + StringRef NameForGlobal = G->getName(); // Create string holding the global name (use global name from metadata // if it's available, otherwise just write the name of global variable). GlobalVariable *Name = createPrivateGlobalForString( - M, MD.Name.empty() ? G->getName() : MD.Name, + M, MD.Name.empty() ? 
NameForGlobal : MD.Name, /*AllowMerging*/ true); - PointerType *PtrTy = cast<PointerType>(G->getType()); - Type *Ty = PtrTy->getElementType(); + Type *Ty = G->getValueType(); uint64_t SizeInBytes = DL.getTypeAllocSize(Ty); uint64_t MinRZ = MinRedzoneSizeForGlobal(); // MinRZ <= RZ <= kMaxGlobalRedzone @@ -1384,41 +1489,125 @@ bool AddressSanitizerModule::InstrumentGlobals(IRBuilder<> &IRB, Module &M) { SourceLoc = ConstantInt::get(IntptrTy, 0); } + Constant *ODRIndicator = ConstantExpr::getNullValue(IRB.getInt8PtrTy()); + GlobalValue *InstrumentedGlobal = NewGlobal; + + bool CanUsePrivateAliases = TargetTriple.isOSBinFormatELF(); + if (CanUsePrivateAliases && ClUsePrivateAliasForGlobals) { + // Create local alias for NewGlobal to avoid crash on ODR between + // instrumented and non-instrumented libraries. + auto *GA = GlobalAlias::create(GlobalValue::InternalLinkage, + NameForGlobal + M.getName(), NewGlobal); + + // With local aliases, we need to provide another externally visible + // symbol __odr_asan_XXX to detect ODR violation. + auto *ODRIndicatorSym = + new GlobalVariable(M, IRB.getInt8Ty(), false, Linkage, + Constant::getNullValue(IRB.getInt8Ty()), + kODRGenPrefix + NameForGlobal, nullptr, + NewGlobal->getThreadLocalMode()); + + // Set meaningful attributes for indicator symbol. + ODRIndicatorSym->setVisibility(NewGlobal->getVisibility()); + ODRIndicatorSym->setDLLStorageClass(NewGlobal->getDLLStorageClass()); + ODRIndicatorSym->setAlignment(1); + ODRIndicator = ODRIndicatorSym; + InstrumentedGlobal = GA; + } + Initializers[i] = ConstantStruct::get( - GlobalStructTy, ConstantExpr::getPointerCast(NewGlobal, IntptrTy), + GlobalStructTy, + ConstantExpr::getPointerCast(InstrumentedGlobal, IntptrTy), ConstantInt::get(IntptrTy, SizeInBytes), ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize), ConstantExpr::getPointerCast(Name, IntptrTy), ConstantExpr::getPointerCast(ModuleName, IntptrTy), - ConstantInt::get(IntptrTy, MD.IsDynInit), SourceLoc, nullptr); + ConstantInt::get(IntptrTy, MD.IsDynInit), SourceLoc, + ConstantExpr::getPointerCast(ODRIndicator, IntptrTy), nullptr); if (ClInitializers && MD.IsDynInit) HasDynamicallyInitializedGlobals = true; DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n"); } - ArrayType *ArrayOfGlobalStructTy = ArrayType::get(GlobalStructTy, n); - GlobalVariable *AllGlobals = new GlobalVariable( - M, ArrayOfGlobalStructTy, false, GlobalVariable::InternalLinkage, - ConstantArray::get(ArrayOfGlobalStructTy, Initializers), ""); + + GlobalVariable *AllGlobals = nullptr; + GlobalVariable *RegisteredFlag = nullptr; + + // On recent Mach-O platforms, we emit the global metadata in a way that + // allows the linker to properly strip dead globals. + if (ShouldUseMachOGlobalsSection()) { + // RegisteredFlag serves two purposes. First, we can pass it to dladdr() + // to look up the loaded image that contains it. Second, we can store in it + // whether registration has already occurred, to prevent duplicate + // registration. + // + // Common linkage allows us to coalesce needles defined in each object + // file so that there's only one per shared library. + RegisteredFlag = new GlobalVariable( + M, IntptrTy, false, GlobalVariable::CommonLinkage, + ConstantInt::get(IntptrTy, 0), kAsanGlobalsRegisteredFlagName); + + // We also emit a structure which binds the liveness of the global + // variable to the metadata struct. 
+ StructType *LivenessTy = StructType::get(IntptrTy, IntptrTy, nullptr); + + for (size_t i = 0; i < n; i++) { + GlobalVariable *Metadata = new GlobalVariable( + M, GlobalStructTy, false, GlobalVariable::InternalLinkage, + Initializers[i], ""); + Metadata->setSection("__DATA,__asan_globals,regular"); + Metadata->setAlignment(1); // don't leave padding in between + + auto LivenessBinder = ConstantStruct::get(LivenessTy, + Initializers[i]->getAggregateElement(0u), + ConstantExpr::getPointerCast(Metadata, IntptrTy), + nullptr); + GlobalVariable *Liveness = new GlobalVariable( + M, LivenessTy, false, GlobalVariable::InternalLinkage, + LivenessBinder, ""); + Liveness->setSection("__DATA,__asan_liveness,regular,live_support"); + } + } else { + // On all other platfoms, we just emit an array of global metadata + // structures. + ArrayType *ArrayOfGlobalStructTy = ArrayType::get(GlobalStructTy, n); + AllGlobals = new GlobalVariable( + M, ArrayOfGlobalStructTy, false, GlobalVariable::InternalLinkage, + ConstantArray::get(ArrayOfGlobalStructTy, Initializers), ""); + } // Create calls for poisoning before initializers run and unpoisoning after. if (HasDynamicallyInitializedGlobals) createInitializerPoisonCalls(M, ModuleName); - IRB.CreateCall(AsanRegisterGlobals, - {IRB.CreatePointerCast(AllGlobals, IntptrTy), - ConstantInt::get(IntptrTy, n)}); - // We also need to unregister globals at the end, e.g. when a shared library + // Create a call to register the globals with the runtime. + if (ShouldUseMachOGlobalsSection()) { + IRB.CreateCall(AsanRegisterImageGlobals, + {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)}); + } else { + IRB.CreateCall(AsanRegisterGlobals, + {IRB.CreatePointerCast(AllGlobals, IntptrTy), + ConstantInt::get(IntptrTy, n)}); + } + + // We also need to unregister globals at the end, e.g., when a shared library // gets closed. Function *AsanDtorFunction = Function::Create(FunctionType::get(Type::getVoidTy(*C), false), GlobalValue::InternalLinkage, kAsanModuleDtorName, &M); BasicBlock *AsanDtorBB = BasicBlock::Create(*C, "", AsanDtorFunction); IRBuilder<> IRB_Dtor(ReturnInst::Create(*C, AsanDtorBB)); - IRB_Dtor.CreateCall(AsanUnregisterGlobals, - {IRB.CreatePointerCast(AllGlobals, IntptrTy), - ConstantInt::get(IntptrTy, n)}); + + if (ShouldUseMachOGlobalsSection()) { + IRB_Dtor.CreateCall(AsanUnregisterImageGlobals, + {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)}); + } else { + IRB_Dtor.CreateCall(AsanUnregisterGlobals, + {IRB.CreatePointerCast(AllGlobals, IntptrTy), + ConstantInt::get(IntptrTy, n)}); + } + appendToGlobalDtors(M, AsanDtorFunction, kAsanCtorAndDtorPriority); DEBUG(dbgs() << M); @@ -1467,7 +1656,7 @@ void AddressSanitizer::initializeCallbacks(Module &M) { IRB.getVoidTy(), IntptrTy, IntptrTy, ExpType, nullptr)); for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes; AccessSizeIndex++) { - const std::string Suffix = TypeStr + itostr(1 << AccessSizeIndex); + const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex); AsanErrorCallback[AccessIsWrite][Exp][AccessSizeIndex] = checkSanitizerInterfaceFunction(M.getOrInsertFunction( kAsanReportErrorTemplate + ExpStr + Suffix + EndingStr, @@ -1608,6 +1797,8 @@ bool AddressSanitizer::runOnFunction(Function &F) { bool IsWrite; unsigned Alignment; uint64_t TypeSize; + const TargetLibraryInfo *TLI = + &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(); // Fill the set of memory operations to instrument. 
for (auto &BB : F) { @@ -1636,6 +1827,8 @@ bool AddressSanitizer::runOnFunction(Function &F) { TempsToInstrument.clear(); if (CS.doesNotReturn()) NoReturnCalls.push_back(CS.getInstruction()); } + if (CallInst *CI = dyn_cast<CallInst>(&Inst)) + maybeMarkSanitizerLibraryCallNoBuiltin(CI, TLI); continue; } ToInstrument.push_back(&Inst); @@ -1648,8 +1841,6 @@ bool AddressSanitizer::runOnFunction(Function &F) { CompileKernel || (ClInstrumentationWithCallsThreshold >= 0 && ToInstrument.size() > (unsigned)ClInstrumentationWithCallsThreshold); - const TargetLibraryInfo *TLI = - &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(); const DataLayout &DL = F.getParent()->getDataLayout(); ObjectSizeOffsetVisitor ObjSizeVis(DL, TLI, F.getContext(), /*RoundToAlign=*/true); @@ -1713,12 +1904,15 @@ void FunctionStackPoisoner::initializeCallbacks(Module &M) { M.getOrInsertFunction(kAsanStackFreeNameTemplate + Suffix, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr)); } - AsanPoisonStackMemoryFunc = checkSanitizerInterfaceFunction( - M.getOrInsertFunction(kAsanPoisonStackMemoryName, IRB.getVoidTy(), - IntptrTy, IntptrTy, nullptr)); - AsanUnpoisonStackMemoryFunc = checkSanitizerInterfaceFunction( - M.getOrInsertFunction(kAsanUnpoisonStackMemoryName, IRB.getVoidTy(), - IntptrTy, IntptrTy, nullptr)); + if (ASan.UseAfterScope) { + AsanPoisonStackMemoryFunc = checkSanitizerInterfaceFunction( + M.getOrInsertFunction(kAsanPoisonStackMemoryName, IRB.getVoidTy(), + IntptrTy, IntptrTy, nullptr)); + AsanUnpoisonStackMemoryFunc = checkSanitizerInterfaceFunction( + M.getOrInsertFunction(kAsanUnpoisonStackMemoryName, IRB.getVoidTy(), + IntptrTy, IntptrTy, nullptr)); + } + AsanAllocaPoisonFunc = checkSanitizerInterfaceFunction(M.getOrInsertFunction( kAsanAllocaPoison, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr)); AsanAllocasUnpoisonFunc = @@ -1825,13 +2019,21 @@ void FunctionStackPoisoner::poisonStack() { assert(AllocaVec.size() > 0 || DynamicAllocaVec.size() > 0); // Insert poison calls for lifetime intrinsics for alloca. - bool HavePoisonedAllocas = false; + bool HavePoisonedStaticAllocas = false; for (const auto &APC : AllocaPoisonCallVec) { assert(APC.InsBefore); assert(APC.AI); + assert(ASan.isInterestingAlloca(*APC.AI)); + bool IsDynamicAlloca = !(*APC.AI).isStaticAlloca(); + if (!ClInstrumentAllocas && IsDynamicAlloca) + continue; + IRBuilder<> IRB(APC.InsBefore); poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison); - HavePoisonedAllocas |= APC.DoPoison; + // Dynamic allocas will be unpoisoned unconditionally below in + // unpoisonDynamicAllocas. + // Flag that we need unpoison static allocas. + HavePoisonedStaticAllocas |= (APC.DoPoison && !IsDynamicAlloca); } if (ClInstrumentAllocas && DynamicAllocaVec.size() > 0) { @@ -1846,7 +2048,7 @@ void FunctionStackPoisoner::poisonStack() { int StackMallocIdx = -1; DebugLoc EntryDebugLocation; - if (auto SP = getDISubprogram(&F)) + if (auto SP = F.getSubprogram()) EntryDebugLocation = DebugLoc::get(SP->getScopeLine(), 0, SP); Instruction *InsBefore = AllocaVec[0]; @@ -1878,7 +2080,7 @@ void FunctionStackPoisoner::poisonStack() { // i.e. 32 bytes on 64-bit platforms and 16 bytes in 32-bit platforms. 
size_t MinHeaderSize = ASan.LongSize / 2; ASanStackFrameLayout L; - ComputeASanStackFrameLayout(SVD, 1UL << Mapping.Scale, MinHeaderSize, &L); + ComputeASanStackFrameLayout(SVD, 1ULL << Mapping.Scale, MinHeaderSize, &L); DEBUG(dbgs() << L.DescriptionString << " --- " << L.FrameSize << "\n"); uint64_t LocalStackSize = L.FrameSize; bool DoStackMalloc = ClUseAfterReturn && !ASan.CompileKernel && @@ -1904,13 +2106,13 @@ void FunctionStackPoisoner::poisonStack() { // ? __asan_stack_malloc_N(LocalStackSize) // : nullptr; // void *LocalStackBase = (FakeStack) ? FakeStack : alloca(LocalStackSize); - Constant *OptionDetectUAR = F.getParent()->getOrInsertGlobal( - kAsanOptionDetectUAR, IRB.getInt32Ty()); - Value *UARIsEnabled = - IRB.CreateICmpNE(IRB.CreateLoad(OptionDetectUAR), + Constant *OptionDetectUseAfterReturn = F.getParent()->getOrInsertGlobal( + kAsanOptionDetectUseAfterReturn, IRB.getInt32Ty()); + Value *UseAfterReturnIsEnabled = + IRB.CreateICmpNE(IRB.CreateLoad(OptionDetectUseAfterReturn), Constant::getNullValue(IRB.getInt32Ty())); Instruction *Term = - SplitBlockAndInsertIfThen(UARIsEnabled, InsBefore, false); + SplitBlockAndInsertIfThen(UseAfterReturnIsEnabled, InsBefore, false); IRBuilder<> IRBIf(Term); IRBIf.SetCurrentDebugLocation(EntryDebugLocation); StackMallocIdx = StackMallocSizeClass(LocalStackSize); @@ -1920,7 +2122,7 @@ void FunctionStackPoisoner::poisonStack() { ConstantInt::get(IntptrTy, LocalStackSize)); IRB.SetInsertPoint(InsBefore); IRB.SetCurrentDebugLocation(EntryDebugLocation); - FakeStack = createPHI(IRB, UARIsEnabled, FakeStackValue, Term, + FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term, ConstantInt::get(IntptrTy, 0)); Value *NoFakeStack = @@ -1977,6 +2179,16 @@ void FunctionStackPoisoner::poisonStack() { Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB); poisonRedZones(L.ShadowBytes, IRB, ShadowBase, true); + auto UnpoisonStack = [&](IRBuilder<> &IRB) { + if (HavePoisonedStaticAllocas) { + // If we poisoned some allocas in llvm.lifetime analysis, + // unpoison whole stack frame now. + poisonAlloca(LocalStackBase, LocalStackSize, IRB, false); + } else { + poisonRedZones(L.ShadowBytes, IRB, ShadowBase, false); + } + }; + // (Un)poison the stack before all ret instructions. for (auto Ret : RetVec) { IRBuilder<> IRBRet(Ret); @@ -2021,13 +2233,9 @@ void FunctionStackPoisoner::poisonStack() { } IRBuilder<> IRBElse(ElseTerm); - poisonRedZones(L.ShadowBytes, IRBElse, ShadowBase, false); - } else if (HavePoisonedAllocas) { - // If we poisoned some allocas in llvm.lifetime analysis, - // unpoison whole stack frame now. - poisonAlloca(LocalStackBase, LocalStackSize, IRBRet, false); + UnpoisonStack(IRBElse); } else { - poisonRedZones(L.ShadowBytes, IRBRet, ShadowBase, false); + UnpoisonStack(IRBRet); } } |
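
Note on the shadow mapping changes above: the patch corrects the comment to "Shadow = (Mem >> scale) + offset", adds the experimental -asan-mapping-offset override, and introduces new offsets for SystemZ, experimental Win64, and iOS/watchOS. Below is a minimal sketch of that computation; the constants are copied from the diff, but the computeShadowAddress() helper itself is illustrative and not part of the pass.

#include <cstdint>

// Constants copied from the diff; the helper only models
// "Shadow = (Mem >> Scale) + Offset".
static const uint64_t kDefaultShadowScale = 3;
static const uint64_t kSmallX86_64ShadowOffset = 0x7FFF8000;   // Linux x86_64
static const uint64_t kSystemZ_ShadowOffset64 = 1ULL << 52;    // new in this patch
static const uint64_t kWindowsShadowOffset64 = 0x1ULL << 45;   // experimental Win64

uint64_t computeShadowAddress(uint64_t Mem, uint64_t Offset,
                              unsigned Scale = kDefaultShadowScale,
                              bool OrShadowOffset = false) {
  uint64_t Shifted = Mem >> Scale;
  // Mapping.OrShadowOffset: when Offset is a power of two the pass can emit an
  // OR instead of an ADD; the patch keeps this disabled on PPC64, AArch64 and
  // now SystemZ.
  return OrShadowOffset ? (Shifted | Offset) : (Shifted + Offset);
}

With the default scale of 3 each shadow byte covers 8 application bytes, so -asan-mapping-offset only moves the additive term; the granularity is still controlled by -asan-mapping-scale.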
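
getAllocaSizeInBytes() now folds in the constant array size of an array alloca, and isInterestingAlloca() accepts dynamic allocas whose size cannot be computed statically. A standalone sketch of the new size computation (the LLVM calls are real APIs; the free-function wrapper is illustrative):

#include <cassert>
#include <cstdint>
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Mirrors the updated getAllocaSizeInBytes(): element size times the constant
// array size, so "alloca i32, i64 8" is reported as 32 bytes instead of 4.
static uint64_t allocaSizeInBytes(const AllocaInst &AI) {
  uint64_t ArraySize = 1;
  if (AI.isArrayAllocation()) {
    const auto *CI = dyn_cast<ConstantInt>(AI.getArraySize());
    assert(CI && "non-constant array size");
    ArraySize = CI->getZExtValue();
  }
  uint64_t ElemSize =
      AI.getModule()->getDataLayout().getTypeAllocSize(AI.getAllocatedType());
  return ElemSize * ArraySize;
}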
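
The ClCheckLifetime flag is replaced by ClUseAfterScope (-asan-use-after-scope, also threaded through createAddressSanitizerFunctionPass), which drives stack poisoning from llvm.lifetime.start/end markers. A hypothetical C++ reproducer for the bug class it targets (not taken from the patch):

// When the frontend emits lifetime markers for x's scope, the pass can poison
// x's stack slot at lifetime.end and the later read is reported.
int readAfterScope() {
  int *p = nullptr;
  {
    int x = 42;   // llvm.lifetime.start for x's alloca
    p = &x;
  }               // llvm.lifetime.end: slot becomes poisoned
  return *p;      // stack-use-after-scope: read of a poisoned slot
}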
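
For every instrumented global the pass now emits an eight-slot metadata record (the previous seven fields plus the new odr_indicator). On recent Mach-O targets each record is placed in __DATA,__asan_globals with a paired {global, metadata} binder in __DATA,__asan_liveness,regular,live_support, so ld64 dead-strips a global and its metadata together and the runtime is invoked through __asan_register_image_globals; on other platforms the records still form one internal array passed to __asan_register_globals. A rough C-level picture of the record, with field names taken from the comment in the diff (the struct is illustrative, not the runtime's declaration):

#include <cstdint>

// All slots are IntptrTy (pointer-sized integers) in the emitted constant.
struct AsanGlobalMetadata {
  uintptr_t beg;                // address of the global (or its private alias)
  uintptr_t size;               // original size of the global
  uintptr_t size_with_redzone;  // size including the right redzone
  uintptr_t name;               // pointer to the global's name string
  uintptr_t module_name;        // pointer to the module-name string
  uintptr_t has_dynamic_init;   // non-zero for dynamically initialized globals
  uintptr_t source_location;    // pointer to the source-location struct, or 0
  uintptr_t odr_indicator;      // new: pointer to the __odr_asan_gen_* flag, or 0
};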