//===- StackProtector.cpp - Stack Protector Insertion ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass inserts stack protectors into functions which need them. A variable
// with a random value in it is stored onto the stack before the local variables
// are allocated. Upon exiting the function, the stored value is checked. If it's
// changed, then there was some sort of violation and the program aborts.
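//
// For example, when a function is compiled with stack protection enabled
// (e.g. -fstack-protector), a function such as
//
//   void f() {
//     char Buf[64];
//     gets(Buf);
//   }
//
// receives a prologue that copies the guard value into a dedicated stack slot
// and an epilogue that re-checks that slot, diverting to __stack_chk_fail (or
// the target's equivalent failure handler) if the value has been overwritten.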
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/StackProtector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stack-protector"

STATISTIC(NumFunProtected, "Number of functions protected");
STATISTIC(NumAddrTaken, "Number of local variables that have their address"
                        " taken.");

static cl::opt<bool> EnableSelectionDAGSP("enable-selectiondag-sp",
                                          cl::init(true), cl::Hidden);

char StackProtector::ID = 0;

StackProtector::StackProtector() : FunctionPass(ID), SSPBufferSize(8) {
  initializeStackProtectorPass(*PassRegistry::getPassRegistry());
}

INITIALIZE_PASS_BEGIN(StackProtector, DEBUG_TYPE,
                      "Insert stack protectors", false, true)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_END(StackProtector, DEBUG_TYPE,
                    "Insert stack protectors", false, true)

FunctionPass *llvm::createStackProtectorPass() { return new StackProtector(); }

void StackProtector::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<TargetPassConfig>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

bool StackProtector::runOnFunction(Function &Fn) {
  F = &Fn;
  M = F->getParent();
  DominatorTreeWrapperPass *DTWP =
      getAnalysisIfAvailable<DominatorTreeWrapperPass>();
  DT = DTWP ? &DTWP->getDomTree() : nullptr;
  TM = &getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
  Trip = TM->getTargetTriple();
  TLI = TM->getSubtargetImpl(Fn)->getTargetLowering();
  HasPrologue = false;
  HasIRCheck = false;

  Attribute Attr = Fn.getFnAttribute("stack-protector-buffer-size");
  if (Attr.isStringAttribute() &&
      Attr.getValueAsString().getAsInteger(10, SSPBufferSize))
    return false; // Invalid integer string

  if (!RequiresStackProtector())
    return false;

  // TODO(etienneb): Functions with funclets are not correctly supported now.
  // Do nothing if this is funclet-based personality.
  if (Fn.hasPersonalityFn()) {
    EHPersonality Personality = classifyEHPersonality(Fn.getPersonalityFn());
    if (isFuncletEHPersonality(Personality))
      return false;
  }

  ++NumFunProtected;
  return InsertStackProtectors();
}

/// \param [out] IsLarge is set to true if a protectable array is found and
/// it is "large" ( >= ssp-buffer-size).  In the case of a structure with
/// multiple arrays, this gets set if any of them is large.
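///
/// For illustration, assuming the default ssp-buffer-size of 8 and ignoring
/// the Darwin-only allowance for non-character arrays:
///   char A[64];                    - large protectable array (ssp and sspstrong)
///   char A[4];                     - small; reported only under sspstrong
///   int  A[64];                    - not a character array; sspstrong only
///   struct { int N; char A[64]; }; - protectable via the nested char array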
bool StackProtector::ContainsProtectableArray(Type *Ty, bool &IsLarge,
                                              bool Strong,
                                              bool InStruct) const {
  if (!Ty)
    return false;
  if (ArrayType *AT = dyn_cast<ArrayType>(Ty)) {
    if (!AT->getElementType()->isIntegerTy(8)) {
      // If we're on a non-Darwin platform or we're inside of a structure, don't
      // add stack protectors unless the array is a character array.
      // However, in strong mode any array, regardless of type and size,
      // triggers a protector.
      if (!Strong && (InStruct || !Trip.isOSDarwin()))
        return false;
    }

    // If an array has SSPBufferSize or more bytes of allocated space, then we
    // emit stack protectors.
    if (SSPBufferSize <= M->getDataLayout().getTypeAllocSize(AT)) {
      IsLarge = true;
      return true;
    }

    if (Strong)
      // Require a protector for all arrays in strong mode.
      return true;
  }

  const StructType *ST = dyn_cast<StructType>(Ty);
  if (!ST)
    return false;

  bool NeedsProtector = false;
  for (StructType::element_iterator I = ST->element_begin(),
                                    E = ST->element_end();
       I != E; ++I)
    if (ContainsProtectableArray(*I, IsLarge, Strong, true)) {
      // If the element is a protectable array and is large (>= SSPBufferSize)
      // then we are done.  If the protectable array is not large, then
      // keep looking in case a subsequent element is a large array.
      if (IsLarge)
        return true;
      NeedsProtector = true;
    }

  return NeedsProtector;
}

bool StackProtector::HasAddressTaken(const Instruction *AI,
                                     uint64_t AllocSize) {
  const DataLayout &DL = M->getDataLayout();
  for (const User *U : AI->users()) {
    const auto *I = cast<Instruction>(U);
    // If this instruction accesses memory, make sure it doesn't access beyond
    // the bounds of the allocated object.
    Optional<MemoryLocation> MemLoc = MemoryLocation::getOrNone(I);
    if (MemLoc.hasValue() && MemLoc->Size.hasValue() &&
        MemLoc->Size.getValue() > AllocSize)
      return true;
    switch (I->getOpcode()) {
    case Instruction::Store:
      if (AI == cast<StoreInst>(I)->getValueOperand())
        return true;
      break;
    case Instruction::AtomicCmpXchg:
      // cmpxchg conceptually includes both a load and store from the same
      // location. So, like store, the value being stored is what matters.
      if (AI == cast<AtomicCmpXchgInst>(I)->getNewValOperand())
        return true;
      break;
    case Instruction::PtrToInt:
      if (AI == cast<PtrToIntInst>(I)->getOperand(0))
        return true;
      break;
    case Instruction::Call: {
      // Ignore intrinsics that do not become real instructions.
      // TODO: Narrow this to intrinsics that have store-like effects.
      const auto *CI = cast<CallInst>(I);
      if (!CI->isDebugOrPseudoInst() && !CI->isLifetimeStartOrEnd())
        return true;
      break;
    }
    case Instruction::Invoke:
      return true;
    case Instruction::GetElementPtr: {
      // If the GEP offset is out-of-bounds, or is non-constant and so has to be
      // assumed to be potentially out-of-bounds, then any memory access that
      // would use it could also be out-of-bounds meaning stack protection is
      // required.
      const GetElementPtrInst *GEP = cast<GetElementPtrInst>(I);
      unsigned TypeSize = DL.getIndexTypeSizeInBits(I->getType());
      APInt Offset(TypeSize, 0);
      APInt MaxOffset(TypeSize, AllocSize);
      if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.ugt(MaxOffset))
        return true;
      // Adjust AllocSize to be the space remaining after this offset.
      if (HasAddressTaken(I, AllocSize - Offset.getLimitedValue()))
        return true;
      break;
    }
    case Instruction::BitCast:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      if (HasAddressTaken(I, AllocSize))
        return true;
      break;
    case Instruction::PHI: {
      // Keep track of what PHI nodes we have already visited to ensure
      // they are only visited once.
      const auto *PN = cast<PHINode>(I);
      if (VisitedPHIs.insert(PN).second)
        if (HasAddressTaken(PN, AllocSize))
          return true;
      break;
    }
    case Instruction::Load:
    case Instruction::AtomicRMW:
    case Instruction::Ret:
      // These instructions take an address operand, but have load-like or
      // other innocuous behavior that should not trigger a stack protector.
      // atomicrmw conceptually has both load and store semantics, but the
      // value being stored must be integer; so if a pointer is being stored,
      // we'll catch it in the PtrToInt case above.
      break;
    default:
      // Conservatively return true for any instruction that takes an address
      // operand, but is not handled above.
      return true;
    }
  }
  return false;
}

/// Search for the first call to the llvm.stackprotector intrinsic and return it
/// if present.
static const CallInst *findStackProtectorIntrinsic(Function &F) {
  for (const BasicBlock &BB : F)
    for (const Instruction &I : BB)
      if (const auto *II = dyn_cast<IntrinsicInst>(&I))
        if (II->getIntrinsicID() == Intrinsic::stackprotector)
          return II;
  return nullptr;
}

/// Check whether or not this function needs a stack protector based
/// upon the stack protector level.
///
/// We use two heuristics: a standard (ssp) and strong (sspstrong).
/// The standard heuristic adds a guard variable to functions that call alloca
/// with either a variable size or a size >= SSPBufferSize, functions with
/// character buffers larger than SSPBufferSize, and functions with aggregates
/// containing character buffers larger than SSPBufferSize. The strong
/// heuristic adds a guard variable to functions that call alloca regardless
/// of size, functions with any buffer regardless of type and size, functions
/// with aggregates that contain any buffer regardless of type and size, and
/// functions that contain stack-based variables that have had their address
/// taken.
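///
/// For illustration (hypothetical C functions, default ssp-buffer-size of 8):
///   void f() { char Buf[64]; ... }      - protected under ssp and sspstrong
///   void g(unsigned N) { alloca(N); }   - variable-size alloca: protected under both
///   void h() { int X; Escape(&X); }     - address taken: sspstrong only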
bool StackProtector::RequiresStackProtector() {
  bool Strong = false;
  bool NeedsProtector = false;

  if (F->hasFnAttribute(Attribute::SafeStack))
    return false;

  // We are constructing the OptimizationRemarkEmitter on the fly rather than
  // using the analysis pass to avoid building DominatorTree and LoopInfo which
  // are not available this late in the IR pipeline.
  OptimizationRemarkEmitter ORE(F);

  if (F->hasFnAttribute(Attribute::StackProtectReq)) {
    ORE.emit([&]() {
      return OptimizationRemark(DEBUG_TYPE, "StackProtectorRequested", F)
             << "Stack protection applied to function "
             << ore::NV("Function", F)
             << " due to a function attribute or command-line switch";
    });
    NeedsProtector = true;
    Strong = true; // Use the same heuristic as strong to determine SSPLayout
  } else if (F->hasFnAttribute(Attribute::StackProtectStrong))
    Strong = true;
  else if (!F->hasFnAttribute(Attribute::StackProtect))
    return false;

  for (const BasicBlock &BB : *F) {
    for (const Instruction &I : BB) {
      if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
        if (AI->isArrayAllocation()) {
          auto RemarkBuilder = [&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorAllocaOrArray",
                                      &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to a call to alloca or use of a variable length "
                      "array";
          };
          if (const auto *CI = dyn_cast<ConstantInt>(AI->getArraySize())) {
            if (CI->getLimitedValue(SSPBufferSize) >= SSPBufferSize) {
              // A call to alloca with size >= SSPBufferSize requires
              // stack protectors.
              Layout.insert(std::make_pair(AI,
                                           MachineFrameInfo::SSPLK_LargeArray));
              ORE.emit(RemarkBuilder);
              NeedsProtector = true;
            } else if (Strong) {
              // Require protectors for all alloca calls in strong mode.
              Layout.insert(std::make_pair(AI,
                                           MachineFrameInfo::SSPLK_SmallArray));
              ORE.emit(RemarkBuilder);
              NeedsProtector = true;
            }
          } else {
            // A call to alloca with a variable size requires protectors.
            Layout.insert(std::make_pair(AI,
                                         MachineFrameInfo::SSPLK_LargeArray));
            ORE.emit(RemarkBuilder);
            NeedsProtector = true;
          }
          continue;
        }

        bool IsLarge = false;
        if (ContainsProtectableArray(AI->getAllocatedType(), IsLarge, Strong)) {
          Layout.insert(std::make_pair(
              AI, IsLarge ? MachineFrameInfo::SSPLK_LargeArray
                          : MachineFrameInfo::SSPLK_SmallArray));
          ORE.emit([&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorBuffer", &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to a stack allocated buffer or struct containing a "
                      "buffer";
          });
          NeedsProtector = true;
          continue;
        }

        if (Strong && HasAddressTaken(AI, M->getDataLayout().getTypeAllocSize(
                                              AI->getAllocatedType()))) {
          ++NumAddrTaken;
          Layout.insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
          ORE.emit([&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorAddressTaken",
                                      &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to the address of a local variable being taken";
          });
          NeedsProtector = true;
        }
        // Clear any PHIs that we visited, to make sure we examine all uses of
        // any subsequent allocas that we look at.
        VisitedPHIs.clear();
      }
    }
  }

  return NeedsProtector;
}

/// Create a stack guard loading and populate whether SelectionDAG SSP is
/// supported.
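///
/// For illustration, on targets that provide an IR-level guard value this
/// typically emits a volatile load along the lines of
///   %StackGuard = load volatile i8*, i8** @__stack_chk_guard
/// while targets without one fall back to the intrinsic
///   %StackGuard = call i8* @llvm.stackguard()
/// (the exact guard location and symbol name are target-specific).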
static Value *getStackGuard(const TargetLoweringBase *TLI, Module *M,
                            IRBuilder<> &B,
                            bool *SupportsSelectionDAGSP = nullptr) {
  Value *Guard = TLI->getIRStackGuard(B);
  auto GuardMode = TLI->getTargetMachine().Options.StackProtectorGuard;
  if ((GuardMode == llvm::StackProtectorGuards::TLS ||
       GuardMode == llvm::StackProtectorGuards::None) && Guard)
    return B.CreateLoad(B.getInt8PtrTy(), Guard, true, "StackGuard");

  // Use SelectionDAG SSP handling, since there isn't an IR guard.
  //
  // This is somewhat awkward, since we optionally output whether we
  // should perform a SelectionDAG SP here. The reason is that it's strictly
  // defined as !TLI->getIRStackGuard(B), where getIRStackGuard is also
  // mutating. There is no way to get this bit without mutating the IR, so
  // getting this bit has to happen at this point.
  //
  // We could have defined a new function TLI::supportsSelectionDAGSP(), but
  // that would put more burden on the backends' overriding work, especially
  // since it conveys the same information getIRStackGuard() already gives.
  if (SupportsSelectionDAGSP)
    *SupportsSelectionDAGSP = true;
  TLI->insertSSPDeclarations(*M);
  return B.CreateCall(Intrinsic::getDeclaration(M, Intrinsic::stackguard));
}

/// Insert code into the entry block that stores the stack guard
/// variable onto the stack:
///
///   entry:
///     StackGuardSlot = alloca i8*
///     StackGuard = <stack guard>
///     call void @llvm.stackprotector(StackGuard, StackGuardSlot)
///
/// Returns true if the platform/triple supports the stackprotectorcreate pseudo
/// node.
static bool CreatePrologue(Function *F, Module *M, ReturnInst *RI,
                           const TargetLoweringBase *TLI, AllocaInst *&AI) {
  bool SupportsSelectionDAGSP = false;
  IRBuilder<> B(&F->getEntryBlock().front());
  PointerType *PtrTy = Type::getInt8PtrTy(RI->getContext());
  AI = B.CreateAlloca(PtrTy, nullptr, "StackGuardSlot");

  Value *GuardSlot = getStackGuard(TLI, M, B, &SupportsSelectionDAGSP);
  B.CreateCall(Intrinsic::getDeclaration(M, Intrinsic::stackprotector),
               {GuardSlot, AI});
  return SupportsSelectionDAGSP;
}

/// InsertStackProtectors - Insert code into the prologue and epilogue of the
/// function.
///
/// - The prologue code loads and stores the stack guard onto the stack.
/// - The epilogue checks the value stored in the prologue against the original
///   value. It calls __stack_chk_fail if they differ.
bool StackProtector::InsertStackProtectors() {
  // If the target wants to XOR the frame pointer into the guard value, it's
  // impossible to emit the check in IR, so the target *must* support stack
  // protection in SDAG.
  bool SupportsSelectionDAGSP =
      TLI->useStackGuardXorFP() ||
      (EnableSelectionDAGSP && !TM->Options.EnableFastISel &&
       !TM->Options.EnableGlobalISel);
  AllocaInst *AI = nullptr; // Place on stack that stores the stack guard.

  for (Function::iterator I = F->begin(), E = F->end(); I != E;) {
    BasicBlock *BB = &*I++;
    ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator());
    if (!RI)
      continue;

    // Generate prologue instrumentation if not already generated.
    if (!HasPrologue) {
      HasPrologue = true;
      SupportsSelectionDAGSP &= CreatePrologue(F, M, RI, TLI, AI);
    }

    // SelectionDAG based code generation. Nothing else needs to be done here.
    // The epilogue instrumentation is postponed to SelectionDAG.
    if (SupportsSelectionDAGSP)
      break;

    // Find the stack guard slot if the prologue was not created by this pass
    // itself via a previous call to CreatePrologue().
    if (!AI) {
      const CallInst *SPCall = findStackProtectorIntrinsic(*F);
      assert(SPCall && "Call to llvm.stackprotector is missing");
      AI = cast<AllocaInst>(SPCall->getArgOperand(1));
    }

    // Set HasIRCheck to true, so that SelectionDAG will not generate its own
    // version. SelectionDAG calls 'shouldEmitSDCheck' to check whether
    // instrumentation has already been generated.
    HasIRCheck = true;

    // Generate epilogue instrumentation. The epilogue instrumentation can be
    // function-based or inlined depending on which mechanism the target is
    // providing.
    if (Function *GuardCheck = TLI->getSSPStackGuardCheck(*M)) {
      // Generate the function-based epilogue instrumentation.
      // The target provides a guard check function; generate a call to it.
      IRBuilder<> B(RI);
      LoadInst *Guard = B.CreateLoad(B.getInt8PtrTy(), AI, true, "Guard");
      CallInst *Call = B.CreateCall(GuardCheck, {Guard});
      Call->setAttributes(GuardCheck->getAttributes());
      Call->setCallingConv(GuardCheck->getCallingConv());
    } else {
      // Generate the epilogue with inline instrumentation.
      // If we do not support SelectionDAG based tail calls, generate IR level
      // tail calls.
      //
      // For each block with a return instruction, convert this:
      //
      //   return:
      //     ...
      //     ret ...
      //
      // into this:
      //
      //   return:
      //     ...
      //     %1 = <stack guard>
      //     %2 = load StackGuardSlot
      //     %3 = cmp i1 %1, %2
      //     br i1 %3, label %SP_return, label %CallStackCheckFailBlk
      //
      //   SP_return:
      //     ret ...
      //
      //   CallStackCheckFailBlk:
      //     call void @__stack_chk_fail()
      //     unreachable

      // Create the FailBB. We duplicate the BB every time since the MI tail
      // merge pass will merge together all of the various BBs into one,
      // including the fail BB generated by the stack protector pseudo
      // instruction.
      BasicBlock *FailBB = CreateFailBB();

      // Split the basic block before the return instruction.
      BasicBlock *NewBB = BB->splitBasicBlock(RI->getIterator(), "SP_return");

      // Update the dominator tree if we need to.
      if (DT && DT->isReachableFromEntry(BB)) {
        DT->addNewBlock(NewBB, BB);
        DT->addNewBlock(FailBB, BB);
      }

      // Remove the default branch instruction to the new BB.
      BB->getTerminator()->eraseFromParent();

      // Move the newly created basic block to the point right after the old
      // basic block so that it's in the "fall through" position.
      NewBB->moveAfter(BB);

      // Generate the stack protector instructions in the old basic block.
      IRBuilder<> B(BB);
      Value *Guard = getStackGuard(TLI, M, B);
      LoadInst *LI2 = B.CreateLoad(B.getInt8PtrTy(), AI, true);
      Value *Cmp = B.CreateICmpEQ(Guard, LI2);
      auto SuccessProb =
          BranchProbabilityInfo::getBranchProbStackProtector(true);
      auto FailureProb =
          BranchProbabilityInfo::getBranchProbStackProtector(false);
      MDNode *Weights = MDBuilder(F->getContext())
                            .createBranchWeights(SuccessProb.getNumerator(),
                                                 FailureProb.getNumerator());
      B.CreateCondBr(Cmp, NewBB, FailBB, Weights);
    }
  }

  // Return whether we modified any basic blocks; HasPrologue is false only
  // when the function contains no return statements at all.
  return HasPrologue;
}

/// CreateFailBB - Create a basic block to jump to when the stack protector
/// check fails.
BasicBlock *StackProtector::CreateFailBB() {
  LLVMContext &Context = F->getContext();
  BasicBlock *FailBB = BasicBlock::Create(Context, "CallStackCheckFailBlk", F);
  IRBuilder<> B(FailBB);
  if (F->getSubprogram())
    B.SetCurrentDebugLocation(
        DILocation::get(Context, 0, 0, F->getSubprogram()));
  if (Trip.isOSOpenBSD()) {
    FunctionCallee StackChkFail = M->getOrInsertFunction(
        "__stack_smash_handler", Type::getVoidTy(Context),
        Type::getInt8PtrTy(Context));

    B.CreateCall(StackChkFail, B.CreateGlobalStringPtr(F->getName(), "SSH"));
  } else {
    FunctionCallee StackChkFail =
        M->getOrInsertFunction("__stack_chk_fail", Type::getVoidTy(Context));

    B.CreateCall(StackChkFail, {});
  }
  B.CreateUnreachable();
  return FailBB;
}

bool StackProtector::shouldEmitSDCheck(const BasicBlock &BB) const {
  return HasPrologue && !HasIRCheck && isa<ReturnInst>(BB.getTerminator());
}

void StackProtector::copyToMachineFrameInfo(MachineFrameInfo &MFI) const {
  if (Layout.empty())
    return;

  for (int I = 0, E = MFI.getObjectIndexEnd(); I != E; ++I) {
    if (MFI.isDeadObjectIndex(I))
      continue;

    const AllocaInst *AI = MFI.getObjectAllocation(I);
    if (!AI)
      continue;

    SSPLayoutMap::const_iterator LI = Layout.find(AI);
    if (LI == Layout.end())
      continue;

    MFI.setObjectSSPLayout(I, LI->second);
  }
}