xref: /freebsd-src/contrib/llvm-project/llvm/lib/CodeGen/ShrinkWrap.cpp (revision 0fca6ea1d4eea4c934cfff25ac9ee8ad6fe95583)
10b57cec5SDimitry Andric //===- ShrinkWrap.cpp - Compute safe point for prolog/epilog insertion ----===//
20b57cec5SDimitry Andric //
30b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
40b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
50b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
60b57cec5SDimitry Andric //
70b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
80b57cec5SDimitry Andric //
90b57cec5SDimitry Andric // This pass looks for safe points where the prologue and epilogue can be
100b57cec5SDimitry Andric // inserted.
110b57cec5SDimitry Andric // The safe point for the prologue (resp. epilogue) is called Save
120b57cec5SDimitry Andric // (resp. Restore).
130b57cec5SDimitry Andric // A point is safe for the prologue (resp. epilogue) if and only if
140b57cec5SDimitry Andric // 1) it dominates (resp. post-dominates) all the frame-related operations and
150b57cec5SDimitry Andric // 2) between two executions of the Save (resp. Restore) point there is an
160b57cec5SDimitry Andric // execution of the Restore (resp. Save) point.
170b57cec5SDimitry Andric //
180b57cec5SDimitry Andric // For instance, the following points are safe:
190b57cec5SDimitry Andric // for (int i = 0; i < 10; ++i) {
200b57cec5SDimitry Andric //   Save
210b57cec5SDimitry Andric //   ...
220b57cec5SDimitry Andric //   Restore
230b57cec5SDimitry Andric // }
240b57cec5SDimitry Andric // Indeed, the execution looks like Save -> Restore -> Save -> Restore ...
250b57cec5SDimitry Andric // And the following points are not:
260b57cec5SDimitry Andric // for (int i = 0; i < 10; ++i) {
270b57cec5SDimitry Andric //   Save
280b57cec5SDimitry Andric //   ...
290b57cec5SDimitry Andric // }
300b57cec5SDimitry Andric // for (int i = 0; i < 10; ++i) {
310b57cec5SDimitry Andric //   ...
320b57cec5SDimitry Andric //   Restore
330b57cec5SDimitry Andric // }
340b57cec5SDimitry Andric // Indeed, the execution looks like Save -> Save -> ... -> Restore -> Restore.
350b57cec5SDimitry Andric //
360b57cec5SDimitry Andric // This pass also ensures that the safe points are 3) cheaper than the regular
370b57cec5SDimitry Andric // entry and exit blocks.
380b57cec5SDimitry Andric //
390b57cec5SDimitry Andric // Property #1 is ensured via the use of MachineDominatorTree and
400b57cec5SDimitry Andric // MachinePostDominatorTree.
410b57cec5SDimitry Andric // Property #2 is ensured via property #1 and MachineLoopInfo, i.e., both
420b57cec5SDimitry Andric // points must be in the same loop.
430b57cec5SDimitry Andric // Property #3 is ensured via the MachineBlockFrequencyInfo.
440b57cec5SDimitry Andric //
450b57cec5SDimitry Andric // If this pass finds points matching all these properties, then
460b57cec5SDimitry Andric // MachineFrameInfo is updated with this information.
470b57cec5SDimitry Andric //
480b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
490b57cec5SDimitry Andric 
500b57cec5SDimitry Andric #include "llvm/ADT/BitVector.h"
510b57cec5SDimitry Andric #include "llvm/ADT/PostOrderIterator.h"
520b57cec5SDimitry Andric #include "llvm/ADT/SetVector.h"
530b57cec5SDimitry Andric #include "llvm/ADT/SmallVector.h"
540b57cec5SDimitry Andric #include "llvm/ADT/Statistic.h"
550b57cec5SDimitry Andric #include "llvm/Analysis/CFG.h"
5606c3fb27SDimitry Andric #include "llvm/Analysis/ValueTracking.h"
570b57cec5SDimitry Andric #include "llvm/CodeGen/MachineBasicBlock.h"
580b57cec5SDimitry Andric #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
590b57cec5SDimitry Andric #include "llvm/CodeGen/MachineDominators.h"
600b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFrameInfo.h"
610b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFunction.h"
620b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFunctionPass.h"
630b57cec5SDimitry Andric #include "llvm/CodeGen/MachineInstr.h"
640b57cec5SDimitry Andric #include "llvm/CodeGen/MachineLoopInfo.h"
650b57cec5SDimitry Andric #include "llvm/CodeGen/MachineOperand.h"
660b57cec5SDimitry Andric #include "llvm/CodeGen/MachineOptimizationRemarkEmitter.h"
670b57cec5SDimitry Andric #include "llvm/CodeGen/MachinePostDominators.h"
680b57cec5SDimitry Andric #include "llvm/CodeGen/RegisterClassInfo.h"
690b57cec5SDimitry Andric #include "llvm/CodeGen/RegisterScavenging.h"
700b57cec5SDimitry Andric #include "llvm/CodeGen/TargetFrameLowering.h"
710b57cec5SDimitry Andric #include "llvm/CodeGen/TargetInstrInfo.h"
720b57cec5SDimitry Andric #include "llvm/CodeGen/TargetLowering.h"
730b57cec5SDimitry Andric #include "llvm/CodeGen/TargetRegisterInfo.h"
740b57cec5SDimitry Andric #include "llvm/CodeGen/TargetSubtargetInfo.h"
750b57cec5SDimitry Andric #include "llvm/IR/Attributes.h"
760b57cec5SDimitry Andric #include "llvm/IR/Function.h"
77480093f4SDimitry Andric #include "llvm/InitializePasses.h"
780b57cec5SDimitry Andric #include "llvm/MC/MCAsmInfo.h"
790b57cec5SDimitry Andric #include "llvm/Pass.h"
800b57cec5SDimitry Andric #include "llvm/Support/CommandLine.h"
810b57cec5SDimitry Andric #include "llvm/Support/Debug.h"
820b57cec5SDimitry Andric #include "llvm/Support/ErrorHandling.h"
830b57cec5SDimitry Andric #include "llvm/Support/raw_ostream.h"
840b57cec5SDimitry Andric #include "llvm/Target/TargetMachine.h"
850b57cec5SDimitry Andric #include <cassert>
860b57cec5SDimitry Andric #include <cstdint>
870b57cec5SDimitry Andric #include <memory>
880b57cec5SDimitry Andric 
890b57cec5SDimitry Andric using namespace llvm;
900b57cec5SDimitry Andric 
910b57cec5SDimitry Andric #define DEBUG_TYPE "shrink-wrap"
920b57cec5SDimitry Andric 
930b57cec5SDimitry Andric STATISTIC(NumFunc, "Number of functions");
940b57cec5SDimitry Andric STATISTIC(NumCandidates, "Number of shrink-wrapping candidates");
950b57cec5SDimitry Andric STATISTIC(NumCandidatesDropped,
960b57cec5SDimitry Andric           "Number of shrink-wrapping candidates dropped because of frequency");
970b57cec5SDimitry Andric 
980b57cec5SDimitry Andric static cl::opt<cl::boolOrDefault>
990b57cec5SDimitry Andric EnableShrinkWrapOpt("enable-shrink-wrap", cl::Hidden,
1000b57cec5SDimitry Andric                     cl::desc("enable the shrink-wrapping pass"));
10106c3fb27SDimitry Andric static cl::opt<bool> EnablePostShrinkWrapOpt(
10206c3fb27SDimitry Andric     "enable-shrink-wrap-region-split", cl::init(true), cl::Hidden,
10306c3fb27SDimitry Andric     cl::desc("enable splitting of the restore block if possible"));
1040b57cec5SDimitry Andric 
1050b57cec5SDimitry Andric namespace {
1060b57cec5SDimitry Andric 
1070b57cec5SDimitry Andric /// Class to determine where the safe points to insert the
1080b57cec5SDimitry Andric /// prologue and epilogue are.
1090b57cec5SDimitry Andric /// Unlike the paper from Fred C. Chow, PLDI'88, which introduced the
1100b57cec5SDimitry Andric /// shrink-wrapping term for prologue/epilogue placement, this pass
1110b57cec5SDimitry Andric /// does not rely on expensive data-flow analysis. Instead we use
1120b57cec5SDimitry Andric /// dominance properties and loop information to decide which points
1130b57cec5SDimitry Andric /// are safe for such insertion.
1140b57cec5SDimitry Andric class ShrinkWrap : public MachineFunctionPass {
1150b57cec5SDimitry Andric   /// Hold callee-saved information.
1160b57cec5SDimitry Andric   RegisterClassInfo RCI;
11706c3fb27SDimitry Andric   MachineDominatorTree *MDT = nullptr;
11806c3fb27SDimitry Andric   MachinePostDominatorTree *MPDT = nullptr;
1190b57cec5SDimitry Andric 
1200b57cec5SDimitry Andric   /// Current safe point found for the prologue.
1210b57cec5SDimitry Andric   /// The prologue will be inserted before the first instruction
1220b57cec5SDimitry Andric   /// in this basic block.
12306c3fb27SDimitry Andric   MachineBasicBlock *Save = nullptr;
1240b57cec5SDimitry Andric 
1250b57cec5SDimitry Andric   /// Current safe point found for the epilogue.
1260b57cec5SDimitry Andric   /// The epilogue will be inserted before the first terminator instruction
1270b57cec5SDimitry Andric   /// in this basic block.
12806c3fb27SDimitry Andric   MachineBasicBlock *Restore = nullptr;
1290b57cec5SDimitry Andric 
1300b57cec5SDimitry Andric   /// Hold the information of the basic block frequency.
1310b57cec5SDimitry Andric   /// Use to check the profitability of the new points.
13206c3fb27SDimitry Andric   MachineBlockFrequencyInfo *MBFI = nullptr;
1330b57cec5SDimitry Andric 
1340b57cec5SDimitry Andric   /// Hold the loop information. Used to determine if Save and Restore
1350b57cec5SDimitry Andric   /// are in the same loop.
13606c3fb27SDimitry Andric   MachineLoopInfo *MLI = nullptr;
1370b57cec5SDimitry Andric 
1380b57cec5SDimitry Andric   // Emit remarks.
1390b57cec5SDimitry Andric   MachineOptimizationRemarkEmitter *ORE = nullptr;
1400b57cec5SDimitry Andric 
1410b57cec5SDimitry Andric   /// Frequency of the Entry block.
1425f757f3fSDimitry Andric   BlockFrequency EntryFreq;
1430b57cec5SDimitry Andric 
1440b57cec5SDimitry Andric   /// Current opcode for frame setup.
14506c3fb27SDimitry Andric   unsigned FrameSetupOpcode = ~0u;
1460b57cec5SDimitry Andric 
1470b57cec5SDimitry Andric   /// Current opcode for frame destroy.
14806c3fb27SDimitry Andric   unsigned FrameDestroyOpcode = ~0u;
1490b57cec5SDimitry Andric 
1500b57cec5SDimitry Andric   /// Stack pointer register, used by llvm.{stacksave,stackrestore}
151e8d8bef9SDimitry Andric   Register SP;
1520b57cec5SDimitry Andric 
1530b57cec5SDimitry Andric   /// Entry block.
15406c3fb27SDimitry Andric   const MachineBasicBlock *Entry = nullptr;
1550b57cec5SDimitry Andric 
1560b57cec5SDimitry Andric   using SetOfRegs = SmallSetVector<unsigned, 16>;
1570b57cec5SDimitry Andric 
1580b57cec5SDimitry Andric   /// Registers that need to be saved for the current function.
1590b57cec5SDimitry Andric   mutable SetOfRegs CurrentCSRs;
1600b57cec5SDimitry Andric 
1610b57cec5SDimitry Andric   /// Current MachineFunction.
16206c3fb27SDimitry Andric   MachineFunction *MachineFunc = nullptr;
16306c3fb27SDimitry Andric 
164*0fca6ea1SDimitry Andric   /// Is `true` for block numbers where we assume possible stack accesses
165*0fca6ea1SDimitry Andric   /// or computation of stack-relative addresses on some CFG path reaching
166*0fca6ea1SDimitry Andric   /// (and including) the block itself. Is `false` for basic blocks where we
167*0fca6ea1SDimitry Andric   /// can guarantee the opposite. False positives do not lead to incorrect
168*0fca6ea1SDimitry Andric   /// analysis results, therefore this conservative approach is sound.
16906c3fb27SDimitry Andric   BitVector StackAddressUsedBlockInfo;
1700b57cec5SDimitry Andric 
1710b57cec5SDimitry Andric   /// Check if \p MI uses or defines a callee-saved register or
1720b57cec5SDimitry Andric   /// a frame index. If this is the case, this means \p MI must happen
1730b57cec5SDimitry Andric   /// after Save and before Restore.
17406c3fb27SDimitry Andric   bool useOrDefCSROrFI(const MachineInstr &MI, RegScavenger *RS,
17506c3fb27SDimitry Andric                        bool StackAddressUsed) const;
1760b57cec5SDimitry Andric 
1770b57cec5SDimitry Andric   const SetOfRegs &getCurrentCSRs(RegScavenger *RS) const {
1780b57cec5SDimitry Andric     if (CurrentCSRs.empty()) {
1790b57cec5SDimitry Andric       BitVector SavedRegs;
1800b57cec5SDimitry Andric       const TargetFrameLowering *TFI =
1810b57cec5SDimitry Andric           MachineFunc->getSubtarget().getFrameLowering();
1820b57cec5SDimitry Andric 
1830b57cec5SDimitry Andric       TFI->determineCalleeSaves(*MachineFunc, SavedRegs, RS);
1840b57cec5SDimitry Andric 
1850b57cec5SDimitry Andric       for (int Reg = SavedRegs.find_first(); Reg != -1;
1860b57cec5SDimitry Andric            Reg = SavedRegs.find_next(Reg))
1870b57cec5SDimitry Andric         CurrentCSRs.insert((unsigned)Reg);
1880b57cec5SDimitry Andric     }
1890b57cec5SDimitry Andric     return CurrentCSRs;
1900b57cec5SDimitry Andric   }
1910b57cec5SDimitry Andric 
1920b57cec5SDimitry Andric   /// Update the Save and Restore points such that \p MBB is in
1930b57cec5SDimitry Andric   /// the region that is dominated by Save and post-dominated by Restore
1940b57cec5SDimitry Andric   /// and Save and Restore still match the safe point definition.
1950b57cec5SDimitry Andric   /// Such a point may not exist and Save and/or Restore may be null after
1960b57cec5SDimitry Andric   /// this call.
1970b57cec5SDimitry Andric   void updateSaveRestorePoints(MachineBasicBlock &MBB, RegScavenger *RS);
1980b57cec5SDimitry Andric 
19906c3fb27SDimitry Andric   // Try to find safe points based on dominance and block frequency without
20006c3fb27SDimitry Andric   // any changes to the IR.
20106c3fb27SDimitry Andric   bool performShrinkWrapping(
20206c3fb27SDimitry Andric       const ReversePostOrderTraversal<MachineBasicBlock *> &RPOT,
20306c3fb27SDimitry Andric       RegScavenger *RS);
20406c3fb27SDimitry Andric 
20506c3fb27SDimitry Andric   /// This function tries to split the restore point if doing so can shrink the
20606c3fb27SDimitry Andric   /// save point further. \return True if restore point is split.
20706c3fb27SDimitry Andric   bool postShrinkWrapping(bool HasCandidate, MachineFunction &MF,
20806c3fb27SDimitry Andric                           RegScavenger *RS);
20906c3fb27SDimitry Andric 
21006c3fb27SDimitry Andric   /// This function analyzes if the restore point can be split to create a new
21106c3fb27SDimitry Andric   /// restore point. This function collects
21206c3fb27SDimitry Andric   /// 1. Any preds of the current restore that are reachable from callee
21306c3fb27SDimitry Andric   /// save/FI blocks
21406c3fb27SDimitry Andric   /// - indicated by DirtyPreds
21506c3fb27SDimitry Andric   /// 2. Any preds of the current restore that are not DirtyPreds - indicated
21606c3fb27SDimitry Andric   /// by CleanPreds
21706c3fb27SDimitry Andric   /// Both sets should be non-empty for considering restore point split.
21806c3fb27SDimitry Andric   bool checkIfRestoreSplittable(
21906c3fb27SDimitry Andric       const MachineBasicBlock *CurRestore,
22006c3fb27SDimitry Andric       const DenseSet<const MachineBasicBlock *> &ReachableByDirty,
22106c3fb27SDimitry Andric       SmallVectorImpl<MachineBasicBlock *> &DirtyPreds,
22206c3fb27SDimitry Andric       SmallVectorImpl<MachineBasicBlock *> &CleanPreds,
22306c3fb27SDimitry Andric       const TargetInstrInfo *TII, RegScavenger *RS);
22406c3fb27SDimitry Andric 
2250b57cec5SDimitry Andric   /// Initialize the pass for \p MF.
2260b57cec5SDimitry Andric   void init(MachineFunction &MF) {
2270b57cec5SDimitry Andric     RCI.runOnMachineFunction(MF);
228*0fca6ea1SDimitry Andric     MDT = &getAnalysis<MachineDominatorTreeWrapperPass>().getDomTree();
229*0fca6ea1SDimitry Andric     MPDT = &getAnalysis<MachinePostDominatorTreeWrapperPass>().getPostDomTree();
2300b57cec5SDimitry Andric     Save = nullptr;
2310b57cec5SDimitry Andric     Restore = nullptr;
232*0fca6ea1SDimitry Andric     MBFI = &getAnalysis<MachineBlockFrequencyInfoWrapperPass>().getMBFI();
233*0fca6ea1SDimitry Andric     MLI = &getAnalysis<MachineLoopInfoWrapperPass>().getLI();
2340b57cec5SDimitry Andric     ORE = &getAnalysis<MachineOptimizationRemarkEmitterPass>().getORE();
2350b57cec5SDimitry Andric     EntryFreq = MBFI->getEntryFreq();
2360b57cec5SDimitry Andric     const TargetSubtargetInfo &Subtarget = MF.getSubtarget();
2370b57cec5SDimitry Andric     const TargetInstrInfo &TII = *Subtarget.getInstrInfo();
2380b57cec5SDimitry Andric     FrameSetupOpcode = TII.getCallFrameSetupOpcode();
2390b57cec5SDimitry Andric     FrameDestroyOpcode = TII.getCallFrameDestroyOpcode();
2400b57cec5SDimitry Andric     SP = Subtarget.getTargetLowering()->getStackPointerRegisterToSaveRestore();
2410b57cec5SDimitry Andric     Entry = &MF.front();
2420b57cec5SDimitry Andric     CurrentCSRs.clear();
2430b57cec5SDimitry Andric     MachineFunc = &MF;
2440b57cec5SDimitry Andric 
2450b57cec5SDimitry Andric     ++NumFunc;
2460b57cec5SDimitry Andric   }
2470b57cec5SDimitry Andric 
2480b57cec5SDimitry Andric   /// Check whether or not Save and Restore points are still interesting for
2490b57cec5SDimitry Andric   /// shrink-wrapping.
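  /// In particular, Save == Entry is not interesting: the prologue would stay
  /// in the entry block, which is where it would be placed anyway.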
2500b57cec5SDimitry Andric   bool ArePointsInteresting() const { return Save != Entry && Save && Restore; }
2510b57cec5SDimitry Andric 
2520b57cec5SDimitry Andric   /// Check if shrink wrapping is enabled for this target and function.
2530b57cec5SDimitry Andric   static bool isShrinkWrapEnabled(const MachineFunction &MF);
2540b57cec5SDimitry Andric 
2550b57cec5SDimitry Andric public:
2560b57cec5SDimitry Andric   static char ID;
2570b57cec5SDimitry Andric 
2580b57cec5SDimitry Andric   ShrinkWrap() : MachineFunctionPass(ID) {
2590b57cec5SDimitry Andric     initializeShrinkWrapPass(*PassRegistry::getPassRegistry());
2600b57cec5SDimitry Andric   }
2610b57cec5SDimitry Andric 
2620b57cec5SDimitry Andric   void getAnalysisUsage(AnalysisUsage &AU) const override {
2630b57cec5SDimitry Andric     AU.setPreservesAll();
264*0fca6ea1SDimitry Andric     AU.addRequired<MachineBlockFrequencyInfoWrapperPass>();
265*0fca6ea1SDimitry Andric     AU.addRequired<MachineDominatorTreeWrapperPass>();
266*0fca6ea1SDimitry Andric     AU.addRequired<MachinePostDominatorTreeWrapperPass>();
267*0fca6ea1SDimitry Andric     AU.addRequired<MachineLoopInfoWrapperPass>();
2680b57cec5SDimitry Andric     AU.addRequired<MachineOptimizationRemarkEmitterPass>();
2690b57cec5SDimitry Andric     MachineFunctionPass::getAnalysisUsage(AU);
2700b57cec5SDimitry Andric   }
2710b57cec5SDimitry Andric 
2720b57cec5SDimitry Andric   MachineFunctionProperties getRequiredProperties() const override {
2730b57cec5SDimitry Andric     return MachineFunctionProperties().set(
2740b57cec5SDimitry Andric       MachineFunctionProperties::Property::NoVRegs);
2750b57cec5SDimitry Andric   }
2760b57cec5SDimitry Andric 
2770b57cec5SDimitry Andric   StringRef getPassName() const override { return "Shrink Wrapping analysis"; }
2780b57cec5SDimitry Andric 
2790b57cec5SDimitry Andric   /// Perform the shrink-wrapping analysis and update
2800b57cec5SDimitry Andric   /// the MachineFrameInfo attached to \p MF with the results.
2810b57cec5SDimitry Andric   bool runOnMachineFunction(MachineFunction &MF) override;
2820b57cec5SDimitry Andric };
2830b57cec5SDimitry Andric 
2840b57cec5SDimitry Andric } // end anonymous namespace
2850b57cec5SDimitry Andric 
2860b57cec5SDimitry Andric char ShrinkWrap::ID = 0;
2870b57cec5SDimitry Andric 
2880b57cec5SDimitry Andric char &llvm::ShrinkWrapID = ShrinkWrap::ID;
2890b57cec5SDimitry Andric 
2900b57cec5SDimitry Andric INITIALIZE_PASS_BEGIN(ShrinkWrap, DEBUG_TYPE, "Shrink Wrap Pass", false, false)
291*0fca6ea1SDimitry Andric INITIALIZE_PASS_DEPENDENCY(MachineBlockFrequencyInfoWrapperPass)
292*0fca6ea1SDimitry Andric INITIALIZE_PASS_DEPENDENCY(MachineDominatorTreeWrapperPass)
293*0fca6ea1SDimitry Andric INITIALIZE_PASS_DEPENDENCY(MachinePostDominatorTreeWrapperPass)
294*0fca6ea1SDimitry Andric INITIALIZE_PASS_DEPENDENCY(MachineLoopInfoWrapperPass)
2950b57cec5SDimitry Andric INITIALIZE_PASS_DEPENDENCY(MachineOptimizationRemarkEmitterPass)
2960b57cec5SDimitry Andric INITIALIZE_PASS_END(ShrinkWrap, DEBUG_TYPE, "Shrink Wrap Pass", false, false)
2970b57cec5SDimitry Andric 
29806c3fb27SDimitry Andric bool ShrinkWrap::useOrDefCSROrFI(const MachineInstr &MI, RegScavenger *RS,
29906c3fb27SDimitry Andric                                  bool StackAddressUsed) const {
30006c3fb27SDimitry Andric   /// Check if \p Op is known to access an address not on the function's stack.
30106c3fb27SDimitry Andric   /// At the moment, accesses where the underlying object is a global, function
30206c3fb27SDimitry Andric   /// argument, or jump table are considered non-stack accesses. Note that the
30306c3fb27SDimitry Andric   /// caller's stack may get accessed when passing an argument via the stack,
30406c3fb27SDimitry Andric   /// but not the stack of the current function.
30506c3fb27SDimitry Andric   ///
30606c3fb27SDimitry Andric   auto IsKnownNonStackPtr = [](MachineMemOperand *Op) {
30706c3fb27SDimitry Andric     if (Op->getValue()) {
30806c3fb27SDimitry Andric       const Value *UO = getUnderlyingObject(Op->getValue());
30906c3fb27SDimitry Andric       if (!UO)
31006c3fb27SDimitry Andric         return false;
31106c3fb27SDimitry Andric       if (auto *Arg = dyn_cast<Argument>(UO))
31206c3fb27SDimitry Andric         return !Arg->hasPassPointeeByValueCopyAttr();
31306c3fb27SDimitry Andric       return isa<GlobalValue>(UO);
31406c3fb27SDimitry Andric     }
31506c3fb27SDimitry Andric     if (const PseudoSourceValue *PSV = Op->getPseudoValue())
31606c3fb27SDimitry Andric       return PSV->isJumpTable();
31706c3fb27SDimitry Andric     return false;
31806c3fb27SDimitry Andric   };
31906c3fb27SDimitry Andric   // Load/store operations may access the stack indirectly when we previously
32006c3fb27SDimitry Andric   // computed an address to a stack location.
32106c3fb27SDimitry Andric   if (StackAddressUsed && MI.mayLoadOrStore() &&
32206c3fb27SDimitry Andric       (MI.isCall() || MI.hasUnmodeledSideEffects() || MI.memoperands_empty() ||
32306c3fb27SDimitry Andric        !all_of(MI.memoperands(), IsKnownNonStackPtr)))
3240b57cec5SDimitry Andric     return true;
3250b57cec5SDimitry Andric 
3260b57cec5SDimitry Andric   if (MI.getOpcode() == FrameSetupOpcode ||
3270b57cec5SDimitry Andric       MI.getOpcode() == FrameDestroyOpcode) {
3280b57cec5SDimitry Andric     LLVM_DEBUG(dbgs() << "Frame instruction: " << MI << '\n');
3290b57cec5SDimitry Andric     return true;
3300b57cec5SDimitry Andric   }
33104eeddc0SDimitry Andric   const MachineFunction *MF = MI.getParent()->getParent();
33204eeddc0SDimitry Andric   const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo();
3330b57cec5SDimitry Andric   for (const MachineOperand &MO : MI.operands()) {
3340b57cec5SDimitry Andric     bool UseOrDefCSR = false;
3350b57cec5SDimitry Andric     if (MO.isReg()) {
3360b57cec5SDimitry Andric       // Ignore instructions like DBG_VALUE which don't read/def the register.
3370b57cec5SDimitry Andric       if (!MO.isDef() && !MO.readsReg())
3380b57cec5SDimitry Andric         continue;
3398bcb0991SDimitry Andric       Register PhysReg = MO.getReg();
3400b57cec5SDimitry Andric       if (!PhysReg)
3410b57cec5SDimitry Andric         continue;
342bdd1243dSDimitry Andric       assert(PhysReg.isPhysical() && "Unallocated register?!");
3430b57cec5SDimitry Andric       // The stack pointer is not normally described as a callee-saved register
3440b57cec5SDimitry Andric       // in calling convention definitions, so we need to watch for it
3450b57cec5SDimitry Andric       // separately. We can ignore an SP mentioned by a call instruction,
3460b57cec5SDimitry Andric       // though, as it's harmless and we do not want to effectively disable tail
3470b57cec5SDimitry Andric       // calls by forcing the restore point to post-dominate them.
34804eeddc0SDimitry Andric       // PPC's LR is also not normally described as a callee-saved register in
34904eeddc0SDimitry Andric       // calling convention definitions, so we need to watch for it, too. An LR
35004eeddc0SDimitry Andric       // mentioned implicitly by a return (or "branch to link register")
35104eeddc0SDimitry Andric       // instruction we can ignore, otherwise we may pessimize shrinkwrapping.
35204eeddc0SDimitry Andric       UseOrDefCSR =
35304eeddc0SDimitry Andric           (!MI.isCall() && PhysReg == SP) ||
35404eeddc0SDimitry Andric           RCI.getLastCalleeSavedAlias(PhysReg) ||
35504eeddc0SDimitry Andric           (!MI.isReturn() && TRI->isNonallocatableRegisterCalleeSave(PhysReg));
3560b57cec5SDimitry Andric     } else if (MO.isRegMask()) {
3570b57cec5SDimitry Andric       // Check if this regmask clobbers any of the CSRs.
3580b57cec5SDimitry Andric       for (unsigned Reg : getCurrentCSRs(RS)) {
3590b57cec5SDimitry Andric         if (MO.clobbersPhysReg(Reg)) {
3600b57cec5SDimitry Andric           UseOrDefCSR = true;
3610b57cec5SDimitry Andric           break;
3620b57cec5SDimitry Andric         }
3630b57cec5SDimitry Andric       }
3640b57cec5SDimitry Andric     }
3650b57cec5SDimitry Andric     // Skip FrameIndex operands in DBG_VALUE instructions.
3660b57cec5SDimitry Andric     if (UseOrDefCSR || (MO.isFI() && !MI.isDebugValue())) {
3670b57cec5SDimitry Andric       LLVM_DEBUG(dbgs() << "Use or define CSR(" << UseOrDefCSR << ") or FI("
3680b57cec5SDimitry Andric                         << MO.isFI() << "): " << MI << '\n');
3690b57cec5SDimitry Andric       return true;
3700b57cec5SDimitry Andric     }
3710b57cec5SDimitry Andric   }
3720b57cec5SDimitry Andric   return false;
3730b57cec5SDimitry Andric }
3740b57cec5SDimitry Andric 
3750b57cec5SDimitry Andric /// Helper function to find the immediate (post) dominator.
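/// If \p Strict is true and the nearest common dominator turns out to be
/// \p Block itself, nullptr is returned instead.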
3760b57cec5SDimitry Andric template <typename ListOfBBs, typename DominanceAnalysis>
3770b57cec5SDimitry Andric static MachineBasicBlock *FindIDom(MachineBasicBlock &Block, ListOfBBs BBs,
37806c3fb27SDimitry Andric                                    DominanceAnalysis &Dom, bool Strict = true) {
3790b57cec5SDimitry Andric   MachineBasicBlock *IDom = &Block;
3800b57cec5SDimitry Andric   for (MachineBasicBlock *BB : BBs) {
3810b57cec5SDimitry Andric     IDom = Dom.findNearestCommonDominator(IDom, BB);
3820b57cec5SDimitry Andric     if (!IDom)
3830b57cec5SDimitry Andric       break;
3840b57cec5SDimitry Andric   }
38506c3fb27SDimitry Andric   if (Strict && IDom == &Block)
3860b57cec5SDimitry Andric     return nullptr;
3870b57cec5SDimitry Andric   return IDom;
3880b57cec5SDimitry Andric }
3890b57cec5SDimitry Andric 
39006c3fb27SDimitry Andric static bool isAnalyzableBB(const TargetInstrInfo &TII,
39106c3fb27SDimitry Andric                            MachineBasicBlock &Entry) {
39206c3fb27SDimitry Andric   // Check if the block is analyzable.
39306c3fb27SDimitry Andric   MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
39406c3fb27SDimitry Andric   SmallVector<MachineOperand, 4> Cond;
39506c3fb27SDimitry Andric   return !TII.analyzeBranch(Entry, TBB, FBB, Cond);
39606c3fb27SDimitry Andric }
39706c3fb27SDimitry Andric 
39806c3fb27SDimitry Andric /// Determines if any predecessor of MBB is on the path from a block that has a
39906c3fb27SDimitry Andric /// use or def of CSRs/FI to MBB.
40006c3fb27SDimitry Andric /// ReachableByDirty: All blocks reachable from block that has use or def of
40106c3fb27SDimitry Andric /// CSR/FI.
40206c3fb27SDimitry Andric static bool
40306c3fb27SDimitry Andric hasDirtyPred(const DenseSet<const MachineBasicBlock *> &ReachableByDirty,
40406c3fb27SDimitry Andric              const MachineBasicBlock &MBB) {
40506c3fb27SDimitry Andric   for (const MachineBasicBlock *PredBB : MBB.predecessors())
40606c3fb27SDimitry Andric     if (ReachableByDirty.count(PredBB))
40706c3fb27SDimitry Andric       return true;
40806c3fb27SDimitry Andric   return false;
40906c3fb27SDimitry Andric }
41006c3fb27SDimitry Andric 
41106c3fb27SDimitry Andric /// Derives the list of all the basic blocks reachable from MBB.
41206c3fb27SDimitry Andric static void markAllReachable(DenseSet<const MachineBasicBlock *> &Visited,
41306c3fb27SDimitry Andric                              const MachineBasicBlock &MBB) {
41406c3fb27SDimitry Andric   SmallVector<MachineBasicBlock *, 4> Worklist(MBB.succ_begin(),
41506c3fb27SDimitry Andric                                                MBB.succ_end());
41606c3fb27SDimitry Andric   Visited.insert(&MBB);
41706c3fb27SDimitry Andric   while (!Worklist.empty()) {
41806c3fb27SDimitry Andric     MachineBasicBlock *SuccMBB = Worklist.pop_back_val();
41906c3fb27SDimitry Andric     if (!Visited.insert(SuccMBB).second)
42006c3fb27SDimitry Andric       continue;
42106c3fb27SDimitry Andric     Worklist.append(SuccMBB->succ_begin(), SuccMBB->succ_end());
42206c3fb27SDimitry Andric   }
42306c3fb27SDimitry Andric }
42406c3fb27SDimitry Andric 
42506c3fb27SDimitry Andric /// Collect blocks reachable by use or def of CSRs/FI.
42606c3fb27SDimitry Andric static void collectBlocksReachableByDirty(
42706c3fb27SDimitry Andric     const DenseSet<const MachineBasicBlock *> &DirtyBBs,
42806c3fb27SDimitry Andric     DenseSet<const MachineBasicBlock *> &ReachableByDirty) {
42906c3fb27SDimitry Andric   for (const MachineBasicBlock *MBB : DirtyBBs) {
43006c3fb27SDimitry Andric     if (ReachableByDirty.count(MBB))
43106c3fb27SDimitry Andric       continue;
43206c3fb27SDimitry Andric     // Mark this block and everything reachable from it.
43306c3fb27SDimitry Andric     markAllReachable(ReachableByDirty, *MBB);
43406c3fb27SDimitry Andric   }
43506c3fb27SDimitry Andric }
43606c3fb27SDimitry Andric 
43706c3fb27SDimitry Andric /// \return true if there is a clean path from SavePoint to the original
43806c3fb27SDimitry Andric /// Restore.
43906c3fb27SDimitry Andric static bool
44006c3fb27SDimitry Andric isSaveReachableThroughClean(const MachineBasicBlock *SavePoint,
44106c3fb27SDimitry Andric                             ArrayRef<MachineBasicBlock *> CleanPreds) {
44206c3fb27SDimitry Andric   DenseSet<const MachineBasicBlock *> Visited;
44306c3fb27SDimitry Andric   SmallVector<MachineBasicBlock *, 4> Worklist(CleanPreds.begin(),
44406c3fb27SDimitry Andric                                                CleanPreds.end());
44506c3fb27SDimitry Andric   while (!Worklist.empty()) {
44606c3fb27SDimitry Andric     MachineBasicBlock *CleanBB = Worklist.pop_back_val();
44706c3fb27SDimitry Andric     if (CleanBB == SavePoint)
44806c3fb27SDimitry Andric       return true;
44906c3fb27SDimitry Andric     if (!Visited.insert(CleanBB).second || !CleanBB->pred_size())
45006c3fb27SDimitry Andric       continue;
45106c3fb27SDimitry Andric     Worklist.append(CleanBB->pred_begin(), CleanBB->pred_end());
45206c3fb27SDimitry Andric   }
45306c3fb27SDimitry Andric   return false;
45406c3fb27SDimitry Andric }
45506c3fb27SDimitry Andric 
45606c3fb27SDimitry Andric /// This function updates the branches post restore point split.
45706c3fb27SDimitry Andric ///
45806c3fb27SDimitry Andric /// Restore point has been split.
45906c3fb27SDimitry Andric /// Old restore point: MBB
46006c3fb27SDimitry Andric /// New restore point: NMBB
46106c3fb27SDimitry Andric /// Any basic block (say BBToUpdate) which previously had a fallthrough to MBB
46206c3fb27SDimitry Andric /// should
46306c3fb27SDimitry Andric /// 1. Fall through to NMBB iff NMBB is inserted immediately above MBB in the
46406c3fb27SDimitry Andric /// block layout OR
46506c3fb27SDimitry Andric /// 2. Branch unconditionally to NMBB iff NMBB is inserted at any other place.
46606c3fb27SDimitry Andric static void updateTerminator(MachineBasicBlock *BBToUpdate,
46706c3fb27SDimitry Andric                              MachineBasicBlock *NMBB,
46806c3fb27SDimitry Andric                              const TargetInstrInfo *TII) {
46906c3fb27SDimitry Andric   DebugLoc DL = BBToUpdate->findBranchDebugLoc();
47006c3fb27SDimitry Andric   // If NMBB isn't the new layout successor for BBToUpdate, insert an
47106c3fb27SDimitry Andric   // unconditional branch to it.
47206c3fb27SDimitry Andric   if (!BBToUpdate->isLayoutSuccessor(NMBB))
47306c3fb27SDimitry Andric     TII->insertUnconditionalBranch(*BBToUpdate, NMBB, DL);
47406c3fb27SDimitry Andric }
47506c3fb27SDimitry Andric 
47606c3fb27SDimitry Andric /// This function splits the restore point and returns the new restore point/BB.
47706c3fb27SDimitry Andric ///
47806c3fb27SDimitry Andric /// DirtyPreds: Predecessors of \p MBB that are ReachableByDirty
47906c3fb27SDimitry Andric ///
48006c3fb27SDimitry Andric /// Decision has been made to split the restore point.
48106c3fb27SDimitry Andric /// old restore point: \p MBB
48206c3fb27SDimitry Andric /// new restore point: \p NMBB
48306c3fb27SDimitry Andric /// This function makes the necessary block layout changes so that
48406c3fb27SDimitry Andric /// 1. \p NMBB points to \p MBB unconditionally
48506c3fb27SDimitry Andric /// 2. All dirtyPreds that previously pointed to \p MBB point to \p NMBB
48606c3fb27SDimitry Andric static MachineBasicBlock *
48706c3fb27SDimitry Andric tryToSplitRestore(MachineBasicBlock *MBB,
48806c3fb27SDimitry Andric                   ArrayRef<MachineBasicBlock *> DirtyPreds,
48906c3fb27SDimitry Andric                   const TargetInstrInfo *TII) {
49006c3fb27SDimitry Andric   MachineFunction *MF = MBB->getParent();
49106c3fb27SDimitry Andric 
49206c3fb27SDimitry Andric   // Get the list of DirtyPreds that have a fallthrough to MBB
49306c3fb27SDimitry Andric   // before the block layout change. This is just to ensure that if NMBB is
49406c3fb27SDimitry Andric   // inserted after MBB, then we create an unconditional branch from
49506c3fb27SDimitry Andric   // DirtyPred/CleanPred to NMBB.
49606c3fb27SDimitry Andric   SmallPtrSet<MachineBasicBlock *, 8> MBBFallthrough;
49706c3fb27SDimitry Andric   for (MachineBasicBlock *BB : DirtyPreds)
49806c3fb27SDimitry Andric     if (BB->getFallThrough(false) == MBB)
49906c3fb27SDimitry Andric       MBBFallthrough.insert(BB);
50006c3fb27SDimitry Andric 
50106c3fb27SDimitry Andric   MachineBasicBlock *NMBB = MF->CreateMachineBasicBlock();
50206c3fb27SDimitry Andric   // Insert this block at the end of the function. Inserting in between may
50306c3fb27SDimitry Andric   // interfere with control flow optimizer decisions.
50406c3fb27SDimitry Andric   MF->insert(MF->end(), NMBB);
50506c3fb27SDimitry Andric 
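  // Propagate the live-in registers of the old restore block to the new block.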
50606c3fb27SDimitry Andric   for (const MachineBasicBlock::RegisterMaskPair &LI : MBB->liveins())
50706c3fb27SDimitry Andric     NMBB->addLiveIn(LI.PhysReg);
50806c3fb27SDimitry Andric 
50906c3fb27SDimitry Andric   TII->insertUnconditionalBranch(*NMBB, MBB, DebugLoc());
51006c3fb27SDimitry Andric 
51106c3fb27SDimitry Andric   // After splitting, all predecessors of the restore point should be dirty
51206c3fb27SDimitry Andric   // blocks.
51306c3fb27SDimitry Andric   for (MachineBasicBlock *SuccBB : DirtyPreds)
51406c3fb27SDimitry Andric     SuccBB->ReplaceUsesOfBlockWith(MBB, NMBB);
51506c3fb27SDimitry Andric 
51606c3fb27SDimitry Andric   NMBB->addSuccessor(MBB);
51706c3fb27SDimitry Andric 
51806c3fb27SDimitry Andric   for (MachineBasicBlock *BBToUpdate : MBBFallthrough)
51906c3fb27SDimitry Andric     updateTerminator(BBToUpdate, NMBB, TII);
52006c3fb27SDimitry Andric 
52106c3fb27SDimitry Andric   return NMBB;
52206c3fb27SDimitry Andric }
52306c3fb27SDimitry Andric 
52406c3fb27SDimitry Andric /// This function undoes the restore point split done earlier.
52506c3fb27SDimitry Andric ///
52606c3fb27SDimitry Andric /// DirtyPreds: All predecessors of \p NMBB that are ReachableByDirty.
52706c3fb27SDimitry Andric ///
52806c3fb27SDimitry Andric /// The restore point was split and the change needs to be rolled back. Make
52906c3fb27SDimitry Andric /// the necessary changes to reset the restore point from \p NMBB to \p MBB.
53006c3fb27SDimitry Andric static void rollbackRestoreSplit(MachineFunction &MF, MachineBasicBlock *NMBB,
53106c3fb27SDimitry Andric                                  MachineBasicBlock *MBB,
53206c3fb27SDimitry Andric                                  ArrayRef<MachineBasicBlock *> DirtyPreds,
53306c3fb27SDimitry Andric                                  const TargetInstrInfo *TII) {
53406c3fb27SDimitry Andric   // For a BB, if NMBB is the fallthrough in the current layout, then in the new
53506c3fb27SDimitry Andric   // layout a. BB should fall through to MBB OR b. BB should unconditionally
53606c3fb27SDimitry Andric   // branch to MBB.
53706c3fb27SDimitry Andric   SmallPtrSet<MachineBasicBlock *, 8> NMBBFallthrough;
53806c3fb27SDimitry Andric   for (MachineBasicBlock *BB : DirtyPreds)
53906c3fb27SDimitry Andric     if (BB->getFallThrough(false) == NMBB)
54006c3fb27SDimitry Andric       NMBBFallthrough.insert(BB);
54106c3fb27SDimitry Andric 
54206c3fb27SDimitry Andric   NMBB->removeSuccessor(MBB);
54306c3fb27SDimitry Andric   for (MachineBasicBlock *SuccBB : DirtyPreds)
54406c3fb27SDimitry Andric     SuccBB->ReplaceUsesOfBlockWith(NMBB, MBB);
54506c3fb27SDimitry Andric 
54606c3fb27SDimitry Andric   NMBB->erase(NMBB->begin(), NMBB->end());
54706c3fb27SDimitry Andric   NMBB->eraseFromParent();
54806c3fb27SDimitry Andric 
54906c3fb27SDimitry Andric   for (MachineBasicBlock *BBToUpdate : NMBBFallthrough)
55006c3fb27SDimitry Andric     updateTerminator(BBToUpdate, MBB, TII);
55106c3fb27SDimitry Andric }
55206c3fb27SDimitry Andric 
55306c3fb27SDimitry Andric // A block is deemed fit for a restore point split iff there exist both
55406c3fb27SDimitry Andric // 1. DirtyPreds - preds of CurRestore reachable from a use or def of CSR/FI
55506c3fb27SDimitry Andric // 2. CleanPreds - preds of CurRestore that aren't DirtyPreds
55606c3fb27SDimitry Andric bool ShrinkWrap::checkIfRestoreSplittable(
55706c3fb27SDimitry Andric     const MachineBasicBlock *CurRestore,
55806c3fb27SDimitry Andric     const DenseSet<const MachineBasicBlock *> &ReachableByDirty,
55906c3fb27SDimitry Andric     SmallVectorImpl<MachineBasicBlock *> &DirtyPreds,
56006c3fb27SDimitry Andric     SmallVectorImpl<MachineBasicBlock *> &CleanPreds,
56106c3fb27SDimitry Andric     const TargetInstrInfo *TII, RegScavenger *RS) {
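  // The restore block itself must not use or define a CSR/FI, otherwise it
  // cannot be separated from the dirty region.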
56206c3fb27SDimitry Andric   for (const MachineInstr &MI : *CurRestore)
56306c3fb27SDimitry Andric     if (useOrDefCSROrFI(MI, RS, /*StackAddressUsed=*/true))
56406c3fb27SDimitry Andric       return false;
56506c3fb27SDimitry Andric 
56606c3fb27SDimitry Andric   for (MachineBasicBlock *PredBB : CurRestore->predecessors()) {
56706c3fb27SDimitry Andric     if (!isAnalyzableBB(*TII, *PredBB))
56806c3fb27SDimitry Andric       return false;
56906c3fb27SDimitry Andric 
57006c3fb27SDimitry Andric     if (ReachableByDirty.count(PredBB))
57106c3fb27SDimitry Andric       DirtyPreds.push_back(PredBB);
57206c3fb27SDimitry Andric     else
57306c3fb27SDimitry Andric       CleanPreds.push_back(PredBB);
57406c3fb27SDimitry Andric   }
57506c3fb27SDimitry Andric 
57606c3fb27SDimitry Andric   return !(CleanPreds.empty() || DirtyPreds.empty());
57706c3fb27SDimitry Andric }
57806c3fb27SDimitry Andric 
57906c3fb27SDimitry Andric bool ShrinkWrap::postShrinkWrapping(bool HasCandidate, MachineFunction &MF,
58006c3fb27SDimitry Andric                                     RegScavenger *RS) {
58106c3fb27SDimitry Andric   if (!EnablePostShrinkWrapOpt)
58206c3fb27SDimitry Andric     return false;
58306c3fb27SDimitry Andric 
58406c3fb27SDimitry Andric   MachineBasicBlock *InitSave = nullptr;
58506c3fb27SDimitry Andric   MachineBasicBlock *InitRestore = nullptr;
58606c3fb27SDimitry Andric 
58706c3fb27SDimitry Andric   if (HasCandidate) {
58806c3fb27SDimitry Andric     InitSave = Save;
58906c3fb27SDimitry Andric     InitRestore = Restore;
59006c3fb27SDimitry Andric   } else {
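    // No candidate was found by the regular analysis: start from the entry
    // block as the save point and look for a single return block to act as
    // the restore point.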
59106c3fb27SDimitry Andric     InitRestore = nullptr;
59206c3fb27SDimitry Andric     InitSave = &MF.front();
59306c3fb27SDimitry Andric     for (MachineBasicBlock &MBB : MF) {
59406c3fb27SDimitry Andric       if (MBB.isEHFuncletEntry())
59506c3fb27SDimitry Andric         return false;
59606c3fb27SDimitry Andric       if (MBB.isReturnBlock()) {
59706c3fb27SDimitry Andric         // Do not support multiple restore points.
59806c3fb27SDimitry Andric         if (InitRestore)
59906c3fb27SDimitry Andric           return false;
60006c3fb27SDimitry Andric         InitRestore = &MBB;
60106c3fb27SDimitry Andric       }
60206c3fb27SDimitry Andric     }
60306c3fb27SDimitry Andric   }
60406c3fb27SDimitry Andric 
60506c3fb27SDimitry Andric   if (!InitSave || !InitRestore || InitRestore == InitSave ||
60606c3fb27SDimitry Andric       !MDT->dominates(InitSave, InitRestore) ||
60706c3fb27SDimitry Andric       !MPDT->dominates(InitRestore, InitSave))
60806c3fb27SDimitry Andric     return false;
60906c3fb27SDimitry Andric 
61006c3fb27SDimitry Andric   // Bail out of the optimization if any of the basic blocks is the target of
61106c3fb27SDimitry Andric   // an INLINEASM_BR instruction.
61206c3fb27SDimitry Andric   for (MachineBasicBlock &MBB : MF)
61306c3fb27SDimitry Andric     if (MBB.isInlineAsmBrIndirectTarget())
61406c3fb27SDimitry Andric       return false;
61506c3fb27SDimitry Andric 
61606c3fb27SDimitry Andric   DenseSet<const MachineBasicBlock *> DirtyBBs;
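  // Collect the "dirty" blocks, i.e. blocks that use or define a CSR/FI.
  // EH pads are conservatively treated as dirty.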
61706c3fb27SDimitry Andric   for (MachineBasicBlock &MBB : MF) {
61806c3fb27SDimitry Andric     if (MBB.isEHPad()) {
61906c3fb27SDimitry Andric       DirtyBBs.insert(&MBB);
62006c3fb27SDimitry Andric       continue;
62106c3fb27SDimitry Andric     }
62206c3fb27SDimitry Andric     for (const MachineInstr &MI : MBB)
62306c3fb27SDimitry Andric       if (useOrDefCSROrFI(MI, RS, /*StackAddressUsed=*/true)) {
62406c3fb27SDimitry Andric         DirtyBBs.insert(&MBB);
62506c3fb27SDimitry Andric         break;
62606c3fb27SDimitry Andric       }
62706c3fb27SDimitry Andric   }
62806c3fb27SDimitry Andric 
62906c3fb27SDimitry Andric   // Find blocks reachable from the use or def of CSRs/FI.
63006c3fb27SDimitry Andric   DenseSet<const MachineBasicBlock *> ReachableByDirty;
63106c3fb27SDimitry Andric   collectBlocksReachableByDirty(DirtyBBs, ReachableByDirty);
63206c3fb27SDimitry Andric 
63306c3fb27SDimitry Andric   const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
63406c3fb27SDimitry Andric   SmallVector<MachineBasicBlock *, 2> DirtyPreds;
63506c3fb27SDimitry Andric   SmallVector<MachineBasicBlock *, 2> CleanPreds;
63606c3fb27SDimitry Andric   if (!checkIfRestoreSplittable(InitRestore, ReachableByDirty, DirtyPreds,
63706c3fb27SDimitry Andric                                 CleanPreds, TII, RS))
63806c3fb27SDimitry Andric     return false;
63906c3fb27SDimitry Andric 
64006c3fb27SDimitry Andric   // Try to find a new save point which dominates all the dirty blocks.
64106c3fb27SDimitry Andric   MachineBasicBlock *NewSave =
64206c3fb27SDimitry Andric       FindIDom<>(**DirtyPreds.begin(), DirtyPreds, *MDT, false);
64306c3fb27SDimitry Andric 
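  // Hoist the candidate save point towards the entry, via immediate
  // dominators, until it has no predecessor reachable from a dirty block, is
  // not more frequent than the entry block, and is not inside a loop.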
64406c3fb27SDimitry Andric   while (NewSave && (hasDirtyPred(ReachableByDirty, *NewSave) ||
6455f757f3fSDimitry Andric                      EntryFreq < MBFI->getBlockFreq(NewSave) ||
64606c3fb27SDimitry Andric                      /*The entry frequency has been observed to be higher than
64706c3fb27SDimitry Andric                         a loop block's in some cases, so check loops too*/
64806c3fb27SDimitry Andric                      MLI->getLoopFor(NewSave)))
64906c3fb27SDimitry Andric     NewSave = FindIDom<>(**NewSave->pred_begin(), NewSave->predecessors(), *MDT,
65006c3fb27SDimitry Andric                          false);
65106c3fb27SDimitry Andric 
65206c3fb27SDimitry Andric   const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
65306c3fb27SDimitry Andric   if (!NewSave || NewSave == InitSave ||
65406c3fb27SDimitry Andric       isSaveReachableThroughClean(NewSave, CleanPreds) ||
65506c3fb27SDimitry Andric       !TFI->canUseAsPrologue(*NewSave))
65606c3fb27SDimitry Andric     return false;
65706c3fb27SDimitry Andric 
65806c3fb27SDimitry Andric   // Now we know that splitting a restore point can isolate the restore point
65906c3fb27SDimitry Andric   // from clean blocks and doing so can shrink the save point.
66006c3fb27SDimitry Andric   MachineBasicBlock *NewRestore =
66106c3fb27SDimitry Andric       tryToSplitRestore(InitRestore, DirtyPreds, TII);
66206c3fb27SDimitry Andric 
66306c3fb27SDimitry Andric   // Make sure the new restore point is valid as an epilogue, depending on the
66406c3fb27SDimitry Andric   // target.
66506c3fb27SDimitry Andric   if (!TFI->canUseAsEpilogue(*NewRestore)) {
66606c3fb27SDimitry Andric     rollbackRestoreSplit(MF, NewRestore, InitRestore, DirtyPreds, TII);
66706c3fb27SDimitry Andric     return false;
66806c3fb27SDimitry Andric   }
66906c3fb27SDimitry Andric 
67006c3fb27SDimitry Andric   Save = NewSave;
67106c3fb27SDimitry Andric   Restore = NewRestore;
67206c3fb27SDimitry Andric 
673*0fca6ea1SDimitry Andric   MDT->recalculate(MF);
674*0fca6ea1SDimitry Andric   MPDT->recalculate(MF);
67506c3fb27SDimitry Andric 
67606c3fb27SDimitry Andric   assert((MDT->dominates(Save, Restore) && MPDT->dominates(Restore, Save)) &&
67706c3fb27SDimitry Andric          "Incorrect save or restore point due to dominance relations");
67806c3fb27SDimitry Andric   assert((!MLI->getLoopFor(Save) && !MLI->getLoopFor(Restore)) &&
67906c3fb27SDimitry Andric          "Unexpected save or restore point in a loop");
6805f757f3fSDimitry Andric   assert((EntryFreq >= MBFI->getBlockFreq(Save) &&
6815f757f3fSDimitry Andric           EntryFreq >= MBFI->getBlockFreq(Restore)) &&
68206c3fb27SDimitry Andric          "Incorrect save or restore point based on block frequency");
68306c3fb27SDimitry Andric   return true;
68406c3fb27SDimitry Andric }
68506c3fb27SDimitry Andric 
6860b57cec5SDimitry Andric void ShrinkWrap::updateSaveRestorePoints(MachineBasicBlock &MBB,
6870b57cec5SDimitry Andric                                          RegScavenger *RS) {
6880b57cec5SDimitry Andric   // Get rid of the easy cases first.
6890b57cec5SDimitry Andric   if (!Save)
6900b57cec5SDimitry Andric     Save = &MBB;
6910b57cec5SDimitry Andric   else
6920b57cec5SDimitry Andric     Save = MDT->findNearestCommonDominator(Save, &MBB);
693e8d8bef9SDimitry Andric   assert(Save);
6940b57cec5SDimitry Andric 
6950b57cec5SDimitry Andric   if (!Restore)
6960b57cec5SDimitry Andric     Restore = &MBB;
6970b57cec5SDimitry Andric   else if (MPDT->getNode(&MBB)) // If the block is not in the post dom tree, it
6980b57cec5SDimitry Andric                                 // means the block never returns. If that's the
6990b57cec5SDimitry Andric                                 // case, we don't want to call
7000b57cec5SDimitry Andric                                 // `findNearestCommonDominator`, which will
7010b57cec5SDimitry Andric                                 // return `Restore`.
7020b57cec5SDimitry Andric     Restore = MPDT->findNearestCommonDominator(Restore, &MBB);
7030b57cec5SDimitry Andric   else
7040b57cec5SDimitry Andric     Restore = nullptr; // Abort, we can't find a restore point in this case.
7050b57cec5SDimitry Andric 
7060b57cec5SDimitry Andric   // Make sure we would be able to insert the restore code before the
7070b57cec5SDimitry Andric   // terminator.
7080b57cec5SDimitry Andric   if (Restore == &MBB) {
7090b57cec5SDimitry Andric     for (const MachineInstr &Terminator : MBB.terminators()) {
71006c3fb27SDimitry Andric       if (!useOrDefCSROrFI(Terminator, RS, /*StackAddressUsed=*/true))
7110b57cec5SDimitry Andric         continue;
7120b57cec5SDimitry Andric       // One of the terminators needs to happen before the restore point.
7130b57cec5SDimitry Andric       if (MBB.succ_empty()) {
7140b57cec5SDimitry Andric         Restore = nullptr; // Abort, we can't find a restore point in this case.
7150b57cec5SDimitry Andric         break;
7160b57cec5SDimitry Andric       }
7170b57cec5SDimitry Andric       // Look for a restore point that post-dominates all the successors.
7180b57cec5SDimitry Andric       // The immediate post-dominator is what we are looking for.
7190b57cec5SDimitry Andric       Restore = FindIDom<>(*Restore, Restore->successors(), *MPDT);
7200b57cec5SDimitry Andric       break;
7210b57cec5SDimitry Andric     }
7220b57cec5SDimitry Andric   }
7230b57cec5SDimitry Andric 
7240b57cec5SDimitry Andric   if (!Restore) {
7250b57cec5SDimitry Andric     LLVM_DEBUG(
7260b57cec5SDimitry Andric         dbgs() << "Restore point needs to be spanned on several blocks\n");
7270b57cec5SDimitry Andric     return;
7280b57cec5SDimitry Andric   }
7290b57cec5SDimitry Andric 
7300b57cec5SDimitry Andric   // Make sure Save and Restore are suitable for shrink-wrapping:
7310b57cec5SDimitry Andric   // 1. all paths from Save need to lead to Restore before exiting.
7320b57cec5SDimitry Andric   // 2. all paths to Restore need to go through Save from Entry.
7330b57cec5SDimitry Andric   // We achieve that by making sure that:
7340b57cec5SDimitry Andric   // A. Save dominates Restore.
7350b57cec5SDimitry Andric   // B. Restore post-dominates Save.
7360b57cec5SDimitry Andric   // C. Save and Restore are in the same loop.
7370b57cec5SDimitry Andric   bool SaveDominatesRestore = false;
7380b57cec5SDimitry Andric   bool RestorePostDominatesSave = false;
739e8d8bef9SDimitry Andric   while (Restore &&
7400b57cec5SDimitry Andric          (!(SaveDominatesRestore = MDT->dominates(Save, Restore)) ||
7410b57cec5SDimitry Andric           !(RestorePostDominatesSave = MPDT->dominates(Restore, Save)) ||
7420b57cec5SDimitry Andric           // Post-dominance is not enough in loops to ensure that all uses/defs
7430b57cec5SDimitry Andric           // are after the prologue and before the epilogue at runtime.
7440b57cec5SDimitry Andric           // E.g.,
7450b57cec5SDimitry Andric           // while(1) {
7460b57cec5SDimitry Andric           //  Save
7470b57cec5SDimitry Andric           //  Restore
7480b57cec5SDimitry Andric           //   if (...)
7490b57cec5SDimitry Andric           //     break;
7500b57cec5SDimitry Andric           //  use/def CSRs
7510b57cec5SDimitry Andric           // }
7520b57cec5SDimitry Andric           // All the uses/defs of CSRs are dominated by Save and post-dominated
7530b57cec5SDimitry Andric           // by Restore. However, the CSRs uses are still reachable after
7540b57cec5SDimitry Andric           // Restore and before Save are executed.
7550b57cec5SDimitry Andric           //
7560b57cec5SDimitry Andric           // For now, just push the restore/save points outside of loops.
7570b57cec5SDimitry Andric           // FIXME: Refine the criteria to still find interesting cases
7580b57cec5SDimitry Andric           // for loops.
7590b57cec5SDimitry Andric           MLI->getLoopFor(Save) || MLI->getLoopFor(Restore))) {
7600b57cec5SDimitry Andric     // Fix (A).
7610b57cec5SDimitry Andric     if (!SaveDominatesRestore) {
7620b57cec5SDimitry Andric       Save = MDT->findNearestCommonDominator(Save, Restore);
7630b57cec5SDimitry Andric       continue;
7640b57cec5SDimitry Andric     }
7650b57cec5SDimitry Andric     // Fix (B).
7660b57cec5SDimitry Andric     if (!RestorePostDominatesSave)
7670b57cec5SDimitry Andric       Restore = MPDT->findNearestCommonDominator(Restore, Save);
7680b57cec5SDimitry Andric 
7690b57cec5SDimitry Andric     // Fix (C).
770e8d8bef9SDimitry Andric     if (Restore && (MLI->getLoopFor(Save) || MLI->getLoopFor(Restore))) {
7710b57cec5SDimitry Andric       if (MLI->getLoopDepth(Save) > MLI->getLoopDepth(Restore)) {
7720b57cec5SDimitry Andric         // Push Save outside of this loop if immediate dominator is different
7730b57cec5SDimitry Andric         // from save block. If immediate dominator is not different, bail out.
7740b57cec5SDimitry Andric         Save = FindIDom<>(*Save, Save->predecessors(), *MDT);
7750b57cec5SDimitry Andric         if (!Save)
7760b57cec5SDimitry Andric           break;
7770b57cec5SDimitry Andric       } else {
7780b57cec5SDimitry Andric         // If the loop does not exit, there is no point in looking
7790b57cec5SDimitry Andric         // for a post-dominator outside the loop.
7800b57cec5SDimitry Andric         SmallVector<MachineBasicBlock*, 4> ExitBlocks;
7810b57cec5SDimitry Andric         MLI->getLoopFor(Restore)->getExitingBlocks(ExitBlocks);
7820b57cec5SDimitry Andric         // Push Restore outside of this loop.
7830b57cec5SDimitry Andric         // Look for the immediate post-dominator of the loop exits.
7840b57cec5SDimitry Andric         MachineBasicBlock *IPdom = Restore;
7850b57cec5SDimitry Andric         for (MachineBasicBlock *LoopExitBB: ExitBlocks) {
7860b57cec5SDimitry Andric           IPdom = FindIDom<>(*IPdom, LoopExitBB->successors(), *MPDT);
7870b57cec5SDimitry Andric           if (!IPdom)
7880b57cec5SDimitry Andric             break;
7890b57cec5SDimitry Andric         }
7900b57cec5SDimitry Andric         // If the immediate post-dominator is not in a less nested loop,
7910b57cec5SDimitry Andric         // then we are stuck in a program with an infinite loop.
7920b57cec5SDimitry Andric         // In that case, we will not find a safe point, hence, bail out.
7930b57cec5SDimitry Andric         if (IPdom && MLI->getLoopDepth(IPdom) < MLI->getLoopDepth(Restore))
7940b57cec5SDimitry Andric           Restore = IPdom;
7950b57cec5SDimitry Andric         else {
7960b57cec5SDimitry Andric           Restore = nullptr;
7970b57cec5SDimitry Andric           break;
7980b57cec5SDimitry Andric         }
7990b57cec5SDimitry Andric       }
8000b57cec5SDimitry Andric     }
8010b57cec5SDimitry Andric   }
8020b57cec5SDimitry Andric }
8030b57cec5SDimitry Andric 
8040b57cec5SDimitry Andric static bool giveUpWithRemarks(MachineOptimizationRemarkEmitter *ORE,
8050b57cec5SDimitry Andric                               StringRef RemarkName, StringRef RemarkMessage,
8060b57cec5SDimitry Andric                               const DiagnosticLocation &Loc,
8070b57cec5SDimitry Andric                               const MachineBasicBlock *MBB) {
8080b57cec5SDimitry Andric   ORE->emit([&]() {
8090b57cec5SDimitry Andric     return MachineOptimizationRemarkMissed(DEBUG_TYPE, RemarkName, Loc, MBB)
8100b57cec5SDimitry Andric            << RemarkMessage;
8110b57cec5SDimitry Andric   });
8120b57cec5SDimitry Andric 
8130b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << RemarkMessage << '\n');
8140b57cec5SDimitry Andric   return false;
8150b57cec5SDimitry Andric }
8160b57cec5SDimitry Andric 
81706c3fb27SDimitry Andric bool ShrinkWrap::performShrinkWrapping(
81806c3fb27SDimitry Andric     const ReversePostOrderTraversal<MachineBasicBlock *> &RPOT,
81906c3fb27SDimitry Andric     RegScavenger *RS) {
82006c3fb27SDimitry Andric   for (MachineBasicBlock *MBB : RPOT) {
82106c3fb27SDimitry Andric     LLVM_DEBUG(dbgs() << "Look into: " << printMBBReference(*MBB) << '\n');
82206c3fb27SDimitry Andric 
82306c3fb27SDimitry Andric     if (MBB->isEHFuncletEntry())
82406c3fb27SDimitry Andric       return giveUpWithRemarks(ORE, "UnsupportedEHFunclets",
82506c3fb27SDimitry Andric                                "EH Funclets are not supported yet.",
82606c3fb27SDimitry Andric                                MBB->front().getDebugLoc(), MBB);
82706c3fb27SDimitry Andric 
82806c3fb27SDimitry Andric     if (MBB->isEHPad() || MBB->isInlineAsmBrIndirectTarget()) {
82906c3fb27SDimitry Andric       // Push the prologue and epilogue outside of the region that may throw (or
83006c3fb27SDimitry Andric       // jump out via inlineasm_br), by making sure that all the landing pads
83106c3fb27SDimitry Andric       // are at least at the boundary of the save and restore points.  The
83206c3fb27SDimitry Andric       // problem is that a basic block can jump out from the middle in these
83306c3fb27SDimitry Andric       // cases, which we do not handle.
83406c3fb27SDimitry Andric       updateSaveRestorePoints(*MBB, RS);
83506c3fb27SDimitry Andric       if (!ArePointsInteresting()) {
83606c3fb27SDimitry Andric         LLVM_DEBUG(dbgs() << "EHPad/inlineasm_br prevents shrink-wrapping\n");
83706c3fb27SDimitry Andric         return false;
83806c3fb27SDimitry Andric       }
83906c3fb27SDimitry Andric       continue;
84006c3fb27SDimitry Andric     }
84106c3fb27SDimitry Andric 
84206c3fb27SDimitry Andric     bool StackAddressUsed = false;
84306c3fb27SDimitry Andric     // Check if we found any stack accesses in the predecessors. We are not
84406c3fb27SDimitry Andric     // doing a full dataflow analysis here to keep things simple but just
84506c3fb27SDimitry Andric     // rely on a reverse post-order traversal (RPOT) to guarantee predecessors
84606c3fb27SDimitry Andric     // are already processed except for loops (and accept the conservative
84706c3fb27SDimitry Andric     // result for loops).
84806c3fb27SDimitry Andric     for (const MachineBasicBlock *Pred : MBB->predecessors()) {
84906c3fb27SDimitry Andric       if (StackAddressUsedBlockInfo.test(Pred->getNumber())) {
85006c3fb27SDimitry Andric         StackAddressUsed = true;
85106c3fb27SDimitry Andric         break;
85206c3fb27SDimitry Andric       }
85306c3fb27SDimitry Andric     }
85406c3fb27SDimitry Andric 
85506c3fb27SDimitry Andric     for (const MachineInstr &MI : *MBB) {
85606c3fb27SDimitry Andric       if (useOrDefCSROrFI(MI, RS, StackAddressUsed)) {
85706c3fb27SDimitry Andric         // Save (resp. restore) point must dominate (resp. post dominate)
85806c3fb27SDimitry Andric         // MI. Look for the proper basic block for those.
85906c3fb27SDimitry Andric         updateSaveRestorePoints(*MBB, RS);
86006c3fb27SDimitry Andric         // If we are at a point where we cannot improve the placement of
86106c3fb27SDimitry Andric         // save/restore instructions, just give up.
86206c3fb27SDimitry Andric         if (!ArePointsInteresting()) {
86306c3fb27SDimitry Andric           LLVM_DEBUG(dbgs() << "No Shrink wrap candidate found\n");
86406c3fb27SDimitry Andric           return false;
86506c3fb27SDimitry Andric         }
86606c3fb27SDimitry Andric         // No need to look for other instructions, this basic block
86706c3fb27SDimitry Andric         // will already be part of the handled region.
86806c3fb27SDimitry Andric         StackAddressUsed = true;
86906c3fb27SDimitry Andric         break;
87006c3fb27SDimitry Andric       }
87106c3fb27SDimitry Andric     }
87206c3fb27SDimitry Andric     StackAddressUsedBlockInfo[MBB->getNumber()] = StackAddressUsed;
87306c3fb27SDimitry Andric   }
87406c3fb27SDimitry Andric   if (!ArePointsInteresting()) {
87506c3fb27SDimitry Andric     // If the points are not interesting at this point, then they must be null
87606c3fb27SDimitry Andric     // because it means we did not encounter any frame/CSR related code.
87706c3fb27SDimitry Andric     // Otherwise, we would have returned from the previous loop.
87806c3fb27SDimitry Andric     assert(!Save && !Restore && "We miss a shrink-wrap opportunity?!");
87906c3fb27SDimitry Andric     LLVM_DEBUG(dbgs() << "Nothing to shrink-wrap\n");
88006c3fb27SDimitry Andric     return false;
88106c3fb27SDimitry Andric   }
88206c3fb27SDimitry Andric 
8835f757f3fSDimitry Andric   LLVM_DEBUG(dbgs() << "\n ** Results **\nFrequency of the Entry: "
8845f757f3fSDimitry Andric                     << EntryFreq.getFrequency() << '\n');
88506c3fb27SDimitry Andric 
88606c3fb27SDimitry Andric   const TargetFrameLowering *TFI =
88706c3fb27SDimitry Andric       MachineFunc->getSubtarget().getFrameLowering();
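  // The initial candidates may live in blocks that run more often than the
  // entry block, or in blocks the target cannot use as prologue/epilogue
  // points. Hoist Save towards the entry through its immediate dominator and
  // Restore towards the exits through its immediate post-dominator until both
  // points are cheap enough and valid, or give up if no such points exist.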
88806c3fb27SDimitry Andric   do {
88906c3fb27SDimitry Andric     LLVM_DEBUG(dbgs() << "Shrink wrap candidates (#, Name, Freq):\nSave: "
89006c3fb27SDimitry Andric                       << printMBBReference(*Save) << ' '
8915f757f3fSDimitry Andric                       << printBlockFreq(*MBFI, *Save)
89206c3fb27SDimitry Andric                       << "\nRestore: " << printMBBReference(*Restore) << ' '
8935f757f3fSDimitry Andric                       << printBlockFreq(*MBFI, *Restore) << '\n');
89406c3fb27SDimitry Andric 
89506c3fb27SDimitry Andric     bool IsSaveCheap, TargetCanUseSaveAsPrologue = false;
8965f757f3fSDimitry Andric     if (((IsSaveCheap = EntryFreq >= MBFI->getBlockFreq(Save)) &&
8975f757f3fSDimitry Andric          EntryFreq >= MBFI->getBlockFreq(Restore)) &&
89806c3fb27SDimitry Andric         ((TargetCanUseSaveAsPrologue = TFI->canUseAsPrologue(*Save)) &&
89906c3fb27SDimitry Andric          TFI->canUseAsEpilogue(*Restore)))
90006c3fb27SDimitry Andric       break;
90106c3fb27SDimitry Andric     LLVM_DEBUG(
90206c3fb27SDimitry Andric         dbgs() << "New points are too expensive or invalid for the target\n");
90306c3fb27SDimitry Andric     MachineBasicBlock *NewBB;
90406c3fb27SDimitry Andric     if (!IsSaveCheap || !TargetCanUseSaveAsPrologue) {
90506c3fb27SDimitry Andric       Save = FindIDom<>(*Save, Save->predecessors(), *MDT);
90606c3fb27SDimitry Andric       if (!Save)
90706c3fb27SDimitry Andric         break;
90806c3fb27SDimitry Andric       NewBB = Save;
90906c3fb27SDimitry Andric     } else {
91006c3fb27SDimitry Andric       // Restore is expensive.
91106c3fb27SDimitry Andric       Restore = FindIDom<>(*Restore, Restore->successors(), *MPDT);
91206c3fb27SDimitry Andric       if (!Restore)
91306c3fb27SDimitry Andric         break;
91406c3fb27SDimitry Andric       NewBB = Restore;
91506c3fb27SDimitry Andric     }
91606c3fb27SDimitry Andric     updateSaveRestorePoints(*NewBB, RS);
91706c3fb27SDimitry Andric   } while (Save && Restore);
91806c3fb27SDimitry Andric 
91906c3fb27SDimitry Andric   if (!ArePointsInteresting()) {
92006c3fb27SDimitry Andric     ++NumCandidatesDropped;
92106c3fb27SDimitry Andric     return false;
92206c3fb27SDimitry Andric   }
92306c3fb27SDimitry Andric   return true;
92406c3fb27SDimitry Andric }
92506c3fb27SDimitry Andric 
9260b57cec5SDimitry Andric bool ShrinkWrap::runOnMachineFunction(MachineFunction &MF) {
9270b57cec5SDimitry Andric   if (skipFunction(MF.getFunction()) || MF.empty() || !isShrinkWrapEnabled(MF))
9280b57cec5SDimitry Andric     return false;
9290b57cec5SDimitry Andric 
9300b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "**** Analysing " << MF.getName() << '\n');
9310b57cec5SDimitry Andric 
9320b57cec5SDimitry Andric   init(MF);
9330b57cec5SDimitry Andric 
9340b57cec5SDimitry Andric   ReversePostOrderTraversal<MachineBasicBlock *> RPOT(&*MF.begin());
9350b57cec5SDimitry Andric   if (containsIrreducibleCFG<MachineBasicBlock *>(RPOT, *MLI)) {
9360b57cec5SDimitry Andric     // If MF is irreducible, a block may be in a loop without
9370b57cec5SDimitry Andric     // MachineLoopInfo reporting it. I.e., we may use the
9380b57cec5SDimitry Andric     // post-dominance property in loops, which leads to incorrect
9390b57cec5SDimitry Andric     // results. Moreover, we may miss that the prologue and
9400b57cec5SDimitry Andric     // epilogue are not in the same loop, leading to unbalanced
9410b57cec5SDimitry Andric     // construction/deconstruction of the stack frame.
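    // For illustration, a CFG of the form
    //   Entry -> A, Entry -> B, A -> B, B -> A
    // is irreducible: the cycle {A, B} can be entered at either A or B, so it
    // has no unique header and MachineLoopInfo does not report it as a loop.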
9420b57cec5SDimitry Andric     return giveUpWithRemarks(ORE, "UnsupportedIrreducibleCFG",
9430b57cec5SDimitry Andric                              "Irreducible CFGs are not supported yet.",
9440b57cec5SDimitry Andric                              MF.getFunction().getSubprogram(), &MF.front());
9450b57cec5SDimitry Andric   }
9460b57cec5SDimitry Andric 
9470b57cec5SDimitry Andric   const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
9480b57cec5SDimitry Andric   std::unique_ptr<RegScavenger> RS(
9490b57cec5SDimitry Andric       TRI->requiresRegisterScavenging(MF) ? new RegScavenger() : nullptr);
9500b57cec5SDimitry Andric 
95106c3fb27SDimitry Andric   bool Changed = false;
9520b57cec5SDimitry Andric 
953*0fca6ea1SDimitry Andric   // Initially, conservatively assume that stack addresses can be used in each
954*0fca6ea1SDimitry Andric   // basic block and change the state only for those basic blocks for which we
955*0fca6ea1SDimitry Andric   // were able to prove the opposite.
95606c3fb27SDimitry Andric   StackAddressUsedBlockInfo.resize(MF.getNumBlockIDs(), true);
95706c3fb27SDimitry Andric   bool HasCandidate = performShrinkWrapping(RPOT, RS.get());
95806c3fb27SDimitry Andric   StackAddressUsedBlockInfo.clear();
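  // postShrinkWrapping may still find or adjust save/restore points even when
  // the scan above did not produce a candidate, so consider both results
  // before giving up.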
95906c3fb27SDimitry Andric   Changed = postShrinkWrapping(HasCandidate, MF, RS.get());
96006c3fb27SDimitry Andric   if (!HasCandidate && !Changed)
9610b57cec5SDimitry Andric     return false;
96206c3fb27SDimitry Andric   if (!ArePointsInteresting())
96306c3fb27SDimitry Andric     return Changed;
9640b57cec5SDimitry Andric 
9650b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "Final shrink wrap candidates:\nSave: "
96606c3fb27SDimitry Andric                     << printMBBReference(*Save) << ' '
96706c3fb27SDimitry Andric                     << "\nRestore: " << printMBBReference(*Restore) << '\n');
9680b57cec5SDimitry Andric 
9690b57cec5SDimitry Andric   MachineFrameInfo &MFI = MF.getFrameInfo();
9700b57cec5SDimitry Andric   MFI.setSavePoint(Save);
9710b57cec5SDimitry Andric   MFI.setRestorePoint(Restore);
9720b57cec5SDimitry Andric   ++NumCandidates;
97306c3fb27SDimitry Andric   return Changed;
9740b57cec5SDimitry Andric }
9750b57cec5SDimitry Andric 
9760b57cec5SDimitry Andric bool ShrinkWrap::isShrinkWrapEnabled(const MachineFunction &MF) {
9770b57cec5SDimitry Andric   const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
9780b57cec5SDimitry Andric 
9790b57cec5SDimitry Andric   switch (EnableShrinkWrapOpt) {
9800b57cec5SDimitry Andric   case cl::BOU_UNSET:
9810b57cec5SDimitry Andric     return TFI->enableShrinkWrapping(MF) &&
9820b57cec5SDimitry Andric            // Windows with CFI has some limitations that make it impossible
9830b57cec5SDimitry Andric            // to use shrink-wrapping.
9840b57cec5SDimitry Andric            !MF.getTarget().getMCAsmInfo()->usesWindowsCFI() &&
9850b57cec5SDimitry Andric            // Sanitizers look at the value of the stack at the location
9860b57cec5SDimitry Andric            // of the crash. Since a crash can happen anywhere, the
9870b57cec5SDimitry Andric            // frame must be lowered before anything else happens for the
9880b57cec5SDimitry Andric            // sanitizers to be able to get a correct stack frame.
9890b57cec5SDimitry Andric            !(MF.getFunction().hasFnAttribute(Attribute::SanitizeAddress) ||
9900b57cec5SDimitry Andric              MF.getFunction().hasFnAttribute(Attribute::SanitizeThread) ||
9910b57cec5SDimitry Andric              MF.getFunction().hasFnAttribute(Attribute::SanitizeMemory) ||
9920b57cec5SDimitry Andric              MF.getFunction().hasFnAttribute(Attribute::SanitizeHWAddress));
9930b57cec5SDimitry Andric   // If EnableShrinkWrap is set, it takes precedence over whatever the
9940b57cec5SDimitry Andric   // target sets. The rationale is that we assume we want to test
9950b57cec5SDimitry Andric   // something related to shrink-wrapping.
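  // For example (assuming the cl::opt registered earlier in this file is
  // spelled "enable-shrink-wrap"), the pass can be forced on or off from the
  // command line with:
  //   llc -enable-shrink-wrap=true  foo.ll
  //   llc -enable-shrink-wrap=false foo.ll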
9960b57cec5SDimitry Andric   case cl::BOU_TRUE:
9970b57cec5SDimitry Andric     return true;
9980b57cec5SDimitry Andric   case cl::BOU_FALSE:
9990b57cec5SDimitry Andric     return false;
10000b57cec5SDimitry Andric   }
10010b57cec5SDimitry Andric   llvm_unreachable("Invalid shrink-wrapping state");
10020b57cec5SDimitry Andric }
1003