//===-- TargetInstrInfo.cpp - Target Instruction Information --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/CodeGen/MachineCombinerPattern.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/MachineScheduler.h"
#include "llvm/CodeGen/MachineTraceMetrics.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/ScoreboardHazardRecognizer.h"
#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSchedule.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCInstrItineraries.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"

using namespace llvm;

static cl::opt<bool> DisableHazardRecognizer(
    "disable-sched-hazard", cl::Hidden, cl::init(false),
    cl::desc("Disable hazard detection during preRA scheduling"));

TargetInstrInfo::~TargetInstrInfo() = default;

const TargetRegisterClass*
TargetInstrInfo::getRegClass(const MCInstrDesc &MCID, unsigned OpNum,
                             const TargetRegisterInfo *TRI,
                             const MachineFunction &MF) const {
  if (OpNum >= MCID.getNumOperands())
    return nullptr;

  short RegClass = MCID.operands()[OpNum].RegClass;
  if (MCID.operands()[OpNum].isLookupPtrRegClass())
    return TRI->getPointerRegClass(MF, RegClass);

  // Instructions like INSERT_SUBREG do not have fixed register classes.
  if (RegClass < 0)
    return nullptr;

  // Otherwise just look it up normally.
  return TRI->getRegClass(RegClass);
}

/// insertNoop - Insert a noop into the instruction stream at the specified
/// point.
void TargetInstrInfo::insertNoop(MachineBasicBlock &MBB,
                                 MachineBasicBlock::iterator MI) const {
  llvm_unreachable("Target didn't implement insertNoop!");
}

/// insertNoops - Insert noops into the instruction stream at the specified
/// point.
void TargetInstrInfo::insertNoops(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator MI,
                                  unsigned Quantity) const {
  for (unsigned i = 0; i < Quantity; ++i)
    insertNoop(MBB, MI);
}

static bool isAsmComment(const char *Str, const MCAsmInfo &MAI) {
  return strncmp(Str, MAI.getCommentString().data(),
                 MAI.getCommentString().size()) == 0;
}

/// Measure the specified inline asm to determine an approximation of its
/// length.
/// Comments (which run till the next SeparatorString or newline) do not
/// count as an instruction.
/// Any other non-whitespace text is considered an instruction, with
/// multiple instructions separated by SeparatorString or newlines.
/// Variable-length instructions are not handled here; this function
/// may be overridden in the target code to do that.
/// We implement a special case of the .space directive which takes only a
/// single integer argument in base 10 that is the size in bytes. This is a
/// restricted form of the GAS directive in that we only interpret
/// simple--i.e. not a logical or arithmetic expression--size values without
/// the optional fill value. This is primarily used for creating arbitrary
/// sized inline asm blocks for testing purposes.
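///
/// Worked example (illustrative only; it assumes a target whose
/// MaxInstLength is 4 and whose comment string is "#"):
///   "nop\n nop # ignored\n .space 8"
/// counts two ordinary instructions at 4 bytes each plus the 8 bytes
/// requested by .space, so the estimate is 4 + 4 + 8 = 16 bytes; the
/// comment text after '#' contributes nothing.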
unsigned TargetInstrInfo::getInlineAsmLength(
    const char *Str,
    const MCAsmInfo &MAI, const TargetSubtargetInfo *STI) const {
  // Count the number of instructions in the asm.
  bool AtInsnStart = true;
  unsigned Length = 0;
  const unsigned MaxInstLength = MAI.getMaxInstLength(STI);
  for (; *Str; ++Str) {
    if (*Str == '\n' || strncmp(Str, MAI.getSeparatorString(),
                                strlen(MAI.getSeparatorString())) == 0) {
      AtInsnStart = true;
    } else if (isAsmComment(Str, MAI)) {
      // Stop counting as an instruction after a comment until the next
      // separator.
      AtInsnStart = false;
    }

    if (AtInsnStart && !isSpace(static_cast<unsigned char>(*Str))) {
      unsigned AddLength = MaxInstLength;
      if (strncmp(Str, ".space", 6) == 0) {
        char *EStr;
        int SpaceSize;
        SpaceSize = strtol(Str + 6, &EStr, 10);
        SpaceSize = SpaceSize < 0 ? 0 : SpaceSize;
        while (*EStr != '\n' && isSpace(static_cast<unsigned char>(*EStr)))
          ++EStr;
        if (*EStr == '\0' || *EStr == '\n' ||
            isAsmComment(EStr, MAI)) // Successfully parsed .space argument
          AddLength = SpaceSize;
      }
      Length += AddLength;
      AtInsnStart = false;
    }
  }

  return Length;
}

/// ReplaceTailWithBranchTo - Delete the instruction Tail and everything
/// after it, replacing it with an unconditional branch to NewDest.
void
TargetInstrInfo::ReplaceTailWithBranchTo(MachineBasicBlock::iterator Tail,
                                         MachineBasicBlock *NewDest) const {
  MachineBasicBlock *MBB = Tail->getParent();

  // Remove all the old successors of MBB from the CFG.
  while (!MBB->succ_empty())
    MBB->removeSuccessor(MBB->succ_begin());

  // Save off the debug loc before erasing the instruction.
  DebugLoc DL = Tail->getDebugLoc();

  // Update call site info and remove all the dead instructions
  // from the end of MBB.
  while (Tail != MBB->end()) {
    auto MI = Tail++;
    if (MI->shouldUpdateCallSiteInfo())
      MBB->getParent()->eraseCallSiteInfo(&*MI);
    MBB->erase(MI);
  }

  // If NewDest isn't immediately after MBB, insert a branch to it.
  if (++MachineFunction::iterator(MBB) != MachineFunction::iterator(NewDest))
    insertBranch(*MBB, NewDest, nullptr, SmallVector<MachineOperand, 0>(), DL);
  MBB->addSuccessor(NewDest);
}

MachineInstr *TargetInstrInfo::commuteInstructionImpl(MachineInstr &MI,
                                                      bool NewMI, unsigned Idx1,
                                                      unsigned Idx2) const {
  const MCInstrDesc &MCID = MI.getDesc();
  bool HasDef = MCID.getNumDefs();
  if (HasDef && !MI.getOperand(0).isReg())
    // No idea how to commute this instruction. Target should implement its own.
    return nullptr;

  unsigned CommutableOpIdx1 = Idx1; (void)CommutableOpIdx1;
  unsigned CommutableOpIdx2 = Idx2; (void)CommutableOpIdx2;
  assert(findCommutedOpIndices(MI, CommutableOpIdx1, CommutableOpIdx2) &&
         CommutableOpIdx1 == Idx1 && CommutableOpIdx2 == Idx2 &&
         "TargetInstrInfo::CommuteInstructionImpl(): not commutable operands.");
  assert(MI.getOperand(Idx1).isReg() && MI.getOperand(Idx2).isReg() &&
         "This only knows how to commute register operands so far");

  Register Reg0 = HasDef ? MI.getOperand(0).getReg() : Register();
  Register Reg1 = MI.getOperand(Idx1).getReg();
  Register Reg2 = MI.getOperand(Idx2).getReg();
  unsigned SubReg0 = HasDef ? MI.getOperand(0).getSubReg() : 0;
  unsigned SubReg1 = MI.getOperand(Idx1).getSubReg();
  unsigned SubReg2 = MI.getOperand(Idx2).getSubReg();
  bool Reg1IsKill = MI.getOperand(Idx1).isKill();
  bool Reg2IsKill = MI.getOperand(Idx2).isKill();
  bool Reg1IsUndef = MI.getOperand(Idx1).isUndef();
  bool Reg2IsUndef = MI.getOperand(Idx2).isUndef();
  bool Reg1IsInternal = MI.getOperand(Idx1).isInternalRead();
  bool Reg2IsInternal = MI.getOperand(Idx2).isInternalRead();
  // Avoid calling isRenamable for virtual registers since we assert that
  // renamable property is only queried/set for physical registers.
  bool Reg1IsRenamable =
      Reg1.isPhysical() ? MI.getOperand(Idx1).isRenamable() : false;
  bool Reg2IsRenamable =
      Reg2.isPhysical() ? MI.getOperand(Idx2).isRenamable() : false;
  // If the destination is tied to either of the commuted source registers,
  // it must be updated.
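  // Illustrative sketch (MIR-style notation, not tied to any particular
  // target): for a hypothetical two-address instruction such as
  //   %x = ADDrr %x (tied to def 0), %y
  // commuting the sources also has to rewrite the def, giving
  //   %y = ADDrr %y (tied to def 0), %x
  // which is why Reg0/SubReg0 below are redirected to the operand that ends
  // up in the tied slot, and that operand's kill flag is cleared.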
  if (HasDef && Reg0 == Reg1 &&
      MI.getDesc().getOperandConstraint(Idx1, MCOI::TIED_TO) == 0) {
    Reg2IsKill = false;
    Reg0 = Reg2;
    SubReg0 = SubReg2;
  } else if (HasDef && Reg0 == Reg2 &&
             MI.getDesc().getOperandConstraint(Idx2, MCOI::TIED_TO) == 0) {
    Reg1IsKill = false;
    Reg0 = Reg1;
    SubReg0 = SubReg1;
  }

  MachineInstr *CommutedMI = nullptr;
  if (NewMI) {
    // Create a new instruction.
    MachineFunction &MF = *MI.getMF();
    CommutedMI = MF.CloneMachineInstr(&MI);
  } else {
    CommutedMI = &MI;
  }

  if (HasDef) {
    CommutedMI->getOperand(0).setReg(Reg0);
    CommutedMI->getOperand(0).setSubReg(SubReg0);
  }
  CommutedMI->getOperand(Idx2).setReg(Reg1);
  CommutedMI->getOperand(Idx1).setReg(Reg2);
  CommutedMI->getOperand(Idx2).setSubReg(SubReg1);
  CommutedMI->getOperand(Idx1).setSubReg(SubReg2);
  CommutedMI->getOperand(Idx2).setIsKill(Reg1IsKill);
  CommutedMI->getOperand(Idx1).setIsKill(Reg2IsKill);
  CommutedMI->getOperand(Idx2).setIsUndef(Reg1IsUndef);
  CommutedMI->getOperand(Idx1).setIsUndef(Reg2IsUndef);
  CommutedMI->getOperand(Idx2).setIsInternalRead(Reg1IsInternal);
  CommutedMI->getOperand(Idx1).setIsInternalRead(Reg2IsInternal);
  // Avoid calling setIsRenamable for virtual registers since we assert that
  // renamable property is only queried/set for physical registers.
  if (Reg1.isPhysical())
    CommutedMI->getOperand(Idx2).setIsRenamable(Reg1IsRenamable);
  if (Reg2.isPhysical())
    CommutedMI->getOperand(Idx1).setIsRenamable(Reg2IsRenamable);
  return CommutedMI;
}

MachineInstr *TargetInstrInfo::commuteInstruction(MachineInstr &MI, bool NewMI,
                                                  unsigned OpIdx1,
                                                  unsigned OpIdx2) const {
  // If OpIdx1 or OpIdx2 is not specified, then this method is free to choose
  // any commutable operand, which is done in the findCommutedOpIndices()
  // call below.
  if ((OpIdx1 == CommuteAnyOperandIndex || OpIdx2 == CommuteAnyOperandIndex) &&
      !findCommutedOpIndices(MI, OpIdx1, OpIdx2)) {
    assert(MI.isCommutable() &&
           "Precondition violation: MI must be commutable.");
    return nullptr;
  }
  return commuteInstructionImpl(MI, NewMI, OpIdx1, OpIdx2);
}

bool TargetInstrInfo::fixCommutedOpIndices(unsigned &ResultIdx1,
                                           unsigned &ResultIdx2,
                                           unsigned CommutableOpIdx1,
                                           unsigned CommutableOpIdx2) {
  if (ResultIdx1 == CommuteAnyOperandIndex &&
      ResultIdx2 == CommuteAnyOperandIndex) {
    ResultIdx1 = CommutableOpIdx1;
    ResultIdx2 = CommutableOpIdx2;
  } else if (ResultIdx1 == CommuteAnyOperandIndex) {
    if (ResultIdx2 == CommutableOpIdx1)
      ResultIdx1 = CommutableOpIdx2;
    else if (ResultIdx2 == CommutableOpIdx2)
      ResultIdx1 = CommutableOpIdx1;
    else
      return false;
  } else if (ResultIdx2 == CommuteAnyOperandIndex) {
    if (ResultIdx1 == CommutableOpIdx1)
      ResultIdx2 = CommutableOpIdx2;
    else if (ResultIdx1 == CommutableOpIdx2)
      ResultIdx2 = CommutableOpIdx1;
    else
      return false;
  } else
    // Check that the result operand indices match the given commutable
    // operand indices.
    return (ResultIdx1 == CommutableOpIdx1 && ResultIdx2 == CommutableOpIdx2) ||
           (ResultIdx1 == CommutableOpIdx2 && ResultIdx2 == CommutableOpIdx1);

  return true;
}
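
// Minimal sketch of how a target's findCommutedOpIndices() override is
// expected to use fixCommutedOpIndices(); the target name, opcode, and
// operand positions are hypothetical, not taken from any in-tree target:
//
//   bool MyTargetInstrInfo::findCommutedOpIndices(const MachineInstr &MI,
//                                                 unsigned &SrcOpIdx1,
//                                                 unsigned &SrcOpIdx2) const {
//     switch (MI.getOpcode()) {
//     case MyTarget::FMADD:
//       // Only the two multiplicands (operands 1 and 2) may be swapped.
//       return fixCommutedOpIndices(SrcOpIdx1, SrcOpIdx2, 1, 2);
//     default:
//       return TargetInstrInfo::findCommutedOpIndices(MI, SrcOpIdx1, SrcOpIdx2);
//     }
//   }
//
// fixCommutedOpIndices() reconciles the caller's requested indices (fixed
// values or CommuteAnyOperandIndex) with the pair the target actually
// supports.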
bool TargetInstrInfo::findCommutedOpIndices(const MachineInstr &MI,
                                            unsigned &SrcOpIdx1,
                                            unsigned &SrcOpIdx2) const {
  assert(!MI.isBundle() &&
         "TargetInstrInfo::findCommutedOpIndices() can't handle bundles");

  const MCInstrDesc &MCID = MI.getDesc();
  if (!MCID.isCommutable())
    return false;

  // This assumes v0 = op v1, v2 and commuting would swap v1 and v2. If this
  // is not true, then the target must implement this.
  unsigned CommutableOpIdx1 = MCID.getNumDefs();
  unsigned CommutableOpIdx2 = CommutableOpIdx1 + 1;
  if (!fixCommutedOpIndices(SrcOpIdx1, SrcOpIdx2,
                            CommutableOpIdx1, CommutableOpIdx2))
    return false;

  if (!MI.getOperand(SrcOpIdx1).isReg() || !MI.getOperand(SrcOpIdx2).isReg())
    // No idea.
    return false;
  return true;
}

bool TargetInstrInfo::isUnpredicatedTerminator(const MachineInstr &MI) const {
  if (!MI.isTerminator()) return false;

  // Conditional branch is a special case.
  if (MI.isBranch() && !MI.isBarrier())
    return true;
  if (!MI.isPredicable())
    return true;
  return !isPredicated(MI);
}

bool TargetInstrInfo::PredicateInstruction(
    MachineInstr &MI, ArrayRef<MachineOperand> Pred) const {
  bool MadeChange = false;

  assert(!MI.isBundle() &&
         "TargetInstrInfo::PredicateInstruction() can't handle bundles");

  const MCInstrDesc &MCID = MI.getDesc();
  if (!MI.isPredicable())
    return false;

  for (unsigned j = 0, i = 0, e = MI.getNumOperands(); i != e; ++i) {
    if (MCID.operands()[i].isPredicate()) {
      MachineOperand &MO = MI.getOperand(i);
      if (MO.isReg()) {
        MO.setReg(Pred[j].getReg());
        MadeChange = true;
      } else if (MO.isImm()) {
        MO.setImm(Pred[j].getImm());
        MadeChange = true;
      } else if (MO.isMBB()) {
        MO.setMBB(Pred[j].getMBB());
        MadeChange = true;
      }
      ++j;
    }
  }
  return MadeChange;
}
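
// For illustration (a hypothetical ARM-like predicate encoding with two
// predicate operands <cond-imm, pred-reg>): predicating
//   %0 = ADDri %1, 1, 14 /* AL */, $noreg
// with Pred = { 0 /* EQ */, $cpsr } rewrites the two predicate operands in
// place, yielding
//   %0 = ADDri %1, 1, 0 /* EQ */, $cpsr
// The loop in PredicateInstruction() simply copies each element of Pred into
// the corresponding operand flagged as a predicate in the MCInstrDesc.
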
bool TargetInstrInfo::hasLoadFromStackSlot(
    const MachineInstr &MI,
    SmallVectorImpl<const MachineMemOperand *> &Accesses) const {
  size_t StartSize = Accesses.size();
  for (MachineInstr::mmo_iterator o = MI.memoperands_begin(),
                                  oe = MI.memoperands_end();
       o != oe; ++o) {
    if ((*o)->isLoad() &&
        isa_and_nonnull<FixedStackPseudoSourceValue>((*o)->getPseudoValue()))
      Accesses.push_back(*o);
  }
  return Accesses.size() != StartSize;
}

bool TargetInstrInfo::hasStoreToStackSlot(
    const MachineInstr &MI,
    SmallVectorImpl<const MachineMemOperand *> &Accesses) const {
  size_t StartSize = Accesses.size();
  for (MachineInstr::mmo_iterator o = MI.memoperands_begin(),
                                  oe = MI.memoperands_end();
       o != oe; ++o) {
    if ((*o)->isStore() &&
        isa_and_nonnull<FixedStackPseudoSourceValue>((*o)->getPseudoValue()))
      Accesses.push_back(*o);
  }
  return Accesses.size() != StartSize;
}

bool TargetInstrInfo::getStackSlotRange(const TargetRegisterClass *RC,
                                        unsigned SubIdx, unsigned &Size,
                                        unsigned &Offset,
                                        const MachineFunction &MF) const {
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  if (!SubIdx) {
    Size = TRI->getSpillSize(*RC);
    Offset = 0;
    return true;
  }
  unsigned BitSize = TRI->getSubRegIdxSize(SubIdx);
  // Convert bit size to byte size.
  if (BitSize % 8)
    return false;

  int BitOffset = TRI->getSubRegIdxOffset(SubIdx);
  if (BitOffset < 0 || BitOffset % 8)
    return false;

  Size = BitSize / 8;
  Offset = (unsigned)BitOffset / 8;

  assert(TRI->getSpillSize(*RC) >= (Offset + Size) && "bad subregister range");

  if (!MF.getDataLayout().isLittleEndian()) {
    Offset = TRI->getSpillSize(*RC) - (Offset + Size);
  }
  return true;
}
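
// Worked example for getStackSlotRange() (numbers are illustrative only):
// for a register class whose spill size is 8 bytes and a subregister index
// covering bits [32, 64), BitSize/BitOffset are 32/32, so Size = 4 and
// Offset = 4.  On a little-endian target the high half therefore lives at
// byte offset 4 of the slot; on a big-endian target the offset is flipped
// to 8 - (4 + 4) = 0.
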
void TargetInstrInfo::reMaterialize(MachineBasicBlock &MBB,
                                    MachineBasicBlock::iterator I,
                                    Register DestReg, unsigned SubIdx,
                                    const MachineInstr &Orig,
                                    const TargetRegisterInfo &TRI) const {
  MachineInstr *MI = MBB.getParent()->CloneMachineInstr(&Orig);
  MI->substituteRegister(MI->getOperand(0).getReg(), DestReg, SubIdx, TRI);
  MBB.insert(I, MI);
}

bool TargetInstrInfo::produceSameValue(const MachineInstr &MI0,
                                       const MachineInstr &MI1,
                                       const MachineRegisterInfo *MRI) const {
  return MI0.isIdenticalTo(MI1, MachineInstr::IgnoreVRegDefs);
}

MachineInstr &
TargetInstrInfo::duplicate(MachineBasicBlock &MBB,
                           MachineBasicBlock::iterator InsertBefore,
                           const MachineInstr &Orig) const {
  MachineFunction &MF = *MBB.getParent();
  // CFI instructions are marked as non-duplicable, because Darwin compact
  // unwind info emission can't handle multiple prologue setups.
  assert((!Orig.isNotDuplicable() ||
          (!MF.getTarget().getTargetTriple().isOSDarwin() &&
           Orig.isCFIInstruction())) &&
         "Instruction cannot be duplicated");

  return MF.cloneMachineInstrBundle(MBB, InsertBefore, Orig);
}

// If the COPY instruction in MI can be folded to a stack operation, return
// the register class to use.
static const TargetRegisterClass *canFoldCopy(const MachineInstr &MI,
                                              const TargetInstrInfo &TII,
                                              unsigned FoldIdx) {
  assert(TII.isCopyInstr(MI) && "MI must be a COPY instruction");
  if (MI.getNumOperands() != 2)
    return nullptr;
  assert(FoldIdx < 2 && "FoldIdx refers to a nonexistent operand");

  const MachineOperand &FoldOp = MI.getOperand(FoldIdx);
  const MachineOperand &LiveOp = MI.getOperand(1 - FoldIdx);

  if (FoldOp.getSubReg() || LiveOp.getSubReg())
    return nullptr;

  Register FoldReg = FoldOp.getReg();
  Register LiveReg = LiveOp.getReg();

  assert(FoldReg.isVirtual() && "Cannot fold physregs");

  const MachineRegisterInfo &MRI = MI.getMF()->getRegInfo();
  const TargetRegisterClass *RC = MRI.getRegClass(FoldReg);

  if (LiveOp.getReg().isPhysical())
    return RC->contains(LiveOp.getReg()) ? RC : nullptr;

  if (RC->hasSubClassEq(MRI.getRegClass(LiveReg)))
    return RC;

  // FIXME: Allow folding when register classes are memory compatible.
  return nullptr;
}

MCInst TargetInstrInfo::getNop() const { llvm_unreachable("Not implemented"); }
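
// Operand layout assumed by the folding helpers below (see StackMaps.h and
// the StackMaps documentation for the authoritative encoding): STACKMAP and
// PATCHPOINT carry a fixed prefix (ID, byte count, and for PATCHPOINT the
// call target and call arguments) followed by the variable "live value"
// operands; STATEPOINT additionally has defs up front and carries its
// deopt/gc operands in the variable section.  Only the variable section may
// be folded to a frame index, which is the range reported by
// getPatchpointUnfoldableRange().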
std::pair<unsigned, unsigned>
TargetInstrInfo::getPatchpointUnfoldableRange(const MachineInstr &MI) const {
  switch (MI.getOpcode()) {
  case TargetOpcode::STACKMAP:
    // StackMapLiveValues are foldable
    return std::make_pair(0, StackMapOpers(&MI).getVarIdx());
  case TargetOpcode::PATCHPOINT:
    // For PatchPoint, the call args are not foldable (even if reported in the
    // stackmap e.g. via anyregcc).
    return std::make_pair(0, PatchPointOpers(&MI).getVarIdx());
  case TargetOpcode::STATEPOINT:
    // For statepoints, fold deopt and gc arguments, but not call arguments.
    return std::make_pair(MI.getNumDefs(), StatepointOpers(&MI).getVarIdx());
  default:
    llvm_unreachable("unexpected stackmap opcode");
  }
}

static MachineInstr *foldPatchpoint(MachineFunction &MF, MachineInstr &MI,
                                    ArrayRef<unsigned> Ops, int FrameIndex,
                                    const TargetInstrInfo &TII) {
  unsigned StartIdx = 0;
  unsigned NumDefs = 0;
  // getPatchpointUnfoldableRange() hits llvm_unreachable if MI is not a
  // stackmap, patchpoint, or statepoint.
  std::tie(NumDefs, StartIdx) = TII.getPatchpointUnfoldableRange(MI);

  unsigned DefToFoldIdx = MI.getNumOperands();

  // Return nullptr if any operands requested for folding are not foldable
  // (not part of the stackmap's live values).
  for (unsigned Op : Ops) {
    if (Op < NumDefs) {
      assert(DefToFoldIdx == MI.getNumOperands() && "Folding multiple defs");
      DefToFoldIdx = Op;
    } else if (Op < StartIdx) {
      return nullptr;
    }
    if (MI.getOperand(Op).isTied())
      return nullptr;
  }

  MachineInstr *NewMI =
      MF.CreateMachineInstr(TII.get(MI.getOpcode()), MI.getDebugLoc(), true);
  MachineInstrBuilder MIB(MF, NewMI);

  // No need to fold the return value, the metadata, or the function arguments.
  for (unsigned i = 0; i < StartIdx; ++i)
    if (i != DefToFoldIdx)
      MIB.add(MI.getOperand(i));

  for (unsigned i = StartIdx, e = MI.getNumOperands(); i < e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    unsigned TiedTo = e;
    (void)MI.isRegTiedToDefOperand(i, &TiedTo);

    if (is_contained(Ops, i)) {
      assert(TiedTo == e && "Cannot fold tied operands");
      unsigned SpillSize;
      unsigned SpillOffset;
      // Compute the spill slot size and offset.
      const TargetRegisterClass *RC =
          MF.getRegInfo().getRegClass(MO.getReg());
      bool Valid =
          TII.getStackSlotRange(RC, MO.getSubReg(), SpillSize, SpillOffset, MF);
      if (!Valid)
        report_fatal_error("cannot spill patchpoint subregister operand");
      MIB.addImm(StackMaps::IndirectMemRefOp);
      MIB.addImm(SpillSize);
      MIB.addFrameIndex(FrameIndex);
      MIB.addImm(SpillOffset);
    } else {
      MIB.add(MO);
      if (TiedTo < e) {
        assert(TiedTo < NumDefs && "Bad tied operand");
        if (TiedTo > DefToFoldIdx)
          --TiedTo;
        NewMI->tieOperands(TiedTo, NewMI->getNumOperands() - 1);
      }
    }
  }
  return NewMI;
}

static void foldInlineAsmMemOperand(MachineInstr *MI, unsigned OpNo, int FI,
                                    const TargetInstrInfo &TII) {
  // If the machine operand is tied, untie it first.
  if (MI->getOperand(OpNo).isTied()) {
    unsigned TiedTo = MI->findTiedOperandIdx(OpNo);
    MI->untieRegOperand(OpNo);
    // Intentional recursion!
    foldInlineAsmMemOperand(MI, TiedTo, FI, TII);
  }

  SmallVector<MachineOperand, 5> NewOps;
  TII.getFrameIndexOperands(NewOps, FI);
  assert(!NewOps.empty() && "getFrameIndexOperands didn't create any operands");
  MI->removeOperand(OpNo);
  MI->insert(MI->operands_begin() + OpNo, NewOps);

  // Change the previous operand to a MemKind InlineAsm::Flag. The second
  // parameter is the per-target number of operands that represent the memory
  // operand, excluding this one (MD). This includes MO.
  InlineAsm::Flag F(InlineAsm::Kind::Mem, NewOps.size());
  F.setMemConstraint(InlineAsm::ConstraintCode::m);
  MachineOperand &MD = MI->getOperand(OpNo - 1);
  MD.setImm(F);
}
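
// Schematic sketch of the rewrite performed above (the operand numbering and
// the x86-style addressing operands are hypothetical, and this is not exact
// MIR syntax): for
//   INLINEASM &"..." [attdialect], $0:[regdef], %0, $1:[reguse], %1
// folding operand $1 to frame index %stack.0 replaces the single register
// operand with whatever getFrameIndexOperands() produces (e.g. base, scale,
// index, displacement, segment) and rewrites $1's flag word to
// InlineAsm::Kind::Mem with constraint "m", so later passes decode the new
// operand group correctly.
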
// Returns nullptr if not possible to fold.
static MachineInstr *foldInlineAsmMemOperand(MachineInstr &MI,
                                             ArrayRef<unsigned> Ops, int FI,
                                             const TargetInstrInfo &TII) {
  assert(MI.isInlineAsm() && "wrong opcode");
  if (Ops.size() > 1)
    return nullptr;
  unsigned Op = Ops[0];
  assert(Op && "should never be first operand");
  assert(MI.getOperand(Op).isReg() && "shouldn't be folding non-reg operands");

  if (!MI.mayFoldInlineAsmRegOp(Op))
    return nullptr;

  MachineInstr &NewMI = TII.duplicate(*MI.getParent(), MI.getIterator(), MI);

  foldInlineAsmMemOperand(&NewMI, Op, FI, TII);

  // Update mayload/maystore metadata, and memoperands.
  const VirtRegInfo &RI =
      AnalyzeVirtRegInBundle(MI, MI.getOperand(Op).getReg());
  MachineOperand &ExtraMO = NewMI.getOperand(InlineAsm::MIOp_ExtraInfo);
  MachineMemOperand::Flags Flags = MachineMemOperand::MONone;
  if (RI.Reads) {
    ExtraMO.setImm(ExtraMO.getImm() | InlineAsm::Extra_MayLoad);
    Flags |= MachineMemOperand::MOLoad;
  }
  if (RI.Writes) {
    ExtraMO.setImm(ExtraMO.getImm() | InlineAsm::Extra_MayStore);
    Flags |= MachineMemOperand::MOStore;
  }
  MachineFunction *MF = NewMI.getMF();
  const MachineFrameInfo &MFI = MF->getFrameInfo();
  MachineMemOperand *MMO = MF->getMachineMemOperand(
      MachinePointerInfo::getFixedStack(*MF, FI), Flags, MFI.getObjectSize(FI),
      MFI.getObjectAlign(FI));
  NewMI.addMemOperand(*MF, MMO);

  return &NewMI;
}

MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
                                                 ArrayRef<unsigned> Ops, int FI,
                                                 LiveIntervals *LIS,
                                                 VirtRegMap *VRM) const {
  auto Flags = MachineMemOperand::MONone;
  for (unsigned OpIdx : Ops)
    Flags |= MI.getOperand(OpIdx).isDef() ? MachineMemOperand::MOStore
                                          : MachineMemOperand::MOLoad;

  MachineBasicBlock *MBB = MI.getParent();
  assert(MBB && "foldMemoryOperand needs an inserted instruction");
  MachineFunction &MF = *MBB->getParent();

  // If we're not folding a load into a subreg, the size of the load is the
  // size of the spill slot. But if we are, we need to figure out what the
  // actual load size is.
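  // For example (sizes are illustrative): reloading only a 32-bit
  // subregister of a value spilled to a 16-byte slot should use a 4-byte
  // memory operand, not the full 16 bytes, so the subregister width wins
  // below whenever it is a whole number of bytes.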
  int64_t MemSize = 0;
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  if (Flags & MachineMemOperand::MOStore) {
    MemSize = MFI.getObjectSize(FI);
  } else {
    for (unsigned OpIdx : Ops) {
      int64_t OpSize = MFI.getObjectSize(FI);

      if (auto SubReg = MI.getOperand(OpIdx).getSubReg()) {
        unsigned SubRegSize = TRI->getSubRegIdxSize(SubReg);
        if (SubRegSize > 0 && !(SubRegSize % 8))
          OpSize = SubRegSize / 8;
      }

      MemSize = std::max(MemSize, OpSize);
    }
  }

  assert(MemSize && "Did not expect a zero-sized stack slot");

  MachineInstr *NewMI = nullptr;

  if (MI.getOpcode() == TargetOpcode::STACKMAP ||
      MI.getOpcode() == TargetOpcode::PATCHPOINT ||
      MI.getOpcode() == TargetOpcode::STATEPOINT) {
    // Fold stackmap/patchpoint.
    NewMI = foldPatchpoint(MF, MI, Ops, FI, *this);
    if (NewMI)
      MBB->insert(MI, NewMI);
  } else if (MI.isInlineAsm()) {
    return foldInlineAsmMemOperand(MI, Ops, FI, *this);
  } else {
    // Ask the target to do the actual folding.
    NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, FI, LIS, VRM);
  }

  if (NewMI) {
    NewMI->setMemRefs(MF, MI.memoperands());
    // Add a memory operand, foldMemoryOperandImpl doesn't do that.
    assert((!(Flags & MachineMemOperand::MOStore) ||
            NewMI->mayStore()) &&
           "Folded a def to a non-store!");
    assert((!(Flags & MachineMemOperand::MOLoad) ||
            NewMI->mayLoad()) &&
           "Folded a use to a non-load!");
    assert(MFI.getObjectOffset(FI) != -1);
    MachineMemOperand *MMO =
        MF.getMachineMemOperand(MachinePointerInfo::getFixedStack(MF, FI),
                                Flags, MemSize, MFI.getObjectAlign(FI));
    NewMI->addMemOperand(MF, MMO);

    // The pass "x86 speculative load hardening" always attaches symbols to
    // call instructions. We need to copy them from the old instruction.
    NewMI->cloneInstrSymbols(MF, MI);

    return NewMI;
  }

  // Straight COPY may fold as load/store.
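  // For example (virtual register numbers are illustrative): folding
  // operand 0 of "%1 = COPY %0" turns the copy into a store of %0 to the
  // stack slot, while folding operand 1 turns it into a reload of the slot
  // into %1; the new spill or reload instruction is inserted immediately
  // before the COPY and returned to the caller.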
  if (!isCopyInstr(MI) || Ops.size() != 1)
    return nullptr;

  const TargetRegisterClass *RC = canFoldCopy(MI, *this, Ops[0]);
  if (!RC)
    return nullptr;

  const MachineOperand &MO = MI.getOperand(1 - Ops[0]);
  MachineBasicBlock::iterator Pos = MI;

  if (Flags == MachineMemOperand::MOStore)
    storeRegToStackSlot(*MBB, Pos, MO.getReg(), MO.isKill(), FI, RC, TRI,
                        Register());
  else
    loadRegFromStackSlot(*MBB, Pos, MO.getReg(), FI, RC, TRI, Register());
  return &*--Pos;
}

MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
                                                 ArrayRef<unsigned> Ops,
                                                 MachineInstr &LoadMI,
                                                 LiveIntervals *LIS) const {
  assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");
#ifndef NDEBUG
  for (unsigned OpIdx : Ops)
    assert(MI.getOperand(OpIdx).isUse() && "Folding load into def!");
#endif

  MachineBasicBlock &MBB = *MI.getParent();
  MachineFunction &MF = *MBB.getParent();

  // Ask the target to do the actual folding.
  MachineInstr *NewMI = nullptr;
  int FrameIndex = 0;

  if ((MI.getOpcode() == TargetOpcode::STACKMAP ||
       MI.getOpcode() == TargetOpcode::PATCHPOINT ||
       MI.getOpcode() == TargetOpcode::STATEPOINT) &&
      isLoadFromStackSlot(LoadMI, FrameIndex)) {
    // Fold stackmap/patchpoint.
    NewMI = foldPatchpoint(MF, MI, Ops, FrameIndex, *this);
    if (NewMI)
      NewMI = &*MBB.insert(MI, NewMI);
  } else if (MI.isInlineAsm() && isLoadFromStackSlot(LoadMI, FrameIndex)) {
    return foldInlineAsmMemOperand(MI, Ops, FrameIndex, *this);
  } else {
    // Ask the target to do the actual folding.
    NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);
  }

  if (!NewMI)
    return nullptr;

  // Copy the memoperands from the load to the folded instruction.
  if (MI.memoperands_empty()) {
    NewMI->setMemRefs(MF, LoadMI.memoperands());
  } else {
    // Handle the rare case of folding multiple loads.
    NewMI->setMemRefs(MF, MI.memoperands());
    for (MachineInstr::mmo_iterator I = LoadMI.memoperands_begin(),
                                    E = LoadMI.memoperands_end();
         I != E; ++I) {
      NewMI->addMemOperand(MF, *I);
    }
  }
  return NewMI;
}

/// transferImplicitOperands - MI is a pseudo-instruction, and the lowered
/// replacement instructions immediately precede it. Copy any implicit
/// operands from MI to the replacement instruction.
static void transferImplicitOperands(MachineInstr *MI,
                                     const TargetRegisterInfo *TRI) {
  MachineBasicBlock::iterator CopyMI = MI;
  --CopyMI;

  Register DstReg = MI->getOperand(0).getReg();
  for (const MachineOperand &MO : MI->implicit_operands()) {
    CopyMI->addOperand(MO);

    // Be conservative about preserving kills when subregister defs are
    // involved. If there was an implicit kill of a super-register overlapping
    // the copy result, we would kill subregisters that previous copies
    // defined.

    if (MO.isKill() && TRI->regsOverlap(DstReg, MO.getReg()))
      CopyMI->getOperand(CopyMI->getNumOperands() - 1).setIsKill(false);
  }
}

void TargetInstrInfo::lowerCopy(MachineInstr *MI,
                                const TargetRegisterInfo *TRI) const {
  if (MI->allDefsAreDead()) {
    MI->setDesc(get(TargetOpcode::KILL));
    return;
  }

  MachineOperand &DstMO = MI->getOperand(0);
  MachineOperand &SrcMO = MI->getOperand(1);

  bool IdentityCopy = (SrcMO.getReg() == DstMO.getReg());
  if (IdentityCopy || SrcMO.isUndef()) {
    // No need to insert an identity copy instruction, but replace with a KILL
    // if liveness is changed.
    if (SrcMO.isUndef() || MI->getNumOperands() > 2) {
      // We must make sure the super-register gets killed. Replace the
      // instruction with KILL.
      MI->setDesc(get(TargetOpcode::KILL));
      return;
    }
    // Vanilla identity copy.
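    // (For example, "$eax = COPY $eax" with no implicit operands carries no
    // information at all, so it is simply deleted below.)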
    MI->eraseFromParent();
    return;
  }

  copyPhysReg(*MI->getParent(), MI, MI->getDebugLoc(), DstMO.getReg(),
              SrcMO.getReg(), SrcMO.isKill());

  if (MI->getNumOperands() > 2)
    transferImplicitOperands(MI, TRI);
  MI->eraseFromParent();
}

bool TargetInstrInfo::hasReassociableOperands(
    const MachineInstr &Inst, const MachineBasicBlock *MBB) const {
  const MachineOperand &Op1 = Inst.getOperand(1);
  const MachineOperand &Op2 = Inst.getOperand(2);
  const MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();

  // We need virtual register definitions for the operands that we will
  // reassociate.
  MachineInstr *MI1 = nullptr;
  MachineInstr *MI2 = nullptr;
  if (Op1.isReg() && Op1.getReg().isVirtual())
    MI1 = MRI.getUniqueVRegDef(Op1.getReg());
  if (Op2.isReg() && Op2.getReg().isVirtual())
    MI2 = MRI.getUniqueVRegDef(Op2.getReg());

  // And at least one operand must be defined in MBB.
  return MI1 && MI2 && (MI1->getParent() == MBB || MI2->getParent() == MBB);
}

bool TargetInstrInfo::areOpcodesEqualOrInverse(unsigned Opcode1,
                                               unsigned Opcode2) const {
  return Opcode1 == Opcode2 || getInverseOpcode(Opcode1) == Opcode2;
}
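
// For example, on a target where getInverseOpcode() maps FADD <-> FSUB (with
// reassociation-friendly fast-math flags), areOpcodesEqualOrInverse() lets
// the reassociation code below treat a chain mixing fadd and fsub the same
// way as a pure fadd chain; the opcode names here are illustrative only.
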
bool TargetInstrInfo::hasReassociableSibling(const MachineInstr &Inst,
                                             bool &Commuted) const {
  const MachineBasicBlock *MBB = Inst.getParent();
  const MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();
  MachineInstr *MI1 = MRI.getUniqueVRegDef(Inst.getOperand(1).getReg());
  MachineInstr *MI2 = MRI.getUniqueVRegDef(Inst.getOperand(2).getReg());
  unsigned Opcode = Inst.getOpcode();

  // If only one operand has the same or inverse opcode and it's the second
  // source operand, the operands must be commuted.
  Commuted = !areOpcodesEqualOrInverse(Opcode, MI1->getOpcode()) &&
             areOpcodesEqualOrInverse(Opcode, MI2->getOpcode());
  if (Commuted)
    std::swap(MI1, MI2);

  // 1. The previous instruction must be the same type as Inst.
  // 2. The previous instruction must also be associative/commutative or be the
  //    inverse of such an operation (this can be different even for
  //    instructions with the same opcode if traits like fast-math-flags are
  //    included).
  // 3. The previous instruction must have virtual register definitions for its
  //    operands in the same basic block as Inst.
  // 4. The previous instruction's result must only be used by Inst.
  return areOpcodesEqualOrInverse(Opcode, MI1->getOpcode()) &&
         (isAssociativeAndCommutative(*MI1) ||
          isAssociativeAndCommutative(*MI1, /* Invert */ true)) &&
         hasReassociableOperands(*MI1, MBB) &&
         MRI.hasOneNonDBGUse(MI1->getOperand(0).getReg());
}

// 1. The operation must be associative and commutative or be the inverse of
//    such an operation.
// 2. The instruction must have virtual register definitions for its
//    operands in the same basic block.
// 3. The instruction must have a reassociable sibling.
bool TargetInstrInfo::isReassociationCandidate(const MachineInstr &Inst,
                                               bool &Commuted) const {
  return (isAssociativeAndCommutative(Inst) ||
          isAssociativeAndCommutative(Inst, /* Invert */ true)) &&
         hasReassociableOperands(Inst, Inst.getParent()) &&
         hasReassociableSibling(Inst, Commuted);
}

// The concept of the reassociation pass is that these operations can benefit
// from this kind of transformation:
//
// A = ? op ?
// B = A op X (Prev)
// C = B op Y (Root)
// -->
// A = ? op ?
// B = X op Y
// C = A op B
//
// breaking the dependency between A and B, allowing them to be executed in
// parallel (or back-to-back in a pipeline) instead of depending on each other.

// FIXME: This has the potential to be expensive (compile time) while not
// improving the code at all. Some ways to limit the overhead:
// 1. Track successful transforms; bail out if hit rate gets too low.
// 2. Only enable at -O3 or some other non-default optimization level.
// 3. Pre-screen pattern candidates here: if an operand of the previous
//    instruction is known to not increase the critical path, then don't match
//    that pattern.
bool TargetInstrInfo::getMachineCombinerPatterns(
    MachineInstr &Root, SmallVectorImpl<unsigned> &Patterns,
    bool DoRegPressureReduce) const {
  bool Commute;
  if (isReassociationCandidate(Root, Commute)) {
    // We found a sequence of instructions that may be suitable for a
    // reassociation of operands to increase ILP. Specify each commutation
    // possibility for the Prev instruction in the sequence and let the
    // machine combiner decide if changing the operands is worthwhile.
    if (Commute) {
      Patterns.push_back(MachineCombinerPattern::REASSOC_AX_YB);
      Patterns.push_back(MachineCombinerPattern::REASSOC_XA_YB);
    } else {
      Patterns.push_back(MachineCombinerPattern::REASSOC_AX_BY);
      Patterns.push_back(MachineCombinerPattern::REASSOC_XA_BY);
    }
    return true;
  }

  return false;
}

/// Return true when a code sequence can improve loop throughput.
bool TargetInstrInfo::isThroughputPattern(unsigned Pattern) const {
  return false;
}

CombinerObjective
TargetInstrInfo::getCombinerObjective(unsigned Pattern) const {
  return CombinerObjective::Default;
}

std::pair<unsigned, unsigned>
TargetInstrInfo::getReassociationOpcodes(unsigned Pattern,
                                         const MachineInstr &Root,
                                         const MachineInstr &Prev) const {
  bool AssocCommutRoot = isAssociativeAndCommutative(Root);
  bool AssocCommutPrev = isAssociativeAndCommutative(Prev);

  // Early exit if both opcodes are associative and commutative. It's a trivial
  // reassociation when we only change operands order. In this case opcodes are
  // not required to have inverse versions.
  if (AssocCommutRoot && AssocCommutPrev) {
    assert(Root.getOpcode() == Prev.getOpcode() && "Expected to be equal");
    return std::make_pair(Root.getOpcode(), Root.getOpcode());
  }

  // At least one instruction is not associative or commutative.
  // Since we have matched one of the reassociation patterns, we expect that the
  // instructions' opcodes are equal or one of them is the inversion of the
  // other.
972bdd1243dSDimitry Andric assert(areOpcodesEqualOrInverse(Root.getOpcode(), Prev.getOpcode()) && 973bdd1243dSDimitry Andric "Incorrectly matched pattern"); 974bdd1243dSDimitry Andric unsigned AssocCommutOpcode = Root.getOpcode(); 975bdd1243dSDimitry Andric unsigned InverseOpcode = *getInverseOpcode(Root.getOpcode()); 976bdd1243dSDimitry Andric if (!AssocCommutRoot) 977bdd1243dSDimitry Andric std::swap(AssocCommutOpcode, InverseOpcode); 978bdd1243dSDimitry Andric 979bdd1243dSDimitry Andric // The transformation rule (`+` is any associative and commutative binary 980bdd1243dSDimitry Andric // operation, `-` is the inverse): 981bdd1243dSDimitry Andric // REASSOC_AX_BY: 982bdd1243dSDimitry Andric // (A + X) + Y => A + (X + Y) 983bdd1243dSDimitry Andric // (A + X) - Y => A + (X - Y) 984bdd1243dSDimitry Andric // (A - X) + Y => A - (X - Y) 985bdd1243dSDimitry Andric // (A - X) - Y => A - (X + Y) 986bdd1243dSDimitry Andric // REASSOC_XA_BY: 987bdd1243dSDimitry Andric // (X + A) + Y => (X + Y) + A 988bdd1243dSDimitry Andric // (X + A) - Y => (X - Y) + A 989bdd1243dSDimitry Andric // (X - A) + Y => (X + Y) - A 990bdd1243dSDimitry Andric // (X - A) - Y => (X - Y) - A 991bdd1243dSDimitry Andric // REASSOC_AX_YB: 992bdd1243dSDimitry Andric // Y + (A + X) => (Y + X) + A 993bdd1243dSDimitry Andric // Y - (A + X) => (Y - X) - A 994bdd1243dSDimitry Andric // Y + (A - X) => (Y - X) + A 995bdd1243dSDimitry Andric // Y - (A - X) => (Y + X) - A 996bdd1243dSDimitry Andric // REASSOC_XA_YB: 997bdd1243dSDimitry Andric // Y + (X + A) => (Y + X) + A 998bdd1243dSDimitry Andric // Y - (X + A) => (Y - X) - A 999bdd1243dSDimitry Andric // Y + (X - A) => (Y + X) - A 1000bdd1243dSDimitry Andric // Y - (X - A) => (Y - X) + A 1001bdd1243dSDimitry Andric switch (Pattern) { 1002bdd1243dSDimitry Andric default: 1003bdd1243dSDimitry Andric llvm_unreachable("Unexpected pattern"); 1004bdd1243dSDimitry Andric case MachineCombinerPattern::REASSOC_AX_BY: 1005bdd1243dSDimitry Andric if (!AssocCommutRoot && AssocCommutPrev) 1006bdd1243dSDimitry Andric return {AssocCommutOpcode, InverseOpcode}; 1007bdd1243dSDimitry Andric if (AssocCommutRoot && !AssocCommutPrev) 1008bdd1243dSDimitry Andric return {InverseOpcode, InverseOpcode}; 1009bdd1243dSDimitry Andric if (!AssocCommutRoot && !AssocCommutPrev) 1010bdd1243dSDimitry Andric return {InverseOpcode, AssocCommutOpcode}; 1011bdd1243dSDimitry Andric break; 1012bdd1243dSDimitry Andric case MachineCombinerPattern::REASSOC_XA_BY: 1013bdd1243dSDimitry Andric if (!AssocCommutRoot && AssocCommutPrev) 1014bdd1243dSDimitry Andric return {AssocCommutOpcode, InverseOpcode}; 1015bdd1243dSDimitry Andric if (AssocCommutRoot && !AssocCommutPrev) 1016bdd1243dSDimitry Andric return {InverseOpcode, AssocCommutOpcode}; 1017bdd1243dSDimitry Andric if (!AssocCommutRoot && !AssocCommutPrev) 1018bdd1243dSDimitry Andric return {InverseOpcode, InverseOpcode}; 1019bdd1243dSDimitry Andric break; 1020bdd1243dSDimitry Andric case MachineCombinerPattern::REASSOC_AX_YB: 1021bdd1243dSDimitry Andric if (!AssocCommutRoot && AssocCommutPrev) 1022bdd1243dSDimitry Andric return {InverseOpcode, InverseOpcode}; 1023bdd1243dSDimitry Andric if (AssocCommutRoot && !AssocCommutPrev) 1024bdd1243dSDimitry Andric return {AssocCommutOpcode, InverseOpcode}; 1025bdd1243dSDimitry Andric if (!AssocCommutRoot && !AssocCommutPrev) 1026bdd1243dSDimitry Andric return {InverseOpcode, AssocCommutOpcode}; 1027bdd1243dSDimitry Andric break; 1028bdd1243dSDimitry Andric case MachineCombinerPattern::REASSOC_XA_YB: 1029bdd1243dSDimitry Andric if 
(!AssocCommutRoot && AssocCommutPrev) 1030bdd1243dSDimitry Andric return {InverseOpcode, InverseOpcode}; 1031bdd1243dSDimitry Andric if (AssocCommutRoot && !AssocCommutPrev) 1032bdd1243dSDimitry Andric return {InverseOpcode, AssocCommutOpcode}; 1033bdd1243dSDimitry Andric if (!AssocCommutRoot && !AssocCommutPrev) 1034bdd1243dSDimitry Andric return {AssocCommutOpcode, InverseOpcode}; 1035bdd1243dSDimitry Andric break; 1036bdd1243dSDimitry Andric } 1037bdd1243dSDimitry Andric llvm_unreachable("Unhandled combination"); 1038bdd1243dSDimitry Andric } 1039bdd1243dSDimitry Andric 1040bdd1243dSDimitry Andric // Return a pair of boolean flags showing if the new root and new prev operands 1041bdd1243dSDimitry Andric // must be swapped. See visual example of the rule in 1042bdd1243dSDimitry Andric // TargetInstrInfo::getReassociationOpcodes. 1043*0fca6ea1SDimitry Andric static std::pair<bool, bool> mustSwapOperands(unsigned Pattern) { 1044bdd1243dSDimitry Andric switch (Pattern) { 1045bdd1243dSDimitry Andric default: 1046bdd1243dSDimitry Andric llvm_unreachable("Unexpected pattern"); 1047bdd1243dSDimitry Andric case MachineCombinerPattern::REASSOC_AX_BY: 1048bdd1243dSDimitry Andric return {false, false}; 1049bdd1243dSDimitry Andric case MachineCombinerPattern::REASSOC_XA_BY: 1050bdd1243dSDimitry Andric return {true, false}; 1051bdd1243dSDimitry Andric case MachineCombinerPattern::REASSOC_AX_YB: 1052bdd1243dSDimitry Andric return {true, true}; 1053bdd1243dSDimitry Andric case MachineCombinerPattern::REASSOC_XA_YB: 1054bdd1243dSDimitry Andric return {true, true}; 1055bdd1243dSDimitry Andric } 1056bdd1243dSDimitry Andric } 1057bdd1243dSDimitry Andric 1058*0fca6ea1SDimitry Andric void TargetInstrInfo::getReassociateOperandIndices( 1059*0fca6ea1SDimitry Andric const MachineInstr &Root, unsigned Pattern, 1060*0fca6ea1SDimitry Andric std::array<unsigned, 5> &OperandIndices) const { 1061*0fca6ea1SDimitry Andric switch (Pattern) { 1062*0fca6ea1SDimitry Andric case MachineCombinerPattern::REASSOC_AX_BY: 1063*0fca6ea1SDimitry Andric OperandIndices = {1, 1, 1, 2, 2}; 1064*0fca6ea1SDimitry Andric break; 1065*0fca6ea1SDimitry Andric case MachineCombinerPattern::REASSOC_AX_YB: 1066*0fca6ea1SDimitry Andric OperandIndices = {2, 1, 2, 2, 1}; 1067*0fca6ea1SDimitry Andric break; 1068*0fca6ea1SDimitry Andric case MachineCombinerPattern::REASSOC_XA_BY: 1069*0fca6ea1SDimitry Andric OperandIndices = {1, 2, 1, 1, 2}; 1070*0fca6ea1SDimitry Andric break; 1071*0fca6ea1SDimitry Andric case MachineCombinerPattern::REASSOC_XA_YB: 1072*0fca6ea1SDimitry Andric OperandIndices = {2, 2, 2, 1, 1}; 1073*0fca6ea1SDimitry Andric break; 1074*0fca6ea1SDimitry Andric default: 1075*0fca6ea1SDimitry Andric llvm_unreachable("unexpected MachineCombinerPattern"); 1076*0fca6ea1SDimitry Andric } 1077*0fca6ea1SDimitry Andric } 1078*0fca6ea1SDimitry Andric 10790b57cec5SDimitry Andric /// Attempt the reassociation transformation to reduce critical path length. 10800b57cec5SDimitry Andric /// See the above comments before getMachineCombinerPatterns(). 
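/// (Editorial note, not upstream documentation: as the indices are consumed
/// here and in genAlternativeCodeSequence(), OperandIndices[0] is the index of
/// Root's operand that is defined by Prev (the intermediate value B in the
/// A/B/C example above), OperandIndices[1] and OperandIndices[3] are the
/// indices of A and X within Prev, and OperandIndices[2] and OperandIndices[4]
/// are the indices of B and Y within Root.)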
10810b57cec5SDimitry Andric void TargetInstrInfo::reassociateOps( 1082*0fca6ea1SDimitry Andric MachineInstr &Root, MachineInstr &Prev, unsigned Pattern, 10830b57cec5SDimitry Andric SmallVectorImpl<MachineInstr *> &InsInstrs, 10840b57cec5SDimitry Andric SmallVectorImpl<MachineInstr *> &DelInstrs, 1085*0fca6ea1SDimitry Andric ArrayRef<unsigned> OperandIndices, 10860b57cec5SDimitry Andric DenseMap<unsigned, unsigned> &InstrIdxForVirtReg) const { 10870b57cec5SDimitry Andric MachineFunction *MF = Root.getMF(); 10880b57cec5SDimitry Andric MachineRegisterInfo &MRI = MF->getRegInfo(); 10890b57cec5SDimitry Andric const TargetInstrInfo *TII = MF->getSubtarget().getInstrInfo(); 10900b57cec5SDimitry Andric const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo(); 10910b57cec5SDimitry Andric const TargetRegisterClass *RC = Root.getRegClassConstraint(0, TII, TRI); 10920b57cec5SDimitry Andric 1093*0fca6ea1SDimitry Andric MachineOperand &OpA = Prev.getOperand(OperandIndices[1]); 1094*0fca6ea1SDimitry Andric MachineOperand &OpB = Root.getOperand(OperandIndices[2]); 1095*0fca6ea1SDimitry Andric MachineOperand &OpX = Prev.getOperand(OperandIndices[3]); 1096*0fca6ea1SDimitry Andric MachineOperand &OpY = Root.getOperand(OperandIndices[4]); 10970b57cec5SDimitry Andric MachineOperand &OpC = Root.getOperand(0); 10980b57cec5SDimitry Andric 10998bcb0991SDimitry Andric Register RegA = OpA.getReg(); 11008bcb0991SDimitry Andric Register RegB = OpB.getReg(); 11018bcb0991SDimitry Andric Register RegX = OpX.getReg(); 11028bcb0991SDimitry Andric Register RegY = OpY.getReg(); 11038bcb0991SDimitry Andric Register RegC = OpC.getReg(); 11040b57cec5SDimitry Andric 1105bdd1243dSDimitry Andric if (RegA.isVirtual()) 11060b57cec5SDimitry Andric MRI.constrainRegClass(RegA, RC); 1107bdd1243dSDimitry Andric if (RegB.isVirtual()) 11080b57cec5SDimitry Andric MRI.constrainRegClass(RegB, RC); 1109bdd1243dSDimitry Andric if (RegX.isVirtual()) 11100b57cec5SDimitry Andric MRI.constrainRegClass(RegX, RC); 1111bdd1243dSDimitry Andric if (RegY.isVirtual()) 11120b57cec5SDimitry Andric MRI.constrainRegClass(RegY, RC); 1113bdd1243dSDimitry Andric if (RegC.isVirtual()) 11140b57cec5SDimitry Andric MRI.constrainRegClass(RegC, RC); 11150b57cec5SDimitry Andric 11160b57cec5SDimitry Andric // Create a new virtual register for the result of (X op Y) instead of 11170b57cec5SDimitry Andric // recycling RegB because the MachineCombiner's computation of the critical 11180b57cec5SDimitry Andric // path requires a new register definition rather than an existing one. 
11198bcb0991SDimitry Andric Register NewVR = MRI.createVirtualRegister(RC); 11200b57cec5SDimitry Andric InstrIdxForVirtReg.insert(std::make_pair(NewVR, 0)); 11210b57cec5SDimitry Andric 1122bdd1243dSDimitry Andric auto [NewRootOpc, NewPrevOpc] = getReassociationOpcodes(Pattern, Root, Prev); 11230b57cec5SDimitry Andric bool KillA = OpA.isKill(); 11240b57cec5SDimitry Andric bool KillX = OpX.isKill(); 11250b57cec5SDimitry Andric bool KillY = OpY.isKill(); 1126bdd1243dSDimitry Andric bool KillNewVR = true; 1127bdd1243dSDimitry Andric 1128bdd1243dSDimitry Andric auto [SwapRootOperands, SwapPrevOperands] = mustSwapOperands(Pattern); 1129bdd1243dSDimitry Andric 1130bdd1243dSDimitry Andric if (SwapPrevOperands) { 1131bdd1243dSDimitry Andric std::swap(RegX, RegY); 1132bdd1243dSDimitry Andric std::swap(KillX, KillY); 1133bdd1243dSDimitry Andric } 11340b57cec5SDimitry Andric 1135*0fca6ea1SDimitry Andric unsigned PrevFirstOpIdx, PrevSecondOpIdx; 1136*0fca6ea1SDimitry Andric unsigned RootFirstOpIdx, RootSecondOpIdx; 1137*0fca6ea1SDimitry Andric switch (Pattern) { 1138*0fca6ea1SDimitry Andric case MachineCombinerPattern::REASSOC_AX_BY: 1139*0fca6ea1SDimitry Andric PrevFirstOpIdx = OperandIndices[1]; 1140*0fca6ea1SDimitry Andric PrevSecondOpIdx = OperandIndices[3]; 1141*0fca6ea1SDimitry Andric RootFirstOpIdx = OperandIndices[2]; 1142*0fca6ea1SDimitry Andric RootSecondOpIdx = OperandIndices[4]; 1143*0fca6ea1SDimitry Andric break; 1144*0fca6ea1SDimitry Andric case MachineCombinerPattern::REASSOC_AX_YB: 1145*0fca6ea1SDimitry Andric PrevFirstOpIdx = OperandIndices[1]; 1146*0fca6ea1SDimitry Andric PrevSecondOpIdx = OperandIndices[3]; 1147*0fca6ea1SDimitry Andric RootFirstOpIdx = OperandIndices[4]; 1148*0fca6ea1SDimitry Andric RootSecondOpIdx = OperandIndices[2]; 1149*0fca6ea1SDimitry Andric break; 1150*0fca6ea1SDimitry Andric case MachineCombinerPattern::REASSOC_XA_BY: 1151*0fca6ea1SDimitry Andric PrevFirstOpIdx = OperandIndices[3]; 1152*0fca6ea1SDimitry Andric PrevSecondOpIdx = OperandIndices[1]; 1153*0fca6ea1SDimitry Andric RootFirstOpIdx = OperandIndices[2]; 1154*0fca6ea1SDimitry Andric RootSecondOpIdx = OperandIndices[4]; 1155*0fca6ea1SDimitry Andric break; 1156*0fca6ea1SDimitry Andric case MachineCombinerPattern::REASSOC_XA_YB: 1157*0fca6ea1SDimitry Andric PrevFirstOpIdx = OperandIndices[3]; 1158*0fca6ea1SDimitry Andric PrevSecondOpIdx = OperandIndices[1]; 1159*0fca6ea1SDimitry Andric RootFirstOpIdx = OperandIndices[4]; 1160*0fca6ea1SDimitry Andric RootSecondOpIdx = OperandIndices[2]; 1161*0fca6ea1SDimitry Andric break; 1162*0fca6ea1SDimitry Andric default: 1163*0fca6ea1SDimitry Andric llvm_unreachable("unexpected MachineCombinerPattern"); 1164*0fca6ea1SDimitry Andric } 1165*0fca6ea1SDimitry Andric 1166*0fca6ea1SDimitry Andric // Basically BuildMI but doesn't add implicit operands by default. 1167*0fca6ea1SDimitry Andric auto buildMINoImplicit = [](MachineFunction &MF, const MIMetadata &MIMD, 1168*0fca6ea1SDimitry Andric const MCInstrDesc &MCID, Register DestReg) { 1169*0fca6ea1SDimitry Andric return MachineInstrBuilder( 1170*0fca6ea1SDimitry Andric MF, MF.CreateMachineInstr(MCID, MIMD.getDL(), /*NoImpl=*/true)) 1171*0fca6ea1SDimitry Andric .setPCSections(MIMD.getPCSections()) 1172*0fca6ea1SDimitry Andric .addReg(DestReg, RegState::Define); 1173*0fca6ea1SDimitry Andric }; 1174*0fca6ea1SDimitry Andric 11750b57cec5SDimitry Andric // Create new instructions for insertion. 
11760b57cec5SDimitry Andric MachineInstrBuilder MIB1 = 1177*0fca6ea1SDimitry Andric buildMINoImplicit(*MF, MIMetadata(Prev), TII->get(NewPrevOpc), NewVR); 1178*0fca6ea1SDimitry Andric for (const auto &MO : Prev.explicit_operands()) { 1179*0fca6ea1SDimitry Andric unsigned Idx = MO.getOperandNo(); 1180*0fca6ea1SDimitry Andric // Skip the result operand we'd already added. 1181*0fca6ea1SDimitry Andric if (Idx == 0) 1182*0fca6ea1SDimitry Andric continue; 1183*0fca6ea1SDimitry Andric if (Idx == PrevFirstOpIdx) 1184*0fca6ea1SDimitry Andric MIB1.addReg(RegX, getKillRegState(KillX)); 1185*0fca6ea1SDimitry Andric else if (Idx == PrevSecondOpIdx) 1186*0fca6ea1SDimitry Andric MIB1.addReg(RegY, getKillRegState(KillY)); 1187*0fca6ea1SDimitry Andric else 1188*0fca6ea1SDimitry Andric MIB1.add(MO); 1189*0fca6ea1SDimitry Andric } 1190*0fca6ea1SDimitry Andric MIB1.copyImplicitOps(Prev); 1191bdd1243dSDimitry Andric 1192bdd1243dSDimitry Andric if (SwapRootOperands) { 1193bdd1243dSDimitry Andric std::swap(RegA, NewVR); 1194bdd1243dSDimitry Andric std::swap(KillA, KillNewVR); 1195bdd1243dSDimitry Andric } 1196bdd1243dSDimitry Andric 11970b57cec5SDimitry Andric MachineInstrBuilder MIB2 = 1198*0fca6ea1SDimitry Andric buildMINoImplicit(*MF, MIMetadata(Root), TII->get(NewRootOpc), RegC); 1199*0fca6ea1SDimitry Andric for (const auto &MO : Root.explicit_operands()) { 1200*0fca6ea1SDimitry Andric unsigned Idx = MO.getOperandNo(); 1201*0fca6ea1SDimitry Andric // Skip the result operand. 1202*0fca6ea1SDimitry Andric if (Idx == 0) 1203*0fca6ea1SDimitry Andric continue; 1204*0fca6ea1SDimitry Andric if (Idx == RootFirstOpIdx) 1205*0fca6ea1SDimitry Andric MIB2 = MIB2.addReg(RegA, getKillRegState(KillA)); 1206*0fca6ea1SDimitry Andric else if (Idx == RootSecondOpIdx) 1207*0fca6ea1SDimitry Andric MIB2 = MIB2.addReg(NewVR, getKillRegState(KillNewVR)); 1208*0fca6ea1SDimitry Andric else 1209*0fca6ea1SDimitry Andric MIB2 = MIB2.add(MO); 1210*0fca6ea1SDimitry Andric } 1211*0fca6ea1SDimitry Andric MIB2.copyImplicitOps(Root); 12125f757f3fSDimitry Andric 12135f757f3fSDimitry Andric // Propagate FP flags from the original instructions. 12145f757f3fSDimitry Andric // But clear poison-generating flags because those may not be valid now. 12155f757f3fSDimitry Andric // TODO: There should be a helper function for copying only fast-math-flags. 12165f757f3fSDimitry Andric uint32_t IntersectedFlags = Root.getFlags() & Prev.getFlags(); 12175f757f3fSDimitry Andric MIB1->setFlags(IntersectedFlags); 12185f757f3fSDimitry Andric MIB1->clearFlag(MachineInstr::MIFlag::NoSWrap); 12195f757f3fSDimitry Andric MIB1->clearFlag(MachineInstr::MIFlag::NoUWrap); 12205f757f3fSDimitry Andric MIB1->clearFlag(MachineInstr::MIFlag::IsExact); 12215f757f3fSDimitry Andric 12225f757f3fSDimitry Andric MIB2->setFlags(IntersectedFlags); 12235f757f3fSDimitry Andric MIB2->clearFlag(MachineInstr::MIFlag::NoSWrap); 12245f757f3fSDimitry Andric MIB2->clearFlag(MachineInstr::MIFlag::NoUWrap); 12255f757f3fSDimitry Andric MIB2->clearFlag(MachineInstr::MIFlag::IsExact); 12260b57cec5SDimitry Andric 12270b57cec5SDimitry Andric setSpecialOperandAttr(Root, Prev, *MIB1, *MIB2); 12280b57cec5SDimitry Andric 12290b57cec5SDimitry Andric // Record new instructions for insertion and old instructions for deletion. 
12300b57cec5SDimitry Andric InsInstrs.push_back(MIB1); 12310b57cec5SDimitry Andric InsInstrs.push_back(MIB2); 12320b57cec5SDimitry Andric DelInstrs.push_back(&Prev); 12330b57cec5SDimitry Andric DelInstrs.push_back(&Root); 123406c3fb27SDimitry Andric 123506c3fb27SDimitry Andric // We transformed: 123606c3fb27SDimitry Andric // B = A op X (Prev) 123706c3fb27SDimitry Andric // C = B op Y (Root) 123806c3fb27SDimitry Andric // Into: 123906c3fb27SDimitry Andric // B = X op Y (MIB1) 124006c3fb27SDimitry Andric // C = A op B (MIB2) 124106c3fb27SDimitry Andric // C has the same value as before, B doesn't; as such, keep the debug number 124206c3fb27SDimitry Andric // of C but not of B. 124306c3fb27SDimitry Andric if (unsigned OldRootNum = Root.peekDebugInstrNum()) 124406c3fb27SDimitry Andric MIB2.getInstr()->setDebugInstrNum(OldRootNum); 12450b57cec5SDimitry Andric } 12460b57cec5SDimitry Andric 12470b57cec5SDimitry Andric void TargetInstrInfo::genAlternativeCodeSequence( 1248*0fca6ea1SDimitry Andric MachineInstr &Root, unsigned Pattern, 12490b57cec5SDimitry Andric SmallVectorImpl<MachineInstr *> &InsInstrs, 12500b57cec5SDimitry Andric SmallVectorImpl<MachineInstr *> &DelInstrs, 12510b57cec5SDimitry Andric DenseMap<unsigned, unsigned> &InstIdxForVirtReg) const { 12520b57cec5SDimitry Andric MachineRegisterInfo &MRI = Root.getMF()->getRegInfo(); 12530b57cec5SDimitry Andric 12540b57cec5SDimitry Andric // Select the previous instruction in the sequence based on the input pattern. 1255*0fca6ea1SDimitry Andric std::array<unsigned, 5> OperandIndices; 1256*0fca6ea1SDimitry Andric getReassociateOperandIndices(Root, Pattern, OperandIndices); 1257*0fca6ea1SDimitry Andric MachineInstr *Prev = 1258*0fca6ea1SDimitry Andric MRI.getUniqueVRegDef(Root.getOperand(OperandIndices[0]).getReg()); 12590b57cec5SDimitry Andric 1260bdd1243dSDimitry Andric // Don't reassociate if Prev and Root are in different blocks. 1261bdd1243dSDimitry Andric if (Prev->getParent() != Root.getParent()) 1262bdd1243dSDimitry Andric return; 1263bdd1243dSDimitry Andric 1264*0fca6ea1SDimitry Andric reassociateOps(Root, *Prev, Pattern, InsInstrs, DelInstrs, OperandIndices, 1265*0fca6ea1SDimitry Andric InstIdxForVirtReg); 12660b57cec5SDimitry Andric } 12670b57cec5SDimitry Andric 126806c3fb27SDimitry Andric MachineTraceStrategy TargetInstrInfo::getMachineCombinerTraceStrategy() const { 126906c3fb27SDimitry Andric return MachineTraceStrategy::TS_MinInstrCount; 127006c3fb27SDimitry Andric } 127106c3fb27SDimitry Andric 12725f757f3fSDimitry Andric bool TargetInstrInfo::isReallyTriviallyReMaterializable( 1273fcaf7f86SDimitry Andric const MachineInstr &MI) const { 12740b57cec5SDimitry Andric const MachineFunction &MF = *MI.getMF(); 12750b57cec5SDimitry Andric const MachineRegisterInfo &MRI = MF.getRegInfo(); 12760b57cec5SDimitry Andric 12770b57cec5SDimitry Andric // Remat clients assume operand 0 is the defined register. 12780b57cec5SDimitry Andric if (!MI.getNumOperands() || !MI.getOperand(0).isReg()) 12790b57cec5SDimitry Andric return false; 12808bcb0991SDimitry Andric Register DefReg = MI.getOperand(0).getReg(); 12810b57cec5SDimitry Andric 12820b57cec5SDimitry Andric // A sub-register definition can only be rematerialized if the instruction 12830b57cec5SDimitry Andric // doesn't read the other parts of the register. Otherwise it is really a 12840b57cec5SDimitry Andric // read-modify-write operation on the full virtual register which cannot be 12850b57cec5SDimitry Andric // moved safely. 
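// (Editorial illustration, not an upstream comment: a partial definition such
// as "%0.sub_lo = ..." leaves the other lanes of %0 unchanged, so it
// implicitly reads the old value of %0; re-executing it at a different program
// point could observe different contents in those untouched lanes, which is
// exactly what the readsVirtualRegister() check below rejects. "sub_lo" is
// only a sample subregister index name.)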
1286bdd1243dSDimitry Andric if (DefReg.isVirtual() && MI.getOperand(0).getSubReg() && 12878bcb0991SDimitry Andric MI.readsVirtualRegister(DefReg)) 12880b57cec5SDimitry Andric return false; 12890b57cec5SDimitry Andric 12900b57cec5SDimitry Andric // A load from a fixed stack slot can be rematerialized. This may be 12910b57cec5SDimitry Andric // redundant with subsequent checks, but it's target-independent, 12920b57cec5SDimitry Andric // simple, and a common case. 12930b57cec5SDimitry Andric int FrameIdx = 0; 12940b57cec5SDimitry Andric if (isLoadFromStackSlot(MI, FrameIdx) && 12950b57cec5SDimitry Andric MF.getFrameInfo().isImmutableObjectIndex(FrameIdx)) 12960b57cec5SDimitry Andric return true; 12970b57cec5SDimitry Andric 12980b57cec5SDimitry Andric // Avoid instructions obviously unsafe for remat. 12990b57cec5SDimitry Andric if (MI.isNotDuplicable() || MI.mayStore() || MI.mayRaiseFPException() || 13000b57cec5SDimitry Andric MI.hasUnmodeledSideEffects()) 13010b57cec5SDimitry Andric return false; 13020b57cec5SDimitry Andric 13030b57cec5SDimitry Andric // Don't remat inline asm. We have no idea how expensive it is 13040b57cec5SDimitry Andric // even if it's side effect free. 13050b57cec5SDimitry Andric if (MI.isInlineAsm()) 13060b57cec5SDimitry Andric return false; 13070b57cec5SDimitry Andric 13080b57cec5SDimitry Andric // Avoid instructions which load from potentially varying memory. 1309fcaf7f86SDimitry Andric if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad()) 13100b57cec5SDimitry Andric return false; 13110b57cec5SDimitry Andric 13120b57cec5SDimitry Andric // If any of the registers accessed are non-constant, conservatively assume 13130b57cec5SDimitry Andric // the instruction is not rematerializable. 13144824e7fdSDimitry Andric for (const MachineOperand &MO : MI.operands()) { 13150b57cec5SDimitry Andric if (!MO.isReg()) continue; 13168bcb0991SDimitry Andric Register Reg = MO.getReg(); 13170b57cec5SDimitry Andric if (Reg == 0) 13180b57cec5SDimitry Andric continue; 13190b57cec5SDimitry Andric 13200b57cec5SDimitry Andric // Check for a well-behaved physical register. 1321bdd1243dSDimitry Andric if (Reg.isPhysical()) { 13220b57cec5SDimitry Andric if (MO.isUse()) { 13230b57cec5SDimitry Andric // If the physreg has no defs anywhere, it's just an ambient register 13240b57cec5SDimitry Andric // and we can freely move its uses. Alternatively, if it's allocatable, 13250b57cec5SDimitry Andric // it could get allocated to something with a def during allocation. 13260b57cec5SDimitry Andric if (!MRI.isConstantPhysReg(Reg)) 13270b57cec5SDimitry Andric return false; 13280b57cec5SDimitry Andric } else { 13290b57cec5SDimitry Andric // A physreg def. We can't remat it. 13300b57cec5SDimitry Andric return false; 13310b57cec5SDimitry Andric } 13320b57cec5SDimitry Andric continue; 13330b57cec5SDimitry Andric } 13340b57cec5SDimitry Andric 13350b57cec5SDimitry Andric // Only allow one virtual-register def. There may be multiple defs of the 13360b57cec5SDimitry Andric // same virtual register, though. 13370b57cec5SDimitry Andric if (MO.isDef() && Reg != DefReg) 13380b57cec5SDimitry Andric return false; 13390b57cec5SDimitry Andric 13400b57cec5SDimitry Andric // Don't allow any virtual-register uses. Rematting an instruction with 13410b57cec5SDimitry Andric // virtual register uses would lengthen the live ranges of the uses, which 13420b57cec5SDimitry Andric // is not necessarily a good idea, certainly not "trivial".
13430b57cec5SDimitry Andric if (MO.isUse()) 13440b57cec5SDimitry Andric return false; 13450b57cec5SDimitry Andric } 13460b57cec5SDimitry Andric 13470b57cec5SDimitry Andric // Everything checked out. 13480b57cec5SDimitry Andric return true; 13490b57cec5SDimitry Andric } 13500b57cec5SDimitry Andric 13510b57cec5SDimitry Andric int TargetInstrInfo::getSPAdjust(const MachineInstr &MI) const { 13520b57cec5SDimitry Andric const MachineFunction *MF = MI.getMF(); 13530b57cec5SDimitry Andric const TargetFrameLowering *TFI = MF->getSubtarget().getFrameLowering(); 13540b57cec5SDimitry Andric bool StackGrowsDown = 13550b57cec5SDimitry Andric TFI->getStackGrowthDirection() == TargetFrameLowering::StackGrowsDown; 13560b57cec5SDimitry Andric 13570b57cec5SDimitry Andric unsigned FrameSetupOpcode = getCallFrameSetupOpcode(); 13580b57cec5SDimitry Andric unsigned FrameDestroyOpcode = getCallFrameDestroyOpcode(); 13590b57cec5SDimitry Andric 13600b57cec5SDimitry Andric if (!isFrameInstr(MI)) 13610b57cec5SDimitry Andric return 0; 13620b57cec5SDimitry Andric 13630b57cec5SDimitry Andric int SPAdj = TFI->alignSPAdjust(getFrameSize(MI)); 13640b57cec5SDimitry Andric 13650b57cec5SDimitry Andric if ((!StackGrowsDown && MI.getOpcode() == FrameSetupOpcode) || 13660b57cec5SDimitry Andric (StackGrowsDown && MI.getOpcode() == FrameDestroyOpcode)) 13670b57cec5SDimitry Andric SPAdj = -SPAdj; 13680b57cec5SDimitry Andric 13690b57cec5SDimitry Andric return SPAdj; 13700b57cec5SDimitry Andric } 13710b57cec5SDimitry Andric 13720b57cec5SDimitry Andric /// isSchedulingBoundary - Test if the given instruction should be 13730b57cec5SDimitry Andric /// considered a scheduling boundary. This primarily includes labels 13740b57cec5SDimitry Andric /// and terminators. 13750b57cec5SDimitry Andric bool TargetInstrInfo::isSchedulingBoundary(const MachineInstr &MI, 13760b57cec5SDimitry Andric const MachineBasicBlock *MBB, 13770b57cec5SDimitry Andric const MachineFunction &MF) const { 13780b57cec5SDimitry Andric // Terminators and labels can't be scheduled around. 13790b57cec5SDimitry Andric if (MI.isTerminator() || MI.isPosition()) 13800b57cec5SDimitry Andric return true; 13810b57cec5SDimitry Andric 13825ffd83dbSDimitry Andric // INLINEASM_BR can jump to another block 13835ffd83dbSDimitry Andric if (MI.getOpcode() == TargetOpcode::INLINEASM_BR) 13845ffd83dbSDimitry Andric return true; 13855ffd83dbSDimitry Andric 13860b57cec5SDimitry Andric // Don't attempt to schedule around any instruction that defines 13870b57cec5SDimitry Andric // a stack-oriented pointer, as it's unlikely to be profitable. This 13880b57cec5SDimitry Andric // saves compile time, because it doesn't require every single 13890b57cec5SDimitry Andric // stack slot reference to depend on the instruction that does the 13900b57cec5SDimitry Andric // modification. 13910b57cec5SDimitry Andric const TargetLowering &TLI = *MF.getSubtarget().getTargetLowering(); 13920b57cec5SDimitry Andric const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo(); 13930b57cec5SDimitry Andric return MI.modifiesRegister(TLI.getStackPointerRegisterToSaveRestore(), TRI); 13940b57cec5SDimitry Andric } 13950b57cec5SDimitry Andric 13960b57cec5SDimitry Andric // Provide a global flag for disabling the PreRA hazard recognizer that targets 13970b57cec5SDimitry Andric // may choose to honor. 
13980b57cec5SDimitry Andric bool TargetInstrInfo::usePreRAHazardRecognizer() const { 13990b57cec5SDimitry Andric return !DisableHazardRecognizer; 14000b57cec5SDimitry Andric } 14010b57cec5SDimitry Andric 14020b57cec5SDimitry Andric // Default implementation of CreateTargetRAHazardRecognizer. 14030b57cec5SDimitry Andric ScheduleHazardRecognizer *TargetInstrInfo:: 14040b57cec5SDimitry Andric CreateTargetHazardRecognizer(const TargetSubtargetInfo *STI, 14050b57cec5SDimitry Andric const ScheduleDAG *DAG) const { 14060b57cec5SDimitry Andric // Dummy hazard recognizer allows all instructions to issue. 14070b57cec5SDimitry Andric return new ScheduleHazardRecognizer(); 14080b57cec5SDimitry Andric } 14090b57cec5SDimitry Andric 14100b57cec5SDimitry Andric // Default implementation of CreateTargetMIHazardRecognizer. 1411480093f4SDimitry Andric ScheduleHazardRecognizer *TargetInstrInfo::CreateTargetMIHazardRecognizer( 1412480093f4SDimitry Andric const InstrItineraryData *II, const ScheduleDAGMI *DAG) const { 1413480093f4SDimitry Andric return new ScoreboardHazardRecognizer(II, DAG, "machine-scheduler"); 14140b57cec5SDimitry Andric } 14150b57cec5SDimitry Andric 14160b57cec5SDimitry Andric // Default implementation of CreateTargetPostRAHazardRecognizer. 14170b57cec5SDimitry Andric ScheduleHazardRecognizer *TargetInstrInfo:: 14180b57cec5SDimitry Andric CreateTargetPostRAHazardRecognizer(const InstrItineraryData *II, 14190b57cec5SDimitry Andric const ScheduleDAG *DAG) const { 1420480093f4SDimitry Andric return new ScoreboardHazardRecognizer(II, DAG, "post-RA-sched"); 14210b57cec5SDimitry Andric } 14220b57cec5SDimitry Andric 14235ffd83dbSDimitry Andric // Default implementation of getMemOperandWithOffset. 14245ffd83dbSDimitry Andric bool TargetInstrInfo::getMemOperandWithOffset( 14255ffd83dbSDimitry Andric const MachineInstr &MI, const MachineOperand *&BaseOp, int64_t &Offset, 14265ffd83dbSDimitry Andric bool &OffsetIsScalable, const TargetRegisterInfo *TRI) const { 14275ffd83dbSDimitry Andric SmallVector<const MachineOperand *, 4> BaseOps; 1428*0fca6ea1SDimitry Andric LocationSize Width = 0; 14295ffd83dbSDimitry Andric if (!getMemOperandsWithOffsetWidth(MI, BaseOps, Offset, OffsetIsScalable, 14305ffd83dbSDimitry Andric Width, TRI) || 14315ffd83dbSDimitry Andric BaseOps.size() != 1) 14325ffd83dbSDimitry Andric return false; 14335ffd83dbSDimitry Andric BaseOp = BaseOps.front(); 14345ffd83dbSDimitry Andric return true; 14355ffd83dbSDimitry Andric } 14365ffd83dbSDimitry Andric 14370b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 14380b57cec5SDimitry Andric // SelectionDAG latency interface. 
14390b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 14400b57cec5SDimitry Andric 14415f757f3fSDimitry Andric std::optional<unsigned> 14420b57cec5SDimitry Andric TargetInstrInfo::getOperandLatency(const InstrItineraryData *ItinData, 14430b57cec5SDimitry Andric SDNode *DefNode, unsigned DefIdx, 14440b57cec5SDimitry Andric SDNode *UseNode, unsigned UseIdx) const { 14450b57cec5SDimitry Andric if (!ItinData || ItinData->isEmpty()) 14465f757f3fSDimitry Andric return std::nullopt; 14470b57cec5SDimitry Andric 14480b57cec5SDimitry Andric if (!DefNode->isMachineOpcode()) 14495f757f3fSDimitry Andric return std::nullopt; 14500b57cec5SDimitry Andric 14510b57cec5SDimitry Andric unsigned DefClass = get(DefNode->getMachineOpcode()).getSchedClass(); 14520b57cec5SDimitry Andric if (!UseNode->isMachineOpcode()) 14530b57cec5SDimitry Andric return ItinData->getOperandCycle(DefClass, DefIdx); 14540b57cec5SDimitry Andric unsigned UseClass = get(UseNode->getMachineOpcode()).getSchedClass(); 14550b57cec5SDimitry Andric return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx); 14560b57cec5SDimitry Andric } 14570b57cec5SDimitry Andric 14585f757f3fSDimitry Andric unsigned TargetInstrInfo::getInstrLatency(const InstrItineraryData *ItinData, 14590b57cec5SDimitry Andric SDNode *N) const { 14600b57cec5SDimitry Andric if (!ItinData || ItinData->isEmpty()) 14610b57cec5SDimitry Andric return 1; 14620b57cec5SDimitry Andric 14630b57cec5SDimitry Andric if (!N->isMachineOpcode()) 14640b57cec5SDimitry Andric return 1; 14650b57cec5SDimitry Andric 14660b57cec5SDimitry Andric return ItinData->getStageLatency(get(N->getMachineOpcode()).getSchedClass()); 14670b57cec5SDimitry Andric } 14680b57cec5SDimitry Andric 14690b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 14700b57cec5SDimitry Andric // MachineInstr latency interface. 14710b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 14720b57cec5SDimitry Andric 14730b57cec5SDimitry Andric unsigned TargetInstrInfo::getNumMicroOps(const InstrItineraryData *ItinData, 14740b57cec5SDimitry Andric const MachineInstr &MI) const { 14750b57cec5SDimitry Andric if (!ItinData || ItinData->isEmpty()) 14760b57cec5SDimitry Andric return 1; 14770b57cec5SDimitry Andric 14780b57cec5SDimitry Andric unsigned Class = MI.getDesc().getSchedClass(); 14790b57cec5SDimitry Andric int UOps = ItinData->Itineraries[Class].NumMicroOps; 14800b57cec5SDimitry Andric if (UOps >= 0) 14810b57cec5SDimitry Andric return UOps; 14820b57cec5SDimitry Andric 14830b57cec5SDimitry Andric // The # of u-ops is dynamically determined. The specific target should 14840b57cec5SDimitry Andric // override this function to return the right number. 14850b57cec5SDimitry Andric return 1; 14860b57cec5SDimitry Andric } 14870b57cec5SDimitry Andric 14880b57cec5SDimitry Andric /// Return the default expected latency for a def based on its opcode.
14890b57cec5SDimitry Andric unsigned TargetInstrInfo::defaultDefLatency(const MCSchedModel &SchedModel, 14900b57cec5SDimitry Andric const MachineInstr &DefMI) const { 14910b57cec5SDimitry Andric if (DefMI.isTransient()) 14920b57cec5SDimitry Andric return 0; 14930b57cec5SDimitry Andric if (DefMI.mayLoad()) 14940b57cec5SDimitry Andric return SchedModel.LoadLatency; 14950b57cec5SDimitry Andric if (isHighLatencyDef(DefMI.getOpcode())) 14960b57cec5SDimitry Andric return SchedModel.HighLatency; 14970b57cec5SDimitry Andric return 1; 14980b57cec5SDimitry Andric } 14990b57cec5SDimitry Andric 15000b57cec5SDimitry Andric unsigned TargetInstrInfo::getPredicationCost(const MachineInstr &) const { 15010b57cec5SDimitry Andric return 0; 15020b57cec5SDimitry Andric } 15030b57cec5SDimitry Andric 15040b57cec5SDimitry Andric unsigned TargetInstrInfo::getInstrLatency(const InstrItineraryData *ItinData, 15050b57cec5SDimitry Andric const MachineInstr &MI, 15060b57cec5SDimitry Andric unsigned *PredCost) const { 15070b57cec5SDimitry Andric // Default to one cycle for no itinerary. However, an "empty" itinerary may 15080b57cec5SDimitry Andric // still have a MinLatency property, which getStageLatency checks. 15090b57cec5SDimitry Andric if (!ItinData) 15100b57cec5SDimitry Andric return MI.mayLoad() ? 2 : 1; 15110b57cec5SDimitry Andric 15120b57cec5SDimitry Andric return ItinData->getStageLatency(MI.getDesc().getSchedClass()); 15130b57cec5SDimitry Andric } 15140b57cec5SDimitry Andric 15150b57cec5SDimitry Andric bool TargetInstrInfo::hasLowDefLatency(const TargetSchedModel &SchedModel, 15160b57cec5SDimitry Andric const MachineInstr &DefMI, 15170b57cec5SDimitry Andric unsigned DefIdx) const { 15180b57cec5SDimitry Andric const InstrItineraryData *ItinData = SchedModel.getInstrItineraries(); 15190b57cec5SDimitry Andric if (!ItinData || ItinData->isEmpty()) 15200b57cec5SDimitry Andric return false; 15210b57cec5SDimitry Andric 15220b57cec5SDimitry Andric unsigned DefClass = DefMI.getDesc().getSchedClass(); 15235f757f3fSDimitry Andric std::optional<unsigned> DefCycle = 15245f757f3fSDimitry Andric ItinData->getOperandCycle(DefClass, DefIdx); 15255f757f3fSDimitry Andric return DefCycle && DefCycle <= 1U; 15265f757f3fSDimitry Andric } 15275f757f3fSDimitry Andric 15285f757f3fSDimitry Andric bool TargetInstrInfo::isFunctionSafeToSplit(const MachineFunction &MF) const { 15295f757f3fSDimitry Andric // TODO: We don't split functions where a section attribute has been set 15305f757f3fSDimitry Andric // since the split part may not be placed in a contiguous region. It may also 15315f757f3fSDimitry Andric // be more beneficial to augment the linker to ensure contiguous layout of 15325f757f3fSDimitry Andric // split functions within the same section as specified by the attribute. 1533*0fca6ea1SDimitry Andric if (MF.getFunction().hasSection()) 15345f757f3fSDimitry Andric return false; 15355f757f3fSDimitry Andric 15365f757f3fSDimitry Andric // We don't want to proceed further for cold functions 15375f757f3fSDimitry Andric // or functions of unknown hotness. Lukewarm functions have no prefix. 
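// (Editorial note, not an upstream comment: these prefixes are typically
// attached from profile information, e.g. PGO-driven passes may mark cold
// functions "unlikely" and functions without usable profile data "unknown";
// the prefix later contributes to the emitted section name, e.g.
// ".text.unlikely.".)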
15385f757f3fSDimitry Andric std::optional<StringRef> SectionPrefix = MF.getFunction().getSectionPrefix(); 15395f757f3fSDimitry Andric if (SectionPrefix && 15405f757f3fSDimitry Andric (*SectionPrefix == "unlikely" || *SectionPrefix == "unknown")) { 15415f757f3fSDimitry Andric return false; 15425f757f3fSDimitry Andric } 15435f757f3fSDimitry Andric 15445f757f3fSDimitry Andric return true; 15450b57cec5SDimitry Andric } 15460b57cec5SDimitry Andric 1547bdd1243dSDimitry Andric std::optional<ParamLoadedValue> 1548480093f4SDimitry Andric TargetInstrInfo::describeLoadedValue(const MachineInstr &MI, 1549480093f4SDimitry Andric Register Reg) const { 15508bcb0991SDimitry Andric const MachineFunction *MF = MI.getMF(); 1551480093f4SDimitry Andric const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo(); 1552480093f4SDimitry Andric DIExpression *Expr = DIExpression::get(MF->getFunction().getContext(), {}); 1553480093f4SDimitry Andric int64_t Offset; 15545ffd83dbSDimitry Andric bool OffsetIsScalable; 15558bcb0991SDimitry Andric 1556480093f4SDimitry Andric // To simplify the sub-register handling, verify that we only need to 1557480093f4SDimitry Andric // consider physical registers. 1558480093f4SDimitry Andric assert(MF->getProperties().hasProperty( 1559480093f4SDimitry Andric MachineFunctionProperties::Property::NoVRegs)); 1560480093f4SDimitry Andric 1561480093f4SDimitry Andric if (auto DestSrc = isCopyInstr(MI)) { 1562480093f4SDimitry Andric Register DestReg = DestSrc->Destination->getReg(); 1563480093f4SDimitry Andric 15645ffd83dbSDimitry Andric // If the copy destination is the forwarding reg, describe the forwarding 15655ffd83dbSDimitry Andric // reg using the copy source as the backup location. Example: 15665ffd83dbSDimitry Andric // 15675ffd83dbSDimitry Andric // x0 = MOV x7 15685ffd83dbSDimitry Andric // call callee(x0) ; x0 described as x7 1569480093f4SDimitry Andric if (Reg == DestReg) 1570480093f4SDimitry Andric return ParamLoadedValue(*DestSrc->Source, Expr); 1571480093f4SDimitry Andric 157206c3fb27SDimitry Andric // If the target's hook couldn't describe this copy, give up. 1573bdd1243dSDimitry Andric return std::nullopt; 1574480093f4SDimitry Andric } else if (auto RegImm = isAddImmediate(MI, Reg)) { 1575480093f4SDimitry Andric Register SrcReg = RegImm->Reg; 1576480093f4SDimitry Andric Offset = RegImm->Imm; 1577480093f4SDimitry Andric Expr = DIExpression::prepend(Expr, DIExpression::ApplyOffset, Offset); 1578480093f4SDimitry Andric return ParamLoadedValue(MachineOperand::CreateReg(SrcReg, false), Expr); 1579480093f4SDimitry Andric } else if (MI.hasOneMemOperand()) { 1580480093f4SDimitry Andric // Only describe memory which provably does not escape the function. As 1581480093f4SDimitry Andric // described in llvm.org/PR43343, escaped memory may be clobbered by the 1582480093f4SDimitry Andric // callee (or by another thread). 1583480093f4SDimitry Andric const auto &TII = MF->getSubtarget().getInstrInfo(); 1584480093f4SDimitry Andric const MachineFrameInfo &MFI = MF->getFrameInfo(); 1585480093f4SDimitry Andric const MachineMemOperand *MMO = MI.memoperands()[0]; 1586480093f4SDimitry Andric const PseudoSourceValue *PSV = MMO->getPseudoValue(); 1587480093f4SDimitry Andric 1588480093f4SDimitry Andric // If the address points to "special" memory (e.g. a spill slot), it's 1589480093f4SDimitry Andric // sufficient to check that it isn't aliased by any high-level IR value. 
1590480093f4SDimitry Andric if (!PSV || PSV->mayAlias(&MFI)) 1591bdd1243dSDimitry Andric return std::nullopt; 1592480093f4SDimitry Andric 1593480093f4SDimitry Andric const MachineOperand *BaseOp; 15945ffd83dbSDimitry Andric if (!TII->getMemOperandWithOffset(MI, BaseOp, Offset, OffsetIsScalable, 15955ffd83dbSDimitry Andric TRI)) 1596bdd1243dSDimitry Andric return std::nullopt; 1597480093f4SDimitry Andric 15985ffd83dbSDimitry Andric // FIXME: Scalable offsets are not yet handled in the offset code below. 15995ffd83dbSDimitry Andric if (OffsetIsScalable) 1600bdd1243dSDimitry Andric return std::nullopt; 16015ffd83dbSDimitry Andric 16025ffd83dbSDimitry Andric // TODO: Can currently only handle mem instructions with a single define. 16035ffd83dbSDimitry Andric // An example from the x86 target: 16045ffd83dbSDimitry Andric // ... 16055ffd83dbSDimitry Andric // DIV64m $rsp, 1, $noreg, 24, $noreg, implicit-def dead $rax, implicit-def $rdx 16065ffd83dbSDimitry Andric // ... 16075ffd83dbSDimitry Andric // 16085ffd83dbSDimitry Andric if (MI.getNumExplicitDefs() != 1) 1609bdd1243dSDimitry Andric return std::nullopt; 1610480093f4SDimitry Andric 1611480093f4SDimitry Andric // TODO: In what way do we need to take Reg into consideration here? 1612480093f4SDimitry Andric 1613480093f4SDimitry Andric SmallVector<uint64_t, 8> Ops; 1614480093f4SDimitry Andric DIExpression::appendOffset(Ops, Offset); 1615480093f4SDimitry Andric Ops.push_back(dwarf::DW_OP_deref_size); 1616*0fca6ea1SDimitry Andric Ops.push_back(MMO->getSize().hasValue() ? MMO->getSize().getValue() 1617*0fca6ea1SDimitry Andric : ~UINT64_C(0)); 1618480093f4SDimitry Andric Expr = DIExpression::prependOpcodes(Expr, Ops); 1619480093f4SDimitry Andric return ParamLoadedValue(*BaseOp, Expr); 16208bcb0991SDimitry Andric } 16218bcb0991SDimitry Andric 1622bdd1243dSDimitry Andric return std::nullopt; 16238bcb0991SDimitry Andric } 16248bcb0991SDimitry Andric 16255f757f3fSDimitry Andric // Get the call frame size just before MI. 16265f757f3fSDimitry Andric unsigned TargetInstrInfo::getCallFrameSizeAt(MachineInstr &MI) const { 16275f757f3fSDimitry Andric // Search backwards from MI for the most recent call frame instruction. 16285f757f3fSDimitry Andric MachineBasicBlock *MBB = MI.getParent(); 16295f757f3fSDimitry Andric for (auto &AdjI : reverse(make_range(MBB->instr_begin(), MI.getIterator()))) { 16305f757f3fSDimitry Andric if (AdjI.getOpcode() == getCallFrameSetupOpcode()) 16315f757f3fSDimitry Andric return getFrameTotalSize(AdjI); 16325f757f3fSDimitry Andric if (AdjI.getOpcode() == getCallFrameDestroyOpcode()) 16335f757f3fSDimitry Andric return 0; 16345f757f3fSDimitry Andric } 16355f757f3fSDimitry Andric 16365f757f3fSDimitry Andric // If none was found, use the call frame size from the start of the basic 16375f757f3fSDimitry Andric // block. 16385f757f3fSDimitry Andric return MBB->getCallFrameSize(); 16395f757f3fSDimitry Andric } 16405f757f3fSDimitry Andric 16410b57cec5SDimitry Andric /// Both DefMI and UseMI must be valid. By default, call directly to the 16420b57cec5SDimitry Andric /// itinerary. This may be overridden by the target.
16435f757f3fSDimitry Andric std::optional<unsigned> TargetInstrInfo::getOperandLatency( 16445f757f3fSDimitry Andric const InstrItineraryData *ItinData, const MachineInstr &DefMI, 16455f757f3fSDimitry Andric unsigned DefIdx, const MachineInstr &UseMI, unsigned UseIdx) const { 16460b57cec5SDimitry Andric unsigned DefClass = DefMI.getDesc().getSchedClass(); 16470b57cec5SDimitry Andric unsigned UseClass = UseMI.getDesc().getSchedClass(); 16480b57cec5SDimitry Andric return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx); 16490b57cec5SDimitry Andric } 16500b57cec5SDimitry Andric 16510b57cec5SDimitry Andric bool TargetInstrInfo::getRegSequenceInputs( 16520b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx, 16530b57cec5SDimitry Andric SmallVectorImpl<RegSubRegPairAndIdx> &InputRegs) const { 16540b57cec5SDimitry Andric assert((MI.isRegSequence() || 16550b57cec5SDimitry Andric MI.isRegSequenceLike()) && "Instruction do not have the proper type"); 16560b57cec5SDimitry Andric 16570b57cec5SDimitry Andric if (!MI.isRegSequence()) 16580b57cec5SDimitry Andric return getRegSequenceLikeInputs(MI, DefIdx, InputRegs); 16590b57cec5SDimitry Andric 16600b57cec5SDimitry Andric // We are looking at: 16610b57cec5SDimitry Andric // Def = REG_SEQUENCE v0, sub0, v1, sub1, ... 16620b57cec5SDimitry Andric assert(DefIdx == 0 && "REG_SEQUENCE only has one def"); 16630b57cec5SDimitry Andric for (unsigned OpIdx = 1, EndOpIdx = MI.getNumOperands(); OpIdx != EndOpIdx; 16640b57cec5SDimitry Andric OpIdx += 2) { 16650b57cec5SDimitry Andric const MachineOperand &MOReg = MI.getOperand(OpIdx); 16660b57cec5SDimitry Andric if (MOReg.isUndef()) 16670b57cec5SDimitry Andric continue; 16680b57cec5SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(OpIdx + 1); 16690b57cec5SDimitry Andric assert(MOSubIdx.isImm() && 16700b57cec5SDimitry Andric "One of the subindex of the reg_sequence is not an immediate"); 16710b57cec5SDimitry Andric // Record Reg:SubReg, SubIdx. 16720b57cec5SDimitry Andric InputRegs.push_back(RegSubRegPairAndIdx(MOReg.getReg(), MOReg.getSubReg(), 16730b57cec5SDimitry Andric (unsigned)MOSubIdx.getImm())); 16740b57cec5SDimitry Andric } 16750b57cec5SDimitry Andric return true; 16760b57cec5SDimitry Andric } 16770b57cec5SDimitry Andric 16780b57cec5SDimitry Andric bool TargetInstrInfo::getExtractSubregInputs( 16790b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx, 16800b57cec5SDimitry Andric RegSubRegPairAndIdx &InputReg) const { 16810b57cec5SDimitry Andric assert((MI.isExtractSubreg() || 16820b57cec5SDimitry Andric MI.isExtractSubregLike()) && "Instruction do not have the proper type"); 16830b57cec5SDimitry Andric 16840b57cec5SDimitry Andric if (!MI.isExtractSubreg()) 16850b57cec5SDimitry Andric return getExtractSubregLikeInputs(MI, DefIdx, InputReg); 16860b57cec5SDimitry Andric 16870b57cec5SDimitry Andric // We are looking at: 16880b57cec5SDimitry Andric // Def = EXTRACT_SUBREG v0.sub1, sub0. 
16890b57cec5SDimitry Andric assert(DefIdx == 0 && "EXTRACT_SUBREG only has one def"); 16900b57cec5SDimitry Andric const MachineOperand &MOReg = MI.getOperand(1); 16910b57cec5SDimitry Andric if (MOReg.isUndef()) 16920b57cec5SDimitry Andric return false; 16930b57cec5SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(2); 16940b57cec5SDimitry Andric assert(MOSubIdx.isImm() && 16950b57cec5SDimitry Andric "The subindex of the extract_subreg is not an immediate"); 16960b57cec5SDimitry Andric 16970b57cec5SDimitry Andric InputReg.Reg = MOReg.getReg(); 16980b57cec5SDimitry Andric InputReg.SubReg = MOReg.getSubReg(); 16990b57cec5SDimitry Andric InputReg.SubIdx = (unsigned)MOSubIdx.getImm(); 17000b57cec5SDimitry Andric return true; 17010b57cec5SDimitry Andric } 17020b57cec5SDimitry Andric 17030b57cec5SDimitry Andric bool TargetInstrInfo::getInsertSubregInputs( 17040b57cec5SDimitry Andric const MachineInstr &MI, unsigned DefIdx, 17050b57cec5SDimitry Andric RegSubRegPair &BaseReg, RegSubRegPairAndIdx &InsertedReg) const { 17060b57cec5SDimitry Andric assert((MI.isInsertSubreg() || 17070b57cec5SDimitry Andric MI.isInsertSubregLike()) && "Instruction do not have the proper type"); 17080b57cec5SDimitry Andric 17090b57cec5SDimitry Andric if (!MI.isInsertSubreg()) 17100b57cec5SDimitry Andric return getInsertSubregLikeInputs(MI, DefIdx, BaseReg, InsertedReg); 17110b57cec5SDimitry Andric 17120b57cec5SDimitry Andric // We are looking at: 17130b57cec5SDimitry Andric // Def = INSERT_SUBREG v0, v1, sub0. 17140b57cec5SDimitry Andric assert(DefIdx == 0 && "INSERT_SUBREG only has one def"); 17150b57cec5SDimitry Andric const MachineOperand &MOBaseReg = MI.getOperand(1); 17160b57cec5SDimitry Andric const MachineOperand &MOInsertedReg = MI.getOperand(2); 17170b57cec5SDimitry Andric if (MOInsertedReg.isUndef()) 17180b57cec5SDimitry Andric return false; 17190b57cec5SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(3); 17200b57cec5SDimitry Andric assert(MOSubIdx.isImm() && 17210b57cec5SDimitry Andric "One of the subindex of the reg_sequence is not an immediate"); 17220b57cec5SDimitry Andric BaseReg.Reg = MOBaseReg.getReg(); 17230b57cec5SDimitry Andric BaseReg.SubReg = MOBaseReg.getSubReg(); 17240b57cec5SDimitry Andric 17250b57cec5SDimitry Andric InsertedReg.Reg = MOInsertedReg.getReg(); 17260b57cec5SDimitry Andric InsertedReg.SubReg = MOInsertedReg.getSubReg(); 17270b57cec5SDimitry Andric InsertedReg.SubIdx = (unsigned)MOSubIdx.getImm(); 17280b57cec5SDimitry Andric return true; 17290b57cec5SDimitry Andric } 17308bcb0991SDimitry Andric 17315ffd83dbSDimitry Andric // Returns a MIRPrinter comment for this machine operand.
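// (Editorial illustration, not an upstream comment: for an inline-asm operand
// flag describing a register use that is constrained to a register class and
// tied to output operand $0, the string assembled below would read roughly
// like "reguse:GR64 tiedto:$0"; the kind name, register-class suffix, memory
// constraint name and "foldable" marker all come from the checks in the
// function that follows, and GR64 is only a sample register-class name.)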
17325ffd83dbSDimitry Andric std::string TargetInstrInfo::createMIROperandComment( 17335ffd83dbSDimitry Andric const MachineInstr &MI, const MachineOperand &Op, unsigned OpIdx, 17345ffd83dbSDimitry Andric const TargetRegisterInfo *TRI) const { 17355ffd83dbSDimitry Andric 17365ffd83dbSDimitry Andric if (!MI.isInlineAsm()) 17375ffd83dbSDimitry Andric return ""; 17385ffd83dbSDimitry Andric 17395ffd83dbSDimitry Andric std::string Flags; 17405ffd83dbSDimitry Andric raw_string_ostream OS(Flags); 17415ffd83dbSDimitry Andric 17425ffd83dbSDimitry Andric if (OpIdx == InlineAsm::MIOp_ExtraInfo) { 17435ffd83dbSDimitry Andric // Print HasSideEffects, MayLoad, MayStore, IsAlignStack 17445ffd83dbSDimitry Andric unsigned ExtraInfo = Op.getImm(); 17455ffd83dbSDimitry Andric bool First = true; 17465ffd83dbSDimitry Andric for (StringRef Info : InlineAsm::getExtraInfoNames(ExtraInfo)) { 17475ffd83dbSDimitry Andric if (!First) 17485ffd83dbSDimitry Andric OS << " "; 17495ffd83dbSDimitry Andric First = false; 17505ffd83dbSDimitry Andric OS << Info; 17515ffd83dbSDimitry Andric } 17525ffd83dbSDimitry Andric 1753*0fca6ea1SDimitry Andric return Flags; 17545ffd83dbSDimitry Andric } 17555ffd83dbSDimitry Andric 17565ffd83dbSDimitry Andric int FlagIdx = MI.findInlineAsmFlagIdx(OpIdx); 17575ffd83dbSDimitry Andric if (FlagIdx < 0 || (unsigned)FlagIdx != OpIdx) 17585ffd83dbSDimitry Andric return ""; 17595ffd83dbSDimitry Andric 17605ffd83dbSDimitry Andric assert(Op.isImm() && "Expected flag operand to be an immediate"); 17615ffd83dbSDimitry Andric // Pretty print the inline asm operand descriptor. 17625ffd83dbSDimitry Andric unsigned Flag = Op.getImm(); 17635f757f3fSDimitry Andric const InlineAsm::Flag F(Flag); 17645f757f3fSDimitry Andric OS << F.getKindName(); 17655ffd83dbSDimitry Andric 17665f757f3fSDimitry Andric unsigned RCID; 17675f757f3fSDimitry Andric if (!F.isImmKind() && !F.isMemKind() && F.hasRegClassConstraint(RCID)) { 17685ffd83dbSDimitry Andric if (TRI) { 17695ffd83dbSDimitry Andric OS << ':' << TRI->getRegClassName(TRI->getRegClass(RCID)); 17705ffd83dbSDimitry Andric } else 17715ffd83dbSDimitry Andric OS << ":RC" << RCID; 17725ffd83dbSDimitry Andric } 17735ffd83dbSDimitry Andric 17745f757f3fSDimitry Andric if (F.isMemKind()) { 17755f757f3fSDimitry Andric InlineAsm::ConstraintCode MCID = F.getMemoryConstraintID(); 17765ffd83dbSDimitry Andric OS << ":" << InlineAsm::getMemConstraintName(MCID); 17775ffd83dbSDimitry Andric } 17785ffd83dbSDimitry Andric 17795f757f3fSDimitry Andric unsigned TiedTo; 17805f757f3fSDimitry Andric if (F.isUseOperandTiedToDef(TiedTo)) 17815ffd83dbSDimitry Andric OS << " tiedto:$" << TiedTo; 17825ffd83dbSDimitry Andric 17835f757f3fSDimitry Andric if ((F.isRegDefKind() || F.isRegDefEarlyClobberKind() || F.isRegUseKind()) && 17845f757f3fSDimitry Andric F.getRegMayBeFolded()) 17855f757f3fSDimitry Andric OS << " foldable"; 17865f757f3fSDimitry Andric 1787*0fca6ea1SDimitry Andric return Flags; 17885ffd83dbSDimitry Andric } 17895ffd83dbSDimitry Andric 179081ad6265SDimitry Andric TargetInstrInfo::PipelinerLoopInfo::~PipelinerLoopInfo() = default; 17914824e7fdSDimitry Andric 17924824e7fdSDimitry Andric void TargetInstrInfo::mergeOutliningCandidateAttributes( 17934824e7fdSDimitry Andric Function &F, std::vector<outliner::Candidate> &Candidates) const { 17944824e7fdSDimitry Andric // Include target features from an arbitrary candidate for the outlined 17954824e7fdSDimitry Andric // function. 
This makes sure the outlined function knows what kinds of 17964824e7fdSDimitry Andric // instructions are going into it. This is fine, since all parent functions 17974824e7fdSDimitry Andric // must necessarily support the instructions that are in the outlined region. 17984824e7fdSDimitry Andric outliner::Candidate &FirstCand = Candidates.front(); 17994824e7fdSDimitry Andric const Function &ParentFn = FirstCand.getMF()->getFunction(); 18004824e7fdSDimitry Andric if (ParentFn.hasFnAttribute("target-features")) 18014824e7fdSDimitry Andric F.addFnAttr(ParentFn.getFnAttribute("target-features")); 1802bdd1243dSDimitry Andric if (ParentFn.hasFnAttribute("target-cpu")) 1803bdd1243dSDimitry Andric F.addFnAttr(ParentFn.getFnAttribute("target-cpu")); 18044824e7fdSDimitry Andric 18054824e7fdSDimitry Andric // Set nounwind, so we don't generate eh_frame. 18064824e7fdSDimitry Andric if (llvm::all_of(Candidates, [](const outliner::Candidate &C) { 18074824e7fdSDimitry Andric return C.getMF()->getFunction().hasFnAttribute(Attribute::NoUnwind); 18084824e7fdSDimitry Andric })) 18094824e7fdSDimitry Andric F.addFnAttr(Attribute::NoUnwind); 18104824e7fdSDimitry Andric } 18110eae32dcSDimitry Andric 181206c3fb27SDimitry Andric outliner::InstrType TargetInstrInfo::getOutliningType( 181306c3fb27SDimitry Andric MachineBasicBlock::iterator &MIT, unsigned Flags) const { 181406c3fb27SDimitry Andric MachineInstr &MI = *MIT; 181506c3fb27SDimitry Andric 181606c3fb27SDimitry Andric // NOTE: MI.isMetaInstruction() will match CFI_INSTRUCTION, but some targets 181706c3fb27SDimitry Andric // have support for outlining those. Special-case that here. 181806c3fb27SDimitry Andric if (MI.isCFIInstruction()) 181906c3fb27SDimitry Andric // Just go right to the target implementation. 182006c3fb27SDimitry Andric return getOutliningTypeImpl(MIT, Flags); 182106c3fb27SDimitry Andric 182206c3fb27SDimitry Andric // Be conservative about inline assembly. 182306c3fb27SDimitry Andric if (MI.isInlineAsm()) 182406c3fb27SDimitry Andric return outliner::InstrType::Illegal; 182506c3fb27SDimitry Andric 182606c3fb27SDimitry Andric // Labels generally can't safely be outlined. 182706c3fb27SDimitry Andric if (MI.isLabel()) 182806c3fb27SDimitry Andric return outliner::InstrType::Illegal; 182906c3fb27SDimitry Andric 183006c3fb27SDimitry Andric // Don't let debug instructions impact analysis. 183106c3fb27SDimitry Andric if (MI.isDebugInstr()) 183206c3fb27SDimitry Andric return outliner::InstrType::Invisible; 183306c3fb27SDimitry Andric 183406c3fb27SDimitry Andric // Some other special cases. 183506c3fb27SDimitry Andric switch (MI.getOpcode()) { 183606c3fb27SDimitry Andric case TargetOpcode::IMPLICIT_DEF: 183706c3fb27SDimitry Andric case TargetOpcode::KILL: 183806c3fb27SDimitry Andric case TargetOpcode::LIFETIME_START: 183906c3fb27SDimitry Andric case TargetOpcode::LIFETIME_END: 184006c3fb27SDimitry Andric return outliner::InstrType::Invisible; 184106c3fb27SDimitry Andric default: 184206c3fb27SDimitry Andric break; 184306c3fb27SDimitry Andric } 184406c3fb27SDimitry Andric 184506c3fb27SDimitry Andric // Is this a terminator for a basic block? 184606c3fb27SDimitry Andric if (MI.isTerminator()) { 184706c3fb27SDimitry Andric // If this is a branch to another block, we can't outline it. 184806c3fb27SDimitry Andric if (!MI.getParent()->succ_empty()) 184906c3fb27SDimitry Andric return outliner::InstrType::Illegal; 185006c3fb27SDimitry Andric 185106c3fb27SDimitry Andric // Don't outline if the branch is not unconditional. 
185206c3fb27SDimitry Andric if (isPredicated(MI)) 185306c3fb27SDimitry Andric return outliner::InstrType::Illegal; 185406c3fb27SDimitry Andric } 185506c3fb27SDimitry Andric 185606c3fb27SDimitry Andric // Make sure none of the operands of this instruction do anything that 185706c3fb27SDimitry Andric // might break if they're moved outside their current function. 185806c3fb27SDimitry Andric // This includes MachineBasicBlock references, BlockAddresses, 185906c3fb27SDimitry Andric // Constant pool indices and jump table indices. 186006c3fb27SDimitry Andric // 186106c3fb27SDimitry Andric // A quick note on MO_TargetIndex: 186206c3fb27SDimitry Andric // This doesn't seem to be used in any of the architectures that the 186306c3fb27SDimitry Andric // MachineOutliner supports, but it was still filtered out in all of them. 186406c3fb27SDimitry Andric // There was one exception (RISC-V), but MO_TargetIndex also isn't used there. 186506c3fb27SDimitry Andric // As such, this check is removed both here and in the target-specific 186606c3fb27SDimitry Andric // implementations. Instead, we assert to make sure this doesn't 186706c3fb27SDimitry Andric // catch anyone off-guard somewhere down the line. 186806c3fb27SDimitry Andric for (const MachineOperand &MOP : MI.operands()) { 186906c3fb27SDimitry Andric // If you hit this assertion, please remove it and adjust 187006c3fb27SDimitry Andric // `getOutliningTypeImpl` for your target appropriately if necessary. 187106c3fb27SDimitry Andric // Adding the assertion back to other supported architectures 187206c3fb27SDimitry Andric // would be nice too :) 187306c3fb27SDimitry Andric assert(!MOP.isTargetIndex() && "This isn't used quite yet!"); 187406c3fb27SDimitry Andric 187506c3fb27SDimitry Andric // CFI instructions should already have been filtered out at this point. 187606c3fb27SDimitry Andric assert(!MOP.isCFIIndex() && "CFI instructions handled elsewhere!"); 187706c3fb27SDimitry Andric 187806c3fb27SDimitry Andric // PrologEpilogInserter should've already run at this point. 187906c3fb27SDimitry Andric assert(!MOP.isFI() && "FrameIndex instructions should be gone by now!"); 188006c3fb27SDimitry Andric 188106c3fb27SDimitry Andric if (MOP.isMBB() || MOP.isBlockAddress() || MOP.isCPI() || MOP.isJTI()) 188206c3fb27SDimitry Andric return outliner::InstrType::Illegal; 188306c3fb27SDimitry Andric } 188406c3fb27SDimitry Andric 188506c3fb27SDimitry Andric // If we don't know, delegate to the target-specific hook. 188606c3fb27SDimitry Andric return getOutliningTypeImpl(MIT, Flags); 188706c3fb27SDimitry Andric } 188806c3fb27SDimitry Andric 18890eae32dcSDimitry Andric bool TargetInstrInfo::isMBBSafeToOutlineFrom(MachineBasicBlock &MBB, 18900eae32dcSDimitry Andric unsigned &Flags) const { 18910eae32dcSDimitry Andric // Some instrumentations create special TargetOpcodes at the start which 18920eae32dcSDimitry Andric // expand to special code sequences which must be present. 18930eae32dcSDimitry Andric auto First = MBB.getFirstNonDebugInstr(); 189406c3fb27SDimitry Andric if (First == MBB.end()) 189506c3fb27SDimitry Andric return true; 189606c3fb27SDimitry Andric 189706c3fb27SDimitry Andric if (First->getOpcode() == TargetOpcode::FENTRY_CALL || 189806c3fb27SDimitry Andric First->getOpcode() == TargetOpcode::PATCHABLE_FUNCTION_ENTER) 18990eae32dcSDimitry Andric return false; 19000eae32dcSDimitry Andric 190106c3fb27SDimitry Andric // Some instrumentations create special pseudo-instructions at or just before 190206c3fb27SDimitry Andric // the end that must be present.
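// (Editorial note, not an upstream comment: the PATCHABLE_* opcodes checked
// below are the sleds emitted for XRay-style instrumentation, and FENTRY_CALL
// above comes from fentry-style function entry instrumentation; outlining away
// the blocks that hold them would break the expected patch points.)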
190306c3fb27SDimitry Andric auto Last = MBB.getLastNonDebugInstr(); 190406c3fb27SDimitry Andric if (Last->getOpcode() == TargetOpcode::PATCHABLE_RET || 190506c3fb27SDimitry Andric Last->getOpcode() == TargetOpcode::PATCHABLE_TAIL_CALL) 190606c3fb27SDimitry Andric return false; 190706c3fb27SDimitry Andric 190806c3fb27SDimitry Andric if (Last != First && Last->isReturn()) { 190906c3fb27SDimitry Andric --Last; 191006c3fb27SDimitry Andric if (Last->getOpcode() == TargetOpcode::PATCHABLE_FUNCTION_EXIT || 191106c3fb27SDimitry Andric Last->getOpcode() == TargetOpcode::PATCHABLE_TAIL_CALL) 191206c3fb27SDimitry Andric return false; 191306c3fb27SDimitry Andric } 19140eae32dcSDimitry Andric return true; 19150eae32dcSDimitry Andric } 1916