//===- SPIRVInstructionSelector.cpp ------------------------------*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the targeting of the InstructionSelector class for
// SPIRV.
// TODO: This should be generated by TableGen.
//
//===----------------------------------------------------------------------===//

#include "SPIRV.h"
#include "SPIRVGlobalRegistry.h"
#include "SPIRVInstrInfo.h"
#include "SPIRVRegisterBankInfo.h"
#include "SPIRVRegisterInfo.h"
#include "SPIRVTargetMachine.h"
#include "SPIRVUtils.h"
#include "llvm/ADT/APFloat.h"
#include "llvm/CodeGen/GlobalISel/InstructionSelector.h"
#include "llvm/CodeGen/GlobalISel/InstructionSelectorImpl.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/IR/IntrinsicsSPIRV.h"
#include "llvm/Support/Debug.h"

#define DEBUG_TYPE "spirv-isel"

using namespace llvm;

namespace {

#define GET_GLOBALISEL_PREDICATE_BITSET
#include "SPIRVGenGlobalISel.inc"
#undef GET_GLOBALISEL_PREDICATE_BITSET

class SPIRVInstructionSelector : public InstructionSelector {
  const SPIRVSubtarget &STI;
  const SPIRVInstrInfo &TII;
  const SPIRVRegisterInfo &TRI;
  const RegisterBankInfo &RBI;
  SPIRVGlobalRegistry &GR;
  MachineRegisterInfo *MRI;

public:
  SPIRVInstructionSelector(const SPIRVTargetMachine &TM,
                           const SPIRVSubtarget &ST,
                           const RegisterBankInfo &RBI);
  void setupMF(MachineFunction &MF, GISelKnownBits *KB,
               CodeGenCoverage &CoverageInfo, ProfileSummaryInfo *PSI,
               BlockFrequencyInfo *BFI) override;
  // Common selection code. Instruction-specific selection occurs in spvSelect.
  bool select(MachineInstr &I) override;
  static const char *getName() { return DEBUG_TYPE; }

#define GET_GLOBALISEL_PREDICATES_DECL
#include "SPIRVGenGlobalISel.inc"
#undef GET_GLOBALISEL_PREDICATES_DECL

#define GET_GLOBALISEL_TEMPORARIES_DECL
#include "SPIRVGenGlobalISel.inc"
#undef GET_GLOBALISEL_TEMPORARIES_DECL

private:
  // tblgen-erated 'select' implementation, used as the initial selector for
  // the patterns that don't require complex C++.
  bool selectImpl(MachineInstr &I, CodeGenCoverage &CoverageInfo) const;

  // All instruction-specific selection that didn't happen in "select()".
  // It is basically a large switch/case delegating to the other select*
  // methods.
  bool spvSelect(Register ResVReg, const SPIRVType *ResType,
                 MachineInstr &I) const;

  bool selectGlobalValue(Register ResVReg, MachineInstr &I,
                         const MachineInstr *Init = nullptr) const;

  bool selectUnOpWithSrc(Register ResVReg, const SPIRVType *ResType,
                         MachineInstr &I, Register SrcReg,
                         unsigned Opcode) const;
  bool selectUnOp(Register ResVReg, const SPIRVType *ResType, MachineInstr &I,
                  unsigned Opcode) const;

  bool selectLoad(Register ResVReg, const SPIRVType *ResType,
                  MachineInstr &I) const;
  bool selectStore(MachineInstr &I) const;

  bool selectMemOperation(Register ResVReg, MachineInstr &I) const;

  bool selectAtomicRMW(Register ResVReg, const SPIRVType *ResType,
                       MachineInstr &I, unsigned NewOpcode) const;

  bool selectAtomicCmpXchg(Register ResVReg, const SPIRVType *ResType,
                           MachineInstr &I) const;

  bool selectFence(MachineInstr &I) const;

  bool selectAddrSpaceCast(Register ResVReg, const SPIRVType *ResType,
                           MachineInstr &I) const;

  bool selectBitreverse(Register ResVReg, const SPIRVType *ResType,
                        MachineInstr &I) const;

  bool selectConstVector(Register ResVReg, const SPIRVType *ResType,
                         MachineInstr &I) const;

  bool selectCmp(Register ResVReg, const SPIRVType *ResType,
                 unsigned comparisonOpcode, MachineInstr &I) const;

  bool selectICmp(Register ResVReg, const SPIRVType *ResType,
                  MachineInstr &I) const;
  bool selectFCmp(Register ResVReg, const SPIRVType *ResType,
                  MachineInstr &I) const;

  void renderImm32(MachineInstrBuilder &MIB, const MachineInstr &I,
                   int OpIdx) const;
  void renderFImm32(MachineInstrBuilder &MIB, const MachineInstr &I,
                    int OpIdx) const;

  bool selectConst(Register ResVReg, const SPIRVType *ResType,
                   const APInt &Imm, MachineInstr &I) const;

  bool selectSelect(Register ResVReg, const SPIRVType *ResType,
                    MachineInstr &I, bool IsSigned) const;
  bool selectIToF(Register ResVReg, const SPIRVType *ResType, MachineInstr &I,
                  bool IsSigned, unsigned Opcode) const;
  bool selectExt(Register ResVReg, const SPIRVType *ResType, MachineInstr &I,
                 bool IsSigned) const;

  bool selectTrunc(Register ResVReg, const SPIRVType *ResType,
                   MachineInstr &I) const;

  bool selectIntToBool(Register IntReg, Register ResVReg,
                       const SPIRVType *intTy, const SPIRVType *boolTy,
                       MachineInstr &I) const;

  bool selectOpUndef(Register ResVReg, const SPIRVType *ResType,
                     MachineInstr &I) const;
  bool selectIntrinsic(Register ResVReg, const SPIRVType *ResType,
                       MachineInstr &I) const;
  bool selectExtractVal(Register ResVReg, const SPIRVType *ResType,
                        MachineInstr &I) const;
  bool selectInsertVal(Register ResVReg, const SPIRVType *ResType,
                       MachineInstr &I) const;
  bool selectExtractElt(Register ResVReg, const SPIRVType *ResType,
                        MachineInstr &I) const;
  bool selectInsertElt(Register ResVReg, const SPIRVType *ResType,
                       MachineInstr &I) const;
  bool selectGEP(Register ResVReg, const SPIRVType *ResType,
                 MachineInstr &I) const;

  bool selectFrameIndex(Register ResVReg, const SPIRVType *ResType,
                        MachineInstr &I) const;

  bool selectBranch(MachineInstr &I) const;
  bool selectBranchCond(MachineInstr &I) const;

  bool selectPhi(Register ResVReg, const SPIRVType *ResType,
                 MachineInstr &I) const;

  Register buildI32Constant(uint32_t Val, MachineInstr &I,
                            const SPIRVType *ResType = nullptr) const;

  Register buildZerosVal(const SPIRVType *ResType, MachineInstr &I) const;
  Register buildOnesVal(bool AllOnes, const SPIRVType *ResType,
                        MachineInstr &I) const;
};

} // end anonymous namespace

#define GET_GLOBALISEL_IMPL
#include "SPIRVGenGlobalISel.inc"
#undef GET_GLOBALISEL_IMPL

SPIRVInstructionSelector::SPIRVInstructionSelector(const SPIRVTargetMachine &TM,
                                                   const SPIRVSubtarget &ST,
                                                   const RegisterBankInfo &RBI)
    : InstructionSelector(), STI(ST), TII(*ST.getInstrInfo()),
      TRI(*ST.getRegisterInfo()), RBI(RBI), GR(*ST.getSPIRVGlobalRegistry()),
#define GET_GLOBALISEL_PREDICATES_INIT
#include "SPIRVGenGlobalISel.inc"
#undef GET_GLOBALISEL_PREDICATES_INIT
#define GET_GLOBALISEL_TEMPORARIES_INIT
#include "SPIRVGenGlobalISel.inc"
#undef GET_GLOBALISEL_TEMPORARIES_INIT
{
}

void SPIRVInstructionSelector::setupMF(MachineFunction &MF, GISelKnownBits *KB,
                                       CodeGenCoverage &CoverageInfo,
                                       ProfileSummaryInfo *PSI,
                                       BlockFrequencyInfo *BFI) {
  MRI = &MF.getRegInfo();
  GR.setCurrentFunc(MF);
  InstructionSelector::setupMF(MF, KB, CoverageInfo, PSI, BFI);
}

// Defined in SPIRVLegalizerInfo.cpp.
extern bool isTypeFoldingSupported(unsigned Opcode);

bool SPIRVInstructionSelector::select(MachineInstr &I) {
  assert(I.getParent() && "Instruction should be in a basic block!");
  assert(I.getParent()->getParent() && "Instruction should be in a function!");

  unsigned Opcode = I.getOpcode();
  // If it's not a GMIR instruction, we've selected it already.
  if (!isPreISelGenericOpcode(Opcode)) {
    if (Opcode == SPIRV::ASSIGN_TYPE) { // These pseudos aren't needed any more.
      auto *Def = MRI->getVRegDef(I.getOperand(1).getReg());
      if (isTypeFoldingSupported(Def->getOpcode())) {
        auto Res = selectImpl(I, *CoverageInfo);
        assert(Res || Def->getOpcode() == TargetOpcode::G_CONSTANT);
        if (Res)
          return Res;
      }
      MRI->replaceRegWith(I.getOperand(1).getReg(), I.getOperand(0).getReg());
      I.removeFromParent();
    } else if (I.getNumDefs() == 1) {
      // Make all vregs 32 bits (for SPIR-V IDs).
      MRI->setType(I.getOperand(0).getReg(), LLT::scalar(32));
    }
    return true;
  }

  if (I.getNumOperands() != I.getNumExplicitOperands()) {
    LLVM_DEBUG(errs() << "Generic instr has unexpected implicit operands\n");
    return false;
  }

  // Common code for getting return reg+type, and removing selected instr
  // from parent occurs here. Instr-specific selection happens in spvSelect().
  bool HasDefs = I.getNumDefs() > 0;
  Register ResVReg = HasDefs ? I.getOperand(0).getReg() : Register(0);
  SPIRVType *ResType = HasDefs ? GR.getSPIRVTypeForVReg(ResVReg) : nullptr;
  assert(!HasDefs || ResType || I.getOpcode() == TargetOpcode::G_GLOBAL_VALUE);
  if (spvSelect(ResVReg, ResType, I)) {
    if (HasDefs) // Make all vregs 32 bits (for SPIR-V IDs).
      MRI->setType(ResVReg, LLT::scalar(32));
    I.removeFromParent();
    return true;
  }
  return false;
}

bool SPIRVInstructionSelector::spvSelect(Register ResVReg,
                                         const SPIRVType *ResType,
                                         MachineInstr &I) const {
  assert(!isTypeFoldingSupported(I.getOpcode()) ||
         I.getOpcode() == TargetOpcode::G_CONSTANT);
  const unsigned Opcode = I.getOpcode();
  switch (Opcode) {
  case TargetOpcode::G_CONSTANT:
    return selectConst(ResVReg, ResType, I.getOperand(1).getCImm()->getValue(),
                       I);
  case TargetOpcode::G_GLOBAL_VALUE:
    return selectGlobalValue(ResVReg, I);
  case TargetOpcode::G_IMPLICIT_DEF:
    return selectOpUndef(ResVReg, ResType, I);

  case TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS:
    return selectIntrinsic(ResVReg, ResType, I);
  case TargetOpcode::G_BITREVERSE:
    return selectBitreverse(ResVReg, ResType, I);

  case TargetOpcode::G_BUILD_VECTOR:
    return selectConstVector(ResVReg, ResType, I);

  case TargetOpcode::G_SHUFFLE_VECTOR: {
    MachineBasicBlock &BB = *I.getParent();
    auto MIB = BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpVectorShuffle))
                   .addDef(ResVReg)
                   .addUse(GR.getSPIRVTypeID(ResType))
                   .addUse(I.getOperand(1).getReg())
                   .addUse(I.getOperand(2).getReg());
    for (auto V : I.getOperand(3).getShuffleMask())
      MIB.addImm(V);
    return MIB.constrainAllUses(TII, TRI, RBI);
  }
  case TargetOpcode::G_MEMMOVE:
  case TargetOpcode::G_MEMCPY:
    return selectMemOperation(ResVReg, I);

  case TargetOpcode::G_ICMP:
    return selectICmp(ResVReg, ResType, I);
  case TargetOpcode::G_FCMP:
    return selectFCmp(ResVReg, ResType, I);

  case TargetOpcode::G_FRAME_INDEX:
    return selectFrameIndex(ResVReg, ResType, I);

  case TargetOpcode::G_LOAD:
    return selectLoad(ResVReg, ResType, I);
  case TargetOpcode::G_STORE:
    return selectStore(I);

  case TargetOpcode::G_BR:
    return selectBranch(I);
  case TargetOpcode::G_BRCOND:
    return selectBranchCond(I);

  case TargetOpcode::G_PHI:
    return selectPhi(ResVReg, ResType, I);

  case TargetOpcode::G_FPTOSI:
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpConvertFToS);
  case TargetOpcode::G_FPTOUI:
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpConvertFToU);

  case TargetOpcode::G_SITOFP:
    return selectIToF(ResVReg, ResType, I, true, SPIRV::OpConvertSToF);
  case TargetOpcode::G_UITOFP:
    return selectIToF(ResVReg, ResType, I, false, SPIRV::OpConvertUToF);

  case TargetOpcode::G_CTPOP:
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpBitCount);

  case TargetOpcode::G_SEXT:
    return selectExt(ResVReg, ResType, I, true);
  case TargetOpcode::G_ANYEXT:
  case TargetOpcode::G_ZEXT:
    return selectExt(ResVReg, ResType, I, false);
  case TargetOpcode::G_TRUNC:
    return selectTrunc(ResVReg, ResType, I);
  case TargetOpcode::G_FPTRUNC:
  case TargetOpcode::G_FPEXT:
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpFConvert);

  case TargetOpcode::G_PTRTOINT:
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpConvertPtrToU);
  case TargetOpcode::G_INTTOPTR:
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpConvertUToPtr);
  case TargetOpcode::G_BITCAST:
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpBitcast);
  case TargetOpcode::G_ADDRSPACE_CAST:
    return selectAddrSpaceCast(ResVReg, ResType, I);

  case TargetOpcode::G_ATOMICRMW_OR:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicOr);
  case TargetOpcode::G_ATOMICRMW_ADD:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicIAdd);
  case TargetOpcode::G_ATOMICRMW_AND:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicAnd);
  case TargetOpcode::G_ATOMICRMW_MAX:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicSMax);
  case TargetOpcode::G_ATOMICRMW_MIN:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicSMin);
  case TargetOpcode::G_ATOMICRMW_SUB:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicISub);
  case TargetOpcode::G_ATOMICRMW_XOR:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicXor);
  case TargetOpcode::G_ATOMICRMW_UMAX:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicUMax);
  case TargetOpcode::G_ATOMICRMW_UMIN:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicUMin);
  case TargetOpcode::G_ATOMICRMW_XCHG:
    return selectAtomicRMW(ResVReg, ResType, I, SPIRV::OpAtomicExchange);
  case TargetOpcode::G_ATOMIC_CMPXCHG:
    return selectAtomicCmpXchg(ResVReg, ResType, I);

  case TargetOpcode::G_FENCE:
    return selectFence(I);

  default:
    return false;
  }
}

bool SPIRVInstructionSelector::selectUnOpWithSrc(Register ResVReg,
                                                 const SPIRVType *ResType,
                                                 MachineInstr &I,
                                                 Register SrcReg,
                                                 unsigned Opcode) const {
  return BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(Opcode))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addUse(SrcReg)
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectUnOp(Register ResVReg,
                                          const SPIRVType *ResType,
                                          MachineInstr &I,
                                          unsigned Opcode) const {
  return selectUnOpWithSrc(ResVReg, ResType, I, I.getOperand(1).getReg(),
                           Opcode);
}

static SPIRV::MemorySemantics getMemSemantics(AtomicOrdering Ord) {
  switch (Ord) {
  case AtomicOrdering::Acquire:
    return SPIRV::MemorySemantics::Acquire;
  case AtomicOrdering::Release:
    return SPIRV::MemorySemantics::Release;
  case AtomicOrdering::AcquireRelease:
    return SPIRV::MemorySemantics::AcquireRelease;
  case AtomicOrdering::SequentiallyConsistent:
    return SPIRV::MemorySemantics::SequentiallyConsistent;
  case AtomicOrdering::Unordered:
  case AtomicOrdering::Monotonic:
  case AtomicOrdering::NotAtomic:
    return SPIRV::MemorySemantics::None;
  }
}

static SPIRV::Scope getScope(SyncScope::ID Ord) {
  switch (Ord) {
  case SyncScope::SingleThread:
    return SPIRV::Scope::Invocation;
  case SyncScope::System:
    return SPIRV::Scope::Device;
  default:
    llvm_unreachable("Unsupported synchronization Scope ID.");
  }
}

static void addMemoryOperands(MachineMemOperand *MemOp,
                              MachineInstrBuilder &MIB) {
  uint32_t SpvMemOp = static_cast<uint32_t>(SPIRV::MemoryOperand::None);
  if (MemOp->isVolatile())
    SpvMemOp |= static_cast<uint32_t>(SPIRV::MemoryOperand::Volatile);
  if (MemOp->isNonTemporal())
    SpvMemOp |= static_cast<uint32_t>(SPIRV::MemoryOperand::Nontemporal);
  if (MemOp->getAlign().value())
    SpvMemOp |= static_cast<uint32_t>(SPIRV::MemoryOperand::Aligned);

  if (SpvMemOp != static_cast<uint32_t>(SPIRV::MemoryOperand::None)) {
    MIB.addImm(SpvMemOp);
    if (SpvMemOp & static_cast<uint32_t>(SPIRV::MemoryOperand::Aligned))
      MIB.addImm(MemOp->getAlign().value());
  }
}

static void addMemoryOperands(uint64_t Flags, MachineInstrBuilder &MIB) {
  uint32_t SpvMemOp = static_cast<uint32_t>(SPIRV::MemoryOperand::None);
  if (Flags & MachineMemOperand::Flags::MOVolatile)
    SpvMemOp |= static_cast<uint32_t>(SPIRV::MemoryOperand::Volatile);
  if (Flags & MachineMemOperand::Flags::MONonTemporal)
    SpvMemOp |= static_cast<uint32_t>(SPIRV::MemoryOperand::Nontemporal);

  if (SpvMemOp != static_cast<uint32_t>(SPIRV::MemoryOperand::None))
    MIB.addImm(SpvMemOp);
}

bool SPIRVInstructionSelector::selectLoad(Register ResVReg,
                                          const SPIRVType *ResType,
                                          MachineInstr &I) const {
  unsigned OpOffset =
      I.getOpcode() == TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS ? 1 : 0;
  Register Ptr = I.getOperand(1 + OpOffset).getReg();
  auto MIB = BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(SPIRV::OpLoad))
                 .addDef(ResVReg)
                 .addUse(GR.getSPIRVTypeID(ResType))
                 .addUse(Ptr);
  if (!I.getNumMemOperands()) {
    assert(I.getOpcode() == TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS);
    addMemoryOperands(I.getOperand(2 + OpOffset).getImm(), MIB);
  } else {
    addMemoryOperands(*I.memoperands_begin(), MIB);
  }
  return MIB.constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectStore(MachineInstr &I) const {
  unsigned OpOffset =
      I.getOpcode() == TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS ? 1 : 0;
  Register StoreVal = I.getOperand(0 + OpOffset).getReg();
  Register Ptr = I.getOperand(1 + OpOffset).getReg();
  MachineBasicBlock &BB = *I.getParent();
  auto MIB = BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpStore))
                 .addUse(Ptr)
                 .addUse(StoreVal);
  if (!I.getNumMemOperands()) {
    assert(I.getOpcode() == TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS);
    addMemoryOperands(I.getOperand(2 + OpOffset).getImm(), MIB);
  } else {
    addMemoryOperands(*I.memoperands_begin(), MIB);
  }
  return MIB.constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectMemOperation(Register ResVReg,
                                                  MachineInstr &I) const {
  MachineBasicBlock &BB = *I.getParent();
  auto MIB = BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpCopyMemorySized))
                 .addDef(I.getOperand(0).getReg())
                 .addUse(I.getOperand(1).getReg())
                 .addUse(I.getOperand(2).getReg());
  if (I.getNumMemOperands())
    addMemoryOperands(*I.memoperands_begin(), MIB);
  bool Result = MIB.constrainAllUses(TII, TRI, RBI);
  if (ResVReg.isValid() && ResVReg != MIB->getOperand(0).getReg()) {
    BuildMI(BB, I, I.getDebugLoc(), TII.get(TargetOpcode::COPY), ResVReg)
        .addUse(MIB->getOperand(0).getReg());
  }
  return Result;
}

bool SPIRVInstructionSelector::selectAtomicRMW(Register ResVReg,
                                               const SPIRVType *ResType,
                                               MachineInstr &I,
                                               unsigned NewOpcode) const {
  assert(I.hasOneMemOperand());
  const MachineMemOperand *MemOp = *I.memoperands_begin();
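  // The MMO's sync scope and atomic ordering become the explicit Scope and
  // Memory Semantics <id> operands of the SPIR-V atomic instruction, each
  // materialized as a 32-bit constant below.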
  uint32_t Scope = static_cast<uint32_t>(getScope(MemOp->getSyncScopeID()));
  Register ScopeReg = buildI32Constant(Scope, I);

  Register Ptr = I.getOperand(1).getReg();
  // TODO: Changed as it's implemented in the translator. See test/atomicrmw.ll
  // auto ScSem =
  // getMemSemanticsForStorageClass(GR.getPointerStorageClass(Ptr));
  AtomicOrdering AO = MemOp->getSuccessOrdering();
  uint32_t MemSem = static_cast<uint32_t>(getMemSemantics(AO));
  Register MemSemReg = buildI32Constant(MemSem /*| ScSem*/, I);

  return BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(NewOpcode))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addUse(Ptr)
      .addUse(ScopeReg)
      .addUse(MemSemReg)
      .addUse(I.getOperand(2).getReg())
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectFence(MachineInstr &I) const {
  AtomicOrdering AO = AtomicOrdering(I.getOperand(0).getImm());
  uint32_t MemSem = static_cast<uint32_t>(getMemSemantics(AO));
  Register MemSemReg = buildI32Constant(MemSem, I);
  SyncScope::ID Ord = SyncScope::ID(I.getOperand(1).getImm());
  uint32_t Scope = static_cast<uint32_t>(getScope(Ord));
  Register ScopeReg = buildI32Constant(Scope, I);
  MachineBasicBlock &BB = *I.getParent();
  return BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpMemoryBarrier))
      .addUse(ScopeReg)
      .addUse(MemSemReg)
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectAtomicCmpXchg(Register ResVReg,
                                                   const SPIRVType *ResType,
                                                   MachineInstr &I) const {
  assert(I.hasOneMemOperand());
  const MachineMemOperand *MemOp = *I.memoperands_begin();
  uint32_t Scope = static_cast<uint32_t>(getScope(MemOp->getSyncScopeID()));
  Register ScopeReg = buildI32Constant(Scope, I);

  Register Ptr = I.getOperand(2).getReg();
  Register Cmp = I.getOperand(3).getReg();
  Register Val = I.getOperand(4).getReg();

  SPIRVType *SpvValTy = GR.getSPIRVTypeForVReg(Val);
  SPIRV::StorageClass SC = GR.getPointerStorageClass(Ptr);
  uint32_t ScSem = static_cast<uint32_t>(getMemSemanticsForStorageClass(SC));
  AtomicOrdering AO = MemOp->getSuccessOrdering();
  uint32_t MemSemEq = static_cast<uint32_t>(getMemSemantics(AO)) | ScSem;
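  // OpAtomicCompareExchange takes separate Memory Semantics operands for the
  // success ("Equal") and failure ("Unequal") paths; the same constant is
  // reused when the two orderings map to identical semantics.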
  Register MemSemEqReg = buildI32Constant(MemSemEq, I);
  AtomicOrdering FO = MemOp->getFailureOrdering();
  uint32_t MemSemNeq = static_cast<uint32_t>(getMemSemantics(FO)) | ScSem;
  Register MemSemNeqReg =
      MemSemEq == MemSemNeq ? MemSemEqReg : buildI32Constant(MemSemNeq, I);
  const DebugLoc &DL = I.getDebugLoc();
  return BuildMI(*I.getParent(), I, DL, TII.get(SPIRV::OpAtomicCompareExchange))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(SpvValTy))
      .addUse(Ptr)
      .addUse(ScopeReg)
      .addUse(MemSemEqReg)
      .addUse(MemSemNeqReg)
      .addUse(Val)
      .addUse(Cmp)
      .constrainAllUses(TII, TRI, RBI);
}

static bool isGenericCastablePtr(SPIRV::StorageClass SC) {
  switch (SC) {
  case SPIRV::StorageClass::Workgroup:
  case SPIRV::StorageClass::CrossWorkgroup:
  case SPIRV::StorageClass::Function:
    return true;
  default:
    return false;
  }
}

// In SPIR-V, address space casting can only happen to and from the Generic
// storage class. We can also only cast Workgroup, CrossWorkgroup, or Function
// pointers to and from Generic pointers. As such, we can convert e.g. from
// Workgroup to Function by going via a Generic pointer as an intermediary. All
// other combinations can only be done by a bitcast, and are probably not safe.
bool SPIRVInstructionSelector::selectAddrSpaceCast(Register ResVReg,
                                                   const SPIRVType *ResType,
                                                   MachineInstr &I) const {
  Register SrcPtr = I.getOperand(1).getReg();
  SPIRVType *SrcPtrTy = GR.getSPIRVTypeForVReg(SrcPtr);
  SPIRV::StorageClass SrcSC = GR.getPointerStorageClass(SrcPtr);
  SPIRV::StorageClass DstSC = GR.getPointerStorageClass(ResVReg);

  // Casting from an eligible pointer to Generic.
  if (DstSC == SPIRV::StorageClass::Generic && isGenericCastablePtr(SrcSC))
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpPtrCastToGeneric);
  // Casting from Generic to an eligible pointer.
  if (SrcSC == SPIRV::StorageClass::Generic && isGenericCastablePtr(DstSC))
    return selectUnOp(ResVReg, ResType, I, SPIRV::OpGenericCastToPtr);
  // Casting between two eligible pointers using Generic as an intermediary.
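  // For example, a Workgroup-to-Function cast is emitted as a two-instruction
  // sequence roughly like this (value names are illustrative only):
  //   %tmp = OpPtrCastToGeneric %GenericPtrTy %src  ; Workgroup -> Generic
  //   %res = OpGenericCastToPtr %ResType      %tmp  ; Generic -> Function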
  if (isGenericCastablePtr(SrcSC) && isGenericCastablePtr(DstSC)) {
    Register Tmp = MRI->createVirtualRegister(&SPIRV::IDRegClass);
    SPIRVType *GenericPtrTy = GR.getOrCreateSPIRVPointerType(
        SrcPtrTy, I, TII, SPIRV::StorageClass::Generic);
    MachineBasicBlock &BB = *I.getParent();
    const DebugLoc &DL = I.getDebugLoc();
    bool Success = BuildMI(BB, I, DL, TII.get(SPIRV::OpPtrCastToGeneric))
                       .addDef(Tmp)
                       .addUse(GR.getSPIRVTypeID(GenericPtrTy))
                       .addUse(SrcPtr)
                       .constrainAllUses(TII, TRI, RBI);
    return Success && BuildMI(BB, I, DL, TII.get(SPIRV::OpGenericCastToPtr))
                          .addDef(ResVReg)
                          .addUse(GR.getSPIRVTypeID(ResType))
                          .addUse(Tmp)
                          .constrainAllUses(TII, TRI, RBI);
  }
  // TODO: Should this case just be disallowed completely?
  // We're casting between two other arbitrary address spaces, so we have to
  // bitcast.
  return selectUnOp(ResVReg, ResType, I, SPIRV::OpBitcast);
}

static unsigned getFCmpOpcode(unsigned PredNum) {
  auto Pred = static_cast<CmpInst::Predicate>(PredNum);
  switch (Pred) {
  case CmpInst::FCMP_OEQ:
    return SPIRV::OpFOrdEqual;
  case CmpInst::FCMP_OGE:
    return SPIRV::OpFOrdGreaterThanEqual;
  case CmpInst::FCMP_OGT:
    return SPIRV::OpFOrdGreaterThan;
  case CmpInst::FCMP_OLE:
    return SPIRV::OpFOrdLessThanEqual;
  case CmpInst::FCMP_OLT:
    return SPIRV::OpFOrdLessThan;
  case CmpInst::FCMP_ONE:
    return SPIRV::OpFOrdNotEqual;
  case CmpInst::FCMP_ORD:
    return SPIRV::OpOrdered;
  case CmpInst::FCMP_UEQ:
    return SPIRV::OpFUnordEqual;
  case CmpInst::FCMP_UGE:
    return SPIRV::OpFUnordGreaterThanEqual;
  case CmpInst::FCMP_UGT:
    return SPIRV::OpFUnordGreaterThan;
  case CmpInst::FCMP_ULE:
    return SPIRV::OpFUnordLessThanEqual;
  case CmpInst::FCMP_ULT:
    return SPIRV::OpFUnordLessThan;
  case CmpInst::FCMP_UNE:
    return SPIRV::OpFUnordNotEqual;
  case CmpInst::FCMP_UNO:
    return SPIRV::OpUnordered;
  default:
    llvm_unreachable("Unknown predicate type for FCmp");
  }
}

static unsigned getICmpOpcode(unsigned PredNum) {
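  // Map an LLVM integer-compare predicate to the corresponding SPIR-V
  // comparison opcode.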
  auto Pred = static_cast<CmpInst::Predicate>(PredNum);
  switch (Pred) {
  case CmpInst::ICMP_EQ:
    return SPIRV::OpIEqual;
  case CmpInst::ICMP_NE:
    return SPIRV::OpINotEqual;
  case CmpInst::ICMP_SGE:
    return SPIRV::OpSGreaterThanEqual;
  case CmpInst::ICMP_SGT:
    return SPIRV::OpSGreaterThan;
  case CmpInst::ICMP_SLE:
    return SPIRV::OpSLessThanEqual;
  case CmpInst::ICMP_SLT:
    return SPIRV::OpSLessThan;
  case CmpInst::ICMP_UGE:
    return SPIRV::OpUGreaterThanEqual;
  case CmpInst::ICMP_UGT:
    return SPIRV::OpUGreaterThan;
  case CmpInst::ICMP_ULE:
    return SPIRV::OpULessThanEqual;
  case CmpInst::ICMP_ULT:
    return SPIRV::OpULessThan;
  default:
    llvm_unreachable("Unknown predicate type for ICmp");
  }
}

static unsigned getPtrCmpOpcode(unsigned Pred) {
  switch (static_cast<CmpInst::Predicate>(Pred)) {
  case CmpInst::ICMP_EQ:
    return SPIRV::OpPtrEqual;
  case CmpInst::ICMP_NE:
    return SPIRV::OpPtrNotEqual;
  default:
    llvm_unreachable("Unknown predicate type for pointer comparison");
  }
}

// Return the logical operation, or abort if none exists.
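// Bool operands use the logical (OpLogical*) comparison forms rather than the
// integer opcodes returned by getICmpOpcode.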
static unsigned getBoolCmpOpcode(unsigned PredNum) {
  auto Pred = static_cast<CmpInst::Predicate>(PredNum);
  switch (Pred) {
  case CmpInst::ICMP_EQ:
    return SPIRV::OpLogicalEqual;
  case CmpInst::ICMP_NE:
    return SPIRV::OpLogicalNotEqual;
  default:
    llvm_unreachable("Unknown predicate type for Bool comparison");
  }
}

bool SPIRVInstructionSelector::selectBitreverse(Register ResVReg,
                                                const SPIRVType *ResType,
                                                MachineInstr &I) const {
  MachineBasicBlock &BB = *I.getParent();
  return BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpBitReverse))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addUse(I.getOperand(1).getReg())
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectConstVector(Register ResVReg,
                                                 const SPIRVType *ResType,
                                                 MachineInstr &I) const {
  // TODO: only const case is supported for now.
  assert(std::all_of(
      I.operands_begin(), I.operands_end(), [this](const MachineOperand &MO) {
        if (MO.isDef())
          return true;
        if (!MO.isReg())
          return false;
        SPIRVType *ConstTy = this->MRI->getVRegDef(MO.getReg());
        assert(ConstTy && ConstTy->getOpcode() == SPIRV::ASSIGN_TYPE &&
               ConstTy->getOperand(1).isReg());
        Register ConstReg = ConstTy->getOperand(1).getReg();
        const MachineInstr *Const = this->MRI->getVRegDef(ConstReg);
        assert(Const);
        return (Const->getOpcode() == TargetOpcode::G_CONSTANT ||
                Const->getOpcode() == TargetOpcode::G_FCONSTANT);
      }));

  auto MIB = BuildMI(*I.getParent(), I, I.getDebugLoc(),
                     TII.get(SPIRV::OpConstantComposite))
                 .addDef(ResVReg)
                 .addUse(GR.getSPIRVTypeID(ResType));
  for (unsigned i = I.getNumExplicitDefs(); i < I.getNumExplicitOperands(); ++i)
    MIB.addUse(I.getOperand(i).getReg());
  return MIB.constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectCmp(Register ResVReg,
                                         const SPIRVType *ResType,
                                         unsigned CmpOpc,
                                         MachineInstr &I) const {
  Register Cmp0 = I.getOperand(2).getReg();
  Register Cmp1 = I.getOperand(3).getReg();
  assert(GR.getSPIRVTypeForVReg(Cmp0)->getOpcode() ==
             GR.getSPIRVTypeForVReg(Cmp1)->getOpcode() &&
         "CMP operands should have the same type");
  return BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(CmpOpc))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addUse(Cmp0)
      .addUse(Cmp1)
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectICmp(Register ResVReg,
                                          const SPIRVType *ResType,
                                          MachineInstr &I) const {
  auto Pred = I.getOperand(1).getPredicate();
  unsigned CmpOpc;

  Register CmpOperand = I.getOperand(2).getReg();
  if (GR.isScalarOfType(CmpOperand, SPIRV::OpTypePointer))
    CmpOpc = getPtrCmpOpcode(Pred);
  else if (GR.isScalarOrVectorOfType(CmpOperand, SPIRV::OpTypeBool))
    CmpOpc = getBoolCmpOpcode(Pred);
  else
    CmpOpc = getICmpOpcode(Pred);
  return selectCmp(ResVReg, ResType, CmpOpc, I);
}

void SPIRVInstructionSelector::renderFImm32(MachineInstrBuilder &MIB,
                                            const MachineInstr &I,
                                            int OpIdx) const {
  assert(I.getOpcode() == TargetOpcode::G_FCONSTANT && OpIdx == -1 &&
         "Expected G_FCONSTANT");
  const ConstantFP *FPImm = I.getOperand(1).getFPImm();
  addNumImm(FPImm->getValueAPF().bitcastToAPInt(), MIB);
}

void SPIRVInstructionSelector::renderImm32(MachineInstrBuilder &MIB,
                                           const MachineInstr &I,
                                           int OpIdx) const {
  assert(I.getOpcode() == TargetOpcode::G_CONSTANT && OpIdx == -1 &&
         "Expected G_CONSTANT");
  addNumImm(I.getOperand(1).getCImm()->getValue(), MIB);
}

Register
SPIRVInstructionSelector::buildI32Constant(uint32_t Val, MachineInstr &I,
                                           const SPIRVType *ResType) const {
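  // Emit Val as an OpConstantNull (when zero) or an OpConstantI, typed as
  // ResType when one is supplied and as a fresh 32-bit integer type otherwise.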
  const SPIRVType *SpvI32Ty =
      ResType ? ResType : GR.getOrCreateSPIRVIntegerType(32, I, TII);
  Register NewReg = MRI->createGenericVirtualRegister(LLT::scalar(32));
  MachineInstr *MI;
  MachineBasicBlock &BB = *I.getParent();
  if (Val == 0) {
    MI = BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpConstantNull))
             .addDef(NewReg)
             .addUse(GR.getSPIRVTypeID(SpvI32Ty));
  } else {
    MI = BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpConstantI))
             .addDef(NewReg)
             .addUse(GR.getSPIRVTypeID(SpvI32Ty))
             .addImm(APInt(32, Val).getZExtValue());
  }
  constrainSelectedInstRegOperands(*MI, TII, TRI, RBI);
  return NewReg;
}

bool SPIRVInstructionSelector::selectFCmp(Register ResVReg,
                                          const SPIRVType *ResType,
                                          MachineInstr &I) const {
  unsigned CmpOp = getFCmpOpcode(I.getOperand(1).getPredicate());
  return selectCmp(ResVReg, ResType, CmpOp, I);
}

Register SPIRVInstructionSelector::buildZerosVal(const SPIRVType *ResType,
                                                 MachineInstr &I) const {
  return buildI32Constant(0, I, ResType);
}

Register SPIRVInstructionSelector::buildOnesVal(bool AllOnes,
                                                const SPIRVType *ResType,
                                                MachineInstr &I) const {
  unsigned BitWidth = GR.getScalarOrVectorBitWidth(ResType);
  APInt One = AllOnes ? APInt::getAllOnesValue(BitWidth)
                      : APInt::getOneBitSet(BitWidth, 0);
  Register OneReg = buildI32Constant(One.getZExtValue(), I, ResType);
  if (ResType->getOpcode() == SPIRV::OpTypeVector) {
    const unsigned NumEles = ResType->getOperand(2).getImm();
    Register OneVec = MRI->createVirtualRegister(&SPIRV::IDRegClass);
    unsigned Opcode = SPIRV::OpConstantComposite;
    auto MIB = BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(Opcode))
                   .addDef(OneVec)
                   .addUse(GR.getSPIRVTypeID(ResType));
    for (unsigned i = 0; i < NumEles; ++i)
      MIB.addUse(OneReg);
    constrainSelectedInstRegOperands(*MIB, TII, TRI, RBI);
    return OneVec;
  }
  return OneReg;
}

bool SPIRVInstructionSelector::selectSelect(Register ResVReg,
                                            const SPIRVType *ResType,
                                            MachineInstr &I,
                                            bool IsSigned) const {
  // To extend a bool, we need to use OpSelect between constants.
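  // For a scalar i1 -> iN extension this boils down to roughly the following
  // SPIR-V (value names are illustrative only):
  //   %res = OpSelect %IntNTy %cond %ones %zero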
  Register ZeroReg = buildZerosVal(ResType, I);
  Register OneReg = buildOnesVal(IsSigned, ResType, I);
  bool IsScalarBool =
      GR.isScalarOfType(I.getOperand(1).getReg(), SPIRV::OpTypeBool);
  unsigned Opcode =
      IsScalarBool ? SPIRV::OpSelectSISCond : SPIRV::OpSelectSIVCond;
  return BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(Opcode))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addUse(I.getOperand(1).getReg())
      .addUse(OneReg)
      .addUse(ZeroReg)
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectIToF(Register ResVReg,
                                          const SPIRVType *ResType,
                                          MachineInstr &I, bool IsSigned,
                                          unsigned Opcode) const {
  Register SrcReg = I.getOperand(1).getReg();
  // We could convert a bool value directly to a float type without
  // OpConvert*ToF; however, the translator generates OpSelect+OpConvert*ToF,
  // so we do the same.
  if (GR.isScalarOrVectorOfType(I.getOperand(1).getReg(), SPIRV::OpTypeBool)) {
    unsigned BitWidth = GR.getScalarOrVectorBitWidth(ResType);
    SPIRVType *TmpType = GR.getOrCreateSPIRVIntegerType(BitWidth, I, TII);
    if (ResType->getOpcode() == SPIRV::OpTypeVector) {
      const unsigned NumElts = ResType->getOperand(2).getImm();
      TmpType = GR.getOrCreateSPIRVVectorType(TmpType, NumElts, I, TII);
    }
    SrcReg = MRI->createVirtualRegister(&SPIRV::IDRegClass);
    selectSelect(SrcReg, TmpType, I, false);
  }
  return selectUnOpWithSrc(ResVReg, ResType, I, SrcReg, Opcode);
}

bool SPIRVInstructionSelector::selectExt(Register ResVReg,
                                         const SPIRVType *ResType,
                                         MachineInstr &I, bool IsSigned) const {
  if (GR.isScalarOrVectorOfType(I.getOperand(1).getReg(), SPIRV::OpTypeBool))
    return selectSelect(ResVReg, ResType, I, IsSigned);
  unsigned Opcode = IsSigned ? SPIRV::OpSConvert : SPIRV::OpUConvert;
  return selectUnOp(ResVReg, ResType, I, Opcode);
}

bool SPIRVInstructionSelector::selectIntToBool(Register IntReg,
                                               Register ResVReg,
                                               const SPIRVType *IntTy,
                                               const SPIRVType *BoolTy,
                                               MachineInstr &I) const {
  // To truncate to a bool, we use OpBitwiseAnd 1 and OpINotEqual to zero.
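  // Roughly (value names are illustrative only):
  //   %masked = OpBitwiseAnd %IntTy  %val    %one
  //   %res    = OpINotEqual  %BoolTy %masked %zero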
  Register BitIntReg = MRI->createVirtualRegister(&SPIRV::IDRegClass);
  bool IsVectorTy = IntTy->getOpcode() == SPIRV::OpTypeVector;
  unsigned Opcode = IsVectorTy ? SPIRV::OpBitwiseAndV : SPIRV::OpBitwiseAndS;
  Register Zero = buildZerosVal(IntTy, I);
  Register One = buildOnesVal(false, IntTy, I);
  MachineBasicBlock &BB = *I.getParent();
  BuildMI(BB, I, I.getDebugLoc(), TII.get(Opcode))
      .addDef(BitIntReg)
      .addUse(GR.getSPIRVTypeID(IntTy))
      .addUse(IntReg)
      .addUse(One)
      .constrainAllUses(TII, TRI, RBI);
  return BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpINotEqual))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(BoolTy))
      .addUse(BitIntReg)
      .addUse(Zero)
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectTrunc(Register ResVReg,
                                           const SPIRVType *ResType,
                                           MachineInstr &I) const {
  if (GR.isScalarOrVectorOfType(ResVReg, SPIRV::OpTypeBool)) {
    Register IntReg = I.getOperand(1).getReg();
    const SPIRVType *ArgType = GR.getSPIRVTypeForVReg(IntReg);
    return selectIntToBool(IntReg, ResVReg, ArgType, ResType, I);
  }
  bool IsSigned = GR.isScalarOrVectorSigned(ResType);
  unsigned Opcode = IsSigned ? SPIRV::OpSConvert : SPIRV::OpUConvert;
  return selectUnOp(ResVReg, ResType, I, Opcode);
}

bool SPIRVInstructionSelector::selectConst(Register ResVReg,
                                           const SPIRVType *ResType,
                                           const APInt &Imm,
                                           MachineInstr &I) const {
  assert(ResType->getOpcode() != SPIRV::OpTypePointer || Imm.isNullValue());
  MachineBasicBlock &BB = *I.getParent();
  if (ResType->getOpcode() == SPIRV::OpTypePointer && Imm.isNullValue()) {
    return BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpConstantNull))
        .addDef(ResVReg)
        .addUse(GR.getSPIRVTypeID(ResType))
        .constrainAllUses(TII, TRI, RBI);
  }
  auto MIB = BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpConstantI))
                 .addDef(ResVReg)
                 .addUse(GR.getSPIRVTypeID(ResType));
  // Integers of 32 bits or fewer should be caught by the imported SDAG
  // patterns.
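  // What remains here are wider constants, e.g. a 64-bit value, which (as far
  // as the SPIR-V literal rules go) is emitted as a single OpConstantI whose
  // literal spans two 32-bit words, low-order word first; addNumImm below is
  // expected to take care of that splitting.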
  assert(Imm.getBitWidth() > 32);
  addNumImm(Imm, MIB);
  return MIB.constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectOpUndef(Register ResVReg,
                                             const SPIRVType *ResType,
                                             MachineInstr &I) const {
  return BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(SPIRV::OpUndef))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .constrainAllUses(TII, TRI, RBI);
}

static bool isImm(const MachineOperand &MO, MachineRegisterInfo *MRI) {
  assert(MO.isReg());
  const SPIRVType *TypeInst = MRI->getVRegDef(MO.getReg());
  if (TypeInst->getOpcode() != SPIRV::ASSIGN_TYPE)
    return false;
  assert(TypeInst->getOperand(1).isReg());
  MachineInstr *ImmInst = MRI->getVRegDef(TypeInst->getOperand(1).getReg());
  return ImmInst->getOpcode() == TargetOpcode::G_CONSTANT;
}

static int64_t foldImm(const MachineOperand &MO, MachineRegisterInfo *MRI) {
  const SPIRVType *TypeInst = MRI->getVRegDef(MO.getReg());
  MachineInstr *ImmInst = MRI->getVRegDef(TypeInst->getOperand(1).getReg());
  assert(ImmInst->getOpcode() == TargetOpcode::G_CONSTANT);
  return ImmInst->getOperand(1).getCImm()->getZExtValue();
}

bool SPIRVInstructionSelector::selectInsertVal(Register ResVReg,
                                               const SPIRVType *ResType,
                                               MachineInstr &I) const {
  MachineBasicBlock &BB = *I.getParent();
  return BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpCompositeInsert))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      // Object to insert.
      .addUse(I.getOperand(3).getReg())
      // Composite to insert into.
      .addUse(I.getOperand(2).getReg())
      // TODO: support an arbitrary number of indices.
      .addImm(foldImm(I.getOperand(4), MRI))
      .constrainAllUses(TII, TRI, RBI);
}
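
// Illustrative result of the composite insert above and the extract below
// (value names are made up): inserting %obj at index 1 of aggregate %agg gives
//   %res = OpCompositeInsert %aggty %obj %agg 1
// and extracting that element back is
//   %elt = OpCompositeExtract %eltty %agg 1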
bool SPIRVInstructionSelector::selectExtractVal(Register ResVReg,
                                                const SPIRVType *ResType,
                                                MachineInstr &I) const {
  MachineBasicBlock &BB = *I.getParent();
  return BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpCompositeExtract))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addUse(I.getOperand(2).getReg())
      // TODO: support an arbitrary number of indices.
      .addImm(foldImm(I.getOperand(3), MRI))
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectInsertElt(Register ResVReg,
                                               const SPIRVType *ResType,
                                               MachineInstr &I) const {
  if (isImm(I.getOperand(4), MRI))
    return selectInsertVal(ResVReg, ResType, I);
  MachineBasicBlock &BB = *I.getParent();
  return BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpVectorInsertDynamic))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addUse(I.getOperand(2).getReg())
      .addUse(I.getOperand(3).getReg())
      .addUse(I.getOperand(4).getReg())
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectExtractElt(Register ResVReg,
                                                const SPIRVType *ResType,
                                                MachineInstr &I) const {
  if (isImm(I.getOperand(3), MRI))
    return selectExtractVal(ResVReg, ResType, I);
  MachineBasicBlock &BB = *I.getParent();
  return BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpVectorExtractDynamic))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addUse(I.getOperand(2).getReg())
      .addUse(I.getOperand(3).getReg())
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectGEP(Register ResVReg,
                                         const SPIRVType *ResType,
                                         MachineInstr &I) const {
  // In general we should also support OpAccessChain instructions here (i.e.
  // not only PtrAccessChain), but the SPIRV-LLVM Translator doesn't emit them
  // at all, so neither do we, to stay compatible with its tests and, more
  // importantly, with its consumers.
  unsigned Opcode = I.getOperand(2).getImm() ? SPIRV::OpInBoundsPtrAccessChain
                                             : SPIRV::OpPtrAccessChain;
  auto Res = BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(Opcode))
                 .addDef(ResVReg)
                 .addUse(GR.getSPIRVTypeID(ResType))
                 // Object to get a pointer to.
                 .addUse(I.getOperand(3).getReg());
  // Add the indices.
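  // Illustrative output for a GEP with two indices (value names made up):
  //   %ptr = OpInBoundsPtrAccessChain %ptrty %base %idx0 %idx1
  // or the plain OpPtrAccessChain form when the immediate flag operand
  // (presumably carrying the GEP's inbounds bit) is zero.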
  for (unsigned i = 4; i < I.getNumExplicitOperands(); ++i)
    Res.addUse(I.getOperand(i).getReg());
  return Res.constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectIntrinsic(Register ResVReg,
                                               const SPIRVType *ResType,
                                               MachineInstr &I) const {
  MachineBasicBlock &BB = *I.getParent();
  switch (I.getIntrinsicID()) {
  case Intrinsic::spv_load:
    return selectLoad(ResVReg, ResType, I);
  case Intrinsic::spv_store:
    return selectStore(I);
  case Intrinsic::spv_extractv:
    return selectExtractVal(ResVReg, ResType, I);
  case Intrinsic::spv_insertv:
    return selectInsertVal(ResVReg, ResType, I);
  case Intrinsic::spv_extractelt:
    return selectExtractElt(ResVReg, ResType, I);
  case Intrinsic::spv_insertelt:
    return selectInsertElt(ResVReg, ResType, I);
  case Intrinsic::spv_gep:
    return selectGEP(ResVReg, ResType, I);
  case Intrinsic::spv_unref_global:
  case Intrinsic::spv_init_global: {
    MachineInstr *MI = MRI->getVRegDef(I.getOperand(1).getReg());
    MachineInstr *Init = I.getNumExplicitOperands() > 2
                             ? MRI->getVRegDef(I.getOperand(2).getReg())
                             : nullptr;
    assert(MI);
    return selectGlobalValue(MI->getOperand(0).getReg(), *MI, Init);
  }
  case Intrinsic::spv_const_composite: {
    // If no values are attached, the composite is a null constant.
    bool IsNull = I.getNumExplicitDefs() + 1 == I.getNumExplicitOperands();
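    // Illustrative lowering (value names made up): a composite whose operands
    // are %a, %b, %c becomes
    //   %res = OpConstantComposite %compty %a %b %c
    // while an empty operand list becomes "OpConstantNull %compty".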
    unsigned Opcode =
        IsNull ? SPIRV::OpConstantNull : SPIRV::OpConstantComposite;
    auto MIB = BuildMI(BB, I, I.getDebugLoc(), TII.get(Opcode))
                   .addDef(ResVReg)
                   .addUse(GR.getSPIRVTypeID(ResType));
    // Skip the type metadata node; it was already used when the assign.type
    // for this value was generated.
    if (!IsNull) {
      for (unsigned i = I.getNumExplicitDefs() + 1;
           i < I.getNumExplicitOperands(); ++i) {
        MIB.addUse(I.getOperand(i).getReg());
      }
    }
    return MIB.constrainAllUses(TII, TRI, RBI);
  }
  case Intrinsic::spv_assign_name: {
    auto MIB = BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpName));
    MIB.addUse(I.getOperand(I.getNumExplicitDefs() + 1).getReg());
    for (unsigned i = I.getNumExplicitDefs() + 2;
         i < I.getNumExplicitOperands(); ++i) {
      MIB.addImm(I.getOperand(i).getImm());
    }
    return MIB.constrainAllUses(TII, TRI, RBI);
  }
  case Intrinsic::spv_switch: {
    auto MIB = BuildMI(BB, I, I.getDebugLoc(), TII.get(SPIRV::OpSwitch));
    for (unsigned i = 1; i < I.getNumExplicitOperands(); ++i) {
      if (I.getOperand(i).isReg())
        MIB.addReg(I.getOperand(i).getReg());
      else if (I.getOperand(i).isCImm())
        addNumImm(I.getOperand(i).getCImm()->getValue(), MIB);
      else if (I.getOperand(i).isMBB())
        MIB.addMBB(I.getOperand(i).getMBB());
      else
        llvm_unreachable("Unexpected OpSwitch operand");
    }
    return MIB.constrainAllUses(TII, TRI, RBI);
  }
  default:
    llvm_unreachable("Intrinsic selection not implemented");
  }
  return true;
}

bool SPIRVInstructionSelector::selectFrameIndex(Register ResVReg,
                                                const SPIRVType *ResType,
                                                MachineInstr &I) const {
  return BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(SPIRV::OpVariable))
      .addDef(ResVReg)
      .addUse(GR.getSPIRVTypeID(ResType))
      .addImm(static_cast<uint32_t>(SPIRV::StorageClass::Function))
      .constrainAllUses(TII, TRI, RBI);
}
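
// Illustrative selection of a conditional branch (block and value names are
// made up): the pair
//   G_BRCOND %cond, %bb.then
//   G_BR %bb.else
// is selected into a single "OpBranchConditional %cond %bb.then %bb.else",
// while a lone G_BR becomes "OpBranch %bb.dest".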
bool SPIRVInstructionSelector::selectBranch(MachineInstr &I) const {
  // InstructionSelector walks backwards through the instructions. We can use
  // both a G_BR and a G_BRCOND to create an OpBranchConditional. We hit G_BR
  // first, so we can generate an OpBranchConditional here. If there is no
  // G_BRCOND, we just use OpBranch for a regular unconditional branch.
  const MachineInstr *PrevI = I.getPrevNode();
  MachineBasicBlock &MBB = *I.getParent();
  if (PrevI != nullptr && PrevI->getOpcode() == TargetOpcode::G_BRCOND) {
    return BuildMI(MBB, I, I.getDebugLoc(), TII.get(SPIRV::OpBranchConditional))
        .addUse(PrevI->getOperand(0).getReg())
        .addMBB(PrevI->getOperand(1).getMBB())
        .addMBB(I.getOperand(0).getMBB())
        .constrainAllUses(TII, TRI, RBI);
  }
  return BuildMI(MBB, I, I.getDebugLoc(), TII.get(SPIRV::OpBranch))
      .addMBB(I.getOperand(0).getMBB())
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectBranchCond(MachineInstr &I) const {
  // InstructionSelector walks backwards through the instructions. For an
  // explicit conditional branch with no fallthrough, we use both a G_BR and a
  // G_BRCOND to create an OpBranchConditional. We should hit G_BR first, and
  // generate the OpBranchConditional in selectBranch above.
  //
  // If an OpBranchConditional has already been generated, we simply return, as
  // the work is already done. If there is no OpBranchConditional, LLVM must be
  // relying on implicit fallthrough to the next basic block, so we need to
  // create an OpBranchConditional with an explicit "false" argument pointing
  // to the next basic block that LLVM would fall through to.
  const MachineInstr *NextI = I.getNextNode();
  // Check if this has already been successfully selected.
  if (NextI != nullptr && NextI->getOpcode() == SPIRV::OpBranchConditional)
    return true;
  // Must be relying on implicit block fallthrough, so generate an
  // OpBranchConditional with the "next" basic block as the "false" target.
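  // E.g. (made-up names) a trailing "G_BRCOND %cond, %bb.then" whose false
  // path is reached by fallthrough is selected here as
  //   OpBranchConditional %cond %bb.then %bb.next
  // where %bb.next is the block that immediately follows in layout order.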
  MachineBasicBlock &MBB = *I.getParent();
  unsigned NextMBBNum = MBB.getNextNode()->getNumber();
  MachineBasicBlock *NextMBB = I.getMF()->getBlockNumbered(NextMBBNum);
  return BuildMI(MBB, I, I.getDebugLoc(), TII.get(SPIRV::OpBranchConditional))
      .addUse(I.getOperand(0).getReg())
      .addMBB(I.getOperand(1).getMBB())
      .addMBB(NextMBB)
      .constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectPhi(Register ResVReg,
                                         const SPIRVType *ResType,
                                         MachineInstr &I) const {
  auto MIB = BuildMI(*I.getParent(), I, I.getDebugLoc(), TII.get(SPIRV::OpPhi))
                 .addDef(ResVReg)
                 .addUse(GR.getSPIRVTypeID(ResType));
  const unsigned NumOps = I.getNumOperands();
  for (unsigned i = 1; i < NumOps; i += 2) {
    MIB.addUse(I.getOperand(i + 0).getReg());
    MIB.addMBB(I.getOperand(i + 1).getMBB());
  }
  return MIB.constrainAllUses(TII, TRI, RBI);
}

bool SPIRVInstructionSelector::selectGlobalValue(
    Register ResVReg, MachineInstr &I, const MachineInstr *Init) const {
  // FIXME: don't use MachineIRBuilder here, replace it with BuildMI.
  MachineIRBuilder MIRBuilder(I);
  const GlobalValue *GV = I.getOperand(1).getGlobal();
  SPIRVType *ResType = GR.getOrCreateSPIRVType(
      GV->getType(), MIRBuilder, SPIRV::AccessQualifier::ReadWrite, false);

  std::string GlobalIdent = GV->getGlobalIdentifier();
  // TODO: support @llvm.global.annotations.
  auto GlobalVar = cast<GlobalVariable>(GV);

  bool HasInit = GlobalVar->hasInitializer() &&
                 !isa<UndefValue>(GlobalVar->getInitializer());
  // Skip the empty declaration for GVs with an initializer until we get the
  // declaration with the initializer passed in.
  if (HasInit && !Init)
    return true;

  unsigned AddrSpace = GV->getAddressSpace();
  SPIRV::StorageClass Storage = addressSpaceToStorageClass(AddrSpace);
  bool HasLnkTy = GV->getLinkage() != GlobalValue::InternalLinkage &&
                  Storage != SPIRV::StorageClass::Function;
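  // The linkage type chosen below presumably surfaces as the variable's
  // LinkageAttributes decoration, e.g. (illustrative, name made up):
  //   OpDecorate %var LinkageAttributes "foo" Import
  // for declarations, and Export for definitions.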
  SPIRV::LinkageType LnkType =
      (GV->isDeclaration() || GV->hasAvailableExternallyLinkage())
          ? SPIRV::LinkageType::Import
          : SPIRV::LinkageType::Export;

  Register Reg = GR.buildGlobalVariable(ResVReg, ResType, GlobalIdent, GV,
                                        Storage, Init, GlobalVar->isConstant(),
                                        HasLnkTy, LnkType, MIRBuilder, true);
  return Reg.isValid();
}

namespace llvm {
InstructionSelector *
createSPIRVInstructionSelector(const SPIRVTargetMachine &TM,
                               const SPIRVSubtarget &Subtarget,
                               const RegisterBankInfo &RBI) {
  return new SPIRVInstructionSelector(TM, Subtarget, RBI);
}
} // namespace llvm