Searched refs:isScalarInteger (Results 1 – 19 of 19) sorted by relevance
144 bool isScalarInteger() const { in isScalarInteger() function
145 return isSimple() ? V.isScalarInteger() : isExtendedScalarInteger(); in isScalarInteger()
361 return VT.isScalarInteger(); in convertSetCCLogicToBitwiseLogic()
3271 if (!OpVT.isScalarInteger() || OpVT == XLenVT) in lowerVectorIntrinsicSplats()
5936 if (Ty.isScalarInteger() && in isDesirableToCommuteWithShift()
8479 if (VT.isScalarInteger()) { in decomposeMulByConstant()
677 return VT.isScalarInteger(); in convertSetCCLogicToBitwiseLogic()
12873 return VT.isScalarInteger(); in preferIncOfAddToSubOfNot()
782 return VT.isScalarInteger(); in convertSetCCLogicToBitwiseLogic()
1563 return VT.isScalarInteger(); in preferIncOfAddToSubOfNot()
6769 assert(ValVT.isScalarInteger() && LocVT.isScalarInteger()); in truncateScalarIntegerArg()
6953 if (VA.getValVT().isScalarInteger()) in LowerFormalArguments_AIX()
7044 if (ValVT.isScalarInteger() && in LowerFormalArguments_AIX()
16180 if (!VT.isScalarInteger()) in decomposeMulByConstant()
17147 if (MemVT.isScalarInteger()) { in computeMOFlags()
17194 if (MemVT.isScalarInteger() && (FlagSet & PPC::MOF_NoExt)) { in computeMOFlags()
1061 return VT.isScalarInteger(); in convertSetCCLogicToBitwiseLogic()
5460 return X.getValueType().isScalarInteger(); // 'bt' in hasBitTest()
5473 if (X.getValueType().isScalarInteger()) in shouldProduceAndByConstByHoistingConstFromShiftsLHSOfAnd()
29294 if ((SrcVT == MVT::v16i1 || SrcVT == MVT::v32i1) && DstVT.isScalarInteger()) { in LowerBITCAST()
31818 if (!VT1.isScalarInteger() || !VT2.isScalarInteger()) in isTruncateFree()
39241 if ((BitWidth == 64) && SrcVT.isScalarInteger() && !Subtarget.hasAVX512() && in SimplifyDemandedBitsForTargetNode()
39737 DstVT.isScalarInteger()) && in combineCastedMaskArithmetic()
39739 SrcVT.isScalarInteger())) in combineCastedMaskArithmetic()
39935 if ((VT == MVT::v4i1 || VT == MVT::v2i1) && SrcVT.isScalarInteger() && in combineBitcast()
39945 if ((SrcVT == MVT::v4i1 || SrcVT == MVT::v2i1) && VT.isScalarInteger() && in combineBitcast()
39978 SrcVT.isScalarInteger() && TLI.isTypeLegal(VT)) { in combineBitcast()
[all …]
727 return V.getValueType().isScalarInteger(); in hasAndNotCompare()
13412 if (N->getOpcode() != ISD::ADD || !VT.isScalarInteger()) in performUADDVCombine()
17096 return X.getValueType().isScalarInteger() || NewShiftOpcode == ISD::SHL; in shouldProduceAndByConstByHoistingConstFromShiftsLHSOfAnd()
17168 return VT.isScalarInteger(); in preferIncOfAddToSubOfNot()
343 bool isScalarInteger() const { in isScalarInteger() function
361 return (VT.isScalarInteger() && TLI->isTypeLegal(VT)); in hasDivRemOp()
311 if (VT.isScalarInteger()) { in lowerReturnVal()
1936 if (ByteOffsetNode.getValueType().isScalarInteger() && in SelectSMRDOffset()
1041 assert(ElemTy.isScalarInteger()); in LowerSETCC()
1099 assert(ElemTy.isScalarInteger()); in LowerVSELECT()
2073 return X.getValueType().isScalarInteger(); // 'tstbit' in hasBitTest()
1602 if (isHvxBoolTy(VecTy) && ResTy.isScalarInteger()) { in LowerHvxBitcast()
7121 assert(Op.getValueType().isScalarInteger() && "can't handle other types"); in calculateByteProvider()
12304 if (VT.isScalarInteger() || TLI.isOperationLegal(N0.getOpcode(), VT)) { in visitTRUNCATE()
18086 if (!Val.getValueType().isScalarInteger() || Val.getOpcode() != ISD::OR) in splitMergedValStore()
18112 !Lo.getOperand(0).getValueType().isScalarInteger() || in splitMergedValStore()
18115 !Hi.getOperand(0).getValueType().isScalarInteger() || in splitMergedValStore()
18590 if (ExtractIndex == BCTruncElt && BCSrc.getValueType().isScalarInteger()) in visitEXTRACT_VECTOR_ELT()
18598 assert(X.getValueType().isScalarInteger() && ScalarVT.isScalarInteger() && in visitEXTRACT_VECTOR_ELT()
21403 InVal.getValueType().isScalarInteger() && in visitSCALAR_TO_VECTOR()
5432 if (VT.isScalarInteger()) in expandDivFix()
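
Most of the hits above follow one pattern: a TargetLowering hook or DAG combine is gated on whether the value type is a plain (non-vector, non-floating-point) integer. A minimal sketch of that pattern follows; it uses LLVM's real EVT/MVT API, but the class name MyTargetLowering and the surrounding hook body are hypothetical and shown only to illustrate how the check in results such as convertSetCCLogicToBitwiseLogic() and preferIncOfAddToSubOfNot() is typically used.

    #include "llvm/CodeGen/ValueTypes.h"  // EVT; also pulls in MVT

    using namespace llvm;

    // Hypothetical stand-in for a TargetLowering subclass; only the shape of
    // the hook is shown.
    class MyTargetLowering {
    public:
      bool convertSetCCLogicToBitwiseLogic(EVT VT) const {
        // For simple types, isScalarInteger() is isInteger() && !isVector():
        //   MVT::i64   -> true   (scalar integer)
        //   MVT::v4i32 -> false  (integer, but a vector)
        //   MVT::f32   -> false  (not an integer)
        // So the transform is allowed only for scalar integer types.
        return VT.isScalarInteger();
      }
    };

The remaining hits use the same predicate the other way around, for example to recognize bitcasts between boolean mask vectors (v16i1, v32i1, v4i1, v2i1) and a scalar integer of the same bit width, or to assert that a lowering path only ever sees scalar integer operands.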