Lines Matching defs:II (the IntrinsicInst being combined), taken from the X86TTIImpl::instCombineIntrinsic helpers; the original source line numbers are preserved.
58 static Instruction *simplifyX86MaskedLoad(IntrinsicInst &II, InstCombiner &IC) {
59 Value *Ptr = II.getOperand(0);
60 Value *Mask = II.getOperand(1);
61 Constant *ZeroVec = Constant::getNullValue(II.getType());
65 return IC.replaceInstUsesWith(II, ZeroVec);
72 II.getType(), Ptr, Align(1), BoolMask, ZeroVec);
73 return IC.replaceInstUsesWith(II, NewMaskedLoad);
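Lines 58-73 above are simplifyX86MaskedLoad, which rebuilds an x86 masked load as a generic masked load: a boolean mask derived from the sign bits of the mask operand, alignment 1, and ZeroVec as the pass-through (lines 61, 65, 72-73). As a hedged illustration of the per-lane rule that rewrite relies on (a scalar model only, not the LLVM routine; model_maskload_epi32 is an illustrative name of mine):

    #include <cstddef>
    #include <cstdint>

    // Scalar model of an AVX maskload with 32-bit lanes: a lane is read
    // from memory only when the sign (top) bit of its mask element is set;
    // masked-off lanes become zero, matching the ZeroVec pass-through.
    void model_maskload_epi32(const int32_t *ptr, const int32_t *mask,
                              int32_t *out, size_t lanes) {
      for (size_t i = 0; i < lanes; ++i)
        out[i] = (mask[i] < 0) ? ptr[i] : 0;
    }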
82 static bool simplifyX86MaskedStore(IntrinsicInst &II, InstCombiner &IC) {
83 Value *Ptr = II.getOperand(0);
84 Value *Mask = II.getOperand(1);
85 Value *Vec = II.getOperand(2);
89 IC.eraseInstFromFunction(II);
95 if (II.getIntrinsicID() == Intrinsic::x86_sse2_maskmov_dqu)
108 IC.eraseInstFromFunction(II);
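simplifyX86MaskedStore (lines 82-108) is the store-side counterpart: line 95 singles out the SSE2 maskmovdqu variant, and lines 89 and 108 erase the original intrinsic once it has been handled. A scalar sketch of the store semantics being modelled, under the same caveats as above:

    #include <cstddef>
    #include <cstdint>

    // Scalar model of an AVX maskstore with 32-bit lanes: an element is
    // written only where the sign bit of its mask element is set; other
    // memory locations are left untouched.
    void model_maskstore_epi32(int32_t *ptr, const int32_t *mask,
                               const int32_t *vec, size_t lanes) {
      for (size_t i = 0; i < lanes; ++i)
        if (mask[i] < 0)
          ptr[i] = vec[i];
    }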
115 static Value *simplifyX86immShift(const IntrinsicInst &II,
121 switch (II.getIntrinsicID()) {
196 Value *Vec = II.getArgOperand(0);
197 Value *Amt = II.getArgOperand(1);
210 llvm::computeKnownBits(Amt, II.getDataLayout());
234 Amt, DemandedLower, II.getDataLayout());
236 Amt, DemandedUpper, II.getDataLayout());
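simplifyX86immShift (lines 115-236) folds the shift-by-immediate and shift-by-scalar intrinsics once enough is known about the amount operand (the computeKnownBits calls on lines 210 and 234-236). The hardware rule it has to respect differs from IR shifts, where an out-of-range amount is poison; a scalar model on 16-bit elements (illustrative names, not LLVM code):

    #include <cstdint>

    // x86 vector shifts clamp out-of-range amounts instead of producing
    // poison: logical shifts by 16 or more yield 0, and the arithmetic
    // right shift behaves like a shift by 15 (sign replication).
    uint16_t model_psllw(uint16_t v, uint64_t amt) {
      return amt >= 16 ? 0 : (uint16_t)(v << amt);
    }
    uint16_t model_psrlw(uint16_t v, uint64_t amt) {
      return amt >= 16 ? 0 : (uint16_t)(v >> amt);
    }
    int16_t model_psraw(int16_t v, uint64_t amt) {
      return (int16_t)(v >> (amt >= 16 ? 15 : amt));
    }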
297 static Value *simplifyX86varShift(const IntrinsicInst &II,
302 switch (II.getIntrinsicID()) {
344 Value *Vec = II.getArgOperand(0);
345 Value *Amt = II.getArgOperand(1);
346 auto *VT = cast<FixedVectorType>(II.getType());
354 llvm::computeKnownBits(Amt, II.getDataLayout());
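simplifyX86varShift (lines 297-354) does the same for the per-element AVX2/AVX-512 shifts, again leaning on known bits of the amount vector (line 354). Each lane carries its own amount, with the same clamping rule; a sketch for the arithmetic variant (the name is mine):

    #include <cstddef>
    #include <cstdint>

    // Per-element arithmetic right shift (vpsravd-style) on 32-bit lanes:
    // amounts of 32 or more act like a shift by 31, i.e. a sign fill.
    void model_vpsravd(const int32_t *v, const uint32_t *amt, int32_t *out,
                       size_t lanes) {
      for (size_t i = 0; i < lanes; ++i)
        out[i] = v[i] >> (amt[i] >= 32 ? 31 : amt[i]);
    }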
433 static Value *simplifyX86pack(IntrinsicInst &II,
435 Value *Arg0 = II.getArgOperand(0);
436 Value *Arg1 = II.getArgOperand(1);
437 Type *ResTy = II.getType();
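simplifyX86pack (lines 433-437) handles the packss/packus family; the boolean at the call sites on lines 2621 and 2632 most plausibly selects signed versus unsigned saturation. The per-element rule being folded, as a scalar model (the real instruction also interleaves the two inputs per 128-bit lane, which this omits):

    #include <cstdint>

    // packsswb lane: signed 16-bit to signed 8-bit with signed saturation.
    int8_t model_packss_b(int16_t x) {
      return x > 127 ? 127 : (x < -128 ? -128 : (int8_t)x);
    }

    // packuswb lane: signed 16-bit to unsigned 8-bit with unsigned saturation.
    uint8_t model_packus_b(int16_t x) {
      return x < 0 ? 0 : (x > 255 ? 255 : (uint8_t)x);
    }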
499 static Value *simplifyX86pmulh(IntrinsicInst &II,
502 Value *Arg0 = II.getArgOperand(0);
503 Value *Arg1 = II.getArgOperand(1);
504 auto *ResTy = cast<FixedVectorType>(II.getType());
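simplifyX86pmulh (lines 499-504) covers the pmulhw/pmulhuw/pmulhrsw high-multiply intrinsics; the two booleans at the call sites (lines 2640, 2648, 2656) read most naturally as a signedness flag and a rounding flag. Scalar models of the three products (illustrative only):

    #include <cstdint>

    // pmulhw: high 16 bits of the signed 16x16 -> 32-bit product.
    int16_t model_pmulhw(int16_t a, int16_t b) {
      return (int16_t)(((int32_t)a * b) >> 16);
    }
    // pmulhuw: high 16 bits of the unsigned product.
    uint16_t model_pmulhuw(uint16_t a, uint16_t b) {
      return (uint16_t)(((uint32_t)a * b) >> 16);
    }
    // pmulhrsw: product scaled down by 14 bits, rounded, then halved.
    // The one overflowing corner, -32768 * -32768, wraps to -32768 just
    // as the instruction does.
    int16_t model_pmulhrsw(int16_t a, int16_t b) {
      int32_t t = (((int32_t)a * b) >> 14) + 1;
      return (int16_t)(t >> 1);
    }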
557 static Value *simplifyX86pmadd(IntrinsicInst &II,
560 Value *Arg0 = II.getArgOperand(0);
561 Value *Arg1 = II.getArgOperand(1);
562 auto *ResTy = cast<FixedVectorType>(II.getType());
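simplifyX86pmadd (lines 557-562) handles the pair-wise multiply-add intrinsics; the boolean at the call sites on lines 2664 and 2672 plausibly distinguishes pmaddwd from pmaddubsw. A scalar model of pmaddwd (pmaddubsw additionally saturates an unsigned-by-signed byte product to 16 bits):

    #include <cstddef>
    #include <cstdint>

    // pmaddwd: multiply corresponding signed 16-bit elements, then add
    // each adjacent pair of 32-bit products into one 32-bit result lane.
    // The only wrapping case (all four inputs -32768) matches hardware.
    void model_pmaddwd(const int16_t *a, const int16_t *b, int32_t *out,
                       size_t out_lanes) {
      for (size_t i = 0; i < out_lanes; ++i)
        out[i] = (int32_t)((int64_t)a[2 * i] * b[2 * i] +
                           (int64_t)a[2 * i + 1] * b[2 * i + 1]);
    }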
611 static Value *simplifyX86movmsk(const IntrinsicInst &II,
613 Value *Arg = II.getArgOperand(0);
614 Type *ResTy = II.getType();
622 if (II.getIntrinsicID() == Intrinsic::x86_mmx_pmovmskb)
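simplifyX86movmsk (lines 611-622) folds movmskps/movmskpd/pmovmskb when the argument is well enough known; line 622 special-cases the MMX pmovmskb. The semantics being folded simply gather the sign bit of every element into the low bits of a scalar (the model name below is mine):

    #include <cstddef>
    #include <cstdint>

    // pmovmskb: bit i of the result is the top bit of byte i (lanes <= 32).
    uint32_t model_pmovmskb(const int8_t *v, size_t lanes) {
      uint32_t msk = 0;
      for (size_t i = 0; i < lanes; ++i)
        if (v[i] < 0)
          msk |= 1u << i;
      return msk;
    }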
642 static Value *simplifyX86addcarry(const IntrinsicInst &II,
644 Value *CarryIn = II.getArgOperand(0);
645 Value *Op1 = II.getArgOperand(1);
646 Value *Op2 = II.getArgOperand(2);
647 Type *RetTy = II.getType();
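simplifyX86addcarry (lines 642-647) looks at the addcarry/subborrow intrinsics: a carry-in (ArgOperand(0), line 644), two operands, and a {carry-out, sum} result. The interesting case is a carry-in that is known to be zero, where the operation degenerates to an ordinary overflowing add. A scalar model of the full semantics (the builtin used is GCC/Clang-specific; the name is mine):

    #include <cstdint>

    // Model of _addcarry_u64: *out = a + b + carry_in (mod 2^64), and the
    // return value is the carry out of that full addition. At most one of
    // the two partial additions can carry, so OR-ing the flags is exact.
    uint8_t model_addcarry_u64(uint8_t carry_in, uint64_t a, uint64_t b,
                               uint64_t *out) {
      uint64_t s;
      uint8_t c1 = __builtin_add_overflow(a, b, &s);
      uint8_t c2 = __builtin_add_overflow(s, (uint64_t)(carry_in != 0), &s);
      *out = s;
      return c1 | c2;
    }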
669 static Value *simplifyTernarylogic(const IntrinsicInst &II,
672 auto *ArgImm = dyn_cast<ConstantInt>(II.getArgOperand(3));
676 Value *ArgA = II.getArgOperand(0);
677 Value *ArgB = II.getArgOperand(1);
678 Value *ArgC = II.getArgOperand(2);
680 Type *Ty = II.getType();
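simplifyTernarylogic (lines 669-680) constant-evaluates vpternlog when the immediate (ArgOperand(3), line 672) is a ConstantInt: the imm8 is a three-input truth table, so the result can be built symbolically from A, B and C. A bit-level model of the table lookup (my naming; the LLVM routine works on the operands symbolically rather than bit by bit):

    #include <cstdint>

    // vpternlog: for each bit position form the index (A:B:C), with A as
    // the most significant selector, and read that bit of the imm8 table.
    uint64_t model_vpternlog(uint64_t a, uint64_t b, uint64_t c, uint8_t imm8) {
      uint64_t r = 0;
      for (unsigned bit = 0; bit < 64; ++bit) {
        unsigned idx = (unsigned)(((a >> bit) & 1) << 2 |
                                  ((b >> bit) & 1) << 1 |
                                  ((c >> bit) & 1));
        r |= (uint64_t)((imm8 >> idx) & 1) << bit;
      }
      return r;
    }

For example, imm8 = 0x96 (bits 1, 2, 4 and 7 set) reproduces a ^ b ^ c, and imm8 = 0xF0 reproduces a.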
1737 static Value *simplifyX86insertps(const IntrinsicInst &II,
1739 auto *CInt = dyn_cast<ConstantInt>(II.getArgOperand(2));
1743 auto *VecTy = cast<FixedVectorType>(II.getType());
1767 Value *V1 = II.getArgOperand(1);
1772 if ((II.getArgOperand(0) == II.getArgOperand(1)) ||
1791 return Builder.CreateShuffleVector(II.getArgOperand(0), V1, ShuffleMask);
1796 static Value *simplifyX86extrq(IntrinsicInst &II, Value *Op0,
1800 Type *IntTy64 = Type::getInt64Ty(II.getContext());
1834 return UndefValue::get(II.getType());
1843 Type *IntTy8 = Type::getInt8Ty(II.getContext());
1857 return Builder.CreateBitCast(SV, II.getType());
1870 if (II.getIntrinsicID() == Intrinsic::x86_sse4a_extrq) {
1885 static Value *simplifyX86insertq(IntrinsicInst &II, Value *Op0, Value *Op1,
1909 return UndefValue::get(II.getType());
1918 Type *IntTy8 = Type::getInt8Ty(II.getContext());
1934 return Builder.CreateBitCast(SV, II.getType());
1955 Type *IntTy64 = Type::getInt64Ty(II.getContext());
1963 if (II.getIntrinsicID() == Intrinsic::x86_sse4a_insertq) {
1964 Type *IntTy8 = Type::getInt8Ty(II.getContext());
1976 static Value *simplifyX86pshufb(const IntrinsicInst &II,
1978 auto *V = dyn_cast<Constant>(II.getArgOperand(1));
1982 auto *VecTy = cast<FixedVectorType>(II.getType());
2015 auto V1 = II.getArgOperand(0);
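simplifyX86pshufb (lines 1976-2015) turns a constant control vector (ArgOperand(1), line 1978) into a plain shufflevector. The byte rule being folded: a control byte with its top bit set zeroes the result byte, otherwise the low four bits index into the same 16-byte lane, which is also why line 2965 later demands only bits 0b10001111 of each control byte. A scalar model of one lane (the name is illustrative):

    #include <cstdint>

    // pshufb within one 16-byte lane: bit 7 of the control byte forces a
    // zero; otherwise the low 4 bits pick the source byte.
    void model_pshufb_lane(const uint8_t src[16], const uint8_t ctrl[16],
                           uint8_t out[16]) {
      for (int i = 0; i < 16; ++i)
        out[i] = (ctrl[i] & 0x80) ? 0 : src[ctrl[i] & 0x0f];
    }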
2021 static Value *simplifyX86vpermilvar(const IntrinsicInst &II,
2023 auto *V = dyn_cast<Constant>(II.getArgOperand(1));
2027 auto *VecTy = cast<FixedVectorType>(II.getType());
2063 auto V1 = II.getArgOperand(0);
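simplifyX86vpermilvar (lines 2021-2063) does the analogous fold for vpermilvar_ps/pd with a constant control vector. Each result element is picked from within its own 128-bit lane: the ps form uses the low two bits of each 32-bit control element and the pd form uses only bit 1 of each 64-bit control element, which matches the demanded-bits masks 0b00011 and 0b00010 on lines 2978 and 2991. A per-lane sketch of the ps form (illustrative code, not LLVM's):

    #include <cstdint>

    // vpermilvar_ps within one 128-bit lane: each result dword is chosen
    // from the four source dwords of that lane by the low two bits of the
    // matching control dword.
    void model_vpermilvar_ps_lane(const uint32_t src[4], const uint32_t ctrl[4],
                                  uint32_t out[4]) {
      for (int i = 0; i < 4; ++i)
        out[i] = src[ctrl[i] & 0x3];
    }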
2068 static Value *simplifyX86vpermv(const IntrinsicInst &II,
2070 auto *V = dyn_cast<Constant>(II.getArgOperand(1));
2074 auto *VecTy = cast<FixedVectorType>(II.getType());
2097 auto V1 = II.getArgOperand(0);
2102 static Value *simplifyX86vpermv3(const IntrinsicInst &II,
2104 auto *V = dyn_cast<Constant>(II.getArgOperand(1));
2108 auto *VecTy = cast<FixedVectorType>(II.getType());
2132 auto V1 = II.getArgOperand(0);
2133 auto V2 = II.getArgOperand(2);
2138 static bool simplifyX86VPERMMask(Instruction *II, bool IsBinary,
2140 auto *VecTy = cast<FixedVectorType>(II->getType());
2150 return IC.SimplifyDemandedBits(II, /*OpNo=*/1, DemandedMask, KnownMask);
2154 X86TTIImpl::instCombineIntrinsic(InstCombiner &IC, IntrinsicInst &II) const {
2162 Intrinsic::ID IID = II.getIntrinsicID();
2169 if (auto *C = dyn_cast<ConstantInt>(II.getArgOperand(1))) {
2173 unsigned BitWidth = II.getType()->getIntegerBitWidth();
2176 return IC.replaceInstUsesWith(II, ConstantInt::get(II.getType(), 0));
2179 if (auto *InC = dyn_cast<ConstantInt>(II.getArgOperand(0))) {
2184 return IC.replaceInstUsesWith(II,
2185 ConstantInt::get(II.getType(), Result));
2195 if (auto *C = dyn_cast<ConstantInt>(II.getArgOperand(1))) {
2197 unsigned BitWidth = II.getType()->getIntegerBitWidth();
2199 return IC.replaceInstUsesWith(II, II.getArgOperand(0));
2202 return IC.replaceInstUsesWith(II, ConstantInt::get(II.getType(), 0));
2205 if (auto *InC = dyn_cast<ConstantInt>(II.getArgOperand(0))) {
2208 return IC.replaceInstUsesWith(II,
2209 ConstantInt::get(II.getType(), Result));
2216 if (auto *MaskC = dyn_cast<ConstantInt>(II.getArgOperand(1))) {
2218 return IC.replaceInstUsesWith(II, ConstantInt::get(II.getType(), 0));
2221 return IC.replaceInstUsesWith(II, II.getArgOperand(0));
2229 Value *Input = II.getArgOperand(0);
2230 Value *Masked = IC.Builder.CreateAnd(Input, II.getArgOperand(1));
2231 Value *ShiftAmt = ConstantInt::get(II.getType(), MaskIdx);
2233 return IC.replaceInstUsesWith(II, Shifted);
2236 if (auto *SrcC = dyn_cast<ConstantInt>(II.getArgOperand(0))) {
2253 return IC.replaceInstUsesWith(II,
2254 ConstantInt::get(II.getType(), Result));
2260 if (auto *MaskC = dyn_cast<ConstantInt>(II.getArgOperand(1))) {
2262 return IC.replaceInstUsesWith(II, ConstantInt::get(II.getType(), 0));
2265 return IC.replaceInstUsesWith(II, II.getArgOperand(0));
2273 Value *Input = II.getArgOperand(0);
2274 Value *ShiftAmt = ConstantInt::get(II.getType(), MaskIdx);
2276 Value *Masked = IC.Builder.CreateAnd(Shifted, II.getArgOperand(1));
2277 return IC.replaceInstUsesWith(II, Masked);
2280 if (auto *SrcC = dyn_cast<ConstantInt>(II.getArgOperand(0))) {
2297 return IC.replaceInstUsesWith(II,
2298 ConstantInt::get(II.getType(), Result));
2329 Value *Arg = II.getArgOperand(0);
2332 return IC.replaceOperand(II, 0, V);
2344 if (Value *V = simplifyX86movmsk(II, IC.Builder)) {
2345 return IC.replaceInstUsesWith(II, V);
2380 Value *Arg0 = II.getArgOperand(0);
2381 Value *Arg1 = II.getArgOperand(1);
2384 IC.replaceOperand(II, 0, V);
2388 IC.replaceOperand(II, 1, V);
2392 return &II;
2407 if (auto *R = dyn_cast<ConstantInt>(II.getArgOperand(2))) {
2409 Value *Arg0 = II.getArgOperand(0);
2410 Value *Arg1 = II.getArgOperand(1);
2434 return IC.replaceInstUsesWith(II, V);
2449 if (auto *R = dyn_cast<ConstantInt>(II.getArgOperand(4))) {
2452 Value *Arg0 = II.getArgOperand(0);
2453 Value *Arg1 = II.getArgOperand(1);
2480 Value *Mask = II.getArgOperand(3);
2492 IC.Builder.CreateExtractElement(II.getArgOperand(2), (uint64_t)0);
2499 return IC.replaceInstUsesWith(II, V);
2534 if (Value *V = simplifyX86immShift(II, IC.Builder)) {
2535 return IC.replaceInstUsesWith(II, V);
2566 if (Value *V = simplifyX86immShift(II, IC.Builder)) {
2567 return IC.replaceInstUsesWith(II, V);
2572 Value *Arg1 = II.getArgOperand(1);
2578 return IC.replaceOperand(II, 1, V);
2610 if (Value *V = simplifyX86varShift(II, IC.Builder)) {
2611 return IC.replaceInstUsesWith(II, V);
2621 if (Value *V = simplifyX86pack(II, IC.Builder, true)) {
2622 return IC.replaceInstUsesWith(II, V);
2632 if (Value *V = simplifyX86pack(II, IC.Builder, false)) {
2633 return IC.replaceInstUsesWith(II, V);
2640 if (Value *V = simplifyX86pmulh(II, IC.Builder, true, false)) {
2641 return IC.replaceInstUsesWith(II, V);
2648 if (Value *V = simplifyX86pmulh(II, IC.Builder, false, false)) {
2649 return IC.replaceInstUsesWith(II, V);
2656 if (Value *V = simplifyX86pmulh(II, IC.Builder, true, true)) {
2657 return IC.replaceInstUsesWith(II, V);
2664 if (Value *V = simplifyX86pmadd(II, IC.Builder, true)) {
2665 return IC.replaceInstUsesWith(II, V);
2672 if (Value *V = simplifyX86pmadd(II, IC.Builder, false)) {
2673 return IC.replaceInstUsesWith(II, V);
2680 if (auto *C = dyn_cast<ConstantInt>(II.getArgOperand(2))) {
2684 Value *Arg0 = II.getArgOperand(0);
2685 Value *Arg1 = II.getArgOperand(1);
2694 IC.replaceOperand(II, 0, V);
2703 IC.replaceOperand(II, 1, V);
2710 return IC.replaceInstUsesWith(II,
2711 ConstantAggregateZero::get(II.getType()));
2715 return &II;
2722 if (Value *V = simplifyX86insertps(II, IC.Builder)) {
2723 return IC.replaceInstUsesWith(II, V);
2728 Value *Op0 = II.getArgOperand(0);
2729 Value *Op1 = II.getArgOperand(1);
2746 if (Value *V = simplifyX86extrq(II, Op0, CILength, CIIndex, IC.Builder)) {
2747 return IC.replaceInstUsesWith(II, V);
2754 IC.replaceOperand(II, 0, V);
2758 IC.replaceOperand(II, 1, V);
2762 return &II;
2770 Value *Op0 = II.getArgOperand(0);
2776 auto *CILength = dyn_cast<ConstantInt>(II.getArgOperand(1));
2777 auto *CIIndex = dyn_cast<ConstantInt>(II.getArgOperand(2));
2780 if (Value *V = simplifyX86extrq(II, Op0, CILength, CIIndex, IC.Builder)) {
2781 return IC.replaceInstUsesWith(II, V);
2787 return IC.replaceOperand(II, 0, V);
2793 Value *Op0 = II.getArgOperand(0);
2794 Value *Op1 = II.getArgOperand(1);
2812 if (Value *V = simplifyX86insertq(II, Op0, Op1, Len, Idx, IC.Builder)) {
2813 return IC.replaceInstUsesWith(II, V);
2820 return IC.replaceOperand(II, 0, V);
2829 Value *Op0 = II.getArgOperand(0);
2830 Value *Op1 = II.getArgOperand(1);
2838 auto *CILength = dyn_cast<ConstantInt>(II.getArgOperand(2));
2839 auto *CIIndex = dyn_cast<ConstantInt>(II.getArgOperand(3));
2845 if (Value *V = simplifyX86insertq(II, Op0, Op1, Len, Idx, IC.Builder)) {
2846 return IC.replaceInstUsesWith(II, V);
2854 IC.replaceOperand(II, 0, V);
2858 IC.replaceOperand(II, 1, V);
2862 return &II;
2874 Value *Op0 = II.getArgOperand(0);
2875 Value *Op1 = II.getArgOperand(1);
2876 Value *Mask = II.getArgOperand(2);
2878 return IC.replaceInstUsesWith(II, Op0);
2883 return IC.replaceInstUsesWith(II, Op0);
2918 auto *OpTy = cast<FixedVectorType>(II.getType());
2950 return new BitCastInst(Sel, II.getType());
2960 if (Value *V = simplifyX86pshufb(II, IC.Builder)) {
2961 return IC.replaceInstUsesWith(II, V);
2965 if (IC.SimplifyDemandedBits(&II, 1, APInt(8, 0b10001111), KnownMask))
2966 return &II;
2973 if (Value *V = simplifyX86vpermilvar(II, IC.Builder)) {
2974 return IC.replaceInstUsesWith(II, V);
2978 if (IC.SimplifyDemandedBits(&II, 1, APInt(32, 0b00011), KnownMask))
2979 return &II;
2986 if (Value *V = simplifyX86vpermilvar(II, IC.Builder)) {
2987 return IC.replaceInstUsesWith(II, V);
2991 if (IC.SimplifyDemandedBits(&II, 1, APInt(64, 0b00010), KnownMask))
2992 return &II;
3010 if (Value *V = simplifyX86vpermv(II, IC.Builder)) {
3011 return IC.replaceInstUsesWith(II, V);
3013 if (simplifyX86VPERMMask(&II, /*IsBinary=*/false, IC))
3014 return &II;
3035 if (Value *V = simplifyX86vpermv3(II, IC.Builder)) {
3036 return IC.replaceInstUsesWith(II, V);
3038 if (simplifyX86VPERMMask(&II, /*IsBinary=*/true, IC))
3039 return &II;
3050 if (Instruction *I = simplifyX86MaskedLoad(II, IC)) {
3064 if (simplifyX86MaskedStore(II, IC)) {
3071 if (Value *V = simplifyX86addcarry(II, IC.Builder)) {
3072 return IC.replaceInstUsesWith(II, V);
3082 if (Value *V = simplifyTernarylogic(II, IC.Builder)) {
3083 return IC.replaceInstUsesWith(II, V);
3093 InstCombiner &IC, IntrinsicInst &II, APInt DemandedMask, KnownBits &Known,
3095 switch (II.getIntrinsicID()) {
3108 if (II.getIntrinsicID() == Intrinsic::x86_mmx_pmovmskb) {
3111 auto *ArgType = cast<FixedVectorType>(II.getArgOperand(0)->getType());
3118 Type *VTy = II.getType();
3133 InstCombiner &IC, IntrinsicInst &II, APInt DemandedElts, APInt &UndefElts,
3137 unsigned VWidth = cast<FixedVectorType>(II.getType())->getNumElements();
3138 switch (II.getIntrinsicID()) {
3148 IC.addToWorklist(&II);
3149 return ConstantAggregateZero::get(II.getType());
3154 simplifyAndSetOp(&II, 0, DemandedElts, UndefElts);
3163 simplifyAndSetOp(&II, 0, DemandedElts, UndefElts);
3167 IC.addToWorklist(&II);
3168 return II.getArgOperand(0);
3183 simplifyAndSetOp(&II, 0, DemandedElts, UndefElts);
3187 IC.addToWorklist(&II);
3188 return II.getArgOperand(0);
3193 simplifyAndSetOp(&II, 1, DemandedElts, UndefElts2);
3210 simplifyAndSetOp(&II, 0, DemandedElts2, UndefElts);
3214 IC.addToWorklist(&II);
3215 return II.getArgOperand(0);
3220 simplifyAndSetOp(&II, 1, DemandedElts, UndefElts2);
3244 simplifyAndSetOp(&II, 0, DemandedElts, UndefElts);
3248 IC.addToWorklist(&II);
3249 return II.getArgOperand(0);
3254 simplifyAndSetOp(&II, 1, DemandedElts, UndefElts2);
3255 simplifyAndSetOp(&II, 2, DemandedElts, UndefElts3);
3277 IC.Builder.SetInsertPoint(&II);
3278 Value *Arg0 = II.getArgOperand(0), *Arg1 = II.getArgOperand(1);
3283 simplifyAndSetOp(&II, 0, DemandedElts, UndefElts);
3284 simplifyAndSetOp(&II, 1, DemandedElts, UndefElts2);
3300 simplifyAndSetOp(&II, 0, DemandedElts, UndefElts);
3301 simplifyAndSetOp(&II, 1, DemandedElts, UndefElts2);
3315 simplifyAndSetOp(&II, 0, DemandedElts, UndefElts);
3316 simplifyAndSetOp(&II, 1, DemandedElts, UndefElts2);
3333 auto *Ty0 = II.getArgOperand(0)->getType();
3358 simplifyAndSetOp(&II, OpNum, OpDemandedElts, OpUndefElts);
3379 auto *ArgTy = II.getArgOperand(0)->getType();
3385 simplifyAndSetOp(&II, 0, OpDemandedElts, Op0UndefElts);
3386 simplifyAndSetOp(&II, 1, OpDemandedElts, Op1UndefElts);
3405 simplifyAndSetOp(&II, 1, DemandedElts, UndefElts);