Lines matching defs:DemandedMask (each match below is prefixed with its line number in LLVM's InstCombineSimplifyDemanded.cpp, where SimplifyDemandedUseBits and its related helpers are defined)

76 APInt DemandedMask(APInt::getAllOnes(Known.getBitWidth()));
77 Value *V = SimplifyDemandedUseBits(&Inst, DemandedMask, Known,
96 const APInt &DemandedMask,
107 if (DemandedMask.isZero()) {
125 NewVal = SimplifyDemandedUseBits(VInst, DemandedMask, Known, Depth, Q);
130 SimplifyMultipleUseDemandedBits(VInst, DemandedMask, Known, Depth, Q);
142 /// set in DemandedMask of the result of V are ever used downstream.
154 /// are accurate even for bits not in DemandedMask. Note
155 /// also that the bitwidth of V, DemandedMask, Known.Zero and Known.One must all
164 const APInt &DemandedMask,
170 uint32_t BitWidth = DemandedMask.getBitWidth();
175 "Value *V, DemandedMask and Known must have same BitWidth");
196 unsigned NLZ = DemandedMask.countl_zero();
216 if (SimplifyDemandedBits(I, 1, DemandedMask, RHSKnown, Depth + 1, Q) ||
217 SimplifyDemandedBits(I, 0, DemandedMask & ~RHSKnown.Zero, LHSKnown,
226 if (DemandedMask.isSubsetOf(Known.Zero | Known.One))
231 if (DemandedMask.isSubsetOf(LHSKnown.Zero | RHSKnown.One))
233 if (DemandedMask.isSubsetOf(RHSKnown.Zero | LHSKnown.One))
237 if (ShrinkDemandedConstant(I, 1, DemandedMask & ~LHSKnown.Zero))
244 if (SimplifyDemandedBits(I, 1, DemandedMask, RHSKnown, Depth + 1, Q) ||
245 SimplifyDemandedBits(I, 0, DemandedMask & ~RHSKnown.One, LHSKnown,
257 if (DemandedMask.isSubsetOf(Known.Zero | Known.One))
262 if (DemandedMask.isSubsetOf(LHSKnown.One | RHSKnown.Zero))
264 if (DemandedMask.isSubsetOf(RHSKnown.One | LHSKnown.Zero))
268 if (ShrinkDemandedConstant(I, 1, DemandedMask))
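
The and/or matches above (source lines 216-268) all feed one replacement rule: if every demanded bit of the result provably equals the corresponding bit of one operand, the whole instruction is replaced by that operand. A minimal self-contained sketch of the rule, using plain uint32_t masks instead of LLVM's APInt/KnownBits (the function names here are illustrative, not LLVM's):

    #include <cassert>
    #include <cstdint>

    // Bits known zero / known one for a value; a simplified stand-in for
    // llvm::KnownBits. The two masks must never overlap.
    struct Known {
      uint32_t Zero, One;
    };

    // True when every set bit of A is also set in B (APInt::isSubsetOf).
    static bool isSubsetOf(uint32_t A, uint32_t B) { return (A & ~B) == 0; }

    // V = L & R: a demanded bit of V equals the matching bit of L when either
    // L is known 0 there (the AND yields 0, which is L's bit) or R is known 1
    // there (the AND passes L's bit through). If that holds for all demanded
    // bits, the 'and' reduces to L (source line 231). The 'or' case is the
    // mirror image: L known 1, or R known 0 (source line 262).
    static bool andReducesToLHS(uint32_t Demanded, Known L, Known R) {
      return isSubsetOf(Demanded, L.Zero | R.One);
    }
    static bool orReducesToLHS(uint32_t Demanded, Known L, Known R) {
      return isSubsetOf(Demanded, L.One | R.Zero);
    }

    int main() {
      Known X{0, 0};  // nothing known about x
      // x & 0xFF with only the low byte demanded: the mask operand is
      // known-one on every demanded bit, so the AND reduces to x.
      Known LowMask{0xFFFFFF00u, 0x000000FFu};
      assert(andReducesToLHS(0x000000FFu, X, LowMask));
      // x | 0xFFFF0000 with only the low half demanded: the constant is
      // known-zero on every demanded bit, so the OR reduces to x.
      Known HiMask{0x0000FFFFu, 0xFFFF0000u};
      assert(orReducesToLHS(0x0000FFFFu, X, HiMask));
    }
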
284 if (SimplifyDemandedBits(I, 1, DemandedMask, RHSKnown, Depth + 1, Q) ||
285 SimplifyDemandedBits(I, 0, DemandedMask, LHSKnown, Depth + 1, Q))
288 if (DemandedMask == 1 &&
303 if (DemandedMask.isSubsetOf(Known.Zero | Known.One))
308 if (DemandedMask.isSubsetOf(RHSKnown.Zero))
310 if (DemandedMask.isSubsetOf(LHSKnown.Zero))
316 if (DemandedMask.isSubsetOf(RHSKnown.Zero | LHSKnown.Zero)) {
319 if (DemandedMask.isAllOnes())
329 if (DemandedMask.isSubsetOf(RHSKnown.Zero | RHSKnown.One) &&
332 ~RHSKnown.One & DemandedMask);
342 if ((*C | ~DemandedMask).isAllOnes()) {
348 if (ShrinkDemandedConstant(I, 1, DemandedMask))
361 (LHSKnown.One & RHSKnown.One & DemandedMask) != 0) {
362 APInt NewMask = ~(LHSKnown.One & RHSKnown.One & DemandedMask);
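
Source line 342's test (*C | ~DemandedMask).isAllOnes() guards turning 'xor X, C' into a plain NOT: when C is 1 on every demanded bit, the xor flips exactly the bits anyone reads. A hedged sketch of the same arithmetic at a fixed 32-bit width:

    #include <cassert>
    #include <cstdint>
    #include <initializer_list>

    // xor X, C behaves as ~X on every bit where C is 1. If C covers all
    // demanded bits (C | ~Demanded is all ones), the two are
    // indistinguishable to any user that only reads demanded bits.
    static bool xorActsAsNot(uint32_t C, uint32_t Demanded) {
      return (C | ~Demanded) == 0xFFFFFFFFu;
    }

    int main() {
      const uint32_t Demanded = 0x0000000Fu;  // only the low nibble is read
      const uint32_t C = 0x000000FFu;         // set on every demanded bit
      assert(xorActsAsNot(C, Demanded));
      for (uint32_t X : {0u, 0x5Au, 0xFFFFFFFFu})
        assert(((X ^ C) & Demanded) == (~X & Demanded));
    }
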
376 if (SimplifyDemandedBits(I, 2, DemandedMask, RHSKnown, Depth + 1, Q) ||
377 SimplifyDemandedBits(I, 1, DemandedMask, LHSKnown, Depth + 1, Q))
386 const APInt &DemandedMask) {
399 return ShrinkDemandedConstant(I, OpNo, DemandedMask);
406 if ((*CmpC & DemandedMask) == (*SelC & DemandedMask)) {
410 return ShrinkDemandedConstant(I, OpNo, DemandedMask);
412 if (CanonicalizeSelectConstant(I, 1, DemandedMask) ||
413 CanonicalizeSelectConstant(I, 2, DemandedMask))
433 C->ule(DemandedMask.countl_zero())) {
446 APInt InputDemandedMask = DemandedMask.zextOrTrunc(SrcBitWidth);
467 APInt InputDemandedBits = DemandedMask.trunc(SrcBitWidth);
471 if (DemandedMask.getActiveBits() > SrcBitWidth)
481 DemandedMask.getActiveBits() <= SrcBitWidth) {
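
Source lines 467-481 handle sext: the input's demanded bits are the demanded mask truncated to the source width (plus the sign bit when any extended bit is demanded, line 471), and when DemandedMask.getActiveBits() <= SrcBitWidth no extended bit is read at all, so the sext is interchangeable with a zext. A self-contained check of that equivalence for an 8-bit source (illustrative, not the LLVM code):

    #include <cassert>
    #include <cstdint>

    int main() {
      // All demanded bits fit inside the 8-bit source width, so the
      // extended bits (where sext and zext differ) are never observed.
      const uint32_t Demanded = 0x000000FFu;
      for (int i = 0; i < 256; ++i) {
        const uint8_t Src = static_cast<uint8_t>(i);
        const uint32_t SExt = static_cast<uint32_t>(
            static_cast<int32_t>(static_cast<int8_t>(Src)));
        const uint32_t ZExt = Src;
        assert((SExt & Demanded) == (ZExt & Demanded));
      }
    }
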
494 if ((DemandedMask & 1) == 0) {
533 unsigned NLZ = DemandedMask.countl_zero();
542 unsigned NTZ = (~DemandedMask & RHSKnown.Zero).countr_one();
560 C->isOneBitSet(DemandedMask.getActiveBits() - 1)) {
576 unsigned NLZ = DemandedMask.countl_zero();
585 unsigned NTZ = (~DemandedMask & RHSKnown.Zero).countr_one();
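
The add/sub matches at source lines 533-585 lean on the fact that carries propagate strictly from low bits to high bits: if the top NLZ bits of the result are never demanded, the top NLZ bits of a constant operand cannot influence any observed bit and can be cleared. A tiny demonstration, assuming a contiguous low demanded mask so that "clear the undemanded high bits" is a simple AND:

    #include <cassert>
    #include <cstdint>
    #include <initializer_list>

    int main() {
      // Top 16 bits of the result are never read (NLZ = 16).
      const uint32_t Demanded = 0x0000FFFFu;
      const uint32_t C = 0x12340005u;
      // Carries only flow upward, so bits of C above the highest demanded
      // bit never reach a demanded result bit and may be dropped.
      const uint32_t ShrunkC = C & Demanded;  // 0x00000005
      for (uint32_t X : {0u, 0xFFFFu, 0xDEADBEEFu, 0xFFFFFFFFu}) {
        assert(((X + C) & Demanded) == ((X + ShrunkC) & Demanded));
        assert(((X - C) & Demanded) == ((X - ShrunkC) & Demanded));
      }
    }
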
621 if (DemandedMask.isPowerOf2()) {
625 unsigned CTZ = DemandedMask.countr_zero();
636 if (I->getOperand(0) == I->getOperand(1) && DemandedMask.ult(4)) {
652 DemandedMask, Known))
673 if (DemandedMask.countr_zero() >= ShiftAmt) {
675 unsigned NumHiDemandedBits = BitWidth - DemandedMask.countr_zero();
700 APInt DemandedMaskIn(DemandedMask.lshr(ShiftAmt));
720 if (unsigned CTLZ = DemandedMask.countl_zero()) {
754 if (DemandedMask.countl_zero() >= ShiftAmt) {
757 unsigned NumHiDemandedBits = BitWidth - DemandedMask.countr_zero();
791 APInt DemandedMaskIn(DemandedMask.shl(ShiftAmt));
811 unsigned NumHiDemandedBits = BitWidth - DemandedMask.countr_zero();
819 if (DemandedMask.isOne()) {
831 APInt DemandedMaskIn(DemandedMask.shl(ShiftAmt));
834 bool ShiftedInBitsDemanded = DemandedMask.countl_zero() < ShiftAmt;
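
Across the shl/lshr/ashr matches (source lines 673-834) the demanded mask always moves opposite to the data: shl propagates DemandedMask.lshr(ShiftAmt) into its operand (source line 700), while lshr and ashr propagate DemandedMask.shl(ShiftAmt) (source lines 791 and 831). A small sketch verifying the shl direction:

    #include <cassert>
    #include <cstdint>
    #include <initializer_list>

    int main() {
      const unsigned Amt = 4;
      const uint32_t Demanded = 0x0000FF00u;
      // For V = X << Amt, result bit i is operand bit (i - Amt), so the
      // operand bits that matter are exactly Demanded >> Amt. Masking X
      // down to those bits cannot change any demanded result bit.
      const uint32_t DemandedIn = Demanded >> Amt;
      for (uint32_t X : {0x12345678u, 0xFFFFFFFFu, 0x0F0F0F0Fu}) {
        const uint32_t Full = X << Amt;
        const uint32_t Masked = (X & DemandedIn) << Amt;
        assert((Full & Demanded) == (Masked & Demanded));
      }
    }
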
886 if (DemandedMask.ult(*Rem)) // srem won't affect demanded bits
905 if (DemandedMask == 1)
914 if (DemandedMask == 1 && VTy->getScalarSizeInBits() % 2 == 0 &&
925 unsigned NLZ = DemandedMask.countl_zero();
926 unsigned NTZ = DemandedMask.countr_zero();
953 if (SimplifyDemandedBits(I, 0, DemandedMask, LHSKnown, Depth + 1, Q) ||
955 I, 1, (DemandedMask & ~LHSKnown.Zero).zextOrTrunc(MaskWidth),
969 if (DemandedMask.isSubsetOf(Known.Zero) &&
978 if (DemandedMask.isSubsetOf(RHSKnown.One | LHSKnown.Zero))
983 I, 1, (DemandedMask & ~LHSKnown.Zero).zextOrTrunc(MaskWidth)))
1039 APInt DemandedMaskLHS(DemandedMask.lshr(ShiftAmt));
1040 APInt DemandedMaskRHS(DemandedMask.shl(BitWidth - ShiftAmt));
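
Source lines 1039-1040 split the demanded mask for a funnel shift: fshl computes (LHS << ShiftAmt) | (RHS >> (BitWidth - ShiftAmt)), so LHS only matters through DemandedMask.lshr(ShiftAmt) and RHS only through DemandedMask.shl(BitWidth - ShiftAmt). A sketch of that split for a 32-bit fshl with a constant, in-range shift amount:

    #include <cassert>
    #include <cstdint>
    #include <initializer_list>

    // Reference 32-bit fshl; valid for 0 < Amt < 32.
    static uint32_t fshl(uint32_t L, uint32_t R, unsigned Amt) {
      return (L << Amt) | (R >> (32 - Amt));
    }

    int main() {
      const unsigned Amt = 8;
      const uint32_t Demanded = 0x0000FFFFu;
      const uint32_t DemandedL = Demanded >> Amt;         // lshr(ShiftAmt)
      const uint32_t DemandedR = Demanded << (32 - Amt);  // shl(BW - ShiftAmt)
      // Zeroing the undemanded operand bits must not change any demanded
      // result bit.
      for (uint32_t L : {0xAAAAAAAAu, 0x12345678u})
        for (uint32_t R : {0x55555555u, 0x9ABCDEF0u}) {
          const uint32_t Full = fshl(L, R, Amt);
          const uint32_t Masked = fshl(L & DemandedL, R & DemandedR, Amt);
          assert((Full & Demanded) == (Masked & Demanded));
        }
    }
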
1080 unsigned CTZ = DemandedMask.countr_zero();
1092 unsigned CTZ = DemandedMask.countr_zero();
1101 *II, DemandedMask, Known, KnownBitsComputed);
1125 DemandedMask.isSubsetOf(Known.Zero | Known.One))
1144 /// DemandedMask, but without modifying the Instruction.
1146 Instruction *I, const APInt &DemandedMask, KnownBits &Known, unsigned Depth,
1148 unsigned BitWidth = DemandedMask.getBitWidth();
1168 if (DemandedMask.isSubsetOf(Known.Zero | Known.One))
1173 if (DemandedMask.isSubsetOf(LHSKnown.Zero | RHSKnown.One))
1175 if (DemandedMask.isSubsetOf(RHSKnown.Zero | LHSKnown.One))
1189 if (DemandedMask.isSubsetOf(Known.Zero | Known.One))
1196 if (DemandedMask.isSubsetOf(LHSKnown.One | RHSKnown.Zero))
1198 if (DemandedMask.isSubsetOf(RHSKnown.One | LHSKnown.Zero))
1212 if (DemandedMask.isSubsetOf(Known.Zero | Known.One))
1218 if (DemandedMask.isSubsetOf(RHSKnown.Zero))
1220 if (DemandedMask.isSubsetOf(LHSKnown.Zero))
1226 unsigned NLZ = DemandedMask.countl_zero();
1246 unsigned NLZ = DemandedMask.countl_zero();
1268 if (DemandedMask.isSubsetOf(Known.Zero | Known.One))
1278 unsigned BitWidth = DemandedMask.getBitWidth();
1282 DemandedMask.isSubsetOf(APInt::getLowBitsSet(
1295 if (DemandedMask.isSubsetOf(Known.Zero | Known.One))
1313 /// 2) We don't care about those bits in S, per the input DemandedMask.
1323 const APInt &ShlOp1, const APInt &DemandedMask, KnownBits &Known) {
1338 Known.Zero &= DemandedMask;
1355 if ((BitMask1 & DemandedMask) == (BitMask2 & DemandedMask)) {
1963 Value *V, const FPClassTest DemandedMask, KnownFPClass &Known,
1970 if (DemandedMask == fcNone)
1981 getFPClassConstant(VTy, DemandedMask & Known.KnownFPClasses);
1991 if (SimplifyDemandedFPClass(I, 0, llvm::fneg(DemandedMask), Known,
2001 if (SimplifyDemandedFPClass(I, 0, llvm::inverse_fabs(DemandedMask), Known,
2007 if (SimplifyDemandedFPClass(I, 0, DemandedMask, Known, Depth + 1))
2012 const FPClassTest DemandedMaskAnySign = llvm::unknown_sign(DemandedMask);
2016 if ((DemandedMask & fcPositive) == fcNone) {
2022 if ((DemandedMask & fcNegative) == fcNone) {
2034 Known = computeKnownFPClass(I, ~DemandedMask, CxtI, Depth + 1);
2042 if (SimplifyDemandedFPClass(I, 2, DemandedMask, KnownRHS, Depth + 1) ||
2043 SimplifyDemandedFPClass(I, 1, DemandedMask, KnownLHS, Depth + 1))
2046 if (KnownLHS.isKnownNever(DemandedMask))
2048 if (KnownRHS.isKnownNever(DemandedMask))
2056 Known = computeKnownFPClass(I, ~DemandedMask, CxtI, Depth + 1);
2060 return getFPClassConstant(VTy, DemandedMask & Known.KnownFPClasses);
2064 FPClassTest DemandedMask,
2069 SimplifyDemandedUseFPClass(U.get(), DemandedMask, Known, Depth, I);
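
The FP-class section (source lines 1963-2069) runs the demanded set backwards through each operation: for fneg the demanded classes of the operand are the sign-swapped classes (llvm::fneg at source line 1991), and fabs uses llvm::inverse_fabs (line 2001). A reduced sketch with a hand-rolled class mask; the real FPClassTest in llvm/ADT/FloatingPointMode.h has more classes (NaNs, subnormals) than shown here:

    #include <cassert>
    #include <cstdint>

    // A cut-down FP class lattice: one bit per class, mirroring the
    // fcNegInf / fcPosInf style of llvm::FPClassTest with fewer members.
    enum FPClass : uint8_t {
      fcNegInf = 1 << 0, fcNegNormal = 1 << 1, fcNegZero = 1 << 2,
      fcPosZero = 1 << 3, fcPosNormal = 1 << 4, fcPosInf = 1 << 5,
    };

    // Demanded classes of fneg's operand: each demanded class of the
    // result maps to the operand class with the opposite sign.
    static uint8_t fnegDemanded(uint8_t Mask) {
      uint8_t Out = 0;
      if (Mask & fcNegInf)    Out |= fcPosInf;
      if (Mask & fcPosInf)    Out |= fcNegInf;
      if (Mask & fcNegNormal) Out |= fcPosNormal;
      if (Mask & fcPosNormal) Out |= fcNegNormal;
      if (Mask & fcNegZero)   Out |= fcPosZero;
      if (Mask & fcPosZero)   Out |= fcNegZero;
      return Out;
    }

    int main() {
      // If a user only cares whether fneg(X) can be +inf, then only X's
      // -inf class matters.
      assert(fnegDemanded(fcPosInf) == fcNegInf);
      // fneg is an involution, so mapping twice restores the demand.
      assert(fnegDemanded(fnegDemanded(fcNegNormal | fcPosZero)) ==
             (fcNegNormal | fcPosZero));
    }
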