Lines Matching defs:Cnt

2390 // how many of them we have for each APO, like this: Cnt[APO] = x.
2518 unsigned Cnt = 0;
2556 ++Cnt;
2563 return getNumLanes() == 2 || Cnt > 1;
2786 unsigned Cnt = 0;
2788 OS << "Operand " << Cnt++ << "\n";
4908 int Cnt = 0;
4929 auto Res = Offsets.emplace(Dist, Cnt);
4934 ++Cnt;
4942 Cnt = 0;
4944 SortedIndices[Cnt] = Pair.second;
4945 ++Cnt;
5235 for (unsigned Cnt = 0, End = VL.size(); Cnt + VF <= End; Cnt += VF) {
5236 ArrayRef<Value *> Slice = VL.slice(Cnt, VF);
5248 DemandedElts.setBits(Cnt, Cnt + VF);
5396 for (auto [Cnt, Ptr] : enumerate(VL.drop_front())) {
5397 auto Key = std::make_pair(BBs[Cnt + 1],
5400 [&, &Cnt = Cnt, &Ptr = Ptr](auto &Base) {
5408 Base.emplace_back(Ptr, *Diff, Cnt + 1);
5418 Bases.find(Key)->second.emplace_back().emplace_back(Ptr, 0, Cnt + 1);
6003 unsigned Cnt = 0;
6005 while (UserTE && Cnt < RecursionMaxDepth) {
6014 ++Cnt;
6124 unsigned Cnt = IdentityCnt;
6129 if (Cnt < Pair.second ||
6130 (Cnt == IdentityCnt && IdentityCnt == FilledIdentityCnt &&
6131 Cnt == Pair.second && !BestOrder.empty() &&
6135 Cnt = Pair.second;
6452 unsigned Cnt = IdentityCnt;
6457 if (Cnt < Pair.second) {
6460 Cnt = Pair.second;
6886 for (auto [Cnt, Pair] : enumerate(Loads)) {
6890 ToAdd.insert(Cnt);
6892 Repeated.insert(Cnt);
7011 for (unsigned Cnt = StartIdx, E = Loads.size(); Cnt < E;
7012 ++Cnt) {
7014 ArrayRef(Loads).slice(Cnt, std::min(NumElts, E - Cnt));
7086 Cnt >= MaskedGatherVectorized.back() + NumElts)
7087 MaskedGatherVectorized.push_back(Cnt);
7095 if (Cnt == StartIdx)
7104 Cnt < MaskedGatherVectorized.back() + NumElts)
7106 Cnt += NumElts - 1;
7114 for (unsigned Cnt : MaskedGatherVectorized) {
7116 Cnt, std::min<unsigned>(NumElts, Loads.size() - Cnt));
7122 if (Cnt == StartIdx)
9663 unsigned Cnt = 0;
9671 TE.ReorderIndices[Cnt + K] = Idx;
9672 TE.Scalars[Cnt + K] = V;
9675 Cnt += Indices.size();
9680 SubVectors.emplace_back(Cnt - Sz, SubVF);
9681 for (unsigned I : seq<unsigned>(Cnt - Sz, Cnt - Sz + SubVF))
9684 for (unsigned I : seq<unsigned>(Cnt - Sz, Cnt))
9825 for (unsigned Cnt = StartIdx; Cnt + VF <= End; Cnt += VF) {
9826 ArrayRef<Value *> Slice = VL.slice(Cnt, VF);
9902 Slices.emplace_back(Cnt, Slice.size());
9904 auto AddCombinedNode = [&](unsigned Idx, unsigned Cnt, unsigned Sz) {
9905 E.CombinedEntriesWithIndices.emplace_back(Idx, Cnt);
9906 if (StartIdx == Cnt)
9907 StartIdx = Cnt + Sz;
9908 if (End == Cnt + Sz)
9909 End = Cnt;
9911 for (auto [Cnt, Sz] : Slices) {
9912 ArrayRef<Value *> Slice = VL.slice(Cnt, Sz);
9917 AddCombinedNode(SE->Idx, Cnt, Sz);
9937 AddCombinedNode(PrevSize, Cnt, Sz);
18823 for (unsigned Cnt = StartIdx; Cnt + Size <= Sz;) {
18824 if (!checkTreeSizes(RangeSizes.slice(Cnt, Size),
18826 ++Cnt;
18829 ArrayRef<Value *> Slice = ArrayRef(Operands).slice(Cnt, Size);
18844 Cnt += NonSchedSizeMax;
18850 vectorizeStoreChain(Slice, R, Cnt, MinVF, TreeSize);
18865 for_each(RangeSizes.slice(Cnt, Size),
18869 if (Cnt < StartIdx + MinVF) {
18870 for_each(RangeSizes.slice(StartIdx, Cnt - StartIdx),
18874 StartIdx = Cnt + Size;
18876 if (Cnt > Sz - Size - MinVF) {
18877 for_each(RangeSizes.slice(Cnt + Size, Sz - (Cnt + Size)),
18882 End = Cnt;
18883 Sz = Cnt;
18885 Cnt += Size;
18889 !all_of(RangeSizes.slice(Cnt, Size),
18892 Cnt += Size;
18898 all_of(RangeSizes.slice(Cnt, Size),
18901 Cnt += Size;
18902 while (Cnt != Sz && RangeSizes[Cnt].first == TreeSize)
18903 ++Cnt;
18907 for_each(RangeSizes.slice(Cnt, Size),
18914 ++Cnt;
19940 for (unsigned Cnt = 0, Sz = OrigReducedVals.size(); Cnt < Sz; ++Cnt) {
19941 Value *RdxVal = TrackedVals.at(OrigReducedVals[Cnt]);
19952 TrackedToOrig.try_emplace(RdxVal, OrigReducedVals[Cnt]);
20040 unsigned Cnt = At(SameValuesCounter, OrigV);
20042 emitScaleForReusedOps(Candidates.front(), Builder, Cnt);
20044 VectorizedVals.try_emplace(OrigV, Cnt);
20159 for (unsigned Cnt = 0, Sz = ReducedVals.size(); Cnt < Sz; ++Cnt) {
20160 if (Cnt == I || (ShuffledExtracts && Cnt == I - 1))
20162 for (Value *V : ReducedVals[Cnt])
20170 for (unsigned Cnt = 0; Cnt < NumReducedVals; ++Cnt) {
20171 if (Cnt >= Pos && Cnt < Pos + ReduxWidth)
20173 Value *V = Candidates[Cnt];
20182 for (unsigned Cnt = 0; Cnt < NumReducedVals; ++Cnt) {
20183 if (Cnt >= Pos && Cnt < Pos + ReduxWidth)
20185 Value *RdxVal = Candidates[Cnt];
20521 int Cnt = ReducedVals.size();
20523 if (Cnt == 1)
20525 --Cnt;
20638 /// Emits optimized code for unique scalar value reused \p Cnt times.
20640 unsigned Cnt) {
20644 if (Cnt == 1)
20649 Value *Scale = ConstantInt::get(VectorizedValue->getType(), Cnt);
20650 LLVM_DEBUG(dbgs() << "SLP: Add (to-mul) " << Cnt << "of "
20656 LLVM_DEBUG(dbgs() << "SLP: Xor " << Cnt << "of " << VectorizedValue
20658 if (Cnt % 2 == 0)
20664 Value *Scale = ConstantFP::get(VectorizedValue->getType(), Cnt);
20665 LLVM_DEBUG(dbgs() << "SLP: FAdd (to-fmul) " << Cnt << "of "
20716 unsigned Cnt = SameValuesCounter.lookup(TrackedToOrig.at(V));
20717 Vals.push_back(ConstantInt::get(V->getType(), Cnt, /*IsSigned=*/false));
20754 unsigned Cnt = SameValuesCounter.lookup(TrackedToOrig.at(V));
20755 if (Cnt % 2 == 0) {
20774 unsigned Cnt = SameValuesCounter.lookup(TrackedToOrig.at(V));
20775 Vals.push_back(ConstantFP::get(V->getType(), Cnt));