Lines Matching full:offsets
85 // Decompose a ptr into Base and Offsets, potentially using a GEP to return a
86 // scalar base and vector offsets, or else fall back to using a base of 0 and
88 Value *decomposePtr(Value *Ptr, Value *&Offsets, int &Scale,
91 // Check for a getelementptr and deduce base and offsets from it, on success
92 // returning the base directly and the offsets indirectly using the Offsets
94 Value *decomposeGEP(Value *&Offsets, FixedVectorType *Ty,
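The split these two declarations describe is simplest when the pointer already comes from a single-index getelementptr. A minimal sketch of that case, using only what the matched lines show (the real decomposeGEP does further type checking; the function name here is illustrative):

    #include "llvm/IR/DerivedTypes.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    static Value *sketchDecomposeGEP(GetElementPtrInst *GEP, Value *&Offsets) {
      if (!GEP || GEP->getNumIndices() != 1)
        return nullptr;                 // only "base + one index" is handled
      Offsets = GEP->getOperand(1);     // the (hopefully vector) index
      if (!isa<FixedVectorType>(Offsets->getType()))
        return nullptr;                 // scalar index: nothing to gather over
      return GEP->getOperand(0);        // the scalar base pointer
    }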
107 // Create a gather from a base + vector of offsets
121 // Create a scatter to a base + vector of offsets
122 Instruction *tryCreateMaskedScatterOffset(IntrinsicInst *I, Value *Offsets,
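"Base + vector of offsets" is the addressing form of the MVE gather/scatter instructions themselves. As a hedged illustration of the semantics being targeted (plain C++, not code from the pass):

    // Per-lane behaviour of a gather: one load per lane, indexed off a
    // common base. A scatter is the mirrored store. T and N are illustrative.
    template <typename T, unsigned N>
    void gatherSemantics(const T *Base, const int (&Offsets)[N], T (&Out)[N]) {
      for (unsigned I = 0; I != N; ++I)
        Out[I] = Base[Offsets[I]];
    }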
133 // QI gathers and scatters can increment their offsets on their own if
137 // QI gathers/scatters can increment their offsets on their own if the
144 // Optimise the base and offsets of the given address
147 Value *foldGEP(GetElementPtrInst *GEP, Value *&Offsets, unsigned &Scale,
149 // Check whether these offsets could be moved out of the loop they're in
150 bool optimiseOffsets(Value *Offsets, BasicBlock *BB, LoopInfo *LI);
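Whether offsets "could be moved out of the loop" comes down to splitting them into a loop-carried part and a loop-invariant part. A minimal sketch of that test, assuming the simplest shape the pass cares about (an add of a loop phi and an invariant value; the helper name is made up):

    #include "llvm/Analysis/LoopInfo.h"
    #include "llvm/IR/Instructions.h"
    #include <utility>
    using namespace llvm;

    static bool isHoistableOffset(Instruction *Offs, BasicBlock *BB,
                                  LoopInfo *LI) {
      Loop *L = LI->getLoopFor(BB);
      if (!L || Offs->getOpcode() != Instruction::Add)
        return false;
      Value *A = Offs->getOperand(0), *B = Offs->getOperand(1);
      if (!isa<PHINode>(A))
        std::swap(A, B);                // put the loop phi first, if any
      return isa<PHINode>(A) && L->isLoopInvariant(B);
    }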
184 static bool checkOffsetSize(Value *Offsets, unsigned TargetElemCount) {
185 // Offsets that are not of type <N x i32> are sign extended by the
188 // positive offsets - i.e., the offsets are not allowed to be variables we
190 // Additionally, <N x i32> offsets have to either originate from a zext of a
196 unsigned OffsetElemSize = cast<FixedVectorType>(Offsets->getType())
200 Constant *ConstOff = dyn_cast<Constant>(Offsets);
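Reading lines 184-200 together: offsets narrower than i32 get sign-extended on the way to the hardware, so they are only safe when provably non-negative, which in practice means constants (checkable lane by lane) or values coming from a zext. A simplified sketch of the constant half of that check (illustrative, not the pass's exact code):

    #include "llvm/IR/Constants.h"
    #include "llvm/IR/DerivedTypes.h"
    using namespace llvm;

    static bool allElementsNonNegative(Constant *ConstOff) {
      auto *VT = cast<FixedVectorType>(ConstOff->getType());
      for (unsigned I = 0, E = VT->getNumElements(); I != E; ++I) {
        auto *CE =
            dyn_cast_or_null<ConstantInt>(ConstOff->getAggregateElement(I));
        if (!CE || CE->isNegative())
          return false;   // unknown or negative lane: sign extension unsafe
      }
      return true;
    }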
226 Value *MVEGatherScatterLowering::decomposePtr(Value *Ptr, Value *&Offsets,
231 if (Value *V = decomposeGEP(Offsets, Ty, GEP, Builder)) {
240 // BasePtr of 0 with Ptr as the Offsets, so long as there are only 4
247 Offsets = Builder.CreatePtrToInt(
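Lines 240-247 show the fallback when no usable GEP exists: treat the pointer vector itself as offsets from a base of 0. A hedged sketch of that conversion (the 4-element restriction mirrors line 240; the wrapper is illustrative):

    #include "llvm/IR/IRBuilder.h"
    using namespace llvm;

    static Value *sketchFallbackOffsets(Value *Ptr, IRBuilder<> &Builder) {
      auto *PtrTy = dyn_cast<FixedVectorType>(Ptr->getType());
      if (!PtrTy || PtrTy->getNumElements() != 4)
        return nullptr;   // only 4 x 32-bit lanes fit the zero-base form
      // Reinterpret the pointers themselves as i32 offsets from base 0.
      return Builder.CreatePtrToInt(
          Ptr, FixedVectorType::get(Builder.getInt32Ty(), 4));
    }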
253 Value *MVEGatherScatterLowering::decomposeGEP(Value *&Offsets,
263 << " Looking at intrinsic for base + vector of offsets\n");
265 Offsets = GEP->getOperand(1);
267 !isa<FixedVectorType>(Offsets->getType()))
275 Offsets = GEP->getOperand(1);
277 cast<FixedVectorType>(Offsets->getType())->getNumElements();
281 ZExtInst *ZextOffs = dyn_cast<ZExtInst>(Offsets);
283 Offsets = ZextOffs->getOperand(0);
284 FixedVectorType *OffsetType = cast<FixedVectorType>(Offsets->getType());
286 // If the offsets are already being zext-ed to <N x i32>, that relieves us of
291 if (!checkOffsetSize(Offsets, OffsetsElemCount))
296 if (Ty != Offsets->getType()) {
299 Offsets = Builder.CreateTrunc(Offsets, Ty);
301 Offsets = Builder.CreateZExt(Offsets, VectorType::getInteger(Ty));
305 LLVM_DEBUG(dbgs() << "masked gathers/scatters: found correct offsets\n");
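Lines 281-301 sketch the normalisation: look through an existing zext so the narrower source can be used directly, then force whatever remains to the <N x i32> type the intrinsics expect. Condensed into one hedged helper (the surrounding legality checks are elided):

    #include "llvm/IR/IRBuilder.h"
    using namespace llvm;

    static Value *normaliseOffsets(Value *Offsets, FixedVectorType *Ty,
                                   IRBuilder<> &Builder) {
      if (auto *ZExt = dyn_cast<ZExtInst>(Offsets))
        Offsets = ZExt->getOperand(0);   // the pre-extension value is enough
      if (Offsets->getType() != Ty) {
        if (Offsets->getType()->getScalarSizeInBits() >
            Ty->getScalarSizeInBits())
          Offsets = Builder.CreateTrunc(Offsets, Ty);   // too wide: truncate
        else
          Offsets = Builder.CreateZExt(Offsets, VectorType::getInteger(Ty));
      }
      return Offsets;
    }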
547 Value *Offsets;
550 Ptr, Offsets, Scale, cast<FixedVectorType>(ResultTy), MemoryTy, Builder);
560 {ResultTy, BasePtr->getType(), Offsets->getType(), Mask->getType()},
561 {BasePtr, Offsets, Builder.getInt32(MemoryTy->getScalarSizeInBits()),
566 {ResultTy, BasePtr->getType(), Offsets->getType()},
567 {BasePtr, Offsets, Builder.getInt32(MemoryTy->getScalarSizeInBits()),
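Lines 560-567 show both forms of the gather call. Reassembled into one hedged function (operand meanings inferred from the fragments: memory element width in bits, the offset scale, a flag for zero- vs sign-extending loads, and optionally the predicate):

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/IntrinsicsARM.h"
    using namespace llvm;

    static Value *sketchCreateGather(IRBuilder<> &Builder, Type *ResultTy,
                                     Value *BasePtr, Value *Offsets,
                                     Type *MemoryTy, unsigned Scale,
                                     bool Unsigned, Value *Mask) {
      Value *Width = Builder.getInt32(MemoryTy->getScalarSizeInBits());
      if (Mask)   // predicated form carries the mask type and operand
        return Builder.CreateIntrinsic(
            Intrinsic::arm_mve_vldr_gather_offset_predicated,
            {ResultTy, BasePtr->getType(), Offsets->getType(),
             Mask->getType()},
            {BasePtr, Offsets, Width, Builder.getInt32(Scale),
             Builder.getInt32(Unsigned), Mask});
      return Builder.CreateIntrinsic(
          Intrinsic::arm_mve_vldr_gather_offset,
          {ResultTy, BasePtr->getType(), Offsets->getType()},
          {BasePtr, Offsets, Width, Builder.getInt32(Scale),
           Builder.getInt32(Unsigned)});
    }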
670 << " to base + vector of offsets\n");
700 Value *Offsets;
703 Ptr, Offsets, Scale, cast<FixedVectorType>(InputTy), MemoryTy, Builder);
712 {BasePtr->getType(), Offsets->getType(), Input->getType(),
714 {BasePtr, Offsets, Input,
720 {BasePtr->getType(), Offsets->getType(), Input->getType()},
721 {BasePtr, Offsets, Input,
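The scatter mirror from lines 712-721, with the stored Input taking the place of a result type and no extension flag needed on a store (same caveats and headers as the gather sketch):

    static Value *sketchCreateScatter(IRBuilder<> &Builder, Value *BasePtr,
                                      Value *Offsets, Value *Input,
                                      Type *MemoryTy, unsigned Scale,
                                      Value *Mask) {
      Value *Width = Builder.getInt32(MemoryTy->getScalarSizeInBits());
      if (Mask)
        return Builder.CreateIntrinsic(
            Intrinsic::arm_mve_vstr_scatter_offset_predicated,
            {BasePtr->getType(), Offsets->getType(), Input->getType(),
             Mask->getType()},
            {BasePtr, Offsets, Input, Width, Builder.getInt32(Scale), Mask});
      return Builder.CreateIntrinsic(
          Intrinsic::arm_mve_vstr_scatter_offset,
          {BasePtr->getType(), Offsets->getType(), Input->getType()},
          {BasePtr, Offsets, Input, Width, Builder.getInt32(Scale)});
    }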
742 // Decompose the GEP into Base and Offsets
744 Value *Offsets;
745 Value *BasePtr = decomposeGEP(Offsets, Ty, GEP, Builder);
752 // The GEP was in charge of making sure the offsets are scaled correctly
765 if (auto *Load = tryCreateIncrementingWBGatScat(I, BasePtr, Offsets,
773 std::pair<Value *, int64_t> Add = getVarAndConst(Offsets, TypeScale);
779 // Make sure the offsets are scaled correctly
785 // Add the base to the offsets
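Line 773's getVarAndConst splits an offset of the shape "variable + constant" so the constant part can become the instruction's immediate. A hedged model of what such a helper returns (the real one performs the scaling shown here; TypeScale shifts element counts to bytes):

    #include "llvm/IR/Constants.h"
    #include "llvm/IR/Instructions.h"
    #include <utility>
    using namespace llvm;

    static std::pair<Value *, int64_t> sketchGetVarAndConst(Value *Offsets,
                                                            int TypeScale) {
      auto *Add = dyn_cast<Instruction>(Offsets);
      if (!Add || Add->getOpcode() != Instruction::Add)
        return {nullptr, 0};            // not the var + const shape
      if (auto *C = dyn_cast<ConstantInt>(Add->getOperand(1)))
        return {Add->getOperand(0),     // variable part stays a register
                C->getSExtValue() << TypeScale};  // immediate, in bytes
      return {nullptr, 0};
    }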
802 IntrinsicInst *I, Value *BasePtr, Value *Offsets, unsigned TypeScale,
807 // Offsets that are worth merging into this instruction will be incremented
809 PHINode *Phi = dyn_cast<PHINode>(Offsets);
821 Offsets = Phi->getIncomingValue(IncrementIndex);
823 std::pair<Value *, int64_t> Add = getVarAndConst(Offsets, TypeScale);
837 // Make sure the offsets are scaled correctly
843 // Add the base to the offsets
879 Instruction *AddInst = cast<Instruction>(Offsets);
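Lines 809-823 outline the write-back pattern match: the offsets must be a two-entry loop phi whose back-edge value increments the phi itself. A condensed, illustrative version of that shape test:

    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    static bool looksLikeWBOffsets(Value *Offsets, unsigned IncrementIndex) {
      auto *Phi = dyn_cast<PHINode>(Offsets);
      if (!Phi || Phi->getNumIncomingValues() != 2)
        return false;                   // need exactly entry + back edge
      auto *Inc =
          dyn_cast<Instruction>(Phi->getIncomingValue(IncrementIndex));
      // The back-edge value must be an add feeding new offsets back into
      // the phi; its constant part becomes the write-back increment.
      return Inc && Inc->getOpcode() == Instruction::Add &&
             (Inc->getOperand(0) == Phi || Inc->getOperand(1) == Phi);
    }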
944 // Check whether all usages of this instruction are as offsets of
971 bool MVEGatherScatterLowering::optimiseOffsets(Value *Offsets, BasicBlock *BB,
974 << *Offsets << "\n");
977 if (!isa<Instruction>(Offsets))
979 Instruction *Offs = cast<Instruction>(Offsets);
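Line 944's check is the safety condition for the rewrite: an offset instruction may only be transformed if every use is as a gather/scatter offset, since the rewrite changes the values it produces. Sketched with an illustrative signature:

    #include "llvm/ADT/STLExtras.h"
    #include "llvm/IR/IntrinsicInst.h"
    using namespace llvm;

    static bool allUsesAreGSOffsets(Instruction *Offs,
                                    ArrayRef<IntrinsicInst *> GathersScatters) {
      for (User *U : Offs->users())
        if (!is_contained(GathersScatters, dyn_cast<IntrinsicInst>(U)))
          return false;   // some other computation sees these offsets
      return true;
    }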
1147 LLVM_DEBUG(dbgs() << "masked gathers/scatters: incompatible gep offsets\n");
1188 Value *&Offsets, unsigned &Scale,
1191 Offsets = GEP->getOperand(1);
1193 // We only merge GEPs with constant offsets, because only for those
1195 if (GEP->getNumIndices() != 1 || !isa<Constant>(Offsets))
1199 Value *BaseBasePtr = foldGEP(BaseGEP, Offsets, Scale, Builder);
1202 Offsets = CheckAndCreateOffsetAdd(
1203 Offsets, Scale, GEP->getOperand(1),
1205 if (Offsets == nullptr)
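Lines 1188-1205 describe the merge: walk down a chain of single-index GEPs with constant indices, accumulating the constants, and return the innermost base. A simplified recursion (the real CheckAndCreateOffsetAdd also reconciles types and scales, elided here):

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/Instructions.h"
    using namespace llvm;

    static Value *sketchFoldGEP(GetElementPtrInst *GEP, Value *&Offsets,
                                IRBuilder<> &Builder) {
      Offsets = GEP->getOperand(1);
      if (GEP->getNumIndices() != 1 || !isa<Constant>(Offsets))
        return nullptr;                 // only constant single indices merge
      Value *Base = GEP->getOperand(0);
      auto *BaseGEP = dyn_cast<GetElementPtrInst>(Base);
      if (!BaseGEP)
        return Base;                    // innermost base reached
      Value *InnerOffsets = nullptr;
      Value *InnerBase = sketchFoldGEP(BaseGEP, InnerOffsets, Builder);
      if (!InnerBase)
        return nullptr;
      // Fold the two constant offsets together (type checks elided).
      Offsets = Builder.CreateAdd(InnerOffsets, GEP->getOperand(1));
      return InnerBase;
    }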
1223 Value *Offsets;
1225 Value *Base = foldGEP(GEP, Offsets, Scale, Builder);
1230 if (Offsets && Base && Base != GEP) {
1236 Builder.getInt8Ty(), Builder.CreateBitCast(Base, BaseTy), Offsets,
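Lines 1223-1236 then consume the fold: if a different base came back, the original chain is replaced by one byte-addressed GEP. Sketched usage, relying on the pass's context for GEP, Builder, and foldGEP (BaseTy and the replace step are assumptions filled in for illustration):

    // Hypothetical driver; Scale handling and legality checks elided.
    Value *Offsets = nullptr;
    unsigned Scale = 0;
    if (Value *Base = foldGEP(GEP, Offsets, Scale, Builder))
      if (Offsets && Base != GEP) {
        Type *BaseTy = PointerType::get(Builder.getInt8Ty(), 0);  // assumed i8*
        Value *NewGEP = Builder.CreateGEP(
            Builder.getInt8Ty(), Builder.CreateBitCast(Base, BaseTy), Offsets);
        GEP->replaceAllUsesWith(NewGEP);  // assumed follow-up, not shown above
      }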