Lines Matching defs:linalgOp

130                                      LinalgOp linalgOp) {
131   assert(linalgOp.hasPureBufferSemantics() &&
134   indexedValues.reserve(linalgOp->getNumOperands());
141   for (OpOperand *inputOperand : linalgOp.getDpsInputOperands()) {
142     if (linalgOp.isScalar(inputOperand)) {
147         b, loc, linalgOp.getMatchingIndexingMap(inputOperand), allIvsPlusDims);
152   for (OpOperand &outputOperand : linalgOp.getDpsInitsMutable()) {
154         b, loc, linalgOp.getMatchingIndexingMap(&outputOperand),
165   for (OpOperand &outputOperand : linalgOp.getDpsInitsMutable()) {
169         b, loc, linalgOp.getMatchingIndexingMap(&outputOperand),
173   inlineRegionAndEmitStore<LoadOpTy, StoreOpTy>(b, loc, linalgOp, indexedValues,
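
The lines above (130-173) are from emitScalarImplementation: scalar inputs are forwarded unchanged, while shaped inputs and outputs are loaded at subscripts derived from each operand's indexing map before the op's region is inlined and the results stored back. A minimal sketch of the input-side pattern, assuming pure buffer semantics; `applyIndexingMap` and `loadInputValues` are hypothetical names used only for illustration, standing in for the file's own helpers on the elided lines:

```cpp
#include "mlir/Dialect/Affine/IR/AffineOps.h"
#include "mlir/Dialect/Linalg/IR/Linalg.h"
#include "mlir/Dialect/MemRef/IR/MemRef.h"
#include "mlir/IR/Builders.h"

using namespace mlir;

// Hypothetical helper: one affine.apply per result of `map`, fed by the loop
// IVs (assumes the map has no symbols, as is the case for linalg indexing maps).
static SmallVector<Value> applyIndexingMap(OpBuilder &b, Location loc,
                                           AffineMap map, ArrayRef<Value> ivs) {
  SmallVector<Value> subscripts;
  for (AffineExpr expr : map.getResults())
    subscripts.push_back(b.create<affine::AffineApplyOp>(
        loc, AffineMap::get(map.getNumDims(), map.getNumSymbols(), expr), ivs));
  return subscripts;
}

// Sketch of the input side shown above: scalars pass through, memref inputs
// are loaded at the subscripts produced by their matching indexing map.
static SmallVector<Value> loadInputValues(OpBuilder &b, Location loc,
                                          linalg::LinalgOp linalgOp,
                                          ArrayRef<Value> allIvs) {
  SmallVector<Value> indexedValues;
  for (OpOperand *inputOperand : linalgOp.getDpsInputOperands()) {
    if (linalgOp.isScalar(inputOperand)) {
      indexedValues.push_back(inputOperand->get());
      continue;
    }
    AffineMap map = linalgOp.getMatchingIndexingMap(inputOperand);
    SmallVector<Value> subscripts = applyIndexingMap(b, loc, map, allIvs);
    indexedValues.push_back(
        b.create<memref::LoadOp>(loc, inputOperand->get(), subscripts));
  }
  return indexedValues;
}
```
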
180                                                 LinalgOp linalgOp,
197   assert(linalgOp.getNumLoops() == allIvs.size() &&
210                                                   LinalgOp linalgOp) {
220   assert(linalgOp.hasPureBufferSemantics() &&
223   auto loopRanges = linalgOp.createLoopRanges(rewriter, linalgOp.getLoc());
224   auto iteratorTypes = linalgOp.getIteratorTypesArray();
228       rewriter, linalgOp.getLoc(), loopRanges, linalgOp, iteratorTypes,
231         assert(operandValuesToUse == linalgOp->getOperands() &&
234         emitScalarImplementation<LoadOpTy, StoreOpTy>(b, loc, allIvs, linalgOp);
252   replaceIndexOpsByInductionVariables(rewriter, linalgOp, loops);
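
Lines 210-252 are the body of linalgOpToLoopsImpl: the op supplies its own loop ranges and iterator kinds, a loop nest is generated from them, the scalar body is emitted per iteration, and linalg.index ops are then rewritten to the induction variables. A small sketch of the metadata-query step, assuming a valid builder; `countParallelLoops` is a hypothetical illustration, not a function from the file:

```cpp
#include "mlir/Dialect/Linalg/IR/Linalg.h"
#include "mlir/Dialect/Utils/StructuredOpsUtils.h"
#include "llvm/ADT/STLExtras.h"

using namespace mlir;

// Sketch: query the per-loop metadata that linalgOpToLoopsImpl hands to the
// loop-nest generator: one Range (offset/size/stride) and one iterator kind
// per loop dimension of the linalg op.
static unsigned countParallelLoops(OpBuilder &b, linalg::LinalgOp linalgOp) {
  SmallVector<Range> loopRanges =
      linalgOp.createLoopRanges(b, linalgOp.getLoc());
  SmallVector<utils::IteratorType> iteratorTypes =
      linalgOp.getIteratorTypesArray();
  assert(loopRanges.size() == iteratorTypes.size() &&
         "expected one range per loop dimension");
  return static_cast<unsigned>(
      llvm::count(iteratorTypes, utils::IteratorType::parallel));
}
```
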
265     auto linalgOp = dyn_cast<LinalgOp>(op);
266     if (!isa<LinalgOp>(op) || !linalgOp.hasPureBufferSemantics()) {
270     if (failed(linalgOpToLoopsImpl<LoopType>(rewriter, linalgOp)))
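
Lines 265-270 sit inside a rewrite pattern and show the guard the lowering applies before doing any work: the op must implement LinalgOp and must have pure buffer semantics (tensor-semantics ops are expected to be bufferized first). A hedged sketch of the same guard in a standalone pattern, calling the public linalgOpToLoops entry point rather than the file-internal linalgOpToLoopsImpl; the pattern name is made up:

```cpp
#include "mlir/Dialect/Linalg/IR/Linalg.h"
#include "mlir/Dialect/Linalg/Transforms/Transforms.h"
#include "mlir/IR/PatternMatch.h"

using namespace mlir;

// Hypothetical pattern: lower any buffer-semantics linalg op to scf.for loops.
struct LowerLinalgToSCFLoops : RewritePattern {
  LowerLinalgToSCFLoops(MLIRContext *ctx)
      : RewritePattern(MatchAnyOpTypeTag(), /*benefit=*/1, ctx) {}

  LogicalResult matchAndRewrite(Operation *op,
                                PatternRewriter &rewriter) const override {
    // Same guard as in the fragment above: LinalgOp interface plus pure
    // buffer semantics.
    auto linalgOp = dyn_cast<linalg::LinalgOp>(op);
    if (!linalgOp || !linalgOp.hasPureBufferSemantics())
      return rewriter.notifyMatchFailure(
          op, "expected linalg op with buffer semantics");
    if (failed(linalg::linalgOpToLoops(rewriter, linalgOp)))
      return failure();
    // The lowering leaves the original op in place; the caller erases it.
    rewriter.eraseOp(op);
    return success();
  }
};
```
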
361 /// Emits a loop nest of `affine.for` with the proper body for `linalgOp`.
363 mlir::linalg::linalgOpToAffineLoops(RewriterBase &rewriter, LinalgOp linalgOp) {
364   return linalgOpToLoopsImpl<affine::AffineForOp>(rewriter, linalgOp);
367 /// Emits a loop nest of `scf.for` with the proper body for `linalgOp`.
369                                                      LinalgOp linalgOp) {
370   return linalgOpToLoopsImpl<scf::ForOp>(rewriter, linalgOp);
373 /// Emits a loop nest of `scf.parallel` with the proper body for `linalgOp`.
376                                       LinalgOp linalgOp) {
377   return linalgOpToLoopsImpl<scf::ParallelOp>(rewriter, linalgOp);
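
All three entry points take the same (RewriterBase &, LinalgOp) arguments and differ only in the loop ops they materialize (affine.for, scf.for, scf.parallel); the body emission is shared through linalgOpToLoopsImpl. A usage sketch that dispatches between them, assuming the FailureOr<LinalgLoops> return type; the LoopDialect enum and lowerToLoops wrapper are illustrative, not part of the API:

```cpp
#include "mlir/Dialect/Linalg/IR/Linalg.h"
#include "mlir/Dialect/Linalg/Transforms/Transforms.h"
#include "mlir/IR/PatternMatch.h"
#include "llvm/Support/ErrorHandling.h"

using namespace mlir;

// Hypothetical selector over the three lowering entry points shown above.
enum class LoopDialect { Affine, SCF, SCFParallel };

static FailureOr<linalg::LinalgLoops>
lowerToLoops(RewriterBase &rewriter, linalg::LinalgOp linalgOp,
             LoopDialect kind) {
  // All three entry points require pure buffer semantics.
  if (!linalgOp.hasPureBufferSemantics())
    return failure();
  switch (kind) {
  case LoopDialect::Affine:
    return linalg::linalgOpToAffineLoops(rewriter, linalgOp);   // affine.for
  case LoopDialect::SCF:
    return linalg::linalgOpToLoops(rewriter, linalgOp);         // scf.for
  case LoopDialect::SCFParallel:
    return linalg::linalgOpToParallelLoops(rewriter, linalgOp); // scf.parallel
  }
  llvm_unreachable("unknown loop dialect");
}
```
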