Lines matching defs:tensor

611 static Value expandRank(PatternRewriter &rewriter, Location loc, Value tensor,
614 auto tensorType = dyn_cast<RankedTensorType>(tensor.getType());
615 assert(tensorType && "expected a ranked tensor type");
618 assert(numExtraDims >= 0 && "cannot expand tensor to a lower rank");
620 return tensor;
642 // Emit 'tensor.expand_shape' op
643 return rewriter.create<tensor::ExpandShapeOp>(loc, resultType, tensor,
670 IndexPool &indexPool, Value tensor, int64_t index) {
672 return rewriter.create<tensor::DimOp>(loc, tensor, indexValue).getResult();
676 IndexPool &indexPool, Value tensor,
678 auto shapedType = dyn_cast<ShapedType>(tensor.getType());
682 return getTensorDim(rewriter, loc, indexPool, tensor, index);
802 // Emit 'tensor.empty' op
810 Value outputTensor = opBuilder.create<tensor::EmptyOp>(
826 auto castResultTensor = rewriter.createOrFold<tensor::CastOp>(
879 // Generate output tensor
885 Value outputTensor = rewriter.create<tensor::EmptyOp>(
930 auto castResult = rewriter.createOrFold<tensor::CastOp>(
1086 dynDims.push_back(rewriter.create<tensor::DimOp>(loc, input, i));
1093 .create<tensor::EmptyOp>(loc, reduceShape, resultTy.getElementType(),
1140 // Lower directly to `tensor::ExpandShapeOp` instead of `tosa::ReshapeOp`,
1144 rewriter.replaceOpWithNewOp<tensor::ExpandShapeOp>(
1188 dynDims.push_back(rewriter.create<tensor::DimOp>(loc, input, i));
1262 Value emptyTensor = rewriter.create<tensor::EmptyOp>(
1393 Value collapse = builder.create<tensor::CollapseShapeOp>(collapseTy, input,
1399 outputDynSize.push_back(builder.create<tensor::DimOp>(input, 0));
1401 outputDynSize.push_back(builder.create<tensor::DimOp>(input, 3));
1405 Value empty = builder.create<tensor::EmptyOp>(
1437 rewriter.replaceOpWithNewOp<tensor::ExpandShapeOp>(
1504 Value collapse = builder.create<tensor::CollapseShapeOp>(collapseTy, resize,
1510 outputDynSize.push_back(builder.create<tensor::DimOp>(input, 0));
1512 outputDynSize.push_back(builder.create<tensor::DimOp>(input, 3));
1516 Value empty = builder.create<tensor::EmptyOp>(
1573 auto emptyTensor = b.create<tensor::EmptyOp>(resultTy.getShape(), resultETy,
1694 Value result = b.create<tensor::ExtractOp>(
1725 Value y0x0 = b.create<tensor::ExtractOp>(
1727 Value y0x1 = b.create<tensor::ExtractOp>(
1729 Value y1x0 = b.create<tensor::ExtractOp>(
1731 Value y1x1 = b.create<tensor::ExtractOp>(
1851 dynDims.push_back(rewriter.create<tensor::DimOp>(loc, input, i));
1855 Value axisDimSize = rewriter.create<tensor::DimOp>(loc, input, axis);
1859 .create<tensor::EmptyOp>(loc, inputTy.getShape(),
1885 auto extract = nestedBuilder.create<tensor::ExtractOp>(
1927 dynDims.push_back(rewriter.create<tensor::DimOp>(loc, input, i));
1931 auto emptyTensor = rewriter.create<tensor::EmptyOp>(
1998 dynDims.push_back(rewriter.create<tensor::DimOp>(loc, input, i));
2004 .create<tensor::EmptyOp>(loc, resultTy.getShape(),
2017 .create<tensor::EmptyOp>(loc, resultTy.getShape(),
2118 .create<tensor::EmptyOp>(loc, resultTy.getShape(), resultElementTy,
2139 Value extract = rewriter.create<tensor::ExtractOp>(
2154 auto sz = tensor::getMixedSize(builder, loc, source, dim);
2190 rewriter.create<tensor::DimOp>(loc, op.getOperand(0), i));
2195 .create<tensor::EmptyOp>(loc, resultTy.getShape(),
2224 rewriter.create<tensor::ExtractOp>(loc, table, ValueRange{index});
2263 rewriter.create<tensor::ExtractOp>(loc, table, ValueRange{index});
2264 Value next = rewriter.create<tensor::ExtractOp>(
2311 auto dims = tensor::getMixedSizes(builder, loc, input);
2328 rewriter.create<tensor::EmptyOp>(loc, type, dynamicSizes);
2401 auto dimH = rewriter.createOrFold<tensor::DimOp>(loc, input, 1);
2402 auto dimW = rewriter.createOrFold<tensor::DimOp>(loc, input, 2);
2491 auto dims = tensor::getMixedSizes(rewriter, loc, input_real);
2521 auto dimH = rewriter.createOrFold<tensor::DimOp>(loc, input_real, 1);
2522 auto dimW = rewriter.createOrFold<tensor::DimOp>(loc, input_real, 2);
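
Below is a minimal sketch, not code from the matched file, of the pattern that recurs in many of the matches above: dynamic extents are read back from an input with tensor.dim and then forwarded to tensor.empty to materialize the destination tensor before the lowering builds its linalg ops. The helper name createEmptyLike is hypothetical and only for illustration.

#include "mlir/Dialect/Tensor/IR/Tensor.h"
#include "mlir/IR/Builders.h"
#include "mlir/IR/BuiltinTypes.h"

using namespace mlir;

// Hypothetical helper: build an empty tensor shaped like `input`, querying
// tensor.dim for every dynamic dimension, in the same way the matched
// lowerings collect `dynDims` before creating their output tensors.
static Value createEmptyLike(OpBuilder &builder, Location loc, Value input) {
  auto type = cast<RankedTensorType>(input.getType());
  SmallVector<Value> dynDims;
  for (int64_t i = 0; i < type.getRank(); ++i)
    if (type.isDynamicDim(i))
      dynDims.push_back(builder.create<tensor::DimOp>(loc, input, i));
  // tensor.empty takes the static shape plus one SSA value per dynamic dim.
  return builder.create<tensor::EmptyOp>(loc, type.getShape(),
                                         type.getElementType(), dynDims);
}

The same shape information can also be gathered as OpFoldResults with tensor::getMixedSizes, which some of the later matches use instead of explicit tensor::DimOp loops.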