
Searched full text for "shape" (results 1–25 of 1363), sorted by relevance.


/llvm-project/mlir/test/Dialect/Shape/
ops.mlir
7 func.func @shape_num_elements(%shape : !shape.shape) -> !shape.size {
8 %init = shape.const_size 1
9 %num_elements = shape.reduce(%shape, %init) : !shape.shape -> !shape.size {
10 ^bb0(%index : index, %extent : !shape.size, %acc : !shape.size):
11 %acc_next = shape.mul %acc, %extent
12 : !shape.size, !shape.size -> !shape.size
13 shape.yield %acc_next : !shape.size
15 return %num_elements : !shape.size
19 func.func @extent_tensor_num_elements(%shape : tensor<?xindex>) -> index {
21 %num_elements = shape.reduce(%shape, %init) : tensor<?xindex> -> index {
[all …]
canonicalize.mlir
6 // CHECK: shape.const_shape [2, 3, 4] : tensor<3xindex>
7 %0 = shape.shape_of %arg0 : tensor<2x3x4xf32> -> tensor<3xindex>
15 func.func @f() -> (!shape.shape, !shape.shape) {
16 // CHECK-DAG: shape.const_shape [2, 3] : !shape.shape
17 // CHECK-DAG: shape…
[all...]
invalid.mlir
3 func.func @reduce_op_args_num_mismatch(%shape : !shape.shape, %init : !shape.size) {
5 %num_elements = shape.reduce(%shape, %init) : !shape.shape -> !shape.size {
6 ^bb0(%index: index, %dim: !shape.size):
7 shape.yield %dim : !shape.size
14 func.func @reduce_op_arg0_wrong_type(%shape : !shape.shape, %init : !shape.size) {
16 %num_elements = shape.reduce(%shape, %init) : !shape.shape -> !shape.size {
17 ^bb0(%index: f32, %dim: !shape.size, %acc: !shape.size):
18 %new_acc = "shape.add"(%acc, %dim)
19 : (!shape.size, !shape.size) -> !shape.size
20 shape.yield %new_acc : !shape.size
[all …]
arg_with_shape.mlir
1 // RUN: mlir-opt -outline-shape-computation -split-input-file %s 2>%t | FileCheck %s
3 func.func @func1(%arg0: !shape.value_shape, %arg1: !shape.value_shape) -> !shape.shape {
4 %0 = shape.shape_of %arg0 : !shape.value_shape -> !shape.shape
5 %1 = shape.shape_of %arg1 : !shape.value_shape -> !shape.shape
6 %2 = shape.meet %0, %1 : !shape.shape, !shape.shape -> !shape.shape
7 return %2 : !shape.shape
11 func.func @func(%arg0: !shape.value_shape, %arg1: !shape.value_shape) -> !shape.shape {
12 %0 = shape.shape_of %arg0 : !shape.value_shape -> !shape.shape
13 %1 = shape.with_shape %arg1, %0 : !shape.value_shape, !shape.shape
14 %2 = call @func1(%arg0, %1) : (!shape.value_shape, !shape.value_shape) -> !shape.shape
[all …]
remove-shape-constraints.mlir
1 // RUN: mlir-opt -allow-unregistered-dialect -split-input-file -remove-shape-constraints -canonical…
2 // RUN: mlir-opt -allow-unregistered-dialect -split-input-file -remove-shape-constraints <%s | File…
8 func.func @f(%arg0 : !shape.shape, %arg1 : !shape.shape) -> index {
9 // REPLACE-NEXT: %[[WITNESS:.+]] = shape.const_witness true
10 // REPLACE-NOT: shape.cstr_eq
11 // REPLACE: shape.assuming %[[WITNESS]]
14 %0 = shape.cstr_broadcastable %arg0, %arg1 : !shape.shape, !shape.shape
15 %1 = shape.assuming %0 -> index {
17 shape.assuming_yield %2 : index
26 func.func @f(%arg0 : !shape.shape, %arg1 : !shape.shape) -> index {
[all …]
outline-shape-computation.mlir
1 // RUN: mlir-opt -outline-shape-computation -test-print-shape-mapping -split-input-file %s 2>%t | F…
2 // RUN: cat %t | FileCheck %s --check-prefix SHAPE
4 // Two dynamic shapes: one comes directly from shape.shape_of(arg), the other does not.
6 …// SHAPE-DAG: Shape for {{.*}} = "test.abs"({{.*}}> :: @shape_cal_0(<block argument> of type 'tens…
7 …// SHAPE-DAG: Shape for {{.*}} = "test.concat"({{.*}}> :: @shape_cal_1(<block argument> of type 't…
11 %0 = shape.shape_of %arg0 : tensor<?x4x?xf32> -> tensor<3xindex>
12 %1 = shape.get_extent %0, %c2 : tensor<3xindex>, index -> index
14 %3 = shape.with_shape %2, %0 : tensor<?x4x?xf32>, tensor<3xindex>
15 %4 = shape.value_of %3 : tensor<?x4x?xf32>
17 %6 = shape.get_extent %0, %c0 : tensor<3xindex>, index -> index
[all …]
shape-to-shape.mlir
1 // RUN: mlir-opt -shape-to-shape-lowering -split-input-file %s | FileCheck %s
4 // CHECK-SAME: ([[ARG:%.*]]: !shape.shape) -> !shape.size
5 func.func @num_elements_to_reduce(%shape : !shape.shape) -> !shape.size {
6 %num_elements = shape.num_elements %shape : !shape.shape -> !shape.size
7 return %num_elements : !shape.size
9 // CHECK: [[C1:%.*]] = shape.const_size 1
10 // CHECK: [[NUM_ELEMENTS:%.*]] = shape.reduce([[ARG]], [[C1]]) : !shape.shape -> !shape.size
11 // CHECK: ^bb0({{.*}}: index, [[DIM:%.*]]: !shape.size, [[ACC:%.*]]: !shape.size
12 // CHECK: [[NEW_ACC:%.*]] = shape.mul [[DIM]], [[ACC]]
13 // CHECK: shape.yield [[NEW_ACC]] : !shape.size
[all …]
/llvm-project/mlir/docs/Dialects/
ShapeDialect.md
1 # 'shape' Dialect
3 Description of operations & types within the Shape dialect as well as their
4 [usage](#different-stages-of-lowering-shape-dialect).
8 ## Different stages of lowering Shape dialect
11 shape dialect and the lowering between these uses. Currently we have 3 worlds /
12 stages of lowering of shape functions:
15 This "input" form carries both the shape and whether in error state as
31 Starting from the shape function of matmul in the error monadic form
35 shape.function_library @shplib {
37 func.func @matmul(%lhs: !shape.value_shape, %rhs: !shape.value_shape) -> !shape.shape {
[all …]
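
The excerpt above cuts off before the body of the matmul shape function. As a rough sketch only (not the full error-monadic matmul function from the doc, which also verifies ranks and the contraction dimension), a function in such a library could look like this; the op spellings follow the shape-dialect excerpts elsewhere on this page:

shape.function_library @shplib {
  // Simplified: result shape is [rows(lhs), cols(rhs)]. Shape-dialect ops
  // propagate invalid shapes, which gives the error-monadic behavior the
  // doc describes.
  func.func @matmul(%lhs: !shape.value_shape, %rhs: !shape.value_shape) -> !shape.shape {
    %c0 = shape.const_size 0
    %c1 = shape.const_size 1
    %lhs_shape = shape.shape_of %lhs : !shape.value_shape -> !shape.shape
    %rhs_shape = shape.shape_of %rhs : !shape.value_shape -> !shape.shape
    %rows = shape.get_extent %lhs_shape, %c0 : !shape.shape, !shape.size -> !shape.size
    %cols = shape.get_extent %rhs_shape, %c1 : !shape.shape, !shape.size -> !shape.size
    %res = shape.from_extents %rows, %cols : !shape.size, !shape.size
    return %res : !shape.shape
  }
}
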
/llvm-project/mlir/docs/
ShapeInference.md
1 # Shape Inference
3 Shape inference as discussed here is considered a specific instance of type
6 dimensions. While some operations have no compile time fixed shape (e.g., output
7 shape is dictated by data) we could still have some knowledge of
11 shape.
15 `InferShapedTypeOpInterface` is used to implement the shape and element type
16 inference. The return type can often be derived from the deduced return shape
22 ## Shape functions
24 The C++ interfaces are the base mechanism whereby shape inference is queried and
25 executed, but not the intended way to specify shape constraints in general.
[all …]
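
As a small illustration of the shape-function mechanism described above (modeled directly on the test-shape-fn-report.mlir excerpt further down this page; the payload function @f and its body are placeholders), a shape function library can be attached via module and function attributes:

module attributes {shape.lib = [@shape_lib]} {
  // Payload function; its result shape is described by @same_result_shape.
  func.func @f(%arg0: tensor<?xf32>) -> tensor<?xf32>
      attributes {shape.function = @shape_lib::@same_result_shape} {
    return %arg0 : tensor<?xf32>
  }

  shape.function_library @shape_lib {
    // The result shape equals the shape of the input argument.
    func @same_result_shape(%arg: !shape.value_shape) -> !shape.shape {
      %0 = shape_of %arg : !shape.value_shape -> !shape.shape
      return %0 : !shape.shape
    }
  }
}
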
/llvm-project/mlir/include/mlir/Dialect/Shape/IR/
ShapeOps.td
1 //===- ShapeOps.td - Shape operations definition -----------*- tablegen -*-===//
9 // This is the operation definition file for Shape dialect operations.
16 include "mlir/Dialect/Shape/IR/ShapeBase.td"
27 // Shape op definitions
58 let summary = "Returns the broadcasted output shape of two or more inputs";
60 Returns the broadcasted shape for input shapes or extent tensors. The rest
62 to more inputs. Both operands can be of type `shape.shape` or
63 `tensor<?xindex>`. The result is of type `shape.shape` an…
[all...]
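
A minimal sketch of the broadcast op summarized above, using the extent-tensor form and the variadic extension to more than two inputs (the function name is illustrative):

func.func @broadcast_extents(%a: tensor<?xindex>, %b: tensor<?xindex>, %c: tensor<?xindex>) -> tensor<?xindex> {
  // Broadcasts all three extent tensors together; operands may also be
  // !shape.shape values, as the description above notes.
  %0 = shape.broadcast %a, %b, %c : tensor<?xindex>, tensor<?xindex>, tensor<?xindex> -> tensor<?xindex>
  return %0 : tensor<?xindex>
}
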
ShapeBase.td
9 // Base definitions for the `shape` dialect.
20 // Shape Inference dialect definitions
24 let name = "shape";
26 let summary = "Types and operations for shape dialect";
28 This dialect contains operations for shape inference.
30 Note: Unless explicitly stated, all functions that return a shape and take
31 shapes as input return the invalid shape if one of their operands is an
32 invalid shape. This avoids flagging multiple errors for one verification
38 let cppNamespace = "::mlir::shape";
50 def Shape_ShapeType : Shape_Type<"Shape", "shape"> {
[all …]
/llvm-project/mlir/lib/Dialect/Shape/Transforms/
OutlineShapeComputation.cpp
10 #include "mlir/Dialect/Shape/Analysis/ShapeMappingAnalysis.h"
11 #include "mlir/Dialect/Shape/IR/Shape.h"
12 #include "mlir/Dialect/Shape/Transforms/Passes.h"
27 #include "mlir/Dialect/Shape/Transforms/Passes.h.inc"
30 #define DEBUG_TYPE "outline-shape-computation"
63 // Create a shape.func representing the shape computation for `shape`.
64 std::pair<shape…
66 createFuncFromCluster(OpBuilder &b, const SmallVector<Operation *, 8> &cluster, Value shape, StringRef fnName, Location loc) // createFuncFromCluster(): argument
101 Value shape = it.first; // getOrderedClusters(): local
114 for (Value shape : it->second) // getOrderedClusters(): local
135 Value shape = withOp.getShape(); // constructShapeFunc(): local
267 Value shape = withOp.getShape(); // constructClustersForEachShape(): local
277 getClusterFromValue(Value shape, DenseMap<Value, DenseSet<Operation *>> &clusters) // getClusterFromValue(): argument
[all...]
/llvm-project/mlir/include/mlir/Dialect/Shape/Transforms/
Passes.td
14 def OutlineShapeComputation : Pass<"outline-shape-computation", "ModuleOp"> {
15 let summary = "Using shape.func to preserve shape computation";
17 This pass outlines the shape computation part in high-level IR by adding
18 shape.func ops and populates the corresponding mapping information into
19 ShapeMappingAnalysis. The shape computation part is usually introduced by
20 shape reification, and each dynamic shape is denoted by shape.with_shape.
22 There are two main reasons this shape-outline pass is needed:
23 1. Many passes don't take the shape reification part into consideration.
24 Therefore we need to "remove" the shape reification part temporarily for
26 2. Sometimes we cannot redo shape reification after converting from dialect
[all …]
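
A rough before/after sketch of what this pass does, modeled on the outline-shape-computation.mlir test above ("test.abs" stands in for an arbitrary payload op):

// Before: shape reification interleaved with the payload computation.
func.func @main(%arg0: tensor<?x4xf32>) -> tensor<?x4xf32> {
  %0 = shape.shape_of %arg0 : tensor<?x4xf32> -> tensor<2xindex>
  %1 = "test.abs"(%arg0) : (tensor<?x4xf32>) -> tensor<?x4xf32>
  %2 = shape.with_shape %1, %0 : tensor<?x4xf32>, tensor<2xindex>
  %3 = shape.value_of %2 : tensor<?x4xf32>
  return %3 : tensor<?x4xf32>
}
// After: the shape.shape_of / shape.with_shape chain is outlined into a
// shape.func (e.g. @shape_cal_0), and the mapping from the dynamic value to
// that function is recorded in ShapeMappingAnalysis, leaving only the
// payload computation in @main.
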
/llvm-project/llvm/unittests/Analysis/Inputs/ir2native_x86_64_model/
saved_model.pbtxt
34 name: "shape"
35 type: "shape"
37 shape {
131 name: "shape"
132 type: "shape"
150 shape {
162 key: "shape"
164 shape {
189 shape {
214 shape {
[all …]
/llvm-project/llvm/test/Transforms/Attributor/
depgraph.ll
60 ; DOT-DAG: Node[[Node0:0x[a-z0-9]+]] [shape=record,label="{[AAIsDead]
61 ; DOT-DAG: Node[[Node1:0x[a-z0-9]+]] [shape=record,label="{[AAPotentialValues]
62 ; DOT-DAG: Node[[Node2:0x[a-z0-9]+]] [shape=record,label="{[AAPotentialValues]
63 ; DOT-DAG: Node[[Node3:0x[a-z0-9]+]] [shape=record,label="{[AAPotentialValues]
64 ; DOT-DAG: Node[[Node4:0x[a-z0-9]+]] [shape=record,label="{[AAPotentialValues]
65 ; DOT-DAG: Node[[Node5:0x[a-z0-9]+]] [shape=record,label="{[AANoReturn]
66 ; DOT-DAG: Node[[Node6:0x[a-z0-9]+]] [shape=record,label="{[AANoReturn]
67 ; DOT-DAG: Node[[Node7:0x[a-z0-9]+]] [shape=record,label="{[AAIsDead]
68 ; DOT-DAG: Node[[Node8:0x[a-z0-9]+]] [shape=record,label="{[AAWillReturn]
69 ; DOT-DAG: Node[[Node9:0x[a-z0-9]+]] [shape=record,label="{[AAIsDead]
[all …]
/llvm-project/llvm/lib/Transforms/Coroutines/
CoroSplit.cpp
104 coro::Shape &Shape) {
160 Shape.SymmetricTransfers.push_back(ResumeCall);
169 static void lowerAwaitSuspends(Function &F, coro::Shape &Shape) {
171 for (auto *AWS : Shape.CoroAwaitSuspends)
172 lowerAwaitSuspend(Builder, AWS, Shape);
176 const coro::Shape &Shape, Value *FramePtr,
178 assert(Shape… // lowerAwaitSuspend(): argument
106 coro::Shape &Shape; // member of __anon631b365a0111::CoroCloner
120 CoroCloner(Function &OrigF, const Twine &Suffix, coro::Shape &Shape, Kind FKind, TargetTransformInfo &TTI) // CoroCloner(): argument
128 CoroCloner(Function &OrigF, const Twine &Suffix, coro::Shape &Shape, Function *NewF, AnyCoroSuspendInst *ActiveSuspend, TargetTransformInfo &TTI) // CoroCloner(): argument
243 lowerAwaitSuspends(Function &F, coro::Shape &Shape) // lowerAwaitSuspends(): argument
250 maybeFreeRetconStorage(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr, CallGraph *CG) // maybeFreeRetconStorage(): argument
306 replaceFallthroughCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG) // replaceFallthroughCoroEnd(): argument
404 markCoroutineAsDone(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr) // markCoroutineAsDone(): argument
438 replaceUnwindCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG) // replaceUnwindCoroEnd(): argument
476 replaceCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG) // replaceCoroEnd(): argument
540 createCloneDeclaration(Function &OrigF, coro::Shape &Shape, const Twine &Suffix, Module::iterator InsertBefore, AnyCoroSuspendInst *ActiveSuspend) // createCloneDeclaration(): argument
656 replaceSwiftErrorOps(Function &F, coro::Shape &Shape, ValueToValueMapTy *VMap) // replaceSwiftErrorOps(): argument
1167 updateAsyncFuncPointerContextSize(coro::Shape &Shape) // updateAsyncFuncPointerContextSize(): argument
1182 replaceFrameSizeAndAlignment(coro::Shape &Shape) // replaceFrameSizeAndAlignment(): argument
1222 handleNoSuspendCoroutine(coro::Shape &Shape) // handleNoSuspendCoroutine(): argument
1382 simplifySuspendPoints(coro::Shape &Shape) // simplifySuspendPoints(): argument
1677 splitAsyncCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl<Function *> &Clones, TargetTransformInfo &TTI) // splitAsyncCoroutine(): argument
1772 splitRetconCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl<Function *> &Clones, TargetTransformInfo &TTI) // splitRetconCoroutine(): argument
1925 coro::Shape Shape(F, OptimizeFrame); // splitCoroutine(): local
1973 removeCoroEnds(const coro::Shape &Shape) // removeCoroEnds(): argument
1980 updateCallGraphAfterCoroutineSplit(LazyCallGraph::Node &N, const coro::Shape &Shape, const SmallVectorImpl<Function *> &Clones, LazyCallGraph::SCC &C, LazyCallGraph &CG, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM) // updateCallGraphAfterCoroutineSplit(): argument
2126 const coro::Shape Shape = … // run(): local
[all...]
/llvm-project/mlir/docs/Traits/
Broadcastable.md
15 - A shape inference mechanism is able to compute the result shape solely based on input operand sha…
19 - The operation's result shape is compatible with (though not necessarily identical to) …
57 ## Shape inference
59 The shape inference process begins by correcting rank differences in input operands. A shape is exp…
62 ExpandRank(shape, rank):
63 while len(shape) < rank:
64 shape.prepend(1)
67 Given the shapes of two ranked input operands, the result's shape is inferred by equalizing input r…
77 # Infer shape
85 shape for an operation with an arbitrary number of input operands is then inferred by discarding u…
[all …]
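
A worked instance of the inference described above, expressed with shape-dialect ops for concreteness (the trait itself is dialect-agnostic): broadcasting [1, 2] with [3, 2, 1] first rank-expands [1, 2] to [1, 1, 2], then merges dimensions pairwise.

func.func @broadcast_example() -> !shape.shape {
  %a = shape.const_shape [1, 2] : !shape.shape
  %b = shape.const_shape [3, 2, 1] : !shape.shape
  // [1, 2] is treated as [1, 1, 2]; merging with [3, 2, 1] yields [3, 2, 2].
  %c = shape.broadcast %a, %b : !shape.shape, !shape.shape -> !shape.shape
  return %c : !shape.shape
}
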
/llvm-project/flang/include/flang/Evaluate/
shape.h
1 //===-- include/flang/Evaluate/shape.h --------------------------*- C++ -*-===//
9 // GetShape() analyzes an expression and determines its shape, if possible,
34 using Shape = std::vector<MaybeExtentExpr>; // variable
40 std::optional<ExtentExpr> AsExtentArrayExpr(const Shape &);
43 FoldingContext &, const Shape &);
46 // AsConstantExtents returns a constant shape. It may contain
50 FoldingContext &, const Shape &); // in AsConstantExtents()
52 FoldingContext &foldingContext, const std::optional<Shape> &maybeShape) { // in AsConstantExtents()
59 Shape AsShape(const ConstantSubscripts &); // in GetRank()
60 std::optional<Shape> AsShape… // in GetRank()
[all...]
/llvm-project/mlir/test/Analysis/
test-shape-fn-report.mlir
1 // RUN: mlir-opt %s --test-shape-function-report -verify-diagnostics
3 module attributes {shape.lib = [@shape_lib]} {
5 // expected-remark@+1 {{associated shape function: same_result_shape}}
7 attributes {shape.function = @shape_lib::@same_result_shape} {
15 // The shape function library with some local functions.
16 shape.function_library @shape_lib {
17 // Test shape function that returns the shape of input arg as result shape.
18 func @same_result_shape(%arg: !shape.value_shape) -> !shape.shape {
19 %0 = shape_of %arg : !shape.value_shape -> !shape.shape
20 return %0 : !shape.shape
/llvm-project/flang/test/HLFIR/
shapeof.fir
7 func.func @shapeof(%arg0: !hlfir.expr<2x2xi32>) -> !fir.shape<2> {
8 %shape = hlfir.shape_of %arg0 : (!hlfir.expr<2x2xi32>) -> !fir.shape<2>
9 return %shape : !fir.shape<2>
14 // CHECK-NEXT: %[[SHAPE:.*]] = hlfir.shape_of %[[EXPR]] : (!hlfir.expr<2x2xi32>) -> !fir.shape<2>
17 // CHECK-CANON-NEXT: %[[SHAPE:.*]] = fir.shape %[[C2]], %[[C2]] : (index, index) -> !fir.shape<2>
[all...]
/llvm-project/flang/lib/Evaluate/
shape.cpp
1 //===-- lib/Evaluate/shape.cpp --------------------------------------------===//
9 #include "flang/Evaluate/shape.h"
35 details->shape().CanBeImpliedShape(); // in IsExplicitShape()
41 const auto &shape{details->shape()}; // in IsExplicitShape()
42 return shape.Rank() == 0 || // in IsExplicitShape()
43 shape.IsExplicitShape(); // true when scalar, too (in IsExplicitShape())
46 .has<semantics::AssocEntityDetails>(); // exprs have explicit shape (in ConstantShape())
50 Shape GetShapeHelper::ConstantShape(const Constant<ExtentType> &arrayConstant) { // in ConstantShape()
52 Shape resul… // in ConstantShape()
36 const auto &shape{details->shape()}; // IsExplicitShape(): local
80 Shape shape; // CreateShape(): local
87 AsExtentArrayExpr(const Shape &shape) // AsExtentArrayExpr(): argument
100 AsConstantShape(FoldingContext &context, const Shape &shape) // AsConstantShape(): argument
110 AsConstantShape(const ConstantSubscripts &shape) // AsConstantShape(): argument
119 AsConstantExtents(const Constant<ExtentType> &shape) // AsConstantExtents(): argument
128 AsConstantExtents(FoldingContext &context, const Shape &shape) // AsConstantExtents(): argument
136 AsShape(const ConstantSubscripts &shape) // AsShape(): argument
144 AsShape(const std::optional<ConstantSubscripts> &shape) // AsShape(): argument
152 Fold(FoldingContext &context, Shape &&shape) // Fold(): argument
160 Fold(FoldingContext &context, std::optional<Shape> &&shape) // Fold(): argument
198 GetSize(Shape &&shape) // GetSize(): argument
210 GetSize(const ConstantSubscripts &shape) // GetSize(): argument
484 if (auto shape{GetShape(assoc->expr())}; // GetAssociatedExtent(): local
520 if (auto shape{GetShape(symbol, invariantOnly)}) { // GetExtent(): local
569 if (auto shape{GetShape(subs.value())}) { // GetExtent(): local
811 Shape shape; // operator()(): local
833 Shape shape; // operator()(): local
1081 if (auto shape{(*this)(call.arguments().at(0))}) { // operator()(): local
[all...]
/llvm-project/mlir/test/Conversion/ShapeToStandard/
convert-shape-constraints.mlir
1 // RUN: mlir-opt -pass-pipeline="builtin.module(func.func(convert-shape-constraints))" <%s | FileCh…
6 // CHECK-SAME: %[[RHS:.*]]: tensor<?xindex>) -> !shape.witness {
7 // CHECK: %[[RET:.*]] = shape.const_witness true
8 // CHECK: %[[BROADCAST_IS_VALID:.*]] = shape.is_broadcastable %[[LHS]], %[[RHS]]
10 // CHECK: return %[[RET]] : !shape.witness
12 func.func @cstr_broadcastable(%arg0: tensor<?xindex>, %arg1: tensor<?xindex>) -> !shape.witness {
13 %witness = shape.cstr_broadcastable %arg0, %arg1 : tensor<?xindex>, tensor<?xindex>
14 return %witness : !shape.witness
19 // CHECK-SAME: %[[RHS:.*]]: tensor<?xindex>) -> !shape.witness {
20 // CHECK: %[[RET:.*]] = shape.const_witness true
[all …]
/llvm-project/lldb/test/API/functionalities/vtable/
TestVTableValue.py
25 # Test a shape instance to make sure we get the vtable correctly.
26 shape = self.frame().FindVariable("shape")
27 vtable = shape.GetVTable()
28 self.assertEqual(vtable.GetName(), "vtable for Shape")
29 self.assertEqual(vtable.GetTypeName(), "vtable for Shape")
31 # for the shape class.
36 expected_addr = self.expected_vtable_addr(shape)
42 # Test a shape reference to make sure we get the vtable correctly.
43 shape…
[all...]
/llvm-project/mlir/python/mlir/extras/
types.py
99 def _shaped(*shape, element_type: Type = None, type_constructor=None):
102 if (element_type is None and shape and not isinstance(shape[-1], Type)) or (
103 shape and isinstance(shape[-1], Type) and element_type is not None
110 sizes = shape
112 type = shape[-1]
113 sizes = shape[:-1]
121 *shape,  # argument
127 *shape,
85 _shaped(*shape, element_type: Type = None, type_constructor=None)  # _shaped(): argument
107 vector(*shape, element_type: Type = None, scalable: Optional[List[bool]] = None, scalable_dims: Optional[List[int]] = None)  # vector(): argument
138 memref(*shape, element_type: Type = None, memory_space: Optional[int] = None, layout: Optional[StridedLayoutAttr] = None)  # memref(): argument
[all...]
/llvm-project/flang/docs/
H A DArrayComposition.md51 and the shape of the array delimits the domain of the map.
61 but expressions have rank and shape, and one can view array expressions
121 * `RESHAPE(A,SHAPE=s)` without `ORDER=` must precompute the shape
123 in the storage order of `A` (whose shape must also be captured).
138 ## Determination of rank and shape
141 temporary storage is determining the shape of the result prior to,
149 * `SHAPE(SUM(X,DIM=d))` is `SHAPE(X)` with one element removed:
150 `PACK(SHAPE(X),[(j,j=1,RANK(X))]/=d)` in general.
152 * `SHAPE(MAXLOC(X))` is `[RANK(X)]`.
153 * `SHAPE(MAXLOC(X,DIM=d))` is `SHAPE(X)` with one element removed.
[all …]
