Lines Matching full:shape
1 // RUN: mlir-opt -outline-shape-computation -test-print-shape-mapping -split-input-file %s 2>%t | F…
2 // RUN: cat %t | FileCheck %s --check-prefix SHAPE
4 // Two dynamic shapes: one of direct shape.shape_of(arg) and the other.
6 …// SHAPE-DAG: Shape for {{.*}} = "test.abs"({{.*}}> :: @shape_cal_0(<block argument> of type 'tens…
7 …// SHAPE-DAG: Shape for {{.*}} = "test.concat"({{.*}}> :: @shape_cal_1(<block argument> of type 't…
11 %0 = shape.shape_of %arg0 : tensor<?x4x?xf32> -> tensor<3xindex>
12 %1 = shape.get_extent %0, %c2 : tensor<3xindex>, index -> index
14 %3 = shape.with_shape %2, %0 : tensor<?x4x?xf32>, tensor<3xindex>
15 %4 = shape.value_of %3 : tensor<?x4x?xf32>
17 %6 = shape.get_extent %0, %c0 : tensor<3xindex>, index -> index
19 %8 = shape.from_extents %7, %c4, %1 : index, index, index
20 %9 = shape.with_shape %5, %8 : tensor<?x4x?xf32>, !shape.shape
21 %10 = shape.value_of %9 : tensor<?x4x?xf32>
30 // CHECK: shape.func private @shape_cal_1(%arg0: tensor<?x4x?xf32>) -> !shape.shape {
36 // CHECK-DAG: return %[[V4]] : !shape.shape
38 // CHECK: shape.func private @shape_cal_0(%arg0: tensor<?x4x?xf32>) -> tensor<3xindex> {
44 // Two dynamic shapes and they share the same shape.func
49 %0 = shape.shape_of %arg0 : tensor<?x4x?xf32> -> tensor<3xindex>
50 %1 = shape.get_extent %0, %c2 : tensor<3xindex>, index -> index
52 %3 = shape.get_extent %0, %c0 : tensor<3xindex>, index -> index
54 %5 = shape.from_extents %4, %c4, %1 : index, index, index
55 %6 = shape.with_shape %2, %5 : tensor<?x4x?xf32>, !shape.shape
56 %7 = shape.value_of %6 : tensor<?x4x?xf32>
58 %9 = shape.with_shape %8, %5 : tensor<?x4x?xf32>, !shape.shape
59 %10 = shape.value_of %9 : tensor<?x4x?xf32>
67 // CHECK: shape.func private @shape_cal_0(%arg0: tensor<?x4x?xf32>) -> !shape.shape {
73 // CHECK-DAG: return %4 : !shape.shape
78 // There's an internal dynamic shape source, and two other dynamic shapes share it
81 %1 = shape.shape_of %0 : tensor<?xi32> -> tensor<1xindex>
82 %2 = shape.with_shape %0, %1 : tensor<?xi32>, tensor<1xindex>
83 %3 = shape.value_of %2 : tensor<?xi32>
85 %5 = shape.with_shape %4, %1 : tensor<?xi32>, tensor<1xindex>
86 %6 = shape.value_of %5 : tensor<?xi32>
88 %8 = shape.with_shape %7, %1 : tensor<?xi32>, tensor<1xindex>
89 %9 = shape.value_of %8 : tensor<?xi32>
98 // CHECK: shape.func private @shape_cal_0(%arg0: tensor<?xi32>) -> tensor<1xindex> {
105 // There's only a return op in the constructed shape.func
108 %1 = shape.with_shape %0, %arg1 : tensor<?xi32>, tensor<1xindex>
109 %2 = shape.value_of %1 : tensor<?xi32>
116 // CHECK: shape.func private @shape_cal_0(%arg0: tensor<1xindex>) -> tensor<1xindex> {
121 // Shape computation part interleaves with general computation.
126 %0 = shape.shape_of %arg0 : tensor<?x4x5xf32> -> tensor<3xindex>
127 %1 = shape.shape_of %arg1 : tensor<?x4x5xf32> -> tensor<3xindex>
128 %2 = shape.shape_of %arg2 : tensor<?x4x5xf32> -> tensor<3xindex>
130 %4 = shape.get_extent %0, %c0 : tensor<3xindex>, index -> index
131 %5 = shape.get_extent %1, %c0 : tensor<3xindex>, index -> index
132 %6 = shape.get_extent %2, %c0 : tensor<3xindex>, index -> index
135 %9 = shape.from_extents %8, %c4, %c5 : index, index, index
136 %10 = shape.with_shape %3, %9 : tensor<?x4x5xf32>, !shape.shape
137 %11 = shape.value_of %10 : tensor<?x4x5xf32>
141 // CHECK-DAG: %[[V0:.*]] = shape.shape_of %arg0 : tensor<?x4x5xf32> -> tensor<3xindex>
142 // CHECK-DAG: %[[V1:.*]] = shape.shape_of %arg1 : tensor<?x4x5xf32> -> tensor<3xindex>
144 // CHECK-DAG: %[[V3:.*]] = shape.get_extent %[[V0]], %c0 : tensor<3xindex>, index -> index
145 // CHECK-DAG: %[[V4:.*]] = shape.get_extent %[[V1]], %c0 : tensor<3xindex>, index -> index
149 // CHECK: shape.func private @shape_cal_0(%arg0: tensor<?x4x5xf32>, %arg1: index, %arg2: index)…
154 // CHECK-DAG: return %[[V3]] : !shape.shape
158 // There are multiple reused shape computations.
162 %0 = shape.shape_of %arg0 : tensor<?x4xf32> -> tensor<2xindex>
163 %1 = shape.shape_of %arg1 : tensor<?x4xf32> -> tensor<2xindex>
166 %4 = shape.get_extent %0, %c0 : tensor<2xindex>, index -> index
167 %5 = shape.get_extent %1, %c0 : tensor<2xindex>, index -> index
169 %7 = shape.from_extents %6, %c4 : index, index
170 %8 = shape.with_shape %2, %7 : tensor<?x4xf32>, !shape.shape
171 %9 = shape.with_shape %3, %7 : tensor<?x4xf32>, !shape.shape
172 %10 = shape.value_of %8 : tensor<?x4xf32>
173 %11 = shape.value_of %9 : tensor<?x4xf32>
177 %15 = shape.from_extents %14, %c4 : index, index
178 %16 = shape.with_shape %12, %15 : tensor<?x4xf32>, !shape.shape
179 %17 = shape.with_shape %13, %15 : tensor<?x4xf32>, !shape.shape
180 %18 = shape.value_of %16 : tensor<?x4xf32>
181 %19 = shape.value_of %17 : tensor<?x4xf32>
191 // CHECK: shape.func private @shape_cal_1(%arg0: tensor<?x4xf32>, %arg1: tensor<?x4xf32>) -> !…
199 // CHECK-DAG: return %[[V6]] : !shape.shape
201 // CHECK: shape.func private @shape_cal_0(%arg0: tensor<?x4xf32>, %arg1: tensor<?x4xf32>) -> !s…
208 // CHECK-DAG: return %[[V5]] : !shape.shape
210 // Make sure redundant with_shape is removed when with_shape input is !shape.value_shape.
211 func.func @value_shape_with_shape(%arg0: !shape.value_shape, %arg1: !shape.value_shape) -> tensor<?…
212 %1 = shape.shape_of %arg0 : !shape.value_shape -> !shape.shape
213 %2 = shape.with_shape %arg1, %1 : !shape.value_shape, !shape.shape
214 %3 = shape.value_of %2 : tensor<?xf32>
218 // CHECK-NEXT:%0 = shape.value_of %arg1 : tensor<?xf32>