xref: /llvm-project/mlir/test/Dialect/Shape/bufferize.mlir (revision ced2fc7819d5ddea616ec330f18e08ff284c1868)
// RUN: mlir-opt -split-input-file --one-shot-bufferize="dialect-filter=shape,bufferization copy-before-write unknown-type-conversion=identity-layout-map allow-unknown-ops" <%s | FileCheck %s

// -----

// CHECK-LABEL:   func @shape_assuming() {
// CHECK:           %[[WTRUE:.*]] = shape.const_witness true
// CHECK:           %[[MEMREF:.*]] = shape.assuming %[[WTRUE]] -> (memref<2xf16>) {
// CHECK:             %[[TENSOR_VAL:.*]] = "test.source"() : () -> tensor<2xf16>
// CHECK:             %[[YIELDED_MEMREF:.*]] = bufferization.to_memref %[[TENSOR_VAL]] : tensor<2xf16> to memref<2xf16>
// CHECK:             shape.assuming_yield %[[YIELDED_MEMREF]] : memref<2xf16>
// CHECK:           }
// CHECK:           %[[TENSOR:.*]] = bufferization.to_tensor %[[MEMREF:.*]] : memref<2xf16>
// CHECK:           "test.sink"(%[[TENSOR]]) : (tensor<2xf16>) -> ()
// CHECK:           return
// CHECK:         }
func.func @shape_assuming() {
  %0 = shape.const_witness true
  // shape.assuming yields a tensor; per the CHECK lines above, one-shot
  // bufferization is expected to rewrite the region to yield a memref and
  // insert a bufferization.to_tensor on the result for the unknown consumer.
  %1 = shape.assuming %0 -> (tensor<2xf16>) {
    %2 = "test.source"() : () -> (tensor<2xf16>)
    shape.assuming_yield %2 : tensor<2xf16>
  }
  "test.sink"(%1) : (tensor<2xf16>) -> ()
  return
}