xref: /llvm-project/mlir/test/Dialect/SparseTensor/codegen_buffer_initialization.mlir (revision 83cf0dc98234bbd8cb0d0959baa570477a8daf92)
// RUN: mlir-opt %s --sparse-tensor-codegen=enable-buffer-initialization=true  --canonicalize --cse | FileCheck %s

#SV = #sparse_tensor.encoding<{ map = (d0) -> (d0 : compressed) }>

// CHECK-LABEL:   func.func @empty_sparse_vector(
//  CHECK-SAME:     %[[VAL_0:.*]]: index) -> (memref<?xindex>, memref<?xindex>, memref<?xf64>, !sparse_tensor.storage_specifier
//   CHECK-DAG:     %[[VAL_1:.*]] = arith.constant 1 : index
//   CHECK-DAG:     %[[VAL_2:.*]] = arith.constant 0.000000e+00 : f64
//   CHECK-DAG:     %[[VAL_3:.*]] = arith.constant 0 : index
//       CHECK:     %[[VAL_4:.*]] = memref.alloc() : memref<16xindex>
//       CHECK:     %[[VAL_5:.*]] = memref.cast %[[VAL_4]] : memref<16xindex> to memref<?xindex>
//       CHECK:     linalg.fill ins(%[[VAL_3]] : index) outs(%[[VAL_4]] : memref<16xindex>)
//       CHECK:     %[[VAL_6:.*]] = memref.alloc() : memref<16xindex>
//       CHECK:     %[[VAL_7:.*]] = memref.cast %[[VAL_6]] : memref<16xindex> to memref<?xindex>
//       CHECK:     linalg.fill ins(%[[VAL_3]] : index) outs(%[[VAL_6]] : memref<16xindex>)
//       CHECK:     %[[VAL_8:.*]] = memref.alloc() : memref<16xf64>
//       CHECK:     %[[VAL_9:.*]] = memref.cast %[[VAL_8]] : memref<16xf64> to memref<?xf64>
//       CHECK:     linalg.fill ins(%[[VAL_2]] : f64) outs(%[[VAL_8]] : memref<16xf64>)
//       CHECK:     %[[VAL_10:.*]] = sparse_tensor.storage_specifier.init : !sparse_tensor.storage_specifier
//       CHECK:     %[[VAL_12:.*]] = sparse_tensor.storage_specifier.set %[[VAL_10]]  lvl_sz at 0 with %[[VAL_0]] : !sparse_tensor.storage_specifier
//       CHECK:     %[[VAL_14:.*]] = sparse_tensor.storage_specifier.get %[[VAL_12]]  pos_mem_sz at 0 : !sparse_tensor.storage_specifier
//       CHECK:     %[[VAL_15:.*]], %[[VAL_17:.*]] = sparse_tensor.push_back %[[VAL_14]], %[[VAL_5]], %[[VAL_3]] : index, memref<?xindex>, index
//       CHECK:     %[[VAL_18:.*]] = sparse_tensor.storage_specifier.set %[[VAL_12]]  pos_mem_sz at 0 with %[[VAL_17]] : !sparse_tensor.storage_specifier
//       CHECK:     %[[VAL_19:.*]], %[[VAL_21:.*]] = sparse_tensor.push_back %[[VAL_17]], %[[VAL_15]], %[[VAL_3]], %[[VAL_1]] : index, memref<?xindex>, index, index
//       CHECK:     %[[VAL_22:.*]] = sparse_tensor.storage_specifier.set %[[VAL_18]]  pos_mem_sz at 0 with %[[VAL_21]] : !sparse_tensor.storage_specifier
//       CHECK:     return %[[VAL_19]], %[[VAL_7]], %[[VAL_9]], %[[VAL_22]] : memref<?xindex>, memref<?xindex>, memref<?xf64>, !sparse_tensor.storage_specifier
// Materializes an empty dynamically-sized sparse vector. The CHECK lines
// above verify that, with enable-buffer-initialization=true, codegen
// zero-fills the positions, coordinates, and values buffers via linalg.fill.
// Local SSA names are discarded by the parser, so they do not affect the
// FileCheck captures on the pass output.
func.func @empty_sparse_vector(%arg0: index) -> tensor<?xf64, #SV> {
  %empty = tensor.empty(%arg0) : tensor<?xf64, #SV>
  %loaded = sparse_tensor.load %empty : tensor<?xf64, #SV>
  return %loaded : tensor<?xf64, #SV>
}
32