// RUN: mlir-opt %s -split-input-file --sparse-reinterpret-map | FileCheck %s

#trait_mul = {
  indexing_maps = [
    affine_map<(i,j) -> (i,j)>,  // A (in)
    affine_map<(i,j) -> (j,i)>,  // B (in, transposed)
    affine_map<(i,j) -> (i,j)>   // X (out)
  ],
  iterator_types = ["parallel", "parallel"],
  doc = "X(i,j) *= A(i,j) * B(j,i)"
}

#BSR = #sparse_tensor.encoding<{  // 2x4 blocks
  map = (i, j) ->
    ( i floordiv 2 : dense
    , j floordiv 4 : compressed
    , i mod 2 : dense
    , j mod 4 : dense
    )
}>

// CHECK-DAG: #[[$map0:.*]] = affine_map<(d0, d1, d2, d3) -> (d0 * 2 + d2, d1 * 4 + d3)>
// CHECK-DAG: #[[$map1:.*]] = affine_map<(d0, d1, d2, d3) -> (d1 * 4 + d3, d0 * 2 + d2)>
// CHECK-DAG: #[[$map2:.*]] = affine_map<(d0, d1, d2, d3) -> (d0, d1, d2, d3)>
// CHECK-LABEL: func @mul(
// CHECK-SAME: %[[A0:.*0]]: tensor<32x32xf32>,
// CHECK-SAME: %[[A1:.*1]]: tensor<32x32xf32>,
// CHECK-SAME: %[[A2:.*2]]: tensor<32x32xf32, #sparse{{[0-9]*}}>)
// CHECK: %[[T0:.*]] = sparse_tensor.reinterpret_map %[[A2]]
// CHECK: %[[T1:.*]] = linalg.generic {doc = {{.*}} indexing_maps = [#[[$map0]], #[[$map1]], #[[$map2]]], iterator_types = ["parallel", "parallel", "parallel", "parallel"]}
// CHECK: %[[T2:.*]] = sparse_tensor.reinterpret_map %[[T1]]
// CHECK: return %[[T2]] : tensor<32x32xf32, #sparse{{[0-9]*}}>
func.func @mul(%arg0: tensor<32x32xf32>,
               %arg1: tensor<32x32xf32>,
               %arg2: tensor<32x32xf32, #BSR>) -> tensor<32x32xf32, #BSR> {
  %0 = linalg.generic #trait_mul
    ins(%arg0, %arg1: tensor<32x32xf32>, tensor<32x32xf32>)
    outs(%arg2: tensor<32x32xf32, #BSR>) {
      ^bb(%x: f32, %y : f32, %z : f32):
        %1 = arith.mulf %x, %y : f32
        %2 = arith.mulf %1, %z : f32
        linalg.yield %2 : f32
  } -> tensor<32x32xf32, #BSR>
  return %0 : tensor<32x32xf32, #BSR>
}

// -----

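// The test below checks that sparse_tensor.foreach over a 2x4 BSR tensor is
// demapped: the loop iterates the 1x2x2x2 level-space tensor, with
// sparse_tensor.reinterpret_map ops converting to and from the block view.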
#BSR = #sparse_tensor.encoding<{
  map = ( i, j ) ->
    ( i floordiv 2 : dense,
      j floordiv 2 : compressed,
      i mod 2 : dense,
      j mod 2 : dense
    )
}>

// CHECK-DAG: #[[$remap:.*]] = #sparse_tensor.encoding<{ map = (d0, d1) -> (d0 floordiv 2 : dense, d1 floordiv 2 : compressed, d0 mod 2 : dense, d1 mod 2 : dense) }>
// CHECK-DAG: #[[$demap:.*]] = #sparse_tensor.encoding<{ map = (d0, d1, d2, d3) -> (d0 : dense, d1 : compressed, d2 : dense, d3 : dense) }>
// CHECK-LABEL: func.func @sparse_foreach_reinterpret_map(
// CHECK-SAME: %[[VAL_0:.*]]: tensor<2x4xf64, #[[$remap]]>
// CHECK: %[[VAL_1:.*]] = bufferization.alloc_tensor() : tensor<1x2x2x2xf64, #[[$demap]]>
// CHECK: %[[VAL_2:.*]] = sparse_tensor.reinterpret_map %[[VAL_0]] : tensor<2x4xf64, #[[$remap]]> to tensor<1x2x2x2xf64, #[[$demap]]>
// CHECK: %[[VAL_4:.*]] = sparse_tensor.foreach in %[[VAL_2]] init(%[[VAL_1]])
// CHECK: ^bb0(%[[VAL_5:.*]]: index, %[[VAL_6:.*]]: index, %[[VAL_7:.*]]: index, %[[VAL_8:.*]]: index, %[[VAL_9:.*]]: f64, %[[VAL_10:.*]]: tensor<1x2x2x2xf64, #[[$demap]]>
// CHECK: %[[VAL_11:.*]] = tensor.insert %[[VAL_9]] into %[[VAL_10]]{{\[}}%[[VAL_5]], %[[VAL_6]], %[[VAL_7]], %[[VAL_8]]] : tensor<1x2x2x2xf64, #[[$demap]]>
// CHECK: sparse_tensor.yield %[[VAL_11]] : tensor<1x2x2x2xf64, #sparse{{[0-9]*}}>
// CHECK: }
// CHECK: %[[VAL_12:.*]] = sparse_tensor.reinterpret_map %[[VAL_4]] : tensor<1x2x2x2xf64, #[[$demap]]> to tensor<2x4xf64, #[[$remap]]>
// CHECK: %[[VAL_13:.*]] = sparse_tensor.load %[[VAL_12]] hasInserts : tensor<2x4xf64, #[[$remap]]>
// CHECK: return %[[VAL_13]] : tensor<2x4xf64, #sparse{{[0-9]*}}>
// CHECK: }
func.func @sparse_foreach_reinterpret_map(%6 : tensor<2x4xf64, #BSR>) -> tensor<2x4xf64, #BSR> {
  %7 = bufferization.alloc_tensor() : tensor<2x4xf64, #BSR>
  %8 = sparse_tensor.foreach in %6 init(%7) : tensor<2x4xf64, #BSR>, tensor<2x4xf64, #BSR> -> tensor<2x4xf64, #BSR> do {
  ^bb0(%arg0: index, %arg1: index, %arg2: f64, %arg3: tensor<2x4xf64, #BSR>):
    %inserted = tensor.insert %arg2 into %arg3[%arg0, %arg1] : tensor<2x4xf64, #BSR>
    sparse_tensor.yield %inserted : tensor<2x4xf64, #BSR>
  }
  %9 = sparse_tensor.load %8 hasInserts : tensor<2x4xf64, #BSR>
  return %9 : tensor<2x4xf64, #BSR>
}


// -----

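// The two tests below check that sparse_tensor.assemble and
// sparse_tensor.disassemble on a 2x4 BSR tensor are demapped to operate on the
// 1x2x2x2 level-space type, with sparse_tensor.reinterpret_map converting
// between the block view and the level view.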
#BSR = #sparse_tensor.encoding<{
  map = ( i, j ) ->
    ( i floordiv 2 : dense,
      j floordiv 2 : compressed,
      i mod 2 : dense,
      j mod 2 : dense
    )
}>
// CHECK-DAG: #[[$remap:.*]] = #sparse_tensor.encoding<{ map = (d0, d1) -> (d0 floordiv 2 : dense, d1 floordiv 2 : compressed, d0 mod 2 : dense, d1 mod 2 : dense) }>
// CHECK-DAG: #[[$demap:.*]] = #sparse_tensor.encoding<{ map = (d0, d1, d2, d3) -> (d0 : dense, d1 : compressed, d2 : dense, d3 : dense) }>

// CHECK-LABEL: func.func @sparse_assemble_reinterpret_map(
// CHECK-SAME: %[[VAL_0:.*]]: tensor<?xf64>,
// CHECK-SAME: %[[VAL_1:.*]]: tensor<?xindex>,
// CHECK-SAME: %[[VAL_2:.*]]: tensor<?xindex>) -> tensor<2x4xf64, #[[$remap]]> {
// CHECK: %[[VAL_3:.*]] = sparse_tensor.assemble {{.*}} to tensor<1x2x2x2xf64, #[[$demap]]>
// CHECK: %[[VAL_4:.*]] = sparse_tensor.reinterpret_map %[[VAL_3]] : tensor<1x2x2x2xf64, #[[$demap]]> to tensor<2x4xf64, #[[$remap]]>
// CHECK: return %[[VAL_4]] : tensor<2x4xf64, #[[$remap]]>
// CHECK: }
func.func @sparse_assemble_reinterpret_map(%val : tensor<?xf64>, %pos : tensor<?xindex>, %crd : tensor<?xindex>) -> tensor<2x4xf64, #BSR> {
  %0 = sparse_tensor.assemble (%pos, %crd), %val
     : (tensor<?xindex>, tensor<?xindex>), tensor<?xf64> to tensor<2x4xf64, #BSR>
  return %0 : tensor<2x4xf64, #BSR>
}

// CHECK-LABEL: func.func @sparse_disassemble_reinterpret_map(
// CHECK-SAME: %[[VAL_0:.*]]: tensor<2x4xf64, #[[$remap]]>,
// CHECK-SAME: %[[VAL_1:.*]]: tensor<?xf64>,
// CHECK-SAME: %[[VAL_2:.*]]: tensor<?xindex>,
// CHECK-SAME: %[[VAL_3:.*]]: tensor<?xindex>) -> (tensor<?xf64>, tensor<?xindex>, tensor<?xindex>) {
// CHECK: %[[VAL_4:.*]] = sparse_tensor.reinterpret_map %[[VAL_0]] : tensor<2x4xf64, #[[$remap]]> to tensor<1x2x2x2xf64, #[[$demap]]>
// CHECK: %{{.*}} = sparse_tensor.disassemble %[[VAL_4]] : tensor<1x2x2x2xf64, #[[$demap]]>
// CHECK: return
// CHECK: }
func.func @sparse_disassemble_reinterpret_map(%sp : tensor<2x4xf64, #BSR>,
                                              %od : tensor<?xf64>,
                                              %op : tensor<?xindex>,
                                              %oi : tensor<?xindex>)
    -> (tensor<?xf64>, tensor<?xindex>, tensor<?xindex>) {
  %rp, %ri, %rd, %dl, %pl, %il = sparse_tensor.disassemble %sp : tensor<2x4xf64, #BSR>
      out_lvls(%op, %oi : tensor<?xindex>, tensor<?xindex>)
      out_vals(%od : tensor<?xf64>)
      -> (tensor<?xindex>, tensor<?xindex>), tensor<?xf64>, (index, index), index
  return %rd, %rp, %ri : tensor<?xf64>, tensor<?xindex>, tensor<?xindex>
}