; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s

; Negated abs: sub(0, smax(sub(0, x), x)) is -abs(x) and should select vrsub.vi + vmin.vv.
define <2 x i64> @expanded_fixed_neg_abs64(<2 x i64> %x) {
; CHECK-LABEL: expanded_fixed_neg_abs64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vrsub.vi v9, v8, 0
; CHECK-NEXT:    vmin.vv v8, v8, v9
; CHECK-NEXT:    ret
  %t = sub <2 x i64> <i64 0, i64 0>, %x
  %t1 = call <2 x i64> @llvm.smax.v2i64(<2 x i64> %t, <2 x i64> %x)
  %t2 = sub <2 x i64> <i64 0, i64 0>, %t1
  ret <2 x i64> %t2
}

; Same pattern with the unsigned max intrinsic; should select vrsub.vi + vminu.vv.
define <2 x i64> @expanded_fixed_neg_abs64_unsigned(<2 x i64> %x) {
; CHECK-LABEL: expanded_fixed_neg_abs64_unsigned:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vrsub.vi v9, v8, 0
; CHECK-NEXT:    vminu.vv v8, v8, v9
; CHECK-NEXT:    ret
  %t = sub <2 x i64> <i64 0, i64 0>, %x
  %t1 = call <2 x i64> @llvm.umax.v2i64(<2 x i64> %t, <2 x i64> %x)
  %t2 = sub <2 x i64> <i64 0, i64 0>, %t1
  ret <2 x i64> %t2
}

; Negated inverse abs: sub(0, smin(sub(0, x), x)) is abs(x) and should select vrsub.vi + vmax.vv.
define <2 x i64> @expanded_fixed_neg_inv_abs64(<2 x i64> %x) {
; CHECK-LABEL: expanded_fixed_neg_inv_abs64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vrsub.vi v9, v8, 0
; CHECK-NEXT:    vmax.vv v8, v8, v9
; CHECK-NEXT:    ret
  %t = sub <2 x i64> <i64 0, i64 0>, %x
  %t1 = call <2 x i64> @llvm.smin.v2i64(<2 x i64> %t, <2 x i64> %x)
  %t2 = sub <2 x i64> <i64 0, i64 0>, %t1
  ret <2 x i64> %t2
}

; Same pattern with the unsigned min intrinsic; should select vrsub.vi + vmaxu.vv.
define <2 x i64> @expanded_fixed_neg_inv_abs64_unsigned(<2 x i64> %x) {
; CHECK-LABEL: expanded_fixed_neg_inv_abs64_unsigned:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vrsub.vi v9, v8, 0
; CHECK-NEXT:    vmaxu.vv v8, v8, v9
; CHECK-NEXT:    ret
  %t = sub <2 x i64> <i64 0, i64 0>, %x
  %t1 = call <2 x i64> @llvm.umin.v2i64(<2 x i64> %t, <2 x i64> %x)
  %t2 = sub <2 x i64> <i64 0, i64 0>, %t1
  ret <2 x i64> %t2
}