; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc --mtriple=loongarch32 -mattr=+d < %s | FileCheck %s --check-prefix=LA32
; RUN: llc --mtriple=loongarch64 -mattr=+d < %s | FileCheck %s --check-prefix=LA64

;; Exercise the 'ashr' LLVM IR: https://llvm.org/docs/LangRef.html#ashr-instruction

define i1 @ashr_i1(i1 %x, i1 %y) {
; LA32-LABEL: ashr_i1:
; LA32:       # %bb.0:
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i1:
; LA64:       # %bb.0:
; LA64-NEXT:    ret
  %ashr = ashr i1 %x, %y
  ret i1 %ashr
}

define i8 @ashr_i8(i8 %x, i8 %y) {
; LA32-LABEL: ashr_i8:
; LA32:       # %bb.0:
; LA32-NEXT:    ext.w.b $a0, $a0
; LA32-NEXT:    sra.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i8:
; LA64:       # %bb.0:
; LA64-NEXT:    ext.w.b $a0, $a0
; LA64-NEXT:    sra.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %ashr = ashr i8 %x, %y
  ret i8 %ashr
}

define i16 @ashr_i16(i16 %x, i16 %y) {
; LA32-LABEL: ashr_i16:
; LA32:       # %bb.0:
; LA32-NEXT:    ext.w.h $a0, $a0
; LA32-NEXT:    sra.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i16:
; LA64:       # %bb.0:
; LA64-NEXT:    ext.w.h $a0, $a0
; LA64-NEXT:    sra.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %ashr = ashr i16 %x, %y
  ret i16 %ashr
}

define i32 @ashr_i32(i32 %x, i32 %y) {
; LA32-LABEL: ashr_i32:
; LA32:       # %bb.0:
; LA32-NEXT:    sra.w $a0, $a0, $a1
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i32:
; LA64:       # %bb.0:
; LA64-NEXT:    sra.w $a0, $a0, $a1
; LA64-NEXT:    ret
  %ashr = ashr i32 %x, %y
  ret i32 %ashr
}

define i64 @ashr_i64(i64 %x, i64 %y) {
; LA32-LABEL: ashr_i64:
; LA32:       # %bb.0:
; LA32-NEXT:    srai.w $a3, $a1, 31
; LA32-NEXT:    addi.w $a4, $a2, -32
; LA32-NEXT:    slti $a5, $a4, 0
; LA32-NEXT:    masknez $a3, $a3, $a5
; LA32-NEXT:    sra.w $a6, $a1, $a2
; LA32-NEXT:    maskeqz $a6, $a6, $a5
; LA32-NEXT:    or $a3, $a6, $a3
; LA32-NEXT:    srl.w $a0, $a0, $a2
; LA32-NEXT:    xori $a2, $a2, 31
; LA32-NEXT:    slli.w $a6, $a1, 1
; LA32-NEXT:    sll.w $a2, $a6, $a2
; LA32-NEXT:    or $a0, $a0, $a2
; LA32-NEXT:    maskeqz $a0, $a0, $a5
; LA32-NEXT:    sra.w $a1, $a1, $a4
; LA32-NEXT:    masknez $a1, $a1, $a5
; LA32-NEXT:    or $a0, $a0, $a1
; LA32-NEXT:    move $a1, $a3
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i64:
; LA64:       # %bb.0:
; LA64-NEXT:    sra.d $a0, $a0, $a1
; LA64-NEXT:    ret
  %ashr = ashr i64 %x, %y
  ret i64 %ashr
}

define i1 @ashr_i1_3(i1 %x) {
; LA32-LABEL: ashr_i1_3:
; LA32:       # %bb.0:
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i1_3:
; LA64:       # %bb.0:
; LA64-NEXT:    ret
  %ashr = ashr i1 %x, 3
  ret i1 %ashr
}

define i8 @ashr_i8_3(i8 %x) {
; LA32-LABEL: ashr_i8_3:
; LA32:       # %bb.0:
; LA32-NEXT:    ext.w.b $a0, $a0
; LA32-NEXT:    srai.w $a0, $a0, 3
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i8_3:
; LA64:       # %bb.0:
; LA64-NEXT:    ext.w.b $a0, $a0
; LA64-NEXT:    srai.d $a0, $a0, 3
; LA64-NEXT:    ret
  %ashr = ashr i8 %x, 3
  ret i8 %ashr
}

define i16 @ashr_i16_3(i16 %x) {
; LA32-LABEL: ashr_i16_3:
; LA32:       # %bb.0:
; LA32-NEXT:    ext.w.h $a0, $a0
; LA32-NEXT:    srai.w $a0, $a0, 3
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i16_3:
; LA64:       # %bb.0:
; LA64-NEXT:    ext.w.h $a0, $a0
; LA64-NEXT:    srai.d $a0, $a0, 3
; LA64-NEXT:    ret
  %ashr = ashr i16 %x, 3
  ret i16 %ashr
}

define i32 @ashr_i32_3(i32 %x) {
; LA32-LABEL: ashr_i32_3:
; LA32:       # %bb.0:
; LA32-NEXT:    srai.w $a0, $a0, 3
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i32_3:
; LA64:       # %bb.0:
; LA64-NEXT:    addi.w $a0, $a0, 0
; LA64-NEXT:    srai.d $a0, $a0, 3
; LA64-NEXT:    ret
  %ashr = ashr i32 %x, 3
  ret i32 %ashr
}

define i64 @ashr_i64_3(i64 %x) {
; LA32-LABEL: ashr_i64_3:
; LA32:       # %bb.0:
; LA32-NEXT:    slli.w $a2, $a1, 29
; LA32-NEXT:    srli.w $a0, $a0, 3
; LA32-NEXT:    or $a0, $a0, $a2
; LA32-NEXT:    srai.w $a1, $a1, 3
; LA32-NEXT:    ret
;
; LA64-LABEL: ashr_i64_3:
; LA64:       # %bb.0:
; LA64-NEXT:    srai.d $a0, $a0, 3
; LA64-NEXT:    ret
  %ashr = ashr i64 %x, 3
  ret i64 %ashr
}