; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV64I

; Tests for SimplifyDemandedBits: when only the low bits of a shifted value
; are demanded (via trunc or mask), the sext/zext of the shift amount's
; operand can be dropped entirely.

define void @sext_shl_trunc_same_size(i16 %x, i32 %y, ptr %res) {
; RV32I-LABEL: sext_shl_trunc_same_size:
; RV32I:       # %bb.0:
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    sh a0, 0(a2)
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_shl_trunc_same_size:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    sh a0, 0(a2)
; RV64I-NEXT:    ret
  %conv = sext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = trunc i32 %shl to i16
  store i16 %t, ptr %res
  ret void
}

define void @zext_shl_trunc_same_size(i16 %x, i32 %y, ptr %res) {
; RV32I-LABEL: zext_shl_trunc_same_size:
; RV32I:       # %bb.0:
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    sh a0, 0(a2)
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_shl_trunc_same_size:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    sh a0, 0(a2)
; RV64I-NEXT:    ret
  %conv = zext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = trunc i32 %shl to i16
  store i16 %t, ptr %res
  ret void
}

define void @sext_shl_trunc_smaller(i16 %x, i32 %y, ptr %res) {
; RV32I-LABEL: sext_shl_trunc_smaller:
; RV32I:       # %bb.0:
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    sb a0, 0(a2)
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_shl_trunc_smaller:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    sb a0, 0(a2)
; RV64I-NEXT:    ret
  %conv = sext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = trunc i32 %shl to i8
  store i8 %t, ptr %res
  ret void
}

define void @zext_shl_trunc_smaller(i16 %x, i32 %y, ptr %res) {
; RV32I-LABEL: zext_shl_trunc_smaller:
; RV32I:       # %bb.0:
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    sb a0, 0(a2)
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_shl_trunc_smaller:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    sb a0, 0(a2)
; RV64I-NEXT:    ret
  %conv = zext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = trunc i32 %shl to i8
  store i8 %t, ptr %res
  ret void
}

; negative test - demanding 1 high-bit too many to change the extend

define signext i17 @sext_shl_trunc_larger(i16 %x, i32 %y) {
; RV32I-LABEL: sext_shl_trunc_larger:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srai a0, a0, 16
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    slli a0, a0, 15
; RV32I-NEXT:    srai a0, a0, 15
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_shl_trunc_larger:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 47
; RV64I-NEXT:    srai a0, a0, 47
; RV64I-NEXT:    ret
  %conv = sext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = trunc i32 %shl to i17
  ret i17 %t
}

; negative test - demanding 1 high-bit too many to change the extend

define zeroext i17 @zext_shl_trunc_larger(i16 %x, i32 %y) {
; RV32I-LABEL: zext_shl_trunc_larger:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    slli a0, a0, 15
; RV32I-NEXT:    srli a0, a0, 15
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_shl_trunc_larger:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 47
; RV64I-NEXT:    srli a0, a0, 47
; RV64I-NEXT:    ret
  %conv = zext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = trunc i32 %shl to i17
  ret i17 %t
}

define i32 @sext_shl_mask(i16 %x, i32 %y) {
; RV32I-LABEL: sext_shl_mask:
; RV32I:       # %bb.0:
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_shl_mask:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    ret
  %conv = sext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = and i32 %shl, 65535
  ret i32 %t
}

define i32 @zext_shl_mask(i16 %x, i32 %y) {
; RV32I-LABEL: zext_shl_mask:
; RV32I:       # %bb.0:
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_shl_mask:
; RV64I:       # %bb.0:
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    ret
  %conv = zext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = and i32 %shl, 65535
  ret i32 %t
}

; negative test - demanding a bit that could change with sext

define i32 @sext_shl_mask_higher(i16 %x, i32 %y) {
; RV32I-LABEL: sext_shl_mask_higher:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srai a0, a0, 16
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    lui a1, 16
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: sext_shl_mask_higher:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    lui a1, 16
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    ret
  %conv = sext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = and i32 %shl, 65536
  ret i32 %t
}

; negative test - demanding a bit that could change with zext

define i32 @zext_shl_mask_higher(i16 %x, i32 %y) {
; RV32I-LABEL: zext_shl_mask_higher:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 16
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    lui a1, 16
; RV32I-NEXT:    and a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: zext_shl_mask_higher:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 48
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    lui a1, 16
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    ret
  %conv = zext i16 %x to i32
  %shl = shl i32 %conv, %y
  %t = and i32 %shl, 65536
  ret i32 %t
}

; May need some, but not all of the bits set by the 'or'.

define i32 @set_shl_mask(i32 %x, i32 %y) {
; RV32I-LABEL: set_shl_mask:
; RV32I:       # %bb.0:
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    addi a3, a2, 1
; RV32I-NEXT:    or a0, a0, a3
; RV32I-NEXT:    sll a0, a0, a1
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    ret
;
; RV64I-LABEL: set_shl_mask:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    addi a3, a2, 1
; RV64I-NEXT:    or a0, a0, a3
; RV64I-NEXT:    sllw a0, a0, a1
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    ret
  %z = or i32 %x, 196609
  %s = shl i32 %z, %y
  %r = and i32 %s, 65536
  ret i32 %r
}