; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=riscv64 | FileCheck %s --check-prefixes=CHECK,RV64I
; RUN: llc < %s -mtriple=riscv64 -mattr=+zba,+zbb | \
; RUN: FileCheck %s --check-prefixes=CHECK,RV64ZB

; Make sure we emit an lw for the stack reload in 'truebb'.
define i1 @test_sext_w(i64 %x, i32 %y) nounwind {
; CHECK-LABEL: test_sext_w:
; CHECK: # %bb.0:
; CHECK-NEXT: addi sp, sp, -128
; CHECK-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; CHECK-NEXT: #APP
; CHECK-NEXT: #NO_APP
; CHECK-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; CHECK-NEXT: beqz a0, .LBB0_2
; CHECK-NEXT: # %bb.1: # %falsebb
; CHECK-NEXT: li a0, 0
; CHECK-NEXT: j .LBB0_3
; CHECK-NEXT: .LBB0_2: # %truebb
; CHECK-NEXT: lw a0, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT: slti a0, a0, 0
; CHECK-NEXT: .LBB0_3: # %falsebb
; CHECK-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; CHECK-NEXT: addi sp, sp, 128
; CHECK-NEXT: ret
  tail call void asm sideeffect "", "~{x1},~{x3},~{x4},~{x5},~{x6},~{x7},~{x8},~{x9},~{x10},~{x11},~{x12},~{x13},~{x14},~{x15},~{x16},~{x17},~{x18},~{x19},~{x20},~{x21},~{x22},~{x23},~{x24},~{x25},~{x26},~{x27},~{x28},~{x29},~{x30},~{x31}"()
  %a = icmp eq i64 %x, 0
  br i1 %a, label %truebb, label %falsebb
truebb:
  %b = icmp slt i32 %y, 0
  ret i1 %b
falsebb:
  ret i1 0
}

; Make sure we emit an lb for the stack reload in 'truebb' with Zbb.
define i64 @test_sext_b(i64 %x, i8 %y) nounwind {
; RV64I-LABEL: test_sext_b:
; RV64I: # %bb.0:
; RV64I-NEXT: addi sp, sp, -128
; RV64I-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; RV64I-NEXT: #APP
; RV64I-NEXT: #NO_APP
; RV64I-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; RV64I-NEXT: beqz a0, .LBB1_2
; RV64I-NEXT: # %bb.1: # %falsebb
; RV64I-NEXT: li a0, 0
; RV64I-NEXT: j .LBB1_3
; RV64I-NEXT: .LBB1_2: # %truebb
; RV64I-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
; RV64I-NEXT: slli a0, a0, 56
; RV64I-NEXT: srai a0, a0, 56
; RV64I-NEXT: .LBB1_3: # %falsebb
; RV64I-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; RV64I-NEXT: addi sp, sp, 128
; RV64I-NEXT: ret
;
; RV64ZB-LABEL: test_sext_b:
; RV64ZB: # %bb.0:
; RV64ZB-NEXT: addi sp, sp, -128
; RV64ZB-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: #APP
; RV64ZB-NEXT: #NO_APP
; RV64ZB-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: beqz a0, .LBB1_2
; RV64ZB-NEXT: # %bb.1: # %falsebb
; RV64ZB-NEXT: li a0, 0
; RV64ZB-NEXT: j .LBB1_3
; RV64ZB-NEXT: .LBB1_2: # %truebb
; RV64ZB-NEXT: lb a0, 8(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: .LBB1_3: # %falsebb
; RV64ZB-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: addi sp, sp, 128
; RV64ZB-NEXT: ret
  tail call void asm sideeffect "", "~{x1},~{x3},~{x4},~{x5},~{x6},~{x7},~{x8},~{x9},~{x10},~{x11},~{x12},~{x13},~{x14},~{x15},~{x16},~{x17},~{x18},~{x19},~{x20},~{x21},~{x22},~{x23},~{x24},~{x25},~{x26},~{x27},~{x28},~{x29},~{x30},~{x31}"()
  %a = icmp eq i64 %x, 0
  br i1 %a, label %truebb, label %falsebb
truebb:
  %b = sext i8 %y to i64
  ret i64 %b
falsebb:
  ret i64 0
}

; Make sure we emit an lh for the stack reload in 'truebb' with Zbb.
define i64 @test_sext_h(i64 %x, i16 %y) nounwind {
; RV64I-LABEL: test_sext_h:
; RV64I: # %bb.0:
; RV64I-NEXT: addi sp, sp, -128
; RV64I-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; RV64I-NEXT: #APP
; RV64I-NEXT: #NO_APP
; RV64I-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; RV64I-NEXT: beqz a0, .LBB2_2
; RV64I-NEXT: # %bb.1: # %falsebb
; RV64I-NEXT: li a0, 0
; RV64I-NEXT: j .LBB2_3
; RV64I-NEXT: .LBB2_2: # %truebb
; RV64I-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
; RV64I-NEXT: slli a0, a0, 48
; RV64I-NEXT: srai a0, a0, 48
; RV64I-NEXT: .LBB2_3: # %falsebb
; RV64I-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; RV64I-NEXT: addi sp, sp, 128
; RV64I-NEXT: ret
;
; RV64ZB-LABEL: test_sext_h:
; RV64ZB: # %bb.0:
; RV64ZB-NEXT: addi sp, sp, -128
; RV64ZB-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: #APP
; RV64ZB-NEXT: #NO_APP
; RV64ZB-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: beqz a0, .LBB2_2
; RV64ZB-NEXT: # %bb.1: # %falsebb
; RV64ZB-NEXT: li a0, 0
; RV64ZB-NEXT: j .LBB2_3
; RV64ZB-NEXT: .LBB2_2: # %truebb
; RV64ZB-NEXT: lh a0, 8(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: .LBB2_3: # %falsebb
; RV64ZB-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: addi sp, sp, 128
; RV64ZB-NEXT: ret
  tail call void asm sideeffect "", "~{x1},~{x3},~{x4},~{x5},~{x6},~{x7},~{x8},~{x9},~{x10},~{x11},~{x12},~{x13},~{x14},~{x15},~{x16},~{x17},~{x18},~{x19},~{x20},~{x21},~{x22},~{x23},~{x24},~{x25},~{x26},~{x27},~{x28},~{x29},~{x30},~{x31}"()
  %a = icmp eq i64 %x, 0
  br i1 %a, label %truebb, label %falsebb
truebb:
  %b = sext i16 %y to i64
  ret i64 %b
falsebb:
  ret i64 0
}

; Make sure we emit an lbu for the stack reload in 'truebb'. lbu is a base-ISA
; load, so both RV64I and RV64ZB share the common CHECK lines here.
define i64 @test_zext_b(i64 %x, i8 %y) nounwind {
; CHECK-LABEL: test_zext_b:
; CHECK: # %bb.0:
; CHECK-NEXT: addi sp, sp, -128
; CHECK-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; CHECK-NEXT: #APP
; CHECK-NEXT: #NO_APP
; CHECK-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; CHECK-NEXT: beqz a0, .LBB3_2
; CHECK-NEXT: # %bb.1: # %falsebb
; CHECK-NEXT: li a0, 0
; CHECK-NEXT: j .LBB3_3
; CHECK-NEXT: .LBB3_2: # %truebb
; CHECK-NEXT: lbu a0, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT: .LBB3_3: # %falsebb
; CHECK-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; CHECK-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; CHECK-NEXT: addi sp, sp, 128
; CHECK-NEXT: ret
  tail call void asm sideeffect "", "~{x1},~{x3},~{x4},~{x5},~{x6},~{x7},~{x8},~{x9},~{x10},~{x11},~{x12},~{x13},~{x14},~{x15},~{x16},~{x17},~{x18},~{x19},~{x20},~{x21},~{x22},~{x23},~{x24},~{x25},~{x26},~{x27},~{x28},~{x29},~{x30},~{x31}"()
  %a = icmp eq i64 %x, 0
  br i1 %a, label %truebb, label %falsebb
truebb:
  %b = zext i8 %y to i64
  ret i64 %b
falsebb:
  ret i64 0
}

; Make sure we emit an lhu for the stack reload in 'truebb' with Zbb.
define i64 @test_zext_h(i64 %x, i16 %y) nounwind {
; RV64I-LABEL: test_zext_h:
; RV64I: # %bb.0:
; RV64I-NEXT: addi sp, sp, -128
; RV64I-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; RV64I-NEXT: #APP
; RV64I-NEXT: #NO_APP
; RV64I-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; RV64I-NEXT: beqz a0, .LBB4_2
; RV64I-NEXT: # %bb.1: # %falsebb
; RV64I-NEXT: li a0, 0
; RV64I-NEXT: j .LBB4_3
; RV64I-NEXT: .LBB4_2: # %truebb
; RV64I-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
; RV64I-NEXT: slli a0, a0, 48
; RV64I-NEXT: srli a0, a0, 48
; RV64I-NEXT: .LBB4_3: # %falsebb
; RV64I-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; RV64I-NEXT: addi sp, sp, 128
; RV64I-NEXT: ret
;
; RV64ZB-LABEL: test_zext_h:
; RV64ZB: # %bb.0:
; RV64ZB-NEXT: addi sp, sp, -128
; RV64ZB-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: #APP
; RV64ZB-NEXT: #NO_APP
; RV64ZB-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: beqz a0, .LBB4_2
; RV64ZB-NEXT: # %bb.1: # %falsebb
; RV64ZB-NEXT: li a0, 0
; RV64ZB-NEXT: j .LBB4_3
; RV64ZB-NEXT: .LBB4_2: # %truebb
; RV64ZB-NEXT: lhu a0, 8(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: .LBB4_3: # %falsebb
; RV64ZB-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: addi sp, sp, 128
; RV64ZB-NEXT: ret
  tail call void asm sideeffect "", "~{x1},~{x3},~{x4},~{x5},~{x6},~{x7},~{x8},~{x9},~{x10},~{x11},~{x12},~{x13},~{x14},~{x15},~{x16},~{x17},~{x18},~{x19},~{x20},~{x21},~{x22},~{x23},~{x24},~{x25},~{x26},~{x27},~{x28},~{x29},~{x30},~{x31}"()
  %a = icmp eq i64 %x, 0
  br i1 %a, label %truebb, label %falsebb
truebb:
  %b = zext i16 %y to i64
  ret i64 %b
falsebb:
  ret i64 0
}

; Make sure we emit an lwu for the stack reload in 'truebb' with Zbb.
define i64 @test_zext_w(i64 %x, i32 %y) nounwind {
; RV64I-LABEL: test_zext_w:
; RV64I: # %bb.0:
; RV64I-NEXT: addi sp, sp, -128
; RV64I-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; RV64I-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; RV64I-NEXT: #APP
; RV64I-NEXT: #NO_APP
; RV64I-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; RV64I-NEXT: beqz a0, .LBB5_2
; RV64I-NEXT: # %bb.1: # %falsebb
; RV64I-NEXT: li a0, 0
; RV64I-NEXT: j .LBB5_3
; RV64I-NEXT: .LBB5_2: # %truebb
; RV64I-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
; RV64I-NEXT: slli a0, a0, 32
; RV64I-NEXT: srli a0, a0, 32
; RV64I-NEXT: .LBB5_3: # %falsebb
; RV64I-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; RV64I-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; RV64I-NEXT: addi sp, sp, 128
; RV64I-NEXT: ret
;
; RV64ZB-LABEL: test_zext_w:
; RV64ZB: # %bb.0:
; RV64ZB-NEXT: addi sp, sp, -128
; RV64ZB-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s0, 112(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s1, 104(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s2, 96(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s3, 88(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s4, 80(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s5, 72(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s6, 64(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s7, 56(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s8, 48(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s9, 40(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s10, 32(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd s11, 24(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd a1, 8(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
; RV64ZB-NEXT: #APP
; RV64ZB-NEXT: #NO_APP
; RV64ZB-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: beqz a0, .LBB5_2
; RV64ZB-NEXT: # %bb.1: # %falsebb
; RV64ZB-NEXT: li a0, 0
; RV64ZB-NEXT: j .LBB5_3
; RV64ZB-NEXT: .LBB5_2: # %truebb
; RV64ZB-NEXT: lwu a0, 8(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: .LBB5_3: # %falsebb
; RV64ZB-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s0, 112(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s1, 104(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s2, 96(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s3, 88(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s4, 80(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s5, 72(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s6, 64(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s7, 56(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s8, 48(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s9, 40(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s10, 32(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: ld s11, 24(sp) # 8-byte Folded Reload
; RV64ZB-NEXT: addi sp, sp, 128
; RV64ZB-NEXT: ret
  tail call void asm sideeffect "", "~{x1},~{x3},~{x4},~{x5},~{x6},~{x7},~{x8},~{x9},~{x10},~{x11},~{x12},~{x13},~{x14},~{x15},~{x16},~{x17},~{x18},~{x19},~{x20},~{x21},~{x22},~{x23},~{x24},~{x25},~{x26},~{x27},~{x28},~{x29},~{x30},~{x31}"()
  %a = icmp eq i64 %x, 0
  br i1 %a, label %truebb, label %falsebb
truebb:
  %b = zext i32 %y to i64
  ret i64 %b
falsebb:
  ret i64 0
}