; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -verify-machineinstrs < %s | FileCheck %s
; A collection of basic functionality tests for statepoint lowering - most
; interesting cornercases are exercised through the x86 tests.

target datalayout = "e-i64:64-f80:128-n8:16:32:64-S128"
target triple = "riscv64"

%struct = type { i64, i64 }

declare zeroext i1 @return_i1()
declare zeroext i32 @return_i32()
declare ptr @return_i32ptr()
declare float @return_float()
declare %struct @return_struct()
declare void @varargf(i32, ...)

define i1 @test_i1_return() gc "statepoint-example" {
; CHECK-LABEL: test_i1_return:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    call return_i1
; CHECK-NEXT:  .Ltmp0:
; CHECK-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
; This is just checking that a i1 gets lowered normally when there's no extra
; state arguments to the statepoint
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(i1 ()) @return_i1, i32 0, i32 0, i32 0, i32 0)
  %call1 = call zeroext i1 @llvm.experimental.gc.result.i1(token %safepoint_token)
  ret i1 %call1
}

define i32 @test_i32_return() gc "statepoint-example" {
; CHECK-LABEL: test_i32_return:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    call return_i32
; CHECK-NEXT:  .Ltmp1:
; CHECK-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(i32 ()) @return_i32, i32 0, i32 0, i32 0, i32 0)
  %call1 = call zeroext i32 @llvm.experimental.gc.result.i32(token %safepoint_token)
  ret i32 %call1
}

define ptr @test_i32ptr_return() gc "statepoint-example" {
; CHECK-LABEL: test_i32ptr_return:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    call return_i32ptr
; CHECK-NEXT:  .Ltmp2:
; CHECK-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(ptr ()) @return_i32ptr, i32 0, i32 0, i32 0, i32 0)
  %call1 = call ptr @llvm.experimental.gc.result.p0(token %safepoint_token)
  ret ptr %call1
}

define float @test_float_return() gc "statepoint-example" {
; CHECK-LABEL: test_float_return:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    call return_float
; CHECK-NEXT:  .Ltmp3:
; CHECK-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(float ()) @return_float, i32 0, i32 0, i32 0, i32 0)
  %call1 = call float @llvm.experimental.gc.result.f32(token %safepoint_token)
  ret float %call1
}

define %struct @test_struct_return() gc "statepoint-example" {
; CHECK-LABEL: test_struct_return:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    call return_struct
; CHECK-NEXT:  .Ltmp4:
; CHECK-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(%struct ()) @return_struct, i32 0, i32 0, i32 0, i32 0)
  %call1 = call %struct @llvm.experimental.gc.result.struct(token %safepoint_token)
  ret %struct %call1
}

define i1 @test_relocate(ptr addrspace(1) %a) gc "statepoint-example" {
; CHECK-LABEL: test_relocate:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    sd a0, 0(sp)
; CHECK-NEXT:    call return_i1
; CHECK-NEXT:  .Ltmp5:
; CHECK-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
; Check that an unused relocate has no code-generation impact
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(i1 ()) @return_i1, i32 0, i32 0, i32 0, i32 0) ["gc-live" (ptr addrspace(1) %a)]
  %call1 = call ptr addrspace(1) @llvm.experimental.gc.relocate.p1(token %safepoint_token, i32 0, i32 0)
  %call2 = call zeroext i1 @llvm.experimental.gc.result.i1(token %safepoint_token)
  ret i1 %call2
}

define void @test_void_vararg() gc "statepoint-example" {
; CHECK-LABEL: test_void_vararg:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    li a0, 42
; CHECK-NEXT:    li a1, 43
; CHECK-NEXT:    call varargf
; CHECK-NEXT:  .Ltmp6:
; CHECK-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
; Check a statepoint wrapping a *ptr returning vararg function works
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(void (i32, ...)) @varargf, i32 2, i32 0, i32 42, i32 43, i32 0, i32 0)
  ;; if we try to use the result from a statepoint wrapping a
  ;; non-void-returning varargf, we will experience a crash.
  ret void
}

define i1 @test_i1_return_patchable() gc "statepoint-example" {
; CHECK-LABEL: test_i1_return_patchable:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -16
; CHECK-NEXT:    .cfi_def_cfa_offset 16
; CHECK-NEXT:    sd ra, 8(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    nop
; CHECK-NEXT:  .Ltmp7:
; CHECK-NEXT:    ld ra, 8(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 16
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
; A patchable variant of test_i1_return
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 4, ptr elementtype(i1 ()) null, i32 0, i32 0, i32 0, i32 0)
  %call1 = call zeroext i1 @llvm.experimental.gc.result.i1(token %safepoint_token)
  ret i1 %call1
}

declare void @consume(ptr addrspace(1) %obj)

define i1 @test_cross_bb(ptr addrspace(1) %a, i1 %external_cond) gc "statepoint-example" {
; CHECK-LABEL: test_cross_bb:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -32
; CHECK-NEXT:    .cfi_def_cfa_offset 32
; CHECK-NEXT:    sd ra, 24(sp) # 8-byte Folded Spill
; CHECK-NEXT:    sd s0, 16(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    .cfi_offset s0, -16
; CHECK-NEXT:    andi s0, a1, 1
; CHECK-NEXT:    sd a0, 8(sp)
; CHECK-NEXT:    call return_i1
; CHECK-NEXT:  .Ltmp8:
; CHECK-NEXT:    beqz s0, .LBB8_2
; CHECK-NEXT:  # %bb.1: # %left
; CHECK-NEXT:    ld a1, 8(sp)
; CHECK-NEXT:    mv s0, a0
; CHECK-NEXT:    mv a0, a1
; CHECK-NEXT:    call consume
; CHECK-NEXT:    mv a0, s0
; CHECK-NEXT:    j .LBB8_3
; CHECK-NEXT:  .LBB8_2: # %right
; CHECK-NEXT:    li a0, 1
; CHECK-NEXT:  .LBB8_3: # %right
; CHECK-NEXT:    ld ra, 24(sp) # 8-byte Folded Reload
; CHECK-NEXT:    ld s0, 16(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    .cfi_restore s0
; CHECK-NEXT:    addi sp, sp, 32
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
entry:
  %safepoint_token = tail call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(i1 ()) @return_i1, i32 0, i32 0, i32 0, i32 0) ["gc-live" (ptr addrspace(1) %a)]
  br i1 %external_cond, label %left, label %right

left:
  %call1 = call ptr addrspace(1) @llvm.experimental.gc.relocate.p1(token %safepoint_token, i32 0, i32 0)
  %call2 = call zeroext i1 @llvm.experimental.gc.result.i1(token %safepoint_token)
  call void @consume(ptr addrspace(1) %call1)
  ret i1 %call2

right:
  ret i1 true
}

%struct2 = type { i64, i64, i64 }

declare void @consume_attributes(i32, ptr nest, i32, ptr byval(%struct2))

define void @test_attributes(ptr byval(%struct2) %s) gc "statepoint-example" {
; CHECK-LABEL: test_attributes:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    addi sp, sp, -32
; CHECK-NEXT:    .cfi_def_cfa_offset 32
; CHECK-NEXT:    sd ra, 24(sp) # 8-byte Folded Spill
; CHECK-NEXT:    .cfi_offset ra, -8
; CHECK-NEXT:    ld a1, 16(a0)
; CHECK-NEXT:    sd a1, 16(sp)
; CHECK-NEXT:    ld a1, 8(a0)
; CHECK-NEXT:    sd a1, 8(sp)
; CHECK-NEXT:    ld a0, 0(a0)
; CHECK-NEXT:    sd a0, 0(sp)
; CHECK-NEXT:    li a0, 42
; CHECK-NEXT:    li a1, 17
; CHECK-NEXT:    mv a2, sp
; CHECK-NEXT:    li t2, 0
; CHECK-NEXT:    call consume_attributes
; CHECK-NEXT:  .Ltmp9:
; CHECK-NEXT:    ld ra, 24(sp) # 8-byte Folded Reload
; CHECK-NEXT:    .cfi_restore ra
; CHECK-NEXT:    addi sp, sp, 32
; CHECK-NEXT:    .cfi_def_cfa_offset 0
; CHECK-NEXT:    ret
entry:
; Check that arguments with attributes are lowered correctly.
; We call a function that has a nest argument and a byval argument.
  %statepoint_token = call token (i64, i32, ptr, i32, i32, ...) @llvm.experimental.gc.statepoint.p0(i64 0, i32 0, ptr elementtype(void (i32, ptr, i32, ptr)) @consume_attributes, i32 4, i32 0, i32 42, ptr nest null, i32 17, ptr byval(%struct2) %s, i32 0, i32 0)
  ret void
}

declare token @llvm.experimental.gc.statepoint.p0(i64, i32, ptr, i32, i32, ...)
declare i1 @llvm.experimental.gc.result.i1(token)

declare i32 @llvm.experimental.gc.result.i32(token)

declare ptr @llvm.experimental.gc.result.p0(token)

declare float @llvm.experimental.gc.result.f32(token)

declare %struct @llvm.experimental.gc.result.struct(token)



declare ptr addrspace(1) @llvm.experimental.gc.relocate.p1(token, i32, i32)