; RUN: llc -mtriple=riscv32 < %s | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 < %s | FileCheck %s -check-prefix=RV64I
; RUN: llc -mtriple=riscv32 -mattr=+save-restore < %s | FileCheck %s -check-prefix=RV32I-SR
; RUN: llc -mtriple=riscv64 -mattr=+save-restore < %s | FileCheck %s -check-prefix=RV64I-SR
; RUN: llc -mtriple=riscv32 -mattr=+f,+save-restore -target-abi=ilp32f < %s | FileCheck %s -check-prefix=RV32I-FP-SR
; RUN: llc -mtriple=riscv64 -mattr=+f,+d,+save-restore -target-abi=lp64d < %s | FileCheck %s -check-prefix=RV64I-FP-SR

; Check that the correct save/restore libcalls are generated.
; The whole-array load of @var0/@var1/@var2 followed by a volatile store keeps
; many values live at once, forcing callee-saved registers to be used; the _N
; suffix checked on __riscv_save_N/__riscv_restore_N grows with the array size
; (18 -> _5, 24 -> _11, 30 -> _12), i.e. with how many registers need saving.

@var0 = global [18 x i32] zeroinitializer
@var1 = global [24 x i32] zeroinitializer
@var2 = global [30 x i32] zeroinitializer

define void @callee_saved0() nounwind {
; RV32I-LABEL: callee_saved0:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: callee_saved0:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: callee_saved0:
; RV32I-SR: call t0, __riscv_save_5
; RV32I-SR: tail __riscv_restore_5
;
; RV64I-SR-LABEL: callee_saved0:
; RV64I-SR: call t0, __riscv_save_5
; RV64I-SR: tail __riscv_restore_5
;
; RV32I-FP-SR-LABEL: callee_saved0:
; RV32I-FP-SR: call t0, __riscv_save_5
; RV32I-FP-SR: tail __riscv_restore_5
;
; RV64I-FP-SR-LABEL: callee_saved0:
; RV64I-FP-SR: call t0, __riscv_save_5
; RV64I-FP-SR: tail __riscv_restore_5
  %val = load [18 x i32], ptr @var0
  store volatile [18 x i32] %val, ptr @var0
  ret void
}

define void @callee_saved1() nounwind {
; RV32I-LABEL: callee_saved1:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: callee_saved1:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: callee_saved1:
; RV32I-SR: call t0, __riscv_save_11
; RV32I-SR: tail __riscv_restore_11
;
; RV64I-SR-LABEL: callee_saved1:
; RV64I-SR: call t0, __riscv_save_11
; RV64I-SR: tail __riscv_restore_11
;
; RV32I-FP-SR-LABEL: callee_saved1:
; RV32I-FP-SR: call t0, __riscv_save_11
; RV32I-FP-SR: tail __riscv_restore_11
;
; RV64I-FP-SR-LABEL: callee_saved1:
; RV64I-FP-SR: call t0, __riscv_save_11
; RV64I-FP-SR: tail __riscv_restore_11
  %val = load [24 x i32], ptr @var1
  store volatile [24 x i32] %val, ptr @var1
  ret void
}

define void @callee_saved2() nounwind {
; RV32I-LABEL: callee_saved2:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: callee_saved2:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: callee_saved2:
; RV32I-SR: call t0, __riscv_save_12
; RV32I-SR: tail __riscv_restore_12
;
; RV64I-SR-LABEL: callee_saved2:
; RV64I-SR: call t0, __riscv_save_12
; RV64I-SR: tail __riscv_restore_12
;
; RV32I-FP-SR-LABEL: callee_saved2:
; RV32I-FP-SR: call t0, __riscv_save_12
; RV32I-FP-SR: tail __riscv_restore_12
;
; RV64I-FP-SR-LABEL: callee_saved2:
; RV64I-FP-SR: call t0, __riscv_save_12
; RV64I-FP-SR: tail __riscv_restore_12
  %val = load [30 x i32], ptr @var2
  store volatile [30 x i32] %val, ptr @var2
  ret void
}

; Check that floating point callee saved registers are still manually saved and
; restored.
; The inline asm clobbers fs0 (f8) plus six integer callee-saved registers, so
; the integer registers go through the libcall while fs0 is spilled/reloaded
; with explicit fsw/flw (RV32) or fsd/fld (RV64) around the libcall pair.

define void @callee_saved_fp() nounwind {
; RV32I-LABEL: callee_saved_fp:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: callee_saved_fp:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: callee_saved_fp:
; RV32I-SR: call t0, __riscv_save_7
; RV32I-SR: tail __riscv_restore_7
;
; RV64I-SR-LABEL: callee_saved_fp:
; RV64I-SR: call t0, __riscv_save_7
; RV64I-SR: tail __riscv_restore_7
;
; RV32I-FP-SR-LABEL: callee_saved_fp:
; RV32I-FP-SR: call t0, __riscv_save_7
; RV32I-FP-SR-NEXT: addi sp, sp, -16
; RV32I-FP-SR-NEXT: fsw fs0, 12(sp)
; RV32I-FP-SR: flw fs0, 12(sp)
; RV32I-FP-SR-NEXT: addi sp, sp, 16
; RV32I-FP-SR-NEXT: tail __riscv_restore_7
;
; RV64I-FP-SR-LABEL: callee_saved_fp:
; RV64I-FP-SR: call t0, __riscv_save_7
; RV64I-FP-SR-NEXT: addi sp, sp, -16
; RV64I-FP-SR-NEXT: fsd fs0, 8(sp)
; RV64I-FP-SR: fld fs0, 8(sp)
; RV64I-FP-SR-NEXT: addi sp, sp, 16
; RV64I-FP-SR-NEXT: tail __riscv_restore_7
  call void asm sideeffect "", "~{f8},~{x9},~{x18},~{x19},~{x20},~{x21},~{x22}"()
  ret void
}

; Check that preserving tail calls is preferred over save/restore

declare i32 @tail_callee(i32 %i)

define i32 @tail_call(i32 %i) nounwind {
; RV32I-LABEL: tail_call:
; RV32I-NOT: call t0, __riscv_save
; RV32I: tail tail_callee
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: tail_call:
; RV64I-NOT: call t0, __riscv_save
; RV64I: tail tail_callee
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: tail_call:
; RV32I-SR-NOT: call t0, __riscv_save
; RV32I-SR: tail tail_callee
; RV32I-SR-NOT: tail __riscv_restore
;
; RV64I-SR-LABEL: tail_call:
; RV64I-SR-NOT: call t0, __riscv_save
; RV64I-SR: tail tail_callee
; RV64I-SR-NOT: tail __riscv_restore
;
; RV32I-FP-SR-LABEL: tail_call:
; RV32I-FP-SR-NOT: call t0, __riscv_save
; RV32I-FP-SR: tail tail_callee
; RV32I-FP-SR-NOT: tail __riscv_restore
;
; RV64I-FP-SR-LABEL: tail_call:
; RV64I-FP-SR-NOT: call t0, __riscv_save
; RV64I-FP-SR: tail tail_callee
; RV64I-FP-SR-NOT: tail __riscv_restore
entry:
  %val = load [18 x i32], ptr @var0
  store volatile [18 x i32] %val, ptr @var0
  %r = tail call i32 @tail_callee(i32 %i)
  ret i32 %r
}

; Check that functions with varargs do not use save/restore code

declare void @llvm.va_start(ptr)
declare void @llvm.va_end(ptr)

define i32 @varargs(ptr %fmt, ...) nounwind {
; RV32I-LABEL: varargs:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: varargs:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: varargs:
; RV32I-SR-NOT: call t0, __riscv_save
; RV32I-SR-NOT: tail __riscv_restore
;
; RV64I-SR-LABEL: varargs:
; RV64I-SR-NOT: call t0, __riscv_save
; RV64I-SR-NOT: tail __riscv_restore
;
; RV32I-FP-SR-LABEL: varargs:
; RV32I-FP-SR-NOT: call t0, __riscv_save
; RV32I-FP-SR-NOT: tail __riscv_restore
;
; RV64I-FP-SR-LABEL: varargs:
; RV64I-FP-SR-NOT: call t0, __riscv_save
; RV64I-FP-SR-NOT: tail __riscv_restore
  %va = alloca ptr, align 4
  call void @llvm.va_start(ptr %va)
  %argp.cur = load ptr, ptr %va, align 4
  %argp.next = getelementptr inbounds i8, ptr %argp.cur, i32 4
  store ptr %argp.next, ptr %va, align 4
  %1 = load i32, ptr %argp.cur, align 4
  call void @llvm.va_end(ptr %va)
  ret i32 %1
}

; A fixed-arity function with more arguments than fit in registers (9 x i32)
; still uses save/restore; only true varargs are excluded.
define void @many_args(i32, i32, i32, i32, i32, i32, i32, i32, i32) nounwind {
; RV32I-LABEL: many_args:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: many_args:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: many_args:
; RV32I-SR: call t0, __riscv_save_5
; RV32I-SR: tail __riscv_restore_5
;
; RV64I-SR-LABEL: many_args:
; RV64I-SR: call t0, __riscv_save_5
; RV64I-SR: tail __riscv_restore_5
;
; RV32I-FP-SR-LABEL: many_args:
; RV32I-FP-SR: call t0, __riscv_save_5
; RV32I-FP-SR: tail __riscv_restore_5
;
; RV64I-FP-SR-LABEL: many_args:
; RV64I-FP-SR: call t0, __riscv_save_5
; RV64I-FP-SR: tail __riscv_restore_5
entry:
  %val = load [18 x i32], ptr @var0
  store volatile [18 x i32] %val, ptr @var0
  ret void
}

; Check that dynamic allocation calculations remain correct

declare ptr @llvm.stacksave()
declare void @llvm.stackrestore(ptr)
declare void @notdead(ptr)

define void @alloca(i32 %n) nounwind {
; RV32I-LABEL: alloca:
; RV32I-NOT: call t0, __riscv_save
; RV32I: addi s0, sp, 16
; RV32I: addi sp, s0, -16
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: alloca:
; RV64I-NOT: call t0, __riscv_save
; RV64I: addi s0, sp, 32
; RV64I: addi sp, s0, -32
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: alloca:
; RV32I-SR: call t0, __riscv_save_2
; RV32I-SR: addi s0, sp, 16
; RV32I-SR: addi sp, s0, -16
; RV32I-SR: tail __riscv_restore_2
;
; RV64I-SR-LABEL: alloca:
; RV64I-SR: call t0, __riscv_save_2
; RV64I-SR: addi s0, sp, 32
; RV64I-SR: addi sp, s0, -32
; RV64I-SR: tail __riscv_restore_2
;
; RV32I-FP-SR-LABEL: alloca:
; RV32I-FP-SR: call t0, __riscv_save_2
; RV32I-FP-SR: addi s0, sp, 16
; RV32I-FP-SR: addi sp, s0, -16
; RV32I-FP-SR: tail __riscv_restore_2
;
; RV64I-FP-SR-LABEL: alloca:
; RV64I-FP-SR: call t0, __riscv_save_2
; RV64I-FP-SR: addi s0, sp, 32
; RV64I-FP-SR: addi sp, s0, -32
; RV64I-FP-SR: tail __riscv_restore_2
  %sp = call ptr @llvm.stacksave()
  %addr = alloca i8, i32 %n
  call void @notdead(ptr %addr)
  call void @llvm.stackrestore(ptr %sp)
  ret void
}

; Check that functions with interrupt attribute do not use save/restore code

declare i32 @foo(...)
define void @interrupt() nounwind "interrupt"="supervisor" {
; RV32I-LABEL: interrupt:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: interrupt:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: interrupt:
; RV32I-SR-NOT: call t0, __riscv_save
; RV32I-SR-NOT: tail __riscv_restore
;
; RV64I-SR-LABEL: interrupt:
; RV64I-SR-NOT: call t0, __riscv_save
; RV64I-SR-NOT: tail __riscv_restore
;
; RV32I-FP-SR-LABEL: interrupt:
; RV32I-FP-SR-NOT: call t0, __riscv_save
; RV32I-FP-SR-NOT: tail __riscv_restore
;
; RV64I-FP-SR-LABEL: interrupt:
; RV64I-FP-SR-NOT: call t0, __riscv_save
; RV64I-FP-SR-NOT: tail __riscv_restore
  %call = call i32 @foo()
  ret void
}