;; Check that shadow call stack doesn't interfere with save/restore

; RUN: llc -mtriple=riscv32 < %s | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 < %s | FileCheck %s -check-prefix=RV64I
; RUN: llc -mtriple=riscv32 -mattr=+save-restore < %s | FileCheck %s -check-prefix=RV32I-SR
; RUN: llc -mtriple=riscv64 -mattr=+save-restore < %s | FileCheck %s -check-prefix=RV64I-SR
; RUN: llc -mtriple=riscv32 -mattr=+f,+save-restore -target-abi=ilp32f < %s | FileCheck %s -check-prefix=RV32I-FP-SR
; RUN: llc -mtriple=riscv64 -mattr=+f,+d,+save-restore -target-abi=lp64d < %s | FileCheck %s -check-prefix=RV64I-FP-SR

@var2 = global [30 x i32] zeroinitializer

;; A function marked shadowcallstack that spills enough callee-saved registers
;; to trigger the save/restore libcalls. Without +save-restore, no
;; __riscv_save/__riscv_restore calls may appear; with +save-restore (with or
;; without FP ABIs), the _12 variants must be emitted — i.e. the shadow call
;; stack attribute must not suppress the save/restore optimization.
define void @callee_scs() nounwind shadowcallstack {
; RV32I-LABEL: callee_scs:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore
;
; RV64I-LABEL: callee_scs:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore
;
; RV32I-SR-LABEL: callee_scs:
; RV32I-SR: call t0, __riscv_save_12
; RV32I-SR: tail __riscv_restore_12
;
; RV64I-SR-LABEL: callee_scs:
; RV64I-SR: call t0, __riscv_save_12
; RV64I-SR: tail __riscv_restore_12
;
; RV32I-FP-SR-LABEL: callee_scs:
; RV32I-FP-SR: call t0, __riscv_save_12
; RV32I-FP-SR: tail __riscv_restore_12
;
; RV64I-FP-SR-LABEL: callee_scs:
; RV64I-FP-SR: call t0, __riscv_save_12
; RV64I-FP-SR: tail __riscv_restore_12
  ;; Load/store a 30-element aggregate to force heavy register pressure so
  ;; every callee-saved register is spilled (hence the _12 libcall variants).
  %val = load [30 x i32], ptr @var2
  store volatile [30 x i32] %val, ptr @var2
  ret void
}