//===-- restore.S - restore up to 12 callee-save registers ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Multiple entry points depending on number of registers to restore
//
//===----------------------------------------------------------------------===//

// All of the entry points are in the same section since we rely on many of
// them falling through into each other and don't want the linker to
// accidentally split them up, garbage collect, or reorder them.
//
// The entry points are grouped up into 2s for rv64 and 4s for rv32 since this
// is the minimum grouping which will maintain the required 16-byte stack
// alignment.

  .text

#if __riscv_xlen == 32

  .globl __riscv_restore_12
  .type __riscv_restore_12,@function
__riscv_restore_12:
  lw s11, 12(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_11/10/9/8

  .globl __riscv_restore_11
  .type __riscv_restore_11,@function
  .globl __riscv_restore_10
  .type __riscv_restore_10,@function
  .globl __riscv_restore_9
  .type __riscv_restore_9,@function
  .globl __riscv_restore_8
  .type __riscv_restore_8,@function
__riscv_restore_11:
__riscv_restore_10:
__riscv_restore_9:
__riscv_restore_8:
  lw s10, 0(sp)
  lw s9, 4(sp)
  lw s8, 8(sp)
  lw s7, 12(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_7/6/5/4

  .globl __riscv_restore_7
  .type __riscv_restore_7,@function
  .globl __riscv_restore_6
  .type __riscv_restore_6,@function
  .globl __riscv_restore_5
  .type __riscv_restore_5,@function
  .globl __riscv_restore_4
  .type __riscv_restore_4,@function
__riscv_restore_7:
__riscv_restore_6:
__riscv_restore_5:
__riscv_restore_4:
  lw s6, 0(sp)
  lw s5, 4(sp)
  lw s4, 8(sp)
  lw s3, 12(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_3/2/1/0

  .globl __riscv_restore_3
  .type __riscv_restore_3,@function
  .globl __riscv_restore_2
  .type __riscv_restore_2,@function
  .globl __riscv_restore_1
  .type __riscv_restore_1,@function
  .globl __riscv_restore_0
  .type __riscv_restore_0,@function
__riscv_restore_3:
__riscv_restore_2:
__riscv_restore_1:
__riscv_restore_0:
  lw s2, 0(sp)
  lw s1, 4(sp)
  lw s0, 8(sp)
  lw ra, 12(sp)
  addi sp, sp, 16
  ret

#elif __riscv_xlen == 64

  .globl __riscv_restore_12
  .type __riscv_restore_12,@function
__riscv_restore_12:
  ld s11, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_11/10

  .globl __riscv_restore_11
  .type __riscv_restore_11,@function
  .globl __riscv_restore_10
  .type __riscv_restore_10,@function
__riscv_restore_11:
__riscv_restore_10:
  ld s10, 0(sp)
  ld s9, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_9/8

  .globl __riscv_restore_9
  .type __riscv_restore_9,@function
  .globl __riscv_restore_8
  .type __riscv_restore_8,@function
__riscv_restore_9:
__riscv_restore_8:
  ld s8, 0(sp)
  ld s7, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_7/6

  .globl __riscv_restore_7
  .type __riscv_restore_7,@function
  .globl __riscv_restore_6
  .type __riscv_restore_6,@function
__riscv_restore_7:
__riscv_restore_6:
  ld s6, 0(sp)
  ld s5, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_5/4

  .globl __riscv_restore_5
  .type __riscv_restore_5,@function
  .globl __riscv_restore_4
  .type __riscv_restore_4,@function
__riscv_restore_5:
__riscv_restore_4:
  ld s4, 0(sp)
  ld s3, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_3/2

  .globl __riscv_restore_3
  .type __riscv_restore_3,@function
  .globl __riscv_restore_2
  .type __riscv_restore_2,@function
__riscv_restore_3:
__riscv_restore_2:
  ld s2, 0(sp)
  ld s1, 8(sp)
  addi sp, sp, 16
  // fallthrough into __riscv_restore_1/0

  .globl __riscv_restore_1
  .type __riscv_restore_1,@function
  .globl __riscv_restore_0
  .type __riscv_restore_0,@function
__riscv_restore_1:
__riscv_restore_0:
  ld s0, 0(sp)
  ld ra, 8(sp)
  addi sp, sp, 16
  ret

#else
# error "xlen must be 32 or 64 for save-restore implementation"
#endif
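
// Usage sketch (illustrative only, kept in comments so nothing here is
// assembled): with the -msave-restore option, GCC and Clang shrink
// prologues/epilogues by calling the matching __riscv_save_N routines
// (in save.S) on entry and tail-calling __riscv_restore_N on exit. The
// function name and register count below are assumptions chosen for the
// example, not taken from this file:
//
//   foo:
//     call t0, __riscv_save_2     // spill ra, s0, s1; returns via t0
//     ...                         // function body
//     tail __riscv_restore_2      // reload ra, s0, s1; pop frame; ret
//
// The tail call returns straight to foo's caller because
// __riscv_restore_N reloads the original ra before its final ret.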