/*	$NetBSD: frame.h,v 1.48 2020/08/14 16:18:36 skrll Exp $	*/

/*
 * Copyright (c) 1994-1997 Mark Brinicombe.
 * Copyright (c) 1994 Brini.
 * All rights reserved.
 *
 * This code is derived from software written for Brini by Mark Brinicombe
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by Brini.
 * 4. The name of the company nor the name of the author may be used to
 *    endorse or promote products derived from this software without specific
 *    prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL BRINI OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * RiscBSD kernel project
 *
 * frame.h
 *
 * Stack frame structures
 *
 * Created      : 30/09/94
 */

#ifndef _ARM32_FRAME_H_
#define _ARM32_FRAME_H_

#include <arm/frame.h>		/* Common ARM stack frames */

#ifndef _LOCORE

/*
 * Switch frame.
 *
 * Should be a multiple of 8 bytes for dumpsys.
 */

struct switchframe {
	u_int	sf_r4;
	u_int	sf_r5;
	u_int	sf_r6;
	u_int	sf_r7;
	u_int	sf_sp;
	u_int	sf_pc;
};
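
/*
 * Illustrative sketch (hypothetical code, not the actual MD
 * implementation): a new lwp's switchframe is seeded so that the
 * first switch to it "returns" into a chosen entry point.  The names
 * func, arg, ksp and entry below are placeholders for illustration;
 * the authoritative code lives in the machine-dependent sources.
 *
 *	struct switchframe *sf = ...;
 *
 *	sf->sf_r4 = (u_int)func;	// callee-saved regs survive the
 *	sf->sf_r5 = (u_int)arg;		// switch, so they can carry args
 *	sf->sf_sp = (u_int)ksp;		// kernel stack to resume on
 *	sf->sf_pc = (u_int)entry;	// where the switch resumes
 */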

/*
 * System stack frames.
 */

struct clockframe {
	struct trapframe cf_tf;
};

/*
 * Stack frame.  Used during stack traces (db_trace.c).
 */
struct frame {
	u_int	fr_fp;
	u_int	fr_sp;
	u_int	fr_lr;
	u_int	fr_pc;
};

#ifdef _KERNEL
void validate_trapframe(trapframe_t *, int);
#endif /* _KERNEL */

#else /* _LOCORE */

#include "opt_compat_netbsd.h"
#include "opt_execfmt.h"
#include "opt_multiprocessor.h"
#include "opt_cpuoptions.h"
#include "opt_arm_debug.h"
#include "opt_cputypes.h"
#include "opt_dtrace.h"

#include <arm/locore.h>

#ifdef _ARM_ARCH_4T
#define	B_CF_CONTROL(rX)						;\
	ldr	ip, [rX, #CF_CONTROL]	/* get function addr */		;\
	bx	ip			/* branch to cpu_control */
#else
#define	B_CF_CONTROL(rX)						;\
	ldr	pc, [rX, #CF_CONTROL]	/* branch to cpu_control */
#endif
#ifdef _ARM_ARCH_5T
#define	BL_CF_CONTROL(rX)						;\
	ldr	ip, [rX, #CF_CONTROL]	/* get function addr */		;\
	blx	ip			/* call cpu_control */
#else
#define	BL_CF_CONTROL(rX)						;\
	mov	lr, pc							;\
	ldr	pc, [rX, #CF_CONTROL]	/* call cpu_control */
#endif

/*
 * This macro is used by DO_AST_AND_RESTORE_ALIGNMENT_FAULTS to process
 * any pending softints.
 */
#if defined(__HAVE_FAST_SOFTINTS) && !defined(__HAVE_PIC_FAST_SOFTINTS)
#define	DO_PENDING_SOFTINTS						\
	ldr	r0, [r4, #CI_INTR_DEPTH] /* Get current intr depth */	;\
	cmp	r0, #0			/* Test for 0. */		;\
	bne	10f			/* skip softints if != 0 */	;\
	ldr	r0, [r4, #CI_CPL]	/* Get current priority level */;\
	ldr	r1, [r4, #CI_SOFTINTS]	/* Get pending softint mask */	;\
	lsrs	r0, r1, r0		/* shift mask by cpl */		;\
	blne	_C_LABEL(dosoftints)	/* dosoftints(void) */		;\
10:
#else
#define	DO_PENDING_SOFTINTS		/* nothing */
#endif

#ifdef _ARM_ARCH_6
#define	GET_CPSR(rb)			/* nothing */
#define	CPSID_I(ra,rb)			cpsid	i
#define	CPSIE_I(ra,rb)			cpsie	i
#else
#define	GET_CPSR(rb)							\
	mrs	rb, cpsr		/* fetch CPSR */

#define	CPSID_I(ra,rb)							\
	orr	ra, rb, #(IF32_bits)					;\
	msr	cpsr_c, ra		/* Disable interrupts */

#define	CPSIE_I(ra,rb)							\
	bic	ra, rb, #(IF32_bits)					;\
	msr	cpsr_c, ra		/* Restore interrupts */
#endif
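
/*
 * Usage pattern (a sketch; the DO_AST_AND_RESTORE_ALIGNMENT_FAULTS
 * macros below follow exactly this shape).  On ARMv6+ GET_CPSR is a
 * no-op and cpsid/cpsie are used directly; ra is only a scratch
 * register for the pre-v6 case:
 *
 *	GET_CPSR(r6)		(r6 = saved CPSR, pre-v6 only)
 *	CPSID_I(r1, r6)		(mask interrupts)
 *	...critical section...
 *	CPSIE_I(r1, r6)		(unmask/restore from the saved state)
 */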

#define	DO_PENDING_AST(lbl)						;\
1:	ldr	r1, [r5, #L_MD_ASTPENDING] /* Pending AST? */		;\
	tst	r1, #1							;\
	beq	lbl			/* Nope. Just bail */		;\
	bic	r0, r1, #1		/* clear AST */			;\
	str	r0, [r5, #L_MD_ASTPENDING]				;\
	CPSIE_I(r6, r6)			/* Restore interrupts */	;\
	mov	r0, sp							;\
	bl	_C_LABEL(ast)		/* ast(frame) */		;\
	CPSID_I(r0, r6)			/* Disable interrupts */	;\
	b	1b			/* test again */

/*
 * AST_ALIGNMENT_FAULT_LOCALS and ENABLE_ALIGNMENT_FAULTS
 * These are used to support dynamic enabling/disabling of alignment
 * faults when executing old a.out ARM binaries.
 *
 * Note that when ENABLE_ALIGNMENT_FAULTS finishes, r4 will contain
 * curcpu() and r5 will contain curlwp.  DO_AST_AND_RESTORE_ALIGNMENT_FAULTS
 * relies on r4 and r5 being preserved.
 */
#ifdef EXEC_AOUT
#define	AST_ALIGNMENT_FAULT_LOCALS					\
.Laflt_cpufuncs:							;\
	.word	_C_LABEL(cpufuncs)

/*
 * This macro must be invoked following PUSHFRAMEINSVC or PUSHFRAME at
 * the top of interrupt/exception handlers.
 *
 * When invoked, r0 *must* contain the value of SPSR on the current
 * trap/interrupt frame. This is always the case if ENABLE_ALIGNMENT_FAULTS
 * is invoked immediately after PUSHFRAMEINSVC or PUSHFRAME.
 */
#define	ENABLE_ALIGNMENT_FAULTS						\
	and	r7, r0, #(PSR_MODE)	/* Test for USR32 mode */	;\
	cmp	r7, #(PSR_USR32_MODE)					;\
	GET_CURX(r4, r5)		/* r4 = curcpu, r5 = curlwp */	;\
	bne	1f			/* Not USR mode skip AFLT */	;\
	ldr	r1, [r5, #L_MD_FLAGS]	/* Fetch l_md.md_flags */	;\
	tst	r1, #MDLWP_NOALIGNFLT					;\
	beq	1f			/* AFLTs already enabled */	;\
	ldr	r2, .Laflt_cpufuncs					;\
	ldr	r1, [r4, #CI_CTRL]	/* Fetch control register */	;\
	mov	r0, #-1							;\
	BL_CF_CONTROL(r2)		/* Enable alignment faults */	;\
1:	/* done */

/*
 * This macro must be invoked just before PULLFRAMEFROMSVCANDEXIT or
 * PULLFRAME at the end of interrupt/exception handlers.  We know that
 * r4 points to curcpu() and r5 points to curlwp since that is what
 * ENABLE_ALIGNMENT_FAULTS did for us.
 */
#define	DO_AST_AND_RESTORE_ALIGNMENT_FAULTS				\
	DO_PENDING_SOFTINTS						;\
	GET_CPSR(r6)			/* save CPSR */			;\
	CPSID_I(r1, r6)			/* Disable interrupts */	;\
	cmp	r7, #(PSR_USR32_MODE)	/* Returning to USR mode? */	;\
	bne	3f			/* Nope, get out now */		;\
	DO_PENDING_AST(2f)		/* Pending AST? */		;\
2:	ldr	r1, [r4, #CI_CURLWP]	/* get curlwp from cpu_info */	;\
	ldr	r0, [r1, #L_MD_FLAGS]	/* get md_flags from lwp */	;\
	tst	r0, #MDLWP_NOALIGNFLT					;\
	beq	3f			/* Keep AFLTs enabled */	;\
	ldr	r1, [r4, #CI_CTRL]	/* Fetch control register */	;\
	ldr	r2, .Laflt_cpufuncs					;\
	mov	r0, #-1							;\
	bic	r1, r1, #CPU_CONTROL_AFLT_ENABLE /* Disable AFLTs */	;\
	BL_CF_CONTROL(r2)		/* Set new CTRL reg value */	;\
3:	/* done */

#else	/* !EXEC_AOUT */

#define	AST_ALIGNMENT_FAULT_LOCALS

#define	ENABLE_ALIGNMENT_FAULTS						\
	and	r7, r0, #(PSR_MODE)	/* Test for USR32 mode */	;\
	GET_CURX(r4, r5)		/* r4 = curcpu, r5 = curlwp */

#define	DO_AST_AND_RESTORE_ALIGNMENT_FAULTS				\
	DO_PENDING_SOFTINTS						;\
	GET_CPSR(r6)			/* save CPSR */			;\
	CPSID_I(r1, r6)			/* Disable interrupts */	;\
	cmp	r7, #(PSR_USR32_MODE)					;\
	bne	2f			/* Nope, get out now */		;\
	DO_PENDING_AST(2f)		/* Pending AST? */		;\
2:	/* done */
#endif /* EXEC_AOUT */
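
/*
 * Putting the pieces together, an interrupt/exception handler is
 * expected to bracket its work roughly like this (a sketch only; the
 * real entry points live in the locore/exception code):
 *
 *	PUSHFRAMEINSVC			(build trapframe; leaves SPSR in r0)
 *	ENABLE_ALIGNMENT_FAULTS		(r4 = curcpu(), r5 = curlwp)
 *	... service the interrupt/exception ...
 *	DO_AST_AND_RESTORE_ALIGNMENT_FAULTS
 *	PULLFRAMEFROMSVCANDEXIT		(restore registers and return)
 */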

#ifndef _ARM_ARCH_6
#ifdef ARM_LOCK_CAS_DEBUG
#define	LOCK_CAS_DEBUG_LOCALS						\
.L_lock_cas_restart:							;\
	.word	_C_LABEL(_lock_cas_restart)

#if defined(__ARMEB__)
#define	LOCK_CAS_DEBUG_COUNT_RESTART					\
	ble	99f							;\
	ldr	r0, .L_lock_cas_restart					;\
	ldmia	r0, {r1-r2}		/* load ev_count */		;\
	adds	r2, r2, #1		/* 64-bit incr (lo) */		;\
	adc	r1, r1, #0		/* 64-bit incr (hi) */		;\
	stmia	r0, {r1-r2}		/* store ev_count */
#else /* __ARMEB__ */
#define	LOCK_CAS_DEBUG_COUNT_RESTART					\
	ble	99f							;\
	ldr	r0, .L_lock_cas_restart					;\
	ldmia	r0, {r1-r2}		/* load ev_count */		;\
	adds	r1, r1, #1		/* 64-bit incr (lo) */		;\
	adc	r2, r2, #0		/* 64-bit incr (hi) */		;\
	stmia	r0, {r1-r2}		/* store ev_count */
#endif /* __ARMEB__ */
#else /* ARM_LOCK_CAS_DEBUG */
#define	LOCK_CAS_DEBUG_LOCALS		/* nothing */
#define	LOCK_CAS_DEBUG_COUNT_RESTART	/* nothing */
#endif /* ARM_LOCK_CAS_DEBUG */
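
/*
 * LOCK_CAS_CHECK implements a restartable-atomic-sequence (RAS)
 * check: on CPUs without ldrex/strex, _lock_cas can be interrupted
 * mid-update.  If the trapframe shows we interrupted kernel (SVC32)
 * code inside the _lock_cas.._lock_cas_end window, the saved PC is
 * rewound to the start of the sequence so the compare-and-swap is
 * re-run from scratch on return.
 */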

#define	LOCK_CAS_CHECK_LOCALS						\
.L_lock_cas:								;\
	.word	_C_LABEL(_lock_cas)					;\
.L_lock_cas_end:							;\
	.word	_C_LABEL(_lock_cas_end)					;\
	LOCK_CAS_DEBUG_LOCALS

#define	LOCK_CAS_CHECK							\
	ldr	r0, [sp]		/* get saved PSR */		;\
	and	r0, r0, #(PSR_MODE)	/* check for SVC32 mode */	;\
	cmp	r0, #(PSR_SVC32_MODE)					;\
	bne	99f			/* nope, get out now */		;\
	ldr	r0, [sp, #(TF_PC)]					;\
	ldr	r1, .L_lock_cas_end					;\
	cmp	r0, r1							;\
	bge	99f							;\
	ldr	r1, .L_lock_cas						;\
	cmp	r0, r1							;\
	strgt	r1, [sp, #(TF_PC)]					;\
	LOCK_CAS_DEBUG_COUNT_RESTART					;\
99:

#else
#define	LOCK_CAS_CHECK			/* nothing */
#define	LOCK_CAS_CHECK_LOCALS		/* nothing */
#endif

/*
 * ASM macros for pushing and pulling trapframes from the stack
 *
 * These macros are used to handle the trapframe structure defined above.
 */

/*
 * PUSHFRAME - macro to push a trap frame on the stack in the current mode
 * Since the current mode is used, the SVC lr field is not defined.
 */

#ifdef CPU_SA110
/*
 * NOTE: r13 and r14 are stored separately as a work around for the
 * SA110 rev 2 STM^ bug
 */
#define	PUSHUSERREGS							   \
	stmia	sp, {r0-r12};		/* Push the user mode registers */ \
	add	r0, sp, #(TF_USR_SP-TF_R0); /* Adjust the stack pointer */ \
	stmia	r0, {r13-r14}^		/* Push the user mode registers */
#else
#define	PUSHUSERREGS							   \
	stmia	sp, {r0-r14}^		/* Push the user mode registers */
#endif

#define	PUSHFRAME							   \
	str	lr, [sp, #-4]!;		/* Push the return address */	   \
	sub	sp, sp, #(TF_PC-TF_R0);	/* Adjust the stack pointer */	   \
	PUSHUSERREGS;			/* Push the user mode registers */ \
	mov     r0, r0;			/* NOP for previous instruction */ \
	mrs	r0, spsr;		/* Get the SPSR */		   \
	str	r0, [sp, #-TF_R0]!	/* Push the SPSR on the stack */
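
/*
 * A note on the "mov r0, r0" above: older ARM implementations forbid
 * accessing the banked user registers in the instruction immediately
 * following an ldm/stm with the "^" modifier, so a NOP is inserted
 * after each such transfer.
 */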

/*
 * Push a minimal trapframe so we can dispatch an interrupt from the
 * idle loop.  The only reason the idle loop wakes up is to dispatch
 * interrupts, so why pay the cost of a full exception frame when we
 * can do something minimal.
 */
#define	PUSHIDLEFRAME							   \
	str	lr, [sp, #-4]!;		/* save SVC32 lr */		   \
	str	r6, [sp, #(TF_R6-TF_PC)]!; /* save callee-saved r6 */	   \
	str	r4, [sp, #(TF_R4-TF_R6)]!; /* save callee-saved r4 */	   \
	mrs	r0, cpsr;		/* Get the CPSR */		   \
	str	r0, [sp, #(-TF_R4)]!	/* Push the CPSR on the stack */

/*
 * Push a trapframe to be used by cpu_switchto
 */
#define	PUSHSWITCHFRAME(rX)						   \
	mov	ip, sp;							   \
	sub	sp, sp, #(TRAPFRAMESIZE-TF_R12); /* Adjust the stack pointer */ \
	push	{r4-r11};		/* Push the callee saved registers */ \
	sub	sp, sp, #TF_R4;		/* reserve rest of trapframe */	   \
	str	ip, [sp, #TF_SVC_SP];					   \
	str	lr, [sp, #TF_SVC_LR];					   \
	str	lr, [sp, #TF_PC];					   \
	mrs	rX, cpsr;		/* Get the CPSR */		   \
	str	rX, [sp, #TF_SPSR]	/* save in trapframe */

#define	PUSHSWITCHFRAME1						   \
	mov	ip, sp;							   \
	sub	sp, sp, #(TRAPFRAMESIZE-TF_R8); /* Adjust the stack pointer */ \
	push	{r4-r7};		/* Push some of the callee saved registers */ \
	sub	sp, sp, #TF_R4;		/* reserve rest of trapframe */	   \
	str	ip, [sp, #TF_SVC_SP];					   \
	str	lr, [sp, #TF_SVC_LR];					   \
	str	lr, [sp, #TF_PC]

#if defined(_ARM_ARCH_DWORD_OK) && __ARM_EABI__
#define	PUSHSWITCHFRAME2						   \
	strd	r10, [sp, #TF_R10];	/* save r10 & r11 */		   \
	strd	r8, [sp, #TF_R8];	/* save r8 & r9 */		   \
	mrs	r0, cpsr;		/* Get the CPSR */		   \
	str	r0, [sp, #TF_SPSR]	/* save in trapframe */
#else
#define	PUSHSWITCHFRAME2						   \
	add	r0, sp, #TF_R8;		/* get ptr to r8 and above */	   \
	stmia	r0, {r8-r11};		/* save rest of registers */	   \
	mrs	r0, cpsr;		/* Get the CPSR */		   \
	str	r0, [sp, #TF_SPSR]	/* save in trapframe */
#endif
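
/*
 * PUSHSWITCHFRAME1/2 appear to be a two-part variant of
 * PUSHSWITCHFRAME, allowing other work between the halves.  The strd
 * fast path is only used when the CPU can do doubleword stores
 * (_ARM_ARCH_DWORD_OK) and the EABI's 8-byte stack alignment keeps
 * those stores aligned; otherwise an stmia of r8-r11 is used.
 */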

/*
 * PULLFRAME - macro to pull a trap frame from the stack in the current mode
 * Since the current mode is used, the SVC lr field is ignored.
 */

#define	PULLFRAME							   \
	ldr	r0, [sp], #TF_R0;	/* Pop the SPSR from stack */	   \
	msr	spsr_fsxc, r0;						   \
	ldmia	sp, {r0-r14}^;		/* Restore registers (usr mode) */ \
	mov	r0, r0;			/* NOP for previous instruction */ \
	add	sp, sp, #(TF_PC-TF_R0);	/* Adjust the stack pointer */	   \
	ldr	lr, [sp], #4		/* Pop the return address */

#define	PULLIDLEFRAME							   \
	add	sp, sp, #TF_R4;		/* Adjust the stack pointer */	   \
	ldr	r4, [sp], #(TF_R6-TF_R4); /* restore callee-saved r4 */	   \
	ldr	r6, [sp], #(TF_PC-TF_R6); /* restore callee-saved r6 */	   \
	ldr	lr, [sp], #4		/* Pop the return address */

/*
 * Pop a trapframe to be used by cpu_switchto (don't touch r0 & r1).
 */
#define	PULLSWITCHFRAME							   \
	add	sp, sp, #TF_R4;		/* Adjust the stack pointer */	   \
	pop	{r4-r11};		/* pop the callee saved registers */ \
	add	sp, sp, #(TF_PC-TF_R12); /* Adjust the stack pointer */	   \
	ldr	lr, [sp], #4;		/* pop the return address */

/*
 * PUSHFRAMEINSVC - macro to push a trap frame on the stack in SVC32 mode
 * This should only be used if the processor is not currently in SVC32
 * mode. The processor mode is switched to SVC mode and the trap frame is
 * stored. The SVC lr field is used to store the previous value of
 * lr in SVC mode.
 *
 * NOTE: r13 and r14 are stored separately as a work around for the
 * SA110 rev 2 STM^ bug
 */

#ifdef _ARM_ARCH_6
#define	SET_CPSR_MODE(tmp, mode)					   \
	cps	#(mode)
#else
#define	SET_CPSR_MODE(tmp, mode)					   \
	mrs	tmp, cpsr;		/* Get the CPSR */		   \
	bic	tmp, tmp, #(PSR_MODE);	/* Fix for SVC mode */		   \
	orr	tmp, tmp, #(mode);					   \
	msr	cpsr_c, tmp		/* Punch into SVC mode */
#endif

#define	PUSHXXXREGSANDSWITCH						   \
	stmdb	sp, {r0-r3};		/* Save 4 registers */		   \
	mov	r0, lr;			/* Save xxx32 r14 */		   \
	mov	r1, sp;			/* Save xxx32 sp */		   \
	mrs	r3, spsr;		/* Save xxx32 spsr */		   \
	SET_CPSR_MODE(r2, PSR_SVC32_MODE)

#ifdef KDTRACE_HOOKS
#define	PUSHDTRACEGAP							   \
	and	r2, r3, #(PSR_MODE);					   \
	cmp	r2, #(PSR_SVC32_MODE);	/* were we in SVC mode? */	   \
	mov	r2, sp;							   \
	subeq	r2, r2, #(4 * 16);	/* if so, leave a gap for dtrace */
#else
#define	PUSHDTRACEGAP							   \
	mov	r2, sp
#endif

#define	PUSHTRAPFRAME(rX)						   \
	bic	r2, rX, #7;		/* Align new SVC sp */		   \
	str	r0, [r2, #-4]!;		/* Push return address */	   \
	stmdb	r2!, {sp, lr};		/* Push SVC sp, lr */		   \
	mov	sp, r2;			/* Keep stack aligned */	   \
	msr	spsr_fsxc, r3;		/* Restore correct spsr */	   \
	ldmdb	r1, {r0-r3};		/* Restore 4 regs from xxx mode */ \
	sub	sp, sp, #(TF_SVC_SP-TF_R0); /* Adjust the stack pointer */ \
	PUSHUSERREGS;			/* Push the user mode registers */ \
	mov     r0, r0;			/* NOP for previous instruction */ \
	mrs	r0, spsr;		/* Get the SPSR */		   \
	str	r0, [sp, #-TF_R0]!	/* Push the SPSR onto the stack */

#define	PUSHFRAMEINSVC							   \
	PUSHXXXREGSANDSWITCH;						   \
	PUSHTRAPFRAME(sp)
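
/*
 * PUSHFRAMEINSVC is the common composition of the two steps above.
 * A handler that wants the dtrace gap would instead open-code the
 * sequence, roughly like this (a sketch only):
 *
 *	PUSHXXXREGSANDSWITCH		(save r0-r3, switch to SVC32)
 *	PUSHDTRACEGAP			(r2 = new sp, gap if from SVC)
 *	PUSHTRAPFRAME(r2)		(build the trapframe there)
 */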

/*
 * PULLFRAMEFROMSVCANDEXIT - macro to pull a trap frame from the stack
 * in SVC32 mode and restore the saved processor mode and PC.
 * This should be used when the SVC lr register needs to be restored on
 * exit.
 */

#define	PULLFRAMEFROMSVCANDEXIT						   \
	ldr	r0, [sp], #TF_R0;	/* Pop the SPSR from stack */	   \
	msr	spsr_fsxc, r0;		/* restore SPSR */		   \
	ldmia	sp, {r0-r14}^;		/* Restore registers (usr mode) */ \
	mov	r0, r0;			/* NOP for previous instruction */ \
	add	sp, sp, #(TF_SVC_SP-TF_R0); /* Adjust the stack pointer */ \
	ldmia	sp, {sp, lr, pc}^	/* Restore lr and exit */

#endif /* _LOCORE */

#endif /* _ARM32_FRAME_H_ */