/*	$NetBSD: profile.h,v 1.1 2014/09/19 17:36:26 matt Exp $	*/

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RISCV_PROFILE_H_
#define _RISCV_PROFILE_H_

#define _MCOUNT_DECL	void _mcount

/*
 * Cannot implement mcount in C as GCC will trash the ip register when it
 * pushes a trapframe. Pity we cannot insert assembly before the function
 * prologue.
 */

#define MCOUNT_ASM_NAME	"__mcount"
#define PLTSYM

#if 0
#define MCOUNT \
	__asm(".text"); \
	__asm(".align 0"); \
	__asm(".type " MCOUNT_ASM_NAME ",@function"); \
	__asm(".global " MCOUNT_ASM_NAME); \
	__asm(MCOUNT_ASM_NAME ":"); \
	/* \
	 * Preserve registers that are trashed during mcount \
	 */ \
	__asm("sub sp, sp, #80"); \
	__asm("stp x29, x30, [sp, #64]"); \
	__asm("add x29, sp, #64"); \
	__asm("stp x0, x1, [x29, #0]"); \
	__asm("stp x2, x3, [x29, #16]"); \
	__asm("stp x4, x5, [x29, #32]"); \
	__asm("stp x6, x7, [x29, #48]"); \
	/* \
	 * find the return address for mcount, \
	 * and the return address for mcount's caller. \
	 * \
	 * frompcindex = pc pushed by call into self. \
	 */ \
	__asm("mov x0, x19"); \
	/* \
	 * selfpc = pc pushed by mcount call \
	 */ \
	__asm("mov x1, x30"); \
	/* \
	 * Call the real mcount code \
	 */ \
	__asm("bl " ___STRING(_C_LABEL(_mcount))); \
	/* \
	 * Restore registers that were trashed during mcount \
	 */ \
	__asm("ldp x0, x1, [x29, #0]"); \
	__asm("ldp x2, x3, [x29, #16]"); \
	__asm("ldp x4, x5, [x29, #32]"); \
	__asm("ldp x6, x7, [x29, #48]"); \
	__asm("ldp x29, x30, [x29, #64]"); \
	__asm("add sp, sp, #80"); \
	__asm("ret"); \
	__asm(".size " MCOUNT_ASM_NAME ", .-" MCOUNT_ASM_NAME);
#endif

#endif /* _RISCV_PROFILE_H_ */
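
/*
 * Editor's sketch, not committed NetBSD code: what the disabled AArch64
 * placeholder above might look like once rewritten for RISC-V.  It assumes
 * the compiler's RISC-V profiling hook passes the caller's return address
 * ("frompc") as the first argument, so on entry to __mcount a0 already
 * holds frompc and ra holds selfpc; the stub then only has to preserve ra
 * (keeping sp 16-byte aligned) around the call to the C _mcount().  RV64
 * mnemonics are shown; RV32 would use sw/lw instead of sd/ld.  If enabled,
 * this would replace the #if 0 block above, inside the include guard.
 */
#if 0
#define MCOUNT \
	__asm(".text"); \
	__asm(".align 2"); \
	__asm(".type " MCOUNT_ASM_NAME ",@function"); \
	__asm(".global " MCOUNT_ASM_NAME); \
	__asm(MCOUNT_ASM_NAME ":"); \
	/* keep sp 16-byte aligned and save our own return address */ \
	__asm("addi sp, sp, -16"); \
	__asm("sd ra, 8(sp)"); \
	/* a0 = frompc (passed in by the compiler), a1 = selfpc */ \
	__asm("mv a1, ra"); \
	/* call the real mcount code */ \
	__asm("call " ___STRING(_C_LABEL(_mcount))); \
	/* restore our return address and return to the profiled function */ \
	__asm("ld ra, 8(sp)"); \
	__asm("addi sp, sp, 16"); \
	__asm("ret"); \
	__asm(".size " MCOUNT_ASM_NAME ", .-" MCOUNT_ASM_NAME);
#endif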