/*	$NetBSD: profile.h,v 1.7 2006/07/07 21:28:03 ross Exp $	*/

/*-
 * Copyright (c) 2000 Tsubai Masanari.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

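/*
 * Profiling glue for PowerPC.  _MCOUNT_DECL gives the machine-independent
 * mcount code (gmon's mcount.c) the name and return type to use for the
 * C handler, __mcount(); the MCOUNT macros below supply the assembler
 * stub _mcount that code compiled with -pg calls on function entry and
 * that in turn calls __mcount(frompc, selfpc).
 *
 * Illustrative sketch only (emitted by the compiler, not by this header):
 * a profiled 32-bit function begins roughly with
 *
 *	mflr	0
 *	stw	0,4(1)		# frompc into the LR save word
 *	bl	_mcount
 *
 * so the stub can find frompc on the stack and selfpc in LR.
 */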
#define	_MCOUNT_DECL	void __mcount

#ifdef _LP64

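/*
 * 64-bit (ELF v1) _mcount stub.  The entry in ".opd" is the function
 * descriptor; "._mcount" is the code.  The stub builds a 128-byte frame,
 * saves the TOC pointer and the argument registers r3-r10 in it, loads
 * frompc from the LR save doubleword of the profiled function's frame
 * (frame+16(1) below) and selfpc from LR, and calls .__mcount.  On return
 * it puts frompc back into LR, reloads the TOC and the argument
 * registers, and branches to selfpc through CTR so the profiled function
 * continues as if nothing had happened.
 */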
#define MCOUNT				\
__asm("	.globl	_mcount			\n" \
"	.section \".opd\",\"aw\"	\n" \
"	.align 3			\n" \
"_mcount:				\n" \
"	.quad	._mcount,.TOC.@tocbase,0\n" \
"	.previous			\n" \
"	.size	_mcount,24		\n" \
"	.type	._mcount,@function	\n" \
"	.globl	._mcount		\n" \
"	.align	3			\n" \
"._mcount:				\n" \
"	frame=128			\n" \
"	stdu	1,-frame(1)		\n" \
"	std	2,120(1)		\n" \
"	std	3,48+0(1)		\n" \
"	std	4,48+8(1)		\n" \
"	std	5,48+16(1)		\n" \
"	std	6,48+24(1)		\n" \
"	std	7,48+32(1)		\n" \
"	std	8,48+40(1)		\n" \
"	std	9,48+48(1)		\n" \
"	std	10,48+56(1)		\n" \
"					\n" \
"	mflr	4			\n" \
"	std	4,112(1)		\n" \
"	ld	3,frame+16(1)		\n" \
"	bl	.__mcount		\n" \
"	ld	2,120(1)		\n" \
"	ld	3,frame+16(1)		\n" \
"	mtlr	3			\n" \
"	ld	4,112(1)		\n" \
"	mtctr	4			\n" \
"					\n" \
67 "	ld	3,16(1)			\n" \
68 "	ld	4,20(1)			\n" \
69 "	ld	5,24(1)			\n" \
70 "	ld	6,28(1)			\n" \
71 "	ld	7,32(1)			\n" \
72 "	ld	8,36(1)			\n" \
73 "	ld	9,40(1)			\n" \
74 "	ld	10,44(1)		\n" \
75 "	addi	1,1,frame		\n" \
76 "	bctr");
77 
#else

#ifdef PIC
#define _PLT "@plt"
#else
#define _PLT
#endif

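/*
 * 32-bit _mcount stub, same idea as above: allocate a 64-byte frame,
 * save the argument registers r3-r10 at 16..44(1) and selfpc (from LR)
 * at 48(1), pick frompc out of the LR save word of the profiled
 * function's frame (68(1) below), and call __mcount(frompc, selfpc).
 * Afterwards LR is reset to frompc, the argument registers are reloaded,
 * and the stub returns to selfpc through CTR.  When built PIC, _PLT makes
 * the call go through the procedure linkage table.
 */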
#define MCOUNT				\
__asm("	.globl	_mcount			\n" \
"	.type	_mcount,@function	\n" \
"_mcount:				\n" \
"	stwu	1,-64(1)		\n" \
"	stw	3,16(1)			\n" \
"	stw	4,20(1)			\n" \
"	stw	5,24(1)			\n" \
"	stw	6,28(1)			\n" \
"	stw	7,32(1)			\n" \
"	stw	8,36(1)			\n" \
"	stw	9,40(1)			\n" \
"	stw	10,44(1)		\n" \
"					\n" \
"	mflr	4			\n" \
"	stw	4,48(1)			\n" \
"	lwz	3,68(1)			\n" \
"	bl	__mcount" _PLT "	\n" \
"	lwz	3,68(1)			\n" \
"	mtlr	3			\n" \
"	lwz	4,48(1)			\n" \
"	mtctr	4			\n" \
"					\n" \
"	lwz	3,16(1)			\n" \
"	lwz	4,20(1)			\n" \
"	lwz	5,24(1)			\n" \
"	lwz	6,28(1)			\n" \
"	lwz	7,32(1)			\n" \
"	lwz	8,36(1)			\n" \
"	lwz	9,40(1)			\n" \
"	lwz	10,44(1)		\n" \
"	addi	1,1,64			\n" \
"	bctr				\n" \
"_mcount_end:				\n" \
"	.size	_mcount,_mcount_end-_mcount");

#endif

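/*
 * Kernel profiling hooks.  The machine-independent mcount() brackets its
 * body with MCOUNT_ENTER and MCOUNT_EXIT, using a local MSR copy "s"
 * declared by the common mcount code.  MCOUNT_ENTER returns early when
 * address translation (PSL_IR/PSL_DR) is off (the XXX case) and otherwise
 * blocks external interrupts by rewriting the MSR with PSL_EE clear;
 * PSL_POW is dropped from the saved value so that neither write can put
 * the CPU into power-saving mode.  MCOUNT_EXIT restores the saved MSR.
 */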
#ifdef _KERNEL
#define MCOUNT_ENTER						\
	__asm volatile("mfmsr %0" : "=r"(s));			\
	if ((s & (PSL_IR | PSL_DR)) != (PSL_IR | PSL_DR))	\
		return;		/* XXX */			\
	s &= ~PSL_POW;						\
	__asm volatile("mtmsr %0" :: "r"(s & ~PSL_EE))

#define MCOUNT_EXIT						\
	__asm volatile("mtmsr %0" :: "r"(s))
#endif