xref: /minix3/sys/arch/i386/include/frameasm.h (revision 0a6a1f1d05b60e214de2f05a7310ddd1f0e590e7)
1*0a6a1f1dSLionel Sambuc /*	$NetBSD: frameasm.h,v 1.15 2011/07/26 12:57:35 yamt Exp $	*/
2*0a6a1f1dSLionel Sambuc 
3*0a6a1f1dSLionel Sambuc #ifndef _I386_FRAMEASM_H_
4*0a6a1f1dSLionel Sambuc #define _I386_FRAMEASM_H_
5*0a6a1f1dSLionel Sambuc 
6*0a6a1f1dSLionel Sambuc #ifdef _KERNEL_OPT
7*0a6a1f1dSLionel Sambuc #include "opt_multiprocessor.h"
8*0a6a1f1dSLionel Sambuc #include "opt_xen.h"
9*0a6a1f1dSLionel Sambuc #endif
10*0a6a1f1dSLionel Sambuc 
11*0a6a1f1dSLionel Sambuc #if !defined(XEN)
/*
 * Native hardware: interrupt masking is the CPU interrupt flag.
 * The "reg" argument is unused here; it exists so call sites are
 * source-compatible with the register-clobbering Xen variants below.
 */
12*0a6a1f1dSLionel Sambuc #define CLI(reg)        cli
13*0a6a1f1dSLionel Sambuc #define STI(reg)        sti
14*0a6a1f1dSLionel Sambuc #else
15*0a6a1f1dSLionel Sambuc /* XXX assym.h */
16*0a6a1f1dSLionel Sambuc #define TRAP_INSTR      int $0x82
/*
 * Mask, unmask, and test pending event delivery on the vcpu_info
 * structure that "reg" points at.
 */
17*0a6a1f1dSLionel Sambuc #define XEN_BLOCK_EVENTS(reg)   movb $1,EVTCHN_UPCALL_MASK(reg)
18*0a6a1f1dSLionel Sambuc #define XEN_UNBLOCK_EVENTS(reg) movb $0,EVTCHN_UPCALL_MASK(reg)
19*0a6a1f1dSLionel Sambuc #define XEN_TEST_PENDING(reg)   testb $0xFF,EVTCHN_UPCALL_PENDING(reg)
20*0a6a1f1dSLionel Sambuc 
/*
 * Under Xen, "disabling interrupts" means masking event delivery on
 * this CPU's vcpu_info.  All three macros clobber "reg" (it is loaded
 * with the vcpu_info pointer) and the flags.
 *
 * STIC additionally leaves ZF clear if an upcall became pending, so
 * the caller can branch to process it.
 */
21*0a6a1f1dSLionel Sambuc #define CLI(reg)        movl    CPUVAR(VCPU),reg ;  \
22*0a6a1f1dSLionel Sambuc                         XEN_BLOCK_EVENTS(reg)
23*0a6a1f1dSLionel Sambuc #define STI(reg)        movl    CPUVAR(VCPU),reg ;  \
24*0a6a1f1dSLionel Sambuc 			XEN_UNBLOCK_EVENTS(reg)
25*0a6a1f1dSLionel Sambuc #define STIC(reg)       movl    CPUVAR(VCPU),reg ;  \
26*0a6a1f1dSLionel Sambuc 			XEN_UNBLOCK_EVENTS(reg)  ; \
27*0a6a1f1dSLionel Sambuc 			XEN_TEST_PENDING(reg)
28*0a6a1f1dSLionel Sambuc #endif
29*0a6a1f1dSLionel Sambuc 
30*0a6a1f1dSLionel Sambuc #ifndef TRAPLOG
/* Trap logging disabled: TLOG expands to nothing. */
31*0a6a1f1dSLionel Sambuc #define TLOG		/**/
32*0a6a1f1dSLionel Sambuc #else
33*0a6a1f1dSLionel Sambuc /*
34*0a6a1f1dSLionel Sambuc  * Fill in trap record
 *
 * Appends one record to the per-CPU circular trap log: the log offset
 * is advanced by SIZEOF_TREC and wrapped modulo SIZEOF_TLOG (the andl
 * mask implies SIZEOF_TLOG must be a power of two).  Into the new
 * record it stores the current %esp, the address of the local "9:"
 * label (the handler PC), the trapped %eip from the trapframe, the
 * low 32 bits of the TSC, and the low 32 bits of four consecutive
 * MSRs starting at MSR_LASTBRANCHFROMIP (field names LBF/LBT/IBF/IBT
 * suggest last-branch from/to and last-interrupt from/to).
 *
 * Assumes %fs addresses the per-CPU area and %esp points at a
 * trapframe.  Clobbers %eax, %ebx, %ecx, %edx and the flags
 * (rdtsc/rdmsr write %edx, which is discarded).
35*0a6a1f1dSLionel Sambuc  */
36*0a6a1f1dSLionel Sambuc #define TLOG						\
37*0a6a1f1dSLionel Sambuc 9:							\
38*0a6a1f1dSLionel Sambuc 	movl	%fs:CPU_TLOG_OFFSET, %eax;		\
39*0a6a1f1dSLionel Sambuc 	movl	%fs:CPU_TLOG_BASE, %ebx;		\
40*0a6a1f1dSLionel Sambuc 	addl	$SIZEOF_TREC,%eax;			\
41*0a6a1f1dSLionel Sambuc 	andl	$SIZEOF_TLOG-1,%eax; /* wrap: log size is a power of two */ \
42*0a6a1f1dSLionel Sambuc 	addl	%eax,%ebx; /* %ebx = address of the new record */	\
43*0a6a1f1dSLionel Sambuc 	movl	%eax,%fs:CPU_TLOG_OFFSET;		\
44*0a6a1f1dSLionel Sambuc 	movl	%esp,TREC_SP(%ebx);			\
45*0a6a1f1dSLionel Sambuc 	movl	$9b,TREC_HPC(%ebx); /* handler PC = the 9: label above */ \
46*0a6a1f1dSLionel Sambuc 	movl	TF_EIP(%esp),%eax;			\
47*0a6a1f1dSLionel Sambuc 	movl	%eax,TREC_IPC(%ebx); /* interrupted PC from trapframe */ \
48*0a6a1f1dSLionel Sambuc 	rdtsc			;			\
49*0a6a1f1dSLionel Sambuc 	movl	%eax,TREC_TSC(%ebx); /* low 32 bits only */ \
50*0a6a1f1dSLionel Sambuc 	movl	$MSR_LASTBRANCHFROMIP,%ecx;		\
51*0a6a1f1dSLionel Sambuc 	rdmsr			;			\
52*0a6a1f1dSLionel Sambuc 	movl	%eax,TREC_LBF(%ebx);			\
53*0a6a1f1dSLionel Sambuc 	incl	%ecx		; /* next consecutive MSR */ \
54*0a6a1f1dSLionel Sambuc 	rdmsr			;			\
55*0a6a1f1dSLionel Sambuc 	movl	%eax,TREC_LBT(%ebx);			\
56*0a6a1f1dSLionel Sambuc 	incl	%ecx		;			\
57*0a6a1f1dSLionel Sambuc 	rdmsr			;			\
58*0a6a1f1dSLionel Sambuc 	movl	%eax,TREC_IBF(%ebx);			\
59*0a6a1f1dSLionel Sambuc 	incl	%ecx		;			\
60*0a6a1f1dSLionel Sambuc 	rdmsr			;			\
61*0a6a1f1dSLionel Sambuc 	movl	%eax,TREC_IBT(%ebx)
62*0a6a1f1dSLionel Sambuc #endif
63*0a6a1f1dSLionel Sambuc 
64*0a6a1f1dSLionel Sambuc /*
65*0a6a1f1dSLionel Sambuc  * These are used on interrupt or trap entry or exit.
 *
 * INTRENTRY: reserve the trapframe's TF_PUSHSIZE register area on the
 * stack and save the segment registers (%gs/%fs/%es/%ds) and all
 * general-purpose registers into their TF_* slots; then load the
 * kernel data selector GSEL(GDATA_SEL, SEL_KPL) into %ds/%es/%gs and
 * the per-CPU selector GSEL(GCPU_SEL, SEL_KPL) into %fs, and clear
 * the direction flag for string instructions.
 *
 * %eax is saved to TF_EAX before being reused as selector scratch;
 * register stores and segment loads are interleaved (presumably to
 * aid instruction scheduling — keep the ordering as is).
 * Ends with TLOG, which records the trap when TRAPLOG is defined.
66*0a6a1f1dSLionel Sambuc  */
67*0a6a1f1dSLionel Sambuc #define	INTRENTRY \
68*0a6a1f1dSLionel Sambuc 	subl	$TF_PUSHSIZE,%esp	; \
69*0a6a1f1dSLionel Sambuc 	movw	%gs,TF_GS(%esp)	; \
70*0a6a1f1dSLionel Sambuc 	movw	%fs,TF_FS(%esp) ; \
71*0a6a1f1dSLionel Sambuc 	movl	%eax,TF_EAX(%esp)	; \
72*0a6a1f1dSLionel Sambuc 	movw	%es,TF_ES(%esp) ; \
73*0a6a1f1dSLionel Sambuc 	movw	%ds,TF_DS(%esp) ; \
74*0a6a1f1dSLionel Sambuc 	movl	$GSEL(GDATA_SEL, SEL_KPL),%eax	; \
75*0a6a1f1dSLionel Sambuc 	movl	%edi,TF_EDI(%esp)	; \
76*0a6a1f1dSLionel Sambuc 	movl	%esi,TF_ESI(%esp)	; \
77*0a6a1f1dSLionel Sambuc 	movw	%ax,%ds	; \
78*0a6a1f1dSLionel Sambuc 	movl	%ebp,TF_EBP(%esp)	; \
79*0a6a1f1dSLionel Sambuc 	movw	%ax,%es	; \
80*0a6a1f1dSLionel Sambuc 	movl	%ebx,TF_EBX(%esp)	; \
81*0a6a1f1dSLionel Sambuc 	movw	%ax,%gs	; \
82*0a6a1f1dSLionel Sambuc 	movl	%edx,TF_EDX(%esp)	; \
83*0a6a1f1dSLionel Sambuc 	movl	$GSEL(GCPU_SEL, SEL_KPL),%eax	; \
84*0a6a1f1dSLionel Sambuc 	movl	%ecx,TF_ECX(%esp)	; \
85*0a6a1f1dSLionel Sambuc 	movl	%eax,%fs	; \
86*0a6a1f1dSLionel Sambuc 	cld			; \
87*0a6a1f1dSLionel Sambuc 	TLOG
88*0a6a1f1dSLionel Sambuc 
89*0a6a1f1dSLionel Sambuc /*
90*0a6a1f1dSLionel Sambuc  * INTRFASTEXIT should be in sync with trap(), resume_iret and friends.
 *
 * Undoes INTRENTRY: reload the segment registers and general-purpose
 * registers from the trapframe, discard the TF_PUSHSIZE register area
 * plus 8 further bytes (NOTE(review): presumably the trap number and
 * error code words above the pushed registers — confirm against the
 * i386 trapframe layout), then return from the trap with iret.
91*0a6a1f1dSLionel Sambuc  */
92*0a6a1f1dSLionel Sambuc #define	INTRFASTEXIT \
93*0a6a1f1dSLionel Sambuc 	movw	TF_GS(%esp),%gs	; \
94*0a6a1f1dSLionel Sambuc 	movw	TF_FS(%esp),%fs	; \
95*0a6a1f1dSLionel Sambuc 	movw	TF_ES(%esp),%es	; \
96*0a6a1f1dSLionel Sambuc 	movw	TF_DS(%esp),%ds	; \
97*0a6a1f1dSLionel Sambuc 	movl	TF_EDI(%esp),%edi	; \
98*0a6a1f1dSLionel Sambuc 	movl	TF_ESI(%esp),%esi	; \
99*0a6a1f1dSLionel Sambuc 	movl	TF_EBP(%esp),%ebp	; \
100*0a6a1f1dSLionel Sambuc 	movl	TF_EBX(%esp),%ebx	; \
101*0a6a1f1dSLionel Sambuc 	movl	TF_EDX(%esp),%edx	; \
102*0a6a1f1dSLionel Sambuc 	movl	TF_ECX(%esp),%ecx	; \
103*0a6a1f1dSLionel Sambuc 	movl	TF_EAX(%esp),%eax	; \
104*0a6a1f1dSLionel Sambuc 	addl	$(TF_PUSHSIZE+8),%esp	; \
105*0a6a1f1dSLionel Sambuc 	iret
106*0a6a1f1dSLionel Sambuc 
/*
 * DO_DEFERRED_SWITCH: if this CPU has a deferred pmap activation
 * pending (CPUVAR(WANT_PMAPLOAD) != 0), call pmap_load() once.
 * Clobbers the flags, plus whatever pmap_load() clobbers.
 */
107*0a6a1f1dSLionel Sambuc #define	DO_DEFERRED_SWITCH \
108*0a6a1f1dSLionel Sambuc 	cmpl	$0, CPUVAR(WANT_PMAPLOAD)		; \
109*0a6a1f1dSLionel Sambuc 	jz	1f					; \
110*0a6a1f1dSLionel Sambuc 	call	_C_LABEL(pmap_load)			; \
111*0a6a1f1dSLionel Sambuc 	1:
112*0a6a1f1dSLionel Sambuc 
/*
 * DO_DEFERRED_SWITCH_RETRY: like DO_DEFERRED_SWITCH, but re-test
 * WANT_PMAPLOAD after pmap_load() returns and loop until it is clear
 * (it may have been set again in the meantime).
 */
113*0a6a1f1dSLionel Sambuc #define	DO_DEFERRED_SWITCH_RETRY \
114*0a6a1f1dSLionel Sambuc 	1:						; \
115*0a6a1f1dSLionel Sambuc 	cmpl	$0, CPUVAR(WANT_PMAPLOAD)		; \
116*0a6a1f1dSLionel Sambuc 	jz	1f					; \
117*0a6a1f1dSLionel Sambuc 	call	_C_LABEL(pmap_load)			; \
118*0a6a1f1dSLionel Sambuc 	jmp	1b					; \
119*0a6a1f1dSLionel Sambuc 	1:
120*0a6a1f1dSLionel Sambuc 
/*
 * CHECK_DEFERRED_SWITCH: set ZF iff no deferred pmap load is wanted;
 * the caller branches on the result.
 */
121*0a6a1f1dSLionel Sambuc #define	CHECK_DEFERRED_SWITCH \
122*0a6a1f1dSLionel Sambuc 	cmpl	$0, CPUVAR(WANT_PMAPLOAD)
123*0a6a1f1dSLionel Sambuc 
/*
 * CHECK_ASTPENDING: load curlwp into "reg" and compare its
 * L_MD_ASTPENDING field against 0; ZF set means no AST is pending.
 * CLEAR_ASTPENDING: clear that field; "reg" must still hold the lwp
 * pointer left there by CHECK_ASTPENDING.
 */
124*0a6a1f1dSLionel Sambuc #define	CHECK_ASTPENDING(reg)	movl	CPUVAR(CURLWP),reg	; \
125*0a6a1f1dSLionel Sambuc 				cmpl	$0, L_MD_ASTPENDING(reg)
126*0a6a1f1dSLionel Sambuc #define	CLEAR_ASTPENDING(reg)	movl	$0, L_MD_ASTPENDING(reg)
127*0a6a1f1dSLionel Sambuc 
128*0a6a1f1dSLionel Sambuc /*
129*0a6a1f1dSLionel Sambuc  * IDEPTH_INCR:
130*0a6a1f1dSLionel Sambuc  * increase ci_idepth and switch to the interrupt stack if necessary.
131*0a6a1f1dSLionel Sambuc  * note that the initial value of ci_idepth is -1.
132*0a6a1f1dSLionel Sambuc  *
133*0a6a1f1dSLionel Sambuc  * => should be called with interrupt disabled.
134*0a6a1f1dSLionel Sambuc  * => save the old value of %esp in %eax.
 *
 * Subtlety: the "jne 999f" consumes the flags set by the incl — the
 * intervening movl does not modify flags.  Only the outermost entry
 * takes ci_idepth from -1 to 0 (ZF set), falling through to switch
 * %esp to the interrupt stack; nested entries leave a nonzero result
 * and jump past the switch.  Either way the old %esp is then pushed
 * on the (possibly new) stack for IDEPTH_DECR to restore.
135*0a6a1f1dSLionel Sambuc  */
136*0a6a1f1dSLionel Sambuc 
137*0a6a1f1dSLionel Sambuc #define	IDEPTH_INCR \
138*0a6a1f1dSLionel Sambuc 	incl	CPUVAR(IDEPTH); \
139*0a6a1f1dSLionel Sambuc 	movl	%esp, %eax; /* does not touch flags */ \
140*0a6a1f1dSLionel Sambuc 	jne	999f; /* nested interrupt: flags are from the incl */ \
141*0a6a1f1dSLionel Sambuc 	movl	CPUVAR(INTRSTACK), %esp; \
142*0a6a1f1dSLionel Sambuc 999:	pushl	%eax; /* eax == pointer to intrframe */ \
143*0a6a1f1dSLionel Sambuc 
144*0a6a1f1dSLionel Sambuc /*
145*0a6a1f1dSLionel Sambuc  * IDEPTH_DECR:
146*0a6a1f1dSLionel Sambuc  * decrement ci_idepth and switch back to
147*0a6a1f1dSLionel Sambuc  * the original stack saved by IDEPTH_INCR.
148*0a6a1f1dSLionel Sambuc  *
149*0a6a1f1dSLionel Sambuc  * => should be called with interrupt disabled.
 *
 * The popl loads %esp directly from the word IDEPTH_INCR pushed,
 * so this works regardless of whether the stack was switched.
150*0a6a1f1dSLionel Sambuc  */
151*0a6a1f1dSLionel Sambuc 
152*0a6a1f1dSLionel Sambuc #define	IDEPTH_DECR \
153*0a6a1f1dSLionel Sambuc 	popl	%esp; \
154*0a6a1f1dSLionel Sambuc 	decl	CPUVAR(IDEPTH)
155*0a6a1f1dSLionel Sambuc 
156*0a6a1f1dSLionel Sambuc #endif /* _I386_FRAMEASM_H_ */
157