/*	$OpenBSD: vector.S,v 1.99 2024/11/10 03:02:43 jsg Exp $	*/
/*	$NetBSD: vector.S,v 1.5 2004/06/28 09:13:11 fvdl Exp $	*/

/*
 * Copyright (c) 2001 Wasabi Systems, Inc.
 * All rights reserved.
 *
 * Written by Frank van der Linden for Wasabi Systems, Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed for the NetBSD Project by
 *      Wasabi Systems, Inc.
 * 4. The name of Wasabi Systems, Inc. may not be used to endorse
 *    or promote products derived from this software without specific prior
 *    written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL WASABI SYSTEMS, INC
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*-
 * Copyright (c) 1998 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Charles M. Hannum.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/param.h>
#include <machine/i8259.h>
#include <machine/i82093reg.h>
#include <machine/i82489reg.h>
#include <machine/asm.h>
#include <machine/frameasm.h>
#include <machine/segments.h>
#include <machine/trap.h>
#include <machine/intr.h>
#include <machine/psl.h>
#include <machine/codepatch.h>
#include <machine/specialreg.h>

#include "ioapic.h"
#include "lapic.h"
#include "assym.h"
#include "xen.h"
#include "hyperv.h"
#include "vmm.h"

/*****************************************************************************/

/*
 * Trap and fault vector routines
 *
 * On exit from the kernel to user mode, we always need to check for ASTs.  In
 * addition, we need to do this atomically; otherwise an interrupt may occur
 * which causes an AST, but it won't get processed until the next kernel entry
 * (possibly the next clock tick).  Thus, we disable interrupts before checking,
 * and only enable them again on the final `iret' or before calling the AST
 * handler.
 */
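
/*
 * A minimal sketch of the pattern this implies for the return-to-user
 * path (exposition only; intr_user_exit below is the real code):
 *
 *	cli				# close the race window
 *	<test for a pending AST>	# e.g. the proc's astpending flag
 *	jz	<iretq to user>		# none: leave, interrupts still off
 *	sti				# else handle it with interrupts on
 *	<call the AST handler>
 *	jmp	<back to the cli>	# and re-check atomically
 */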

/*****************************************************************************/

#define	TRAP(a)		pushq $(a) ; jmp alltraps
#define	ZTRAP(a)	pushq $0 ; TRAP(a)

IDTVEC(trap00)
	ZTRAP(T_DIVIDE)
IDTVEC(trap01)
	ZTRAP(T_TRCTRAP)

/*
 * NMIs can happen at any time, so there's no simple way to tell
 * which GS.base is in place at the time of the interrupt.  Instead,
 * borrow a couple ideas from FreeBSD and put the CPU's kernel
 * GS.base in the memory right above the stack, storing the current
 * one in a pair of callee-saved registers (%r12/13).  We save the
 * current %cr3 in a callee-saved register too (%r15).
 * Note: we don't unblock interrupts because a nested normal interrupt
 * would also reenable NMIs.
 */
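/*
 * (Reminder for the MSR dance below: rdmsr and wrmsr move the 64-bit
 * MSR value split across %edx:%eax, hence the shrq when reloading
 * MSR_GSBASE.)
 */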
IDTVEC(trap02)
	pushq	$0
	pushq	$T_NMI
calltrap_specstk:			# special stack path
	TRAP_ENTRY_KERN
	INTR_CLEAR_GPRS
	movl	$MSR_GSBASE,%ecx	# save current GS.base...
	rdmsr
	movq	%rax,%r12		# ...in %r12 and %r13
	movq	%rdx,%r13
	movq	FRAMESIZE(%rsp),%rax	# get kernel GS.base
	movq	%rax,%rdx
	shrq	$32,%rdx
	wrmsr				# switch to it
	movq	%cr3,%r15		# save current %cr3 in %r15
	movq	CPUVAR(KERN_CR3),%rax	# switch to kernel page tables
	testq	%rax,%rax
	jz	INTRENTRY_LABEL(calltrap_specstk)
	movq	%rax,%cr3
	jmp	INTRENTRY_LABEL(calltrap_specstk)
	.text
	.globl	INTRENTRY_LABEL(calltrap_specstk)
INTRENTRY_LABEL(calltrap_specstk):
	lfence				# block speculation through jz above
	cld
	SMAP_CLAC
	movq	%rsp,%rdi
	call	kerntrap
	movq	$0,-8(%rsp)
	movl	$MSR_GSBASE,%ecx	# restore GS.base
	movq	%r12,%rax
	movq	%r13,%rdx
	wrmsr
	/* who knows what happened in this trap; use IBPB on the way out */
	CODEPATCH_START
	xorl	%edx,%edx
	movl	$PRED_CMD_IBPB,%eax
	movl	$MSR_PRED_CMD,%ecx
	wrmsr
	CODEPATCH_END(CPTAG_IBPB_NOP)
	call	pku_xonly
	movq	$0,-8(%rsp)
	popq	%rdi
	popq	%rsi
	popq	%rdx
	popq	%rcx
	popq	%r8
	popq	%r9
	popq	%r10
	popq	%r11
	popq	%r12
	popq	%r13
	popq	%r14
	jmp	calltrap_specstk_tramp
KUENTRY(calltrap_specstk_tramp)
	movq	%r15,%cr3		# restore %cr3
	popq	%r15
	addq	$8,%rsp			# ignore tf_err
	popq	%rbx
	popq	%rax
	addq	$8,%rsp			# ignore tf_trapno
	popq	%rbp
	iretq

IDTVEC(trap03)
#if defined(GPROF) || !defined(DDBPROF)
	ZTRAP(T_BPTFLT)
#else /* !defined(GPROF) && defined(DDBPROF) */
	pushq	$0
	pushq	$T_BPTFLT
	testb	$SEL_RPL,24(%rsp)
	je	INTRENTRY_LABEL(trap03)
	jmp	alltraps
	.text
	.global	INTRENTRY_LABEL(trap03)
INTRENTRY_LABEL(trap03):
	FENCE_NO_SAFE_SMAP
	TRAP_ENTRY_KERN
	sti
	cld
	SMAP_CLAC
	leaq	dt_prov_kprobe, %rdi
	movq	%rsp, %rsi
	call	dt_prov_kprobe_hook
	movq	$0,-8(%rsp)
	cmpl	$0, %eax
	je	.Lreal_kern_trap

	cli
	movq	TF_RDI(%rsp),%rdi
	movq	TF_RSI(%rsp),%rsi
	movq	TF_R8(%rsp),%r8
	movq	TF_R9(%rsp),%r9
	movq	TF_R10(%rsp),%r10
	movq	TF_R12(%rsp),%r12
	movq	TF_R13(%rsp),%r13
	movq	TF_R14(%rsp),%r14
	movq	TF_R15(%rsp),%r15
	movq	TF_RBP(%rsp),%rbp
	movq	TF_RBX(%rsp),%rbx
	movq	TF_RDX(%rsp),%rdx
	movq	TF_RCX(%rsp),%rcx
	movq	TF_R11(%rsp),%r11
	/* %rax restored below, after being used to shift the stack */

	cmpl	$2, %eax
	je	.Lemulate_ret

.Lemulate_push_rbp:

	/*
	 * We are returning from a probe trap so we need to fix the
	 * stack layout and emulate the patched instruction.
	 * Reserve enough room to emulate "pushq %rbp".
	 */
	subq	$16, %rsp

	movq	(TF_RAX + 16)(%rsp), %rax
	movq	%rax, TF_RAX(%rsp)

	/* Shift hardware-saved registers. */
	movq	(TF_RIP + 16)(%rsp), %rax
	movq	%rax, TF_RIP(%rsp)
	movq	(TF_CS + 16)(%rsp), %rax
	movq	%rax, TF_CS(%rsp)
	movq	(TF_RFLAGS + 16)(%rsp), %rax
	movq	%rax, TF_RFLAGS(%rsp)
	movq	(TF_RSP + 16)(%rsp), %rax
	movq	%rax, TF_RSP(%rsp)
	movq	(TF_SS + 16)(%rsp), %rax
	movq	%rax, TF_SS(%rsp)

	/*
	 * Make room on the interrupted stack and store %rbp there,
	 * just as the emulated "pushq %rbp" would have done.
	 */
	movq	TF_RSP(%rsp), %rax
	subq	$8, %rax
	movq	%rax, TF_RSP(%rsp)
	movq	%rbp, (%rax)

	/* Finally restore %rax */
	movq	(TF_RAX + 16)(%rsp),%rax
	jmp	.ret_int3

.Lemulate_ret:

	/* Store a new return address in %rip */
	movq	TF_RSP(%rsp), %rax
	movq	(%rax), %rax
	movq	%rax, TF_RIP(%rsp)
	addq	$8, TF_RSP(%rsp)

	/* Finally restore %rax */
	movq	(TF_RAX)(%rsp),%rax

.ret_int3:
	addq	$TF_RIP,%rsp
	iretq
#endif /* !defined(GPROF) && defined(DDBPROF) */

IDTVEC(trap04)
	ZTRAP(T_OFLOW)	# impossible: INTO instruction invalid in amd64
IDTVEC(trap05)
	ZTRAP(T_BOUND)	# impossible: BOUND instruction invalid in amd64
IDTVEC(trap06)
	ZTRAP(T_PRIVINFLT)
IDTVEC(trap07)
	ZTRAP(T_DNA)		# impossible: we don't do lazy FPU
IDTVEC(trap08)
	pushq	$T_DOUBLEFLT
	jmp	calltrap_specstk
IDTVEC(trap09)
	ZTRAP(T_FPOPFLT)	# impossible: not generated on amd64
IDTVEC(trap0a)
	TRAP(T_TSSFLT)
IDTVEC(trap0b)
	TRAP(T_SEGNPFLT)
IDTVEC(trap0c)
	TRAP(T_STKFLT)

/*
 * The #GP (general protection fault) handler has a few weird cases
 * to handle:
 *  - trapping in iretq to userspace,
 *  - trapping in xrstor in the kernel, and
 *  - trapping when invalid MSRs are read in rdmsr_safe.
 * We detect these by examining the %rip in the iretq_frame.
 * Handling them is done by updating %rip in the iretq_frame to point
 * to a stub handler of some sort and then iretq'ing to it.  For the
 * iretq fault we resume in a stub which acts like we got a fresh #GP.
 * For the xrstor fault we resume to a stub which returns an error to
 * the routine that requested the xrstor.
 */
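/*
 * For reference, the stack at the "movq 24(%rsp),%rdx" below (after
 * the two register pushes) looks like:
 *	(%rsp)		saved %rcx
 *	8(%rsp)		saved %rdx
 *	16(%rsp)	error code
 *	24(%rsp)	%rip of the faulting instruction (what we compare)
 *	32(%rsp)...	%cs, %rflags, %rsp, %ss
 */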
IDTVEC(trap0d)
	pushq	%rdx
	pushq	%rcx
	movq	24(%rsp),%rdx		/* over %r[cd]x and err to %rip */
	leaq	doreti_iret(%rip),%rcx
	cmpq	%rcx,%rdx
	je	.Lhandle_doreti
	leaq	xrstor_fault(%rip),%rcx
	cmpq	%rcx,%rdx
	je	.Lhandle_xrstor
	leaq	xsetbv_fault(%rip),%rcx
	cmpq	%rcx,%rdx
	je	.Lhandle_xsetbv
	leaq	rdmsr_safe_fault(%rip),%rcx
	cmpq	%rcx,%rdx
	je	.Lhandle_rdmsr_safe
	popq	%rcx
	popq	%rdx
	TRAP(T_PROTFLT)

.Lhandle_rdmsr_safe:
	/* rdmsr faulted; just resume in rdmsr_resume */
	leaq	rdmsr_resume(%rip),%rcx
	jmp	1f

.Lhandle_xrstor:
	/* xrstor faulted; just resume in xrstor_resume */
	leaq	xrstor_resume(%rip),%rcx
	jmp	1f

.Lhandle_xsetbv:
	/* xsetbv faulted; just resume in xsetbv_resume */
	leaq	xsetbv_resume(%rip),%rcx
	jmp	1f

.Lhandle_doreti:
	/* iretq faulted; resume in a stub that acts like we got a #GP */
	leaq	.Lhandle_doreti_resume(%rip),%rcx
1:	lfence		/* block speculation through conditionals above */
	movq	%rcx,24(%rsp)		/* over %r[cd]x and err to %rip */
	popq	%rcx
	popq	%rdx
	addq	$8,%rsp			/* pop the err code */
	jmp	doreti_iret
.Lhandle_doreti_resume:
	ZTRAP(T_PROTFLT)

IDTVEC(trap0e)
	TRAP(T_PAGEFLT)
IDTVEC(intrspurious)
IDTVEC_ALIAS(trap0f, intrspurious)
	iretq
IDTVEC(trap10)
	ZTRAP(T_ARITHTRAP)
IDTVEC(trap11)
	TRAP(T_ALIGNFLT)
IDTVEC(trap12)
	ZTRAP(T_MCA)
IDTVEC(trap13)
	ZTRAP(T_XMM)
IDTVEC(trap14)
	ZTRAP(T_VE)
IDTVEC(trap15)
	TRAP(T_CP)
IDTVEC(trap1f)
IDTVEC_ALIAS(trap16, trap1f)
IDTVEC_ALIAS(trap17, trap1f)
IDTVEC_ALIAS(trap18, trap1f)
IDTVEC_ALIAS(trap19, trap1f)
IDTVEC_ALIAS(trap1a, trap1f)
IDTVEC_ALIAS(trap1b, trap1f)
IDTVEC_ALIAS(trap1c, trap1f)
IDTVEC_ALIAS(trap1d, trap1f)
IDTVEC_ALIAS(trap1e, trap1f)
	/* 22 - 31 reserved for future expansion */
	ZTRAP(T_RESERVED)

	.section .rodata
	.globl	Xexceptions
	.type Xexceptions,@object
Xexceptions:
	.quad	Xtrap00, Xtrap01, Xtrap02, Xtrap03
	.quad	Xtrap04, Xtrap05, Xtrap06, Xtrap07
	.quad	Xtrap08, Xtrap09, Xtrap0a, Xtrap0b
	.quad	Xtrap0c, Xtrap0d, Xtrap0e, Xtrap0f
	.quad	Xtrap10, Xtrap11, Xtrap12, Xtrap13
	.quad	Xtrap14, Xtrap15, Xtrap16, Xtrap17
	.quad	Xtrap18, Xtrap19, Xtrap1a, Xtrap1b
	.quad	Xtrap1c, Xtrap1d, Xtrap1e, Xtrap1f
END(Xexceptions)

/*
 * All traps go through here.  Figure out whether we're
 * a) coming from usermode and need the Meltdown mitigation before
 *    jumping to user trap handling followed by AST and
 *    return-to-userspace handling, or
 * b) coming from supervisor mode and can directly jump to kernel
 *    trap handling before returning sans AST or other handling.
 */
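/*
 * (At the SEL_RPL test below the stack holds trapno, err, %rip, %cs,
 * ..., so 24(%rsp) is the saved %cs; its low RPL bits are nonzero
 * exactly when we trapped from userspace.)
 */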
KUTEXT_PAGE_START
	.align	NBPG, 0xcc
	/*
	 * This is the Meltdown alltraps page, which is mapped into
	 * the U-K page tables at the same location as alltraps
	 * below.  For this, the Meltdown case, we must be coming from
	 * userspace so we skip the SEL_RPL test and go directly to
	 * the swapgs+use-scratch+change-cr3 sequence.  Switching to
	 * the kernel page tables (thank you, Intel) will make us
	 * continue at the "TRAP_ENTRY_USER" after alltraps below.
	 * In case the CPU speculates past the mov to cr3,
	 * we put a retpoline-style pause-lfence-jmp-to-pause loop.
	 */
Xalltraps:
	swapgs
	movq	%rax,CPUVAR(SCRATCH)
	movq	CPUVAR(KERN_CR3),%rax
	.byte	0x66, 0x90	/* space for FENCE_SWAPGS_MIS_TAKEN below */
	movq	%rax,%cr3
0:	pause
	lfence
	jmp	0b
KUTEXT_PAGE_END

KTEXT_PAGE_START
	.align	NBPG, 0xcc
GENTRY(alltraps)
	CODEPATCH_START
	testb	$SEL_RPL,24(%rsp)
	je	alltraps_kern
	swapgs
	CODEPATCH_END(CPTAG_MELTDOWN_ALLTRAPS)
	FENCE_SWAPGS_MIS_TAKEN
	movq	%rax,CPUVAR(SCRATCH)
	.space	(0b - Xalltraps) - (. - alltraps), 0x90
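	/*
	 * (The .space above pads this copy so that the next instruction
	 * sits at the same offset as the 0: spin loop in Xalltraps; the
	 * two pages are mapped at the same VA, so after the %cr3 switch
	 * in the U-K copy, execution lands here at TRAP_ENTRY_USER.)
	 */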

	/*
	 * Traps from userspace
	 */
	TRAP_ENTRY_USER
	sti
	cld
	SMAP_CLAC
	.globl	recall_trap
recall_trap:
	movq	%rsp, %rdi
	call	usertrap
	movq	$0,-8(%rsp)
	cli
	jmp	intr_user_exit
END(alltraps)

/*
 * Traps from supervisor mode (kernel)
 * If we're not mitigating Meltdown, then there's a conditional branch
 * above and we may need a fence to mitigate CVE-2019-1125.  If we're
 * doing Meltdown mitigation there's just an unconditional branch and
 * we can skip the fence.
 */
	_ALIGN_TRAPS
GENTRY(alltraps_kern)
	FENCE_NO_SAFE_SMAP
GENTRY(alltraps_kern_meltdown)
	TRAP_ENTRY_KERN
	sti
	cld
	SMAP_CLAC
.Lreal_kern_trap:
#ifdef DIAGNOSTIC
	movl	CPUVAR(ILEVEL),%ebx
#endif /* DIAGNOSTIC */
	movq	%rsp, %rdi
	call	kerntrap
	movq	$0,-8(%rsp)
2:	cli
#ifndef DIAGNOSTIC
	INTRFASTEXIT
#else /* DIAGNOSTIC */
	cmpl	CPUVAR(ILEVEL),%ebx
	jne	3f
	INTRFASTEXIT
3:	sti
	leaq	spl_lowered(%rip),%rdi
	movl	CPUVAR(ILEVEL),%esi
	movl	%ebx,%edx
	xorq	%rax,%rax
	call	printf
	movq	$0,-8(%rsp)
#ifdef DDB
	int	$3
#endif /* DDB */
	movl	%ebx,CPUVAR(ILEVEL)
	jmp	2b

	.pushsection .rodata
	.type spl_lowered,@object
spl_lowered:
	.asciz	"WARNING: SPL NOT LOWERED ON TRAP EXIT %x %x\n"
END(spl_lowered)
	.popsection
#endif /* DIAGNOSTIC */
END(alltraps_kern)
END(alltraps_kern_meltdown)
KTEXT_PAGE_END


/*
 * Macros for interrupt entry, call to handler, and exit.
 *
 * XXX
 * The interrupt frame is set up to look like a trap frame.  This may be a
 * waste.  The only handler which needs a frame is the clock handler, and it
 * only needs a few bits.  Xdoreti() needs a trap frame for handling ASTs, but
 * it could easily convert the frame on demand.
 *
 * The direct costs of setting up a trap frame are two pushq's (error code and
 * trap number), an addq to get rid of these, and pushing and popping the
 * callee-saved registers %rbx, %rbp, and %r1[2-5] twice.
 *
 * If the interrupt frame were made more flexible, INTR could push %rax first
 * and decide the ipending case with less overhead.
 */

KUENTRY(x2apic_eoi)
	pushq	%rax
	pushq	%rcx
	pushq	%rdx
	mov	$MSR_X2APIC_EOI,%ecx
	mov	$0,%eax
	mov	$0,%edx
	wrmsr
	popq	%rdx
	popq	%rcx
	popq	%rax
	ret
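	/*
	 * The lfence below is never reached architecturally; it is
	 * presumably here to stop straight-line speculation past the ret.
	 */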
	lfence
END(x2apic_eoi)

#if NLAPIC > 0
#ifdef MULTIPROCESSOR
KIDTVEC(recurse_lapic_ipi)
	INTR_RECURSE
	jmp	1f
END(Xrecurse_lapic_ipi)
IDTVEC(intr_lapic_ipi)
	INTRENTRY(intr_lapic_ipi)
	CODEPATCH_START
	movl	$0,local_apic+LAPIC_EOI
	CODEPATCH_END(CPTAG_EOI)
	movl	CPUVAR(ILEVEL),%ebx
	cmpl	$IPL_IPI,%ebx
	jae	2f
END(INTRENTRY_LABEL(intr_lapic_ipi))
KIDTVEC_FALLTHROUGH(resume_lapic_ipi)
	endbr64
1:
	incl	CPUVAR(IDEPTH)
	movl	$IPL_IPI,CPUVAR(ILEVEL)
	sti
	cld
	SMAP_CLAC
	movq	%rbx,IF_PPL(%rsp)
	call	x86_ipi_handler
	movq	$0,-8(%rsp)
	jmp	Xdoreti
2:
	movq	$(1 << LIR_IPI),%rax
	orq	%rax,CPUVAR(IPENDING)
	INTRFASTEXIT
END(Xresume_lapic_ipi)

/*
 * "Fast" IPI handlers.  These are the IPIs which are handled without
 * unblocking interrupts, so there is no need for 'recurse' or 'resume'
 * entry points.
 */
/* invalidate the entire TLB, no PCIDs version */
IDTVEC(ipi_invltlb)
	pushq	%rax

	ioapic_asm_ack()

	movq	%cr3, %rax
	movq	%rax, %cr3
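	/* (rewriting %cr3 flushes all non-global TLB entries when PCIDs
	 * are not in use) */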

	lock
	decq	tlb_shoot_wait

	popq	%rax
	iretq
END(Xipi_invltlb)

#if NVMM > 0
/* Invalidate VMX EPT */
IDTVEC(ipi_invept)
	pushq	%rax
	pushq	%rdx

	ioapic_asm_ack()

	movq	$ept_shoot_vid, %rax
	movq	ept_shoot_mode, %rdx
	invept	(%rax), %rdx

	lock
	decq	tlb_shoot_wait

	popq	%rdx
	popq	%rax
	iretq
END(Xipi_invept)
#endif /* NVMM > 0 */

/* invalidate a single page, no PCIDs version */
IDTVEC(ipi_invlpg)
	pushq	%rax

	ioapic_asm_ack()

	movq	tlb_shoot_addr1, %rax
	invlpg	(%rax)

	lock
	decq	tlb_shoot_wait

	popq	%rax
	iretq
END(Xipi_invlpg)

/* invalidate a range of pages, no PCIDs version */
IDTVEC(ipi_invlrange)
	pushq	%rax
	pushq	%rdx

	ioapic_asm_ack()

	movq	tlb_shoot_addr1, %rax
	movq	tlb_shoot_addr2, %rdx
1:	invlpg	(%rax)
	addq	$PAGE_SIZE, %rax
	cmpq	%rdx, %rax
	jb	1b

	lock
	decq	tlb_shoot_wait

	popq	%rdx
	popq	%rax
	iretq
END(Xipi_invlrange)

/*
 * Invalidate the userspace PCIDs.
 */
IDTVEC(ipi_invltlb_pcid)
	pushq	%rax

	ioapic_asm_ack()

	/* set the type */
	movl	$INVPCID_PCID,%eax

	/* finish getting space for the INVPCID descriptor */
#if INVPCID_PCID == PCID_PROC
	pushq	%rax
#else
	pushq	$PCID_PROC
#endif
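	/*
	 * The 16-byte INVPCID descriptor now at (%rsp) is, per the Intel
	 * SDM: bits 11:0 = PCID, bits 63:12 reserved (zero), bits 127:64
	 * = linear address.  The address qword here is the %rax saved on
	 * entry; its value doesn't matter, as per-PCID flushes ignore it.
	 */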

	invpcid	(%rsp),%rax

	/* bump the pcid in the descriptor and invpcid again */
	movl	$PCID_PROC_INTEL,(%rsp)
	invpcid	(%rsp),%rax

	lock
	decq	tlb_shoot_wait

	/* restore the stack */
	popq	%rax
	popq	%rax
	iretq
END(Xipi_invltlb_pcid)

/*
 * Invalidate a VA in two PCIDs.  Kernel VAs are present in PCIDs 0 and 1,
 * while userspace VAs are present in PCIDs 1 and 2.
 */
IDTVEC(ipi_invlpg_pcid)
	pushq	%rax

	ioapic_asm_ack()

	/* space for the INVPCID descriptor */
	subq	$16,%rsp

	/* set the PCID in the descriptor */
	movl	tlb_shoot_first_pcid,%eax
	movq	%rax,(%rsp)

	/* set the address in the descriptor */
	movq	tlb_shoot_addr1,%rax
	movq	%rax,8(%rsp)

	/* set the type to zero, and invpcid */
	xorl	%eax,%eax
	invpcid	(%rsp),%rax

	/* bump the pcid in the descriptor and invpcid again */
	addl	$1,(%rsp)
	invpcid	(%rsp),%rax

	lock
	decq	tlb_shoot_wait

	/* restore the stack */
	addq	$16,%rsp
	popq	%rax
	iretq
END(Xipi_invlpg_pcid)

/*
 * Invalidate a range of VA in two PCIDs.  Kernel VAs are present in
 * PCIDs 0 and 1, while userspace VAs are present in PCIDs 1 and 2.
 */
IDTVEC(ipi_invlrange_pcid)
	pushq	%rax
	pushq	%rdx
	pushq	%rcx

	ioapic_asm_ack()

	/* space for the INVPCID descriptor */
	subq	$16,%rsp

	/* set the PCID in the descriptor */
	movl	tlb_shoot_first_pcid,%eax
	movq	%rax,(%rsp)

	/* set up for the loop: load the limit and set the type to zero */
	movq	tlb_shoot_addr2,%rdx
	xorl	%ecx,%ecx

	/* set the address in the descriptor and loop the invalidate */
	movq	tlb_shoot_addr1,%rax
1:	movq	%rax,8(%rsp)
	invpcid	(%rsp),%rcx
	addl	$1,(%rsp)
	invpcid	(%rsp),%rcx
	subl	$1,(%rsp)
	addq	$PAGE_SIZE,%rax
	cmpq	%rdx,%rax
	jb	1b

	lock
	decq	tlb_shoot_wait

	/* restore the stack */
	addq	$16,%rsp
	popq	%rcx
	popq	%rdx
	popq	%rax
	iretq
END(Xipi_invlrange_pcid)

IDTVEC(ipi_wbinvd)
	ioapic_asm_ack()

	wbinvd

	lock
	decq	wbinvd_wait

	iretq
END(Xipi_wbinvd)

#endif /* MULTIPROCESSOR */

	/*
	 * Interrupt from the local APIC timer.
	 */
KIDTVEC(recurse_lapic_ltimer)
	INTR_RECURSE
	jmp	1f
END(Xrecurse_lapic_ltimer)
IDTVEC(intr_lapic_ltimer)
	INTRENTRY(intr_lapic_ltimer)
	CODEPATCH_START
	movl	$0,local_apic+LAPIC_EOI
	CODEPATCH_END(CPTAG_EOI)
	movl	CPUVAR(ILEVEL),%ebx
	cmpl	$IPL_CLOCK,%ebx
	jae	2f
END(INTRENTRY_LABEL(intr_lapic_ltimer))
KIDTVEC_FALLTHROUGH(resume_lapic_ltimer)
	endbr64
1:
	incl	CPUVAR(IDEPTH)
	movl	$IPL_CLOCK,CPUVAR(ILEVEL)
	sti
	cld
	SMAP_CLAC
	movq	%rbx,IF_PPL(%rsp)
	xorq	%rdi,%rdi
	call	lapic_clockintr
	movq	$0,-8(%rsp)
	jmp	Xdoreti
2:
	movq	$(1 << LIR_TIMER),%rax
	orq	%rax,CPUVAR(IPENDING)
	INTRFASTEXIT
END(Xresume_lapic_ltimer)

#if NXEN > 0
/*
 * Xen event channel upcall interrupt handler.
 * Only used when the hypervisor supports direct vector callbacks.
 */
KIDTVEC(recurse_xen_upcall)
	INTR_RECURSE
	jmp	1f
END(Xrecurse_xen_upcall)
IDTVEC(intr_xen_upcall)
	INTRENTRY(intr_xen_upcall)
	call	xen_intr_ack
	movq	$0,-8(%rsp)
	movl	CPUVAR(ILEVEL),%ebx
	cmpl	$IPL_NET,%ebx
	jae	2f
END(INTRENTRY_LABEL(intr_xen_upcall))
KIDTVEC_FALLTHROUGH(resume_xen_upcall)
	endbr64
1:
	incl	CPUVAR(IDEPTH)
	movl	$IPL_NET,CPUVAR(ILEVEL)
	sti
	cld
	SMAP_CLAC
	movq	%rbx,IF_PPL(%rsp)
	call	xen_intr
	movq	$0,-8(%rsp)
	jmp	Xdoreti
2:
	movq	$(1 << LIR_XEN),%rax
	orq	%rax,CPUVAR(IPENDING)
	INTRFASTEXIT
END(Xresume_xen_upcall)
#endif /* NXEN > 0 */

#if NHYPERV > 0
/*
 * Hyperv event channel upcall interrupt handler.
 * Only used when the hypervisor supports direct vector callbacks.
 */
KIDTVEC(recurse_hyperv_upcall)
	INTR_RECURSE
	jmp	1f
END(Xrecurse_hyperv_upcall)
IDTVEC(intr_hyperv_upcall)
	INTRENTRY(intr_hyperv_upcall)
	movl	CPUVAR(ILEVEL),%ebx
	cmpl	$IPL_NET,%ebx
	jae	2f
END(INTRENTRY_LABEL(intr_hyperv_upcall))
KIDTVEC_FALLTHROUGH(resume_hyperv_upcall)
	endbr64
1:
	incl	CPUVAR(IDEPTH)
	movl	$IPL_NET,CPUVAR(ILEVEL)
	sti
	cld
	SMAP_CLAC
	movq	%rbx,IF_PPL(%rsp)
	call	hv_intr
	movq	$0,-8(%rsp)
	jmp	Xdoreti
2:
	movq	$(1 << LIR_HYPERV),%rax
	orq	%rax,CPUVAR(IPENDING)
	INTRFASTEXIT
END(Xresume_hyperv_upcall)
#endif /* NHYPERV > 0 */
#endif /* NLAPIC > 0 */

#define voidop(num)


/*
 * This macro defines the generic stub code.  Its arguments modify it
 * for specific PICs.
 */
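/*
 * Each expansion provides the three entry points collected in the
 * *_stubs tables below: Xintr_<name><num>, the IDT entry for the
 * hardware vector; and Xrecurse_<name><num>/Xresume_<name><num>,
 * which replay an interrupt that was posted in IPENDING because the
 * CPU's IPL was too high when it arrived (reached from the spl
 * lowering paths rather than from the IDT).
 */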

#define	INTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
KIDTVEC(recurse_##name##num)						;\
	INTR_RECURSE							;\
END(Xrecurse_##name##num)						;\
KIDTVEC_FALLTHROUGH(resume_##name##num)					\
	endbr64								;\
	movq	$IREENT_MAGIC,TF_ERR(%rsp)				;\
	movl	%ebx,%r13d						;\
	movq	CPUVAR(ISOURCES) + (num) * 8, %r14			;\
	movl	IS_MAXLEVEL(%r14),%ebx					;\
	jmp	1f							;\
END(Xresume_##name##num)						;\
IDTVEC(intr_##name##num)						;\
	INTRENTRY(intr_##name##num)					;\
	movq	CPUVAR(ISOURCES) + (num) * 8, %r14			;\
	mask(num)			/* mask it in hardware */	;\
	early_ack(num)			/* and allow other intrs */	;\
	incl	uvmexp+V_INTR		/* statistical info */		;\
	testq	%r14,%r14						;\
	jz	9f			/* stray */			;\
	movl	IS_MAXLEVEL(%r14),%ebx					;\
	movl	CPUVAR(ILEVEL),%r13d					;\
	cmpl	%ebx,%r13d						;\
	jae	10f			/* currently masked; hold it */	;\
1:									\
	movq	%r13,IF_PPL(%rsp)					;\
	movl	%ebx,CPUVAR(ILEVEL)					;\
	sti								;\
	cld								;\
	SMAP_CLAC							;\
	incl	CPUVAR(IDEPTH)						;\
	movq	IS_HANDLERS(%r14),%rbx					;\
6:	/* loop, walking chain of handlers */				\
	movl	IH_LEVEL(%rbx),%r12d					;\
	cmpl	%r13d,%r12d						;\
	jle	7f							;\
	movl	%r12d,CPUVAR(ILEVEL)					;\
	movq	%rbx, %rsi						;\
	movq	%rsp, %rdi						;\
	call	intr_handler		/* call it */			;\
	movq	$0,-8(%rsp)						;\
	orl	%eax,%eax		/* should it be counted? */	;\
	jz	4f			/* no, skip it */		;\
	incq	IH_COUNT(%rbx)		/* count the intrs */		;\
	cmpl	$0,intr_shared_edge					;\
	jne	4f			/* if no shared edges ... */	;\
	orl	%eax,%eax		/* 1 means stop trying */	;\
	jns	5f							;\
4:	movq	IH_NEXT(%rbx),%rbx	/* next handler in chain */	;\
	testq	%rbx,%rbx						;\
	jnz	6b							;\
5:	/* successfully handled */					\
	cli								;\
	unmask(num)			/* unmask it in hardware */	;\
	late_ack(num)							;\
	sti								;\
	jmp	Xdoreti			/* lower spl and do ASTs */	;\
7:	/* current IPL > handler's ih_level */				\
	cli								;\
	movq	$(1 << num),%rax					;\
	orq	%rax,CPUVAR(IPENDING)					;\
	level_mask(num)							;\
	late_ack(num)							;\
	sti								;\
	jmp	Xdoreti			/* lower spl and do ASTs */	;\
10:	/* currently masked */						\
	cli								;\
	movq	$(1 << num),%rax					;\
	orq	%rax,CPUVAR(IPENDING)					;\
	level_mask(num)							;\
	late_ack(num)							;\
	INTRFASTEXIT							;\
9:	/* spurious interrupt */					\
	unmask(num)							;\
	late_ack(num)							;\
	testb	$SEL_RPL,TF_CS(%rsp)					;\
	jnz	intr_user_exit						;\
	INTRFASTEXIT							;\
END(INTRENTRY_LABEL(intr_##name##num))

#define ICUADDR IO_ICU1

INTRSTUB(legacy,0,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,1,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,2,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,3,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,4,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,5,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,6,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,7,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
#undef ICUADDR
#define ICUADDR IO_ICU2

INTRSTUB(legacy,8,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,9,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,10,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,11,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,12,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,13,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,14,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)
INTRSTUB(legacy,15,i8259_asm_ack2,voidop,i8259_asm_mask,i8259_asm_unmask,
    voidop)

#if NIOAPIC > 0

INTRSTUB(ioapic_edge,0,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,1,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,2,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,3,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,4,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,5,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,6,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,7,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,8,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,9,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,10,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,11,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,12,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,13,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,14,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,15,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,16,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,17,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,18,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,19,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,20,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,21,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,22,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,23,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,24,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,25,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,26,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,27,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,28,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,29,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,30,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,31,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,32,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,33,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,34,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,35,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,36,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,37,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,38,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,39,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,40,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,41,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,42,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,43,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,44,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,45,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,46,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,47,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,48,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,49,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,50,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,51,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,52,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,53,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,54,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,55,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,56,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,57,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,58,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,59,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,60,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,61,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,62,voidop,ioapic_asm_ack,voidop,voidop,voidop)
INTRSTUB(ioapic_edge,63,voidop,ioapic_asm_ack,voidop,voidop,voidop)

INTRSTUB(ioapic_level,0,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,1,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,2,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,3,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,4,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,5,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,6,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,7,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,8,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,9,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,10,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,11,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,12,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,13,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,14,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,15,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,16,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,17,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,18,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,19,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,20,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,21,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,22,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,23,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,24,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,25,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,26,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,27,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,28,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,29,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,30,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,31,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,32,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,33,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,34,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,35,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,36,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,37,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,38,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,39,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,40,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,41,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,42,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,43,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,44,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,45,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,46,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,47,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,48,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,49,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,50,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,51,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,52,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,53,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,54,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,55,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,56,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,57,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,58,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,59,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,60,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,61,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,62,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
INTRSTUB(ioapic_level,63,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)

#endif /* NIOAPIC > 0 */

	.section .rodata

	.globl i8259_stubs
	.type i8259_stubs,@object
i8259_stubs:
	.quad Xintr_legacy0, Xrecurse_legacy0, Xresume_legacy0
	.quad Xintr_legacy1, Xrecurse_legacy1, Xresume_legacy1
	.quad Xintr_legacy2, Xrecurse_legacy2, Xresume_legacy2
	.quad Xintr_legacy3, Xrecurse_legacy3, Xresume_legacy3
	.quad Xintr_legacy4, Xrecurse_legacy4, Xresume_legacy4
	.quad Xintr_legacy5, Xrecurse_legacy5, Xresume_legacy5
	.quad Xintr_legacy6, Xrecurse_legacy6, Xresume_legacy6
	.quad Xintr_legacy7, Xrecurse_legacy7, Xresume_legacy7
	.quad Xintr_legacy8, Xrecurse_legacy8, Xresume_legacy8
	.quad Xintr_legacy9, Xrecurse_legacy9, Xresume_legacy9
	.quad Xintr_legacy10, Xrecurse_legacy10, Xresume_legacy10
	.quad Xintr_legacy11, Xrecurse_legacy11, Xresume_legacy11
	.quad Xintr_legacy12, Xrecurse_legacy12, Xresume_legacy12
	.quad Xintr_legacy13, Xrecurse_legacy13, Xresume_legacy13
	.quad Xintr_legacy14, Xrecurse_legacy14, Xresume_legacy14
	.quad Xintr_legacy15, Xrecurse_legacy15, Xresume_legacy15
END(i8259_stubs)

#if NIOAPIC > 0
	.globl ioapic_edge_stubs
	.type ioapic_edge_stubs,@object
ioapic_edge_stubs:
	.quad Xintr_ioapic_edge0, Xrecurse_ioapic_edge0, Xresume_ioapic_edge0
	.quad Xintr_ioapic_edge1, Xrecurse_ioapic_edge1, Xresume_ioapic_edge1
	.quad Xintr_ioapic_edge2, Xrecurse_ioapic_edge2, Xresume_ioapic_edge2
	.quad Xintr_ioapic_edge3, Xrecurse_ioapic_edge3, Xresume_ioapic_edge3
	.quad Xintr_ioapic_edge4, Xrecurse_ioapic_edge4, Xresume_ioapic_edge4
	.quad Xintr_ioapic_edge5, Xrecurse_ioapic_edge5, Xresume_ioapic_edge5
	.quad Xintr_ioapic_edge6, Xrecurse_ioapic_edge6, Xresume_ioapic_edge6
	.quad Xintr_ioapic_edge7, Xrecurse_ioapic_edge7, Xresume_ioapic_edge7
	.quad Xintr_ioapic_edge8, Xrecurse_ioapic_edge8, Xresume_ioapic_edge8
	.quad Xintr_ioapic_edge9, Xrecurse_ioapic_edge9, Xresume_ioapic_edge9
	.quad Xintr_ioapic_edge10, Xrecurse_ioapic_edge10, Xresume_ioapic_edge10
	.quad Xintr_ioapic_edge11, Xrecurse_ioapic_edge11, Xresume_ioapic_edge11
	.quad Xintr_ioapic_edge12, Xrecurse_ioapic_edge12, Xresume_ioapic_edge12
	.quad Xintr_ioapic_edge13, Xrecurse_ioapic_edge13, Xresume_ioapic_edge13
	.quad Xintr_ioapic_edge14, Xrecurse_ioapic_edge14, Xresume_ioapic_edge14
	.quad Xintr_ioapic_edge15, Xrecurse_ioapic_edge15, Xresume_ioapic_edge15
	.quad Xintr_ioapic_edge16, Xrecurse_ioapic_edge16, Xresume_ioapic_edge16
	.quad Xintr_ioapic_edge17, Xrecurse_ioapic_edge17, Xresume_ioapic_edge17
	.quad Xintr_ioapic_edge18, Xrecurse_ioapic_edge18, Xresume_ioapic_edge18
	.quad Xintr_ioapic_edge19, Xrecurse_ioapic_edge19, Xresume_ioapic_edge19
	.quad Xintr_ioapic_edge20, Xrecurse_ioapic_edge20, Xresume_ioapic_edge20
	.quad Xintr_ioapic_edge21, Xrecurse_ioapic_edge21, Xresume_ioapic_edge21
	.quad Xintr_ioapic_edge22, Xrecurse_ioapic_edge22, Xresume_ioapic_edge22
	.quad Xintr_ioapic_edge23, Xrecurse_ioapic_edge23, Xresume_ioapic_edge23
	.quad Xintr_ioapic_edge24, Xrecurse_ioapic_edge24, Xresume_ioapic_edge24
	.quad Xintr_ioapic_edge25, Xrecurse_ioapic_edge25, Xresume_ioapic_edge25
	.quad Xintr_ioapic_edge26, Xrecurse_ioapic_edge26, Xresume_ioapic_edge26
	.quad Xintr_ioapic_edge27, Xrecurse_ioapic_edge27, Xresume_ioapic_edge27
	.quad Xintr_ioapic_edge28, Xrecurse_ioapic_edge28, Xresume_ioapic_edge28
	.quad Xintr_ioapic_edge29, Xrecurse_ioapic_edge29, Xresume_ioapic_edge29
	.quad Xintr_ioapic_edge30, Xrecurse_ioapic_edge30, Xresume_ioapic_edge30
	.quad Xintr_ioapic_edge31, Xrecurse_ioapic_edge31, Xresume_ioapic_edge31
	.quad Xintr_ioapic_edge32, Xrecurse_ioapic_edge32, Xresume_ioapic_edge32
	.quad Xintr_ioapic_edge33, Xrecurse_ioapic_edge33, Xresume_ioapic_edge33
	.quad Xintr_ioapic_edge34, Xrecurse_ioapic_edge34, Xresume_ioapic_edge34
	.quad Xintr_ioapic_edge35, Xrecurse_ioapic_edge35, Xresume_ioapic_edge35
	.quad Xintr_ioapic_edge36, Xrecurse_ioapic_edge36, Xresume_ioapic_edge36
	.quad Xintr_ioapic_edge37, Xrecurse_ioapic_edge37, Xresume_ioapic_edge37
	.quad Xintr_ioapic_edge38, Xrecurse_ioapic_edge38, Xresume_ioapic_edge38
	.quad Xintr_ioapic_edge39, Xrecurse_ioapic_edge39, Xresume_ioapic_edge39
	.quad Xintr_ioapic_edge40, Xrecurse_ioapic_edge40, Xresume_ioapic_edge40
	.quad Xintr_ioapic_edge41, Xrecurse_ioapic_edge41, Xresume_ioapic_edge41
	.quad Xintr_ioapic_edge42, Xrecurse_ioapic_edge42, Xresume_ioapic_edge42
	.quad Xintr_ioapic_edge43, Xrecurse_ioapic_edge43, Xresume_ioapic_edge43
	.quad Xintr_ioapic_edge44, Xrecurse_ioapic_edge44, Xresume_ioapic_edge44
	.quad Xintr_ioapic_edge45, Xrecurse_ioapic_edge45, Xresume_ioapic_edge45
	.quad Xintr_ioapic_edge46, Xrecurse_ioapic_edge46, Xresume_ioapic_edge46
	.quad Xintr_ioapic_edge47, Xrecurse_ioapic_edge47, Xresume_ioapic_edge47
	.quad Xintr_ioapic_edge48, Xrecurse_ioapic_edge48, Xresume_ioapic_edge48
	.quad Xintr_ioapic_edge49, Xrecurse_ioapic_edge49, Xresume_ioapic_edge49
	.quad Xintr_ioapic_edge50, Xrecurse_ioapic_edge50, Xresume_ioapic_edge50
	.quad Xintr_ioapic_edge51, Xrecurse_ioapic_edge51, Xresume_ioapic_edge51
	.quad Xintr_ioapic_edge52, Xrecurse_ioapic_edge52, Xresume_ioapic_edge52
	.quad Xintr_ioapic_edge53, Xrecurse_ioapic_edge53, Xresume_ioapic_edge53
	.quad Xintr_ioapic_edge54, Xrecurse_ioapic_edge54, Xresume_ioapic_edge54
	.quad Xintr_ioapic_edge55, Xrecurse_ioapic_edge55, Xresume_ioapic_edge55
	.quad Xintr_ioapic_edge56, Xrecurse_ioapic_edge56, Xresume_ioapic_edge56
	.quad Xintr_ioapic_edge57, Xrecurse_ioapic_edge57, Xresume_ioapic_edge57
	.quad Xintr_ioapic_edge58, Xrecurse_ioapic_edge58, Xresume_ioapic_edge58
	.quad Xintr_ioapic_edge59, Xrecurse_ioapic_edge59, Xresume_ioapic_edge59
	.quad Xintr_ioapic_edge60, Xrecurse_ioapic_edge60, Xresume_ioapic_edge60
	.quad Xintr_ioapic_edge61, Xrecurse_ioapic_edge61, Xresume_ioapic_edge61
	.quad Xintr_ioapic_edge62, Xrecurse_ioapic_edge62, Xresume_ioapic_edge62
	.quad Xintr_ioapic_edge63, Xrecurse_ioapic_edge63, Xresume_ioapic_edge63
END(ioapic_edge_stubs)

	.globl ioapic_level_stubs
	.type ioapic_level_stubs,@object
ioapic_level_stubs:
	.quad Xintr_ioapic_level0, Xrecurse_ioapic_level0, Xresume_ioapic_level0
	.quad Xintr_ioapic_level1, Xrecurse_ioapic_level1, Xresume_ioapic_level1
	.quad Xintr_ioapic_level2, Xrecurse_ioapic_level2, Xresume_ioapic_level2
	.quad Xintr_ioapic_level3, Xrecurse_ioapic_level3, Xresume_ioapic_level3
	.quad Xintr_ioapic_level4, Xrecurse_ioapic_level4, Xresume_ioapic_level4
	.quad Xintr_ioapic_level5, Xrecurse_ioapic_level5, Xresume_ioapic_level5
	.quad Xintr_ioapic_level6, Xrecurse_ioapic_level6, Xresume_ioapic_level6
	.quad Xintr_ioapic_level7, Xrecurse_ioapic_level7, Xresume_ioapic_level7
	.quad Xintr_ioapic_level8, Xrecurse_ioapic_level8, Xresume_ioapic_level8
	.quad Xintr_ioapic_level9, Xrecurse_ioapic_level9, Xresume_ioapic_level9
	.quad Xintr_ioapic_level10, Xrecurse_ioapic_level10, Xresume_ioapic_level10
	.quad Xintr_ioapic_level11, Xrecurse_ioapic_level11, Xresume_ioapic_level11
	.quad Xintr_ioapic_level12, Xrecurse_ioapic_level12, Xresume_ioapic_level12
	.quad Xintr_ioapic_level13, Xrecurse_ioapic_level13, Xresume_ioapic_level13
	.quad Xintr_ioapic_level14, Xrecurse_ioapic_level14, Xresume_ioapic_level14
	.quad Xintr_ioapic_level15, Xrecurse_ioapic_level15, Xresume_ioapic_level15
	.quad Xintr_ioapic_level16, Xrecurse_ioapic_level16, Xresume_ioapic_level16
	.quad Xintr_ioapic_level17, Xrecurse_ioapic_level17, Xresume_ioapic_level17
	.quad Xintr_ioapic_level18, Xrecurse_ioapic_level18, Xresume_ioapic_level18
	.quad Xintr_ioapic_level19, Xrecurse_ioapic_level19, Xresume_ioapic_level19
	.quad Xintr_ioapic_level20, Xrecurse_ioapic_level20, Xresume_ioapic_level20
	.quad Xintr_ioapic_level21, Xrecurse_ioapic_level21, Xresume_ioapic_level21
	.quad Xintr_ioapic_level22, Xrecurse_ioapic_level22, Xresume_ioapic_level22
	.quad Xintr_ioapic_level23, Xrecurse_ioapic_level23, Xresume_ioapic_level23
	.quad Xintr_ioapic_level24, Xrecurse_ioapic_level24, Xresume_ioapic_level24
	.quad Xintr_ioapic_level25, Xrecurse_ioapic_level25, Xresume_ioapic_level25
	.quad Xintr_ioapic_level26, Xrecurse_ioapic_level26, Xresume_ioapic_level26
	.quad Xintr_ioapic_level27, Xrecurse_ioapic_level27, Xresume_ioapic_level27
	.quad Xintr_ioapic_level28, Xrecurse_ioapic_level28, Xresume_ioapic_level28
	.quad Xintr_ioapic_level29, Xrecurse_ioapic_level29, Xresume_ioapic_level29
	.quad Xintr_ioapic_level30, Xrecurse_ioapic_level30, Xresume_ioapic_level30
	.quad Xintr_ioapic_level31, Xrecurse_ioapic_level31, Xresume_ioapic_level31
	.quad Xintr_ioapic_level32, Xrecurse_ioapic_level32, Xresume_ioapic_level32
	.quad Xintr_ioapic_level33, Xrecurse_ioapic_level33, Xresume_ioapic_level33
	.quad Xintr_ioapic_level34, Xrecurse_ioapic_level34, Xresume_ioapic_level34
	.quad Xintr_ioapic_level35, Xrecurse_ioapic_level35, Xresume_ioapic_level35
	.quad Xintr_ioapic_level36, Xrecurse_ioapic_level36, Xresume_ioapic_level36
	.quad Xintr_ioapic_level37, Xrecurse_ioapic_level37, Xresume_ioapic_level37
	.quad Xintr_ioapic_level38, Xrecurse_ioapic_level38, Xresume_ioapic_level38
	.quad Xintr_ioapic_level39, Xrecurse_ioapic_level39, Xresume_ioapic_level39
	.quad Xintr_ioapic_level40, Xrecurse_ioapic_level40, Xresume_ioapic_level40
	.quad Xintr_ioapic_level41, Xrecurse_ioapic_level41, Xresume_ioapic_level41
	.quad Xintr_ioapic_level42, Xrecurse_ioapic_level42, Xresume_ioapic_level42
	.quad Xintr_ioapic_level43, Xrecurse_ioapic_level43, Xresume_ioapic_level43
	.quad Xintr_ioapic_level44, Xrecurse_ioapic_level44, Xresume_ioapic_level44
	.quad Xintr_ioapic_level45, Xrecurse_ioapic_level45, Xresume_ioapic_level45
	.quad Xintr_ioapic_level46, Xrecurse_ioapic_level46, Xresume_ioapic_level46
	.quad Xintr_ioapic_level47, Xrecurse_ioapic_level47, Xresume_ioapic_level47
	.quad Xintr_ioapic_level48, Xrecurse_ioapic_level48, Xresume_ioapic_level48
	.quad Xintr_ioapic_level49, Xrecurse_ioapic_level49, Xresume_ioapic_level49
	.quad Xintr_ioapic_level50, Xrecurse_ioapic_level50, Xresume_ioapic_level50
	.quad Xintr_ioapic_level51, Xrecurse_ioapic_level51, Xresume_ioapic_level51
	.quad Xintr_ioapic_level52, Xrecurse_ioapic_level52, Xresume_ioapic_level52
	.quad Xintr_ioapic_level53, Xrecurse_ioapic_level53, Xresume_ioapic_level53
	.quad Xintr_ioapic_level54, Xrecurse_ioapic_level54, Xresume_ioapic_level54
	.quad Xintr_ioapic_level55, Xrecurse_ioapic_level55, Xresume_ioapic_level55
	.quad Xintr_ioapic_level56, Xrecurse_ioapic_level56, Xresume_ioapic_level56
	.quad Xintr_ioapic_level57, Xrecurse_ioapic_level57, Xresume_ioapic_level57
	.quad Xintr_ioapic_level58, Xrecurse_ioapic_level58, Xresume_ioapic_level58
	.quad Xintr_ioapic_level59, Xrecurse_ioapic_level59, Xresume_ioapic_level59
	.quad Xintr_ioapic_level60, Xrecurse_ioapic_level60, Xresume_ioapic_level60
	.quad Xintr_ioapic_level61, Xrecurse_ioapic_level61, Xresume_ioapic_level61
	.quad Xintr_ioapic_level62, Xrecurse_ioapic_level62, Xresume_ioapic_level62
	.quad Xintr_ioapic_level63, Xrecurse_ioapic_level63, Xresume_ioapic_level63
END(ioapic_level_stubs)
#endif /* NIOAPIC > 0 */

/*
 * Soft interrupt handlers
 */
NENTRY(retpoline_r13)
	CODEPATCH_START
	JMP_RETPOLINE(r13)
	CODEPATCH_END(CPTAG_RETPOLINE_R13)
END(retpoline_r13)

KIDTVEC(softtty)
	endbr64
	movl	$IPL_SOFTTTY, CPUVAR(ILEVEL)
	sti
	incl	CPUVAR(IDEPTH)
	movl	$X86_SOFTINTR_SOFTTTY,%edi
	call	softintr_dispatch
	movq	$0,-8(%rsp)
	decl	CPUVAR(IDEPTH)
	CODEPATCH_START
	jmp	retpoline_r13
	CODEPATCH_END(CPTAG_RETPOLINE_R13)
END(Xsofttty)

KIDTVEC(softnet)
	endbr64
	movl	$IPL_SOFTNET, CPUVAR(ILEVEL)
	sti
	incl	CPUVAR(IDEPTH)
	movl	$X86_SOFTINTR_SOFTNET,%edi
	call	softintr_dispatch
	movq	$0,-8(%rsp)
	decl	CPUVAR(IDEPTH)
	CODEPATCH_START
	jmp	retpoline_r13
	CODEPATCH_END(CPTAG_RETPOLINE_R13)
END(Xsoftnet)

KIDTVEC(softclock)
	endbr64
	movl	$IPL_SOFTCLOCK, CPUVAR(ILEVEL)
	sti
	incl	CPUVAR(IDEPTH)
	movl	$X86_SOFTINTR_SOFTCLOCK,%edi
	call	softintr_dispatch
	movq	$0,-8(%rsp)
	decl	CPUVAR(IDEPTH)
	CODEPATCH_START
	jmp	retpoline_r13
	CODEPATCH_END(CPTAG_RETPOLINE_R13)
END(Xsoftclock)
