/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

/*
 * Copyright 2019 Joyent, Inc.
 */

#include <sys/segments.h>
#include <sys/controlregs.h>

/*
 * Do a call into BIOS.  This goes down to 16 bit real mode and back again.
 */

/*
 * instruction prefix to change operand size in instruction
 */
#define DATASZ	.byte 0x66;
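
/*
 * 0x66 is the x86 operand-size override prefix.  Prepended to an
 * instruction that was assembled for a 32 bit section but executes in a
 * 16 bit segment, e.g.
 *	DATASZ	movl	$B16DATA_SEL, %eax
 * it restores the full 32 bit operand the assembler encoded.
 */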

	.globl	_start
_start:

	/*
	 * Save caller registers
	 */
	movq	%rbp, save_rbp
	movq	%rsp, save_rsp
	movq	%rbx, save_rbx
	movq	%rsi, save_rsi
	movq	%r12, save_r12
	movq	%r13, save_r13
	movq	%r14, save_r14
	movq	%r15, save_r15

	/* Switch to a low memory stack */
	movq	$_start, %rsp

	/* put interrupt number in %bl */
	movq	%rdi, %rbx

	/* allocate space for args on stack */
	subq	$18, %rsp
	movq	%rsp, %rdi

	/* copy args from high memory to stack in low memory */
	cld
	movl	$18, %ecx
	rep
	movsb
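
	/*
	 * The 18 byte block copied here matches the nine 16 bit values
	 * popped at enter_real below: %ax, %bx, %cx, %dx, %si, %di, %bp,
	 * then %es and %ds.  Viewed from C this would be nine consecutive
	 * uint16_t fields; the caller's actual struct is defined elsewhere.
	 */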

	/*
	 * Save system registers
	 */
	sidt	save_idt
	sgdt	save_gdt
	str	save_tr
	movw	%cs, save_cs
	movw	%ds, save_ds
	movw	%ss, save_ss
	movw	%es, save_es
	movw	%fs, save_fs
	movw	%gs, save_gs
	movq	%cr4, %rax
	movq	%rax, save_cr4
	movq	%cr3, %rax
	movq	%rax, save_cr3
	movq	%cr0, %rax
	movq	%rax, save_cr0

	/*
	 * save/clear the extension parts of the fs/gs base registers and cr8
	 */
	movl	$MSR_AMD_FSBASE, %ecx
	rdmsr
	movl	%eax, save_fsbase
	movl	%edx, save_fsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movl	$MSR_AMD_GSBASE, %ecx
	rdmsr
	movl	%eax, save_gsbase
	movl	%edx, save_gsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movl	$MSR_AMD_KGSBASE, %ecx
	rdmsr
	movl	%eax, save_kgsbase
	movl	%edx, save_kgsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movq	%cr8, %rax
	movq	%rax, save_cr8

	/*
	 * set offsets in 16 bit ljmp instructions below
	 */
	leaq	enter_real, %rax
	movw	%ax, enter_real_ljmp

	leaq	enter_protected, %rax
	movw	%ax, enter_protected_ljmp

	leaq	gdt_info, %rax
	movw	%ax, gdt_info_load

	/*
	 * insert BIOS interrupt number into later instruction
	 */
	movb    %bl, int_instr+1
	jmp     1f
1:
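
	/*
	 * The jmp above forces a branch so the CPU discards any prefetched
	 * copy of the instruction just patched at int_instr.
	 */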

	/*
	 * zero out all the registers to make sure they're 16 bit clean
	 */
	xorq	%r8, %r8
	xorq	%r9, %r9
	xorq	%r10, %r10
	xorq	%r11, %r11
	xorq	%r12, %r12
	xorq	%r13, %r13
	xorq	%r14, %r14
	xorq	%r15, %r15
	xorl	%eax, %eax
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	xorl	%edx, %edx
	xorl	%ebp, %ebp
	xorl	%esi, %esi
	xorl	%edi, %edi

	/*
	 * Load our own GDT/IDT
	 */
	lgdt	gdt_info
	lidt	idt_info
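
	/*
	 * gdt_info describes the low-memory trampoline GDT included at the
	 * bottom of this file.  idt_info (limit 0x3ff, base 0) is simply the
	 * real-mode interrupt vector table, so the patched "int" below will
	 * vector through the BIOS's own table once we reach real mode.
	 */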

	/*
	 * Shut down 64 bit mode. First get into compatibility mode.
	 */
	movq	%rsp, %rax
	pushq	$B32DATA_SEL
	pushq	%rax
	pushf
	pushq	$B32CODE_SEL
	pushq	$1f
	iretq
1:
	.code32
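
	/*
	 * The iretq popped %rip, %cs, %rflags, %rsp and %ss from the frame
	 * built above; loading %cs with the 32 bit selector dropped us from
	 * 64 bit code into compatibility mode.
	 */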

	/*
	 * disable long mode by:
	 * - shutting down paging (bit 31 of cr0)
	 * - flushing the TLB
	 * - disabling LME (long mode enable) in EFER (extended feature reg)
	 */
	movl	%cr0, %eax
	btcl	$31, %eax		/* disable paging */
	movl	%eax, %cr0
	ljmp	$B32CODE_SEL, $1f
1:

	xorl	%eax, %eax
	movl	%eax, %cr3		/* flushes TLB */

	movl	$MSR_AMD_EFER, %ecx	/* Extended Feature Enable */
	rdmsr
	btcl	$8, %eax		/* bit 8 Long Mode Enable bit */
	wrmsr
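
	/*
	 * Note the ordering: paging was turned off first, because clearing
	 * EFER.LME while CR0.PG is still set faults.
	 */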

	/*
	 * OK, now enter 16 bit mode, so we can shut down protected mode
	 *
	 * We'll have to act like we're still in a 32 bit section.
	 * So the code from this point has DATASZ in front of it to get 32 bit
	 * operands. If DATASZ is missing the operands will be 16 bit.
	 *
	 * Now shut down paging and protected (i.e. segmentation) modes.
	 */
	ljmp	$B16CODE_SEL, $enter_16_bit
enter_16_bit:

	/*
	 * Make sure hidden parts of segment registers are 16 bit clean
	 */
	DATASZ	movl	$B16DATA_SEL, %eax
		movw    %ax, %ss
		movw    %ax, %ds
		movw    %ax, %es
		movw    %ax, %fs
		movw    %ax, %gs


	DATASZ	movl	$0x0, %eax	/* put us in real mode */
	DATASZ	movl	%eax, %cr0
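
	/*
	 * Hand-assembled far jump: opcode 0xea followed by a 16 bit offset
	 * and a 16 bit %cs value.  The offset was patched into
	 * enter_real_ljmp near the top of this file, since the low-memory
	 * target address is only known at run time.
	 */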
	.byte	0xea			/* ljmp */
enter_real_ljmp:
	.value	0			/* addr (16 bit) */
	.value	0x0			/* value for %cs */
enter_real:

	/*
	 * zero out the remaining segment registers
	 */
	DATASZ	xorl	%eax, %eax
		movw    %ax, %ss
		movw    %ax, %ds
		movw    %ax, %es
		movw    %ax, %fs
		movw    %ax, %gs

	/*
	 * load the arguments to the BIOS call from the stack
	 */
	popl	%eax	/* really executes a 16 bit pop */
	popl	%ebx
	popl	%ecx
	popl	%edx
	popl	%esi
	popl	%edi
	popl	%ebp
	pop	%es
	pop	%ds

	/*
	 * do the actual BIOS call
	 */
	sti
int_instr:
	int	$0x10		/* this int number is overwritten */
	cli			/* ensure interrupts remain disabled */

	/*
	 * save results of the BIOS call
	 */
	pushf
	push	%ds
	push	%es
	pushl	%ebp		/* still executes as 16 bit */
	pushl	%edi
	pushl	%esi
	pushl	%edx
	pushl	%ecx
	pushl	%ebx
	pushl	%eax
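
	/*
	 * The stack now holds 20 bytes: the nine register slots copied back
	 * to the caller later, with the 16 bit flags image at offset 18,
	 * which becomes the return value.
	 */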

	/*
	 * Restore protected mode and 32 bit execution
	 */
	push	$0			/* make sure %ds is zero before lgdt */
	pop	%ds
	.byte	0x0f, 0x01, 0x16	/* lgdt */
gdt_info_load:
	.value	0	/* temp GDT in currently addressable mem */
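
	/*
	 * The bytes above hand-assemble "lgdt" with a 16 bit absolute memory
	 * operand (modrm 0x16); the displacement was patched into
	 * gdt_info_load at the top of this file.
	 */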

	DATASZ	movl	$0x1, %eax
	DATASZ	movl	%eax, %cr0

	.byte	0xea			/* ljmp */
enter_protected_ljmp:
	.value	0			/* addr (still in 16 bit) */
	.value	B32CODE_SEL		/* %cs value */
enter_protected:

	/*
	 * We are now back in a 32 bit code section, fix data/stack segments
	 */
	.code32
	movw	$B32DATA_SEL, %ax
	movw	%ax, %ds
	movw	%ax, %ss

	/*
	 * Re-enable paging. Note we only use 32 bit mov's to restore these
	 * control registers. That's OK as the upper 32 bits are always zero.
	 */
	movl	save_cr4, %eax
	movl	%eax, %cr4
	movl	save_cr3, %eax
	movl	%eax, %cr3

	/*
	 * re-enable long mode
	 */
	movl	$MSR_AMD_EFER, %ecx
	rdmsr
	btsl	$8, %eax
	wrmsr

	movl	save_cr0, %eax
	movl	%eax, %cr0
	jmp	enter_paging
enter_paging:
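
	/*
	 * The jmp above is the customary branch right after setting CR0.PG,
	 * discarding anything fetched while paging was off.
	 */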


	/*
	 * transition back to 64 bit mode
	 */
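	/*
	 * With CR0.PG back on and EFER.LME set we are in compatibility mode;
	 * loading %cs with a 64 bit code selector via lret completes the
	 * switch back to 64 bit execution.
	 */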
	pushl	$B64CODE_SEL
	pushl	$longmode
	lret
longmode:
	.code64
	/*
	 * restore system and segment registers
	 */
	lgdt	save_gdt
	lidt	save_idt

	/*
	 * Before loading the task register we need to reset the busy bit
	 * in its corresponding GDT descriptor, since ltr faults on a TSS
	 * descriptor that is already marked busy. The busy bit is the 2nd
	 * bit in the 5th byte of the descriptor.
	 */
	movzwq	save_tr, %rax
	addq	save_gdt+2, %rax
	btcl	$1, 5(%rax)
	ltr	save_tr
	movw	save_ds, %ds
	movw	save_ss, %ss
	movw	save_es, %es
	movw	save_fs, %fs
	movw	save_gs, %gs

	pushq	save_cs
	pushq	$.newcs
	lretq
.newcs:

	/*
	 * restore the hidden kernel segment base register values
	 */
	movl	save_fsbase, %eax
	movl	save_fsbase + 4, %edx
	movl	$MSR_AMD_FSBASE, %ecx
	wrmsr

	movl	save_gsbase, %eax
	movl	save_gsbase + 4, %edx
	movl	$MSR_AMD_GSBASE, %ecx
	wrmsr

	movl	save_kgsbase, %eax
	movl	save_kgsbase + 4, %edx
	movl	$MSR_AMD_KGSBASE, %ecx
	wrmsr

	movq	save_cr8, %rax
	cmpq	$0, %rax
	je	1f
	movq	%rax, %cr8
1:

	/*
	 * copy results to caller's location, then restore remaining registers
	 */
	movq    save_rsi, %rdi
	movq	%rsp, %rsi
	movq	$18, %rcx
	rep
	movsb
	movw	18(%rsp), %ax
	andq	$0xffff, %rax
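
	/*
	 * %ax now holds the flags image pushed right after the BIOS int;
	 * BIOS services conventionally report failure via the carry flag.
	 */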
	movq    save_r12, %r12
	movq    save_r13, %r13
	movq    save_r14, %r14
	movq    save_r15, %r15
	movq    save_rbx, %rbx
	movq    save_rbp, %rbp
	movq    save_rsp, %rsp
	ret


/*
 * Caller's registers to restore
 */
	.align 4
save_esi:
	.long	0
save_edi:
	.long	0
save_ebx:
	.long	0
save_ebp:
	.long	0
save_esp:
	.long	0

	.align 8
save_rsi:
	.quad	0
save_rbx:
	.quad	0
save_rbp:
	.quad	0
save_rsp:
	.quad	0
save_r12:
	.quad	0
save_r13:
	.quad	0
save_r14:
	.quad	0
save_r15:
	.quad	0
save_kgsbase:
	.quad	0
save_gsbase:
	.quad	0
save_fsbase:
	.quad	0
save_cr8:
	.quad	0

save_idt:
	.quad	0
	.quad	0

save_gdt:
	.quad	0
	.quad	0

save_cr0:
	.quad	0
save_cr3:
	.quad	0
save_cr4:
	.quad	0
save_cs:
	.quad	0
save_ss:
	.value	0
save_ds:
	.value	0
save_es:
	.value	0
save_fs:
	.value	0
save_gs:
	.value	0
save_tr:
	.value	0

idt_info:
	.value 0x3ff
	.quad 0


/*
 * We need to trampoline through a GDT we have in low memory.
 */
#include "../boot/boot_gdt.s"