/*	$NetBSD: acpi_wakeup_low.S,v 1.10 2018/07/14 14:29:40 maxv Exp $	*/

/*
 * Copyright (c) 2007 Joerg Sonnenberger <joerg@netbsd.org>
 * Copyright (c) 2001 Takanori Watanabe <takawata@jp.freebsd.org>
 * Copyright (c) 2001 Mitsuru IWASAKI <iwasaki@jp.freebsd.org>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include "assym.h"
#include <machine/asm.h>
#include <machine/segments.h>
#include <machine/specialreg.h>

	.text

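/*
 * acpi_md_sleep_exit: resume path.  This is entered from the ACPI
 * wakeup code rather than by a normal call; %r8 must point at the
 * struct cpu_info whose ACPI_SUSPEND_* fields were filled in by
 * acpi_md_sleep_prepare() below (the wakeup code is expected to
 * arrange this before jumping here).  It restores the descriptor
 * tables, MSRs, control registers and callee-saved registers, then
 * returns 0 to the original caller of acpi_md_sleep_prepare().
 */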
	.p2align 2, 0x90
ENTRY(acpi_md_sleep_exit)
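	/* Switch back to the GDT that was saved at suspend time. */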
	lgdt	ACPI_SUSPEND_GDT(%r8)

	/* Reload fixed descriptors for new GDT */
	movw	$GSEL(GUDATA_SEL, SEL_UPL),%ax
	movw	%ax,%ds
	movw	%ax,%es
	movw	$GSEL(GDATA_SEL, SEL_KPL),%ax
	movw	%ax,%ss

	/*
	 * FS and GS are driven by MSRs, so use NULL for them. If we're
	 * returning to a 32bit LWP, %fs/%gs will be restored in
	 * INTRFASTEXIT.
	 */
	xorw	%ax,%ax
	movw	%ax,%fs
	movw	%ax,%gs

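	/*
	 * Restore EFER and the FS/GS/KERNELGSBASE base MSRs.  wrmsr
	 * writes %edx:%eax into the MSR selected by %ecx.
	 */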
	movl	$MSR_EFER,%ecx
	movl	ACPI_SUSPEND_EFER(%r8),%eax
	movl	$0,%edx
	wrmsr

	movl	$MSR_FSBASE,%ecx
	movl	ACPI_SUSPEND_FS(%r8),%eax
	movl	ACPI_SUSPEND_FS+4(%r8),%edx
	wrmsr

	movl	$MSR_GSBASE,%ecx
	movl	ACPI_SUSPEND_GS(%r8),%eax
	movl	ACPI_SUSPEND_GS+4(%r8),%edx
	wrmsr

	movl	$MSR_KERNELGSBASE,%ecx
	movl	ACPI_SUSPEND_KGS(%r8),%eax
	movl	ACPI_SUSPEND_KGS+4(%r8),%edx
	wrmsr

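	/*
	 * Restore the control registers.  %cr3 reinstalls the saved
	 * page-table root; %cr0 is written last.
	 */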
	movq	ACPI_SUSPEND_CR8(%r8),%rax
	movq	%rax,%cr8
	movq	ACPI_SUSPEND_CR4(%r8),%rax
	movq	%rax,%cr4
	movq	ACPI_SUSPEND_CR3(%r8),%rax
	movq	%rax,%cr3
	movq	ACPI_SUSPEND_CR2(%r8),%rax
	movq	%rax,%cr2
	movq	ACPI_SUSPEND_CR0(%r8),%rax
	movq	%rax,%cr0

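	/*
	 * Short forward jump: the traditional idiom to flush prefetched
	 * instructions after the control registers have been reloaded.
	 */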
	jmp	1f
1:

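	/*
	 * The saved TSS descriptor in the GDT is marked busy; clear the
	 * busy bit (0x0200 in the dword at offset 4 of the descriptor)
	 * so that ltr does not fault, then reload the task register,
	 * LDT and IDT.
	 */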
	movq	CPUVAR(GDT),%rax
	movzwq	ACPI_SUSPEND_TR(%r8),%rdx
	andq	$~0x0200,4(%rax,%rdx,1)

	ltr	%dx
	lldt	ACPI_SUSPEND_LDT(%r8)
	lidt	ACPI_SUSPEND_IDT(%r8)

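	/* Restore the saved stack pointer and callee-saved registers. */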
	movq	ACPI_SUSPEND_REG+(0*8)(%r8),%rsp
	movq	ACPI_SUSPEND_REG+(1*8)(%r8),%rbx
	movq	ACPI_SUSPEND_REG+(2*8)(%r8),%rbp
	movq	ACPI_SUSPEND_REG+(3*8)(%r8),%r12
	movq	ACPI_SUSPEND_REG+(4*8)(%r8),%r13
	movq	ACPI_SUSPEND_REG+(5*8)(%r8),%r14
	movq	ACPI_SUSPEND_REG+(6*8)(%r8),%r15

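	/*
	 * Return 0 (success) to the original caller of
	 * acpi_md_sleep_prepare(), restoring its RFLAGS first.
	 */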
	xorq	%rax,%rax

	pushq	ACPI_SUSPEND_REG+(7*8)(%r8)
	popfq
	ret
END(acpi_md_sleep_exit)

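/*
 * acpi_md_sleep_prepare: save the CPU context that does not survive an
 * ACPI sleep state into this CPU's cpu_info (the ACPI_SUSPEND_* fields),
 * then call acpi_md_sleep_enter() to actually enter the sleep state.
 * On resume, execution continues in acpi_md_sleep_exit() above, which
 * returns 0 on our behalf; if acpi_md_sleep_enter() itself returns, the
 * sleep attempt failed and we return -1.
 */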
	.p2align 2, 0x90
ENTRY(acpi_md_sleep_prepare)
	movq	CPUVAR(SELF),%r8
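	/* Save the callee-saved registers (the C ABI lets us clobber the rest). */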
	movq	%rbx,ACPI_SUSPEND_REG+(1*8)(%r8)
	movq	%rbp,ACPI_SUSPEND_REG+(2*8)(%r8)
	movq	%r12,ACPI_SUSPEND_REG+(3*8)(%r8)
	movq	%r13,ACPI_SUSPEND_REG+(4*8)(%r8)
	movq	%r14,ACPI_SUSPEND_REG+(5*8)(%r8)
	movq	%r15,ACPI_SUSPEND_REG+(6*8)(%r8)

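	/* Save the control registers. */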
	movq	%cr0,%rax
	movq	%rax,ACPI_SUSPEND_CR0(%r8)
	movq	%cr2,%rax
	movq	%rax,ACPI_SUSPEND_CR2(%r8)
	movq	%cr3,%rax
	movq	%rax,ACPI_SUSPEND_CR3(%r8)
	movq	%cr4,%rax
	movq	%rax,ACPI_SUSPEND_CR4(%r8)
	movq	%cr8,%rax
	movq	%rax,ACPI_SUSPEND_CR8(%r8)

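	/* Save RFLAGS and the stack pointer. */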
	pushfq
	popq	ACPI_SUSPEND_REG+(7*8)(%r8)

	movq	%rsp,ACPI_SUSPEND_REG+(0*8)(%r8)

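	/*
	 * Save the segment-base MSRs and EFER.  rdmsr reads the MSR
	 * selected by %ecx into %edx:%eax.
	 */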
	movl	$MSR_FSBASE,%ecx
	rdmsr
	movl	%eax,ACPI_SUSPEND_FS(%r8)
	movl	%edx,ACPI_SUSPEND_FS+4(%r8)

	movl	$MSR_GSBASE,%ecx
	rdmsr
	movl	%eax,ACPI_SUSPEND_GS(%r8)
	movl	%edx,ACPI_SUSPEND_GS+4(%r8)

	movl	$MSR_KERNELGSBASE,%ecx
	rdmsr
	movl	%eax,ACPI_SUSPEND_KGS(%r8)
	movl	%edx,ACPI_SUSPEND_KGS+4(%r8)

	movl	$MSR_EFER,%ecx
	rdmsr
	movl	%eax,ACPI_SUSPEND_EFER(%r8)

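	/* Save the descriptor table pointers and the LDT/task register selectors. */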
	sgdt	ACPI_SUSPEND_GDT(%r8)
	sidt	ACPI_SUSPEND_IDT(%r8)
	sldt	ACPI_SUSPEND_LDT(%r8)
	str	ACPI_SUSPEND_TR(%r8)

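	/*
	 * Hand off to acpi_md_sleep_enter(), which performs the actual
	 * transition into the ACPI sleep state.  On a successful resume,
	 * control re-enters the kernel via acpi_md_sleep_exit() above
	 * rather than returning here.
	 */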
	call	acpi_md_sleep_enter
	/* acpi_md_sleep_enter only returns on failure. */
	movl	$-1,%eax
	ret
END(acpi_md_sleep_prepare)
