/*	$NetBSD: h_mem_assist_asm.S,v 1.10 2020/12/27 20:56:14 reinoud Exp $	*/

/*
 * Copyright (c) 2018-2020 Maxime Villard, m00nbsd.net
 * All rights reserved.
 *
 * This code is part of the NVMM hypervisor.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

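/*
 * Guest code blobs for the libnvmm h_mem_assist test. Each testN_begin/
 * testN_end pair delimits a snippet that the host side copies into a guest
 * and runs, presumably to exercise NVMM's emulation of memory-assisted
 * (MMIO-style) accesses around guest address 0x1000.
 */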
	.globl	test1_begin, test1_end
	.globl	test2_begin, test2_end
	.globl	test3_begin, test3_end
	.globl	test4_begin, test4_end
	.globl	test5_begin, test5_end
	.globl	test6_begin, test6_end
	.globl	test7_begin, test7_end
	.globl	test8_begin, test8_end
	.globl	test9_begin, test9_end
	.globl	test10_begin, test10_end
	.globl	test11_begin, test11_end
	.globl	test12_begin, test12_end
	.globl	test13_begin, test13_end
	.globl	test14_begin, test14_end
	.globl	test_64bit_15_begin, test_64bit_15_end
	.globl	test_64bit_16_begin, test_64bit_16_end
	.globl	test17_begin, test17_end
	.text
	.code64

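/*
 * End-of-test marker: an RDMSR of a bogus MSR forces a VM exit, which the
 * host side presumably uses to detect that the snippet has finished.
 */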
#define TEST_END	\
	movq	$0xFFFFFFFFFFFFFFFF,%rcx;	\
	rdmsr	;

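/* test1: MOV to and from memory, direct and base+index*scale addressing. */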
	.align	64
test1_begin:
	movq	$0x1000,%rax
	movq	$0x1000,%rbp

	movq	$0x1000,(%rax)

	movq	$1,%r11
	movq	$0x2000,(%rax,%r11,8)

	movq	(%rbp),%r8
	movq	8(%rbp),%rbx
	addq	%rbx,%r8
	movq	%r8,(%rbp)
	movb	$4,(%rbp)

	TEST_END
test1_end:

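/* test2: OR with a memory operand, byte/word/quad sizes. */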
	.align	64
test2_begin:
	movq	$0x1000,%rax

	movq	$0x1000,(%rax)
	movq	$0x00FF,%rbx
	orb	%bl,(%rax)
	movq	$0x0400,%rcx
	orw	%cx,(%rax)

	movq	$0x0200,%rcx
	orq	(%rax),%rcx
	movq	%rcx,(%rax)

	TEST_END
test2_end:

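/* test3: AND to memory, quad and byte sizes. */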
	.align	64
test3_begin:
	movq	$0x1000,%rax

	movq	$0x1FFF,(%rax)
	movq	$0x1FF0,%rbx
	andq	%rbx,(%rax)
	movq	$0x10C1,%rcx
	andb	%cl,(%rax)

	TEST_END
test3_end:

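/* test4: XOR to memory, quad and word sizes. */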
	.align	64
test4_begin:
	movq	$0x1000,%rax

	movq	$0x1FFF,(%rax)
	movq	$0x1FF0,%rbx
	xorq	%rbx,(%rax)
	movq	$0x10C0,%rcx
	xorw	%cx,(%rax)

	TEST_END
test4_end:

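/* test5: memory access through 32-bit registers (address-size override) in 64-bit mode. */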
	.align	64
test5_begin:
	movq	$0xFFFFFFFF00001000,%rax

	movq	$0x1FFF,(%eax)
	movb	$0,(%eax,%ebx,1)

	TEST_END
test5_end:

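/* test6: MOVABS (moffs) forms, quad and byte sizes. */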
	.align	64
test6_begin:
	movq	$0xFFA0,%rax
	movabs	%rax,0x1000

	movabs	0x1000,%al
	orb	$0x0B,%al
	movabs	%al,0x1000

	TEST_END
test6_end:

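/* test7: REP STOSB followed by a single STOSW. */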
	.align	64
test7_begin:
	movq	$0x56,%rax

	movq	$1,%rcx
	movq	$0x1000,%rdi
	rep	stosb

	movq	$0x1234,%rax
	stosw

	TEST_END
test7_end:

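/* test8: LODSW/LODSB, with the loaded values written back to memory. */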
	.align	64
test8_begin:
	movq	$0x1008,%rsi
	movq	$0x12345678,(%rsi)

	movq	$0x1000,%rdi

	lodsw
	movw	%ax,(%rdi)
	addq	$2,%rdi

	lodsb
	movb	%al,(%rdi)
	addq	$1,%rdi

	lodsb
	movb	%al,(%rdi)
	addq	$2,%rdi

	TEST_END
test8_end:

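/* test9: REP MOVSB and REP MOVSW. */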
	.align	64
test9_begin:
	movq	$0x1000,%rax

	movq	$0x12345678,8(%rax)

	movq	$0x1008,%rsi
	movq	$0x1000,%rdi

	movq	$4,%rcx
	rep movsb

	movq	$2,%rcx
	rep movsw

	TEST_END
test9_end:

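/* test10: MOVZBL (zero-extending byte load) from memory. */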
	.align	64
test10_begin:
	movq	$0x1000,%rax
	movq	$0x12345678,(%rax)

	movq	$0xFFFFFFFFFFFFFFFF,%rbx
	movzbl	(%rax),%ebx
	movq	%rbx,(%rax)

	TEST_END
test10_end:

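/* test11: MOVZWQ (zero-extending word load) from memory. */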
	.align	64
test11_begin:
	movq	$0x1000,%rax
	movq	$0x12345678,(%rax)

	movq	$0xFFFFFFFFFFFFFFFF,%rbx
	movzwq	(%rax),%rbx
	movq	%rbx,(%rax)

	TEST_END
test11_end:

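/* test12: CMP against memory in byte/word/long/quad sizes; stores 1 on success, 0 on failure. */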
	.align	64
test12_begin:
	movq	$0x1000,%rax
	movq	$0xFFFFFFFFF2345678,(%rax)

	cmpb	$0x78,(%rax)
	jne	.L12_failure
	cmpb	$0x77,(%rax)
	jl	.L12_failure
	cmpb	$0x79,(%rax)
	jg	.L12_failure

	cmpw	$0x5678,(%rax)
	jne	.L12_failure
	cmpw	$0x5677,(%rax)
	jl	.L12_failure
	cmpw	$0x5679,(%rax)
	jg	.L12_failure

	cmpl	$0xF2345678,(%rax)
	jne	.L12_failure
	cmpl	$0xF2345677,(%rax)
	jl	.L12_failure
	cmpl	$0xF2345679,(%rax)
	jg	.L12_failure

	cmpq	$0xFFFFFFFFF2345678,(%rax)
	jne	.L12_failure
	cmpq	$0xFFFFFFFFF2345677,(%rax)
	jl	.L12_failure
	cmpq	$0xFFFFFFFFF2345679,(%rax)
	jg	.L12_failure

.L12_success:
	movq	$1,(%rax)
	TEST_END
.L12_failure:
	movq	$0,(%rax)
	TEST_END
test12_end:

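/* test13: SUB with a memory operand, byte/word/quad sizes. */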
	.align	64
test13_begin:
	movq	$0x1000,%rax
	movq	$0x000000001000A0FF,(%rax)

	movq	$0xFFFF,%rcx
	subb	%cl,(%rax)

	movq	$0xA000,%rcx
	subw	%cx,(%rax)

	movq	$0x0000000F1000A0FF,%rcx
	subq	(%rax),%rcx

	movq	%rcx,(%rax)

	TEST_END
test13_end:

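/* test14: TEST against memory in byte/word/long sizes; stores 1 on success, 0 on failure. */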
	.align	64
test14_begin:
	movq	$0x1000,%rax
	movq	$0xA0FF,(%rax)

	testb	$0x0F,(%rax)
	jz	.L14_failure

	testw	$0x0F00,(%rax)
	jnz	.L14_failure

	testl	$0xA000,(%rax)
	jz	.L14_failure

.L14_success:
	movq	$1,(%rax)
	TEST_END
.L14_failure:
	movq	$0,(%rax)
	TEST_END
test14_end:

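/* test_64bit_15: XCHG with memory, quad/word/byte sizes. */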
	.align	64
test_64bit_15_begin:
	movq	$0x1000,%rax
	movq	$0x120000,%rbx
	movq	$0x003400,%rcx
	movq	$0x000056,%rdx

	xchgq	%rbx,(%rax)
	xchgw	(%rax),%cx
	xchgb	%dl,(%rax)

	TEST_END
test_64bit_15_end:

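/* test_64bit_16: XCHGQ through a 32-bit address (address-size override). */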
	.align	64
test_64bit_16_begin:
	movq	$0x1000,%rax
	movq	$0x000000,%rbx
	movq	$0x000000,%rcx
	movq	$0x000000,%rdx

	movq	$0x123456,(%rax)
	xchgq	%rbx,(%eax)
	movq	$0,(%rax)
	xchgq	%rbx,(%eax)

	TEST_END
test_64bit_16_end:

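/* test17: REPE CMPSQ over two buffers; the leftover %rcx is written back to memory. */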
	.align	64
test17_begin:
	movq	$0x1000,%rax
	movq	$0xdeadbeefcafe, %rbx
	movq	%rbx,0x00(%rax)
	movq	%rbx,0x08(%rax)

	movq	$0xdeadbeefcafe, %rbx
	movq	%rbx,0x20(%rax)
	movq	$0, %rbx
	movq	%rbx,0x28(%rax)

	movq	$0x1000,%rsi
	movq	$0x1020,%rdi

	movq	$3,%rcx
	repe cmpsq

	movq	%rcx,(%rax)
	TEST_END
test17_end:

/* -------------------------------------------------------------------------- */

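/*
 * 16-bit (.code16) snippets, exercising the 16-bit addressing forms of the
 * emulated instructions.
 */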
	.globl	test_16bit_1_begin, test_16bit_1_end
	.globl	test_16bit_2_begin, test_16bit_2_end
	.globl	test_16bit_3_begin, test_16bit_3_end
	.globl	test_16bit_4_begin, test_16bit_4_end
	.globl	test_16bit_5_begin, test_16bit_5_end
	.globl	test_16bit_6_begin, test_16bit_6_end

#define TEST16_END	\
	rdmsr

	.code16

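/* test_16bit_1: byte store through (%bx). */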
	.align	64
test_16bit_1_begin:
	movw	$0x10f1,%bx
	movw	$0x123,%dx

	movb	%dl,(%bx)

	TEST16_END
test_16bit_1_end:

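/* test_16bit_2: word store through base+index (%bx,%di). */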
	.align	64
test_16bit_2_begin:
	movw	$0x10f1,%bx
	movw	$2,%di
	movw	$0x123,%dx

	movw	%dx,(%bx,%di)

	TEST16_END
test_16bit_2_end:

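/* test_16bit_3: word store through (%bp,%si) with a displacement. */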
	.align	64
test_16bit_3_begin:
	movw	$0x10f1,%bp
	movw	$2,%si
	movw	$0x678,%dx

	movw	%dx,-2(%bp,%si)

	TEST16_END
test_16bit_3_end:

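/* test_16bit_4: mixed operand-size and address-size overrides (see inline comments). */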
	.align	64
test_16bit_4_begin:
	movw	$0x10f0,%bp
	movw	$2,%si
	movw	$2+4+4,%di
	movw	$0xFFFF,%dx
	movl	$0x0001,%eax
	movl	$0x0010,%ebx
	movl	$0x1000,%ecx

	movw	%dx,4(%bp,%si)		/* 16bit opr 16bit adr */
	andl	%eax,4(%bp,%si)		/* 32bit opr 16bit adr */
	orw	%bx,4(%ebp,%esi)	/* 16bit opr 32bit adr */
	orl	%ecx,-4(%bp,%di)	/* 32bit opr 16bit adr, negative */

	TEST16_END
test_16bit_4_end:

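/* test_16bit_5: byte store to an absolute (moffs) address. */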
	.align	64
test_16bit_5_begin:
	movb	$0x12,0x1234

	TEST16_END
test_16bit_5_end:

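/* test_16bit_6: XCHG with memory, word and byte sizes. */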
	.align	64
test_16bit_6_begin:
	movw	$0x1234,%bp
	movw	$4,%di
	movw	$0x1200,%bx
	movw	$0x0034,%cx

	xchgw	%bx,(%bp)
	xchgb	-4(%bp,%di),%cl

	TEST16_END
test_16bit_6_end:
433