/*	$NetBSD: memset.S,v 1.5 2014/05/22 16:47:31 pooka Exp $	*/

/*-
 * Copyright (c) 2009 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by David Laight.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>

#if defined(LIBC_SCCS)
	RCSID("$NetBSD: memset.S,v 1.5 2014/05/22 16:47:31 pooka Exp $")
#endif

#ifndef _KERNEL
/*
 * void bzero(void *buf, size_t len)
 * ABI:   SysV AMD64 (AT&T syntax)
 * In:    %rdi = buf, %rsi = len
 * Clobb: %rax, %rcx, %rdx, %rdi, %r9, flags
 *
 * Tail-merges into memset: set up a zero fill value and the length in
 * the registers memset's common code expects, then join at label 1.
 */

ENTRY(bzero)
	mov	%rsi,%rdx		/* length (common code tests it in %rdx) */
	xor	%eax,%eax		/* fill value = 0 (dep-breaking zero idiom) */
	jmp	1f			/* join common fill code in memset below */
#endif

/*
 * void *memset(void *buf, int c, size_t len)
 * ABI:   SysV AMD64 (AT&T syntax)
 * In:    %rdi = buf, %sil = fill byte, %rdx = len
 * Out:   %rax = buf (original buffer address)
 * Clobb: %rcx, %rdx, %rsi, %rdi, %r9, flags
 */

ENTRY(memset)
	movzbq	%sil,%rax		/* zero-extend fill byte to 64 bits */
	mov	%rdx,%rsi		/* keep a copy of the length */
	mov	$0x0101010101010101,%r9
	imul	%r9,%rax		/* replicate fill byte into all 8 bytes */

1:	/* bzero (above) joins here with %rax = 0 and %rdx = %rsi = length */
	mov	%rdi,%r9		/* save buffer address for the return value */
	or	%edi,%edx		/* address | length: one combined alignment test */
	mov	%rsi,%rcx		/* count for rep stos */
	cmp	$7,%rsi
	jbe	10f			/* jump if short fill */
	test	$7,%dl			/* low 3 bits of (address | length) */
	jnz	20f			/* jump if address or length misaligned */

/* Target aligned and length multiple of 8 */
2:
	shr	$3,%rcx			/* byte count -> qword count */
	rep	stosq
	mov	%r9,%rax		/* return original buffer address */
	ret

/*
 * Short transfer, any faffing here will generate mispredicted branches.
 * So we keep it simple.
 */
10:	rep	stosb
	mov	%r9,%rax		/* return original buffer address */
	ret

/*
 * Buffer or length misaligned (length >= 8 when we get here).
 * Write the pattern to the first and last (possibly unaligned) words of
 * the buffer, then fill the middle via the aligned path; the three
 * regions overlap, so every byte is written at least once.
 * (This writes to some bytes more than once - possibly three times!.)
 *
 * NOTE(review): the adjustment below adds (addr & 7) to the pointer,
 * which only lands on an 8-byte boundary when that offset is 0 or 4;
 * the middle "rep stosq" can therefore still run misaligned.  The
 * overlapping head/tail stores keep the result correct in all cases,
 * so at worst this is a performance quirk -- confirm before "fixing".
 */
20:
	mov	%rax,(%rdi)		/* fill first word (unaligned store ok) */
	movzbq	%dil,%rdx		/* low address byte for alignment */
	mov	%rax,-8(%rcx,%rdi)	/* fill last word of the buffer */
	and	$7,%dl			/* offset in word */
	sub	%rdx,%rcx		/* adjust length ... */
	add	%rdx,%rdi		/* ... and target */
	jmp	2b
END(memset)

#ifndef _KERNEL
END(bzero)				/* close bzero, defined above memset */
#endif
