/* Copyright (C) 2004-2018 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   Under Section 7 of GPL version 3, you are granted additional
   permissions described in the GCC Runtime Library Exception, version
   3.1, as published by the Free Software Foundation.

   You should have received a copy of the GNU General Public License and
   a copy of the GCC Runtime Library Exception along with this program;
   see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _MM_MALLOC_H_INCLUDED
#define _MM_MALLOC_H_INCLUDED

#include <stdlib.h>
#include <errno.h>
static __inline__ void *
_mm_malloc (size_t __size, size_t __align)
{
  void * __malloc_ptr;
  void * __aligned_ptr;

  /* Error if align is not a power of two.  */
  if (__align & (__align - 1))
    {
      errno = EINVAL;
      return ((void *) 0);
    }
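  /* Illustrative note: a power of two has exactly one bit set, so
     __align & (__align - 1) is zero for, say, 16 (16 & 15 == 0) but
     nonzero for 24 (24 & 23 == 16).  A zero __align also passes this
     test and is simply raised to the minimum alignment below.  */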

  if (__size == 0)
    return ((void *) 0);

  /* Assume the malloc'd pointer is aligned at least to sizeof (void *).
     If necessary, add another sizeof (void *) to store the value
     returned by malloc.  Effectively this enforces a minimum alignment
     of 2 * sizeof (void *).  */
  if (__align < 2 * sizeof (void *))
    __align = 2 * sizeof (void *);
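  /* For example, on an LP64 target where sizeof (void *) is 8, a
     request for 4- or 8-byte alignment is raised to 16 here, which
     guarantees room below the aligned block for the saved pointer.  */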

  __malloc_ptr = malloc (__size + __align);
  if (!__malloc_ptr)
    return ((void *) 0);

  /* Align.  The rounding leaves at least sizeof (void *) bytes of
     space between the malloc'd pointer and the aligned pointer.  */
  __aligned_ptr = (void *) (((size_t) __malloc_ptr + __align)
                            & ~((size_t) (__align) - 1));

  /* Store the original pointer just before the aligned pointer.  */
  ((void **) __aligned_ptr)[-1] = __malloc_ptr;
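  /* Worked example (illustrative): with __align == 32, if malloc
     returns 0x1008 then (0x1008 + 32) & ~31 == 0x1020, so the caller
     sees 0x1020 and, on an LP64 target, the saved malloc pointer sits
     in the slack at 0x1018.  */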

  return __aligned_ptr;
}

static __inline__ void
_mm_free (void *__aligned_ptr)
{
  if (__aligned_ptr)
    free (((void **) __aligned_ptr)[-1]);
}
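
/* Usage sketch (illustrative only, not a definitive part of this
   header): request a 64-byte-aligned buffer and release it with the
   matching _mm_free:

     float *buf = (float *) _mm_malloc (1024 * sizeof (float), 64);
     if (buf)
       {
         ...use buf...
         _mm_free (buf);
       }

   Memory obtained from _mm_malloc must be released with _mm_free, not
   with free, because the pointer handed to the caller is not the
   pointer originally returned by malloc.  */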

#endif /* _MM_MALLOC_H_INCLUDED */