/* xref: /minix3/external/bsd/llvm/dist/clang/lib/Headers/mm_malloc.h (revision f4a2713ac843a11c696ec80c0a5e3e5d80b4d338) */
/*===---- mm_malloc.h - Allocating and Freeing Aligned Memory Blocks -------===
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 *===-----------------------------------------------------------------------===
 */
23*f4a2713aSLionel Sambuc 
24*f4a2713aSLionel Sambuc #ifndef __MM_MALLOC_H
25*f4a2713aSLionel Sambuc #define __MM_MALLOC_H
26*f4a2713aSLionel Sambuc 
#include <stdlib.h>

#ifdef _WIN32
#include <malloc.h>
#else
#ifndef __cplusplus
/* C mode: declare posix_memalign ourselves so this header works even when
   feature-test macros kept <stdlib.h> from exposing the prototype. */
extern int posix_memalign(void **__memptr, size_t __alignment, size_t __size);
#else
// Some systems (e.g. those with GNU libc) declare posix_memalign with an
// exception specifier. Via an "egregious workaround" in
// Sema::CheckEquivalentExceptionSpec, Clang accepts the following as a valid
// redeclaration of glibc's declaration.
extern "C" int posix_memalign(void **__memptr, size_t __alignment, size_t __size);
#endif
#endif

#if !(defined(_WIN32) && defined(_mm_malloc))
/**
 * Allocate __size bytes of memory aligned to at least __align bytes.
 *
 * __align should be 1 or a power of two; an alignment of 1 degenerates to a
 * plain malloc().  Returns a pointer to the allocation, or a null pointer on
 * failure (including an alignment the underlying allocator rejects).  Memory
 * obtained from this function must be released with _mm_free().
 */
static __inline__ void *__attribute__((__always_inline__, __nodebug__,
                                       __malloc__))
_mm_malloc(size_t __size, size_t __align)
{
  if (__align == 1) {
    return malloc(__size);
  }

  /* posix_memalign requires the alignment to be at least sizeof(void *),
     so round small power-of-two alignments up to that minimum. */
  if (!(__align & (__align - 1)) && __align < sizeof(void *))
    __align = sizeof(void *);

  void *__mallocedMemory;
#if defined(__MINGW32__)
  __mallocedMemory = __mingw_aligned_malloc(__size, __align);
#elif defined(_WIN32)
  __mallocedMemory = _aligned_malloc(__size, __align);
#else
  if (posix_memalign(&__mallocedMemory, __align, __size))
    return 0;
#endif

  return __mallocedMemory;
}

/**
 * Release memory previously obtained from _mm_malloc().
 *
 * On Windows the memory comes from _aligned_malloc (or
 * __mingw_aligned_malloc under MinGW), which must be paired with the
 * matching aligned free routine -- passing such a pointer to plain free()
 * is undefined behavior, so each platform branch mirrors the allocator
 * used in _mm_malloc above.
 */
static __inline__ void __attribute__((__always_inline__, __nodebug__))
_mm_free(void *__p)
{
#if defined(__MINGW32__)
  __mingw_aligned_free(__p);
#elif defined(_WIN32)
  _aligned_free(__p);
#else
  free(__p);
#endif
}
#endif
74*f4a2713aSLionel Sambuc 
75*f4a2713aSLionel Sambuc #endif /* __MM_MALLOC_H */
76