1*a28cd43dSSascha Wildner /* 2*a28cd43dSSascha Wildner * Copyright (c) 2016-2020, Yann Collet, Facebook, Inc. 3*a28cd43dSSascha Wildner * All rights reserved. 4*a28cd43dSSascha Wildner * 5*a28cd43dSSascha Wildner * This source code is licensed under both the BSD-style license (found in the 6*a28cd43dSSascha Wildner * LICENSE file in the root directory of this source tree) and the GPLv2 (found 7*a28cd43dSSascha Wildner * in the COPYING file in the root directory of this source tree). 8*a28cd43dSSascha Wildner * You may select, at your option, one of the above-listed licenses. 9*a28cd43dSSascha Wildner */ 10*a28cd43dSSascha Wildner 11*a28cd43dSSascha Wildner #ifndef ZSTD_COMPILER_H 12*a28cd43dSSascha Wildner #define ZSTD_COMPILER_H 13*a28cd43dSSascha Wildner 14*a28cd43dSSascha Wildner /*-******************************************************* 15*a28cd43dSSascha Wildner * Compiler specifics 16*a28cd43dSSascha Wildner *********************************************************/ 17*a28cd43dSSascha Wildner /* force inlining */ 18*a28cd43dSSascha Wildner 19*a28cd43dSSascha Wildner #if !defined(ZSTD_NO_INLINE) 20*a28cd43dSSascha Wildner #if (defined(__GNUC__) && !defined(__STRICT_ANSI__)) || defined(__cplusplus) || defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L /* C99 */ 21*a28cd43dSSascha Wildner # define INLINE_KEYWORD inline 22*a28cd43dSSascha Wildner #else 23*a28cd43dSSascha Wildner # define INLINE_KEYWORD 24*a28cd43dSSascha Wildner #endif 25*a28cd43dSSascha Wildner 26*a28cd43dSSascha Wildner #if defined(__GNUC__) || defined(__ICCARM__) 27*a28cd43dSSascha Wildner # define FORCE_INLINE_ATTR __attribute__((always_inline)) 28*a28cd43dSSascha Wildner #elif defined(_MSC_VER) 29*a28cd43dSSascha Wildner # define FORCE_INLINE_ATTR __forceinline 30*a28cd43dSSascha Wildner #else 31*a28cd43dSSascha Wildner # define FORCE_INLINE_ATTR 32*a28cd43dSSascha Wildner #endif 33*a28cd43dSSascha Wildner 34*a28cd43dSSascha Wildner #else 35*a28cd43dSSascha Wildner 36*a28cd43dSSascha Wildner #define INLINE_KEYWORD 37*a28cd43dSSascha Wildner #define FORCE_INLINE_ATTR 38*a28cd43dSSascha Wildner 39*a28cd43dSSascha Wildner #endif 40*a28cd43dSSascha Wildner 41*a28cd43dSSascha Wildner /** 42*a28cd43dSSascha Wildner On MSVC qsort requires that functions passed into it use the __cdecl calling conversion(CC). 43*a28cd43dSSascha Wildner This explictly marks such functions as __cdecl so that the code will still compile 44*a28cd43dSSascha Wildner if a CC other than __cdecl has been made the default. 45*a28cd43dSSascha Wildner */ 46*a28cd43dSSascha Wildner #if defined(_MSC_VER) 47*a28cd43dSSascha Wildner # define WIN_CDECL __cdecl 48*a28cd43dSSascha Wildner #else 49*a28cd43dSSascha Wildner # define WIN_CDECL 50*a28cd43dSSascha Wildner #endif 51*a28cd43dSSascha Wildner 52*a28cd43dSSascha Wildner /** 53*a28cd43dSSascha Wildner * FORCE_INLINE_TEMPLATE is used to define C "templates", which take constant 54*a28cd43dSSascha Wildner * parameters. They must be inlined for the compiler to eliminate the constant 55*a28cd43dSSascha Wildner * branches. 56*a28cd43dSSascha Wildner */ 57*a28cd43dSSascha Wildner #define FORCE_INLINE_TEMPLATE static INLINE_KEYWORD FORCE_INLINE_ATTR 58*a28cd43dSSascha Wildner /** 59*a28cd43dSSascha Wildner * HINT_INLINE is used to help the compiler generate better code. It is *not* 60*a28cd43dSSascha Wildner * used for "templates", so it can be tweaked based on the compilers 61*a28cd43dSSascha Wildner * performance. 62*a28cd43dSSascha Wildner * 63*a28cd43dSSascha Wildner * gcc-4.8 and gcc-4.9 have been shown to benefit from leaving off the 64*a28cd43dSSascha Wildner * always_inline attribute. 65*a28cd43dSSascha Wildner * 66*a28cd43dSSascha Wildner * clang up to 5.0.0 (trunk) benefit tremendously from the always_inline 67*a28cd43dSSascha Wildner * attribute. 68*a28cd43dSSascha Wildner */ 69*a28cd43dSSascha Wildner #if !defined(__clang__) && defined(__GNUC__) && __GNUC__ >= 4 && __GNUC_MINOR__ >= 8 && __GNUC__ < 5 70*a28cd43dSSascha Wildner # define HINT_INLINE static INLINE_KEYWORD 71*a28cd43dSSascha Wildner #else 72*a28cd43dSSascha Wildner # define HINT_INLINE static INLINE_KEYWORD FORCE_INLINE_ATTR 73*a28cd43dSSascha Wildner #endif 74*a28cd43dSSascha Wildner 75*a28cd43dSSascha Wildner /* UNUSED_ATTR tells the compiler it is okay if the function is unused. */ 76*a28cd43dSSascha Wildner #if defined(__GNUC__) 77*a28cd43dSSascha Wildner # define UNUSED_ATTR __attribute__((unused)) 78*a28cd43dSSascha Wildner #else 79*a28cd43dSSascha Wildner # define UNUSED_ATTR 80*a28cd43dSSascha Wildner #endif 81*a28cd43dSSascha Wildner 82*a28cd43dSSascha Wildner /* force no inlining */ 83*a28cd43dSSascha Wildner #ifdef _MSC_VER 84*a28cd43dSSascha Wildner # define FORCE_NOINLINE static __declspec(noinline) 85*a28cd43dSSascha Wildner #else 86*a28cd43dSSascha Wildner # if defined(__GNUC__) || defined(__ICCARM__) 87*a28cd43dSSascha Wildner # define FORCE_NOINLINE static __attribute__((__noinline__)) 88*a28cd43dSSascha Wildner # else 89*a28cd43dSSascha Wildner # define FORCE_NOINLINE static 90*a28cd43dSSascha Wildner # endif 91*a28cd43dSSascha Wildner #endif 92*a28cd43dSSascha Wildner 93*a28cd43dSSascha Wildner /* target attribute */ 94*a28cd43dSSascha Wildner #ifndef __has_attribute 95*a28cd43dSSascha Wildner #define __has_attribute(x) 0 /* Compatibility with non-clang compilers. */ 96*a28cd43dSSascha Wildner #endif 97*a28cd43dSSascha Wildner #if defined(__GNUC__) || defined(__ICCARM__) 98*a28cd43dSSascha Wildner # define TARGET_ATTRIBUTE(target) __attribute__((__target__(target))) 99*a28cd43dSSascha Wildner #else 100*a28cd43dSSascha Wildner # define TARGET_ATTRIBUTE(target) 101*a28cd43dSSascha Wildner #endif 102*a28cd43dSSascha Wildner 103*a28cd43dSSascha Wildner /* Enable runtime BMI2 dispatch based on the CPU. 104*a28cd43dSSascha Wildner * Enabled for clang & gcc >=4.8 on x86 when BMI2 isn't enabled by default. 105*a28cd43dSSascha Wildner */ 106*a28cd43dSSascha Wildner #ifndef DYNAMIC_BMI2 107*a28cd43dSSascha Wildner #if ((defined(__clang__) && __has_attribute(__target__)) \ 108*a28cd43dSSascha Wildner || (defined(__GNUC__) \ 109*a28cd43dSSascha Wildner && (__GNUC__ >= 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)))) \ 110*a28cd43dSSascha Wildner && (defined(__x86_64__) || defined(_M_X86)) \ 111*a28cd43dSSascha Wildner && !defined(__BMI2__) 112*a28cd43dSSascha Wildner # define DYNAMIC_BMI2 1 113*a28cd43dSSascha Wildner #else 114*a28cd43dSSascha Wildner # define DYNAMIC_BMI2 0 115*a28cd43dSSascha Wildner #endif 116*a28cd43dSSascha Wildner #endif 117*a28cd43dSSascha Wildner 118*a28cd43dSSascha Wildner /* prefetch 119*a28cd43dSSascha Wildner * can be disabled, by declaring NO_PREFETCH build macro */ 120*a28cd43dSSascha Wildner #if defined(NO_PREFETCH) 121*a28cd43dSSascha Wildner # define PREFETCH_L1(ptr) (void)(ptr) /* disabled */ 122*a28cd43dSSascha Wildner # define PREFETCH_L2(ptr) (void)(ptr) /* disabled */ 123*a28cd43dSSascha Wildner #else 124*a28cd43dSSascha Wildner # if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_I86)) /* _mm_prefetch() is not defined outside of x86/x64 */ 125*a28cd43dSSascha Wildner # include <mmintrin.h> /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */ 126*a28cd43dSSascha Wildner # define PREFETCH_L1(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T0) 127*a28cd43dSSascha Wildner # define PREFETCH_L2(ptr) _mm_prefetch((const char*)(ptr), _MM_HINT_T1) 128*a28cd43dSSascha Wildner # elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) ) 129*a28cd43dSSascha Wildner # define PREFETCH_L1(ptr) __builtin_prefetch((ptr), 0 /* rw==read */, 3 /* locality */) 130*a28cd43dSSascha Wildner # define PREFETCH_L2(ptr) __builtin_prefetch((ptr), 0 /* rw==read */, 2 /* locality */) 131*a28cd43dSSascha Wildner # elif defined(__aarch64__) 132*a28cd43dSSascha Wildner # define PREFETCH_L1(ptr) __asm__ __volatile__("prfm pldl1keep, %0" ::"Q"(*(ptr))) 133*a28cd43dSSascha Wildner # define PREFETCH_L2(ptr) __asm__ __volatile__("prfm pldl2keep, %0" ::"Q"(*(ptr))) 134*a28cd43dSSascha Wildner # else 135*a28cd43dSSascha Wildner # define PREFETCH_L1(ptr) (void)(ptr) /* disabled */ 136*a28cd43dSSascha Wildner # define PREFETCH_L2(ptr) (void)(ptr) /* disabled */ 137*a28cd43dSSascha Wildner # endif 138*a28cd43dSSascha Wildner #endif /* NO_PREFETCH */ 139*a28cd43dSSascha Wildner 140*a28cd43dSSascha Wildner #define CACHELINE_SIZE 64 141*a28cd43dSSascha Wildner 142*a28cd43dSSascha Wildner #define PREFETCH_AREA(p, s) { \ 143*a28cd43dSSascha Wildner const char* const _ptr = (const char*)(p); \ 144*a28cd43dSSascha Wildner size_t const _size = (size_t)(s); \ 145*a28cd43dSSascha Wildner size_t _pos; \ 146*a28cd43dSSascha Wildner for (_pos=0; _pos<_size; _pos+=CACHELINE_SIZE) { \ 147*a28cd43dSSascha Wildner PREFETCH_L2(_ptr + _pos); \ 148*a28cd43dSSascha Wildner } \ 149*a28cd43dSSascha Wildner } 150*a28cd43dSSascha Wildner 151*a28cd43dSSascha Wildner /* vectorization 152*a28cd43dSSascha Wildner * older GCC (pre gcc-4.3 picked as the cutoff) uses a different syntax */ 153*a28cd43dSSascha Wildner #if !defined(__INTEL_COMPILER) && !defined(__clang__) && defined(__GNUC__) 154*a28cd43dSSascha Wildner # if (__GNUC__ == 4 && __GNUC_MINOR__ > 3) || (__GNUC__ >= 5) 155*a28cd43dSSascha Wildner # define DONT_VECTORIZE __attribute__((optimize("no-tree-vectorize"))) 156*a28cd43dSSascha Wildner # else 157*a28cd43dSSascha Wildner # define DONT_VECTORIZE _Pragma("GCC optimize(\"no-tree-vectorize\")") 158*a28cd43dSSascha Wildner # endif 159*a28cd43dSSascha Wildner #else 160*a28cd43dSSascha Wildner # define DONT_VECTORIZE 161*a28cd43dSSascha Wildner #endif 162*a28cd43dSSascha Wildner 163*a28cd43dSSascha Wildner /* Tell the compiler that a branch is likely or unlikely. 164*a28cd43dSSascha Wildner * Only use these macros if it causes the compiler to generate better code. 165*a28cd43dSSascha Wildner * If you can remove a LIKELY/UNLIKELY annotation without speed changes in gcc 166*a28cd43dSSascha Wildner * and clang, please do. 167*a28cd43dSSascha Wildner */ 168*a28cd43dSSascha Wildner #if defined(__GNUC__) 169*a28cd43dSSascha Wildner #define LIKELY(x) (__builtin_expect((x), 1)) 170*a28cd43dSSascha Wildner #define UNLIKELY(x) (__builtin_expect((x), 0)) 171*a28cd43dSSascha Wildner #else 172*a28cd43dSSascha Wildner #define LIKELY(x) (x) 173*a28cd43dSSascha Wildner #define UNLIKELY(x) (x) 174*a28cd43dSSascha Wildner #endif 175*a28cd43dSSascha Wildner 176*a28cd43dSSascha Wildner /* disable warnings */ 177*a28cd43dSSascha Wildner #ifdef _MSC_VER /* Visual Studio */ 178*a28cd43dSSascha Wildner # include <intrin.h> /* For Visual 2005 */ 179*a28cd43dSSascha Wildner # pragma warning(disable : 4100) /* disable: C4100: unreferenced formal parameter */ 180*a28cd43dSSascha Wildner # pragma warning(disable : 4127) /* disable: C4127: conditional expression is constant */ 181*a28cd43dSSascha Wildner # pragma warning(disable : 4204) /* disable: C4204: non-constant aggregate initializer */ 182*a28cd43dSSascha Wildner # pragma warning(disable : 4214) /* disable: C4214: non-int bitfields */ 183*a28cd43dSSascha Wildner # pragma warning(disable : 4324) /* disable: C4324: padded structure */ 184*a28cd43dSSascha Wildner #endif 185*a28cd43dSSascha Wildner 186*a28cd43dSSascha Wildner /*Like DYNAMIC_BMI2 but for compile time determination of BMI2 support*/ 187*a28cd43dSSascha Wildner #ifndef STATIC_BMI2 188*a28cd43dSSascha Wildner # if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_I86)) 189*a28cd43dSSascha Wildner # ifdef __AVX2__ //MSVC does not have a BMI2 specific flag, but every CPU that supports AVX2 also supports BMI2 190*a28cd43dSSascha Wildner # define STATIC_BMI2 1 191*a28cd43dSSascha Wildner # endif 192*a28cd43dSSascha Wildner # endif 193*a28cd43dSSascha Wildner #endif 194*a28cd43dSSascha Wildner 195*a28cd43dSSascha Wildner #ifndef STATIC_BMI2 196*a28cd43dSSascha Wildner #define STATIC_BMI2 0 197*a28cd43dSSascha Wildner #endif 198*a28cd43dSSascha Wildner 199*a28cd43dSSascha Wildner /* compat. with non-clang compilers */ 200*a28cd43dSSascha Wildner #ifndef __has_builtin 201*a28cd43dSSascha Wildner # define __has_builtin(x) 0 202*a28cd43dSSascha Wildner #endif 203*a28cd43dSSascha Wildner 204*a28cd43dSSascha Wildner /* compat. with non-clang compilers */ 205*a28cd43dSSascha Wildner #ifndef __has_feature 206*a28cd43dSSascha Wildner # define __has_feature(x) 0 207*a28cd43dSSascha Wildner #endif 208*a28cd43dSSascha Wildner 209*a28cd43dSSascha Wildner /* detects whether we are being compiled under msan */ 210*a28cd43dSSascha Wildner #ifndef ZSTD_MEMORY_SANITIZER 211*a28cd43dSSascha Wildner # if __has_feature(memory_sanitizer) 212*a28cd43dSSascha Wildner # define ZSTD_MEMORY_SANITIZER 1 213*a28cd43dSSascha Wildner # else 214*a28cd43dSSascha Wildner # define ZSTD_MEMORY_SANITIZER 0 215*a28cd43dSSascha Wildner # endif 216*a28cd43dSSascha Wildner #endif 217*a28cd43dSSascha Wildner 218*a28cd43dSSascha Wildner #if ZSTD_MEMORY_SANITIZER 219*a28cd43dSSascha Wildner /* Not all platforms that support msan provide sanitizers/msan_interface.h. 220*a28cd43dSSascha Wildner * We therefore declare the functions we need ourselves, rather than trying to 221*a28cd43dSSascha Wildner * include the header file... */ 222*a28cd43dSSascha Wildner #include <stddef.h> /* size_t */ 223*a28cd43dSSascha Wildner #define ZSTD_DEPS_NEED_STDINT 224*a28cd43dSSascha Wildner #include "zstd_deps.h" /* intptr_t */ 225*a28cd43dSSascha Wildner 226*a28cd43dSSascha Wildner /* Make memory region fully initialized (without changing its contents). */ 227*a28cd43dSSascha Wildner void __msan_unpoison(const volatile void *a, size_t size); 228*a28cd43dSSascha Wildner 229*a28cd43dSSascha Wildner /* Make memory region fully uninitialized (without changing its contents). 230*a28cd43dSSascha Wildner This is a legacy interface that does not update origin information. Use 231*a28cd43dSSascha Wildner __msan_allocated_memory() instead. */ 232*a28cd43dSSascha Wildner void __msan_poison(const volatile void *a, size_t size); 233*a28cd43dSSascha Wildner 234*a28cd43dSSascha Wildner /* Returns the offset of the first (at least partially) poisoned byte in the 235*a28cd43dSSascha Wildner memory range, or -1 if the whole range is good. */ 236*a28cd43dSSascha Wildner intptr_t __msan_test_shadow(const volatile void *x, size_t size); 237*a28cd43dSSascha Wildner #endif 238*a28cd43dSSascha Wildner 239*a28cd43dSSascha Wildner /* detects whether we are being compiled under asan */ 240*a28cd43dSSascha Wildner #ifndef ZSTD_ADDRESS_SANITIZER 241*a28cd43dSSascha Wildner # if __has_feature(address_sanitizer) 242*a28cd43dSSascha Wildner # define ZSTD_ADDRESS_SANITIZER 1 243*a28cd43dSSascha Wildner # elif defined(__SANITIZE_ADDRESS__) 244*a28cd43dSSascha Wildner # define ZSTD_ADDRESS_SANITIZER 1 245*a28cd43dSSascha Wildner # else 246*a28cd43dSSascha Wildner # define ZSTD_ADDRESS_SANITIZER 0 247*a28cd43dSSascha Wildner # endif 248*a28cd43dSSascha Wildner #endif 249*a28cd43dSSascha Wildner 250*a28cd43dSSascha Wildner #if ZSTD_ADDRESS_SANITIZER 251*a28cd43dSSascha Wildner /* Not all platforms that support asan provide sanitizers/asan_interface.h. 252*a28cd43dSSascha Wildner * We therefore declare the functions we need ourselves, rather than trying to 253*a28cd43dSSascha Wildner * include the header file... */ 254*a28cd43dSSascha Wildner #include <stddef.h> /* size_t */ 255*a28cd43dSSascha Wildner 256*a28cd43dSSascha Wildner /** 257*a28cd43dSSascha Wildner * Marks a memory region (<c>[addr, addr+size)</c>) as unaddressable. 258*a28cd43dSSascha Wildner * 259*a28cd43dSSascha Wildner * This memory must be previously allocated by your program. Instrumented 260*a28cd43dSSascha Wildner * code is forbidden from accessing addresses in this region until it is 261*a28cd43dSSascha Wildner * unpoisoned. This function is not guaranteed to poison the entire region - 262*a28cd43dSSascha Wildner * it could poison only a subregion of <c>[addr, addr+size)</c> due to ASan 263*a28cd43dSSascha Wildner * alignment restrictions. 264*a28cd43dSSascha Wildner * 265*a28cd43dSSascha Wildner * \note This function is not thread-safe because no two threads can poison or 266*a28cd43dSSascha Wildner * unpoison memory in the same memory region simultaneously. 267*a28cd43dSSascha Wildner * 268*a28cd43dSSascha Wildner * \param addr Start of memory region. 269*a28cd43dSSascha Wildner * \param size Size of memory region. */ 270*a28cd43dSSascha Wildner void __asan_poison_memory_region(void const volatile *addr, size_t size); 271*a28cd43dSSascha Wildner 272*a28cd43dSSascha Wildner /** 273*a28cd43dSSascha Wildner * Marks a memory region (<c>[addr, addr+size)</c>) as addressable. 274*a28cd43dSSascha Wildner * 275*a28cd43dSSascha Wildner * This memory must be previously allocated by your program. Accessing 276*a28cd43dSSascha Wildner * addresses in this region is allowed until this region is poisoned again. 277*a28cd43dSSascha Wildner * This function could unpoison a super-region of <c>[addr, addr+size)</c> due 278*a28cd43dSSascha Wildner * to ASan alignment restrictions. 279*a28cd43dSSascha Wildner * 280*a28cd43dSSascha Wildner * \note This function is not thread-safe because no two threads can 281*a28cd43dSSascha Wildner * poison or unpoison memory in the same memory region simultaneously. 282*a28cd43dSSascha Wildner * 283*a28cd43dSSascha Wildner * \param addr Start of memory region. 284*a28cd43dSSascha Wildner * \param size Size of memory region. */ 285*a28cd43dSSascha Wildner void __asan_unpoison_memory_region(void const volatile *addr, size_t size); 286*a28cd43dSSascha Wildner #endif 287*a28cd43dSSascha Wildner 288*a28cd43dSSascha Wildner #endif /* ZSTD_COMPILER_H */ 289