xref: /freebsd/sys/contrib/zstd/lib/common/compiler.h (revision f7cd7fe51c4140960ebea00410ed62894f5625d1)
10c16b537SWarner Losh /*
237f1f268SConrad Meyer  * Copyright (c) 2016-2020, Yann Collet, Facebook, Inc.
30c16b537SWarner Losh  * All rights reserved.
40c16b537SWarner Losh  *
50c16b537SWarner Losh  * This source code is licensed under both the BSD-style license (found in the
60c16b537SWarner Losh  * LICENSE file in the root directory of this source tree) and the GPLv2 (found
70c16b537SWarner Losh  * in the COPYING file in the root directory of this source tree).
80c16b537SWarner Losh  * You may select, at your option, one of the above-listed licenses.
90c16b537SWarner Losh  */
100c16b537SWarner Losh 
110c16b537SWarner Losh #ifndef ZSTD_COMPILER_H
120c16b537SWarner Losh #define ZSTD_COMPILER_H
130c16b537SWarner Losh 
140c16b537SWarner Losh /*-*******************************************************
150c16b537SWarner Losh *  Compiler specifics
160c16b537SWarner Losh *********************************************************/
170c16b537SWarner Losh /* force inlining */
18a0483764SConrad Meyer 
/* INLINE_KEYWORD / FORCE_INLINE_ATTR: building blocks for FORCE_INLINE_TEMPLATE
 * and HINT_INLINE below.  Defining ZSTD_NO_INLINE turns both into no-ops. */
19a0483764SConrad Meyer #if !defined(ZSTD_NO_INLINE)
2037f1f268SConrad Meyer #if (defined(__GNUC__) && !defined(__STRICT_ANSI__)) || defined(__cplusplus) || defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   /* C99 */
210c16b537SWarner Losh #  define INLINE_KEYWORD inline
220c16b537SWarner Losh #else
230c16b537SWarner Losh #  define INLINE_KEYWORD
240c16b537SWarner Losh #endif
250c16b537SWarner Losh 
/* always_inline-style attribute, selected per compiler family */
269cbefe25SConrad Meyer #if defined(__GNUC__) || defined(__ICCARM__)
270c16b537SWarner Losh #  define FORCE_INLINE_ATTR __attribute__((always_inline))
280c16b537SWarner Losh #elif defined(_MSC_VER)
290c16b537SWarner Losh #  define FORCE_INLINE_ATTR __forceinline
300c16b537SWarner Losh #else
310c16b537SWarner Losh #  define FORCE_INLINE_ATTR
320c16b537SWarner Losh #endif
330c16b537SWarner Losh 
34a0483764SConrad Meyer #else
35a0483764SConrad Meyer 
/* ZSTD_NO_INLINE defined: all inlining hints disabled */
36a0483764SConrad Meyer #define INLINE_KEYWORD
37a0483764SConrad Meyer #define FORCE_INLINE_ATTR
38a0483764SConrad Meyer 
39a0483764SConrad Meyer #endif
40a0483764SConrad Meyer 
410c16b537SWarner Losh /**
42*f7cd7fe5SConrad Meyer   On MSVC qsort requires that functions passed into it use the __cdecl calling convention (CC).
43*f7cd7fe5SConrad Meyer   This explicitly marks such functions as __cdecl so that the code will still compile
44*f7cd7fe5SConrad Meyer   if a CC other than __cdecl has been made the default.
45*f7cd7fe5SConrad Meyer */
46*f7cd7fe5SConrad Meyer #if  defined(_MSC_VER)
47*f7cd7fe5SConrad Meyer #  define WIN_CDECL __cdecl
48*f7cd7fe5SConrad Meyer #else
49*f7cd7fe5SConrad Meyer #  define WIN_CDECL
50*f7cd7fe5SConrad Meyer #endif
51*f7cd7fe5SConrad Meyer 
52*f7cd7fe5SConrad Meyer /**
530c16b537SWarner Losh  * FORCE_INLINE_TEMPLATE is used to define C "templates", which take constant
542b9c00cbSConrad Meyer  * parameters. They must be inlined for the compiler to eliminate the constant
550c16b537SWarner Losh  * branches.
560c16b537SWarner Losh  */
570c16b537SWarner Losh #define FORCE_INLINE_TEMPLATE static INLINE_KEYWORD FORCE_INLINE_ATTR
580c16b537SWarner Losh /**
590c16b537SWarner Losh  * HINT_INLINE is used to help the compiler generate better code. It is *not*
600c16b537SWarner Losh  * used for "templates", so it can be tweaked based on the compilers
610c16b537SWarner Losh  * performance.
620c16b537SWarner Losh  *
630c16b537SWarner Losh  * gcc-4.8 and gcc-4.9 have been shown to benefit from leaving off the
640c16b537SWarner Losh  * always_inline attribute.
650c16b537SWarner Losh  *
660c16b537SWarner Losh  * clang up to 5.0.0 (trunk) benefit tremendously from the always_inline
670c16b537SWarner Losh  * attribute.
680c16b537SWarner Losh  */
/* i.e. gcc 4.8 / 4.9 exactly: `__GNUC__ >= 4 && __GNUC__ < 5` pins the major version to 4 */
690c16b537SWarner Losh #if !defined(__clang__) && defined(__GNUC__) && __GNUC__ >= 4 && __GNUC_MINOR__ >= 8 && __GNUC__ < 5
700c16b537SWarner Losh #  define HINT_INLINE static INLINE_KEYWORD
710c16b537SWarner Losh #else
720c16b537SWarner Losh #  define HINT_INLINE static INLINE_KEYWORD FORCE_INLINE_ATTR
730c16b537SWarner Losh #endif
740c16b537SWarner Losh 
759cbefe25SConrad Meyer /* UNUSED_ATTR tells the compiler it is okay if the function is unused. */
769cbefe25SConrad Meyer #if defined(__GNUC__)
779cbefe25SConrad Meyer #  define UNUSED_ATTR __attribute__((unused))
789cbefe25SConrad Meyer #else
799cbefe25SConrad Meyer #  define UNUSED_ATTR
809cbefe25SConrad Meyer #endif
819cbefe25SConrad Meyer 
820c16b537SWarner Losh /* force no inlining */
830c16b537SWarner Losh #ifdef _MSC_VER
840c16b537SWarner Losh #  define FORCE_NOINLINE static __declspec(noinline)
850c16b537SWarner Losh #else
869cbefe25SConrad Meyer #  if defined(__GNUC__) || defined(__ICCARM__)
870c16b537SWarner Losh #    define FORCE_NOINLINE static __attribute__((__noinline__))
880c16b537SWarner Losh #  else
890c16b537SWarner Losh #    define FORCE_NOINLINE static
900c16b537SWarner Losh #  endif
910c16b537SWarner Losh #endif
920c16b537SWarner Losh 
9319fcbaf1SConrad Meyer /* target attribute */
/* TARGET_ATTRIBUTE(t): per-function __attribute__((__target__(t))) on GNU-family
 * compilers; expands to nothing elsewhere. */
9419fcbaf1SConrad Meyer #ifndef __has_attribute
9519fcbaf1SConrad Meyer   #define __has_attribute(x) 0  /* Compatibility with non-clang compilers. */
9619fcbaf1SConrad Meyer #endif
979cbefe25SConrad Meyer #if defined(__GNUC__) || defined(__ICCARM__)
9819fcbaf1SConrad Meyer #  define TARGET_ATTRIBUTE(target) __attribute__((__target__(target)))
9919fcbaf1SConrad Meyer #else
10019fcbaf1SConrad Meyer #  define TARGET_ATTRIBUTE(target)
10119fcbaf1SConrad Meyer #endif
10219fcbaf1SConrad Meyer 
10319fcbaf1SConrad Meyer /* Enable runtime BMI2 dispatch based on the CPU.
10419fcbaf1SConrad Meyer  * Enabled for clang & gcc >=4.8 on x86 when BMI2 isn't enabled by default.
10519fcbaf1SConrad Meyer  */
/* FIX(review): the arch test used `_M_X86`, which no compiler defines (MSVC and
 * clang-cl define `_M_IX86` for 32-bit and `_M_X64` for 64-bit x86), so the
 * non-__x86_64__ half of the test could never fire.  Use `_M_X64`, matching
 * upstream zstd, so clang-cl on x64 also gets runtime BMI2 dispatch. */
10619fcbaf1SConrad Meyer #ifndef DYNAMIC_BMI2
1070f743729SConrad Meyer   #if ((defined(__clang__) && __has_attribute(__target__)) \
10819fcbaf1SConrad Meyer       || (defined(__GNUC__) \
1090f743729SConrad Meyer           && (__GNUC__ >= 5 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)))) \
11019fcbaf1SConrad Meyer       && (defined(__x86_64__) || defined(_M_X64)) \
11119fcbaf1SConrad Meyer       && !defined(__BMI2__)
11219fcbaf1SConrad Meyer   #  define DYNAMIC_BMI2 1
11319fcbaf1SConrad Meyer   #else
11419fcbaf1SConrad Meyer   #  define DYNAMIC_BMI2 0
11519fcbaf1SConrad Meyer   #endif
11619fcbaf1SConrad Meyer #endif
11719fcbaf1SConrad Meyer 
1180f743729SConrad Meyer /* prefetch
119a0483764SConrad Meyer  * can be disabled, by declaring NO_PREFETCH build macro */
1200f743729SConrad Meyer #if defined(NO_PREFETCH)
121a0483764SConrad Meyer #  define PREFETCH_L1(ptr)  (void)(ptr)  /* disabled */
122a0483764SConrad Meyer #  define PREFETCH_L2(ptr)  (void)(ptr)  /* disabled */
1230f743729SConrad Meyer #else
/* FIX(review): `_M_I86` is the ancient 16-bit x86 macro and is never defined by
 * any MSVC that ships <mmintrin.h>; the 32-bit macro is `_M_IX86`.  As written,
 * 32-bit MSVC builds silently fell through to the "disabled" branch. */
1240c16b537SWarner Losh #  if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))  /* _mm_prefetch() is not defined outside of x86/x64 */
1250c16b537SWarner Losh #    include <mmintrin.h>   /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */
126a0483764SConrad Meyer #    define PREFETCH_L1(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
127a0483764SConrad Meyer #    define PREFETCH_L2(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T1)
1280f743729SConrad Meyer #  elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
129a0483764SConrad Meyer #    define PREFETCH_L1(ptr)  __builtin_prefetch((ptr), 0 /* rw==read */, 3 /* locality */)
130a0483764SConrad Meyer #    define PREFETCH_L2(ptr)  __builtin_prefetch((ptr), 0 /* rw==read */, 2 /* locality */)
131*f7cd7fe5SConrad Meyer #  elif defined(__aarch64__)
132*f7cd7fe5SConrad Meyer #    define PREFETCH_L1(ptr)  __asm__ __volatile__("prfm pldl1keep, %0" ::"Q"(*(ptr)))
133*f7cd7fe5SConrad Meyer #    define PREFETCH_L2(ptr)  __asm__ __volatile__("prfm pldl2keep, %0" ::"Q"(*(ptr)))
1340c16b537SWarner Losh #  else
135a0483764SConrad Meyer #    define PREFETCH_L1(ptr) (void)(ptr)  /* disabled */
136a0483764SConrad Meyer #    define PREFETCH_L2(ptr) (void)(ptr)  /* disabled */
1370c16b537SWarner Losh #  endif
1380f743729SConrad Meyer #endif  /* NO_PREFETCH */
1390f743729SConrad Meyer 
1400f743729SConrad Meyer #define CACHELINE_SIZE 64
1410f743729SConrad Meyer 
/* Prefetch the s bytes starting at p into L2, one touch per cache line. */
1420f743729SConrad Meyer #define PREFETCH_AREA(p, s)  {            \
1430f743729SConrad Meyer     const char* const _ptr = (const char*)(p);  \
1440f743729SConrad Meyer     size_t const _size = (size_t)(s);     \
1450f743729SConrad Meyer     size_t _pos;                          \
1460f743729SConrad Meyer     for (_pos=0; _pos<_size; _pos+=CACHELINE_SIZE) {  \
147a0483764SConrad Meyer         PREFETCH_L2(_ptr + _pos);         \
1480f743729SConrad Meyer     }                                     \
1490f743729SConrad Meyer }
1500c16b537SWarner Losh 
1519cbefe25SConrad Meyer /* vectorization
1529cbefe25SConrad Meyer  * older GCC (pre gcc-4.3 picked as the cutoff) uses a different syntax */
15337f1f268SConrad Meyer #if !defined(__INTEL_COMPILER) && !defined(__clang__) && defined(__GNUC__)
1549cbefe25SConrad Meyer #  if (__GNUC__ == 4 && __GNUC_MINOR__ > 3) || (__GNUC__ >= 5)
1554d3f1eafSConrad Meyer #    define DONT_VECTORIZE __attribute__((optimize("no-tree-vectorize")))
1564d3f1eafSConrad Meyer #  else
/* NOTE(review): the _Pragma fallback presumably applies to all functions that
 * follow it in the translation unit, not just the next one — confirm this is
 * acceptable on the old-gcc builds that take this branch. */
1579cbefe25SConrad Meyer #    define DONT_VECTORIZE _Pragma("GCC optimize(\"no-tree-vectorize\")")
1589cbefe25SConrad Meyer #  endif
1599cbefe25SConrad Meyer #else
1604d3f1eafSConrad Meyer #  define DONT_VECTORIZE
1614d3f1eafSConrad Meyer #endif
1624d3f1eafSConrad Meyer 
16337f1f268SConrad Meyer /* Tell the compiler that a branch is likely or unlikely.
16437f1f268SConrad Meyer  * Only use these macros if it causes the compiler to generate better code.
16537f1f268SConrad Meyer  * If you can remove a LIKELY/UNLIKELY annotation without speed changes in gcc
16637f1f268SConrad Meyer  * and clang, please do.
16737f1f268SConrad Meyer  */
16837f1f268SConrad Meyer #if defined(__GNUC__)
16937f1f268SConrad Meyer #define LIKELY(x) (__builtin_expect((x), 1))
17037f1f268SConrad Meyer #define UNLIKELY(x) (__builtin_expect((x), 0))
17137f1f268SConrad Meyer #else
17237f1f268SConrad Meyer #define LIKELY(x) (x)
17337f1f268SConrad Meyer #define UNLIKELY(x) (x)
17437f1f268SConrad Meyer #endif
17537f1f268SConrad Meyer 
1760c16b537SWarner Losh /* disable warnings */
/* NOTE: these pragmas persist for the rest of every translation unit that
 * includes this header. */
1770c16b537SWarner Losh #ifdef _MSC_VER    /* Visual Studio */
1780c16b537SWarner Losh #  include <intrin.h>                    /* For Visual 2005 */
1790c16b537SWarner Losh #  pragma warning(disable : 4100)        /* disable: C4100: unreferenced formal parameter */
1800c16b537SWarner Losh #  pragma warning(disable : 4127)        /* disable: C4127: conditional expression is constant */
1810c16b537SWarner Losh #  pragma warning(disable : 4204)        /* disable: C4204: non-constant aggregate initializer */
1820c16b537SWarner Losh #  pragma warning(disable : 4214)        /* disable: C4214: non-int bitfields */
1830c16b537SWarner Losh #  pragma warning(disable : 4324)        /* disable: C4324: padded structure */
1840c16b537SWarner Losh #endif
1850c16b537SWarner Losh 
186*f7cd7fe5SConrad Meyer /*Like DYNAMIC_BMI2 but for compile time determination of BMI2 support*/
/* FIX(review): `_M_I86` (16-bit MS C) is never defined by modern MSVC; the
 * 32-bit macro is `_M_IX86`.  As written, STATIC_BMI2 could never become 1 on
 * 32-bit MSVC even with /arch:AVX2. */
187*f7cd7fe5SConrad Meyer #ifndef STATIC_BMI2
188*f7cd7fe5SConrad Meyer #  if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
189*f7cd7fe5SConrad Meyer #    ifdef __AVX2__  //MSVC does not have a BMI2 specific flag, but every CPU that supports AVX2 also supports BMI2
190*f7cd7fe5SConrad Meyer #       define STATIC_BMI2 1
191*f7cd7fe5SConrad Meyer #    endif
192*f7cd7fe5SConrad Meyer #  endif
193*f7cd7fe5SConrad Meyer #endif
194*f7cd7fe5SConrad Meyer 
195*f7cd7fe5SConrad Meyer #ifndef STATIC_BMI2
196*f7cd7fe5SConrad Meyer     #define STATIC_BMI2 0
197*f7cd7fe5SConrad Meyer #endif
198*f7cd7fe5SConrad Meyer 
199*f7cd7fe5SConrad Meyer /* compat. with non-clang compilers */
200*f7cd7fe5SConrad Meyer #ifndef __has_builtin
201*f7cd7fe5SConrad Meyer #  define __has_builtin(x) 0
202*f7cd7fe5SConrad Meyer #endif
203*f7cd7fe5SConrad Meyer 
204*f7cd7fe5SConrad Meyer /* compat. with non-clang compilers */
205*f7cd7fe5SConrad Meyer #ifndef __has_feature
206*f7cd7fe5SConrad Meyer #  define __has_feature(x) 0
207*f7cd7fe5SConrad Meyer #endif
208*f7cd7fe5SConrad Meyer 
209*f7cd7fe5SConrad Meyer /* detects whether we are being compiled under msan */
/* __has_feature(memory_sanitizer) is clang's indicator for -fsanitize=memory;
 * ZSTD_MEMORY_SANITIZER may also be forced from the build system. */
210*f7cd7fe5SConrad Meyer #ifndef ZSTD_MEMORY_SANITIZER
211*f7cd7fe5SConrad Meyer #  if __has_feature(memory_sanitizer)
212*f7cd7fe5SConrad Meyer #    define ZSTD_MEMORY_SANITIZER 1
213*f7cd7fe5SConrad Meyer #  else
214*f7cd7fe5SConrad Meyer #    define ZSTD_MEMORY_SANITIZER 0
215*f7cd7fe5SConrad Meyer #  endif
216*f7cd7fe5SConrad Meyer #endif
217*f7cd7fe5SConrad Meyer 
218*f7cd7fe5SConrad Meyer #if ZSTD_MEMORY_SANITIZER
219*f7cd7fe5SConrad Meyer /* Not all platforms that support msan provide sanitizers/msan_interface.h.
220*f7cd7fe5SConrad Meyer  * We therefore declare the functions we need ourselves, rather than trying to
221*f7cd7fe5SConrad Meyer  * include the header file... */
222*f7cd7fe5SConrad Meyer #include <stddef.h>  /* size_t */
223*f7cd7fe5SConrad Meyer #define ZSTD_DEPS_NEED_STDINT
224*f7cd7fe5SConrad Meyer #include "zstd_deps.h"  /* intptr_t */
225*f7cd7fe5SConrad Meyer 
226*f7cd7fe5SConrad Meyer /* Make memory region fully initialized (without changing its contents). */
227*f7cd7fe5SConrad Meyer void __msan_unpoison(const volatile void *a, size_t size);
228*f7cd7fe5SConrad Meyer 
229*f7cd7fe5SConrad Meyer /* Make memory region fully uninitialized (without changing its contents).
230*f7cd7fe5SConrad Meyer    This is a legacy interface that does not update origin information. Use
231*f7cd7fe5SConrad Meyer    __msan_allocated_memory() instead. */
232*f7cd7fe5SConrad Meyer void __msan_poison(const volatile void *a, size_t size);
233*f7cd7fe5SConrad Meyer 
234*f7cd7fe5SConrad Meyer /* Returns the offset of the first (at least partially) poisoned byte in the
235*f7cd7fe5SConrad Meyer    memory range, or -1 if the whole range is good. */
236*f7cd7fe5SConrad Meyer intptr_t __msan_test_shadow(const volatile void *x, size_t size);
237*f7cd7fe5SConrad Meyer #endif
238*f7cd7fe5SConrad Meyer 
239*f7cd7fe5SConrad Meyer /* detects whether we are being compiled under asan */
/* clang reports asan via __has_feature; gcc defines __SANITIZE_ADDRESS__. */
240*f7cd7fe5SConrad Meyer #ifndef ZSTD_ADDRESS_SANITIZER
241*f7cd7fe5SConrad Meyer #  if __has_feature(address_sanitizer)
242*f7cd7fe5SConrad Meyer #    define ZSTD_ADDRESS_SANITIZER 1
243*f7cd7fe5SConrad Meyer #  elif defined(__SANITIZE_ADDRESS__)
244*f7cd7fe5SConrad Meyer #    define ZSTD_ADDRESS_SANITIZER 1
245*f7cd7fe5SConrad Meyer #  else
246*f7cd7fe5SConrad Meyer #    define ZSTD_ADDRESS_SANITIZER 0
247*f7cd7fe5SConrad Meyer #  endif
248*f7cd7fe5SConrad Meyer #endif
249*f7cd7fe5SConrad Meyer 
250*f7cd7fe5SConrad Meyer #if ZSTD_ADDRESS_SANITIZER
251*f7cd7fe5SConrad Meyer /* Not all platforms that support asan provide sanitizers/asan_interface.h.
252*f7cd7fe5SConrad Meyer  * We therefore declare the functions we need ourselves, rather than trying to
253*f7cd7fe5SConrad Meyer  * include the header file... */
254*f7cd7fe5SConrad Meyer #include <stddef.h>  /* size_t */
255*f7cd7fe5SConrad Meyer 
256*f7cd7fe5SConrad Meyer /**
257*f7cd7fe5SConrad Meyer  * Marks a memory region (<c>[addr, addr+size)</c>) as unaddressable.
258*f7cd7fe5SConrad Meyer  *
259*f7cd7fe5SConrad Meyer  * This memory must be previously allocated by your program. Instrumented
260*f7cd7fe5SConrad Meyer  * code is forbidden from accessing addresses in this region until it is
261*f7cd7fe5SConrad Meyer  * unpoisoned. This function is not guaranteed to poison the entire region -
262*f7cd7fe5SConrad Meyer  * it could poison only a subregion of <c>[addr, addr+size)</c> due to ASan
263*f7cd7fe5SConrad Meyer  * alignment restrictions.
264*f7cd7fe5SConrad Meyer  *
265*f7cd7fe5SConrad Meyer  * \note This function is not thread-safe because no two threads can poison or
266*f7cd7fe5SConrad Meyer  * unpoison memory in the same memory region simultaneously.
267*f7cd7fe5SConrad Meyer  *
268*f7cd7fe5SConrad Meyer  * \param addr Start of memory region.
269*f7cd7fe5SConrad Meyer  * \param size Size of memory region. */
270*f7cd7fe5SConrad Meyer void __asan_poison_memory_region(void const volatile *addr, size_t size);
271*f7cd7fe5SConrad Meyer 
272*f7cd7fe5SConrad Meyer /**
273*f7cd7fe5SConrad Meyer  * Marks a memory region (<c>[addr, addr+size)</c>) as addressable.
274*f7cd7fe5SConrad Meyer  *
275*f7cd7fe5SConrad Meyer  * This memory must be previously allocated by your program. Accessing
276*f7cd7fe5SConrad Meyer  * addresses in this region is allowed until this region is poisoned again.
277*f7cd7fe5SConrad Meyer  * This function could unpoison a super-region of <c>[addr, addr+size)</c> due
278*f7cd7fe5SConrad Meyer  * to ASan alignment restrictions.
279*f7cd7fe5SConrad Meyer  *
280*f7cd7fe5SConrad Meyer  * \note This function is not thread-safe because no two threads can
281*f7cd7fe5SConrad Meyer  * poison or unpoison memory in the same memory region simultaneously.
282*f7cd7fe5SConrad Meyer  *
283*f7cd7fe5SConrad Meyer  * \param addr Start of memory region.
284*f7cd7fe5SConrad Meyer  * \param size Size of memory region. */
285*f7cd7fe5SConrad Meyer void __asan_unpoison_memory_region(void const volatile *addr, size_t size);
286*f7cd7fe5SConrad Meyer #endif
287*f7cd7fe5SConrad Meyer 
2880c16b537SWarner Losh #endif /* ZSTD_COMPILER_H */
289