/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under both the BSD-style license (found in the
 * LICENSE file in the root directory of this source tree) and the GPLv2 (found
 * in the COPYING file in the root directory of this source tree).
 * You may select, at your option, one of the above-listed licenses.
 */

#ifndef ZSTD_COMPILER_H
#define ZSTD_COMPILER_H

#include <stddef.h>

#include "portability_macros.h"

/*-*******************************************************
*  Compiler specifics
*********************************************************/
/* force inlining */

#if !defined(ZSTD_NO_INLINE)
#if (defined(__GNUC__) && !defined(__STRICT_ANSI__)) || defined(__cplusplus) || defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   /* C99 */
#  define INLINE_KEYWORD inline
#else
#  define INLINE_KEYWORD
#endif

#if defined(__GNUC__) || defined(__IAR_SYSTEMS_ICC__)
#  define FORCE_INLINE_ATTR __attribute__((always_inline))
#elif defined(_MSC_VER)
#  define FORCE_INLINE_ATTR __forceinline
#else
#  define FORCE_INLINE_ATTR
#endif

#else

#define INLINE_KEYWORD
#define FORCE_INLINE_ATTR

#endif

/**
  On MSVC qsort requires that functions passed into it use the __cdecl calling convention (CC).
  This explicitly marks such functions as __cdecl so that the code will still compile
  if a CC other than __cdecl has been made the default.
*/
#if  defined(_MSC_VER)
#  define WIN_CDECL __cdecl
#else
#  define WIN_CDECL
#endif
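
/* Illustrative sketch (not part of this header): a qsort comparator marked
 * WIN_CDECL so it still compiles when a non-default calling convention is
 * selected on MSVC (e.g. /Gr or /Gz). All names below are hypothetical.
 *
 *   static int WIN_CDECL compareU32(const void* a, const void* b)
 *   {
 *       unsigned const va = *(const unsigned*)a;
 *       unsigned const vb = *(const unsigned*)b;
 *       return (va > vb) - (va < vb);
 *   }
 *
 *   qsort(table, tableSize, sizeof(unsigned), compareU32);
 */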

/* UNUSED_ATTR tells the compiler it is okay if the function is unused. */
#if defined(__GNUC__) || defined(__IAR_SYSTEMS_ICC__)
#  define UNUSED_ATTR __attribute__((unused))
#else
#  define UNUSED_ATTR
#endif

/**
 * FORCE_INLINE_TEMPLATE is used to define C "templates", which take constant
 * parameters. They must be inlined for the compiler to eliminate the constant
 * branches.
 */
#define FORCE_INLINE_TEMPLATE static INLINE_KEYWORD FORCE_INLINE_ATTR UNUSED_ATTR
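
/* Illustrative sketch (not part of this header): a typical "template" writes a
 * generic body whose constant parameter selects a branch, then exposes thin
 * wrappers; because each wrapper is force-inlined, the compiler eliminates the
 * dead branch. All names below are hypothetical.
 *
 *   FORCE_INLINE_TEMPLATE
 *   size_t sumBytes_internal(const unsigned char* src, size_t len, int const unroll)
 *   {
 *       size_t total = 0, i = 0;
 *       if (unroll) {
 *           for (; i + 4 <= len; i += 4)
 *               total += (size_t)src[i] + src[i+1] + src[i+2] + src[i+3];
 *       }
 *       for (; i < len; i++) total += src[i];
 *       return total;
 *   }
 *   static size_t sumBytes_scalar(const unsigned char* s, size_t l)   { return sumBytes_internal(s, l, 0); }
 *   static size_t sumBytes_unrolled(const unsigned char* s, size_t l) { return sumBytes_internal(s, l, 1); }
 */
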
/**
 * HINT_INLINE is used to help the compiler generate better code. It is *not*
 * used for "templates", so it can be tweaked based on the compiler's
 * performance.
 *
 * gcc-4.8 and gcc-4.9 have been shown to benefit from leaving off the
 * always_inline attribute.
 *
 * clang up to 5.0.0 (trunk) benefits tremendously from the always_inline
 * attribute.
 */
#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ >= 4 && __GNUC_MINOR__ >= 8 && __GNUC__ < 5
#  define HINT_INLINE static INLINE_KEYWORD
#else
#  define HINT_INLINE FORCE_INLINE_TEMPLATE
#endif

/* "soft" inline :
 * The compiler is free to decide whether it's a good idea to inline or not.
 * The main objective is to silence compiler warnings
 * when a defined function is included but not used.
 *
 * Note : this macro is prefixed `MEM_` because it used to be provided by the `mem.h` unit.
 * Updating the prefix is probably preferable, but requires a fairly large codemod,
 * since this name is used everywhere.
 */
#ifndef MEM_STATIC  /* already defined in Linux Kernel mem.h */
#if defined(__GNUC__)
#  define MEM_STATIC static __inline UNUSED_ATTR
#elif defined(__IAR_SYSTEMS_ICC__)
#  define MEM_STATIC static inline UNUSED_ATTR
#elif defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)
#  define MEM_STATIC static inline
#elif defined(_MSC_VER)
#  define MEM_STATIC static __inline
#else
#  define MEM_STATIC static  /* this version may generate warnings for unused static functions; disable the relevant warning */
#endif
#endif

/* force no inlining */
#ifdef _MSC_VER
#  define FORCE_NOINLINE static __declspec(noinline)
#else
#  if defined(__GNUC__) || defined(__IAR_SYSTEMS_ICC__)
#    define FORCE_NOINLINE static __attribute__((__noinline__))
#  else
#    define FORCE_NOINLINE static
#  endif
#endif


/* target attribute */
#if defined(__GNUC__) || defined(__IAR_SYSTEMS_ICC__)
#  define TARGET_ATTRIBUTE(target) __attribute__((__target__(target)))
#else
#  define TARGET_ATTRIBUTE(target)
#endif

/* Target attribute for BMI2 dynamic dispatch.
 * Enable lzcnt, bmi, and bmi2.
 * We test for bmi1 & bmi2. lzcnt is included in bmi1.
 */
#define BMI2_TARGET_ATTRIBUTE TARGET_ATTRIBUTE("lzcnt,bmi,bmi2")
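
/* Illustrative sketch (not part of this header): the usual dynamic-dispatch
 * pattern pairs a default build of a function with a BMI2-targeted clone and
 * selects one at runtime via a caller-supplied flag (driven by a CPU-feature
 * check). All names below are hypothetical.
 *
 *   static size_t countPrefix_default(const unsigned char* src, size_t len)
 *   {
 *       size_t n = 0;
 *       while (n < len && src[n] == src[0]) n++;
 *       return n;
 *   }
 *
 *   BMI2_TARGET_ATTRIBUTE
 *   static size_t countPrefix_bmi2(const unsigned char* src, size_t len)
 *   {
 *       size_t n = 0;
 *       while (n < len && src[n] == src[0]) n++;
 *       return n;
 *   }
 *
 *   static size_t countPrefix(const unsigned char* src, size_t len, int const bmi2)
 *   {
 *       return bmi2 ? countPrefix_bmi2(src, len) : countPrefix_default(src, len);
 *   }
 */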

/* prefetch
 * can be disabled by defining the NO_PREFETCH build macro */
#if defined(NO_PREFETCH)
#  define PREFETCH_L1(ptr)  do { (void)(ptr); } while (0)  /* disabled */
#  define PREFETCH_L2(ptr)  do { (void)(ptr); } while (0)  /* disabled */
#else
#  if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_I86)) && !defined(_M_ARM64EC)  /* _mm_prefetch() is not defined outside of x86/x64 */
#    include <mmintrin.h>   /* https://msdn.microsoft.com/fr-fr/library/84szxsww(v=vs.90).aspx */
#    define PREFETCH_L1(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T0)
#    define PREFETCH_L2(ptr)  _mm_prefetch((const char*)(ptr), _MM_HINT_T1)
#  elif defined(__GNUC__) && ( (__GNUC__ >= 4) || ( (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1) ) )
#    define PREFETCH_L1(ptr)  __builtin_prefetch((ptr), 0 /* rw==read */, 3 /* locality */)
#    define PREFETCH_L2(ptr)  __builtin_prefetch((ptr), 0 /* rw==read */, 2 /* locality */)
#  elif defined(__aarch64__)
#    define PREFETCH_L1(ptr)  do { __asm__ __volatile__("prfm pldl1keep, %0" ::"Q"(*(ptr))); } while (0)
#    define PREFETCH_L2(ptr)  do { __asm__ __volatile__("prfm pldl2keep, %0" ::"Q"(*(ptr))); } while (0)
#  else
#    define PREFETCH_L1(ptr) do { (void)(ptr); } while (0)  /* disabled */
#    define PREFETCH_L2(ptr) do { (void)(ptr); } while (0)  /* disabled */
#  endif
#endif  /* NO_PREFETCH */

#define CACHELINE_SIZE 64

#define PREFETCH_AREA(p, s)                              \
    do {                                                 \
        const char* const _ptr = (const char*)(p);       \
        size_t const _size = (size_t)(s);                \
        size_t _pos;                                     \
        for (_pos=0; _pos<_size; _pos+=CACHELINE_SIZE) { \
            PREFETCH_L2(_ptr + _pos);                    \
        }                                                \
    } while (0)
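
/* Illustrative usage sketch (not part of this header); the pointers and sizes
 * below are hypothetical:
 *
 *   PREFETCH_L1(matchPtr);            single cache line, highest locality
 *   PREFETCH_AREA(dictBase, 4096);    one PREFETCH_L2 per CACHELINE_SIZE bytes
 */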

/* vectorization
 * older GCC (pre gcc-4.3 picked as the cutoff) uses a different syntax,
 * and some compilers, like Intel ICC and MCST LCC, do not support it at all. */
#if !defined(__INTEL_COMPILER) && !defined(__clang__) && defined(__GNUC__) && !defined(__LCC__)
#  if (__GNUC__ == 4 && __GNUC_MINOR__ > 3) || (__GNUC__ >= 5)
#    define DONT_VECTORIZE __attribute__((optimize("no-tree-vectorize")))
#  else
#    define DONT_VECTORIZE _Pragma("GCC optimize(\"no-tree-vectorize\")")
#  endif
#else
#  define DONT_VECTORIZE
#endif

/* Tell the compiler that a branch is likely or unlikely.
 * Only use these macros if they cause the compiler to generate better code.
 * If you can remove a LIKELY/UNLIKELY annotation without speed changes in gcc
 * and clang, please do.
 */
#if defined(__GNUC__)
#define LIKELY(x) (__builtin_expect((x), 1))
#define UNLIKELY(x) (__builtin_expect((x), 0))
#else
#define LIKELY(x) (x)
#define UNLIKELY(x) (x)
#endif
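
/* Illustrative usage sketch (not part of this header); the names below are
 * hypothetical:
 *
 *   if (UNLIKELY(srcSize < minBlockSize))
 *       return 0;                              error path, expected to be rare
 *   while (LIKELY(ip < iend))
 *       checksum = (checksum << 1) ^ *ip++;    hot-loop condition, expected true
 */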

#if __has_builtin(__builtin_unreachable) || (defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5)))
#  define ZSTD_UNREACHABLE do { assert(0), __builtin_unreachable(); } while (0)
#else
#  define ZSTD_UNREACHABLE do { assert(0); } while (0)
#endif
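
/* Illustrative usage sketch (not part of this header): mark a default case
 * that is impossible by construction; assert(0) fires in debug builds, while
 * __builtin_unreachable() lets release builds drop the dead path. Names are
 * hypothetical.
 *
 *   switch (mode & 3) {
 *   case 0: return handleMode0(ctx);
 *   case 1: return handleMode1(ctx);
 *   case 2: return handleMode2(ctx);
 *   case 3: return handleMode3(ctx);
 *   default: ZSTD_UNREACHABLE;
 *   }
 */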

/* disable warnings */
#ifdef _MSC_VER    /* Visual Studio */
#  include <intrin.h>                    /* For Visual 2005 */
#  pragma warning(disable : 4100)        /* disable: C4100: unreferenced formal parameter */
#  pragma warning(disable : 4127)        /* disable: C4127: conditional expression is constant */
#  pragma warning(disable : 4204)        /* disable: C4204: non-constant aggregate initializer */
#  pragma warning(disable : 4214)        /* disable: C4214: non-int bitfields */
#  pragma warning(disable : 4324)        /* disable: C4324: padded structure */
#endif

/* compile time determination of SIMD support */
#if !defined(ZSTD_NO_INTRINSICS)
#  if defined(__AVX2__)
#    define ZSTD_ARCH_X86_AVX2
#  endif
#  if defined(__SSE2__) || defined(_M_X64) || (defined (_M_IX86) && defined(_M_IX86_FP) && (_M_IX86_FP >= 2))
#    define ZSTD_ARCH_X86_SSE2
#  endif
#  if defined(__ARM_NEON) || defined(_M_ARM64)
#    define ZSTD_ARCH_ARM_NEON
#  endif
#
#  if defined(ZSTD_ARCH_X86_AVX2)
#    include <immintrin.h>
#  endif
#  if defined(ZSTD_ARCH_X86_SSE2)
#    include <emmintrin.h>
#  elif defined(ZSTD_ARCH_ARM_NEON)
#    include <arm_neon.h>
#  endif
#endif

/* C-language Attributes are added in C23. */
#if defined(__STDC_VERSION__) && (__STDC_VERSION__ > 201710L) && defined(__has_c_attribute)
# define ZSTD_HAS_C_ATTRIBUTE(x) __has_c_attribute(x)
#else
# define ZSTD_HAS_C_ATTRIBUTE(x) 0
#endif

/* Only use C++ attributes in C++. Some compilers report support for C++
 * attributes when compiling with C.
 */
#if defined(__cplusplus) && defined(__has_cpp_attribute)
# define ZSTD_HAS_CPP_ATTRIBUTE(x) __has_cpp_attribute(x)
#else
# define ZSTD_HAS_CPP_ATTRIBUTE(x) 0
#endif

/* Define ZSTD_FALLTHROUGH macro for annotating switch case with the 'fallthrough' attribute.
 * - C23: https://en.cppreference.com/w/c/language/attributes/fallthrough
 * - CPP17: https://en.cppreference.com/w/cpp/language/attributes/fallthrough
 * - Else: __attribute__((__fallthrough__))
 */
#ifndef ZSTD_FALLTHROUGH
# if ZSTD_HAS_C_ATTRIBUTE(fallthrough)
#  define ZSTD_FALLTHROUGH [[fallthrough]]
# elif ZSTD_HAS_CPP_ATTRIBUTE(fallthrough)
#  define ZSTD_FALLTHROUGH [[fallthrough]]
# elif __has_attribute(__fallthrough__)
/* Leading semicolon is to satisfy gcc-11 with -pedantic. Without the semicolon
 * gcc complains about: a label can only be part of a statement and a declaration is not a statement.
 */
#  define ZSTD_FALLTHROUGH ; __attribute__((__fallthrough__))
# else
#  define ZSTD_FALLTHROUGH
# endif
#endif
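
/* Illustrative usage sketch (not part of this header): annotate an intentional
 * fall-through so -Wimplicit-fallthrough stays quiet. Names are hypothetical.
 *
 *   switch (nbBytes) {
 *   case 2: value |= (unsigned)src[1] << 8;
 *           ZSTD_FALLTHROUGH;
 *   case 1: value |= src[0];
 *           ZSTD_FALLTHROUGH;
 *   case 0: break;
 *   default: break;
 *   }
 */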

/*-**************************************************************
*  Alignment
*****************************************************************/

/* @return 1 if @u is a 2^n value, 0 otherwise
 * useful to check a value is valid for alignment restrictions */
MEM_STATIC int ZSTD_isPower2(size_t u) {
    return (u & (u-1)) == 0;
}

/* this test was initially positioned in mem.h,
 * but this file is removed (or replaced) for the Linux kernel,
 * so it's now hosted in compiler.h,
 * which remains valid for both user & kernel spaces.
 */

#ifndef ZSTD_ALIGNOF
# if defined(__GNUC__) || defined(_MSC_VER)
/* covers gcc, clang & MSVC */
/* note : this section must come first, before C11,
 * due to a limitation in the kernel source generator */
#  define ZSTD_ALIGNOF(T) __alignof(T)

# elif defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L)
/* C11 support */
#  include <stdalign.h>
#  define ZSTD_ALIGNOF(T) alignof(T)

# else
/* No known support for alignof() - imperfect backup */
#  define ZSTD_ALIGNOF(T) (sizeof(void*) < sizeof(T) ? sizeof(void*) : sizeof(T))

# endif
#endif /* ZSTD_ALIGNOF */

#ifndef ZSTD_ALIGNED
/* C90-compatible alignment macro (GCC/Clang). Adjust for other compilers if needed. */
# if defined(__GNUC__) || defined(__clang__)
#  define ZSTD_ALIGNED(a) __attribute__((aligned(a)))
# elif defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) /* C11 */
#  define ZSTD_ALIGNED(a) _Alignas(a)
# elif defined(_MSC_VER)
#  define ZSTD_ALIGNED(a) __declspec(align(a))
# else
   /* this compiler will require its own alignment instruction */
#  define ZSTD_ALIGNED(...)
# endif
#endif /* ZSTD_ALIGNED */
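
/* Illustrative usage sketch (not part of this header): a cache-line-aligned
 * table, plus the kind of checks the alignment helpers enable. Names are
 * hypothetical.
 *
 *   static ZSTD_ALIGNED(64) unsigned hashTable[1024];
 *
 *   assert(ZSTD_isPower2(CACHELINE_SIZE));
 *   assert(((size_t)(const void*)hashTable % ZSTD_ALIGNOF(unsigned)) == 0);
 */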


/*-**************************************************************
*  Sanitizer
*****************************************************************/

/**
 * Zstd relies on pointer overflow in its decompressor.
 * We add this attribute to functions that rely on pointer overflow.
 */
#ifndef ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
#  if __has_attribute(no_sanitize)
#    if !defined(__clang__) && defined(__GNUC__) && __GNUC__ < 8
       /* gcc < 8 only has signed-integer-overflow, which triggers on pointer overflow */
#      define ZSTD_ALLOW_POINTER_OVERFLOW_ATTR __attribute__((no_sanitize("signed-integer-overflow")))
#    else
       /* older versions of clang [3.7, 5.0) will warn that pointer-overflow is ignored. */
#      define ZSTD_ALLOW_POINTER_OVERFLOW_ATTR __attribute__((no_sanitize("pointer-overflow")))
#    endif
#  else
#    define ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
#  endif
#endif

/**
 * Helper function to perform a wrapped pointer difference without triggering
 * UBSAN.
 *
 * @returns lhs - rhs with wrapping
 */
MEM_STATIC
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
ptrdiff_t ZSTD_wrappedPtrDiff(unsigned char const* lhs, unsigned char const* rhs)
{
    return lhs - rhs;
}

/**
 * Helper function to perform a wrapped pointer add without triggering UBSAN.
 *
 * @return ptr + add with wrapping
 */
MEM_STATIC
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
unsigned char const* ZSTD_wrappedPtrAdd(unsigned char const* ptr, ptrdiff_t add)
{
    return ptr + add;
}

/**
 * Helper function to perform a wrapped pointer subtraction without triggering
 * UBSAN.
 *
 * @return ptr - sub with wrapping
 */
MEM_STATIC
ZSTD_ALLOW_POINTER_OVERFLOW_ATTR
unsigned char const* ZSTD_wrappedPtrSub(unsigned char const* ptr, ptrdiff_t sub)
{
    return ptr - sub;
}

/**
 * Helper function to add to a pointer that works around C's undefined behavior
 * of adding 0 to NULL.
 *
 * @returns `ptr + add` except it defines `NULL + 0 == NULL`.
 */
MEM_STATIC
unsigned char* ZSTD_maybeNullPtrAdd(unsigned char* ptr, ptrdiff_t add)
{
    return add > 0 ? ptr + add : ptr;
}
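
/* Illustrative usage sketch (not part of this header): compute and re-apply a
 * window offset without tripping UBSAN when intermediate pointers may wrap.
 * Names are hypothetical.
 *
 *   ptrdiff_t const offset = ZSTD_wrappedPtrDiff(ip, base);
 *   unsigned char const* const match = ZSTD_wrappedPtrSub(ip, (ptrdiff_t)distance);
 *   unsigned char* const oend = ZSTD_maybeNullPtrAdd(op, (ptrdiff_t)dstCapacity);
 */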

/* Issue #3240 reports an ASAN failure on an llvm-mingw build. Out of an
 * abundance of caution, disable our custom poisoning on mingw. */
#ifdef __MINGW32__
#ifndef ZSTD_ASAN_DONT_POISON_WORKSPACE
#define ZSTD_ASAN_DONT_POISON_WORKSPACE 1
#endif
#ifndef ZSTD_MSAN_DONT_POISON_WORKSPACE
#define ZSTD_MSAN_DONT_POISON_WORKSPACE 1
#endif
#endif

#if ZSTD_MEMORY_SANITIZER && !defined(ZSTD_MSAN_DONT_POISON_WORKSPACE)
/* Not all platforms that support msan provide sanitizers/msan_interface.h.
 * We therefore declare the functions we need ourselves, rather than trying to
 * include the header file... */
#include <stddef.h>  /* size_t */
#define ZSTD_DEPS_NEED_STDINT
#include "zstd_deps.h"  /* intptr_t */

/* Make memory region fully initialized (without changing its contents). */
void __msan_unpoison(const volatile void *a, size_t size);

/* Make memory region fully uninitialized (without changing its contents).
   This is a legacy interface that does not update origin information. Use
   __msan_allocated_memory() instead. */
void __msan_poison(const volatile void *a, size_t size);

/* Returns the offset of the first (at least partially) poisoned byte in the
   memory range, or -1 if the whole range is good. */
intptr_t __msan_test_shadow(const volatile void *x, size_t size);

/* Print shadow and origin for the memory range to stderr in a human-readable
   format. */
void __msan_print_shadow(const volatile void *x, size_t size);
#endif

#if ZSTD_ADDRESS_SANITIZER && !defined(ZSTD_ASAN_DONT_POISON_WORKSPACE)
/* Not all platforms that support asan provide sanitizers/asan_interface.h.
 * We therefore declare the functions we need ourselves, rather than trying to
 * include the header file... */
#include <stddef.h>  /* size_t */

/**
 * Marks a memory region (<c>[addr, addr+size)</c>) as unaddressable.
 *
 * This memory must be previously allocated by your program. Instrumented
 * code is forbidden from accessing addresses in this region until it is
 * unpoisoned. This function is not guaranteed to poison the entire region -
 * it could poison only a subregion of <c>[addr, addr+size)</c> due to ASan
 * alignment restrictions.
 *
 * \note This function is not thread-safe because no two threads can poison or
 * unpoison memory in the same memory region simultaneously.
 *
 * \param addr Start of memory region.
 * \param size Size of memory region. */
void __asan_poison_memory_region(void const volatile *addr, size_t size);

/**
 * Marks a memory region (<c>[addr, addr+size)</c>) as addressable.
 *
 * This memory must be previously allocated by your program. Accessing
 * addresses in this region is allowed until this region is poisoned again.
 * This function could unpoison a super-region of <c>[addr, addr+size)</c> due
 * to ASan alignment restrictions.
 *
 * \note This function is not thread-safe because no two threads can
 * poison or unpoison memory in the same memory region simultaneously.
 *
 * \param addr Start of memory region.
 * \param size Size of memory region. */
void __asan_unpoison_memory_region(void const volatile *addr, size_t size);
#endif

#endif /* ZSTD_COMPILER_H */