/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdlib.h>
#include <atomic>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 ||                 \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

// stdarg handling
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64) &&                   \
    (KMP_OS_FREEBSD || KMP_OS_LINUX)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif
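// Illustrative sketch (not runtime code; the callee name is hypothetical): on
// ABIs where va_list is an array type (e.g. x86-64 SysV), forwarding it
// through call layers requires taking its address; these macros hide that
// difference:
//
//   void __kmpc_forward(kmp_va_list ap); // hypothetical callee
//
//   void caller(int n, ...) {
//     va_list args;
//     va_start(args, n);
//     __kmpc_forward(kmp_va_addr_of(args)); // &args on x86-64, args elsewhere
//     va_end(args);
//   }
//
// Inside the callee, kmp_va_deref(ap) recovers a plain va_list.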

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug print specifiers (d, u, lld, llu) and for obtaining
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
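// Illustrative sketch (an assumption about typical use, not verbatim runtime
// code): the traits let templated code pick a printf specifier and the
// unsigned flavor of a counter type at compile time. Since spec is a runtime
// string, a format has to be assembled before use:
//
//   template <typename T> void print_bounds(T lo, T hi) {
//     char fmt[32];
//     snprintf(fmt, sizeof(fmt), "lo=%%%s hi=%%%s\n",
//              traits_t<T>::spec, traits_t<T>::spec);
//     printf(fmt, lo, hi); // e.g. expands to "lo=%lld hi=%lld\n"
//     typename traits_t<T>::unsigned_t span = hi - lo; // unsigned arithmetic
//     (void)span;
//   }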
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))
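// Illustrative values (assuming a 4096-byte page):
//   PAGE_ALIGNED((void *)0x2000)  -> true  (0x2000 & 0xFFF == 0)
//   PAGE_ALIGNED((void *)0x2010)  -> false
//   ALIGN_TO_PAGE((void *)0x2010) -> (void *)0x2000
// Note that ALIGN_TO_PAGE rounds *down* to the containing page boundary, not
// up to the next one.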

/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
//   Code from libcxx/include/__config
// Use a function-like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#  define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
#  define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#  define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#  define KMP_FALLTHROUGH() ((void)0)
#endif
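// Illustrative usage (a sketch; the case labels are hypothetical):
//
//   switch (kind) {
//   case KIND_STATIC:
//     setup_static();
//     KMP_FALLTHROUGH(); // intentional: static also needs the common path
//   case KIND_COMMON:
//     setup_common();
//     break;
//   }
//
// The function-like form forces a trailing semicolon, so the macro reads as a
// statement whether it expands to [[fallthrough]] or to ((void)0).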

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)            \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias     \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                    \
  __asm__(                                                                      \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t");                                        \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                 \
      api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
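// Illustrative sketch (an assumption about how entry points use these; the
// function name is hypothetical):
//
//   void KMP_EXPAND_NAME(omp_set_foo)(int arg) { /* implementation */ }
//   KMP_VERSION_SYMBOL(omp_set_foo, 30, "OMP_3.0");
//
// With KMP_USE_VERSION_SYMBOLS defined, this emits __kmp_api_omp_set_foo, a
// versioned alias omp_set_foo@OMP_3.0, and the default omp_set_foo@@VERSION;
// otherwise KMP_EXPAND_NAME is the identity and no aliases are produced.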

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN.  */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)
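// Illustrative sketch (a hypothetical struct, not a runtime type): placing
// independently-written hot fields on separate cache lines avoids false
// sharing between threads.
//
//   typedef struct example_counters {
//     KMP_ALIGN_CACHE volatile kmp_int32 enqueued; // own cache line
//     KMP_ALIGN_CACHE volatile kmp_int32 dequeued; // next cache line
//   } example_counters_t;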

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p)                                                 \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),               \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(
  volatile kmp_uint64 *p, kmp_uint64 cv, kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                                       __ATOMIC_SEQ_CST);
}
static inline bool mips_sync_val_compare_and_swap(
  volatile kmp_uint64 *p, kmp_uint64 cv, kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                                __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),\
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),\
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                              (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))
#endif

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||     \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a)    (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b)  (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a)    (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b)  (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
                              (kmp_int64)(b), (kmp_int64)(c))
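// Illustrative sketch (hypothetical flag): TCR_*/TCW_* mark shared accesses
// that are tolerated as plain loads and stores today; the SYNC variants are
// reserved for the release-consistency cases sketched in the FIXME above.
//
//   volatile kmp_int32 flag = 0;
//   while (TCR_4(flag) == 0) { /* spin; another thread does TCW_4(flag, 1) */
//   }
//   TCW_4(flag, 0); // reset for reuse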

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
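// Illustrative usage (hypothetical counter): the macros forward to the
// std::atomic member functions with an explicit memory order, e.g.
//
//   std::atomic<kmp_int32> nthreads(0);
//   kmp_int32 prev = KMP_ATOMIC_INC(&nthreads);    // fetch_add(1, acq_rel)
//   kmp_int32 seen = KMP_ATOMIC_LD_ACQ(&nthreads); // load(acquire)
//   KMP_ATOMIC_ST_REL(&nthreads, 0);               // store(0, release)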

// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
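// Illustrative sketch (hypothetical lock word): because "expected" is passed
// by value, a failed exchange cannot report the observed value back; callers
// needing it should use compare_exchange_strong directly.
//
//   std::atomic<kmp_int32> lock_word(0);
//   if (__kmp_atomic_compare_store_acq(&lock_word, 0, 1)) {
//     /* acquired; critical section */
//     KMP_ATOMIC_ST_REL(&lock_word, 0); // release
//   }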

#endif /* KMP_OS_H */
// Safe C API
#include "kmp_safe_c_api.h"