/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <atomic>
#include <stdarg.h>
#include <stdlib.h>
#include <string.h>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0
#define KMP_COMPILER_ICX 0

#if __INTEL_CLANG_COMPILER
#undef KMP_COMPILER_ICX
#define KMP_COMPILER_ICX 1
#elif defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS || KMP_OS_FREEBSD) && !KMP_OS_WASI
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

#if (KMP_OS_LINUX || (KMP_OS_FREEBSD && __FreeBSD_version >= 1301000))
#define KMP_HAVE_SCHED_GETCPU 1
#else
#define KMP_HAVE_SCHED_GETCPU 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks the __multc3/__divtc3 builtins needed for quad until
   NetBSD 10.0, which ships with GCC 10.5 */
#if (!KMP_OS_NETBSD || __GNUC__ >= 10)
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64 || KMP_ARCH_AARCH64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS || KMP_ARCH_WASM ||          \
    KMP_ARCH_PPC
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 ||                 \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 ||             \
    KMP_ARCH_VE || KMP_ARCH_S390X
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_WASM || KMP_ARCH_PPC
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

// stdarg handling
#if (KMP_ARCH_ARM || KMP_ARCH_X86_64 || KMP_ARCH_AARCH64 || KMP_ARCH_WASM) &&  \
    (KMP_OS_FREEBSD || KMP_OS_LINUX || KMP_OS_WASI)
typedef va_list *kmp_va_list;
#define kmp_va_deref(ap) (*(ap))
#define kmp_va_addr_of(ap) (&(ap))
#else
typedef va_list kmp_va_list;
#define kmp_va_deref(ap) (ap)
#define kmp_va_addr_of(ap) (ap)
#endif
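
/* Illustrative sketch, not part of the runtime API: on the ABIs selected
   above, kmp_va_list is a pointer to va_list, so a caller passes
   kmp_va_addr_of(ap) and the callee unwraps it with kmp_va_deref; on other
   targets the va_list is passed through directly. The function names below
   are hypothetical (assumes <stdio.h>):

     void __kmp_example_vlog(char const *fmt, kmp_va_list args) {
       vprintf(fmt, kmp_va_deref(args)); // deref yields a usable va_list
     }

     void __kmp_example_log(char const *fmt, ...) {
       va_list ap;
       va_start(ap, fmt);
       __kmp_example_vlog(fmt, kmp_va_addr_of(ap));
       va_end(ap);
     }
*/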

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug print specifications ( d, u, lld, llu ) and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
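// Illustrative sketch (hypothetical function; assumes <stdio.h>): templated
// debug code can splice traits_t<T>::spec, whose definitions live in the
// runtime sources, into a printf format at run time:
//
//   template <typename T> void __kmp_example_print(T v) {
//     char fmt[8];
//     snprintf(fmt, sizeof(fmt), "%%%s\n", traits_t<T>::spec);
//     printf(fmt, v); // "%d\n" for int, "%llu\n" for unsigned long long
//   }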
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus

#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

/* Check if the OS/arch can support user-level mwait */
// All mwait code tests for UMWAIT first, so it should only fall back to ring3
// MWAIT for KNL.
#define KMP_HAVE_MWAIT                                                         \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&    \
   !KMP_MIC2)
#define KMP_HAVE_UMWAIT                                                        \
  ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && (KMP_OS_LINUX || KMP_OS_WINDOWS) &&    \
   !KMP_MIC)

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))

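/* Illustrative worked example: with a 4096-byte page, PAGE_ALIGNED(0x2000)
   is true, PAGE_ALIGNED(0x2345) is false, and ALIGN_TO_PAGE(0x2345) yields
   (void *)0x2000. Both macros assume the page size is a power of two. */
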
/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define an attribute that indicates that the fall-through from the previous
// case label is intentional and should not be diagnosed by a compiler
//   Code from libcxx/include/__config
// Use a function-like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
#define KMP_FALLTHROUGH() [[fallthrough]]
// icc cannot properly tell that this attribute is absent, so force it off
#elif KMP_COMPILER_ICC
#define KMP_FALLTHROUGH() ((void)0)
#elif __has_cpp_attribute(clang::fallthrough)
#define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
#define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
#define KMP_FALLTHROUGH() ((void)0)
#endif
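
// Illustrative sketch: KMP_FALLTHROUGH() is placed where one case label
// deliberately runs into the next; the trailing semicolon is supplied by the
// caller because the macro is function-like. The helpers are hypothetical:
//
//   switch (step) {
//   case 0:
//     prepare();         // hypothetical helper
//     KMP_FALLTHROUGH(); // expands to [[fallthrough]] where supported
//   case 1:
//     run();             // hypothetical helper
//     break;
//   }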

#if KMP_HAVE_ATTRIBUTE_WAITPKG
#define KMP_ATTRIBUTE_TARGET_WAITPKG __attribute__((target("waitpkg")))
#else
#define KMP_ATTRIBUTE_TARGET_WAITPKG /* Nothing */
#endif

#if KMP_HAVE_ATTRIBUTE_RTM
#define KMP_ATTRIBUTE_TARGET_RTM __attribute__((target("rtm")))
#else
#define KMP_ATTRIBUTE_TARGET_RTM /* Nothing */
#endif

// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE && !KMP_DYNAMIC_LIB
#define KMP_WEAK_ATTRIBUTE_EXTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_EXTERNAL /* Nothing */
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE_INTERNAL __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE_INTERNAL /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)            \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias     \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                    \
  __asm__(                                                                      \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t");                                        \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                 \
      api_name) "@@" default_ver "\n\t")

#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str)         \
  _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num, ver_str,          \
                                 default_ver)                                    \
  __typeof__(__kmp_api_##apic_name) __kmp_api_##apic_name##_##ver_num##_alias    \
      __attribute__((alias(KMP_STR(__kmp_api_##apic_name))));                    \
  __asm__(".symver " KMP_STR(__kmp_api_##apic_name) "," KMP_STR(                 \
      apic_name) "@@" default_ver "\n\t");                                       \
  __asm__(                                                                       \
      ".symver " KMP_STR(__kmp_api_##apic_name##_##ver_num##_alias) "," KMP_STR( \
          api_name) "@" ver_str "\n\t")

#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#define KMP_VERSION_OMPC_SYMBOL(apic_name, api_name, ver_num,                  \
                                ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS
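
// Illustrative sketch (hypothetical arguments): with version symbols enabled,
//
//   KMP_VERSION_SYMBOL(omp_example, 30, "OMP_3.0");
//
// declares __kmp_api_omp_example_30_alias as an alias of __kmp_api_omp_example
// and emits .symver directives that bind omp_example@OMP_3.0 to the alias and
// omp_example@@VERSION (the default version) to __kmp_api_omp_example.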

/* Temporary note: if performance testing of this passes, we can remove
   all references to KMP_DO_ALIGN and replace with KMP_ALIGN.  */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS &&                                       \
    !((KMP_ARCH_AARCH64 || KMP_ARCH_ARM) &&                                    \
      (KMP_COMPILER_CLANG || KMP_COMPILER_GCC))

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#if !(KMP_COMPILER_ICX && KMP_32_BIT_ARCH)
#pragma intrinsic(InterlockedExchange64)
#endif
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))

extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

#if KMP_ARCH_AARCH64 && KMP_COMPILER_MSVC && !KMP_COMPILER_CLANG
#define KMP_TEST_THEN_INC64(p) _InterlockedExchangeAdd64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 1LL)
#define KMP_TEST_THEN_ADD4_64(p) _InterlockedExchangeAdd64((p), 4LL)
// #define KMP_TEST_THEN_ADD4_ACQ64(p) _InterlockedExchangeAdd64_acq((p), 4LL)
// #define KMP_TEST_THEN_DEC64(p) _InterlockedExchangeAdd64((p), -1LL)
// #define KMP_TEST_THEN_DEC_ACQ64(p) _InterlockedExchangeAdd64_acq((p), -1LL)
// #define KMP_TEST_THEN_ADD8(p, v) _InterlockedExchangeAdd8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) _InterlockedExchangeAdd64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store_acq8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store_rel8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store_acq16((p), (cv), (sv))
/*
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store_rel16((p), (cv), (sv))
*/
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store_acq32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store_rel32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store_acq64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store_rel64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store_ptr((void *volatile *)(p), (void *)(cv), (void *)(sv))

//  KMP_COMPARE_AND_STORE expects this order:       pointer, compare, exchange
// _InterlockedCompareExchange expects this order:  pointer, exchange, compare
// KMP_COMPARE_AND_STORE also returns a bool indicating a successful write. A
// write is successful if the return value of _InterlockedCompareExchange is the
// same as the compare value.
inline kmp_int8 __kmp_compare_and_store_acq8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_acq(p, sv, cv) == cv;
}

inline kmp_int8 __kmp_compare_and_store_rel8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv) {
  return _InterlockedCompareExchange8_rel(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_acq16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_acq(p, sv, cv) == cv;
}

inline kmp_int16 __kmp_compare_and_store_rel16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv) {
  return _InterlockedCompareExchange16_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_acq((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv) {
  return _InterlockedCompareExchange_rel((volatile long *)p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_acq64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_acq(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_rel64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv) {
  return _InterlockedCompareExchange64_rel(p, sv, cv) == cv;
}

inline kmp_int32 __kmp_compare_and_store_ptr(void *volatile *p, void *cv,
                                             void *sv) {
  return _InterlockedCompareExchangePointer(p, sv, cv) == cv;
}

// The _RET versions return the value instead of a bool

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  _InterlockedCompareExchange8((p), (sv), (cv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  _InterlockedCompareExchange16((p), (sv), (cv))

#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  _InterlockedCompareExchange64((volatile kmp_int64 *)(p), (kmp_int64)(sv),    \
                                (kmp_int64)(cv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  _InterlockedExchange8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) _InterlockedExchange16((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

inline kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      _InterlockedExchange64((volatile kmp_int64 *)p, *(kmp_int64 *)&v);
  return *(kmp_real64 *)&tmp;
}

#else // !KMP_ARCH_AARCH64

// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
#endif

#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p)                                                 \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),               \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline bool mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                  kmp_uint64 cv,
                                                  kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))
#endif

#if KMP_OS_DARWIN && defined(__INTEL_COMPILER) && __INTEL_COMPILER >= 1800
#define KMP_XCHG_FIXED8(p, v)                                                  \
  __atomic_exchange_1((volatile kmp_uint8 *)(p), (kmp_uint8)(v),               \
                      __ATOMIC_SEQ_CST)
#else
#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#endif
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  volatile kmp_uint32 *up;
  kmp_uint32 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int32 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real32 ftmp;
  memcpy(&ftmp, &tmp, sizeof(tmp));
  return ftmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  volatile kmp_uint64 *up;
  kmp_uint64 uv;
  memcpy(&up, &p, sizeof(up));
  memcpy(&uv, &v, sizeof(uv));
  kmp_int64 tmp = __sync_lock_test_and_set(up, uv);
  kmp_real64 dtmp;
  memcpy(&dtmp, &tmp, sizeof(tmp));
  return dtmp;
}

#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */

/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||     \
    KMP_ARCH_MIPS64 || KMP_ARCH_RISCV64 || KMP_ARCH_LOONGARCH64 ||             \
    KMP_ARCH_VE || KMP_ARCH_S390X || KMP_ARCH_PPC
#if KMP_OS_WINDOWS
#undef KMP_MB
#define KMP_MB() std::atomic_thread_fence(std::memory_order_seq_cst)
#else /* !KMP_OS_WINDOWS */
#define KMP_MB() __sync_synchronize()
#endif
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_MIC
// fence-style instructions do not exist, but lock; xaddl $0,(%rsp) can be used.
// We shouldn't need it, though, since the ABI rules require that
// * If the compiler generates NGO stores it also generates the fence
// * If users hand-code NGO stores they should insert the fence
// therefore no incomplete unordered stores should be visible.
#define KMP_MFENCE() /* Nothing */
#define KMP_SFENCE() /* Nothing */
#else
#if KMP_COMPILER_ICC || KMP_COMPILER_ICX
#define KMP_MFENCE_() _mm_mfence()
#define KMP_SFENCE_() _mm_sfence()
#elif KMP_COMPILER_MSVC
#define KMP_MFENCE_() MemoryBarrier()
#define KMP_SFENCE_() MemoryBarrier()
#else
#define KMP_MFENCE_() __sync_synchronize()
#define KMP_SFENCE_() __sync_synchronize()
#endif
#define KMP_MFENCE()                                                           \
  if (UNLIKELY(!__kmp_cpuinfo.initialized)) {                                  \
    __kmp_query_cpuid(&__kmp_cpuinfo);                                         \
  }                                                                            \
  if (__kmp_cpuinfo.flags.sse2) {                                              \
    KMP_MFENCE_();                                                             \
  }
#define KMP_SFENCE() KMP_SFENCE_()
#endif
#else
#define KMP_MFENCE() KMP_MB()
#define KMP_SFENCE() KMP_MB()
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a)    (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b)  (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a)    (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b)  (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
                              (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS || KMP_ARCH_WASM || KMP_ARCH_PPC
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Safe C API
#include "kmp_safe_c_api.h"

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)
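
// Illustrative sketch: with a std::atomic field these wrappers read as, e.g.,
//
//   std::atomic<kmp_int32> counter{0};
//   kmp_int32 old = KMP_ATOMIC_INC(&counter);    // fetch_add(1, acq_rel)
//   kmp_int32 cur = KMP_ATOMIC_LD_RLX(&counter); // load(relaxed)
//   KMP_ATOMIC_ST_REL(&counter, 0);              // store(0, release)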

// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
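
// Illustrative sketch: because "expected" is taken by value, a failed
// exchange does not write the observed value back into the caller's
// variable, unlike a raw compare_exchange_strong on a local lvalue:
//
//   std::atomic<kmp_int32> flag{0};
//   if (__kmp_atomic_compare_store(&flag, 0, 1)) {
//     // flag was 0 and is now 1; literals are fine for expected/desired
//   }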

// Symbol lookup on Linux/Windows
#if KMP_OS_WINDOWS
extern void *__kmp_lookup_symbol(const char *name, bool next = false);
#define KMP_DLSYM(name) __kmp_lookup_symbol(name)
#define KMP_DLSYM_NEXT(name) __kmp_lookup_symbol(name, true)
#elif KMP_OS_WASI
#define KMP_DLSYM(name) nullptr
#define KMP_DLSYM_NEXT(name) nullptr
#else
#define KMP_DLSYM(name) dlsym(RTLD_DEFAULT, name)
#define KMP_DLSYM_NEXT(name) dlsym(RTLD_NEXT, name)
#endif

#endif /* KMP_OS_H */