/*
 * kmp_os.h -- KPTS runtime header file.
 */

//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef KMP_OS_H
#define KMP_OS_H

#include "kmp_config.h"
#include <stdlib.h>
#include <atomic>

#define KMP_FTN_PLAIN 1
#define KMP_FTN_APPEND 2
#define KMP_FTN_UPPER 3
/*
#define KMP_FTN_PREPEND 4
#define KMP_FTN_UAPPEND 5
*/

#define KMP_PTR_SKIP (sizeof(void *))

/* -------------------------- Compiler variations ------------------------ */

#define KMP_OFF 0
#define KMP_ON 1

#define KMP_MEM_CONS_VOLATILE 0
#define KMP_MEM_CONS_FENCE 1

#ifndef KMP_MEM_CONS_MODEL
#define KMP_MEM_CONS_MODEL KMP_MEM_CONS_VOLATILE
#endif

#ifndef __has_cpp_attribute
#define __has_cpp_attribute(x) 0
#endif

#ifndef __has_attribute
#define __has_attribute(x) 0
#endif

/* ------------------------- Compiler recognition ---------------------- */
#define KMP_COMPILER_ICC 0
#define KMP_COMPILER_GCC 0
#define KMP_COMPILER_CLANG 0
#define KMP_COMPILER_MSVC 0

#if defined(__INTEL_COMPILER)
#undef KMP_COMPILER_ICC
#define KMP_COMPILER_ICC 1
#elif defined(__clang__)
#undef KMP_COMPILER_CLANG
#define KMP_COMPILER_CLANG 1
#elif defined(__GNUC__)
#undef KMP_COMPILER_GCC
#define KMP_COMPILER_GCC 1
#elif defined(_MSC_VER)
#undef KMP_COMPILER_MSVC
#define KMP_COMPILER_MSVC 1
#else
#error Unknown compiler
#endif

#if (KMP_OS_LINUX || KMP_OS_WINDOWS) && !KMP_OS_CNK
#define KMP_AFFINITY_SUPPORTED 1
#if KMP_OS_WINDOWS && KMP_ARCH_X86_64
#define KMP_GROUP_AFFINITY 1
#else
#define KMP_GROUP_AFFINITY 0
#endif
#else
#define KMP_AFFINITY_SUPPORTED 0
#define KMP_GROUP_AFFINITY 0
#endif

/* Check for quad-precision extension. */
#define KMP_HAVE_QUAD 0
#if KMP_ARCH_X86 || KMP_ARCH_X86_64
#if KMP_COMPILER_ICC
/* _Quad is already defined for icc */
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#elif KMP_COMPILER_CLANG
/* Clang doesn't support a software-implemented
   128-bit extended precision type yet */
typedef long double _Quad;
#elif KMP_COMPILER_GCC
/* GCC on NetBSD lacks __multc3/__divtc3 builtins needed for quad */
#if !KMP_OS_NETBSD
typedef __float128 _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#elif KMP_COMPILER_MSVC
typedef long double _Quad;
#endif
#else
#if __LDBL_MAX_EXP__ >= 16384 && KMP_COMPILER_GCC
typedef long double _Quad;
#undef KMP_HAVE_QUAD
#define KMP_HAVE_QUAD 1
#endif
#endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */

#define KMP_USE_X87CONTROL 0
#if KMP_OS_WINDOWS
#define KMP_END_OF_LINE "\r\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#ifndef KMP_STRUCT64
typedef __int64 kmp_int64;
typedef unsigned __int64 kmp_uint64;
#define KMP_INT64_SPEC "I64d"
#define KMP_UINT64_SPEC "I64u"
#else
struct kmp_struct64 {
  kmp_int32 a, b;
};
typedef struct kmp_struct64 kmp_int64;
typedef struct kmp_struct64 kmp_uint64;
/* Not sure what to use for KMP_[U]INT64_SPEC here */
#endif
#if KMP_ARCH_X86 && KMP_MSVC_COMPAT
#undef KMP_USE_X87CONTROL
#define KMP_USE_X87CONTROL 1
#endif
#if KMP_ARCH_X86_64
#define KMP_INTPTR 1
typedef __int64 kmp_intptr_t;
typedef unsigned __int64 kmp_uintptr_t;
#define KMP_INTPTR_SPEC "I64d"
#define KMP_UINTPTR_SPEC "I64u"
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_OS_UNIX
#define KMP_END_OF_LINE "\n"
typedef char kmp_int8;
typedef unsigned char kmp_uint8;
typedef short kmp_int16;
typedef unsigned short kmp_uint16;
typedef int kmp_int32;
typedef unsigned int kmp_uint32;
typedef long long kmp_int64;
typedef unsigned long long kmp_uint64;
#define KMP_INT32_SPEC "d"
#define KMP_UINT32_SPEC "u"
#define KMP_INT64_SPEC "lld"
#define KMP_UINT64_SPEC "llu"
#endif /* KMP_OS_UNIX */

#if KMP_ARCH_X86 || KMP_ARCH_ARM || KMP_ARCH_MIPS
#define KMP_SIZE_T_SPEC KMP_UINT32_SPEC
#elif KMP_ARCH_X86_64 || KMP_ARCH_PPC64 || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS64
#define KMP_SIZE_T_SPEC KMP_UINT64_SPEC
#else
#error "Can't determine size_t printf format specifier."
#endif
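// Illustrative note (not part of the original header): the *_SPEC strings are
// printf conversion suffixes, so they are pasted into a format string literal
// rather than passed as arguments. Assuming the typedefs above and <cstdio>,
// a correct use would look like:
//
//   kmp_int64 n = 42;
//   printf("n = %" KMP_INT64_SPEC "\n", n); // "%lld" on Unix, "%I64d" on Windows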
#if KMP_ARCH_X86
#define KMP_SIZE_T_MAX (0xFFFFFFFF)
#else
#define KMP_SIZE_T_MAX (0xFFFFFFFFFFFFFFFF)
#endif

typedef size_t kmp_size_t;
typedef float kmp_real32;
typedef double kmp_real64;

#ifndef KMP_INTPTR
#define KMP_INTPTR 1
typedef long kmp_intptr_t;
typedef unsigned long kmp_uintptr_t;
#define KMP_INTPTR_SPEC "ld"
#define KMP_UINTPTR_SPEC "lu"
#endif

#ifdef BUILD_I8
typedef kmp_int64 kmp_int;
typedef kmp_uint64 kmp_uint;
#else
typedef kmp_int32 kmp_int;
typedef kmp_uint32 kmp_uint;
#endif /* BUILD_I8 */
#define KMP_INT_MAX ((kmp_int32)0x7FFFFFFF)
#define KMP_INT_MIN ((kmp_int32)0x80000000)

#ifdef __cplusplus
// macros to cast out qualifiers and to re-interpret types
#define CCAST(type, var) const_cast<type>(var)
#define RCAST(type, var) reinterpret_cast<type>(var)
//-------------------------------------------------------------------------
// template for debug prints specification ( d, u, lld, llu ), and to obtain
// signed/unsigned flavors of a type
template <typename T> struct traits_t {};
// int
template <> struct traits_t<signed int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffff;
  static const signed_t min_value = 0x80000000;
  static const int type_size = sizeof(signed_t);
};
// unsigned int
template <> struct traits_t<unsigned int> {
  typedef signed int signed_t;
  typedef unsigned int unsigned_t;
  typedef double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffff;
  static const unsigned_t min_value = 0x00000000;
  static const int type_size = sizeof(unsigned_t);
};
// long
template <> struct traits_t<signed long> {
  typedef signed long signed_t;
  typedef unsigned long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const int type_size = sizeof(signed_t);
};
// long long
template <> struct traits_t<signed long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const signed_t max_value = 0x7fffffffffffffffLL;
  static const signed_t min_value = 0x8000000000000000LL;
  static const int type_size = sizeof(signed_t);
};
// unsigned long long
template <> struct traits_t<unsigned long long> {
  typedef signed long long signed_t;
  typedef unsigned long long unsigned_t;
  typedef long double floating_t;
  static char const *spec;
  static const unsigned_t max_value = 0xffffffffffffffffLL;
  static const unsigned_t min_value = 0x0000000000000000LL;
  static const int type_size = sizeof(unsigned_t);
};
//-------------------------------------------------------------------------
#else
#define CCAST(type, var) (type)(var)
#define RCAST(type, var) (type)(var)
#endif // __cplusplus
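// Illustrative sketch (not part of the original header): traits_t lets
// template code pick the right printf spec and the signed/unsigned flavor of
// its type parameter at compile time; the spec strings themselves are defined
// in one of the runtime's .cpp files. A hypothetical debug helper:
//
//   template <typename T> void __kmp_dump_value(T v) { // hypothetical name
//     char fmt[32];
//     snprintf(fmt, sizeof(fmt), "value = %%%s\n", traits_t<T>::spec);
//     printf(fmt, v); // requires <cstdio>
//   }
//
//   __kmp_dump_value<kmp_int64>(7); // formats with the "lld" spec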
#define KMP_EXPORT extern /* export declaration in guide libraries */

#if __GNUC__ >= 4 && !defined(__MINGW32__)
#define __forceinline __inline
#endif

#if KMP_OS_WINDOWS
#include <windows.h>

static inline int KMP_GET_PAGE_SIZE(void) {
  SYSTEM_INFO si;
  GetSystemInfo(&si);
  return si.dwPageSize;
}
#else
#define KMP_GET_PAGE_SIZE() getpagesize()
#endif

#define PAGE_ALIGNED(_addr)                                                    \
  (!((size_t)_addr & (size_t)(KMP_GET_PAGE_SIZE() - 1)))
#define ALIGN_TO_PAGE(x)                                                       \
  (void *)(((size_t)(x)) & ~((size_t)(KMP_GET_PAGE_SIZE() - 1)))

/* ---------- Support for cache alignment, padding, etc. ----------------*/

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

#define INTERNODE_CACHE_LINE 4096 /* for multi-node systems */

/* Define the default size of the cache line */
#ifndef CACHE_LINE
#define CACHE_LINE 128 /* cache line size in bytes */
#else
#if (CACHE_LINE < 64) && !defined(KMP_OS_DARWIN)
// 2006-02-13: This produces too many warnings on OS X*. Disable for now
#warning CACHE_LINE is too small.
#endif
#endif /* CACHE_LINE */

#define KMP_CACHE_PREFETCH(ADDR) /* nothing */

// Define attribute that indicates that the fall through from the previous
// case label is intentional and should not be diagnosed by a compiler
// Code from libcxx/include/__config
// Use a function like macro to imply that it must be followed by a semicolon
#if __cplusplus > 201402L && __has_cpp_attribute(fallthrough)
# define KMP_FALLTHROUGH() [[fallthrough]]
#elif __has_cpp_attribute(clang::fallthrough)
# define KMP_FALLTHROUGH() [[clang::fallthrough]]
#elif __has_attribute(fallthrough) || __GNUC__ >= 7
# define KMP_FALLTHROUGH() __attribute__((__fallthrough__))
#else
# define KMP_FALLTHROUGH() ((void)0)
#endif
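// Illustrative use (not part of the original header): KMP_FALLTHROUGH()
// replaces a bare fall-through between case labels and, being a function-like
// macro, takes the trailing semicolon; do_one/do_two are hypothetical:
//
//   switch (kind) {
//   case 1:
//     do_one();
//     KMP_FALLTHROUGH();
//   case 2:
//     do_two();
//     break;
//   }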
// Define attribute that indicates a function does not return
#if __cplusplus >= 201103L
#define KMP_NORETURN [[noreturn]]
#elif KMP_OS_WINDOWS
#define KMP_NORETURN __declspec(noreturn)
#else
#define KMP_NORETURN __attribute__((noreturn))
#endif

#if KMP_OS_WINDOWS && KMP_MSVC_COMPAT
#define KMP_ALIGN(bytes) __declspec(align(bytes))
#define KMP_THREAD_LOCAL __declspec(thread)
#define KMP_ALIAS /* Nothing */
#else
#define KMP_ALIGN(bytes) __attribute__((aligned(bytes)))
#define KMP_THREAD_LOCAL __thread
#define KMP_ALIAS(alias_of) __attribute__((alias(alias_of)))
#endif

#if KMP_HAVE_WEAK_ATTRIBUTE
#define KMP_WEAK_ATTRIBUTE __attribute__((weak))
#else
#define KMP_WEAK_ATTRIBUTE /* Nothing */
#endif

// Define KMP_VERSION_SYMBOL and KMP_EXPAND_NAME
#ifndef KMP_STR
#define KMP_STR(x) _KMP_STR(x)
#define _KMP_STR(x) #x
#endif

#ifdef KMP_USE_VERSION_SYMBOLS
// If using versioned symbols, KMP_EXPAND_NAME prepends
// __kmp_api_ to the real API name
#define KMP_EXPAND_NAME(api_name) _KMP_EXPAND_NAME(api_name)
#define _KMP_EXPAND_NAME(api_name) __kmp_api_##api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str)                         \
  _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, "VERSION")
#define _KMP_VERSION_SYMBOL(api_name, ver_num, ver_str, default_ver)           \
  __typeof__(__kmp_api_##api_name) __kmp_api_##api_name##_##ver_num##_alias    \
      __attribute__((alias(KMP_STR(__kmp_api_##api_name))));                   \
  __asm__(                                                                     \
      ".symver " KMP_STR(__kmp_api_##api_name##_##ver_num##_alias) "," KMP_STR(\
          api_name) "@" ver_str "\n\t");                                       \
  __asm__(".symver " KMP_STR(__kmp_api_##api_name) "," KMP_STR(                \
      api_name) "@@" default_ver "\n\t")
#else // KMP_USE_VERSION_SYMBOLS
#define KMP_EXPAND_NAME(api_name) api_name
#define KMP_VERSION_SYMBOL(api_name, ver_num, ver_str) /* Nothing */
#endif // KMP_USE_VERSION_SYMBOLS

/* Temporary note: if performance testing of this passes, we can remove all
   references to KMP_DO_ALIGN and replace with KMP_ALIGN. */
#define KMP_DO_ALIGN(bytes) KMP_ALIGN(bytes)
#define KMP_ALIGN_CACHE KMP_ALIGN(CACHE_LINE)
#define KMP_ALIGN_CACHE_INTERNODE KMP_ALIGN(INTERNODE_CACHE_LINE)

/* General purpose fence types for memory operations */
enum kmp_mem_fence_type {
  kmp_no_fence, /* No memory fence */
  kmp_acquire_fence, /* Acquire (read) memory fence */
  kmp_release_fence, /* Release (write) memory fence */
  kmp_full_fence /* Full (read+write) memory fence */
};

// Synchronization primitives

#if KMP_ASM_INTRINS && KMP_OS_WINDOWS

#if KMP_MSVC_COMPAT && !KMP_COMPILER_CLANG
#pragma intrinsic(InterlockedExchangeAdd)
#pragma intrinsic(InterlockedCompareExchange)
#pragma intrinsic(InterlockedExchange)
#pragma intrinsic(InterlockedExchange64)
#endif

// Using InterlockedIncrement / InterlockedDecrement causes a library loading
// ordering problem, so we use InterlockedExchangeAdd instead.
#define KMP_TEST_THEN_INC32(p) InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), 1)
#define KMP_TEST_THEN_ADD4_32(p) InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  InterlockedExchangeAdd((volatile long *)(p), 4)
#define KMP_TEST_THEN_DEC32(p) InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  InterlockedExchangeAdd((volatile long *)(p), -1)
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  InterlockedExchangeAdd((volatile long *)(p), (v))

#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  InterlockedCompareExchange((volatile long *)(p), (long)(sv), (long)(cv))

#define KMP_XCHG_FIXED32(p, v)                                                 \
  InterlockedExchange((volatile long *)(p), (long)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  InterlockedExchange64((volatile kmp_int64 *)(p), (kmp_int64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp = InterlockedExchange((volatile long *)p, *(long *)&v);
  return *(kmp_real32 *)&tmp;
}
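// Note (added for clarity, not in the original): InterlockedCompareExchange
// takes (destination, exchange, comparand) and returns the value previously
// held at the destination, which is why KMP_COMPARE_AND_STORE_RET32 above
// passes (sv) before (cv). The exchange succeeded when the result equals the
// comparand:
//
//   long old = KMP_COMPARE_AND_STORE_RET32(&flag, 0, 1); // flag: hypothetical
//   if (old == 0) { /* this thread performed the 0 -> 1 transition */ }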
// Routines that we still need to implement in assembly.
extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

//#define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32((p), 1)
//#define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32((p), 1)
#define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64((p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64((p), 1LL)
//#define KMP_TEST_THEN_ADD4_32(p) __kmp_test_then_add32((p), 4)
//#define KMP_TEST_THEN_ADD4_ACQ32(p) __kmp_test_then_add32((p), 4)
#define KMP_TEST_THEN_ADD4_64(p) __kmp_test_then_add64((p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p) __kmp_test_then_add64((p), 4LL)
//#define KMP_TEST_THEN_DEC32(p) __kmp_test_then_add32((p), -1)
//#define KMP_TEST_THEN_DEC_ACQ32(p) __kmp_test_then_add32((p), -1)
#define KMP_TEST_THEN_DEC64(p) __kmp_test_then_add64((p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p) __kmp_test_then_add64((p), -1LL)
//#define KMP_TEST_THEN_ADD32(p, v) __kmp_test_then_add32((p), (v))
#define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8((p), (v))
#define KMP_TEST_THEN_ADD64(p, v) __kmp_test_then_add64((p), (v))

#define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8((p), (v))
#define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8((p), (v))
#define KMP_TEST_THEN_OR32(p, v) __kmp_test_then_or32((p), (v))
#define KMP_TEST_THEN_AND32(p, v) __kmp_test_then_and32((p), (v))
#define KMP_TEST_THEN_OR64(p, v) __kmp_test_then_or64((p), (v))
#define KMP_TEST_THEN_AND64(p, v) __kmp_test_then_and64((p), (v))
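// Note (added for clarity, not in the original): the KMP_TEST_THEN_* macros
// have fetch-and-op semantics -- they return the value the location held
// *before* the update. The KMP_COMPARE_AND_STORE_{ACQ,REL}* macros below
// return nonzero on a successful exchange, while the *_RET variants return
// the previous value instead.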
#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
//#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
//#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
//#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));
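// Note (added for clarity, not in the original): on this Windows branch the
// _ACQ and _REL variants deliberately map to the same routine; x86
// interlocked operations act as full barriers, so the suffixes only document
// the ordering each call site relies on.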
#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)

/* cast p to correct type so that proper intrinsic will be used */
#define KMP_TEST_THEN_INC32(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 1)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_INC64(p)                                                 \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_INC64(p)                                                 \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), 4)
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __atomic_fetch_add((volatile kmp_int64 *)(p), 4LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __atomic_fetch_sub((volatile kmp_int64 *)(p), 1LL, __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int64 *)(p), 1LL)
#endif
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __sync_fetch_and_sub((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __sync_fetch_and_add((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int32 *)(p), (kmp_int32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __atomic_fetch_add((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __sync_fetch_and_add((volatile kmp_int64 *)(p), (kmp_int64)(v))
#endif

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __sync_fetch_and_or((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __sync_fetch_and_and((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#if KMP_ARCH_MIPS
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __atomic_fetch_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v),               \
                    __ATOMIC_SEQ_CST)
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __atomic_fetch_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v),              \
                     __ATOMIC_SEQ_CST)
#else
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __sync_fetch_and_or((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __sync_fetch_and_and((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#endif

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __sync_bool_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),     \
                               (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),   \
                               (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),   \
                               (kmp_uint32)(sv))
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __sync_bool_compare_and_swap((void *volatile *)(p), (void *)(cv),            \
                               (void *)(sv))

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __sync_val_compare_and_swap((volatile kmp_uint8 *)(p), (kmp_uint8)(cv),      \
                              (kmp_uint8)(sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint16 *)(p), (kmp_uint16)(cv),    \
                              (kmp_uint16)(sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint32 *)(p), (kmp_uint32)(cv),    \
                              (kmp_uint32)(sv))
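// Note (added for clarity, not in the original; the rationale is an
// assumption): on MIPS the 64-bit operations use the __atomic_* builtins
// rather than __sync_*, presumably because 32-bit MIPS has no native 8-byte
// __sync support while the __atomic_* forms can fall back to libatomic. The
// helpers below wrap __atomic_compare_exchange so that 64-bit
// compare-and-swap keeps the same bool-/value-returning shape as the __sync
// builtins used elsewhere.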
#if KMP_ARCH_MIPS
static inline bool mips_sync_bool_compare_and_swap(volatile kmp_uint64 *p,
                                                   kmp_uint64 cv,
                                                   kmp_uint64 sv) {
  return __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                                   __ATOMIC_SEQ_CST);
}
static inline bool mips_sync_val_compare_and_swap(volatile kmp_uint64 *p,
                                                  kmp_uint64 cv,
                                                  kmp_uint64 sv) {
  __atomic_compare_exchange(p, &cv, &sv, false, __ATOMIC_SEQ_CST,
                            __ATOMIC_SEQ_CST);
  return cv;
}
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  mips_sync_bool_compare_and_swap((volatile kmp_uint64 *)(p),                  \
                                  (kmp_uint64)(cv), (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  mips_sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv), \
                                 (kmp_uint64)(sv))
#else
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __sync_bool_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),   \
                               (kmp_uint64)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __sync_val_compare_and_swap((volatile kmp_uint64 *)(p), (kmp_uint64)(cv),    \
                              (kmp_uint64)(sv))
#endif

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __sync_lock_test_and_set((volatile kmp_uint8 *)(p), (kmp_uint8)(v))
#define KMP_XCHG_FIXED16(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint16 *)(p), (kmp_uint16)(v))
#define KMP_XCHG_FIXED32(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_XCHG_FIXED64(p, v)                                                 \
  __sync_lock_test_and_set((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

inline kmp_real32 KMP_XCHG_REAL32(volatile kmp_real32 *p, kmp_real32 v) {
  kmp_int32 tmp =
      __sync_lock_test_and_set((volatile kmp_uint32 *)(p), *(kmp_uint32 *)&v);
  return *(kmp_real32 *)&tmp;
}

inline kmp_real64 KMP_XCHG_REAL64(volatile kmp_real64 *p, kmp_real64 v) {
  kmp_int64 tmp =
      __sync_lock_test_and_set((volatile kmp_uint64 *)(p), *(kmp_uint64 *)&v);
  return *(kmp_real64 *)&tmp;
}
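// Note (added for clarity, not in the original): the two KMP_XCHG_REAL*
// helpers exchange a float/double bitwise through the same-width integer
// exchange and reinterpret the old bits back, so no numeric conversion takes
// place -- only the bit pattern moves.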
#else

extern kmp_int8 __kmp_test_then_add8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_or8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int8 __kmp_test_then_and8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int32 __kmp_test_then_add32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_uint32 __kmp_test_then_or32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_uint32 __kmp_test_then_and32(volatile kmp_uint32 *p, kmp_uint32 v);
extern kmp_int64 __kmp_test_then_add64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_uint64 __kmp_test_then_or64(volatile kmp_uint64 *p, kmp_uint64 v);
extern kmp_uint64 __kmp_test_then_and64(volatile kmp_uint64 *p, kmp_uint64 v);

extern kmp_int8 __kmp_compare_and_store8(volatile kmp_int8 *p, kmp_int8 cv,
                                         kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store16(volatile kmp_int16 *p, kmp_int16 cv,
                                           kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store32(volatile kmp_int32 *p, kmp_int32 cv,
                                           kmp_int32 sv);
extern kmp_int32 __kmp_compare_and_store64(volatile kmp_int64 *p, kmp_int64 cv,
                                           kmp_int64 sv);
extern kmp_int8 __kmp_compare_and_store_ret8(volatile kmp_int8 *p, kmp_int8 cv,
                                             kmp_int8 sv);
extern kmp_int16 __kmp_compare_and_store_ret16(volatile kmp_int16 *p,
                                               kmp_int16 cv, kmp_int16 sv);
extern kmp_int32 __kmp_compare_and_store_ret32(volatile kmp_int32 *p,
                                               kmp_int32 cv, kmp_int32 sv);
extern kmp_int64 __kmp_compare_and_store_ret64(volatile kmp_int64 *p,
                                               kmp_int64 cv, kmp_int64 sv);

extern kmp_int8 __kmp_xchg_fixed8(volatile kmp_int8 *p, kmp_int8 v);
extern kmp_int16 __kmp_xchg_fixed16(volatile kmp_int16 *p, kmp_int16 v);
extern kmp_int32 __kmp_xchg_fixed32(volatile kmp_int32 *p, kmp_int32 v);
extern kmp_int64 __kmp_xchg_fixed64(volatile kmp_int64 *p, kmp_int64 v);
extern kmp_real32 __kmp_xchg_real32(volatile kmp_real32 *p, kmp_real32 v);
extern kmp_real64 __kmp_xchg_real64(volatile kmp_real64 *p, kmp_real64 v);

#define KMP_TEST_THEN_INC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 1)
#define KMP_TEST_THEN_INC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_INC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 1LL)
#define KMP_TEST_THEN_ADD4_32(p)                                               \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_ACQ32(p)                                            \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), 4)
#define KMP_TEST_THEN_ADD4_64(p)                                               \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_ADD4_ACQ64(p)                                            \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), 4LL)
#define KMP_TEST_THEN_DEC32(p)                                                 \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC_ACQ32(p)                                             \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), -1)
#define KMP_TEST_THEN_DEC64(p)                                                 \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_DEC_ACQ64(p)                                             \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), -1LL)
#define KMP_TEST_THEN_ADD8(p, v)                                               \
  __kmp_test_then_add8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_ADD32(p, v)                                              \
  __kmp_test_then_add32((volatile kmp_int32 *)(p), (kmp_int32)(v))
#define KMP_TEST_THEN_ADD64(p, v)                                              \
  __kmp_test_then_add64((volatile kmp_int64 *)(p), (kmp_int64)(v))

#define KMP_TEST_THEN_OR8(p, v)                                                \
  __kmp_test_then_or8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_AND8(p, v)                                               \
  __kmp_test_then_and8((volatile kmp_int8 *)(p), (kmp_int8)(v))
#define KMP_TEST_THEN_OR32(p, v)                                               \
  __kmp_test_then_or32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_AND32(p, v)                                              \
  __kmp_test_then_and32((volatile kmp_uint32 *)(p), (kmp_uint32)(v))
#define KMP_TEST_THEN_OR64(p, v)                                               \
  __kmp_test_then_or64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))
#define KMP_TEST_THEN_AND64(p, v)                                              \
  __kmp_test_then_and64((volatile kmp_uint64 *)(p), (kmp_uint64)(v))

#define KMP_COMPARE_AND_STORE_ACQ8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_REL8(p, cv, sv)                                  \
  __kmp_compare_and_store8((volatile kmp_int8 *)(p), (kmp_int8)(cv),           \
                           (kmp_int8)(sv))
#define KMP_COMPARE_AND_STORE_ACQ16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_REL16(p, cv, sv)                                 \
  __kmp_compare_and_store16((volatile kmp_int16 *)(p), (kmp_int16)(cv),        \
                            (kmp_int16)(sv))
#define KMP_COMPARE_AND_STORE_ACQ32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_REL32(p, cv, sv)                                 \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_ACQ64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#define KMP_COMPARE_AND_STORE_REL64(p, cv, sv)                                 \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))

#if KMP_ARCH_X86
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store32((volatile kmp_int32 *)(p), (kmp_int32)(cv),        \
                            (kmp_int32)(sv))
#else /* 64 bit pointers */
#define KMP_COMPARE_AND_STORE_PTR(p, cv, sv)                                   \
  __kmp_compare_and_store64((volatile kmp_int64 *)(p), (kmp_int64)(cv),        \
                            (kmp_int64)(sv))
#endif /* KMP_ARCH_X86 */

#define KMP_COMPARE_AND_STORE_RET8(p, cv, sv)                                  \
  __kmp_compare_and_store_ret8((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET16(p, cv, sv)                                 \
  __kmp_compare_and_store_ret16((p), (cv), (sv))
#define KMP_COMPARE_AND_STORE_RET32(p, cv, sv)                                 \
  __kmp_compare_and_store_ret32((volatile kmp_int32 *)(p), (kmp_int32)(cv),    \
                                (kmp_int32)(sv))
#define KMP_COMPARE_AND_STORE_RET64(p, cv, sv)                                 \
  __kmp_compare_and_store_ret64((volatile kmp_int64 *)(p), (kmp_int64)(cv),    \
                                (kmp_int64)(sv))

#define KMP_XCHG_FIXED8(p, v)                                                  \
  __kmp_xchg_fixed8((volatile kmp_int8 *)(p), (kmp_int8)(v));
#define KMP_XCHG_FIXED16(p, v) __kmp_xchg_fixed16((p), (v));
#define KMP_XCHG_FIXED32(p, v) __kmp_xchg_fixed32((p), (v));
#define KMP_XCHG_FIXED64(p, v) __kmp_xchg_fixed64((p), (v));
#define KMP_XCHG_REAL32(p, v) __kmp_xchg_real32((p), (v));
#define KMP_XCHG_REAL64(p, v) __kmp_xchg_real64((p), (v));

#endif /* KMP_ASM_INTRINS */
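// Illustrative sketch (not part of the original header): whichever branch
// above was selected, a caller can claim a slot with the bool-style
// compare-and-store, e.g.:
//
//   kmp_int32 claimed = 0; // hypothetical shared flag
//   if (KMP_COMPARE_AND_STORE_ACQ32(&claimed, 0, 1)) {
//     /* exactly one thread gets here; the others see the macro return 0 */
//   }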
/* ------------- relaxed consistency memory model stuff ------------------ */

#if KMP_OS_WINDOWS
#ifdef __ABSOFT_WIN
#define KMP_MB() asm("nop")
#define KMP_IMB() asm("nop")
#else
#define KMP_MB() /* _asm{ nop } */
#define KMP_IMB() /* _asm{ nop } */
#endif
#endif /* KMP_OS_WINDOWS */

#if KMP_ARCH_PPC64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64 || KMP_ARCH_MIPS ||     \
    KMP_ARCH_MIPS64
#define KMP_MB() __sync_synchronize()
#endif

#ifndef KMP_MB
#define KMP_MB() /* nothing to do */
#endif

#ifndef KMP_IMB
#define KMP_IMB() /* nothing to do */
#endif

#ifndef KMP_ST_REL32
#define KMP_ST_REL32(A, D) (*(A) = (D))
#endif

#ifndef KMP_ST_REL64
#define KMP_ST_REL64(A, D) (*(A) = (D))
#endif

#ifndef KMP_LD_ACQ32
#define KMP_LD_ACQ32(A) (*(A))
#endif

#ifndef KMP_LD_ACQ64
#define KMP_LD_ACQ64(A) (*(A))
#endif

/* ------------------------------------------------------------------------ */
// FIXME - maybe this should be
//
// #define TCR_4(a) (*(volatile kmp_int32 *)(&a))
// #define TCW_4(a,b) (a) = (*(volatile kmp_int32 *)&(b))
//
// #define TCR_8(a) (*(volatile kmp_int64 *)(a))
// #define TCW_8(a,b) (a) = (*(volatile kmp_int64 *)(&b))
//
// I'm fairly certain this is the correct thing to do, but I'm afraid
// of performance regressions.

#define TCR_1(a) (a)
#define TCW_1(a, b) (a) = (b)
#define TCR_4(a) (a)
#define TCW_4(a, b) (a) = (b)
#define TCI_4(a) (++(a))
#define TCD_4(a) (--(a))
#define TCR_8(a) (a)
#define TCW_8(a, b) (a) = (b)
#define TCI_8(a) (++(a))
#define TCD_8(a) (--(a))
#define TCR_SYNC_4(a) (a)
#define TCW_SYNC_4(a, b) (a) = (b)
#define TCX_SYNC_4(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL32((volatile kmp_int32 *)(volatile void *)&(a),     \
                              (kmp_int32)(b), (kmp_int32)(c))
#define TCR_SYNC_8(a) (a)
#define TCW_SYNC_8(a, b) (a) = (b)
#define TCX_SYNC_8(a, b, c)                                                    \
  KMP_COMPARE_AND_STORE_REL64((volatile kmp_int64 *)(volatile void *)&(a),     \
                              (kmp_int64)(b), (kmp_int64)(c))

#if KMP_ARCH_X86 || KMP_ARCH_MIPS
// What about ARM?
#define TCR_PTR(a) ((void *)TCR_4(a))
#define TCW_PTR(a, b) TCW_4((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_4(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_4((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_4((a), (b), (c)))

#else /* 64 bit pointers */

#define TCR_PTR(a) ((void *)TCR_8(a))
#define TCW_PTR(a, b) TCW_8((a), (b))
#define TCR_SYNC_PTR(a) ((void *)TCR_SYNC_8(a))
#define TCW_SYNC_PTR(a, b) TCW_SYNC_8((a), (b))
#define TCX_SYNC_PTR(a, b, c) ((void *)TCX_SYNC_8((a), (b), (c)))

#endif /* KMP_ARCH_X86 */

/* If these FTN_{TRUE,FALSE} values change, may need to change several places
   where they are used to check that language is Fortran, not C. */

#ifndef FTN_TRUE
#define FTN_TRUE TRUE
#endif

#ifndef FTN_FALSE
#define FTN_FALSE FALSE
#endif

typedef void (*microtask_t)(int *gtid, int *npr, ...);

#ifdef USE_VOLATILE_CAST
#define VOLATILE_CAST(x) (volatile x)
#else
#define VOLATILE_CAST(x) (x)
#endif

#define KMP_WAIT __kmp_wait_4
#define KMP_WAIT_PTR __kmp_wait_4_ptr
#define KMP_EQ __kmp_eq_4
#define KMP_NEQ __kmp_neq_4
#define KMP_LT __kmp_lt_4
#define KMP_GE __kmp_ge_4
#define KMP_LE __kmp_le_4

/* Workaround for Intel(R) 64 code gen bug when taking address of static array
 * (Intel(R) 64 Tracker #138) */
#if (KMP_ARCH_X86_64 || KMP_ARCH_PPC64) && KMP_OS_LINUX
#define STATIC_EFI2_WORKAROUND
#else
#define STATIC_EFI2_WORKAROUND static
#endif

// Support of BGET usage
#ifndef KMP_USE_BGET
#define KMP_USE_BGET 1
#endif

// Switches for OSS builds
#ifndef USE_CMPXCHG_FIX
#define USE_CMPXCHG_FIX 1
#endif

// Enable dynamic user lock
#define KMP_USE_DYNAMIC_LOCK 1

// Enable Intel(R) Transactional Synchronization Extensions (Intel(R) TSX) if
// dynamic user lock is turned on
#if KMP_USE_DYNAMIC_LOCK
// Visual Studio can't handle the asm sections in this code
#define KMP_USE_TSX ((KMP_ARCH_X86 || KMP_ARCH_X86_64) && !KMP_COMPILER_MSVC)
#ifdef KMP_USE_ADAPTIVE_LOCKS
#undef KMP_USE_ADAPTIVE_LOCKS
#endif
#define KMP_USE_ADAPTIVE_LOCKS KMP_USE_TSX
#endif

// Enable tick time conversion of ticks to seconds
#if KMP_STATS_ENABLED
#define KMP_HAVE_TICK_TIME                                                     \
  (KMP_OS_LINUX && (KMP_MIC || KMP_ARCH_X86 || KMP_ARCH_X86_64))
#endif

// Warning levels
enum kmp_warnings_level {
  kmp_warnings_off = 0, /* No warnings */
  kmp_warnings_low, /* Minimal warnings (default) */
  kmp_warnings_explicit = 6, /* Explicitly set to ON - more warnings */
  kmp_warnings_verbose /* reserved */
};

#ifdef __cplusplus
} // extern "C"
#endif // __cplusplus

// Macros for C++11 atomic functions
#define KMP_ATOMIC_LD(p, order) (p)->load(std::memory_order_##order)
#define KMP_ATOMIC_OP(op, p, v, order) (p)->op(v, std::memory_order_##order)

// For non-default load/store
#define KMP_ATOMIC_LD_ACQ(p) KMP_ATOMIC_LD(p, acquire)
#define KMP_ATOMIC_LD_RLX(p) KMP_ATOMIC_LD(p, relaxed)
#define KMP_ATOMIC_ST_REL(p, v) KMP_ATOMIC_OP(store, p, v, release)
#define KMP_ATOMIC_ST_RLX(p, v) KMP_ATOMIC_OP(store, p, v, relaxed)

// For non-default fetch_<op>
#define KMP_ATOMIC_ADD(p, v) KMP_ATOMIC_OP(fetch_add, p, v, acq_rel)
#define KMP_ATOMIC_SUB(p, v) KMP_ATOMIC_OP(fetch_sub, p, v, acq_rel)
#define KMP_ATOMIC_AND(p, v) KMP_ATOMIC_OP(fetch_and, p, v, acq_rel)
#define KMP_ATOMIC_OR(p, v) KMP_ATOMIC_OP(fetch_or, p, v, acq_rel)
#define KMP_ATOMIC_INC(p) KMP_ATOMIC_OP(fetch_add, p, 1, acq_rel)
#define KMP_ATOMIC_DEC(p) KMP_ATOMIC_OP(fetch_sub, p, 1, acq_rel)
#define KMP_ATOMIC_ADD_RLX(p, v) KMP_ATOMIC_OP(fetch_add, p, v, relaxed)
#define KMP_ATOMIC_INC_RLX(p) KMP_ATOMIC_OP(fetch_add, p, 1, relaxed)

// Callers of the following functions cannot see the side effect on "expected".
template <typename T>
bool __kmp_atomic_compare_store(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acq_rel, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_acq(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_acquire, std::memory_order_relaxed);
}

template <typename T>
bool __kmp_atomic_compare_store_rel(std::atomic<T> *p, T expected, T desired) {
  return p->compare_exchange_strong(
      expected, desired, std::memory_order_release, std::memory_order_relaxed);
}
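// Illustrative sketch (not part of the original header): the wrappers above
// take "expected" by value, so a failed exchange does not write the observed
// value back to the caller, unlike a raw compare_exchange_strong:
//
//   std::atomic<kmp_int32> gate(0);            // hypothetical variable
//   if (__kmp_atomic_compare_store(&gate, 0, 1)) {
//     KMP_ATOMIC_ST_REL(&gate, 2);             // release-store via the macro
//   }
//   kmp_int32 seen = KMP_ATOMIC_LD_ACQ(&gate); // acquire-load via the macro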
#endif /* KMP_OS_H */
// Safe C API
#include "kmp_safe_c_api.h"