//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ATOMIC_MSVC_H
#define SANITIZER_ATOMIC_MSVC_H

extern "C" void _ReadWriteBarrier();
#pragma intrinsic(_ReadWriteBarrier)
extern "C" void _mm_mfence();
#pragma intrinsic(_mm_mfence)
extern "C" void _mm_pause();
#pragma intrinsic(_mm_pause)
extern "C" char _InterlockedExchange8(  // NOLINT
    char volatile *Addend, char Value);  // NOLINT
#pragma intrinsic(_InterlockedExchange8)
extern "C" short _InterlockedExchange16(  // NOLINT
    short volatile *Addend, short Value);  // NOLINT
#pragma intrinsic(_InterlockedExchange16)
extern "C" long _InterlockedExchange(  // NOLINT
    long volatile *Addend, long Value);  // NOLINT
#pragma intrinsic(_InterlockedExchange)
extern "C" long _InterlockedExchangeAdd(  // NOLINT
    long volatile *Addend, long Value);  // NOLINT
#pragma intrinsic(_InterlockedExchangeAdd)
extern "C" char _InterlockedCompareExchange8(  // NOLINT
    char volatile *Destination,  // NOLINT
    char Exchange, char Comparand);  // NOLINT
#pragma intrinsic(_InterlockedCompareExchange8)
extern "C" short _InterlockedCompareExchange16(  // NOLINT
    short volatile *Destination,  // NOLINT
    short Exchange, short Comparand);  // NOLINT
#pragma intrinsic(_InterlockedCompareExchange16)
extern "C"
long long _InterlockedCompareExchange64(  // NOLINT
    long long volatile *Destination,  // NOLINT
    long long Exchange, long long Comparand);  // NOLINT
#pragma intrinsic(_InterlockedCompareExchange64)
extern "C" void *_InterlockedCompareExchangePointer(
    void *volatile *Destination,
    void *Exchange, void *Comparand);
#pragma intrinsic(_InterlockedCompareExchangePointer)
extern "C"
long __cdecl _InterlockedCompareExchange(  // NOLINT
    long volatile *Destination,  // NOLINT
    long Exchange, long Comparand);  // NOLINT
#pragma intrinsic(_InterlockedCompareExchange)

#ifdef _WIN64
extern "C" long long _InterlockedExchangeAdd64(  // NOLINT
    long long volatile *Addend, long long Value);  // NOLINT
#pragma intrinsic(_InterlockedExchangeAdd64)
#endif

namespace __sanitizer {

// _ReadWriteBarrier is a compiler-only barrier: it prevents the compiler
// from reordering memory accesses across it but emits no instructions.
INLINE void atomic_signal_fence(memory_order) {
  _ReadWriteBarrier();
}

INLINE void atomic_thread_fence(memory_order) {
  _mm_mfence();
}

INLINE void proc_yield(int cnt) {
  for (int i = 0; i < cnt; i++)
    _mm_pause();
}

template<typename T>
INLINE typename T::Type atomic_load(
    const volatile T *a, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_consume
      | memory_order_acquire | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  typename T::Type v;
  // FIXME(dvyukov): 64-bit load is not atomic on 32-bits.
  if (mo == memory_order_relaxed) {
    v = a->val_dont_use;
  } else {
    // On x86 an aligned load already has acquire semantics; the signal
    // fences only prevent compile-time reordering around the access.
    atomic_signal_fence(memory_order_seq_cst);
    v = a->val_dont_use;
    atomic_signal_fence(memory_order_seq_cst);
  }
  return v;
}

template<typename T>
INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_release
      | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  // FIXME(dvyukov): 64-bit store is not atomic on 32-bits.
  if (mo == memory_order_relaxed) {
    a->val_dont_use = v;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    a->val_dont_use = v;
    atomic_signal_fence(memory_order_seq_cst);
  }
  if (mo == memory_order_seq_cst)
    atomic_thread_fence(memory_order_seq_cst);
}

INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
    u32 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u32)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, (long)v);  // NOLINT
}

INLINE uptr atomic_fetch_add(volatile atomic_uintptr_t *a,
    uptr v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
#ifdef _WIN64
  return (uptr)_InterlockedExchangeAdd64(
      (volatile long long*)&a->val_dont_use, (long long)v);  // NOLINT
#else
  return (uptr)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, (long)v);  // NOLINT
#endif
}

INLINE u32 atomic_fetch_sub(volatile atomic_uint32_t *a,
    u32 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u32)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, -(long)v);  // NOLINT
}

INLINE uptr atomic_fetch_sub(volatile atomic_uintptr_t *a,
    uptr v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
#ifdef _WIN64
  return (uptr)_InterlockedExchangeAdd64(
      (volatile long long*)&a->val_dont_use, -(long long)v);  // NOLINT
#else
  return (uptr)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, -(long)v);  // NOLINT
#endif
}

INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
    u8 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u8)_InterlockedExchange8((volatile char*)&a->val_dont_use, v);
}

INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
    u16 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u16)_InterlockedExchange16((volatile short*)&a->val_dont_use, v);
}

INLINE u32 atomic_exchange(volatile atomic_uint32_t *a,
    u32 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u32)_InterlockedExchange((volatile long*)&a->val_dont_use, v);
}

INLINE bool atomic_compare_exchange_strong(volatile atomic_uint8_t *a,
                                           u8 *cmp,
                                           u8 xchgv,
                                           memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  u8 cmpv = *cmp;
#ifdef _WIN64
  u8 prev = (u8)_InterlockedCompareExchange8(
      (volatile char*)&a->val_dont_use, (char)xchgv, (char)cmpv);
#else
  // The 8-bit interlocked intrinsic may be unavailable when targeting
  // 32-bit x86, so fall back to inline assembly (lock cmpxchg).
  u8 prev;
  __asm {
    mov al, cmpv
    mov ecx, a
    mov dl, xchgv
    lock cmpxchg [ecx], dl
    mov prev, al
  }
#endif
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}

INLINE bool atomic_compare_exchange_strong(volatile atomic_uintptr_t *a,
                                           uptr *cmp,
                                           uptr xchg,
                                           memory_order mo) {
  uptr cmpv = *cmp;
  uptr prev = (uptr)_InterlockedCompareExchangePointer(
      (void*volatile*)&a->val_dont_use, (void*)xchg, (void*)cmpv);
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}

INLINE bool atomic_compare_exchange_strong(volatile atomic_uint16_t *a,
                                           u16 *cmp,
                                           u16 xchg,
                                           memory_order mo) {
  u16 cmpv = *cmp;
  u16 prev = (u16)_InterlockedCompareExchange16(
      (volatile short*)&a->val_dont_use, (short)xchg, (short)cmpv);
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}

INLINE bool atomic_compare_exchange_strong(volatile atomic_uint32_t *a,
                                           u32 *cmp,
                                           u32 xchg,
                                           memory_order mo) {
  u32 cmpv = *cmp;
  u32 prev = (u32)_InterlockedCompareExchange(
      (volatile long*)&a->val_dont_use, (long)xchg, (long)cmpv);
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}

INLINE bool atomic_compare_exchange_strong(volatile atomic_uint64_t *a,
                                           u64 *cmp,
                                           u64 xchg,
                                           memory_order mo) {
  u64 cmpv = *cmp;
  u64 prev = (u64)_InterlockedCompareExchange64(
      (volatile long long*)&a->val_dont_use, (long long)xchg, (long long)cmpv);
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}

template<typename T>
INLINE bool atomic_compare_exchange_weak(volatile T *a,
                                         typename T::Type *cmp,
                                         typename T::Type xchg,
                                         memory_order mo) {
  return atomic_compare_exchange_strong(a, cmp, xchg, mo);
}

}  // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_MSVC_H
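
// Usage sketch (illustrative only, not part of the original header): one way a
// caller that includes sanitizer_atomic.h might combine the primitives above
// into a simple test-and-set spin lock. The SpinLock type below is
// hypothetical and shown purely as an example of the API.
//
//   namespace __sanitizer {
//   struct SpinLock {
//     atomic_uint32_t state_;  // 0 = unlocked, 1 = locked
//
//     void Lock() {
//       // atomic_exchange returns the previous value; keep spinning while
//       // the lock was already held, yielding the core between attempts.
//       while (atomic_exchange(&state_, 1, memory_order_acquire) != 0)
//         proc_yield(10);
//     }
//
//     void Unlock() {
//       atomic_store(&state_, 0, memory_order_release);
//     }
//   };
//   }  // namespace __sanitizer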