#ifndef __LINUX_COMPILER_H
#define __LINUX_COMPILER_H

#include <linux/compiler_types.h>

#ifndef __ASSEMBLY__

#ifdef __KERNEL__

/*
 * Note: DISABLE_BRANCH_PROFILING can be used by special lowlevel code
 * to disable branch tracing on a per file basis.
 */
#if defined(CONFIG_TRACE_BRANCH_PROFILING) \
    && !defined(DISABLE_BRANCH_PROFILING) && !defined(__CHECKER__)
void ftrace_likely_update(struct ftrace_likely_data *f, int val,
			  int expect, int is_constant);

#define likely_notrace(x)	__builtin_expect(!!(x), 1)
#define unlikely_notrace(x)	__builtin_expect(!!(x), 0)

#define __branch_check__(x, expect, is_constant) ({			\
			int ______r;					\
			static struct ftrace_likely_data		\
				__attribute__((__aligned__(4)))		\
				__attribute__((section("_ftrace_annotated_branch"))) \
			______f = {					\
				.data.func = __func__,			\
				.data.file = __FILE__,			\
				.data.line = __LINE__,			\
			};						\
			______r = __builtin_expect(!!(x), expect);	\
			ftrace_likely_update(&______f, ______r,		\
					     expect, is_constant);	\
			______r;					\
		})

/*
 * Using __builtin_constant_p(x) to ignore cases where the return
 * value is always the same.  This idea is taken from a similar patch
 * written by Daniel Walker.
 */
# ifndef likely
#  define likely(x)	(__branch_check__(x, 1, __builtin_constant_p(x)))
# endif
# ifndef unlikely
#  define unlikely(x)	(__branch_check__(x, 0, __builtin_constant_p(x)))
# endif

#ifdef CONFIG_PROFILE_ALL_BRANCHES
/*
 * "Define 'is'", Bill Clinton
 * "Define 'if'", Steven Rostedt
 */
#define if(cond, ...) __trace_if( (cond , ## __VA_ARGS__) )
#define __trace_if(cond) \
	if (__builtin_constant_p(!!(cond)) ? !!(cond) :			\
	({								\
		int ______r;						\
		static struct ftrace_branch_data			\
			__attribute__((__aligned__(4)))			\
			__attribute__((section("_ftrace_branch")))	\
			______f = {					\
				.func = __func__,			\
				.file = __FILE__,			\
				.line = __LINE__,			\
			};						\
		______r = !!(cond);					\
		______f.miss_hit[______r]++;				\
		______r;						\
	}))
#endif /* CONFIG_PROFILE_ALL_BRANCHES */

#else
# define likely(x)	__builtin_expect(!!(x), 1)
# define unlikely(x)	__builtin_expect(!!(x), 0)
#endif
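
/*
 * A minimal, hypothetical usage sketch of likely()/unlikely(): the hints
 * only tell the compiler which way a branch is expected to go; the value
 * of the condition itself is unchanged.
 *
 *	if (unlikely(!buf))
 *		return -ENOMEM;
 *	if (likely(len > 0))
 *		consume(buf, len);
 *
 * 'buf', 'len' and consume() are made-up names used purely for illustration.
 */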

/* Optimization barrier */
#ifndef barrier
# define barrier() __memory_barrier()
#endif

#ifndef barrier_data
# define barrier_data(ptr) barrier()
#endif

/* Unreachable code */
#ifdef CONFIG_STACK_VALIDATION
#define annotate_reachable() ({						\
	asm("%c0:\n\t"							\
	    ".pushsection .discard.reachable\n\t"			\
	    ".long %c0b - .\n\t"					\
	    ".popsection\n\t" : : "i" (__LINE__));			\
})
#define annotate_unreachable() ({					\
	asm("%c0:\n\t"							\
	    ".pushsection .discard.unreachable\n\t"			\
	    ".long %c0b - .\n\t"					\
	    ".popsection\n\t" : : "i" (__LINE__));			\
})
#define ASM_UNREACHABLE							\
	"999:\n\t"							\
	".pushsection .discard.unreachable\n\t"				\
	".long 999b - .\n\t"						\
	".popsection\n\t"
#else
#define annotate_reachable()
#define annotate_unreachable()
#endif

#ifndef ASM_UNREACHABLE
# define ASM_UNREACHABLE
#endif
#ifndef unreachable
# define unreachable() do { annotate_reachable(); do { } while (1); } while (0)
#endif
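
/*
 * A minimal, hypothetical usage sketch of unreachable(): it marks a point
 * that control flow can never reach, e.g. after a switch that handles every
 * possible value, so the compiler does not warn about falling off the end
 * of a non-void function.
 *
 *	switch (state) {
 *	case STATE_A: return do_a();
 *	case STATE_B: return do_b();
 *	}
 *	unreachable();
 *
 * 'state', STATE_A/STATE_B and do_a()/do_b() are made-up names used purely
 * for illustration.
 */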

/*
 * KENTRY - kernel entry point
 * This can be used to annotate symbols (functions or data) that are used
 * without their linker symbol being referenced explicitly. For example,
 * interrupt vector handlers, or functions in the kernel image that are found
 * programmatically.
 *
 * Not required for symbols exported with EXPORT_SYMBOL, or initcalls. Those
 * are handled in their own way (with KEEP() in linker scripts).
 *
 * KENTRY can be avoided if the symbols in question are marked as KEEP() in the
 * linker script. For example, an architecture could KEEP() its entire
 * boot/exception vector code rather than annotate each function and data
 * object.
 */
#ifndef KENTRY
# define KENTRY(sym)						\
	extern typeof(sym) sym;					\
	static const unsigned long __kentry_##sym		\
	__used							\
	__attribute__((section("___kentry" "+" #sym ), used))	\
	= (unsigned long)&sym;
#endif

#ifndef RELOC_HIDE
# define RELOC_HIDE(ptr, off)					\
  ({ unsigned long __ptr;					\
     __ptr = (unsigned long) (ptr);				\
    (typeof(ptr)) (__ptr + (off)); })
#endif

#ifndef OPTIMIZER_HIDE_VAR
#define OPTIMIZER_HIDE_VAR(var) barrier()
#endif
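
/*
 * A minimal, hypothetical usage sketch of OPTIMIZER_HIDE_VAR(): it hides the
 * value of a variable from the optimizer, so operations on that variable are
 * not folded away at compile time.
 *
 *	int factor = 1;
 *
 *	OPTIMIZER_HIDE_VAR(factor);
 *	return x * factor;
 *
 * After the hide, the compiler should no longer assume that 'factor' is 1,
 * so a real multiply tends to be emitted. 'factor' and 'x' are made-up names
 * used purely for illustration.
 */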

/* Not-quite-unique ID. */
#ifndef __UNIQUE_ID
# define __UNIQUE_ID(prefix) __PASTE(__PASTE(__UNIQUE_ID_, prefix), __LINE__)
#endif

#include <uapi/linux/types.h>

#define __READ_ONCE_SIZE						\
({									\
	switch (size) {							\
	case 1: *(__u8 *)res = *(volatile __u8 *)p; break;		\
	case 2: *(__u16 *)res = *(volatile __u16 *)p; break;		\
	case 4: *(__u32 *)res = *(volatile __u32 *)p; break;		\
	case 8: *(__u64 *)res = *(volatile __u64 *)p; break;		\
	default:							\
		barrier();						\
		__builtin_memcpy((void *)res, (const void *)p, size);	\
		barrier();						\
	}								\
})

static __always_inline
void __read_once_size(const volatile void *p, void *res, int size)
{
	__READ_ONCE_SIZE;
}

#ifdef CONFIG_KASAN
/*
 * This function is not 'inline' because __no_sanitize_address conflicts
 * with inlining. Attempting to inline it may cause a build failure.
 *	https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67368
 * '__maybe_unused' allows us to avoid defined-but-not-used warnings.
 */
static __no_sanitize_address __maybe_unused
void __read_once_size_nocheck(const volatile void *p, void *res, int size)
{
	__READ_ONCE_SIZE;
}
#else
static __always_inline
void __read_once_size_nocheck(const volatile void *p, void *res, int size)
{
	__READ_ONCE_SIZE;
}
#endif

static __always_inline void __write_once_size(volatile void *p, void *res, int size)
{
	switch (size) {
	case 1: *(volatile __u8 *)p = *(__u8 *)res; break;
	case 2: *(volatile __u16 *)p = *(__u16 *)res; break;
	case 4: *(volatile __u32 *)p = *(__u32 *)res; break;
	case 8: *(volatile __u64 *)p = *(__u64 *)res; break;
	default:
		barrier();
		__builtin_memcpy((void *)p, (const void *)res, size);
		barrier();
	}
}

/*
 * Prevent the compiler from merging or refetching reads or writes. The
 * compiler is also forbidden from reordering successive instances of
 * READ_ONCE, WRITE_ONCE and ACCESS_ONCE (see below), but only when the
 * compiler is aware of some particular ordering.  One way to make the
 * compiler aware of ordering is to put the two invocations of READ_ONCE,
 * WRITE_ONCE or ACCESS_ONCE() in different C statements.
 *
 * In contrast to ACCESS_ONCE, these two macros will also work on aggregate
 * data types like structs or unions. If the size of the accessed data
 * type exceeds the word size of the machine (e.g., 32 bits or 64 bits)
 * READ_ONCE() and WRITE_ONCE() will fall back to memcpy(). There are at
 * least two memcpy()s: one for the __builtin_memcpy() and one for the
 * copy into the '__u' union that the macro allocates on the stack.
 *
 * Their two major use cases are: (1) Mediating communication between
 * process-level code and irq/NMI handlers, all running on the same CPU,
 * and (2) Ensuring that the compiler does not fold, spindle, or otherwise
 * mutilate accesses that either do not require ordering or that interact
 * with an explicit memory barrier or atomic instruction that provides the
 * required ordering.
 */
#include <asm/barrier.h>

#define __READ_ONCE(x, check)						\
({									\
	union { typeof(x) __val; char __c[1]; } __u;			\
	if (check)							\
		__read_once_size(&(x), __u.__c, sizeof(x));		\
	else								\
		__read_once_size_nocheck(&(x), __u.__c, sizeof(x));	\
	__u.__val;							\
})
#define READ_ONCE(x) __READ_ONCE(x, 1)

/*
 * Use READ_ONCE_NOCHECK() instead of READ_ONCE() if you need
 * to hide memory access from KASAN.
 */
#define READ_ONCE_NOCHECK(x) __READ_ONCE(x, 0)

#define WRITE_ONCE(x, val) \
({							\
	union { typeof(x) __val; char __c[1]; } __u =	\
		{ .__val = (__force typeof(x)) (val) }; \
	__write_once_size(&(x), __u.__c, sizeof(x));	\
	__u.__val;					\
})
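
/*
 * A minimal, hypothetical producer/consumer sketch using WRITE_ONCE() and
 * READ_ONCE(): the macros keep the compiler from caching, tearing or
 * duplicating the accesses, but they do not by themselves order surrounding
 * accesses; pair them with barriers or acquire/release primitives for that.
 *
 *	static int done;
 *
 *	void producer(void)
 *	{
 *		WRITE_ONCE(done, 1);
 *	}
 *
 *	void consumer(void)
 *	{
 *		while (!READ_ONCE(done))
 *			cpu_relax();
 *	}
 *
 * 'done', producer() and consumer() are made-up names used purely for
 * illustration.
 */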

#endif /* __KERNEL__ */

#endif /* __ASSEMBLY__ */

/* Compile time object size, -1 for unknown */
#ifndef __compiletime_object_size
# define __compiletime_object_size(obj) -1
#endif
#ifndef __compiletime_warning
# define __compiletime_warning(message)
#endif
#ifndef __compiletime_error
# define __compiletime_error(message)
/*
 * Sparse complains of variable sized arrays due to the temporary variable in
 * __compiletime_assert. Unfortunately we can't just expand it out to make
 * sparse see a constant array size without breaking compiletime_assert on old
 * versions of GCC (e.g. 4.2.4), so hide the array from sparse altogether.
 */
# ifndef __CHECKER__
#  define __compiletime_error_fallback(condition) \
	do { ((void)sizeof(char[1 - 2 * condition])); } while (0)
# endif
#endif
#ifndef __compiletime_error_fallback
# define __compiletime_error_fallback(condition) do { } while (0)
#endif

#ifdef __OPTIMIZE__
# define __compiletime_assert(condition, msg, prefix, suffix)		\
	do {								\
		bool __cond = !(condition);				\
		extern void prefix ## suffix(void) __compiletime_error(msg); \
		if (__cond)						\
			prefix ## suffix();				\
		__compiletime_error_fallback(__cond);			\
	} while (0)
#else
# define __compiletime_assert(condition, msg, prefix, suffix) do { } while (0)
#endif

#define _compiletime_assert(condition, msg, prefix, suffix) \
	__compiletime_assert(condition, msg, prefix, suffix)

/**
 * compiletime_assert - break build and emit msg if condition is false
 * @condition: a compile-time constant condition to check
 * @msg: a message to emit if condition is false
 *
 * In the tradition of POSIX assert, this macro will break the build if the
 * supplied condition is *false*, emitting the supplied error message if the
 * compiler has support to do so.
 */
#define compiletime_assert(condition, msg) \
	_compiletime_assert(condition, msg, __compiletime_assert_, __LINE__)

#define compiletime_assert_atomic_type(t)				\
	compiletime_assert(__native_word(t),				\
		"Need native word sized stores/loads for atomicity.")
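
/*
 * A minimal, hypothetical usage sketch of compiletime_assert(): a constant
 * false condition becomes a build error carrying a readable message.
 *
 *	compiletime_assert(sizeof(long) == sizeof(void *),
 *			   "long and pointer sizes must match");
 *	compiletime_assert_atomic_type(unsigned int);
 *
 * The conditions shown are examples only, chosen purely for illustration.
 */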

/*
 * Prevent the compiler from merging or refetching accesses.  The compiler
 * is also forbidden from reordering successive instances of ACCESS_ONCE(),
 * but only when the compiler is aware of some particular ordering.  One way
 * to make the compiler aware of ordering is to put the two invocations of
 * ACCESS_ONCE() in different C statements.
 *
 * ACCESS_ONCE will only work on scalar types. For union types, ACCESS_ONCE
 * on a union member will work as long as the size of the member matches the
 * size of the union and the size is smaller than word size.
 *
 * The major use cases of ACCESS_ONCE used to be (1) Mediating communication
 * between process-level code and irq/NMI handlers, all running on the same CPU,
 * and (2) Ensuring that the compiler does not fold, spindle, or otherwise
 * mutilate accesses that either do not require ordering or that interact
 * with an explicit memory barrier or atomic instruction that provides the
 * required ordering.
 *
 * If possible use READ_ONCE()/WRITE_ONCE() instead.
 */
#define __ACCESS_ONCE(x) ({ \
	 __maybe_unused typeof(x) __var = (__force typeof(x)) 0; \
	(volatile typeof(x) *)&(x); })
#define ACCESS_ONCE(x) (*__ACCESS_ONCE(x))
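
/*
 * A minimal, hypothetical usage sketch of ACCESS_ONCE(): force a real load
 * on every iteration when polling a scalar that an interrupt handler may
 * change behind our back. New code should prefer READ_ONCE()/WRITE_ONCE()
 * as noted above.
 *
 *	while (!ACCESS_ONCE(irq_seen))
 *		cpu_relax();
 *
 * 'irq_seen' is a made-up name used purely for illustration.
 */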

/**
 * lockless_dereference() - safely load a pointer for later dereference
 * @p: The pointer to load
 *
 * Similar to rcu_dereference(), but for situations where the pointed-to
 * object's lifetime is managed by something other than RCU.  That
 * "something other" might be reference counting or simple immortality.
 *
 * The seemingly unused variable ___typecheck_p validates that @p is
 * indeed a pointer type by using a pointer to typeof(*p) as the type.
 * Taking a pointer to typeof(*p) again is needed in case p is void *.
 */
#define lockless_dereference(p) \
({ \
	typeof(p) _________p1 = READ_ONCE(p); \
	typeof(*(p)) *___typecheck_p __maybe_unused; \
	smp_read_barrier_depends(); /* Dependency order vs. p above. */ \
	(_________p1); \
})

#endif /* __LINUX_COMPILER_H */