xref: /linux/arch/alpha/include/asm/rwonce.h (revision 3d0fe49454652117522f60bfbefb978ba0e5300b)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2019 Google LLC.
 */
#ifndef __ASM_RWONCE_H
#define __ASM_RWONCE_H

#ifdef CONFIG_SMP

#include <asm/barrier.h>

/*
 * Alpha is apparently daft enough to reorder address-dependent loads
 * on some CPU implementations. Knock some common sense into it with
 * a memory barrier in READ_ONCE().
 *
 * For the curious, more information about this unusual reordering is
 * available in chapter 15 of the "perfbook":
 *
 *  https://kernel.org/pub/linux/kernel/people/paulmck/perfbook/perfbook.html
 *
 */
#define __READ_ONCE(x)							\
({									\
	__unqual_scalar_typeof(x) __x =					\
		(*(volatile typeof(__x) *)(&(x)));			\
	mb();								\
	(typeof(x))__x;							\
})

#endif /* CONFIG_SMP */

#include <asm-generic/rwonce.h>

#endif /* __ASM_RWONCE_H */
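
For readers unfamiliar with why the mb() is there, below is a minimal userspace sketch (not part of the kernel header above) of the pointer-publication pattern it guards. The names published, writer() and reader(), and the use of C11 atomics in place of the kernel's WRITE_ONCE()/READ_ONCE(), are illustrative assumptions only. On most CPUs the address dependency from the pointer load to the dereference is enough to order the two loads; on some Alpha implementations the dependent load can still return stale data, which is why the kernel's __READ_ONCE() inserts a full barrier after the read.

/* Build with: cc -pthread consume_example.c */
#include <stdio.h>
#include <pthread.h>
#include <stdatomic.h>

struct foo {
	int data;
};

static struct foo global_foo;
static struct foo *_Atomic published;	/* NULL until the writer publishes */

static void *writer(void *arg)
{
	global_foo.data = 42;				/* initialise the payload... */
	atomic_store_explicit(&published, &global_foo,
			      memory_order_release);	/* ...then publish the pointer */
	return NULL;
}

static void *reader(void *arg)
{
	struct foo *p;

	/*
	 * memory_order_consume roughly corresponds to what READ_ONCE() plus
	 * an address dependency provides in the kernel (compilers typically
	 * strengthen it to acquire).  Without such ordering, an Alpha CPU
	 * could satisfy the dependent load p->data before it observed the
	 * writer's initialisation, hence the mb() in __READ_ONCE() above.
	 */
	while (!(p = atomic_load_explicit(&published, memory_order_consume)))
		;					/* spin until published */

	printf("data = %d\n", p->data);			/* prints 42 */
	return NULL;
}

int main(void)
{
	pthread_t w, r;

	pthread_create(&r, NULL, reader, NULL);
	pthread_create(&w, NULL, writer, NULL);
	pthread_join(w, NULL);
	pthread_join(r, NULL);
	return 0;
}

Non-SMP Alpha builds skip the barrier entirely: with CONFIG_SMP disabled, the #ifdef above leaves __READ_ONCE() undefined and the generic definition from asm-generic/rwonce.h is used instead.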