xref: /linux/arch/x86/um/asm/barrier.h (revision 93d90ad708b8da6efc0e487b66111aa9db7f70c7)
1 #ifndef _ASM_UM_BARRIER_H_
2 #define _ASM_UM_BARRIER_H_
3 
4 #include <asm/asm.h>
5 #include <asm/segment.h>
6 #include <asm/cpufeature.h>
7 #include <asm/cmpxchg.h>
8 #include <asm/nops.h>
9 
10 #include <linux/kernel.h>
11 #include <linux/irqflags.h>
12 
/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */
#ifdef CONFIG_X86_32

/*
 * 32-bit: mfence/lfence/sfence only exist on SSE-capable parts, so
 * alternative() patches the fence in when the feature bit is present
 * and otherwise falls back to a LOCKed add to the top of the stack,
 * which acts as a full serializing memory operation on older CPUs.
 * X86_FEATURE_XMM2 (SSE2) gates mfence/lfence; X86_FEATURE_XMM (SSE)
 * gates sfence.
 */
#define mb()	alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
#define rmb()	alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
#define wmb()	alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)

#else /* CONFIG_X86_32 */

/* 64-bit always has SSE2, so the fence instructions can be used directly. */
#define mb()	asm volatile("mfence" : : : "memory")
#define rmb()	asm volatile("lfence" : : : "memory")
#define wmb()	asm volatile("sfence" : : : "memory")

#endif /* CONFIG_X86_32 */
31 
/*
 * Barriers ordering CPU accesses against DMA-coherent memory.  A
 * compiler barrier is normally sufficient here; the CONFIG_X86_PPRO_FENCE
 * case exists because PPro-era CPUs need a real read fence
 * (NOTE(review): rationale inferred from the ifdef — the hardware
 * erratum itself is not visible in this file).
 */
#ifdef CONFIG_X86_PPRO_FENCE
#define dma_rmb()	rmb()
#else /* CONFIG_X86_PPRO_FENCE */
#define dma_rmb()	barrier()
#endif /* CONFIG_X86_PPRO_FENCE */
#define dma_wmb()	barrier()
38 
#ifdef CONFIG_SMP

/*
 * SMP ordering: a full smp_mb() needs a real hardware barrier, while
 * smp_rmb()/smp_wmb() rely on x86's load/store ordering guarantees and
 * only need to stop the compiler from reordering (dma_rmb() upgrades
 * to a real fence on PPro parts — see above).  set_mb() uses xchg(),
 * whose implicit LOCK makes the store globally visible with full
 * ordering.
 */
#define smp_mb()	mb()
#define smp_rmb()	dma_rmb()
#define smp_wmb()	barrier()
#define set_mb(var, value) do { (void)xchg(&var, value); } while (0)

#else /* CONFIG_SMP */

/*
 * UP: no other CPUs to order against, so compiler barriers suffice
 * everywhere; set_mb() degrades to a plain store plus barrier().
 */
#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#define set_mb(var, value) do { var = value; barrier(); } while (0)

#endif /* CONFIG_SMP */

/*
 * x86 honors data dependencies between loads, so dependency barriers
 * are no-ops on this architecture.
 */
#define read_barrier_depends()		do { } while (0)
#define smp_read_barrier_depends()	do { } while (0)
57 
/*
 * Stop RDTSC speculation. This is needed when you need to use RDTSC
 * (or get_cycles or vread that possibly accesses the TSC) in a defined
 * code region.
 *
 * (Could use an alternative three way for this if there was one.)
 */
static inline void rdtsc_barrier(void)
{
	/*
	 * Exactly one of the two feature bits is expected to be set per
	 * CPU vendor; the other site stays a 3-byte NOP.  MFENCE_RDTSC
	 * covers CPUs where MFENCE serializes RDTSC, LFENCE_RDTSC those
	 * where LFENCE does (NOTE(review): vendor mapping inferred from
	 * the feature names — confirm against cpufeature definitions).
	 */
	alternative(ASM_NOP3, "mfence", X86_FEATURE_MFENCE_RDTSC);
	alternative(ASM_NOP3, "lfence", X86_FEATURE_LFENCE_RDTSC);
}
70 
71 #endif
72