xref: /linux/arch/x86/include/asm/smap.h (revision 54a012b6223580c74b77f3dc2a7c6b3c29084d18)
1 /* SPDX-License-Identifier: GPL-2.0-only */
2 /*
3  * Supervisor Mode Access Prevention support
4  *
5  * Copyright (C) 2012 Intel Corporation
6  * Author: H. Peter Anvin <hpa@linux.intel.com>
7  */
8 
9 #ifndef _ASM_X86_SMAP_H
10 #define _ASM_X86_SMAP_H
11 
12 #include <asm/nops.h>
13 #include <asm/cpufeatures.h>
14 #include <asm/alternative.h>
15 
16 #ifdef __ASSEMBLER__
17 
/*
 * CLAC/STAC for use in assembly (.S) files: the instruction is patched
 * in at boot only on CPUs with X86_FEATURE_SMAP, otherwise the site is
 * left empty.  Note: no parentheses around ALTERNATIVE here — this is
 * the GAS macro form, not the C one.
 */
#define ASM_CLAC \
	ALTERNATIVE "", "clac", X86_FEATURE_SMAP

#define ASM_STAC \
	ALTERNATIVE "", "stac", X86_FEATURE_SMAP
23 
24 #else /* __ASSEMBLER__ */
25 
/*
 * Re-enable SMAP protection by clearing EFLAGS.AC.  Patched to a no-op
 * on CPUs without X86_FEATURE_SMAP.
 */
static __always_inline void clac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", "clac", X86_FEATURE_SMAP);
}
31 
/*
 * Temporarily disable SMAP protection by setting EFLAGS.AC, allowing
 * supervisor-mode access to user pages.  Patched to a no-op on CPUs
 * without X86_FEATURE_SMAP.
 */
static __always_inline void stac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", "stac", X86_FEATURE_SMAP);
}
37 
/*
 * Save the current EFLAGS (which includes the AC bit) and then clear AC,
 * i.e. enter a SMAP-protected region while remembering the previous
 * state.  The returned value is meant to be handed back to
 * smap_restore().
 *
 * On CPUs without X86_FEATURE_SMAP the alternative patches to nothing,
 * so @flags is returned unwritten — smap_restore() likewise does
 * nothing with it in that case.
 *
 * NOTE(review): ANNOTATE_IGNORE_ALTERNATIVE appears to suppress
 * objtool's validation of this alternative — confirm against
 * objtool documentation.
 */
static __always_inline unsigned long smap_save(void)
{
	unsigned long flags;

	asm volatile ("# smap_save\n\t"
		      ALTERNATIVE(ANNOTATE_IGNORE_ALTERNATIVE
				  "", "pushf; pop %0; clac",
				  X86_FEATURE_SMAP)
		      : "=rm" (flags) : : "memory", "cc");

	return flags;
}
50 
/*
 * Restore the EFLAGS state (notably the AC bit) previously captured by
 * smap_save(), by pushing @flags and executing popf.  On CPUs without
 * X86_FEATURE_SMAP this patches to nothing and @flags is ignored.
 */
static __always_inline void smap_restore(unsigned long flags)
{
	asm volatile ("# smap_restore\n\t"
		      ALTERNATIVE(ANNOTATE_IGNORE_ALTERNATIVE
				  "", "push %0; popf",
				  X86_FEATURE_SMAP)
		      : : "g" (flags) : "memory", "cc");
}
59 
/*
 * These macros can be used in asm() statements from C code; they expand
 * to a clac/stac instruction patched in only when the CPU has
 * X86_FEATURE_SMAP.
 */
#define ASM_CLAC \
	ALTERNATIVE("", "clac", X86_FEATURE_SMAP)
#define ASM_STAC \
	ALTERNATIVE("", "stac", X86_FEATURE_SMAP)

/*
 * _UNSAFE variants: same patching, but with ANNOTATE_IGNORE_ALTERNATIVE
 * prepended.  NOTE(review): presumably this tells objtool to skip
 * validating the alternative at this site — confirm against objtool
 * documentation before relying on it.
 */
#define ASM_CLAC_UNSAFE \
	ALTERNATIVE("", ANNOTATE_IGNORE_ALTERNATIVE "clac", X86_FEATURE_SMAP)
#define ASM_STAC_UNSAFE \
	ALTERNATIVE("", ANNOTATE_IGNORE_ALTERNATIVE "stac", X86_FEATURE_SMAP)
70 
71 #endif /* __ASSEMBLER__ */
72 
73 #endif /* _ASM_X86_SMAP_H */
74