/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Supervisor Mode Access Prevention support
 *
 * Copyright (C) 2012 Intel Corporation
 * Author: H. Peter Anvin <hpa@linux.intel.com>
 */

#ifndef _ASM_X86_SMAP_H
#define _ASM_X86_SMAP_H

#include <asm/nops.h>
#include <asm/cpufeatures.h>

/* "Raw" instruction opcodes */
#define __ASM_CLAC	".byte 0x0f,0x01,0xca"
#define __ASM_STAC	".byte 0x0f,0x01,0xcb"

#ifdef __ASSEMBLY__

#include <asm/alternative-asm.h>

#ifdef CONFIG_X86_SMAP

#define ASM_CLAC \
	ALTERNATIVE "", __ASM_CLAC, X86_FEATURE_SMAP

#define ASM_STAC \
	ALTERNATIVE "", __ASM_STAC, X86_FEATURE_SMAP

#else /* CONFIG_X86_SMAP */

#define ASM_CLAC
#define ASM_STAC

#endif /* CONFIG_X86_SMAP */

#else /* __ASSEMBLY__ */

#include <asm/alternative.h>

#ifdef CONFIG_X86_SMAP

static __always_inline void clac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", __ASM_CLAC, X86_FEATURE_SMAP);
}

static __always_inline void stac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative("", __ASM_STAC, X86_FEATURE_SMAP);
}

static __always_inline unsigned long smap_save(void)
{
	unsigned long flags;

	asm volatile ("# smap_save\n\t"
		      ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
		      "pushf; pop %0; " __ASM_CLAC "\n\t"
		      "1:"
		      : "=rm" (flags) : : "memory", "cc");

	return flags;
}

static __always_inline void smap_restore(unsigned long flags)
{
	asm volatile ("# smap_restore\n\t"
		      ALTERNATIVE("jmp 1f", "", X86_FEATURE_SMAP)
		      "push %0; popf\n\t"
		      "1:"
		      : : "g" (flags) : "memory", "cc");
}

/* These macros can be used in asm() statements */
#define ASM_CLAC \
	ALTERNATIVE("", __ASM_CLAC, X86_FEATURE_SMAP)
#define ASM_STAC \
	ALTERNATIVE("", __ASM_STAC, X86_FEATURE_SMAP)

#else /* CONFIG_X86_SMAP */

static inline void clac(void) { }
static inline void stac(void) { }

static inline unsigned long smap_save(void) { return 0; }
static inline void smap_restore(unsigned long flags) { }

#define ASM_CLAC
#define ASM_STAC

#endif /* CONFIG_X86_SMAP */

#endif /* __ASSEMBLY__ */

#endif /* _ASM_X86_SMAP_H */
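
/*
 * Hypothetical usage sketch (an illustration only, not part of this header
 * or of the real uaccess implementation): kernel code that must briefly
 * touch user memory opens an SMAP window with stac() and closes it again
 * with clac(); smap_save()/smap_restore() preserve the caller's EFLAGS.AC
 * state across code that must not run with the window open.
 * read_word_from_user() below is an illustrative stand-in, not a real
 * kernel helper, and it omits the fault handling a real access would need.
 */
#if 0	/* example only */
static unsigned long read_word_from_user(const unsigned long *uaddr)
{
	unsigned long val;

	stac();				/* set EFLAGS.AC: allow supervisor access to user pages */
	val = *uaddr;			/* the actual access to the user-space pointer */
	clac();				/* clear EFLAGS.AC: re-arm SMAP protection */

	return val;
}
#endif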