xref: /linux/arch/arm/include/asm/tls.h (revision 722ecdbce68a87de2d9296f91308f44ea900a039)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASMARM_TLS_H
#define __ASMARM_TLS_H

#include <linux/compiler.h>
#include <asm/thread_info.h>

#ifdef __ASSEMBLY__
#include <asm/asm-offsets.h>
	.macro switch_tls_none, base, tp, tpuser, tmp1, tmp2
	.endm

	.macro switch_tls_v6k, base, tp, tpuser, tmp1, tmp2
	mrc	p15, 0, \tmp2, c13, c0, 2	@ get the user r/w register
	@ TLS register update is deferred until return to user space
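	@ (Only TPIDRURW is switched here; the incoming TPIDRURO value stays
	@ in thread_info and is written back on the return-to-user path, so
	@ TPIDRURO can be reused by the kernel in the meantime, e.g. with
	@ CONFIG_CURRENT_POINTER_IN_TPIDRURO. This is an explanatory note,
	@ not part of the original header.)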
	mcr	p15, 0, \tpuser, c13, c0, 2	@ set the user r/w register
	str	\tmp2, [\base, #TI_TP_VALUE + 4] @ save it
	.endm

	.macro switch_tls_v6, base, tp, tpuser, tmp1, tmp2
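	@ On CONFIG_SMP kernels the ALT_SMP/ALT_UP_B pair below is patched at
	@ boot: on SMP (V6K) hardware the nop falls straight through to the
	@ switch_tls_v6k case, while on UP hardware it becomes a branch to
	@ the out-of-line .L0 code in subsection 1, which tests HWCAP_TLS and
	@ falls back to the kuser page at 0xffff0ff0 when there is no
	@ hardware TLS register.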
#ifdef CONFIG_SMP
ALT_SMP(nop)
ALT_UP_B(.L0_\@)
	.subsection 1
#endif
.L0_\@:
	ldr_va	\tmp1, elf_hwcap
	mov	\tmp2, #0xffff0fff
	tst	\tmp1, #HWCAP_TLS		@ hardware TLS available?
	streq	\tp, [\tmp2, #-15]		@ set TLS value at 0xffff0ff0
	beq	.L2_\@
	mcr	p15, 0, \tp, c13, c0, 3		@ yes, set TLS register
#ifdef CONFIG_SMP
	b	.L1_\@
	.previous
#endif
.L1_\@: switch_tls_v6k \base, \tp, \tpuser, \tmp1, \tmp2
.L2_\@:
	.endm

	.macro switch_tls_software, base, tp, tpuser, tmp1, tmp2
	mov	\tmp1, #0xffff0fff
	str	\tp, [\tmp1, #-15]		@ set TLS value at 0xffff0ff0
	.endm
#else
#include <asm/smp_plat.h>
#endif

#ifdef CONFIG_TLS_REG_EMUL
#define tls_emu		1
#define has_tls_reg		1
#define defer_tls_reg_update	0
#define switch_tls	switch_tls_none
#elif defined(CONFIG_CPU_V6)
#define tls_emu		0
#define has_tls_reg		(elf_hwcap & HWCAP_TLS)
#define defer_tls_reg_update	is_smp()
#define switch_tls	switch_tls_v6
#elif defined(CONFIG_CPU_32v6K)
#define tls_emu		0
#define has_tls_reg		1
#define defer_tls_reg_update	1
#define switch_tls	switch_tls_v6k
#else
#define tls_emu		0
#define has_tls_reg		0
#define defer_tls_reg_update	0
#define switch_tls	switch_tls_software
#endif
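
/*
 * Illustrative sketch (not part of this header): the switch_tls macro
 * selected above is invoked from the context-switch assembly, roughly
 * along these lines, with the outgoing thread_info as \base and the
 * incoming thread's tp_value[] pair as \tp/\tpuser. The register
 * assignments below are an assumption for illustration only; see
 * __switch_to in entry-armv.S for the real code.
 *
 *	ldr	r4, [r2, #TI_TP_VALUE]		@ next->tp_value[0]
 *	ldr	r5, [r2, #TI_TP_VALUE + 4]	@ next->tp_value[1]
 *	switch_tls r1, r4, r5, r3, r7		@ r1 = previous thread_info
 */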

#ifndef __ASSEMBLY__

static inline void set_tls(unsigned long val)
{
	struct thread_info *thread;

	thread = current_thread_info();

	thread->tp_value[0] = val;

	/*
	 * This code runs with preemption enabled and therefore must
	 * be reentrant with respect to switch_tls.
	 *
	 * We need to ensure ordering between the shadow state and the
	 * hardware state, so that we don't corrupt the hardware state
	 * with a stale shadow state during context switch.
	 *
	 * If we're preempted here, switch_tls will load TPIDRURO from
	 * thread_info upon resuming execution and the following mcr
	 * is merely redundant.
	 */
	barrier();

	if (!tls_emu) {
		if (has_tls_reg && !defer_tls_reg_update) {
			asm("mcr p15, 0, %0, c13, c0, 3"
			    : : "r" (val));
		} else if (!has_tls_reg) {
#ifdef CONFIG_KUSER_HELPERS
			/*
			 * User space must never try to access this
			 * directly.  Expect your app to break
			 * eventually if you do so.  The user helper
			 * at 0xffff0fe0 must be used instead.  (see
			 * entry-armv.S for details)
			 */
			*((unsigned int *)0xffff0ff0) = val;
#endif
		}

	}
}
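
/*
 * Illustrative sketch (not part of this header): set_tls() is the backend
 * for updating a thread's TLS pointer, e.g. from the ARM-private set_tls
 * syscall. A caller would look roughly like this; the surrounding syscall
 * plumbing is assumed here for illustration, see arm_syscall() in traps.c
 * for the real handler:
 *
 *	case NR(set_tls):
 *		set_tls(regs->ARM_r0);	// new TLS value from user space
 *		return 0;
 */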

static inline unsigned long get_tpuser(void)
{
	unsigned long reg = 0;

	if (has_tls_reg && !tls_emu)
		__asm__("mrc p15, 0, %0, c13, c0, 2" : "=r" (reg));

	return reg;
}

static inline void set_tpuser(unsigned long val)
{
	/* Since TPIDRURW is fully context-switched (unlike TPIDRURO),
	 * we need not update thread_info.
	 */
	if (has_tls_reg && !tls_emu) {
		asm("mcr p15, 0, %0, c13, c0, 2"
		    : : "r" (val));
	}
}
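
/*
 * Illustrative sketch (not part of this header): the tpuser accessors let
 * thread creation snapshot the caller's TPIDRURW into the new thread's
 * shadow state, roughly like this. The copy_thread()-style context is an
 * assumption for illustration; the fields mirror tp_value[] above:
 *
 *	thread->tp_value[0] = tls;		// from CLONE_SETTLS
 *	thread->tp_value[1] = get_tpuser();	// inherit user r/w register
 */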

static inline void flush_tls(void)
{
	set_tls(0);
	set_tpuser(0);
}

#endif
#endif	/* __ASMARM_TLS_H */