/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/addrspace.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/asm-offsets.h>
#include <asm/loongarch.h>
#include <asm/thread_info.h>
#include <asm/unwind_hints.h>

/* Make the addition of cfi info a little easier. */
	.macro cfi_rel_offset reg offset=0 docfi=0
	.if \docfi
	.cfi_rel_offset \reg, \offset
	.endif
	.endm

	.macro cfi_st reg offset=0 docfi=0
	cfi_rel_offset \reg, \offset, \docfi
	LONG_S	\reg, sp, \offset
	.endm

	.macro cfi_restore reg offset=0 docfi=0
	.if \docfi
	.cfi_restore \reg
	.endif
	.endm

	.macro cfi_ld reg offset=0 docfi=0
	LONG_L	\reg, sp, \offset
	cfi_restore \reg \offset \docfi
	.endm
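
/*
 * For example, "cfi_st t0, PT_R12, 1" stores t0 at sp + PT_R12 and, because
 * docfi is non-zero, also emits ".cfi_rel_offset t0, PT_R12" so the DWARF
 * unwinder knows where t0 was saved; with docfi=0 only the store is emitted.
 */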
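/*
 * Program the direct mapping windows from their CSR_DMWx_INIT values;
 * DMWIN3 is written with 0 and left unused.
 */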
	.macro SETUP_DMWINS temp
	li.d	\temp, CSR_DMW0_INIT	# WUC, PLV0, 0x8000 xxxx xxxx xxxx
	csrwr	\temp, LOONGARCH_CSR_DMWIN0
	li.d	\temp, CSR_DMW1_INIT	# CAC, PLV0, 0x9000 xxxx xxxx xxxx
	csrwr	\temp, LOONGARCH_CSR_DMWIN1
	li.d	\temp, CSR_DMW2_INIT	# WUC, PLV0, 0xa000 xxxx xxxx xxxx
	csrwr	\temp, LOONGARCH_CSR_DMWIN2
	li.d	\temp, CSR_DMW3_INIT	# 0x0, unused
	csrwr	\temp, LOONGARCH_CSR_DMWIN3
	.endm

/*
 * Jump to the runtime virtual address: insert the low DMW_PABITS bits of the
 * current PC into CACHE_BASE and branch there, so execution continues through
 * the cached direct-mapping window. The 0xc offset skips the pcaddi/bstrins.d/
 * jirl sequence and lands on the instruction following this macro.
 */
	.macro JUMP_VIRT_ADDR temp1 temp2
	li.d	\temp1, CACHE_BASE
	pcaddi	\temp2, 0
	bstrins.d  \temp1, \temp2, (DMW_PABITS - 1), 0
	jirl	zero, \temp1, 0xc
	.endm

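/*
 * Call stackleak_erase_on_task_stack() when the STACKLEAK GCC plugin is
 * enabled; otherwise this expands to nothing.
 */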
	.macro STACKLEAK_ERASE
#ifdef CONFIG_GCC_PLUGIN_STACKLEAK
	bl	stackleak_erase_on_task_stack
#endif
	.endm

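/*
 * Stash t0/t1 in the EXCEPTION_KS0/KS1 scratch CSRs so that early exception
 * code can use them as temporaries, and reload them before SAVE_TEMP writes
 * their original values to pt_regs.
 */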
	.macro BACKUP_T0T1
	csrwr	t0, EXCEPTION_KS0
	csrwr	t1, EXCEPTION_KS1
	.endm

	.macro RELOAD_T0T1
	csrrd   t0, EXCEPTION_KS0
	csrrd   t1, EXCEPTION_KS1
	.endm

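/* Save the temporary registers t0-t8 (t0/t1 are taken from the scratch CSRs). */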
	.macro	SAVE_TEMP docfi=0
	RELOAD_T0T1
	cfi_st	t0, PT_R12, \docfi
	cfi_st	t1, PT_R13, \docfi
	cfi_st	t2, PT_R14, \docfi
	cfi_st	t3, PT_R15, \docfi
	cfi_st	t4, PT_R16, \docfi
	cfi_st	t5, PT_R17, \docfi
	cfi_st	t6, PT_R18, \docfi
	cfi_st	t7, PT_R19, \docfi
	cfi_st	t8, PT_R20, \docfi
	.endm

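/* Save the callee-saved (static) registers s0-s8. */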
	.macro	SAVE_STATIC docfi=0
	cfi_st	s0, PT_R23, \docfi
	cfi_st	s1, PT_R24, \docfi
	cfi_st	s2, PT_R25, \docfi
	cfi_st	s3, PT_R26, \docfi
	cfi_st	s4, PT_R27, \docfi
	cfi_st	s5, PT_R28, \docfi
	cfi_st	s6, PT_R29, \docfi
	cfi_st	s7, PT_R30, \docfi
	cfi_st	s8, PT_R31, \docfi
	.endm

/*
 * get_saved_sp returns the SP for the current CPU by looking in the
 * kernelsp array for it. It stores the current sp in t0 and loads the
 * new value in sp.
 */
	.macro	get_saved_sp docfi=0
	la_abs	  t1, kernelsp
#ifdef CONFIG_SMP
	csrrd	  t0, PERCPU_BASE_KS
	LONG_ADD  t1, t1, t0
#endif
	move	  t0, sp
	.if \docfi
	.cfi_register sp, t0
	.endif
	LONG_L	  sp, t1, 0
	.endm

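/*
 * set_saved_sp records \stackp as the kernel stack pointer of the current
 * CPU in the kernelsp array.
 */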
	.macro	set_saved_sp stackp temp temp2
	la.pcrel  \temp, kernelsp
#ifdef CONFIG_SMP
	LONG_ADD  \temp, \temp, u0
#endif
	LONG_S	  \stackp, \temp, 0
	.endm

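/*
 * Save sp, the PRMD/CRMD/EUEN/ECFG/ESTAT CSRs, ra, ERA, the argument
 * registers a0-a7, tp and fp into pt_regs. If the exception came from user
 * mode, switch to this CPU's kernel stack first and set up tp (thread_info)
 * and u0 (per-CPU base) for kernel use.
 */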
	.macro	SAVE_SOME docfi=0
	csrrd	t1, LOONGARCH_CSR_PRMD
	andi	t1, t1, 0x3	/* extract pplv bit */
	move	t0, sp
	beqz	t1, 8f
	/* Called from user mode, new stack. */
	get_saved_sp docfi=\docfi
8:
	PTR_ADDI sp, sp, -PT_SIZE
	.if \docfi
	.cfi_def_cfa sp, 0
	.endif
	cfi_st	t0, PT_R3, \docfi
	cfi_rel_offset  sp, PT_R3, \docfi
	LONG_S	zero, sp, PT_R0
	csrrd	t0, LOONGARCH_CSR_PRMD
	LONG_S	t0, sp, PT_PRMD
	csrrd	t0, LOONGARCH_CSR_CRMD
	LONG_S	t0, sp, PT_CRMD
	csrrd	t0, LOONGARCH_CSR_EUEN
	LONG_S  t0, sp, PT_EUEN
	csrrd	t0, LOONGARCH_CSR_ECFG
	LONG_S	t0, sp, PT_ECFG
	csrrd	t0, LOONGARCH_CSR_ESTAT
	PTR_S	t0, sp, PT_ESTAT
	cfi_st	ra, PT_R1, \docfi
	cfi_st	a0, PT_R4, \docfi
	cfi_st	a1, PT_R5, \docfi
	cfi_st	a2, PT_R6, \docfi
	cfi_st	a3, PT_R7, \docfi
	cfi_st	a4, PT_R8, \docfi
	cfi_st	a5, PT_R9, \docfi
	cfi_st	a6, PT_R10, \docfi
	cfi_st	a7, PT_R11, \docfi
	csrrd	ra, LOONGARCH_CSR_ERA
	LONG_S	ra, sp, PT_ERA
	.if \docfi
	.cfi_rel_offset ra, PT_ERA
	.endif
	cfi_st	tp, PT_R2, \docfi
	cfi_st	fp, PT_R22, \docfi

	/* Set thread_info if we're coming from user mode */
	csrrd	t0, LOONGARCH_CSR_PRMD
	andi	t0, t0, 0x3	/* extract pplv bit */
	beqz	t0, 9f

	li.d	tp, ~_THREAD_MASK
	and	tp, tp, sp
	cfi_st  u0, PT_R21, \docfi
	csrrd	u0, PERCPU_BASE_KS
9:
#ifdef CONFIG_KGDB
	li.w	t0, CSR_CRMD_WE
	csrxchg	t0, t0, LOONGARCH_CSR_CRMD
#endif
	UNWIND_HINT_REGS
	.endm

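/* Save the full register set: SAVE_SOME plus the temporary and static registers. */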
	.macro	SAVE_ALL docfi=0
	SAVE_SOME \docfi
	SAVE_TEMP \docfi
	SAVE_STATIC \docfi
	.endm

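/* Restore the temporary registers t0-t8 from pt_regs. */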
	.macro	RESTORE_TEMP docfi=0
	cfi_ld	t0, PT_R12, \docfi
	cfi_ld	t1, PT_R13, \docfi
	cfi_ld	t2, PT_R14, \docfi
	cfi_ld	t3, PT_R15, \docfi
	cfi_ld	t4, PT_R16, \docfi
	cfi_ld	t5, PT_R17, \docfi
	cfi_ld	t6, PT_R18, \docfi
	cfi_ld	t7, PT_R19, \docfi
	cfi_ld	t8, PT_R20, \docfi
	.endm

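/* Restore the callee-saved (static) registers s0-s8 from pt_regs. */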
	.macro	RESTORE_STATIC docfi=0
	cfi_ld	s0, PT_R23, \docfi
	cfi_ld	s1, PT_R24, \docfi
	cfi_ld	s2, PT_R25, \docfi
	cfi_ld	s3, PT_R26, \docfi
	cfi_ld	s4, PT_R27, \docfi
	cfi_ld	s5, PT_R28, \docfi
	cfi_ld	s6, PT_R29, \docfi
	cfi_ld	s7, PT_R30, \docfi
	cfi_ld	s8, PT_R31, \docfi
	.endm

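/*
 * Restore ERA and PRMD, then ra, the argument registers, tp and fp; u0 is
 * only restored when returning to user mode.
 */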
	.macro	RESTORE_SOME docfi=0
	LONG_L	a0, sp, PT_PRMD
	andi    a0, a0, 0x3	/* extract pplv bit */
	beqz    a0, 8f
	cfi_ld  u0, PT_R21, \docfi
8:
	LONG_L	a0, sp, PT_ERA
	csrwr	a0, LOONGARCH_CSR_ERA
	LONG_L	a0, sp, PT_PRMD
	csrwr	a0, LOONGARCH_CSR_PRMD
	cfi_ld	ra, PT_R1, \docfi
	cfi_ld	a0, PT_R4, \docfi
	cfi_ld	a1, PT_R5, \docfi
	cfi_ld	a2, PT_R6, \docfi
	cfi_ld	a3, PT_R7, \docfi
	cfi_ld	a4, PT_R8, \docfi
	cfi_ld	a5, PT_R9, \docfi
	cfi_ld	a6, PT_R10, \docfi
	cfi_ld	a7, PT_R11, \docfi
	cfi_ld	tp, PT_R2, \docfi
	cfi_ld	fp, PT_R22, \docfi
	.endm

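/* Restore sp from pt_regs and return from the exception via ertn. */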
	.macro	RESTORE_SP_AND_RET docfi=0
	cfi_ld	sp, PT_R3, \docfi
	UNWIND_HINT_FUNC
	ertn
	.endm

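/* Restore the full register set saved by SAVE_ALL and return from the exception. */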
	.macro	RESTORE_ALL_AND_RET docfi=0
	RESTORE_STATIC \docfi
	RESTORE_TEMP \docfi
	RESTORE_SOME \docfi
	RESTORE_SP_AND_RET \docfi
	.endm

#endif /* _ASM_STACKFRAME_H */