/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/addrspace.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/asm-offsets.h>
#include <asm/loongarch.h>
#include <asm/thread_info.h>
#include <asm/unwind_hints.h>

/* Make the addition of cfi info a little easier. */
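/*
 * cfi_st/cfi_ld store/load a register at sp + offset and, when docfi=1,
 * also emit the matching .cfi_rel_offset/.cfi_restore annotation.
 */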
	.macro cfi_rel_offset reg offset=0 docfi=0
	.if \docfi
	.cfi_rel_offset \reg, \offset
	.endif
	.endm

	.macro cfi_st reg offset=0 docfi=0
	cfi_rel_offset \reg, \offset, \docfi
	LONG_S	\reg, sp, \offset
	.endm

	.macro cfi_restore reg offset=0 docfi=0
	.if \docfi
	.cfi_restore \reg
	.endif
	.endm

	.macro cfi_ld reg offset=0 docfi=0
	LONG_L	\reg, sp, \offset
	cfi_restore \reg \offset \docfi
	.endm

/* Jump to the runtime virtual address. */
	.macro JUMP_VIRT_ADDR temp1 temp2
	li.d	\temp1, CACHE_BASE
	pcaddi	\temp2, 0
	or	\temp1, \temp1, \temp2
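	/* +0xc skips the pcaddi/or/jirl above, i.e. continue at the insn
	   after this macro, now through the cached window (CACHE_BASE) */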
	jirl	zero, \temp1, 0xc
	.endm

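/*
 * BACKUP_T0T1 parks t0/t1 in the exception scratch CSRs so early exception
 * code can use them before any registers have been saved; RELOAD_T0T1
 * fetches them back once a pt_regs frame is available.
 */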
	.macro BACKUP_T0T1
	csrwr	t0, EXCEPTION_KS0
	csrwr	t1, EXCEPTION_KS1
	.endm

	.macro RELOAD_T0T1
	csrrd   t0, EXCEPTION_KS0
	csrrd   t1, EXCEPTION_KS1
	.endm

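/* Save the temporaries t0-t8; t0/t1 come back from the scratch CSRs. */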
	.macro	SAVE_TEMP docfi=0
	RELOAD_T0T1
	cfi_st	t0, PT_R12, \docfi
	cfi_st	t1, PT_R13, \docfi
	cfi_st	t2, PT_R14, \docfi
	cfi_st	t3, PT_R15, \docfi
	cfi_st	t4, PT_R16, \docfi
	cfi_st	t5, PT_R17, \docfi
	cfi_st	t6, PT_R18, \docfi
	cfi_st	t7, PT_R19, \docfi
	cfi_st	t8, PT_R20, \docfi
	.endm

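/* Save the callee-saved registers s0-s8. */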
	.macro	SAVE_STATIC docfi=0
	cfi_st	s0, PT_R23, \docfi
	cfi_st	s1, PT_R24, \docfi
	cfi_st	s2, PT_R25, \docfi
	cfi_st	s3, PT_R26, \docfi
	cfi_st	s4, PT_R27, \docfi
	cfi_st	s5, PT_R28, \docfi
	cfi_st	s6, PT_R29, \docfi
	cfi_st	s7, PT_R30, \docfi
	cfi_st	s8, PT_R31, \docfi
	.endm

/*
 * get_saved_sp returns the SP for the current CPU by looking in the
 * kernelsp array for it. It stores the current sp in t0 and loads the
 * new value in sp.
 */
	.macro	get_saved_sp docfi=0
	la_abs	  t1, kernelsp
#ifdef CONFIG_SMP
	csrrd	  t0, PERCPU_BASE_KS
	LONG_ADD  t1, t1, t0
#endif
	move	  t0, sp
	.if \docfi
	.cfi_register sp, t0
	.endif
	LONG_L	  sp, t1, 0
	.endm

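/*
 * set_saved_sp records \stackp as the kernel stack pointer for this CPU
 * in the kernelsp array (offset by the per-CPU base in u0 on SMP).
 */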
	.macro	set_saved_sp stackp temp temp2
	la.pcrel  \temp, kernelsp
#ifdef CONFIG_SMP
	LONG_ADD  \temp, \temp, u0
#endif
	LONG_S	  \stackp, \temp, 0
	.endm

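/*
 * SAVE_SOME sets up the pt_regs frame: it switches to the kernel stack if
 * the exception came from user mode (PRMD.PPLV != 0), allocates PT_SIZE on
 * the stack, records the exception CSRs (PRMD/CRMD/EUEN/ECFG/ESTAT/ERA)
 * and saves ra, a0-a7, tp and fp. On entry from user mode it also derives
 * tp (thread_info) from sp and reloads the per-CPU base into u0.
 */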
	.macro	SAVE_SOME docfi=0
	csrrd	t1, LOONGARCH_CSR_PRMD
	andi	t1, t1, 0x3	/* extract the PPLV field */
	move	t0, sp
	beqz	t1, 8f
	/* Called from user mode, new stack. */
	get_saved_sp docfi=\docfi
8:
	PTR_ADDI sp, sp, -PT_SIZE
	.if \docfi
	.cfi_def_cfa sp, 0
	.endif
	cfi_st	t0, PT_R3, \docfi
	cfi_rel_offset  sp, PT_R3, \docfi
	LONG_S	zero, sp, PT_R0
	csrrd	t0, LOONGARCH_CSR_PRMD
	LONG_S	t0, sp, PT_PRMD
	csrrd	t0, LOONGARCH_CSR_CRMD
	LONG_S	t0, sp, PT_CRMD
	csrrd	t0, LOONGARCH_CSR_EUEN
	LONG_S  t0, sp, PT_EUEN
	csrrd	t0, LOONGARCH_CSR_ECFG
	LONG_S	t0, sp, PT_ECFG
	csrrd	t0, LOONGARCH_CSR_ESTAT
	PTR_S	t0, sp, PT_ESTAT
	cfi_st	ra, PT_R1, \docfi
	cfi_st	a0, PT_R4, \docfi
	cfi_st	a1, PT_R5, \docfi
	cfi_st	a2, PT_R6, \docfi
	cfi_st	a3, PT_R7, \docfi
	cfi_st	a4, PT_R8, \docfi
	cfi_st	a5, PT_R9, \docfi
	cfi_st	a6, PT_R10, \docfi
	cfi_st	a7, PT_R11, \docfi
	csrrd	ra, LOONGARCH_CSR_ERA
	LONG_S	ra, sp, PT_ERA
	.if \docfi
	.cfi_rel_offset ra, PT_ERA
	.endif
	cfi_st	tp, PT_R2, \docfi
	cfi_st	fp, PT_R22, \docfi

	/* Set thread_info (tp) and the per-CPU base (u0) if we're coming from user mode */
	csrrd	t0, LOONGARCH_CSR_PRMD
	andi	t0, t0, 0x3	/* extract the PPLV field */
	beqz	t0, 9f

	li.d	tp, ~_THREAD_MASK
	and	tp, tp, sp
	cfi_st  u0, PT_R21, \docfi
	csrrd	u0, PERCPU_BASE_KS
9:
#ifdef CONFIG_KGDB
	li.w	t0, CSR_CRMD_WE
	csrxchg	t0, t0, LOONGARCH_CSR_CRMD
#endif
	UNWIND_HINT_REGS
	.endm

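/* Save the full register set tracked in pt_regs. */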
	.macro	SAVE_ALL docfi=0
	SAVE_SOME \docfi
	SAVE_TEMP \docfi
	SAVE_STATIC \docfi
	.endm

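/* Restore the temporaries t0-t8. */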
	.macro	RESTORE_TEMP docfi=0
	cfi_ld	t0, PT_R12, \docfi
	cfi_ld	t1, PT_R13, \docfi
	cfi_ld	t2, PT_R14, \docfi
	cfi_ld	t3, PT_R15, \docfi
	cfi_ld	t4, PT_R16, \docfi
	cfi_ld	t5, PT_R17, \docfi
	cfi_ld	t6, PT_R18, \docfi
	cfi_ld	t7, PT_R19, \docfi
	cfi_ld	t8, PT_R20, \docfi
	.endm

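/* Restore the callee-saved registers s0-s8. */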
	.macro	RESTORE_STATIC docfi=0
	cfi_ld	s0, PT_R23, \docfi
	cfi_ld	s1, PT_R24, \docfi
	cfi_ld	s2, PT_R25, \docfi
	cfi_ld	s3, PT_R26, \docfi
	cfi_ld	s4, PT_R27, \docfi
	cfi_ld	s5, PT_R28, \docfi
	cfi_ld	s6, PT_R29, \docfi
	cfi_ld	s7, PT_R30, \docfi
	cfi_ld	s8, PT_R31, \docfi
	.endm

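/*
 * RESTORE_SOME undoes SAVE_SOME: it reloads u0 when returning to user
 * mode, writes ERA/PRMD back so ertn resumes at the right place and
 * privilege level, then restores ra, a0-a7, tp and fp.
 */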
	.macro	RESTORE_SOME docfi=0
	LONG_L	a0, sp, PT_PRMD
	andi    a0, a0, 0x3	/* extract the PPLV field */
	beqz    a0, 8f
	cfi_ld  u0, PT_R21, \docfi
8:
	LONG_L	a0, sp, PT_ERA
	csrwr	a0, LOONGARCH_CSR_ERA
	LONG_L	a0, sp, PT_PRMD
	csrwr	a0, LOONGARCH_CSR_PRMD
	cfi_ld	ra, PT_R1, \docfi
	cfi_ld	a0, PT_R4, \docfi
	cfi_ld	a1, PT_R5, \docfi
	cfi_ld	a2, PT_R6, \docfi
	cfi_ld	a3, PT_R7, \docfi
	cfi_ld	a4, PT_R8, \docfi
	cfi_ld	a5, PT_R9, \docfi
	cfi_ld	a6, PT_R10, \docfi
	cfi_ld	a7, PT_R11, \docfi
	cfi_ld	tp, PT_R2, \docfi
	cfi_ld	fp, PT_R22, \docfi
	.endm

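/* Reload the saved stack pointer and return from the exception. */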
	.macro	RESTORE_SP_AND_RET docfi=0
	cfi_ld	sp, PT_R3, \docfi
	UNWIND_HINT_FUNC
	ertn
	.endm

	.macro	RESTORE_ALL_AND_RET docfi=0
	RESTORE_STATIC \docfi
	RESTORE_TEMP \docfi
	RESTORE_SOME \docfi
	RESTORE_SP_AND_RET \docfi
	.endm

#endif /* _ASM_STACKFRAME_H */