/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
 *
 * Assembler macros for saving/restoring the exception stack frame
 * (struct pt_regs, offsets PT_* from asm-offsets) on the LoongArch
 * kernel stack, plus early-boot helpers for programming the direct
 * mapping windows (DMW) and initial CPU modes.
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/addrspace.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/asm-offsets.h>
#include <asm/loongarch.h>
#include <asm/thread_info.h>
#include <asm/unwind_hints.h>

/* Make the addition of cfi info a little easier. */
.macro cfi_rel_offset reg offset=0 docfi=0
	.if \docfi
	.cfi_rel_offset \reg, \offset
	.endif
.endm

/* Store \reg at sp + \offset, emitting CFI info when \docfi is set. */
.macro cfi_st reg offset=0 docfi=0
	cfi_rel_offset \reg, \offset, \docfi
	LONG_S	\reg, sp, \offset
.endm

/* Emit .cfi_restore for \reg when \docfi is set (\offset is unused). */
.macro cfi_restore reg offset=0 docfi=0
	.if \docfi
	.cfi_restore \reg
	.endif
.endm

/* Load \reg from sp + \offset, emitting CFI info when \docfi is set. */
.macro cfi_ld reg offset=0 docfi=0
	LONG_L	\reg, sp, \offset
	cfi_restore \reg \offset \docfi
.endm

/*
 * Program DMWIN0 with a window derived from the currently-executing PC
 * (its non-physical upper bits, i.e. PC & ~TO_PHYS_MASK, become the
 * window base) and DMWIN1 with CSR_DMW1_INIT.  The OR'ed-in low bits
 * (1 << 4 | 1) select the window attributes -- presumably PLV0 plus the
 * memory access type; confirm against the CSR_DMW* field definitions.
 *
 * NOTE(review): the \temp argument is never used; this macro always
 * clobbers t0 and t1 -- verify that callers do not pass/expect \temp.
 */
.macro SETUP_TWINS temp
	pcaddi	t0, 0			# t0 = current PC
	PTR_LI	t1, ~TO_PHYS_MASK
	and	t0, t0, t1		# keep the window (non-physical) bits
	ori	t0, t0, (1 << 4 | 1)	# window attribute bits
	csrwr	t0, LOONGARCH_CSR_DMWIN0
	PTR_LI	t0, CSR_DMW1_INIT
	csrwr	t0, LOONGARCH_CSR_DMWIN1
.endm

/*
 * Set the boot-time CPU modes: CRMD (paging on, interrupts off),
 * PRMD and EUEN (all FP/SIMD/binary-translation extensions off).
 * Clobbers \temp.
 */
.macro SETUP_MODES temp
	/* Enable PG */
	li.w	\temp, 0xb0		# PLV=0, IE=0, PG=1
	csrwr	\temp, LOONGARCH_CSR_CRMD
	li.w	\temp, 0x04		# PLV=0, PIE=1, PWE=0
	csrwr	\temp, LOONGARCH_CSR_PRMD
	li.w	\temp, 0x00		# FPE=0, SXE=0, ASXE=0, BTE=0
	csrwr	\temp, LOONGARCH_CSR_EUEN
.endm

/*
 * Program all four direct mapping windows with their standard kernel
 * configuration.  Clobbers \temp.
 */
.macro SETUP_DMWINS temp
	PTR_LI	\temp, CSR_DMW0_INIT	# SUC, PLV0, LA32: 0x8xxx xxxx, LA64: 0x8000 xxxx xxxx xxxx
	csrwr	\temp, LOONGARCH_CSR_DMWIN0
	PTR_LI	\temp, CSR_DMW1_INIT	# CAC, PLV0, LA32: 0xaxxx xxxx, LA64: 0x9000 xxxx xxxx xxxx
	csrwr	\temp, LOONGARCH_CSR_DMWIN1
	PTR_LI	\temp, CSR_DMW2_INIT	# WUC, PLV0, LA32: unavailable, LA64: 0xa000 xxxx xxxx xxxx
	csrwr	\temp, LOONGARCH_CSR_DMWIN2
	PTR_LI	\temp, CSR_DMW3_INIT	# 0x0, unused
	csrwr	\temp, LOONGARCH_CSR_DMWIN3
.endm

/*
 * Jump to the runtime virtual address: insert the low DMW_PABITS bits
 * of the current PC into CACHE_BASE and transfer control to that alias.
 * The jirl offset of 0xc (three 4-byte instructions past the pcaddi)
 * lands execution on the instruction following this macro.
 * Clobbers \temp1 and \temp2.
 */
.macro JUMP_VIRT_ADDR temp1 temp2
	PTR_LI	\temp1, CACHE_BASE
	pcaddi	\temp2, 0
	PTR_BSTRINS	\temp1, \temp2, (DMW_PABITS - 1), 0
	jirl	zero, \temp1, 0xc
.endm

/*
 * Erase the used portion of the kernel stack before returning to user
 * space (CONFIG_KSTACK_ERASE).  NOTE: the bl writes ra and the callee
 * may clobber caller-saved registers per the C calling convention.
 */
.macro STACKLEAK_ERASE
#ifdef CONFIG_KSTACK_ERASE
	bl	stackleak_erase_on_task_stack
#endif
.endm

/*
 * Stash t0/t1 in the kernel scratch CSRs so exception-entry code has
 * two free working registers.  Paired with RELOAD_T0T1.
 */
.macro BACKUP_T0T1
	csrwr	t0, EXCEPTION_KS0
	csrwr	t1, EXCEPTION_KS1
.endm

/* Recover the t0/t1 values stashed by BACKUP_T0T1. */
.macro RELOAD_T0T1
	csrrd	t0, EXCEPTION_KS0
	csrrd	t1, EXCEPTION_KS1
.endm

/*
 * Save the temporary registers t0-t8 into pt_regs.  The original t0/t1
 * are first recovered from the scratch CSRs (they were used as scratch
 * by SAVE_SOME), so BACKUP_T0T1 must have run at exception entry.
 */
.macro SAVE_TEMP docfi=0
	RELOAD_T0T1
	cfi_st	t0, PT_R12, \docfi
	cfi_st	t1, PT_R13, \docfi
	cfi_st	t2, PT_R14, \docfi
	cfi_st	t3, PT_R15, \docfi
	cfi_st	t4, PT_R16, \docfi
	cfi_st	t5, PT_R17, \docfi
	cfi_st	t6, PT_R18, \docfi
	cfi_st	t7, PT_R19, \docfi
	cfi_st	t8, PT_R20, \docfi
.endm

/* Save the callee-saved (static) registers s0-s8 into pt_regs. */
.macro SAVE_STATIC docfi=0
	cfi_st	s0, PT_R23, \docfi
	cfi_st	s1, PT_R24, \docfi
	cfi_st	s2, PT_R25, \docfi
	cfi_st	s3, PT_R26, \docfi
	cfi_st	s4, PT_R27, \docfi
	cfi_st	s5, PT_R28, \docfi
	cfi_st	s6, PT_R29, \docfi
	cfi_st	s7, PT_R30, \docfi
	cfi_st	s8, PT_R31, \docfi
.endm

/*
 * get_saved_sp returns the SP for the current CPU by looking in the
 * kernelsp array for it. It stores the current sp in t0 and loads the
 * new value in sp.  On SMP the array is indexed by the per-CPU base
 * read from the PERCPU_BASE_KS scratch CSR.  Clobbers t0 and t1.
 */
.macro get_saved_sp docfi=0
	la_abs	t1, kernelsp
#ifdef CONFIG_SMP
	csrrd	t0, PERCPU_BASE_KS
	LONG_ADD	t1, t1, t0
#endif
	move	t0, sp
	.if \docfi
	.cfi_register sp, t0
	.endif
	LONG_L	sp, t1, 0
.endm

/*
 * Record \stackp as this CPU's kernel stack pointer in kernelsp.  On
 * SMP, u0 supplies the per-CPU base (loaded from PERCPU_BASE_KS in
 * SAVE_SOME).  Clobbers \temp.
 * NOTE(review): \temp2 is declared but never used -- confirm callers.
 */
.macro set_saved_sp stackp temp temp2
	la.pcrel \temp, kernelsp
#ifdef CONFIG_SMP
	LONG_ADD \temp, \temp, u0
#endif
	LONG_S	\stackp, \temp, 0
.endm

/*
 * Build a pt_regs frame on the kernel stack and save the registers and
 * CSRs every handler needs: sp, zero, PRMD/CRMD/EUEN/ECFG/ESTAT/ERA,
 * ra, a0-a7, tp, fp.  If the exception came from user mode
 * (PRMD.PPLV != 0) it first switches to this CPU's kernel stack, and
 * afterwards derives tp (thread_info) from sp and loads u0 with the
 * per-CPU base.  Uses t0/t1 as scratch; their entry values must have
 * been stashed by BACKUP_T0T1 (SAVE_TEMP reloads them from there).
 */
.macro SAVE_SOME docfi=0
	csrrd	t1, LOONGARCH_CSR_PRMD
	andi	t1, t1, 0x3		/* extract pplv bit */
	move	t0, sp
	beqz	t1, 8f
	/* Came from user mode: switch to the kernel stack. */
	get_saved_sp docfi=\docfi
8:
	PTR_ADDI sp, sp, -PT_SIZE
	.if \docfi
	.cfi_def_cfa sp, 0
	.endif
	cfi_st	t0, PT_R3, \docfi	# t0 holds the pre-exception sp
	cfi_rel_offset sp, PT_R3, \docfi
	LONG_S	zero, sp, PT_R0
	csrrd	t0, LOONGARCH_CSR_PRMD
	LONG_S	t0, sp, PT_PRMD
	csrrd	t0, LOONGARCH_CSR_CRMD
	LONG_S	t0, sp, PT_CRMD
	csrrd	t0, LOONGARCH_CSR_EUEN
	LONG_S	t0, sp, PT_EUEN
	csrrd	t0, LOONGARCH_CSR_ECFG
	LONG_S	t0, sp, PT_ECFG
	csrrd	t0, LOONGARCH_CSR_ESTAT
	PTR_S	t0, sp, PT_ESTAT
	cfi_st	ra, PT_R1, \docfi
	cfi_st	a0, PT_R4, \docfi
	cfi_st	a1, PT_R5, \docfi
	cfi_st	a2, PT_R6, \docfi
	cfi_st	a3, PT_R7, \docfi
	cfi_st	a4, PT_R8, \docfi
	cfi_st	a5, PT_R9, \docfi
	cfi_st	a6, PT_R10, \docfi
	cfi_st	a7, PT_R11, \docfi
	csrrd	ra, LOONGARCH_CSR_ERA
	LONG_S	ra, sp, PT_ERA
	.if \docfi
	.cfi_rel_offset ra, PT_ERA
	.endif
	cfi_st	tp, PT_R2, \docfi
	cfi_st	fp, PT_R22, \docfi

	/* Set thread_info if we're coming from user mode */
	csrrd	t0, LOONGARCH_CSR_PRMD
	andi	t0, t0, 0x3		/* extract pplv bit */
	beqz	t0, 9f

	LONG_LI	tp, ~_THREAD_MASK	# thread_info is at the stack base
	and	tp, tp, sp
	cfi_st	u0, PT_R21, \docfi
	csrrd	u0, PERCPU_BASE_KS	# u0 = per-CPU base while in kernel
9:
#ifdef CONFIG_KGDB
	li.w	t0, CSR_CRMD_WE		# set CRMD.WE -- presumably enables
	csrxchg	t0, t0, LOONGARCH_CSR_CRMD	# watchpoints for kgdb; confirm
#endif
	UNWIND_HINT_REGS
.endm

/* Save the full register set: SAVE_SOME + temporaries + callee-saved. */
.macro SAVE_ALL docfi=0
	SAVE_SOME \docfi
	SAVE_TEMP \docfi
	SAVE_STATIC \docfi
.endm

/* Restore the temporary registers t0-t8 from pt_regs. */
.macro RESTORE_TEMP docfi=0
	cfi_ld	t0, PT_R12, \docfi
	cfi_ld	t1, PT_R13, \docfi
	cfi_ld	t2, PT_R14, \docfi
	cfi_ld	t3, PT_R15, \docfi
	cfi_ld	t4, PT_R16, \docfi
	cfi_ld	t5, PT_R17, \docfi
	cfi_ld	t6, PT_R18, \docfi
	cfi_ld	t7, PT_R19, \docfi
	cfi_ld	t8, PT_R20, \docfi
.endm

/* Restore the callee-saved registers s0-s8 from pt_regs. */
.macro RESTORE_STATIC docfi=0
	cfi_ld	s0, PT_R23, \docfi
	cfi_ld	s1, PT_R24, \docfi
	cfi_ld	s2, PT_R25, \docfi
	cfi_ld	s3, PT_R26, \docfi
	cfi_ld	s4, PT_R27, \docfi
	cfi_ld	s5, PT_R28, \docfi
	cfi_ld	s6, PT_R29, \docfi
	cfi_ld	s7, PT_R30, \docfi
	cfi_ld	s8, PT_R31, \docfi
.endm

/*
 * Restore ERA/PRMD and the registers saved by SAVE_SOME (except sp --
 * see RESTORE_SP_AND_RET).  u0 is reloaded only when returning to user
 * mode (saved PRMD.PPLV != 0), mirroring SAVE_SOME, which saves u0
 * only on entry from user mode.  Uses a0 as scratch before its final
 * restore.
 */
.macro RESTORE_SOME docfi=0
	LONG_L	a0, sp, PT_PRMD
	andi	a0, a0, 0x3		/* extract pplv bit */
	beqz	a0, 8f
	cfi_ld	u0, PT_R21, \docfi	# returning to user: restore u0
8:
	LONG_L	a0, sp, PT_ERA
	csrwr	a0, LOONGARCH_CSR_ERA
	LONG_L	a0, sp, PT_PRMD
	csrwr	a0, LOONGARCH_CSR_PRMD
	cfi_ld	ra, PT_R1, \docfi
	cfi_ld	a0, PT_R4, \docfi
	cfi_ld	a1, PT_R5, \docfi
	cfi_ld	a2, PT_R6, \docfi
	cfi_ld	a3, PT_R7, \docfi
	cfi_ld	a4, PT_R8, \docfi
	cfi_ld	a5, PT_R9, \docfi
	cfi_ld	a6, PT_R10, \docfi
	cfi_ld	a7, PT_R11, \docfi
	cfi_ld	tp, PT_R2, \docfi
	cfi_ld	fp, PT_R22, \docfi
.endm

/* Restore the pre-exception sp and return from exception (ertn). */
.macro RESTORE_SP_AND_RET docfi=0
	cfi_ld	sp, PT_R3, \docfi
	UNWIND_HINT_FUNC
	ertn
.endm

/* Full exception exit: undo SAVE_ALL, then restore sp and ertn. */
.macro RESTORE_ALL_AND_RET docfi=0
	RESTORE_STATIC \docfi
	RESTORE_TEMP \docfi
	RESTORE_SOME \docfi
	RESTORE_SP_AND_RET \docfi
.endm

#endif /* _ASM_STACKFRAME_H */