/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License, version 2, as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * Derived from book3s_hv_rmhandlers.S, which is:
 *
 * Copyright 2011 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 *
 */

#include <asm/reg.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/tm.h>
#include <asm/cputable.h>

#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
#define VCPU_GPRS_TM(reg)	(((reg) * ULONG_SIZE) + VCPU_GPR_TM)

/*
 * Save transactional state and TM-related registers.
 * Called with:
 * - r3 pointing to the vcpu struct
 * - r4 containing the MSR with current TS bits
 *   (for HV KVM this is VCPU_MSR; for PR KVM it is the host MSR).
 * This can modify all checkpointed registers, but
 * restores r1 and r2 before exit.
 */
_GLOBAL(__kvmppc_save_tm)
	mflr	r0
	std	r0, PPC_LR_STKOFF(r1)

	/* Turn on TM. */
	mfmsr	r8
	li	r0, 1
	rldimi	r8, r0, MSR_TM_LG, 63-MSR_TM_LG
	ori	r8, r8, MSR_FP
	oris	r8, r8, (MSR_VEC | MSR_VSX)@h
	mtmsrd	r8

	rldicl.	r4, r4, 64 - MSR_TS_S_LG, 62
	beq	1f	/* TM not active in guest. */

	std	r1, HSTATE_SCRATCH2(r13)
	std	r3, HSTATE_SCRATCH1(r13)

#ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE
BEGIN_FTR_SECTION
	/* Emulation of the treclaim instruction needs TEXASR before treclaim */
	mfspr	r6, SPRN_TEXASR
	std	r6, VCPU_ORIG_TEXASR(r3)
END_FTR_SECTION_IFSET(CPU_FTR_P9_TM_HV_ASSIST)
#endif

	/* Clear the MSR RI bit since r1 and r13 are about to be clobbered. */
	li	r5, 0
	mtmsrd	r5, 1

	li	r3, TM_CAUSE_KVM_RESCHED

	/* All GPRs are volatile at this point. */
	TRECLAIM(R3)

	/* Temporarily store r13 and r9 so we have some regs to play with */
	SET_SCRATCH0(r13)
	GET_PACA(r13)
	std	r9, PACATMSCRATCH(r13)
	ld	r9, HSTATE_SCRATCH1(r13)

	/* Get a few more GPRs free. */
	std	r29, VCPU_GPRS_TM(29)(r9)
	std	r30, VCPU_GPRS_TM(30)(r9)
	std	r31, VCPU_GPRS_TM(31)(r9)

	/* Save away PPR and DSCR soon so we don't run with user values. */
	mfspr	r31, SPRN_PPR
	HMT_MEDIUM
	mfspr	r30, SPRN_DSCR
#ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE
	ld	r29, HSTATE_DSCR(r13)
	mtspr	SPRN_DSCR, r29
#endif

	/* Save all but r9, r13 & r29-r31 */
	reg = 0
	.rept	29
	.if (reg != 9) && (reg != 13)
	std	reg, VCPU_GPRS_TM(reg)(r9)
	.endif
	reg = reg + 1
	.endr
	/* ... now save r13 */
	GET_SCRATCH0(r4)
	std	r4, VCPU_GPRS_TM(13)(r9)
	/* ... and save r9 */
	ld	r4, PACATMSCRATCH(r13)
	std	r4, VCPU_GPRS_TM(9)(r9)

	/* Reload stack pointer and TOC. */
	ld	r1, HSTATE_SCRATCH2(r13)
	ld	r2, PACATOC(r13)

	/* Set MSR RI now that we have r1 and r13 back. */
	li	r5, MSR_RI
	mtmsrd	r5, 1

	/* Save away the checkpointed SPRs. */
	std	r31, VCPU_PPR_TM(r9)
	std	r30, VCPU_DSCR_TM(r9)
	mflr	r5
	mfcr	r6
	mfctr	r7
	mfspr	r8, SPRN_AMR
	mfspr	r10, SPRN_TAR
	mfxer	r11
	std	r5, VCPU_LR_TM(r9)
	stw	r6, VCPU_CR_TM(r9)
	std	r7, VCPU_CTR_TM(r9)
	std	r8, VCPU_AMR_TM(r9)
	std	r10, VCPU_TAR_TM(r9)
	std	r11, VCPU_XER_TM(r9)

	/* Restore r12 as trap number. */
	lwz	r12, VCPU_TRAP(r9)

	/* Save FP/VSX. */
	addi	r3, r9, VCPU_FPRS_TM
	bl	store_fp_state
	addi	r3, r9, VCPU_VRS_TM
	bl	store_vr_state
	mfspr	r6, SPRN_VRSAVE
	stw	r6, VCPU_VRSAVE_TM(r9)
1:
	/*
	 * We need to save these SPRs after the treclaim so that the software
	 * error code is recorded correctly in the TEXASR. Also the user may
	 * change these outside of a transaction, so they must always be
	 * context switched.
	 */
	mfspr	r7, SPRN_TEXASR
	std	r7, VCPU_TEXASR(r9)
11:
	mfspr	r5, SPRN_TFHAR
	mfspr	r6, SPRN_TFIAR
	std	r5, VCPU_TFHAR(r9)
	std	r6, VCPU_TFIAR(r9)

	ld	r0, PPC_LR_STKOFF(r1)
	mtlr	r0
	blr
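
/*
 * Illustrative sketch (not part of the original source): seen from C, the
 * contract described in the header comment of __kvmppc_save_tm() is roughly
 *
 *	void __kvmppc_save_tm(struct kvm_vcpu *vcpu, u64 msr);
 *
 * with vcpu in r3 and the MSR image in r4. Only the MSR_TS bits of r4 are
 * examined (the rldicl. above); if they are zero, the treclaim is skipped
 * and the routine branches straight to 1: to save just the TM SPRs.
 */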

/*
 * _kvmppc_save_tm_pr() is a wrapper around __kvmppc_save_tm(), so that it
 * can be invoked from C code by PR KVM only.
 */
_GLOBAL(_kvmppc_save_tm_pr)
	mflr	r5
	std	r5, PPC_LR_STKOFF(r1)
	stdu	r1, -SWITCH_FRAME_SIZE(r1)
	SAVE_NVGPRS(r1)

	/*
	 * Save the MSR, since the TM/math bits might be changed by
	 * __kvmppc_save_tm().
	 */
	mfmsr	r5
	SAVE_GPR(5, r1)

	/* Also save DSCR/CR/TAR so that they can be recovered later */
	mfspr	r6, SPRN_DSCR
	SAVE_GPR(6, r1)

	mfcr	r7
	stw	r7, _CCR(r1)

	mfspr	r8, SPRN_TAR
	SAVE_GPR(8, r1)

	bl	__kvmppc_save_tm

	REST_GPR(8, r1)
	mtspr	SPRN_TAR, r8

	ld	r7, _CCR(r1)
	mtcr	r7

	REST_GPR(6, r1)
	mtspr	SPRN_DSCR, r6

	/*
	 * Restore the saved MSR, but preserve the current MSR's MSR_TS bits,
	 * which __kvmppc_save_tm() may have changed: extract TS from the
	 * live MSR and insert it into the saved image.
	 */
	REST_GPR(5, r1)
	mfmsr	r6
	rldicl	r6, r6, 64 - MSR_TS_S_LG, 62
	rldimi	r5, r6, MSR_TS_S_LG, 63 - MSR_TS_T_LG
	mtmsrd	r5

	REST_NVGPRS(r1)
	addi	r1, r1, SWITCH_FRAME_SIZE
	ld	r5, PPC_LR_STKOFF(r1)
	mtlr	r5
	blr

EXPORT_SYMBOL_GPL(_kvmppc_save_tm_pr);
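
/*
 * Illustrative sketch (not part of the original source): per the header
 * comment of __kvmppc_save_tm(), PR KVM passes the host MSR in r4, so a C
 * caller of the export above might look like this; the surrounding
 * guest-exit context is an assumption for illustration only:
 *
 *	if (MSR_TM_ACTIVE(mfmsr()))
 *		_kvmppc_save_tm_pr(vcpu, mfmsr());
 */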

/*
 * Restore transactional state and TM-related registers.
 * Called with:
 * - r3 pointing to the vcpu struct
 * - r4 containing the guest MSR with desired TS bits
 *   (for HV KVM this is VCPU_MSR; for PR KVM it is provided by the caller).
 * This potentially modifies all checkpointed registers.
 * It restores r1 and r2 from the PACA.
 */
_GLOBAL(__kvmppc_restore_tm)
	mflr	r0
	std	r0, PPC_LR_STKOFF(r1)

	/* Turn on TM/FP/VSX/VMX so we can restore them. */
	mfmsr	r5
	li	r6, MSR_TM >> 32
	sldi	r6, r6, 32
	or	r5, r5, r6
	ori	r5, r5, MSR_FP
	oris	r5, r5, (MSR_VEC | MSR_VSX)@h
	mtmsrd	r5

	/*
	 * The user may change these outside of a transaction, so they must
	 * always be context switched.
	 */
	ld	r5, VCPU_TFHAR(r3)
	ld	r6, VCPU_TFIAR(r3)
	ld	r7, VCPU_TEXASR(r3)
	mtspr	SPRN_TFHAR, r5
	mtspr	SPRN_TFIAR, r6
	mtspr	SPRN_TEXASR, r7

	mr	r5, r4
	rldicl.	r5, r5, 64 - MSR_TS_S_LG, 62
	beqlr		/* TM not active in guest */
	std	r1, HSTATE_SCRATCH2(r13)

	/*
	 * Make sure the failure summary is set; otherwise we'll take a
	 * program check when we trechkpt. It's possible that this might
	 * not have been set on a kvmppc_set_one_reg() call, but we
	 * shouldn't let that crash the host.
	 */
	oris	r7, r7, (TEXASR_FS)@h
	mtspr	SPRN_TEXASR, r7

	/*
	 * We need to load up the checkpointed state for the guest.
	 * We need to do this early as it will blow away any GPRs, VSRs and
	 * some SPRs.
	 */

	mr	r31, r3
	addi	r3, r31, VCPU_FPRS_TM
	bl	load_fp_state
	addi	r3, r31, VCPU_VRS_TM
	bl	load_vr_state
	mr	r3, r31
	lwz	r7, VCPU_VRSAVE_TM(r3)
	mtspr	SPRN_VRSAVE, r7

	ld	r5, VCPU_LR_TM(r3)
	lwz	r6, VCPU_CR_TM(r3)
	ld	r7, VCPU_CTR_TM(r3)
	ld	r8, VCPU_AMR_TM(r3)
	ld	r9, VCPU_TAR_TM(r3)
	ld	r10, VCPU_XER_TM(r3)
	mtlr	r5
	mtcr	r6
	mtctr	r7
	mtspr	SPRN_AMR, r8
	mtspr	SPRN_TAR, r9
	mtxer	r10

	/*
	 * Load up the PPR and DSCR values but don't put them in the actual
	 * SPRs till the last moment, to avoid running with userspace PPR
	 * and DSCR for too long.
	 */
	ld	r29, VCPU_DSCR_TM(r3)
	ld	r30, VCPU_PPR_TM(r3)

	std	r2, PACATMSCRATCH(r13)	/* Save TOC */

	/* Clear the MSR RI bit since r1 and r13 are about to be clobbered. */
	li	r5, 0
	mtmsrd	r5, 1

	/* Load GPRs r0-r28 */
	reg = 0
	.rept	29
	ld	reg, VCPU_GPRS_TM(reg)(r31)
	reg = reg + 1
	.endr

	mtspr	SPRN_DSCR, r29
	mtspr	SPRN_PPR, r30

	/* Load final GPRs */
	ld	r29, VCPU_GPRS_TM(29)(r31)
	ld	r30, VCPU_GPRS_TM(30)(r31)
	ld	r31, VCPU_GPRS_TM(31)(r31)

	/* TM checkpointed state is now set up. All GPRs are now volatile. */
	TRECHKPT

	/* Now let's get back the state we need. */
	HMT_MEDIUM
	GET_PACA(r13)
#ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE
	ld	r29, HSTATE_DSCR(r13)
	mtspr	SPRN_DSCR, r29
#endif
	ld	r1, HSTATE_SCRATCH2(r13)
	ld	r2, PACATMSCRATCH(r13)

	/* Set the MSR RI bit since we have our registers back. */
	li	r5, MSR_RI
	mtmsrd	r5, 1

	ld	r0, PPC_LR_STKOFF(r1)
	mtlr	r0
	blr

/*
 * _kvmppc_restore_tm_pr() is a wrapper around __kvmppc_restore_tm(), so that
 * it can be invoked from C code by PR KVM only.
 */
_GLOBAL(_kvmppc_restore_tm_pr)
	mflr	r5
	std	r5, PPC_LR_STKOFF(r1)
	stdu	r1, -SWITCH_FRAME_SIZE(r1)
	SAVE_NVGPRS(r1)

	/* Save the MSR to avoid TM/math bit changes */
	mfmsr	r5
	SAVE_GPR(5, r1)

	/* Also save DSCR/CR/TAR so that they can be recovered later */
	mfspr	r6, SPRN_DSCR
	SAVE_GPR(6, r1)

	mfcr	r7
	stw	r7, _CCR(r1)

	mfspr	r8, SPRN_TAR
	SAVE_GPR(8, r1)

	bl	__kvmppc_restore_tm

	REST_GPR(8, r1)
	mtspr	SPRN_TAR, r8

	ld	r7, _CCR(r1)
	mtcr	r7

	REST_GPR(6, r1)
	mtspr	SPRN_DSCR, r6

	/*
	 * Restore the saved MSR, but preserve the current MSR's MSR_TS bits,
	 * which __kvmppc_restore_tm() may have changed: extract TS from the
	 * live MSR and insert it into the saved image.
	 */
	REST_GPR(5, r1)
	mfmsr	r6
	rldicl	r6, r6, 64 - MSR_TS_S_LG, 62
	rldimi	r5, r6, MSR_TS_S_LG, 63 - MSR_TS_T_LG
	mtmsrd	r5

	REST_NVGPRS(r1)
	addi	r1, r1, SWITCH_FRAME_SIZE
	ld	r5, PPC_LR_STKOFF(r1)
	mtlr	r5
	blr

EXPORT_SYMBOL_GPL(_kvmppc_restore_tm_pr);
#endif /* CONFIG_PPC_TRANSACTIONAL_MEM */
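
/*
 * Illustrative sketch (not part of the original source): the restore-side
 * mirror of the earlier example. Before re-entering the guest, a PR KVM
 * caller could recheckpoint the guest transaction with something like the
 * following, where kvmppc_get_msr() is assumed purely for illustration:
 *
 *	u64 guest_msr = kvmppc_get_msr(vcpu);
 *	if (MSR_TM_ACTIVE(guest_msr))
 *		_kvmppc_restore_tm_pr(vcpu, guest_msr);
 *
 * Per the header comment of __kvmppc_restore_tm(), the TS bits of r4
 * decide whether the trechkpt is performed at all (the beqlr above).
 */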