1 /* 2 * CDDL HEADER START 3 * 4 * The contents of this file are subject to the terms of the 5 * Common Development and Distribution License (the "License"). 6 * You may not use this file except in compliance with the License. 7 * 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE 9 * or http://www.opensolaris.org/os/licensing. 10 * See the License for the specific language governing permissions 11 * and limitations under the License. 12 * 13 * When distributing Covered Code, include this CDDL HEADER in each 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE. 15 * If applicable, add the following below this CDDL HEADER, with the 16 * fields enclosed by brackets "[]" replaced with your own identifying 17 * information: Portions Copyright [yyyy] [name of copyright owner] 18 * 19 * CDDL HEADER END 20 */ 21 /* 22 * Copyright 2009 Sun Microsystems, Inc. All rights reserved. 23 * Use is subject to license terms. 24 */ 25 26 /* 27 * VM - Hardware Address Translation management. 28 * 29 * This file describes the contents of the sun reference mmu (sfmmu) 30 * specific hat data structures and the sfmmu specific hat procedures. 31 * The machine independent interface is described in <vm/hat.h>. 32 */ 33 34 #ifndef _VM_MACH_SFMMU_H 35 #define _VM_MACH_SFMMU_H 36 37 #include <sys/x_call.h> 38 #include <sys/hypervisor_api.h> 39 40 #ifdef __cplusplus 41 extern "C" { 42 #endif 43 44 /* 45 * Define UTSB_PHYS if user TSB is always accessed via physical address. 46 * On sun4v platform, user TSB is accessed via physical address. 
*/
#define	UTSB_PHYS	1

/*
 * Hypervisor TSB info
 */
#define	NHV_TSB_INFO	4

#ifndef _ASM

/*
 * Per-CPU block describing the TSBs handed to the hypervisor.
 */
struct hv_tsb_block {
	uint64_t	hv_tsb_info_pa;	/* hypervisor TSB info PA */
	uint64_t	hv_tsb_info_cnt; /* hypervisor TSB info count */
	hv_tsb_info_t	hv_tsb_info[NHV_TSB_INFO]; /* hypervisor TSB info */
};

#endif /* _ASM */

#ifdef _ASM

/*
 * This macro is used to set the private/shared secondary context register in
 * sfmmu_alloc_ctx().
 * Input:
 *	cnum = context number to load
 *	is_shctx = sfmmu private/shared flag (0: private, 1: shared)
 *	tmp2 is only used in the sun4u version of this macro
 */
#define	SET_SECCTX(cnum, is_shctx, tmp1, tmp2, label)			\
	mov	MMU_SCONTEXT, tmp1;					\
	movrnz	is_shctx, MMU_SCONTEXT1, tmp1;				\
	stxa	cnum, [tmp1]ASI_MMU_CTX;	/* set 2nd ctx reg. */	\
	membar	#Sync;							\

/*
 * This macro is used in the MMU code to check if TL should be lowered from
 * 2 to 1 to pop trapstat's state.  See the block comment in trapstat.c
 * for details.
 */

#define	TSTAT_CHECK_TL1(label, scr1, scr2)				\
	rdpr	%tpc, scr1;						\
	sethi	%hi(KERNELBASE), scr2;					\
	or	scr2, %lo(KERNELBASE), scr2;				\
	cmp	scr1, scr2;						\
	bgeu	%xcc, 9f;						\
	nop;								\
	wrpr	%g0, 1, %gl;						\
	ba	label;							\
	wrpr	%g0, 1, %tl;						\
9:

/*
 * The following macros allow us to share majority of the
 * SFMMU code between sun4u and sun4v platforms.
 * The sun4u-only ones expand to nothing here.
 */

#define	SETUP_TSB_ASI(qlp, tmp)

#define	SETUP_UTSB_ATOMIC_ASI(tmp1, tmp2)

/*
 * Macro to switch to alternate global register on sun4u platforms
 * (not applicable to sun4v platforms)
 */
#define	USE_ALTERNATE_GLOBALS(scr)

/*
 * Macro to set %gl register value on sun4v platforms
 * (not applicable to sun4u platforms)
 */
#define	SET_GL_REG(val)						\
	wrpr	%g0, val, %gl

/*
 * Get pseudo-tagacc value and context from the MMU fault area.  Pseudo-tagacc
 * is the faulting virtual address OR'd with 0 for KCONTEXT, INVALID_CONTEXT
 * (1) for invalid context, and USER_CONTEXT (2) for user context.
 *
 * In:
 *	tagacc, ctxtype = scratch registers
 * Out:
 *	tagacc = MMU data tag access register value
 *	ctx = context type (KCONTEXT, INVALID_CONTEXT or USER_CONTEXT)
 */
#define	GET_MMU_D_PTAGACC_CTXTYPE(ptagacc, ctxtype)			\
	MMU_FAULT_STATUS_AREA(ctxtype);					\
	ldx	[ctxtype + MMFSA_D_ADDR], ptagacc;			\
	ldx	[ctxtype + MMFSA_D_CTX], ctxtype;			\
	srlx	ptagacc, MMU_PAGESHIFT, ptagacc; /* align to page boundary */ \
	cmp	ctxtype, USER_CONTEXT_TYPE;				\
	sllx	ptagacc, MMU_PAGESHIFT, ptagacc;			\
	movgu	%icc, USER_CONTEXT_TYPE, ctxtype;			\
	or	ptagacc, ctxtype, ptagacc

/*
 * Synthesize/get data tag access register value from the MMU fault area
 *
 * In:
 *	tagacc, scr1 = scratch registers
 * Out:
 *	tagacc = MMU data tag access register value
 */
#define	GET_MMU_D_TAGACC(tagacc, scr1)					\
	GET_MMU_D_PTAGACC_CTXTYPE(tagacc, scr1)

/*
 * Synthesize/get data tag target register value from the MMU fault area
 *
 * In:
 *	ttarget, scr1 = scratch registers
 * Out:
 *	ttarget = MMU data tag target register value
 */
#define	GET_MMU_D_TTARGET(ttarget, scr1)				\
	MMU_FAULT_STATUS_AREA(ttarget);					\
	ldx	[ttarget + MMFSA_D_CTX], scr1;				\
	sllx	scr1, TTARGET_CTX_SHIFT, scr1;				\
	ldx	[ttarget + MMFSA_D_ADDR], ttarget;			\
	srlx	ttarget, TTARGET_VA_SHIFT, ttarget;			\
	or	ttarget, scr1, ttarget

/*
 * Synthesize/get data/instruction pseudo tag access register values
 * from the MMU fault area (context is 0 for kernel, 1 for invalid, 2 for user)
 *
 * In:
 *	dtagacc, itagacc, scr1, scr2 = scratch registers
 * Out:
 *	dtagacc = MMU data tag access register value w/pseudo-context
 *	itagacc = MMU instruction tag access register value w/pseudo-context
 */
#define	GET_MMU_BOTH_TAGACC(dtagacc, itagacc, scr1, scr2)		\
	MMU_FAULT_STATUS_AREA(scr1);					\
	ldx	[scr1 + MMFSA_D_ADDR], scr2;				\
	ldx	[scr1 + MMFSA_D_CTX], dtagacc;				\
	srlx	scr2, MMU_PAGESHIFT, scr2; /* align to page boundary */	\
	cmp	dtagacc, USER_CONTEXT_TYPE;				\
	sllx	scr2, MMU_PAGESHIFT, scr2;				\
	movgu	%icc, USER_CONTEXT_TYPE, dtagacc;			\
	or	scr2, dtagacc, dtagacc;					\
	ldx	[scr1 + MMFSA_I_ADDR], scr2;				\
	ldx	[scr1 + MMFSA_I_CTX], itagacc;				\
	srlx	scr2, MMU_PAGESHIFT, scr2; /* align to page boundary */	\
	cmp	itagacc, USER_CONTEXT_TYPE;				\
	sllx	scr2, MMU_PAGESHIFT, scr2;				\
	movgu	%icc, USER_CONTEXT_TYPE, itagacc;			\
	or	scr2, itagacc, itagacc

/*
 * Synthesize/get MMU data fault address from the MMU fault area
 *
 * In:
 *	daddr, scr1 = scratch registers
 * Out:
 *	daddr = MMU data fault address
 */
#define	GET_MMU_D_ADDR(daddr, scr1)					\
	MMU_FAULT_STATUS_AREA(scr1);					\
	ldx	[scr1 + MMFSA_D_ADDR], daddr

/*
 * Get pseudo-tagacc value and context from the MMU fault area.  Pseudo-tagacc
 * is the faulting virtual address OR'd with 0 for KCONTEXT, INVALID_CONTEXT
 * (1) for invalid context, and USER_CONTEXT (2) for user context.
*
 * In:
 *	tagacc, ctxtype = scratch registers
 * Out:
 *	tagacc = MMU instruction tag access register value
 *	ctxtype = context type (KCONTEXT, INVALID_CONTEXT or USER_CONTEXT)
 */
#define	GET_MMU_I_PTAGACC_CTXTYPE(ptagacc, ctxtype)			\
	MMU_FAULT_STATUS_AREA(ctxtype);					\
	ldx	[ctxtype + MMFSA_I_ADDR], ptagacc;			\
	ldx	[ctxtype + MMFSA_I_CTX], ctxtype;			\
	srlx	ptagacc, MMU_PAGESHIFT, ptagacc; /* align to page boundary */ \
	cmp	ctxtype, USER_CONTEXT_TYPE;				\
	sllx	ptagacc, MMU_PAGESHIFT, ptagacc;			\
	movgu	%icc, USER_CONTEXT_TYPE, ctxtype;			\
	or	ptagacc, ctxtype, ptagacc

/*
 * Load ITLB entry (via the MMU_MAP_ADDR hypervisor trap; %o0-%o3 are
 * saved/restored around the call, and a non-zero hcall status panics).
 *
 * In:
 *	tte = reg containing tte
 *	scr1, scr2, scr3, scr4 = scratch registers
 */
#define	ITLB_STUFF(tte, scr1, scr2, scr3, scr4)				\
	mov	%o0, scr1;						\
	mov	%o1, scr2;						\
	mov	%o2, scr3;						\
	mov	%o3, scr4;						\
	MMU_FAULT_STATUS_AREA(%o2);					\
	ldx	[%o2 + MMFSA_I_ADDR], %o0;				\
	ldx	[%o2 + MMFSA_I_CTX], %o1;				\
	mov	tte, %o2;						\
	mov	MAP_ITLB, %o3;						\
	ta	MMU_MAP_ADDR;						\
	/* BEGIN CSTYLED */						\
	brnz,a,pn %o0, ptl1_panic;					\
	  mov	PTL1_BAD_HCALL, %g1;					\
	/* END CSTYLED */						\
	mov	scr1, %o0;						\
	mov	scr2, %o1;						\
	mov	scr3, %o2;						\
	mov	scr4, %o3

/*
 * Load DTLB entry (same hypervisor-trap sequence as ITLB_STUFF, but for
 * the data TLB).
 *
 * In:
 *	tte = reg containing tte
 *	scr1, scr2, scr3, scr4 = scratch registers
 */
#define	DTLB_STUFF(tte, scr1, scr2, scr3, scr4)				\
	mov	%o0, scr1;						\
	mov	%o1, scr2;						\
	mov	%o2, scr3;						\
	mov	%o3, scr4;						\
	MMU_FAULT_STATUS_AREA(%o2);					\
	ldx	[%o2 + MMFSA_D_ADDR], %o0;				\
	ldx	[%o2 + MMFSA_D_CTX], %o1;				\
	mov	tte, %o2;						\
	mov	MAP_DTLB, %o3;						\
	ta	MMU_MAP_ADDR;						\
	/* BEGIN CSTYLED */						\
	brnz,a,pn %o0, ptl1_panic;					\
	  mov	PTL1_BAD_HCALL, %g1;					\
	/* END CSTYLED */						\
	mov	scr1, %o0;						\
	mov	scr2, %o1;						\
	mov	scr3, %o2;						\
	mov	scr4, %o3

/*
 * Returns PFN given the TTE and vaddr
 *
 * In:
 *	tte = reg containing tte
 *	vaddr = reg containing vaddr
 *	scr1, scr2, scr3 = scratch registers
 * Out:
 *	tte = PFN value
 */
#define	TTETOPFN(tte, vaddr, label, scr1, scr2, scr3)			\
	and	tte, TTE_SZ_BITS, scr1;		/* scr1 = ttesz */	\
	sllx	tte, TTE_PA_LSHIFT, tte;				\
	sllx	scr1, 1, scr2;						\
	add	scr2, scr1, scr2;		/* mulx 3 */		\
	add	scr2, MMU_PAGESHIFT + TTE_PA_LSHIFT, scr3;		\
	/* CSTYLED */							\
	brz,pt	scr2, label##1;						\
	srlx	tte, scr3, tte;						\
	sllx	tte, scr2, tte;						\
	set	1, scr1;						\
	add	scr2, MMU_PAGESHIFT, scr3;				\
	sllx	scr1, scr3, scr1;					\
	sub	scr1, 1, scr1;	/* scr1=TTE_PAGE_OFFSET(ttesz) */	\
	and	vaddr, scr1, scr2;					\
	srln	scr2, MMU_PAGESHIFT, scr2;				\
	or	tte, scr2, tte;						\
	/* CSTYLED */							\
label##1:

/*
 * TTE_SET_REF_ML is a macro that updates the reference bit if it is
 * not already set.  The update is done with a casxa retry loop so a
 * concurrent TTE change is never overwritten.
 *
 * Parameters:
 *	tte = reg containing tte
 *	ttepa = physical pointer to tte
 *	tsbarea = tsb miss area
 *	tmp1 = tmp reg
 *	tmp2 = tmp reg
 *	label = temporary label
 */

#define	TTE_SET_REF_ML(tte, ttepa, tsbarea, tmp1, tmp2, label)		\
	/* BEGIN CSTYLED */						\
	/* check reference bit */					\
	btst	TTE_REF_INT, tte;					\
	bnz,pt	%xcc, label##2;	/* if ref bit set-skip ahead */		\
	nop;								\
	/* update reference bit */					\
label##1:								\
	or	tte, TTE_REF_INT, tmp1;					\
	casxa	[ttepa]ASI_MEM, tte, tmp1;	/* update ref bit */	\
	cmp	tte, tmp1;						\
	bne,a,pn %xcc, label##1;					\
	ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */	\
	or	tte, TTE_REF_INT, tte;					\
label##2:								\
	/* END CSTYLED */


/*
 * TTE_SET_REFMOD_ML is a macro that updates the reference and modify bits
 * if not already set.
*
 * Parameters:
 *	tte = reg containing tte
 *	ttepa = physical pointer to tte
 *	tsbarea = tsb miss area
 *	tmp1 = tmp reg
 *	tmp2 = tmp reg
 *	label = temporary label
 *	exitlabel = label where to jump to if write perm bit not set.
 */

#define	TTE_SET_REFMOD_ML(tte, ttepa, tsbarea, tmp1, tmp2, label,	\
	exitlabel)							\
	/* BEGIN CSTYLED */						\
	/* check reference bit */					\
	btst	TTE_WRPRM_INT, tte;					\
	bz,pn	%xcc, exitlabel;	/* exit if wr_perm not set */	\
	btst	TTE_HWWR_INT, tte;					\
	bnz,pn	%xcc, label##2;		/* nothing to do */		\
	nop;								\
	/* update reference bit */					\
label##1:								\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tmp1;			\
	casxa	[ttepa]ASI_MEM, tte, tmp1; /* update ref/mod bit */	\
	cmp	tte, tmp1;						\
	bne,a,pn %xcc, label##1;					\
	ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */	\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tte;			\
label##2:								\
	/* END CSTYLED */
/*
 * Get TSB base register from the scratchpad for
 * shared contexts
 *
 * In:
 *	tsbmiss = pointer to tsbmiss area
 *	tsbmissoffset = offset to right tsb pointer
 *	tsbreg = scratch
 * Out:
 *	tsbreg = tsbreg from the specified scratchpad register
 */
#define	GET_UTSBREG_SHCTX(tsbmiss, tsbmissoffset, tsbreg)		\
	ldx	[tsbmiss + tsbmissoffset], tsbreg


/*
 * Get the location of the TSB entry in the first TSB to probe
 *
 * In:
 *	tagacc = tag access register (not clobbered)
 *	tsbe, tmp1, tmp2 = scratch registers
 * Out:
 *	tsbe = pointer to the tsbe in the 1st TSB
 */

#define	GET_1ST_TSBE_PTR(tagacc, tsbe, tmp1, tmp2)			\
	/* BEGIN CSTYLED */						\
	mov	SCRATCHPAD_UTSBREG1, tmp1				;\
	ldxa	[tmp1]ASI_SCRATCHPAD, tsbe	/* get tsbreg */	;\
	and	tsbe, TSB_SOFTSZ_MASK, tmp2	/* tmp2=szc */		;\
	andn	tsbe, TSB_SOFTSZ_MASK, tsbe	/* tsbbase */		;\
	mov	TSB_ENTRIES(0), tmp1	/* nentries in TSB size 0 */	;\
	sllx	tmp1, tmp2, tmp1	/* tmp1 = nentries in TSB */	;\
	sub	tmp1, 1, tmp1		/* mask = nentries - 1 */	;\
	srlx	tagacc, MMU_PAGESHIFT, tmp2				;\
	and	tmp2, tmp1, tmp1	/* tsbent = virtpage & mask */	;\
	sllx	tmp1, TSB_ENTRY_SHIFT, tmp1	/* entry num --> ptr */	;\
	add	tsbe, tmp1, tsbe	/* add entry offset to TSB base */ ;\
	/* END CSTYLED */


/*
 * Will probe the first TSB, and if it finds a match, will insert it
 * into the TLB and retry.
 *
 * tsbe_ptr = precomputed first TSB entry pointer (in, ro)
 * vpg_4m = 4M virtual page number for tag matching (in, ro)
 * label = where to branch to if this is a miss (text)
 * %asi = atomic ASI to use for the TSB access
 *
 * For trapstat, we have to explicitly use these registers.
 * g4 = location tag will be retrieved into from TSB (out)
 * g5 = location data(tte) will be retrieved into from TSB (out)
 */
#define	PROBE_1ST_DTSB(tsbe_ptr, vpg_4m, label)	/* g4/g5 clobbered */	\
	/* BEGIN CSTYLED */						\
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m		/* compare tag w/ TSB */	;\
	bne,pn	%xcc, label##1		/* branch if !match */		;\
	nop								;\
	brgez,pn %g5, label##1						;\
	nop								;\
	TT_TRACE(trace_tsbhit)						;\
	DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)				;\
	/* trapstat expects tte in %g5 */				;\
	retry				/* retry faulted instruction */	;\
label##1:								\
	/* END CSTYLED */


/*
 * Same as above, only if the TTE doesn't have the execute
 * bit set, will branch to exec_fault directly.
*/
#define	PROBE_1ST_ITSB(tsbe_ptr, vpg_4m, label)				\
	/* BEGIN CSTYLED */						\
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m		/* compare tag w/ TSB */	;\
	bne,pn	%xcc, label##1		/* branch if !match */		;\
	nop								;\
	brgez,pn %g5, label##1						;\
	nop								;\
	andcc	%g5, TTE_EXECPRM_INT, %g0 /* check execute bit */	;\
	bz,pn	%icc, exec_fault					;\
	nop								;\
	TT_TRACE(trace_tsbhit)						;\
	ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)				;\
	retry				/* retry faulted instruction */	;\
label##1:								\
	/* END CSTYLED */

/*
 * vpg_4m = 4M virtual page number for tag matching (in)
 * tsbe_ptr = precomputed second TSB entry pointer (in)
 * label = label to use to make branch targets unique (text)
 *
 * For trapstat, we have to explicitly use these registers.
 * g4 = tag portion of TSBE (out)
 * g5 = data portion of TSBE (out)
 */
#define	PROBE_2ND_DTSB(tsbe_ptr, vpg_4m, label)				\
	/* BEGIN CSTYLED */						\
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	/* since we are looking at 2nd tsb, if it's valid, it must be 4M */ ;\
	cmp	%g4, vpg_4m						;\
	bne,pn	%xcc, label##1						;\
	nop								;\
	brgez,pn %g5, label##1						;\
	nop								;\
	mov	tsbe_ptr, %g1	/* trace_tsbhit wants ptr in %g1 */	;\
	TT_TRACE(trace_tsbhit)						;\
	DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)				;\
	/* trapstat expects tte in %g5 */				;\
	retry				/* retry faulted instruction */	;\
label##1:								\
	/* END CSTYLED */


/*
 * Same as above, with the following additions:
 * If the TTE found is not executable, branch directly
 * to exec_fault.  If a TSB miss, branch to TSB miss handler.
 */
#define	PROBE_2ND_ITSB(tsbe_ptr, vpg_4m)				\
	/* BEGIN CSTYLED */						\
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m		/* compare tag w/ TSB */	;\
	bne,pn	%xcc, sfmmu_tsb_miss_tt	/* branch if !match */		;\
	nop								;\
	brgez,pn %g5, sfmmu_tsb_miss_tt					;\
	nop								;\
	andcc	%g5, TTE_EXECPRM_INT, %g0 /* check execute bit */	;\
	bz,pn	%icc, exec_fault					;\
	mov	tsbe_ptr, %g1	/* trap trace wants ptr in %g1 */	;\
	TT_TRACE(trace_tsbhit)						;\
	ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)				;\
	retry				/* retry faulted instruction */	\
	/* END CSTYLED */

/*
 * Save the secondary context (MMU_SCONTEXT1) into the MMU fault status
 * area so the TLB load macros pick up the right context:
 * 1. Get ctx1.  The traptype is supplied by caller.
 * 2. If iTSB miss, store in MMFSA_I_CTX
 * 3. if dTSB miss, store in MMFSA_D_CTX
 * 4. Thus the [D|I]TLB_STUFF will work as expected.
 */
#define	SAVE_CTX1(traptype, ctx1, tmp, label)				\
	/* BEGIN CSTYLED */						\
	mov	MMU_SCONTEXT1, tmp					;\
	ldxa	[tmp]ASI_MMU_CTX, ctx1					;\
	MMU_FAULT_STATUS_AREA(tmp)					;\
	cmp	traptype, FAST_IMMU_MISS_TT				;\
	be,a,pn	%icc, label						;\
	stx	ctx1, [tmp + MMFSA_I_CTX]				;\
	cmp	traptype, T_INSTR_MMU_MISS				;\
	be,a,pn	%icc, label						;\
	stx	ctx1, [tmp + MMFSA_I_CTX]				;\
	stx	ctx1, [tmp + MMFSA_D_CTX]				;\
label:
	/* END CSTYLED */

#endif /* _ASM */

#ifdef __cplusplus
}
#endif

#endif /* _VM_MACH_SFMMU_H */