/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

/*
 * VM - Hardware Address Translation management.
 *
 * This file describes the contents of the sun reference mmu (sfmmu)
 * specific hat data structures and the sfmmu specific hat procedures.
 * The machine independent interface is described in <vm/hat.h>.
 */

#ifndef _VM_MACH_SFMMU_H
#define	_VM_MACH_SFMMU_H

#pragma ident	"%Z%%M%	%I%	%E% SMI"

#include <sys/x_call.h>
#include <sys/hypervisor_api.h>

#ifdef	__cplusplus
extern "C" {
#endif

/*
 * Define UTSB_PHYS if user TSB is always accessed via physical address.
 * On sun4v platform, user TSB is accessed via physical address.
 */
#define	UTSB_PHYS	1

/*
 * Hypervisor TSB info
 *
 * NHV_TSB_INFO is the number of TSB descriptors handed to the hypervisor
 * (one for the 8K-indexed TSB, one for the 4M-indexed TSB).
 */
#define	NHV_TSB_INFO	2

#ifndef _ASM

/*
 * Per-process block describing the TSBs registered with the hypervisor.
 * hv_tsb_info[] is the array passed to the mmu_tsb_ctx{0,non0} hcalls;
 * hv_tsb_info_pa/cnt are the physical address and count of that array.
 */
struct hv_tsb_block {
	uint64_t	hv_tsb_info_pa;		/* hypervisor TSB info PA */
	uint64_t	hv_tsb_info_cnt;	/* hypervisor TSB info count */
	hv_tsb_info_t	hv_tsb_info[NHV_TSB_INFO]; /* hypervisor TSB info */
};

#endif /* _ASM */

#ifdef _ASM

/*
 * This macro is used in the MMU code to check if TL should be lowered from
 * 2 to 1 to pop trapstat's state.  See the block comment in trapstat.c
 * for details.
 *
 * If %tpc is below KERNELBASE we arrived here from trapstat's interposing
 * TLB-miss handler, so drop to %gl/%tl 1 and branch to label; otherwise
 * fall through to 9: with state unchanged.
 */

#define	TSTAT_CHECK_TL1(label, scr1, scr2)			\
	rdpr	%tpc, scr1;					\
	sethi	%hi(KERNELBASE), scr2;				\
	or	scr2, %lo(KERNELBASE), scr2;			\
	cmp	scr1, scr2;					\
	bgeu	%xcc, 9f;					\
	nop;							\
	wrpr	%g0, 1, %gl;					\
	ba	label;						\
	wrpr	%g0, 1, %tl;	/* branch delay slot: lower trap level */ \
9:

/*
 * The following macros allow us to share majority of the
 * SFMMU code between sun4u and sun4v platforms.
 */

/* No-op on sun4v: user TSB is accessed by PA, no ASI setup needed */
#define	SETUP_TSB_ASI(qlp, tmp)

/* No-op on sun4v: atomic user-TSB ASI setup is a sun4u-only concept */
#define	SETUP_UTSB_ATOMIC_ASI(tmp1, tmp2)

/*
 * Macro to switch to alternate global register on sun4u platforms
 * (not applicable to sun4v platforms)
 */
#define	USE_ALTERNATE_GLOBALS(scr)

/*
 * Macro to set %gl register value on sun4v platforms
 * (not applicable to sun4u platforms)
 */
#define	SET_GL_REG(val)						\
	wrpr	%g0, val, %gl

/*
 * Synthesize/get data tag access register value and context from the
 * MMU fault area
 *
 * In:
 *   tagacc, ctx = scratch registers
 * Out:
 *   tagacc = MMU data tag access register value
 *   ctx = context
 */
#define	GET_MMU_D_TAGACC_CTX(tagacc, ctx)			\
	MMU_FAULT_STATUS_AREA(ctx);				\
	ldx	[ctx + MMFSA_D_ADDR], tagacc;			\
	ldx	[ctx + MMFSA_D_CTX], ctx;			\
	srlx	tagacc, MMU_PAGESHIFT, tagacc; /* align to page boundary */ \
	sllx	tagacc, MMU_PAGESHIFT, tagacc;			\
	or	tagacc, ctx, tagacc

/*
 * Synthesize/get data tag access register value from the MMU fault area
 *
 * In:
 *   tagacc, scr1 = scratch registers
 * Out:
 *   tagacc = MMU data tag access register value
 */
#define	GET_MMU_D_TAGACC(tagacc, scr1)				\
	GET_MMU_D_TAGACC_CTX(tagacc, scr1)

/*
 * Synthesize/get data tag target register value from the MMU fault area
 *
 * In:
 *   ttarget, scr1 = scratch registers
 * Out:
 *   ttarget = MMU data tag target register value
 */
#define	GET_MMU_D_TTARGET(ttarget, scr1)			\
	MMU_FAULT_STATUS_AREA(ttarget);				\
	ldx	[ttarget + MMFSA_D_CTX], scr1;			\
	sllx	scr1, TTARGET_CTX_SHIFT, scr1;			\
	ldx	[ttarget + MMFSA_D_ADDR], ttarget;		\
	srlx	ttarget, TTARGET_VA_SHIFT, ttarget;		\
	or	ttarget, scr1, ttarget

/*
 * Synthesize/get data/instruction tag access register values
 * from the MMU fault area.
 *
 * In:
 *   dtagacc, itagacc, scr1, scr2 = scratch registers
 * Out:
 *   dtagacc = MMU data tag access register value
 *   itagacc = MMU instruction tag access register value
 */
#define	GET_MMU_BOTH_TAGACC(dtagacc, itagacc, scr1, scr2)	\
	MMU_FAULT_STATUS_AREA(scr1);				\
	ldx	[scr1 + MMFSA_D_ADDR], scr2;			\
	ldx	[scr1 + MMFSA_D_CTX], dtagacc;			\
	srlx	scr2, MMU_PAGESHIFT, scr2; /* align to page boundary */ \
	sllx	scr2, MMU_PAGESHIFT, scr2;			\
	or	scr2, dtagacc, dtagacc;				\
	ldx	[scr1 + MMFSA_I_ADDR], scr2;			\
	ldx	[scr1 + MMFSA_I_CTX], itagacc;			\
	srlx	scr2, MMU_PAGESHIFT, scr2; /* align to page boundary */ \
	sllx	scr2, MMU_PAGESHIFT, scr2;			\
	or	scr2, itagacc, itagacc

/*
 * Synthesize/get MMU data fault address from the MMU fault area
 *
 * In:
 *   daddr, scr1 = scratch registers
 * Out:
 *   daddr = MMU data fault address
 */
#define	GET_MMU_D_ADDR(daddr, scr1)				\
	MMU_FAULT_STATUS_AREA(scr1);				\
	ldx	[scr1 + MMFSA_D_ADDR], daddr

/*
 * Load ITLB entry
 *
 * Saves %o0-%o3 into the scratch registers, issues the MMU_MAP_ADDR
 * fast trap to the hypervisor (addr/ctx taken from the I-MMU fault
 * status area), panics on a non-zero hcall status, then restores
 * %o0-%o3.
 *
 * In:
 *   tte = reg containing tte
 *   scr1, scr2, scr3, scr4 = scratch registers
 */
#define	ITLB_STUFF(tte, scr1, scr2, scr3, scr4)			\
	mov	%o0, scr1;					\
	mov	%o1, scr2;					\
	mov	%o2, scr3;					\
	mov	%o3, scr4;					\
	MMU_FAULT_STATUS_AREA(%o2);				\
	ldx	[%o2 + MMFSA_I_ADDR], %o0;			\
	ldx	[%o2 + MMFSA_I_CTX], %o1;			\
	mov	tte, %o2;					\
	mov	MAP_ITLB, %o3;					\
	ta	MMU_MAP_ADDR;					\
	/* BEGIN CSTYLED */					\
	brnz,a,pn %o0, ptl1_panic;				\
	  mov	PTL1_BAD_HCALL, %g1;				\
	/* END CSTYLED */					\
	mov	scr1, %o0;					\
	mov	scr2, %o1;					\
	mov	scr3, %o2;					\
	mov	scr4, %o3

/*
 * Load DTLB entry
 *
 * Same as ITLB_STUFF, but the addr/ctx come from the D-MMU fault
 * status area and the mapping is installed in the DTLB.
 *
 * In:
 *   tte = reg containing tte
 *   scr1, scr2, scr3, scr4 = scratch registers
 */
#define	DTLB_STUFF(tte, scr1, scr2, scr3, scr4)			\
	mov	%o0, scr1;					\
	mov	%o1, scr2;					\
	mov	%o2, scr3;					\
	mov	%o3, scr4;					\
	MMU_FAULT_STATUS_AREA(%o2);				\
	ldx	[%o2 + MMFSA_D_ADDR], %o0;			\
	ldx	[%o2 + MMFSA_D_CTX], %o1;			\
	mov	tte, %o2;					\
	mov	MAP_DTLB, %o3;					\
	ta	MMU_MAP_ADDR;					\
	/* BEGIN CSTYLED */					\
	brnz,a,pn %o0, ptl1_panic;				\
	  mov	PTL1_BAD_HCALL, %g1;				\
	/* END CSTYLED */					\
	mov	scr1, %o0;					\
	mov	scr2, %o1;					\
	mov	scr3, %o2;					\
	mov	scr4, %o3

/*
 * Returns PFN given the TTE and vaddr
 *
 * In:
 *   tte = reg containing tte
 *   vaddr = reg containing vaddr
 *   label = temporary label prefix (made unique via token pasting)
 *   scr1, scr2, scr3 = scratch registers
 * Out:
 *   tte = PFN value
 */
#define	TTETOPFN(tte, vaddr, label, scr1, scr2, scr3)			\
	and	tte, TTE_SZ_BITS, scr1;		/* scr1 = ttesz */	\
	sllx	tte, TTE_PA_LSHIFT, tte;				\
	sllx	scr1, 1, scr2;						\
	add	scr2, scr1, scr2;		/* mulx 3 */		\
	add	scr2, MMU_PAGESHIFT + TTE_PA_LSHIFT, scr3;		\
	/* CSTYLED */							\
	brz,pt	scr2, label/**/1;					\
	srlx	tte, scr3, tte;						\
	sllx	tte, scr2, tte;						\
	set	1, scr1;						\
	add	scr2, MMU_PAGESHIFT, scr3;				\
	sllx	scr1, scr3, scr1;					\
	sub	scr1, 1, scr1;	/* scr1=TTE_PAGE_OFFSET(ttesz) */	\
	and	vaddr, scr1, scr2;					\
	srln	scr2, MMU_PAGESHIFT, scr2;				\
	or	tte, scr2, tte;						\
	/* CSTYLED */							\
label/**/1:

/*
 * TTE_SET_REF_ML is a macro that updates the reference bit if it is
 * not already set.  The update is done with casxa on the physical
 * address of the TTE, retrying until the compare-and-swap succeeds.
 *
 * Parameters:
 * tte      = reg containing tte
 * ttepa    = physical pointer to tte
 * tteva    = virtual ptr to tte
 * tsbarea  = tsb miss area
 * tmp1     = tmp reg
 * label    = temporary label
 */

#define	TTE_SET_REF_ML(tte, ttepa, tteva, tsbarea, tmp1, label)	\
	/* BEGIN CSTYLED */					\
	/* check reference bit */				\
	btst	TTE_REF_INT, tte;				\
	bnz,pt	%xcc, label/**/2;	/* if ref bit set-skip ahead */ \
	nop;							\
	/* update reference bit */				\
label/**/1:							\
	or	tte, TTE_REF_INT, tmp1;				\
	casxa	[ttepa]ASI_MEM, tte, tmp1;	/* update ref bit */ \
	cmp	tte, tmp1;					\
	bne,a,pn %xcc, label/**/1;				\
	ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */ \
	or	tte, TTE_REF_INT, tte;				\
label/**/2:							\
	/* END CSTYLED */


/*
 * TTE_SET_REFMOD_ML is a macro that updates the reference and modify bits
 * if not already set.
 *
 * Parameters:
 * tte       = reg containing tte
 * ttepa     = physical pointer to tte
 * tteva     = virtual ptr to tte
 * tsbarea   = tsb miss area
 * tmp1      = tmp reg
 * label     = temporary label
 * exitlabel = label where to jump to if write perm bit not set.
 */

#define	TTE_SET_REFMOD_ML(tte, ttepa, tteva, tsbarea, tmp1, label, \
	exitlabel)						\
	/* BEGIN CSTYLED */					\
	/* check write permission bit */			\
	btst	TTE_WRPRM_INT, tte;				\
	bz,pn	%xcc, exitlabel;	/* exit if wr_perm not set */ \
	btst	TTE_HWWR_INT, tte;				\
	bnz,pn	%xcc, label/**/2;	/* nothing to do */	\
	nop;							\
	/* update reference bit */				\
label/**/1:							\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tmp1;		\
	casxa	[ttepa]ASI_MEM, tte, tmp1; /* update ref/mod bit */ \
	cmp	tte, tmp1;					\
	bne,a,pn %xcc, label/**/1;				\
	ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */ \
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tte;		\
label/**/2:							\
	/* END CSTYLED */


/*
 * Synthesize a TSB base register contents for a process.
 *
 * In:
 *   tsbinfo = TSB info pointer (ro)
 *   tsbreg, tmp1 = scratch registers
 * Out:
 *   tsbreg = value to program into TSB base register
 */

#define	MAKE_UTSBREG(tsbinfo, tsbreg, tmp1)			\
	ldx	[tsbinfo + TSBINFO_PADDR], tsbreg;		\
	lduh	[tsbinfo + TSBINFO_SZCODE], tmp1;		\
	and	tmp1, TSB_SOFTSZ_MASK, tmp1;			\
	or	tsbreg, tmp1, tsbreg;				\


/*
 * Load TSB base register into a dedicated scratchpad register.
 * This register contains utsb_pabase in bits 63:13, and TSB size
 * code in bits 2:0.
 *
 * In:
 *   tsbreg = value to load (ro)
 *   regnum = constant or register
 *   tmp1 = scratch register
 * Out:
 *   Specified scratchpad register updated
 *
 */
#define	SET_UTSBREG(regnum, tsbreg, tmp1)			\
	mov	regnum, tmp1;					\
	stxa	tsbreg, [tmp1]ASI_SCRATCHPAD	/* save tsbreg */

/*
 * Get TSB base register from the scratchpad
 *
 * In:
 *   regnum = constant or register
 *   tsbreg = scratch
 * Out:
 *   tsbreg = tsbreg from the specified scratchpad register
 */
#define	GET_UTSBREG(regnum, tsbreg)				\
	mov	regnum, tsbreg;					\
	ldxa	[tsbreg]ASI_SCRATCHPAD, tsbreg


/*
 * Get the location of the TSB entry in the first TSB to probe
 *
 * In:
 *   tagacc = tag access register (not clobbered)
 *   tsbe, tmp1, tmp2 = scratch registers
 * Out:
 *   tsbe = pointer to the tsbe in the 1st TSB
 */

#define	GET_1ST_TSBE_PTR(tagacc, tsbe, tmp1, tmp2)		\
	/* BEGIN CSTYLED */					\
	mov	SCRATCHPAD_UTSBREG1, tmp1			;\
	ldxa	[tmp1]ASI_SCRATCHPAD, tsbe	/* get tsbreg */ ;\
	and	tsbe, TSB_SOFTSZ_MASK, tmp2	/* tmp2=szc */	;\
	andn	tsbe, TSB_SOFTSZ_MASK, tsbe	/* tsbbase */	;\
	mov	TSB_ENTRIES(0), tmp1	/* nentries in TSB size 0 */ ;\
	sllx	tmp1, tmp2, tmp1	/* tmp1 = nentries in TSB */ ;\
	sub	tmp1, 1, tmp1		/* mask = nentries - 1 */ ;\
	srlx	tagacc, MMU_PAGESHIFT, tmp2			;\
	and	tmp2, tmp1, tmp1	/* tsbent = virtpage & mask */ ;\
	sllx	tmp1, TSB_ENTRY_SHIFT, tmp1	/* entry num --> ptr */ ;\
	add	tsbe, tmp1, tsbe	/* add entry offset to TSB base */ ;\
	/* END CSTYLED */


/*
 * Will probe the first TSB, and if it finds a match, will insert it
 * into the TLB and retry.
 *
 * tsbe_ptr = precomputed first TSB entry pointer (in, ro)
 * vpg_4m = 4M virtual page number for tag matching (in, ro)
 * label = where to branch to if this is a miss (text)
 * %asi = atomic ASI to use for the TSB access
 *
 * For trapstat, we have to explicitly use these registers.
 * g4 = location tag will be retrieved into from TSB (out)
 * g5 = location data(tte) will be retrieved into from TSB (out)
 */
#define	PROBE_1ST_DTSB(tsbe_ptr, vpg_4m, label)	/* g4/g5 clobbered */ \
	/* BEGIN CSTYLED */					\
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m		/* compare tag w/ TSB */ ;\
	bne,pn	%xcc, label/**/1	/* branch if !match */	;\
	nop							;\
	brgez,pn %g5, label/**/1	/* invalid if TTE valid bit clear */ ;\
	nop							;\
	TT_TRACE(trace_tsbhit)					;\
	DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)			;\
	/* trapstat expects tte in %g5 */			;\
	retry				/* retry faulted instruction */ ;\
label/**/1:							\
	/* END CSTYLED */


/*
 * Same as above, only if the TTE doesn't have the execute
 * bit set, will branch to exec_fault directly.
 */
#define	PROBE_1ST_ITSB(tsbe_ptr, vpg_4m, label)			\
	/* BEGIN CSTYLED */					\
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m		/* compare tag w/ TSB */ ;\
	bne,pn	%xcc, label/**/1	/* branch if !match */	;\
	nop							;\
	brgez,pn %g5, label/**/1	/* invalid if TTE valid bit clear */ ;\
	nop							;\
	andcc	%g5, TTE_EXECPRM_INT, %g0 /* check execute bit */ ;\
	bz,pn	%icc, exec_fault				;\
	nop							;\
	TT_TRACE(trace_tsbhit)					;\
	ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)			;\
	retry				/* retry faulted instruction */ ;\
label/**/1:							\
	/* END CSTYLED */


/*
 * Get the location in the 2nd TSB of the tsbe for this fault.
 * Assumes that the second TSB only contains 4M mappings.
 *
 * In:
 *   tagacc = tag access register (not clobbered)
 *   tsbe, tmp1, tmp2 = scratch registers
 * Out:
 *   tsbe = pointer to the tsbe in the 2nd TSB
 */

#define	GET_2ND_TSBE_PTR(tagacc, tsbe, tmp1, tmp2)		\
	mov	SCRATCHPAD_UTSBREG2, tmp1;			\
	ldxa	[tmp1]ASI_SCRATCHPAD, tsbe;	/* get tsbreg */ \
	and	tsbe, TSB_SOFTSZ_MASK, tmp2;	/* tmp2=szc */	\
	andn	tsbe, TSB_SOFTSZ_MASK, tsbe;	/* tsbbase */	\
	mov	TSB_ENTRIES(0), tmp1;	/* nentries in TSB size 0 */ \
	sllx	tmp1, tmp2, tmp1;	/* tmp1 = nentries in TSB */ \
	sub	tmp1, 1, tmp1;		/* mask = nentries - 1 */ \
	srlx	tagacc, MMU_PAGESHIFT4M, tmp2;			\
	and	tmp2, tmp1, tmp1;	/* tsbent = virtpage & mask */ \
	sllx	tmp1, TSB_ENTRY_SHIFT, tmp1;	/* entry num --> ptr */ \
	add	tsbe, tmp1, tsbe	/* add entry offset to TSB base */


/*
 * vpg_4m = 4M virtual page number for tag matching (in)
 * tsbe_ptr = precomputed second TSB entry pointer (in)
 * label = label to use to make branch targets unique (text)
 *
 * For trapstat, we have to explicitly use these registers.
 * g4 = tag portion of TSBE (out)
 * g5 = data portion of TSBE (out)
 */
#define	PROBE_2ND_DTSB(tsbe_ptr, vpg_4m, label)			\
	/* BEGIN CSTYLED */					\
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	/* since we are looking at 2nd tsb, if it's valid, it must be 4M */ ;\
	cmp	%g4, vpg_4m					;\
	bne,pn	%xcc, label/**/1				;\
	nop							;\
	brgez,pn %g5, label/**/1				;\
	nop							;\
	mov	tsbe_ptr, %g1	/* trace_tsbhit wants ptr in %g1 */ ;\
	TT_TRACE(trace_tsbhit)					;\
	DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)			;\
	/* trapstat expects tte in %g5 */			;\
	retry				/* retry faulted instruction */ ;\
label/**/1:							\
	/* END CSTYLED */


/*
 * Same as above, with the following additions:
 * If the TTE found is not executable, branch directly
 * to exec_fault.  If a TSB miss, branch to TSB miss handler.
 */
#define	PROBE_2ND_ITSB(tsbe_ptr, vpg_4m)			\
	/* BEGIN CSTYLED */					\
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m		/* compare tag w/ TSB */ ;\
	bne,pn	%xcc, sfmmu_tsb_miss_tt	/* branch if !match */	;\
	nop							;\
	brgez,pn %g5, sfmmu_tsb_miss_tt				;\
	nop							;\
	andcc	%g5, TTE_EXECPRM_INT, %g0 /* check execute bit */ ;\
	bz,pn	%icc, exec_fault				;\
	mov	tsbe_ptr, %g1	/* trap trace wants ptr in %g1 */ ;\
	TT_TRACE(trace_tsbhit)					;\
	ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)			;\
	retry				/* retry faulted instruction */ \
	/* END CSTYLED */


#endif /* _ASM */

#ifdef	__cplusplus
}
#endif

#endif	/* _VM_MACH_SFMMU_H */