/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

/*
 * VM - Hardware Address Translation management.
 *
 * This file describes the contents of the sun reference mmu (sfmmu)
 * specific hat data structures and the sfmmu specific hat procedures.
 * The machine independent interface is described in <vm/hat.h>.
 *
 * NOTE: this is the sun4v flavor of the header: translation state is
 * read from the hypervisor's MMU fault status area (MMFSA_*) and TLB
 * loads go through the MMU_MAP_ADDR hypervisor trap rather than
 * direct TLB data-access ASIs.
 */

#ifndef _VM_MACH_SFMMU_H
#define _VM_MACH_SFMMU_H

#pragma ident "%Z%%M% %I% %E% SMI"

#include <sys/x_call.h>
#include <sys/hypervisor_api.h>

#ifdef __cplusplus
extern "C" {
#endif

/*
 * Define UTSB_PHYS if user TSB is always accessed via physical address.
 * On sun4v platform, user TSB is accessed via physical address.
 */
#define UTSB_PHYS 1

/*
 * Hypervisor TSB info
 */
#define NHV_TSB_INFO 4

#ifndef _ASM

/*
 * Per-HAT block describing the TSBs registered with the hypervisor.
 * The hv_tsb_info array is handed to the hypervisor by physical
 * address (hv_tsb_info_pa), with hv_tsb_info_cnt valid entries.
 */
struct hv_tsb_block {
	uint64_t	hv_tsb_info_pa;		/* hypervisor TSB info PA */
	uint64_t	hv_tsb_info_cnt;	/* hypervisor TSB info count */
	hv_tsb_info_t	hv_tsb_info[NHV_TSB_INFO]; /* hypervisor TSB info */
};

#endif /* _ASM */

#ifdef _ASM

/*
 * This macro is used to set private/shared secondary context register in
 * sfmmu_alloc_ctx().
 * Input:
 * cnum = context number to load into the secondary context register
 * is_shctx = sfmmu private/shared flag (0: private, 1: shared)
 * tmp1, tmp2 = scratch registers
 * label = unused by this implementation (kept for interface parity)
 *
 * movrnz selects MMU_SCONTEXT1 (shared) over MMU_SCONTEXT (private)
 * when is_shctx is non-zero; the flush makes the ASI store visible.
 */
#define SET_SECCTX(cnum, is_shctx, tmp1, tmp2, label) \
	mov	MMU_SCONTEXT, tmp1; \
	movrnz	is_shctx, MMU_SCONTEXT1, tmp1; \
	sethi	%hi(FLUSH_ADDR), tmp2; \
	stxa	cnum, [tmp1]ASI_MMU_CTX; /* set 2nd ctx reg. */ \
	flush	tmp2; \

/*
 * This macro is used in the MMU code to check if TL should be lowered from
 * 2 to 1 to pop trapstat's state. See the block comment in trapstat.c
 * for details.
 */

#define TSTAT_CHECK_TL1(label, scr1, scr2) \
	rdpr	%tpc, scr1; \
	sethi	%hi(KERNELBASE), scr2; \
	or	scr2, %lo(KERNELBASE), scr2; \
	cmp	scr1, scr2; \
	bgeu	%xcc, 9f; \
	nop; \
	wrpr	%g0, 1, %gl; \
	ba	label; \
	wrpr	%g0, 1, %tl; \
9:

/*
 * The following macros allow us to share majority of the
 * SFMMU code between sun4u and sun4v platforms.
 */

/* No-op on sun4v: user TSBs are physically addressed (see UTSB_PHYS). */
#define SETUP_TSB_ASI(qlp, tmp)

/* No-op on sun4v: no atomic-ASI setup is needed for physical TSB access. */
#define SETUP_UTSB_ATOMIC_ASI(tmp1, tmp2)

/*
 * Macro to switch to alternate global register on sun4u platforms
 * (not applicable to sun4v platforms)
 */
#define USE_ALTERNATE_GLOBALS(scr)

/*
 * Macro to set %gl register value on sun4v platforms
 * (not applicable to sun4u platforms)
 */
#define SET_GL_REG(val) \
	wrpr	%g0, val, %gl

/*
 * Get pseudo-tagacc value and context from the MMU fault area. Pseudo-tagacc
 * is the faulting virtual address OR'd with 0 for KCONTEXT, INVALID_CONTEXT
 * (1) for invalid context, and USER_CONTEXT (2) for user context.
 *
 * In:
 * ptagacc, ctxtype = scratch registers
 * Out:
 * ptagacc = MMU data tag access register value
 * ctxtype = context type (KCONTEXT, INVALID_CONTEXT or USER_CONTEXT)
 *
 * Any context number > USER_CONTEXT_TYPE is collapsed to USER_CONTEXT_TYPE
 * by the movgu, so the low page-offset bits of ptagacc always hold one of
 * the three pseudo-context codes.
 */
#define GET_MMU_D_PTAGACC_CTXTYPE(ptagacc, ctxtype) \
	MMU_FAULT_STATUS_AREA(ctxtype); \
	ldx	[ctxtype + MMFSA_D_ADDR], ptagacc; \
	ldx	[ctxtype + MMFSA_D_CTX], ctxtype; \
	srlx	ptagacc, MMU_PAGESHIFT, ptagacc; /* align to page boundary */ \
	cmp	ctxtype, USER_CONTEXT_TYPE; \
	sllx	ptagacc, MMU_PAGESHIFT, ptagacc; \
	movgu	%icc, USER_CONTEXT_TYPE, ctxtype; \
	or	ptagacc, ctxtype, ptagacc

/*
 * Synthesize/get data tag access register value from the MMU fault area
 *
 * In:
 * tagacc, scr1 = scratch registers
 * Out:
 * tagacc = MMU data tag access register value
 */
#define GET_MMU_D_TAGACC(tagacc, scr1) \
	GET_MMU_D_PTAGACC_CTXTYPE(tagacc, scr1)

/*
 * Synthesize/get data tag target register value from the MMU fault area
 *
 * In:
 * ttarget, scr1 = scratch registers
 * Out:
 * ttarget = MMU data tag target register value
 */
#define GET_MMU_D_TTARGET(ttarget, scr1) \
	MMU_FAULT_STATUS_AREA(ttarget); \
	ldx	[ttarget + MMFSA_D_CTX], scr1; \
	sllx	scr1, TTARGET_CTX_SHIFT, scr1; \
	ldx	[ttarget + MMFSA_D_ADDR], ttarget; \
	srlx	ttarget, TTARGET_VA_SHIFT, ttarget; \
	or	ttarget, scr1, ttarget

/*
 * Synthesize/get data/instruction pseudo tag access register values
 * from the MMU fault area (context is 0 for kernel, 1 for invalid, 2 for user)
 *
 * In:
 * dtagacc, itagacc, scr1, scr2 = scratch registers
 * Out:
 * dtagacc = MMU data tag access register value w/pseudo-context
 * itagacc = MMU instruction tag access register value w/pseudo-context
 */
#define GET_MMU_BOTH_TAGACC(dtagacc, itagacc, scr1, scr2) \
	MMU_FAULT_STATUS_AREA(scr1); \
	ldx	[scr1 + MMFSA_D_ADDR], scr2; \
	ldx	[scr1 + MMFSA_D_CTX], dtagacc; \
	srlx	scr2, MMU_PAGESHIFT, scr2; /* align to page boundary */ \
	cmp	dtagacc, USER_CONTEXT_TYPE; \
	sllx	scr2, MMU_PAGESHIFT, scr2; \
	movgu	%icc, USER_CONTEXT_TYPE, dtagacc; \
	or	scr2, dtagacc, dtagacc; \
	ldx	[scr1 + MMFSA_I_ADDR], scr2; \
	ldx	[scr1 + MMFSA_I_CTX], itagacc; \
	srlx	scr2, MMU_PAGESHIFT, scr2; /* align to page boundary */ \
	cmp	itagacc, USER_CONTEXT_TYPE; \
	sllx	scr2, MMU_PAGESHIFT, scr2; \
	movgu	%icc, USER_CONTEXT_TYPE, itagacc; \
	or	scr2, itagacc, itagacc

/*
 * Synthesize/get MMU data fault address from the MMU fault area
 *
 * In:
 * daddr, scr1 = scratch registers
 * Out:
 * daddr = MMU data fault address
 */
#define GET_MMU_D_ADDR(daddr, scr1) \
	MMU_FAULT_STATUS_AREA(scr1); \
	ldx	[scr1 + MMFSA_D_ADDR], daddr

/*
 * Get pseudo-tagacc value and context from the MMU fault area. Pseudo-tagacc
 * is the faulting virtual address OR'd with 0 for KCONTEXT, INVALID_CONTEXT
 * (1) for invalid context, and USER_CONTEXT (2) for user context.
 *
 * In:
 * ptagacc, ctxtype = scratch registers
 * Out:
 * ptagacc = MMU instruction tag access register value
 * ctxtype = context type (KCONTEXT, INVALID_CONTEXT or USER_CONTEXT)
 *
 * Instruction-side twin of GET_MMU_D_PTAGACC_CTXTYPE; reads MMFSA_I_*.
 */
#define GET_MMU_I_PTAGACC_CTXTYPE(ptagacc, ctxtype) \
	MMU_FAULT_STATUS_AREA(ctxtype); \
	ldx	[ctxtype + MMFSA_I_ADDR], ptagacc; \
	ldx	[ctxtype + MMFSA_I_CTX], ctxtype; \
	srlx	ptagacc, MMU_PAGESHIFT, ptagacc; /* align to page boundary */ \
	cmp	ctxtype, USER_CONTEXT_TYPE; \
	sllx	ptagacc, MMU_PAGESHIFT, ptagacc; \
	movgu	%icc, USER_CONTEXT_TYPE, ctxtype; \
	or	ptagacc, ctxtype, ptagacc

/*
 * Load ITLB entry
 *
 * In:
 * tte = reg containing tte
 * scr1, scr2, scr3, scr4 = scratch registers
 *
 * %o0-%o3 are saved into the scratch registers, the MMU_MAP_ADDR
 * hypervisor trap is issued with the faulting I-side address/context
 * from the fault status area, and %o0-%o3 are restored. A non-zero
 * hcall status panics via ptl1_panic (PTL1_BAD_HCALL).
 */
#define ITLB_STUFF(tte, scr1, scr2, scr3, scr4) \
	mov	%o0, scr1; \
	mov	%o1, scr2; \
	mov	%o2, scr3; \
	mov	%o3, scr4; \
	MMU_FAULT_STATUS_AREA(%o2); \
	ldx	[%o2 + MMFSA_I_ADDR], %o0; \
	ldx	[%o2 + MMFSA_I_CTX], %o1; \
	mov	tte, %o2; \
	mov	MAP_ITLB, %o3; \
	ta	MMU_MAP_ADDR; \
	/* BEGIN CSTYLED */ \
	brnz,a,pn %o0, ptl1_panic; \
	mov	PTL1_BAD_HCALL, %g1; \
	/* END CSTYLED */ \
	mov	scr1, %o0; \
	mov	scr2, %o1; \
	mov	scr3, %o2; \
	mov	scr4, %o3

/*
 * Load DTLB entry
 *
 * In:
 * tte = reg containing tte
 * scr1, scr2, scr3, scr4 = scratch registers
 *
 * Data-side twin of ITLB_STUFF: maps the faulting D-side
 * address/context via the MMU_MAP_ADDR hypervisor trap.
 */
#define DTLB_STUFF(tte, scr1, scr2, scr3, scr4) \
	mov	%o0, scr1; \
	mov	%o1, scr2; \
	mov	%o2, scr3; \
	mov	%o3, scr4; \
	MMU_FAULT_STATUS_AREA(%o2); \
	ldx	[%o2 + MMFSA_D_ADDR], %o0; \
	ldx	[%o2 + MMFSA_D_CTX], %o1; \
	mov	tte, %o2; \
	mov	MAP_DTLB, %o3; \
	ta	MMU_MAP_ADDR; \
	/* BEGIN CSTYLED */ \
	brnz,a,pn %o0, ptl1_panic; \
	mov	PTL1_BAD_HCALL, %g1; \
	/* END CSTYLED */ \
	mov	scr1, %o0; \
	mov	scr2, %o1; \
	mov	scr3, %o2; \
	mov	scr4, %o3

/*
 * Returns PFN given the TTE and vaddr
 *
 * In:
 * tte = reg containing tte
 * vaddr = reg containing vaddr
 * label = temporary label base (suffixed to build a local branch target)
 * scr1, scr2, scr3 = scratch registers
 * Out:
 * tte = PFN value
 *
 * For large pages (ttesz != 0) the low PFN bits come from the vaddr
 * offset within the large page, hence the extra mask/merge path.
 */
#define TTETOPFN(tte, vaddr, label, scr1, scr2, scr3) \
	and	tte, TTE_SZ_BITS, scr1; /* scr1 = ttesz */ \
	sllx	tte, TTE_PA_LSHIFT, tte; \
	sllx	scr1, 1, scr2; \
	add	scr2, scr1, scr2; /* mulx 3 */ \
	add	scr2, MMU_PAGESHIFT + TTE_PA_LSHIFT, scr3; \
	/* CSTYLED */ \
	brz,pt	scr2, label/**/1; \
	srlx	tte, scr3, tte; \
	sllx	tte, scr2, tte; \
	set	1, scr1; \
	add	scr2, MMU_PAGESHIFT, scr3; \
	sllx	scr1, scr3, scr1; \
	sub	scr1, 1, scr1; /* scr1=TTE_PAGE_OFFSET(ttesz) */ \
	and	vaddr, scr1, scr2; \
	srln	scr2, MMU_PAGESHIFT, scr2; \
	or	tte, scr2, tte; \
	/* CSTYLED */ \
	label/**/1:

/*
 * TTE_SET_REF_ML is a macro that updates the reference bit if it is
 * not already set.
 *
 * Parameters:
 * tte = reg containing tte
 * ttepa = physical pointer to tte
 * tteva = virtual ptr to tte (unused by this sun4v implementation)
 * tsbarea = tsb miss area (unused by this sun4v implementation)
 * tmp1 = tmp reg
 * label = temporary label
 *
 * The casxa loop retries until the ref bit is set atomically in memory
 * (another CPU may be updating the same TTE concurrently).
 */

#define TTE_SET_REF_ML(tte, ttepa, tteva, tsbarea, tmp1, label) \
	/* BEGIN CSTYLED */ \
	/* check reference bit */ \
	btst	TTE_REF_INT, tte; \
	bnz,pt	%xcc, label/**/2; /* if ref bit set-skip ahead */ \
	nop; \
	/* update reference bit */ \
label/**/1: \
	or	tte, TTE_REF_INT, tmp1; \
	casxa	[ttepa]ASI_MEM, tte, tmp1; /* update ref bit */ \
	cmp	tte, tmp1; \
	bne,a,pn %xcc, label/**/1; \
	ldxa	[ttepa]ASI_MEM, tte; /* MMU_READTTE through pa */ \
	or	tte, TTE_REF_INT, tte; \
label/**/2: \
	/* END CSTYLED */


/*
 * TTE_SET_REFMOD_ML is a macro that updates the reference and modify bits
 * if not already set.
 *
 * Parameters:
 * tte = reg containing tte
 * ttepa = physical pointer to tte
 * tteva = virtual ptr to tte (unused by this sun4v implementation)
 * tsbarea = tsb miss area (unused by this sun4v implementation)
 * tmp1 = tmp reg
 * label = temporary label
 * exitlabel = label where to jump to if write perm bit not set.
 */

#define TTE_SET_REFMOD_ML(tte, ttepa, tteva, tsbarea, tmp1, label, \
	exitlabel) \
	/* BEGIN CSTYLED */ \
	/* check reference bit */ \
	btst	TTE_WRPRM_INT, tte; \
	bz,pn	%xcc, exitlabel; /* exit if wr_perm not set */ \
	btst	TTE_HWWR_INT, tte; \
	bnz,pn	%xcc, label/**/2; /* nothing to do */ \
	nop; \
	/* update reference bit */ \
label/**/1: \
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tmp1; \
	casxa	[ttepa]ASI_MEM, tte, tmp1; /* update ref/mod bit */ \
	cmp	tte, tmp1; \
	bne,a,pn %xcc, label/**/1; \
	ldxa	[ttepa]ASI_MEM, tte; /* MMU_READTTE through pa */ \
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tte; \
label/**/2: \
	/* END CSTYLED */
/*
 * Get TSB base register from the scratchpad for
 * shared contexts
 *
 * In:
 * tsbmiss = pointer to tsbmiss area
 * tsbmissoffset = offset to right tsb pointer
 * tsbreg = scratch
 * Out:
 * tsbreg = tsbreg from the specified scratchpad register
 */
#define GET_UTSBREG_SHCTX(tsbmiss, tsbmissoffset, tsbreg) \
	ldx	[tsbmiss + tsbmissoffset], tsbreg


/*
 * Get the location of the TSB entry in the first TSB to probe
 *
 * In:
 * tagacc = tag access register (not clobbered)
 * tsbe, tmp1, tmp2 = scratch registers
 * Out:
 * tsbe = pointer to the tsbe in the 1st TSB
 *
 * The TSB base and size code share one scratchpad register: the low
 * TSB_SOFTSZ_MASK bits encode the size, the rest is the base address.
 */

#define GET_1ST_TSBE_PTR(tagacc, tsbe, tmp1, tmp2) \
	/* BEGIN CSTYLED */ \
	mov	SCRATCHPAD_UTSBREG1, tmp1 ;\
	ldxa	[tmp1]ASI_SCRATCHPAD, tsbe /* get tsbreg */ ;\
	and	tsbe, TSB_SOFTSZ_MASK, tmp2 /* tmp2=szc */ ;\
	andn	tsbe, TSB_SOFTSZ_MASK, tsbe /* tsbbase */ ;\
	mov	TSB_ENTRIES(0), tmp1 /* nentries in TSB size 0 */ ;\
	sllx	tmp1, tmp2, tmp1 /* tmp1 = nentries in TSB */ ;\
	sub	tmp1, 1, tmp1 /* mask = nentries - 1 */ ;\
	srlx	tagacc, MMU_PAGESHIFT, tmp2 ;\
	and	tmp2, tmp1, tmp1 /* tsbent = virtpage & mask */ ;\
	sllx	tmp1, TSB_ENTRY_SHIFT, tmp1 /* entry num --> ptr */ ;\
	add	tsbe, tmp1, tsbe /* add entry offset to TSB base */ ;\
	/* END CSTYLED */


/*
 * Will probe the first TSB, and if it finds a match, will insert it
 * into the TLB and retry.
 *
 * tsbe_ptr = precomputed first TSB entry pointer (in, ro)
 * vpg_4m = 4M virtual page number for tag matching (in, ro)
 * label = where to branch to if this is a miss (text)
 * %asi = atomic ASI to use for the TSB access
 *
 * For trapstat, we have to explicitly use these registers.
 * g4 = location tag will be retrieved into from TSB (out)
 * g5 = location data(tte) will be retrieved into from TSB (out)
 *
 * brgez on %g5 skips invalid entries: a valid TTE has its top
 * (valid) bit set, i.e. is negative as a signed 64-bit value.
 */
#define PROBE_1ST_DTSB(tsbe_ptr, vpg_4m, label) /* g4/g5 clobbered */ \
	/* BEGIN CSTYLED */ \
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m /* compare tag w/ TSB */ ;\
	bne,pn	%xcc, label/**/1 /* branch if !match */ ;\
	nop ;\
	brgez,pn %g5, label/**/1 ;\
	nop ;\
	TT_TRACE(trace_tsbhit) ;\
	DTLB_STUFF(%g5, %g1, %g2, %g3, %g4) ;\
	/* trapstat expects tte in %g5 */ ;\
	retry /* retry faulted instruction */ ;\
label/**/1: \
	/* END CSTYLED */


/*
 * Same as above, only if the TTE doesn't have the execute
 * bit set, will branch to exec_fault directly.
 */
#define PROBE_1ST_ITSB(tsbe_ptr, vpg_4m, label) \
	/* BEGIN CSTYLED */ \
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m /* compare tag w/ TSB */ ;\
	bne,pn	%xcc, label/**/1 /* branch if !match */ ;\
	nop ;\
	brgez,pn %g5, label/**/1 ;\
	nop ;\
	andcc	%g5, TTE_EXECPRM_INT, %g0 /* check execute bit */ ;\
	bz,pn	%icc, exec_fault ;\
	nop ;\
	TT_TRACE(trace_tsbhit) ;\
	ITLB_STUFF(%g5, %g1, %g2, %g3, %g4) ;\
	retry /* retry faulted instruction */ ;\
label/**/1: \
	/* END CSTYLED */

/*
 * vpg_4m = 4M virtual page number for tag matching (in)
 * tsbe_ptr = precomputed second TSB entry pointer (in)
 * label = label to use to make branch targets unique (text)
 *
 * For trapstat, we have to explicitly use these registers.
 * g4 = tag portion of TSBE (out)
 * g5 = data portion of TSBE (out)
 */
#define PROBE_2ND_DTSB(tsbe_ptr, vpg_4m, label) \
	/* BEGIN CSTYLED */ \
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	/* since we are looking at 2nd tsb, if it's valid, it must be 4M */ ;\
	cmp	%g4, vpg_4m ;\
	bne,pn	%xcc, label/**/1 ;\
	nop ;\
	brgez,pn %g5, label/**/1 ;\
	nop ;\
	mov	tsbe_ptr, %g1 /* trace_tsbhit wants ptr in %g1 */ ;\
	TT_TRACE(trace_tsbhit) ;\
	DTLB_STUFF(%g5, %g1, %g2, %g3, %g4) ;\
	/* trapstat expects tte in %g5 */ ;\
	retry /* retry faulted instruction */ ;\
label/**/1: \
	/* END CSTYLED */


/*
 * Same as above, with the following additions:
 * If the TTE found is not executable, branch directly
 * to exec_fault. If a TSB miss, branch to TSB miss handler.
 */
#define PROBE_2ND_ITSB(tsbe_ptr, vpg_4m) \
	/* BEGIN CSTYLED */ \
	ldda	[tsbe_ptr]ASI_QUAD_LDD_PHYS, %g4 /* g4 = tag, g5 = data */ ;\
	cmp	%g4, vpg_4m /* compare tag w/ TSB */ ;\
	bne,pn	%xcc, sfmmu_tsb_miss_tt /* branch if !match */ ;\
	nop ;\
	brgez,pn %g5, sfmmu_tsb_miss_tt ;\
	nop ;\
	andcc	%g5, TTE_EXECPRM_INT, %g0 /* check execute bit */ ;\
	bz,pn	%icc, exec_fault ;\
	mov	tsbe_ptr, %g1 /* trap trace wants ptr in %g1 */ ;\
	TT_TRACE(trace_tsbhit) ;\
	ITLB_STUFF(%g5, %g1, %g2, %g3, %g4) ;\
	retry /* retry faulted instruction */ \
	/* END CSTYLED */

/*
 * 1. Get ctx1. The traptype is supplied by caller.
 * 2. If iTSB miss, store in MMFSA_I_CTX
 * 3. if dTSB miss, store in MMFSA_D_CTX
 * 4. Thus the [D|I]TLB_STUFF will work as expected.
 */
#define SAVE_CTX1(traptype, ctx1, tmp, label) \
	/* BEGIN CSTYLED */ \
	mov	MMU_SCONTEXT1, tmp ;\
	ldxa	[tmp]ASI_MMU_CTX, ctx1 ;\
	MMU_FAULT_STATUS_AREA(tmp) ;\
	cmp	traptype, FAST_IMMU_MISS_TT ;\
	be,a,pn %icc, label ;\
	stx	ctx1, [tmp + MMFSA_I_CTX] ;\
	cmp	traptype, T_INSTR_MMU_MISS ;\
	be,a,pn %icc, label ;\
	stx	ctx1, [tmp + MMFSA_I_CTX] ;\
	stx	ctx1, [tmp + MMFSA_D_CTX] ;\
label:
	/* END CSTYLED */

#endif /* _ASM */

#ifdef __cplusplus
}
#endif

#endif /* _VM_MACH_SFMMU_H */