/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

/*
 * VM - Hardware Address Translation management.
 *
 * This file describes the contents of the sun reference mmu (sfmmu)
 * specific hat data structures and the sfmmu specific hat procedures.
 * The machine independent interface is described in <vm/hat.h>.
 */

#ifndef _VM_MACH_SFMMU_H
#define	_VM_MACH_SFMMU_H

#include <sys/x_call.h>
#include <sys/cheetahregs.h>
#include <sys/spitregs.h>
#include <sys/opl_olympus_regs.h>
#include <sys/mmu.h>

#ifdef	__cplusplus
extern "C" {
#endif

/*
 * On sun4u platforms, user TSBs are accessed via virtual address by default.
 * Platforms that support ASI_SCRATCHPAD registers can define UTSB_PHYS in the
 * platform Makefile to access user TSBs via physical address but must also
 * designate one ASI_SCRATCHPAD register to hold the second user TSB.  To
 * designate the user TSB scratchpad register, platforms must provide a
 * definition for SCRATCHPAD_UTSBREG2 below.
 *
 * Platforms that use UTSB_PHYS do not allocate 2 locked TLB entries to access
 * the user TSBs.
 */
#if defined(UTSB_PHYS)

#if defined(_OPL)
#define	SCRATCHPAD_UTSBREG2	OPL_SCRATCHPAD_UTSBREG4 /* 4M-256M pages */
#define	SCRATCHPAD_UTSBREG3	OPL_SCRATCHPAD_UTSBREG5 /* 8K-512K pages */
#define	SCRATCHPAD_UTSBREG4	OPL_SCRATCHPAD_UTSBREG6 /* 4M-256M pages */
#else
#error "Compiling UTSB_PHYS but no SCRATCHPAD_UTSBREG2 specified"
#endif /* _OPL */

#endif /* UTSB_PHYS */
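
/*
 * Illustrative sketch (an assumption, not taken from this file): a platform
 * that supports ASI_SCRATCHPAD registers would typically enable physical
 * user TSB access from its platform Makefile, e.g. by adding -DUTSB_PHYS to
 * the kernel compile flags (the exact make variable is platform-specific),
 * and would then supply the SCRATCHPAD_UTSBREG2 definition above, as the
 * _OPL block does.
 */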


#ifdef _ASM

/*
 * This macro is used to set the private/shared secondary context register
 * in sfmmu_alloc_ctx().
 * If is_shctx = 0, we set SCONTEXT to cnum and invalidate the
 * SHARED_CONTEXT register.  If is_shctx = 1, only the SHARED_CONTEXT
 * register is set.
 * (See additional comments in sfmmu_alloc_ctx.)
 * Input:
 * cnum     = context number
 * is_shctx = sfmmu private/shared flag (0: private, 1: shared)
 * tmp1     = %o4 scratch
 * tmp2     = %o5 scratch
 * label    = used as local branch targets
 */
#define	SET_SECCTX(cnum, is_shctx, tmp1, tmp2, label)	   \
	/* BEGIN CSTYLED */				   \
	brnz,pn is_shctx, label/**/1			  ;\
	  sethi   %hi(FLUSH_ADDR), tmp2			  ;\
	mov     MMU_SCONTEXT, tmp1			  ;\
	stxa    cnum, [tmp1]ASI_MMU_CTX			  ;\
	flush   tmp2					  ;\
	sethi   %hi(shctx_on), tmp1			  ;\
	ld      [tmp1 + %lo(shctx_on)], tmp1		  ;\
	brz,pt  tmp1, label/**/3			  ;\
	  mov    %g0, cnum				  ;\
	ba,pt    %xcc, label/**/2			  ;\
label/**/1:						  ;\
	set     SHCTXREG_VALID_BIT, tmp1		  ;\
	sllx    cnum, CTXREG_CTX_SHIFT, cnum		  ;\
	srlx    cnum, CTXREG_CTX_SHIFT, cnum		  ;\
	or      cnum, tmp1, cnum			  ;\
	mov     cnum, tmp1				  ;\
	sllx    cnum, 32, cnum				  ;\
	or      cnum, tmp1, cnum			  ;\
label/**/2:						  ;\
	mov     MMU_SHARED_CONTEXT, tmp1		  ;\
	stxa    cnum, [tmp1]ASI_MMU_CTX			  ;\
	flush   tmp2					  ;\
label/**/3:
	/* END CSTYLED */
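
/*
 * Illustrative usage sketch (hypothetical call site; tmp1/tmp2 follow the
 * %o4/%o5 convention noted above):
 *
 *	SET_SECCTX(%o0, %o1, %o4, %o5, newctx)
 *
 * With a zero is_shctx in %o1 this sets SCONTEXT to the cnum in %o0 and
 * invalidates SHARED_CONTEXT; with a nonzero %o1 only SHARED_CONTEXT is
 * written.
 */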

/*
 * This macro controls the page sizes used for shared context on
 * Rock systems.  Here it expands to nothing.
 */
#define	CHECK_SHARED_PGSZ(tsbarea, tte, tmp, use_shctx, label)

/*
 * This macro is used in the MMU code to check if TL should be lowered from
 * 2 to 1 to pop trapstat's state.  See the block comment in trapstat.c
 * for details.
 */

#define	TSTAT_CHECK_TL1(label, scr1, scr2)			\
	rdpr	%tpc, scr1;					\
	sethi	%hi(KERNELBASE), scr2;				\
	or	scr2, %lo(KERNELBASE), scr2;			\
	cmp	scr1, scr2;					\
	bgeu	%xcc, 9f;					\
	    nop;						\
	ba	label;						\
	wrpr	%g0, 1, %tl;					\
9:


/*
 * The following macros allow us to share the majority of the
 * SFMMU code between sun4u and sun4v platforms.
 */

#define	SETUP_TSB_ASI(qlp, tmp)					\
	movrz	qlp, ASI_N, tmp;				\
	movrnz	qlp, ASI_MEM, tmp;				\
	mov	tmp, %asi

/*
 * Macro to switch to the alternate global registers on sun4u platforms
 * (not applicable to sun4v platforms)
 */
#define	USE_ALTERNATE_GLOBALS(scr)				\
	rdpr	%pstate, scr;					\
	wrpr	scr, PSTATE_MG | PSTATE_AG, %pstate
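
/*
 * Note (descriptive, based on SPARC V9 wrpr semantics): wrpr XORs its two
 * source operands, so writing PSTATE_MG | PSTATE_AG against the current
 * %pstate toggles both bits, e.g. flipping from the MMU globals (MG set)
 * to the alternate globals (AG set) in a single write.
 */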

/*
 * Macro to set %gl register value on sun4v platforms
 * (not applicable to sun4u platforms)
 */
#define	SET_GL_REG(val)

/*
 * Get MMU data tag access register value
 *
 * In:
 *   tagacc, scr1 = scratch registers
 * Out:
 *   tagacc = MMU data tag access register value
 */
#define	GET_MMU_D_TAGACC(tagacc, scr1)				\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, tagacc

/*
 * Get MMU data tag target register
 *
 * In:
 *   ttarget, scr1 = scratch registers
 * Out:
 *   ttarget = MMU data tag target register value
 */
#define	GET_MMU_D_TTARGET(ttarget, scr1)			\
	ldxa	[%g0]ASI_DMMU, ttarget

/*
 * Get MMU data/instruction tag access register values
 *
 * In:
 *   dtagacc, itagacc, scr1, scr2 = scratch registers
 * Out:
 *   dtagacc = MMU data tag access register value
 *   itagacc = MMU instruction tag access register value
 */
#define	GET_MMU_BOTH_TAGACC(dtagacc, itagacc, scr1, scr2)	\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, dtagacc;			\
	ldxa	[scr1]ASI_IMMU, itagacc

/*
 * Get MMU data fault address from the tag access register
 *
 * In:
 *   daddr, scr1 = scratch registers
 * Out:
 *   daddr = MMU data fault address
 */
#define	GET_MMU_D_ADDR(daddr, scr1)				\
	mov	MMU_TAG_ACCESS, scr1;				\
	ldxa	[scr1]ASI_DMMU, daddr;				\
	set	TAGACC_CTX_MASK, scr1;				\
	andn	daddr, scr1, daddr


/*
 * Load ITLB entry
 *
 * In:
 *   tte = reg containing tte
 *   scr1, scr2, scr3, scr4 = scratch registers (not used)
 */
#define	ITLB_STUFF(tte, scr1, scr2, scr3, scr4)			\
	stxa	tte, [%g0]ASI_ITLB_IN

/*
 * Load DTLB entry
 *
 * In:
 *   tte = reg containing tte
 *   scr1, scr2, scr3, scr4 = scratch registers (not used)
 */
#define	DTLB_STUFF(tte, scr1, scr2, scr3, scr4)			\
	stxa	tte, [%g0]ASI_DTLB_IN


/*
 * Returns the PFN given the TTE and vaddr
 *
 * In:
 *   tte = reg containing tte
 *   vaddr = reg containing vaddr
 *   label = temporary local label
 *   scr1, scr2, scr3 = scratch registers
 * Out:
 *   tte = PFN value
 */
#define	TTETOPFN(tte, vaddr, label, scr1, scr2, scr3)			\
	srlx	tte, TTE_SZ_SHFT, scr1;					\
	and	scr1, TTE_SZ_BITS, scr1;	/* scr1 = tte_size */	\
	srlx	tte, TTE_SZ2_SHFT, scr3;				\
	and	scr3, TTE_SZ2_BITS, scr3;	/* scr3 = tte_size2 */	\
	or	scr1, scr3, scr1;					\
	sllx	scr1, 1, scr2;						\
	add	scr2, scr1, scr2;		/* mulx 3 */		\
	sllx	tte, TTE_PA_LSHIFT, tte;				\
	add	scr2, MMU_PAGESHIFT + TTE_PA_LSHIFT, scr3;		\
	/* BEGIN CSTYLED */						\
	brz,pt	scr2, label/**/1;					\
	  srlx	tte, scr3, tte;						\
	/* END CSTYLED */						\
	sllx	tte, scr2, tte;						\
	set	1, scr1;						\
	add	scr2, MMU_PAGESHIFT, scr3;				\
	sllx	scr1, scr3, scr1;					\
	sub	scr1, 1, scr1;	/* scr1 = TTE_PAGE_OFFSET(ttesz) */	\
	and	vaddr, scr1, scr2;					\
	srln	scr2, MMU_PAGESHIFT, scr2;				\
	or	tte, scr2, tte;						\
	/* CSTYLED */							\
label/**/1:
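
/*
 * Equivalent C sketch of the arithmetic above (illustrative only).  Each
 * step in TTE size code is an 8x larger page, so the page shift for size
 * code sz is MMU_PAGESHIFT + 3 * sz:
 *
 *	sz  = ((tte >> TTE_SZ_SHFT) & TTE_SZ_BITS) |
 *	    ((tte >> TTE_SZ2_SHFT) & TTE_SZ2_BITS);
 *	pfn = ((tte << TTE_PA_LSHIFT) >>
 *	    (TTE_PA_LSHIFT + MMU_PAGESHIFT + 3 * sz)) << (3 * sz);
 *	pfn |= (vaddr & ((1ULL << (MMU_PAGESHIFT + 3 * sz)) - 1)) >>
 *	    MMU_PAGESHIFT;
 */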

/*
 * No support for non-coherent I-cache in sun4u
 */
#define	TTE_SET_EXEC_ML(tte, ttepa, tmp1, label)
#define	TTE_CLR_SOFTEXEC_ML(tte)
#define	TTE_CHK_SOFTEXEC_ML(tte)	andcc tte, 0, %g0

/*
 * TTE_SET_REF_ML is a macro that updates the reference bit if it is
 * not already set.  Older sun4u platforms use the virtual address to
 * flush entries from the dcache; that address is not available here,
 * but there are only two positions in the 64K dcache where the cache
 * line can reside, so we need to flush both of them.
 *
 * Parameters:
 * tte      = reg containing tte
 * ttepa    = physical pointer to tte
 * tsbarea  = tsb miss area
 * tmp1     = tmp reg
 * tmp2     = tmp reg
 * label    = temporary label
 */

#define	TTE_SET_REF_ML(tte, ttepa, tsbarea, tmp1, tmp2, label)	\
	/* BEGIN CSTYLED */						\
	/* check reference bit */					\
	andcc	tte, TTE_REF_INT, %g0;					\
	bnz,pt	%xcc, label/**/4;	/* if ref bit set-skip ahead */	\
	  nop;								\
	GET_CPU_IMPL(tmp1);						\
	cmp	tmp1, SPITFIRE_IMPL;					\
	blt	%icc, label/**/2;	/* skip flush if FJ-OPL cpus */	\
	cmp	tmp1, CHEETAH_IMPL;					\
	bl,a	%icc, label/**/1;					\
	/* update reference bit */					\
	lduh	[tsbarea + TSBMISS_DMASK], tmp1;			\
	stxa	%g0, [ttepa]ASI_DC_INVAL; /* flush line from dcache */	\
	membar	#Sync;							\
	ba	label/**/2;						\
label/**/1:								\
	and	ttepa, tmp1, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line1 from dcache */	\
	or	%g0, 1, tmp2;						\
	sllx	tmp2, MMU_PAGESHIFT, tmp2;				\
	xor	tmp1, tmp2, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line2 from dcache */	\
	membar	#Sync;							\
label/**/2:								\
	or	tte, TTE_REF_INT, tmp1;					\
	casxa	[ttepa]ASI_MEM, tte, tmp1; 	/* update ref bit */	\
	cmp	tte, tmp1;						\
	bne,a,pn %xcc, label/**/2;					\
	  ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */	\
	or	tte, TTE_REF_INT, tte;					\
label/**/4:								\
	/* END CSTYLED */

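/*
 * Illustrative C sketch of the ref-bit update loop above (the casx()
 * helper is hypothetical): once the dcache line(s) covering the TTE have
 * been flushed, the reference bit is set with a compare-and-swap that
 * retries if the TTE changed underneath us:
 *
 *	do {
 *		old = tte;
 *		tte = casx(ttepa, old, old | TTE_REF_INT);
 *	} while (tte != old);
 *
 * TTE_SET_REFMOD_ML below follows the same pattern for the ref/mod bits.
 */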

/*
 * TTE_SET_REFMOD_ML is a macro that updates the reference and modify bits
 * if not already set.
 *
 * Parameters:
 * tte      = reg containing tte
 * ttepa    = physical pointer to tte
 * tsbarea  = tsb miss area
 * tmp1     = tmp reg
 * tmp2     = tmp reg
 * label    = temporary label
 * exitlabel = label to jump to if the write permission bit is not set
 */

#define	TTE_SET_REFMOD_ML(tte, ttepa, tsbarea, tmp1, tmp2, label,	\
    exitlabel)								\
	/* BEGIN CSTYLED */						\
	/* check write permission bit */				\
	andcc	tte, TTE_WRPRM_INT, %g0;				\
	bz,pn	%xcc, exitlabel;	/* exit if wr_perm not set */	\
	  nop;								\
	andcc	tte, TTE_HWWR_INT, %g0;					\
	bnz,pn	%xcc, label/**/4;	/* nothing to do */		\
	  nop;								\
	GET_CPU_IMPL(tmp1);						\
	cmp	tmp1, SPITFIRE_IMPL;					\
	blt	%icc, label/**/2;	/* skip flush if FJ-OPL cpus */	\
	cmp	tmp1, CHEETAH_IMPL;					\
	bl,a	%icc, label/**/1;					\
	/* update reference bit */					\
	lduh	[tsbarea + TSBMISS_DMASK], tmp1;			\
	stxa	%g0, [ttepa]ASI_DC_INVAL; /* flush line from dcache */	\
	membar	#Sync;							\
	ba	label/**/2;						\
label/**/1:								\
	and	ttepa, tmp1, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line1 from dcache */	\
	or	%g0, 1, tmp2;						\
	sllx	tmp2, MMU_PAGESHIFT, tmp2;				\
	xor	tmp1, tmp2, tmp1;					\
	stxa	%g0, [tmp1]ASI_DC_TAG; /* flush line2 from dcache */	\
	membar	#Sync;							\
label/**/2:								\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tmp1;			\
	casxa	[ttepa]ASI_MEM, tte, tmp1; /* update ref/mod bit */	\
	cmp	tte, tmp1;						\
	bne,a,pn %xcc, label/**/2;					\
	  ldxa	[ttepa]ASI_MEM, tte;	/* MMU_READTTE through pa */	\
	or	tte, TTE_HWWR_INT | TTE_REF_INT, tte;			\
label/**/4:								\
	/* END CSTYLED */



#ifndef UTSB_PHYS

/*
 * Synthesize TSB base register contents for a process with
 * a single TSB.
 *
 * We patch the virtual address mask in at runtime since the
 * number of significant virtual address bits in the TSB VA
 * can vary depending upon the TSB slab size being used on the
 * machine.
 *
 * In:
 *   tsbinfo = TSB info pointer (ro)
 *   vabase = value of utsb_vabase (ro)
 *   tmp1, tmp2 = scratch registers
 *   label = runtime-patch label
 * Out:
 *   tsbreg = value to program into TSB base register
 */

#define	MAKE_TSBREG(tsbreg, tsbinfo, vabase, tmp1, tmp2, label)		\
	/* BEGIN CSTYLED */						\
	ldx	[tsbinfo + TSBINFO_VADDR], tmp1;			\
	.global	label/**/_tsbreg_vamask					;\
label/**/_tsbreg_vamask:						\
	or	%g0, RUNTIME_PATCH, tsbreg;				\
	lduh	[tsbinfo + TSBINFO_SZCODE], tmp2;			\
	sllx	tsbreg, TSBREG_VAMASK_SHIFT, tsbreg;			\
	or	vabase, tmp2, tmp2;					\
	and	tmp1, tsbreg, tsbreg;					\
	or	tsbreg, tmp2, tsbreg;					\
	/* END CSTYLED */

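/*
 * Illustrative result (single-TSB case, per the LOAD_TSBREG comment later
 * in this file): the synthesized value carries utsb_vabase in the upper VA
 * bits, the runtime-patched bits 21:13 of tsbinfo->tsb_va in the middle,
 * and the TSB size code in bits 2:0.
 */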

/*
 * Synthesize TSB base register contents for a process with
 * two TSBs.  See hat_sfmmu.h for the layout of the TSB base
 * register in this case.
 *
 * In:
 *   tsb1 = pointer to first TSB info (ro)
 *   tsb2 = pointer to second TSB info (ro)
 * Out:
 *   tsbreg = value to program into TSB base register
 */
#define	MAKE_TSBREG_SECTSB(tsbreg, tsb1, tsb2, tmp1, tmp2, tmp3, label)	\
	/* BEGIN CSTYLED */						\
	set	TSBREG_MSB_CONST, tmp3					;\
	sllx	tmp3, TSBREG_MSB_SHIFT, tsbreg				;\
	.global	label/**/_tsbreg_vamask					;\
label/**/_tsbreg_vamask:						;\
	or	%g0, RUNTIME_PATCH, tmp3				;\
	sll	tmp3, TSBREG_VAMASK_SHIFT, tmp3				;\
	ldx	[tsb1 + TSBINFO_VADDR], tmp1				;\
	ldx	[tsb2 + TSBINFO_VADDR], tmp2				;\
	and	tmp1, tmp3, tmp1					;\
	and	tmp2, tmp3, tmp2					;\
	sllx	tmp2, TSBREG_SECTSB_MKSHIFT, tmp2			;\
	or	tmp1, tmp2, tmp3					;\
	or	tsbreg, tmp3, tsbreg					;\
	lduh	[tsb1 + TSBINFO_SZCODE], tmp1				;\
	lduh	[tsb2 + TSBINFO_SZCODE], tmp2				;\
	and	tmp1, TSB_SOFTSZ_MASK, tmp1				;\
	and	tmp2, TSB_SOFTSZ_MASK, tmp2				;\
	sllx	tmp2, TSBREG_SECSZ_SHIFT, tmp2				;\
	or	tmp1, tmp2, tmp3					;\
	or	tsbreg, tmp3, tsbreg					;\
	/* END CSTYLED */


/*
 * Load the locked TSB TLB entry.
 *
 * In:
 *   tsbinfo = tsb_info pointer as va (ro)
 *   tteidx = shifted index into TLB to load the locked entry (ro)
 *   va = virtual address at which to load the locked TSB entry (ro)
 * Scratch:
 *   tmp
 */
#define	LOAD_TSBTTE(tsbinfo, tteidx, va, tmp)				\
	mov	MMU_TAG_ACCESS, tmp;					\
	stxa	va, [tmp]ASI_DMMU;		/* set tag access */	\
	membar	#Sync;							\
	ldx	[tsbinfo + TSBINFO_TTE], tmp;	/* fetch locked tte */	\
	stxa	tmp, [tteidx]ASI_DTLB_ACCESS;	/* load locked tte */	\
	membar	#Sync


/*
 * In the current implementation, TSBs usually come from physically
 * contiguous chunks of memory up to 4MB in size, but 8K TSBs may be
 * allocated from 8K chunks of memory under certain conditions.  To
 * prevent aliasing in the virtual address cache when the TSB slab is
 * 8K in size we must align the reserved (TL>0) TSB virtual address to
 * have the same low-order bits as the kernel (TL=0) TSB virtual address,
 * and map 8K TSBs with an 8K TTE.  In cases where the TSB reserved VA
 * range is smaller than the assumed 4M we will patch the shift at
 * runtime; otherwise we leave it alone (which is why the RUNTIME_PATCH
 * constant doesn't appear below).
 *
 * In:
 *   tsbinfo (ro)
 *   resva: reserved VA base for this TSB
 * Out:
 *   resva: corrected VA for this TSB
 */
#define	RESV_OFFSET(tsbinfo, resva, tmp1, label)			\
	/* BEGIN CSTYLED */						\
	lduh	[tsbinfo + TSBINFO_SZCODE], tmp1			;\
	brgz,pn	tmp1, label/**/9					;\
	  nop								;\
	ldx	[tsbinfo + TSBINFO_VADDR], tmp1				;\
	.global	label/**/_resv_offset					;\
label/**/_resv_offset:							;\
	sllx	tmp1, (64 - MMU_PAGESHIFT4M), tmp1			;\
	srlx	tmp1, (64 - MMU_PAGESHIFT4M), tmp1			;\
	or	tmp1, resva, resva					;\
label/**/9:								\
	/* END CSTYLED */

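/*
 * In C terms (illustrative): for an 8K TSB (size code 0) this computes
 *
 *	resva |= tsbinfo->tsb_va & ((1ULL << MMU_PAGESHIFT4M) - 1);
 *
 * carrying the low-order bits of the TSB VA into the reserved VA so the
 * TL>0 mapping aliases the TL=0 mapping in the virtual cache.
 */
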
/*
 * Determine the pointer of the entry in the first TSB to probe given
 * the 8K TSB pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 *   tmp = scratch register
 *   label = label for hot patching of utsb_vabase
 *
 * Out: tsbe_ptr = TSB entry address
 *
 * Note: This function is patched at runtime for performance reasons.
 *	 Any changes here require sfmmu_patch_utsb to be fixed as well.
 */

#define	GET_1ST_TSBE_PTR(tsbp8k, tsbe_ptr, tmp, label)			\
	/* BEGIN CSTYLED */						\
label/**/_get_1st_tsbe_ptr:						;\
	RUNTIME_PATCH_SETX(tsbe_ptr, tmp)				;\
	/* tsbeptr = contents of utsb_vabase */				;\
	/* clear upper bits leaving just bits 21:0 of TSB ptr. */	;\
	sllx	tsbp8k, TSBREG_FIRTSB_SHIFT, tmp			;\
	/* finish clear */						;\
	srlx	tmp, TSBREG_FIRTSB_SHIFT, tmp				;\
	/* or-in bits 41:22 of the VA to form the real pointer. */	;\
	or	tsbe_ptr, tmp, tsbe_ptr					\
	/* END CSTYLED */

/*
 * Determine the base address of the second TSB given the 8K TSB
 * pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 *   tmp = scratch register
 *   label = label for hot patching of utsb_vabase
 *
 * Out:
 *   tsbbase = TSB base address
 *
 * Note: This function is patched at runtime for performance reasons.
 *	 Any changes here require sfmmu_patch_utsb to be fixed as well.
 */

#define	GET_2ND_TSB_BASE(tsbp8k, tsbbase, tmp, label)			\
	/* BEGIN CSTYLED */						\
label/**/_get_2nd_tsb_base:						;\
	RUNTIME_PATCH_SETX(tsbbase, tmp)				;\
	/* tsbbase = contents of utsb4m_vabase */			;\
	/* clear upper bits leaving just bits 21:xx of TSB addr. */	;\
	sllx	tsbp8k, TSBREG_SECTSB_LSHIFT, tmp			;\
	/* clear lower bits leaving just 21:13 in 8:0 */		;\
	srlx	tmp, (TSBREG_SECTSB_RSHIFT + MMU_PAGESHIFT), tmp	;\
	/* adjust TSB offset to bits 21:13 */				;\
	sllx	tmp, MMU_PAGESHIFT, tmp					;\
	or	tsbbase, tmp, tsbbase					;\
	/* END CSTYLED */

/*
 * Determine the size code of the second TSB given the 8K TSB
 * pointer register contents.
 *
 * In:
 *   tsbp8k = 8K TSB pointer register (ro)
 * Out:
 *   size = TSB size code
 */

#define	GET_2ND_TSB_SIZE(tsbp8k, size)					\
	srlx	tsbp8k, TSBREG_SECSZ_SHIFT, size;			\
	and	size, TSB_SOFTSZ_MASK, size

/*
 * Get the location in the 2nd TSB of the tsbe for this fault.
 * Assumes that the second TSB only contains 4M mappings.
 *
 * In:
 *   tagacc = tag access register (clobbered)
 *   tsbp8k = contents of TSB8K pointer register (ro)
 *   tmp1, tmp2 = scratch registers
 *   label = label at which to patch in reserved TSB 4M VA range
 * Out:
 *   tsbe_ptr = pointer to the tsbe in the 2nd TSB
 */
#define	GET_2ND_TSBE_PTR(tagacc, tsbp8k, tsbe_ptr, tmp1, tmp2, label)	\
	GET_2ND_TSB_BASE(tsbp8k, tsbe_ptr, tmp2, label);		\
	/* tsbe_ptr = TSB base address, tmp2 = junk */			\
	GET_2ND_TSB_SIZE(tsbp8k, tmp1);					\
	/* tmp1 = TSB size code */					\
	GET_TSBE_POINTER(MMU_PAGESHIFT4M, tsbe_ptr, tagacc, tmp1, tmp2)


#else /* !UTSB_PHYS */


/*
 * Determine the pointer of the entry in the first TSB to probe given
 * the 8K TSB pointer register contents.
 *
 * In:
 *   tagacc = tag access register
 *   tsbe_ptr = 8K TSB pointer register
 *   tmp1, tmp2 = scratch registers
 *
 * Out: tsbe_ptr = TSB entry address
 *
 * Note: This macro is a nop since the 8K TSB pointer register
 *	 is the entry pointer and does not need to be decoded.
 *	 It is defined to allow for code sharing with sun4v.
 */

#define	GET_1ST_TSBE_PTR(tagacc, tsbe_ptr, tmp1, tmp2)

#endif /* !UTSB_PHYS */


/*
 * Load TSB base register.  In the single TSB case this register
 * contains utsb_vabase, bits 21:13 of tsbinfo->tsb_va, and the
 * TSB size code in bits 2:0.  See hat_sfmmu.h for the layout in
 * the case where we have multiple TSBs per process.
 *
 * In:
 *   tsbreg = value to load (ro)
 */
#define	LOAD_TSBREG(tsbreg, tmp1, tmp2)					\
	mov	MMU_TSB, tmp1;						\
	sethi	%hi(FLUSH_ADDR), tmp2;					\
	stxa	tsbreg, [tmp1]ASI_DMMU;		/* dtsb reg */		\
	stxa	tsbreg, [tmp1]ASI_IMMU;		/* itsb reg */		\
	flush	tmp2

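/*
 * Note (descriptive): the flush of FLUSH_ADDR ensures the preceding ASI
 * stores to the D- and I-TSB base registers take effect before execution
 * continues, mirroring the FLUSH_ADDR usage in SET_SECCTX above.
 */
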
#ifdef UTSB_PHYS
#define	UTSB_PROBE_ASI	ASI_QUAD_LDD_PHYS
#else
#define	UTSB_PROBE_ASI	ASI_NQUAD_LD
#endif
#define	PROBE_TSB(tsbe_ptr, tag, tsbtag, label)                         \
	/* BEGIN CSTYLED */                                             \
        ldda    [tsbe_ptr]UTSB_PROBE_ASI, tsbtag                        ;\
        cmp     tsbtag, tag             /* compare tag w/ TSB */        ;\
        bne,pn  %xcc, label/**/1        /* branch if !match */          ;\
          nop                                                           \
	/* END CSTYLED */
/*
 * Probe a TSB.  On a miss, execution continues at the end of the macro
 * for most probes, except for the 3rd ITSB probe, which jumps to the TSB
 * miss handler.  On a hit, DTSB probes retry the faulted instruction.
 * ITSB probes check the execute bit on a hit and branch to exec_fault if
 * the bit is not set; otherwise they retry the faulted instruction.  A
 * hit in the second ITSB does ITLB synthesis if the synthesis bit is set.
 *
 * tsbe_ptr = precomputed TSB entry pointer (in, ro)
 * vpg_4m = 4M virtual page number for tag matching  (in, ro)
 * label = where to branch to if this is a miss (text)
 *
 * For trapstat, we have to explicitly use these registers.
 * g4 = location tag will be retrieved into from TSB (out)
 * g5 = location data (tte) will be retrieved into from TSB (out)
 *
 * For the first TSB probe, tsbe_ptr is %g1.  The other probe macros move
 * tsbe_ptr into %g1 on a hit for the sake of traptrace.
 *
 * If the probe fails and we continue from the call site, %g4-%g5 are
 * clobbered.  The 2nd ITSB probe macro also clobbers %g6 in this case.
 */
#define	PROBE_1ST_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

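/*
 * Illustrative call-site sketch (hypothetical label; register usage per
 * the comment above): a DTLB miss handler with the first-TSB entry
 * pointer in %g1 and the 4M virtual page tag in %g6 would probe with
 *
 *	PROBE_1ST_DTSB(%g1, %g6, udtlb_fail)
 *
 * and fall through to the next probe on a miss.
 */
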
#define	PROBE_2ND_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        mov     tsbe_ptr, %g1       /* trace_tsbhit wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_1ST_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,pn   %icc, exec_fault                                        ;\
          nop                                                           ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                           /* retry faulted instruction */ ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_2ND_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        ldda    [tsbe_ptr]UTSB_PROBE_ASI, %g4 /* g4 = tag, g5 = data */ ;\
        cmp     %g4, vpg_4m             /* compare tag w/ TSB */        ;\
        bne,pn  %xcc, label/**/2        /* branch if !match */          ;\
          or    %g0, TTE4M, %g6                                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,a,pn %icc, label/**/1                                        ;\
          sllx  %g6, TTE_SZ_SHFT, %g6                                   ;\
        mov     tsbe_ptr, %g1         /* trap trace wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                        /* retry faulted instruction */    ;\
label/**/1:                                                             ;\
        andcc   %g5, TTE_E_SYNTH_INT, %g0                               ;\
        bz,pn   %icc, exec_fault                                        ;\
          mov   tsbe_ptr, %g1       /* trap trace wants ptr in %g1 */   ;\
        or      %g5, %g6, %g5                                           ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/2:
	/* END CSTYLED */

#ifdef UTSB_PHYS

/*
 * Updates the context field in the tag access register with the shared
 * context to force the next I/DTLB_STUFF() to load this mapping into
 * the TLB with the shared context.
 */
#define	SET_SHCTX_TAGACC(tmp1, tmp2, asi)                               \
	/* BEGIN CSTYLED */                                             \
        mov     MMU_TAG_ACCESS, tmp2                                    ;\
        ldxa    [tmp2]asi, tmp2                 /* tmp2 = VA|CTX */     ;\
        srlx    tmp2, TAGACC_SHIFT, tmp2                                ;\
        sllx    tmp2, TAGACC_SHIFT, tmp2        /* tmp2 = VA */         ;\
        mov     MMU_SHARED_CONTEXT, tmp1        /* clobber tsbe_ptr */  ;\
        ldxa    [tmp1]ASI_MMU_CTX, tmp1         /* tmp1 = shctx reg */  ;\
        sllx    tmp1, SHCTXREG_CTX_LSHIFT, tmp1                         ;\
        srlx    tmp1, SHCTXREG_CTX_LSHIFT, tmp1 /* tmp1 = SHCTX */      ;\
        or      tmp1, tmp2, tmp1                /* tmp1 = VA|SHCTX */   ;\
        mov     MMU_TAG_ACCESS, tmp2                                    ;\
        stxa    tmp1, [tmp2]asi                 /* store VA|SHCTX */
	/* END CSTYLED */

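/*
 * Roughly equivalent C sketch (illustrative; the rd/wr helpers are
 * hypothetical): keep the VA field of the tag access register and
 * substitute the shared context number for its context field:
 *
 *	va = (rd_tagacc(asi) >> TAGACC_SHIFT) << TAGACC_SHIFT;
 *	shctx = (rd_shared_ctx() << SHCTXREG_CTX_LSHIFT) >>
 *	    SHCTXREG_CTX_LSHIFT;
 *	wr_tagacc(asi, va | shctx);
 */
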
#define	PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                       \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        mov     tsbe_ptr, %g1       /* trace_tsbhit wants ptr in %g1 */ ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        SET_SHCTX_TAGACC(%g3, %g4, ASI_DMMU)                            ;\
        DTLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                      /* retry faulted instruction */      ;\
label/**/1:                                                             \
	/* END CSTYLED */

#define	PROBE_3RD_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                       ;\
	/* END CSTYLED */

#define	PROBE_4TH_DTSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_DTSB(tsbe_ptr, vpg_4m, label)                       ;\
	/* END CSTYLED */

#define	PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, label)                       \
	/* BEGIN CSTYLED */                                             \
        PROBE_TSB(tsbe_ptr, vpg_4m, %g4, label)                         ;\
        andcc   %g5, TTE_EXECPRM_INT, %g0  /* check execute bit */      ;\
        bz,pn   %icc, exec_fault                                        ;\
          mov   tsbe_ptr, %g1           /* for traptrace sake */        ;\
        TT_TRACE(trace_tsbhit)                                          ;\
        SET_SHCTX_TAGACC(%g3, %g4, ASI_IMMU)                            ;\
        ITLB_STUFF(%g5, %g1, %g2, %g3, %g4)                             ;\
        retry                           /* retry faulted instruction */ ;\
label/**/1:
	/* END CSTYLED */

#define	PROBE_3RD_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, sfmmu_tsb_miss_tt)           ;\
	/* END CSTYLED */

#define	PROBE_4TH_ITSB(tsbe_ptr, vpg_4m, label)                         \
	/* BEGIN CSTYLED */                                             \
        PROBE_SHCTX_ITSB(tsbe_ptr, vpg_4m, label)                       ;\
	/* END CSTYLED */

/*
 * The traptype is supplied by the caller.
 *
 * If it is an ITSB miss, store the shared context into the IMMU tag
 * access register; if a DTSB miss, store it into the DMMU tag access
 * register.  Thus the subsequent [D|I]TLB_STUFF() will work as expected.
 */
#define	SAVE_CTX1(traptype, tmp1, tmp2, label)                          \
	/* BEGIN CSTYLED */                                             \
        cmp     traptype, FAST_IMMU_MISS_TT                             ;\
        be,pn   %icc, label/**/1                                        ;\
          nop                                                           ;\
        SET_SHCTX_TAGACC(tmp1, tmp2, ASI_DMMU)                          ;\
        membar  #Sync                                                   ;\
        ba,a    label/**/2                                              ;\
label/**/1:                                                             ;\
        SET_SHCTX_TAGACC(tmp1, tmp2, ASI_IMMU)                          ;\
        sethi   %hi(FLUSH_ADDR), tmp1                                   ;\
        flush   tmp1                                                    ;\
label/**/2:
	/* END CSTYLED */

#endif /* UTSB_PHYS */

#endif /* _ASM */

#ifdef	__cplusplus
}
#endif

#endif	/* _VM_MACH_SFMMU_H */