/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Some aren't implemented for some variants, and some
 * are implemented inline in tlbflush.h:
 *
 *	- tlbil_va
 *	- tlbil_pid
 *	- tlbil_all
 *	- tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S.
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Partially rewritten by Cort Dougan (cort@cs.nmt.edu),
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/mmu.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/bug.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#if defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */

#elif defined(CONFIG_44x) /* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
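/*
 * Inputs, as used by the code below:
 *   r3 = virtual address to invalidate
 *   r4 = PID (inserted into the MMUCR STID field for the tlbsx search)
 */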
_GLOBAL(__tlbil_va)
	mfspr	r5,SPRN_MMUCR
	mfmsr   r10

	/*
	 * We write 16 bits of STID since 47x supports that much; we
	 * should never be passed out-of-bounds values on 440 (hopefully).
	 */
	rlwimi  r5,r4,0,16,31

	/* We have to run the search with interrupts disabled, otherwise
	 * an interrupt which causes a TLB miss can clobber the MMUCR
	 * between the mtspr and the tlbsx.
	 *
	 * Critical and Machine Check interrupts take care of saving
	 * and restoring MMUCR, so only normal interrupts have to be
	 * taken care of.
	 */
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	tlbsx.	r6,0,r3
	bne	10f
	sync
#ifndef CONFIG_PPC_47x
	/* On 440 there are only 64 TLB entries, so the index returned in
	 * r6 is < 64, which means bit 22 is clear.  Since 22 is the V bit
	 * in the TLB_PAGEID, loading this value will invalidate the TLB
	 * entry.
	 */
	tlbwe	r6,r6,PPC44x_TLB_PAGEID
#else
	oris	r7,r6,0x8000	/* specify way explicitly */
	clrrwi	r4,r3,12	/* get an EPN for the hashing with V = 0 */
	ori	r4,r4,PPC47x_TLBE_SIZE
	tlbwe   r4,r7,0		/* write it */
#endif /* !CONFIG_PPC_47x */
	isync
10:	wrtee	r10
	blr

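/*
 * _tlbil_all and _tlbil_pid share one implementation here: a full
 * sweep that rewrites every entry up to tlb_44x_hwater (440) or walks
 * every set and way (47x), so the PID argument is effectively ignored.
 */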
_GLOBAL(_tlbil_all)
_GLOBAL(_tlbil_pid)
#ifndef CONFIG_PPC_47x
	li	r3,0
	sync

	/* Load high watermark */
	lis	r4,tlb_44x_hwater@ha
	lwz	r5,tlb_44x_hwater@l(r4)

1:	tlbwe	r3,r3,PPC44x_TLB_PAGEID
	addi	r3,r3,1
	cmpw	0,r3,r5
	ble	1b

	isync
	blr
#else
	/* 476 variant. There's no simple way to do this; hopefully we'll
	 * manage to limit the number of such full invalidates.
	 */
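	/*
	 * Walk all 256 sets of 4 ways each.  tlb_47x_boltmap has one bit
	 * per set (a new word is loaded every 32 sets) telling us whether
	 * way 0 of that set is bolted and must be left alone.
	 */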
	mfmsr	r11		/* Interrupts off */
	wrteei	0
	li	r3,-1		/* Current set */
	lis	r10,tlb_47x_boltmap@h
	ori	r10,r10,tlb_47x_boltmap@l
	lis	r7,0x8000	/* Specify way explicitly */

	b	9f		/* For each set */

1:	li	r9,4		/* Number of ways */
	li	r4,0		/* Current way */
	li	r6,0		/* Default entry value 0 */
	andi.	r0,r8,1		/* Check if way 0 is bolted */
	mtctr	r9		/* Load way counter */
	bne-	3f		/* Bolted, skip loading it */

2:	/* For each way */
	or	r5,r3,r4	/* Make way|index for tlbre */
	rlwimi	r5,r5,16,8,15	/* Copy index into position */
	tlbre	r6,r5,0		/* Read entry */
3:	addis	r4,r4,0x2000	/* Next way */
	andi.	r0,r6,PPC47x_TLB0_VALID /* Valid entry ? */
	beq	4f		/* Nope, skip it */
	rlwimi	r7,r5,0,1,2	/* Insert way number */
	rlwinm	r6,r6,0,21,19	/* Clear V */
	tlbwe   r6,r7,0		/* Write it */
4:	bdnz	2b		/* Loop for each way */
	srwi	r8,r8,1		/* Next boltmap bit */
9:	cmpwi	cr1,r3,255	/* Last set done ? */
	addi	r3,r3,1		/* Next set */
	beq	cr1,1f		/* End of loop */
	andi.	r0,r3,0x1f	/* Need to load a new boltmap word ? */
	bne	1b		/* No, loop */
	lwz	r8,0(r10)	/* Load boltmap entry */
	addi	r10,r10,4	/* Next word */
	b	1b		/* Then loop */
1:	isync			/* Sync shadows */
	wrtee	r11
	blr
#endif /* !CONFIG_PPC_47x */

#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check though; it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
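/*
 * Inputs, as used by the code below:
 *   r3 = virtual address to invalidate
 *   r4 = PID (inserted into the MMUCR STID field before the tlbivax)
 */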
_GLOBAL(_tlbivax_bcast)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10
	rlwimi	r5,r4,0,16,31
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	isync
	PPC_TLBIVAX(0, R3)
	isync
	mbar
	tlbsync
BEGIN_FTR_SECTION
	b	1f
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
	sync
	wrtee	r10
	blr
/*
 * DD2 HW could hang if an instruction fetch happens before the msync
 * completes.  Touch enough instruction cache lines to ensure cache hits.
 */
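/*
 * bcl 20,31,$+4 below is the usual "branch and link to the next
 * instruction" idiom: it only serves to get the current address into
 * LR so the following cache lines can be touched relative to it.
 */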
1:	mflr	r9
	bcl	20,31,$+4
2:	mflr	r6
	li	r7,32
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	sync
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	mtlr	r9
	wrtee	r10
	blr
#endif /* CONFIG_PPC_47x */

#elif defined(CONFIG_PPC_85xx)
/*
 * FSL BookE implementations.
 *
 * Since the feature sections use _SECTION_ELSE, the larger code path
 * has to come before the _SECTION_ELSE.
 */

/*
 * Flush MMU TLB on the local processor
 */
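/*
 * Two paths: older cores flash-invalidate via MMUCSR0[TLBFI] and spin
 * until the bit clears; cores with MMU_FTR_USE_TLBILX use a tlbilx
 * instead.
 */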
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

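/*
 * Flush all TLB entries for a given PID (in r3) on the local processor.
 * With MMU_FTR_USE_TLBILX this is a targeted tlbilx by PID; without it
 * we fall back to a full MMUCSR0[TLBFI] flash invalidate.
 */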
_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
	slwi	r3,r3,16
	mfmsr	r10
	wrteei	0
	mfspr	r4,SPRN_MAS6	/* save MAS6 */
	mtspr	SPRN_MAS6,r3
	PPC_TLBILX_PID(0,R0)
	mtspr	SPRN_MAS6,r4	/* restore MAS6 */
	wrtee	r10
MMU_FTR_SECTION_ELSE
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

/*
 * Flush MMU TLB for a particular address, but only on the local processor
 * (no broadcast)
 */
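/*
 * Inputs, as used by the code below:
 *   r3 = virtual address to invalidate
 *   r4 = PID (shifted into the MAS6 SPID field)
 */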
_GLOBAL(__tlbil_va)
	mfmsr	r10
	wrteei	0
	slwi	r4,r4,16
	ori	r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
	tlbsx	0,r3
	mfspr	r4,SPRN_MAS1		/* check valid */
	andis.	r3,r4,MAS1_VALID@h
	beq	1f
	rlwinm	r4,r4,0,1,31
	mtspr	SPRN_MAS1,r4
	tlbwe
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
1:	wrtee	r10
	blr
#elif defined(CONFIG_PPC_BOOK3E_64)
/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
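/*
 * Register conventions, as used by the code below: r3 = PID for the
 * _tlbil_pid variants; for _tlbil_va and _tlbivax_bcast, r3 = virtual
 * address, r4 = PID, r5 = page size (MAS6 ISIZE) and r6 = indirect
 * entry flag (MAS6 SIND).
 */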
_GLOBAL(_tlbil_pid)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_pid_noind)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	ori	r4,r4,MAS6_SIND
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_all)
	PPC_TLBILX_ALL(0,R0)
	msync
	isync
	blr

_GLOBAL(_tlbil_va)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBILX_VA(0,R3)
	msync
	isync
	wrtee	r10
	blr

_GLOBAL(_tlbivax_bcast)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBIVAX(0,R3)
	mbar
	tlbsync
	sync
	wrtee	r10
	blr
#else
#error Unsupported processor type!
#endif

#if defined(CONFIG_PPC_E500)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load the TLBCAM[index] entry into the L2 CAM MMU.
 * Must preserve r7, r8, r9, r10, r11, r12
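 * (loadcam_multi below keeps live state in those registers across the call)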
 */
_GLOBAL(loadcam_entry)
	mflr	r5
	LOAD_REG_ADDR_PIC(r4, TLBCAM)
	mtlr	r5
	mulli	r5,r3,TLBCAM_SIZE
	add	r3,r5,r4
	lwz	r4,TLBCAM_MAS0(r3)
	mtspr	SPRN_MAS0,r4
	lwz	r4,TLBCAM_MAS1(r3)
	mtspr	SPRN_MAS1,r4
	PPC_LL	r4,TLBCAM_MAS2(r3)
	mtspr	SPRN_MAS2,r4
	lwz	r4,TLBCAM_MAS3(r3)
	mtspr	SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
	lwz	r4,TLBCAM_MAS7(r3)
	mtspr	SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
	isync
	tlbwe
	isync
	blr

/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry (0 means no switch to AS1)
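 *
 * Rough flow: if we are not already in AS=1 and a temporary entry slot
 * was given, write a TS=1 copy of the TLB entry that maps this code,
 * switch MSR[IS]/MSR[DS], call loadcam_entry for each index, then
 * switch back to AS=0 and invalidate the temporary entry.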
 */
_GLOBAL(loadcam_multi)
	mflr	r8
	/* Don't switch to AS=1 if already there */
	mfmsr	r11
	andi.	r11,r11,MSR_IS
	bne	10f
	mr.	r12, r5
	beq	10f

	/*
	 * Set up temporary TLB entry that is the same as what we're
	 * running from, but in AS=1.
	 */
	bcl	20,31,$+4
1:	mflr	r6
	tlbsx	0,r8
	mfspr	r6,SPRN_MAS1
	ori	r6,r6,MAS1_TS
	mtspr	SPRN_MAS1,r6
	mfspr	r6,SPRN_MAS0
	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	mr	r7,r5
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

	/* Switch to AS=1 */
	mfmsr	r6
	ori	r6,r6,MSR_IS|MSR_DS
	mtmsr	r6
	isync

10:
	mr	r9,r3
	add	r10,r3,r4
2:	bl	loadcam_entry
	addi	r9,r9,1
	cmpw	r9,r10
	mr	r3,r9
	blt	2b

	/* Don't return to AS=0 if we were in AS=1 at function start */
	andi.	r11,r11,MSR_IS
	bne	3f
	cmpwi	r12, 0
	beq	3f

	/* Return to AS=0 and clear the temporary entry */
	mfmsr	r6
	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
	mtmsr	r6
	isync

	li	r6,0
	mtspr	SPRN_MAS1,r6
	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	oris	r6,r6,MAS0_TLBSEL(1)@h
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

3:
	mtlr	r8
	blr
#endif