xref: /linux/arch/powerpc/kernel/vdso/cacheflush.S (revision 4359a011e259a4608afc7fb3635370c9d4ba5943)
1/* SPDX-License-Identifier: GPL-2.0-or-later */
2/*
3 * vDSO provided cache flush routines
4 *
5 * Copyright (C) 2004 Benjamin Herrenschmidt (benh@kernel.crashing.org),
6 *                    IBM Corp.
7 */
8#include <asm/processor.h>
9#include <asm/ppc_asm.h>
10#include <asm/vdso.h>
11#include <asm/vdso_datapage.h>
12#include <asm/asm-offsets.h>
13#include <asm/cache.h>
14
15	.text
16
17/*
18 * Default "generic" version of __kernel_sync_dicache.
19 *
20 * void __kernel_sync_dicache(unsigned long start, unsigned long end)
21 *
22 * Flushes the data cache & invalidate the instruction cache for the
23 * provided range [start, end[
24 */
25V_FUNCTION_BEGIN(__kernel_sync_dicache)
26  .cfi_startproc
/*
 * Register roles:
 *   r3 = start, r4 = end (user-supplied range)
 *   r5 = cache block size - 1 (alignment mask)
 *   r6 = current block-aligned address cursor
 *   r7 = PPC64: cache block size (loop stride); PPC32: saved copy of the
 *        aligned start, reused as the icbi loop cursor
 *   r8 = number of cache blocks to process
 *   r9 = PPC64 only: log2(cache block size)
 *   r10 = PPC64 only: vDSO datapage base (holds runtime cache geometry)
 *
 * On CPUs flagged CPU_FTR_COHERENT_ICACHE the feature-section patching
 * turns the branch below into a jump to the minimal path at 3:.
 */
27BEGIN_FTR_SECTION
28	b	3f
29END_FTR_SECTION_IFSET(CPU_FTR_COHERENT_ICACHE)
#ifdef CONFIG_PPC64
	/*
	 * Fetch the datapage pointer; the mflr/mtlr pair preserves the
	 * caller's LR because get_datapage clobbers it (it uses a
	 * branch-and-link to locate the datapage PC-relatively).
	 */
31	mflr	r12
32  .cfi_register lr,r12
33	get_datapage	r10
34	mtlr	r12
35  .cfi_restore	lr
36#endif
37
#ifdef CONFIG_PPC64
	/* PPC64: D-cache block size is read from the datapage at runtime. */
39	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)
40	addi	r5,r7,-1
41#else
	/* PPC32: block size is a compile-time constant. */
42	li	r5, L1_CACHE_BYTES - 1
43#endif
44	andc	r6,r3,r5		/* round low to line bdy */
45	subf	r8,r6,r4		/* compute length */
46	add	r8,r8,r5		/* ensure we get enough */
#ifdef CONFIG_PPC64
48	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
49	PPC_SRL.	r8,r8,r9		/* compute line count */
50#else
51	srwi.	r8, r8, L1_CACHE_SHIFT
	/* Save the aligned start for the icbi pass further down. */
52	mr	r7, r6
53#endif
	/* Clear cr0.SO so callers using the syscall error convention
	 * (SO set => error) see success. */
54	crclr	cr0*4+so
55	beqlr				/* nothing to do? */
56	mtctr	r8
	/* Pass 1: write every dirty D-cache block in the range to memory. */
571:	dcbst	0,r6
#ifdef CONFIG_PPC64
59	add	r6,r6,r7
60#else
61	addi	r6, r6, L1_CACHE_BYTES
62#endif
63	bdnz	1b
	/* Wait until all dcbst stores have reached memory before
	 * invalidating the I-cache. */
64	sync
65
66/* Now invalidate the instruction cache */
67
#ifdef CONFIG_PPC64
	/*
	 * PPC64: the I-cache block size may differ from the D-cache's, so
	 * redo the alignment and line-count computation with the I-cache
	 * geometry from the datapage.
	 */
69	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)
70	addi	r5,r7,-1
71	andc	r6,r3,r5		/* round low to line bdy */
72	subf	r8,r6,r4		/* compute length */
73	add	r8,r8,r5
74	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
75	PPC_SRL.	r8,r8,r9		/* compute line count */
76	crclr	cr0*4+so
77	beqlr				/* nothing to do? */
78#endif
	/* PPC32 reuses the D-cache line count still in r8. */
79	mtctr	r8
#ifdef CONFIG_PPC64
	/* Pass 2: invalidate each I-cache block; r6 was recomputed above. */
812:	icbi	0,r6
82	add	r6,r6,r7
83#else
	/* PPC32: walk from the aligned start saved in r7 earlier. */
842:	icbi	0, r7
85	addi	r7, r7, L1_CACHE_BYTES
86#endif
87	bdnz	2b
	/* Discard any instructions already prefetched from stale lines. */
88	isync
89	li	r3,0
90	blr
	/*
	 * Fast path for CPUs with a coherent I-cache: no per-line loop is
	 * needed, only the architected sync/icbi/isync sequence (the icbi
	 * target is immaterial here — r1/stack is just a valid address).
	 */
913:
92	crclr	cr0*4+so
93	sync
94	icbi	0,r1
95	isync
96	li	r3,0
97	blr
98  .cfi_endproc
99V_FUNCTION_END(__kernel_sync_dicache)
100