/* xref: /linux/arch/arm64/mm/cache.S (revision 5ea5880764cbb164afb17a62e76ca75dc371409d) */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Cache maintenance
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/errno.h>
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>
#include <asm/asm-uaccess.h>
/*
 *	caches_clean_inval_pou_macro(start,end) [fixup]
 *
 *	Ensure that the I and D caches are coherent within specified region.
 *	This is typically used when code has been written to a memory region,
 *	and will be executed.
 *
 *	- start   - virtual start address of region (x0)
 *	- end     - virtual end address of region (x1)
 *	- fixup   - optional label to branch to on user fault
 *
 *	Clobbers: x2 - x5 (via dcache_by_line_op / invalidate_icache_by_line)
 */
.macro	caches_clean_inval_pou_macro, fixup
alternative_if ARM64_HAS_CACHE_IDC
	dsb     ishst				// IDC: D-cache clean to PoU not
	b       .Ldc_skip_\@			// required; just order the stores
alternative_else_nop_endif
	mov     x2, x0				// copy args so x0/x1 survive for
	mov     x3, x1				// the I-cache pass below
	dcache_by_line_op cvau, ish, x2, x3, x4, x5, \fixup
.Ldc_skip_\@:
alternative_if ARM64_HAS_CACHE_DIC
	isb					// DIC: I-cache invalidation not
	b	.Lic_skip_\@			// required; just synchronize
alternative_else_nop_endif
	invalidate_icache_by_line x0, x1, x2, x3, \fixup
.Lic_skip_\@:
.endm

/*
 *	caches_clean_inval_pou(start,end)
 *
 *	Ensure that the I and D caches are coherent within specified region.
 *	This is typically used when code has been written to a memory region,
 *	and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(caches_clean_inval_pou)
	caches_clean_inval_pou_macro		// no fixup: kernel addresses are
	ret					// not expected to fault
SYM_FUNC_END(caches_clean_inval_pou)
SYM_FUNC_ALIAS(__pi_caches_clean_inval_pou, caches_clean_inval_pou)

/*
 *	caches_clean_inval_user_pou(start,end)
 *
 *	Ensure that the I and D caches are coherent within specified region.
 *	This is typically used when code has been written to a memory region,
 *	and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 *
 *	Returns 0 in x0 on success, or -EFAULT if a fault was taken on a
 *	user address while performing the maintenance.
 */
SYM_FUNC_START(caches_clean_inval_user_pou)
	uaccess_ttbr0_enable x2, x3, x4		// make user TTBR0 accessible

	caches_clean_inval_pou_macro 2f		// user faults branch to 2: below
	mov	x0, xzr				// success
1:
	uaccess_ttbr0_disable x1, x2
	ret
2:
	mov	x0, #-EFAULT			// fault during maintenance
	b	1b				// share the disable/return path
SYM_FUNC_END(caches_clean_inval_user_pou)

/*
 *	icache_inval_pou(start,end)
 *
 *	Ensure that the I cache is invalid within specified region.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(icache_inval_pou)
alternative_if ARM64_HAS_CACHE_DIC
	isb					// DIC: I-cache invalidation not
	ret					// required; just synchronize
alternative_else_nop_endif

	invalidate_icache_by_line x0, x1, x2, x3
	ret
SYM_FUNC_END(icache_inval_pou)

/*
 *	dcache_clean_inval_poc(start, end)
 *
 *	Ensure that any D-cache lines for the interval [start, end)
 *	are cleaned and invalidated to the PoC.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_inval_poc)
	dcache_by_line_op civac, sy, x0, x1, x2, x3	// DC CIVAC each line
	ret
SYM_FUNC_END(__pi_dcache_clean_inval_poc)
SYM_FUNC_ALIAS(dcache_clean_inval_poc, __pi_dcache_clean_inval_poc)

/*
 *	dcache_clean_pou(start, end)
 *
 *	Ensure that any D-cache lines for the interval [start, end)
 *	are cleaned to the PoU.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(dcache_clean_pou)
alternative_if ARM64_HAS_CACHE_IDC
	dsb	ishst				// IDC: clean to PoU not required;
	ret					// just order the stores
alternative_else_nop_endif
	dcache_by_line_op cvau, ish, x0, x1, x2, x3
	ret
SYM_FUNC_END(dcache_clean_pou)

/*
 *	__dcache_inval_poc_nosync
 *
 *	Issue the D-cache invalidation instructions for [x0, x1) without a
 *	trailing barrier. Partial lines at either end of the interval are
 *	cleaned & invalidated (DC CIVAC) instead of just invalidated, so that
 *	unrelated data sharing those lines is not lost; fully-covered lines
 *	are simply invalidated (DC IVAC).
 *
 *	Clobbers: x0 - x3
 */
.macro __dcache_inval_poc_nosync
	dcache_line_size x2, x3			// x2 = line size (loop stride)
	sub	x3, x2, #1			// x3 = line offset mask
	tst	x1, x3				// end cache line aligned?
	bic	x1, x1, x3
	b.eq	1f
	dc	civac, x1			// clean & invalidate D / U line
1:	tst	x0, x3				// start cache line aligned?
	bic	x0, x0, x3
	b.eq	2f
	dc	civac, x0			// clean & invalidate D / U line
	b	3f				// skip the plain invalidate once
2:	dc	ivac, x0			// invalidate D / U line
3:	add	x0, x0, x2
	cmp	x0, x1
	b.lo	2b
.endm

/*
 *	dcache_inval_poc(start, end)
 *
 *	Ensure that any D-cache lines for the interval [start, end)
 *	are invalidated. Any partial lines at the ends of the interval are
 *	also cleaned to PoC to prevent data loss.
 *
 *	- start   - kernel start address of region
 *	- end     - kernel end address of region
 */
SYM_FUNC_START(__pi_dcache_inval_poc)
	__dcache_inval_poc_nosync
	dsb	sy				// complete the maintenance before
	ret					// returning
SYM_FUNC_END(__pi_dcache_inval_poc)
SYM_FUNC_ALIAS(dcache_inval_poc, __pi_dcache_inval_poc)

/*
 *	dcache_inval_poc_nosync(start, end)
 *
 *	Issue the instructions of D-cache lines for the interval [start, end)
 *	for invalidation. Not necessarily completed to PoC until an explicit
 *	dsb sy is issued later by the caller.
 *
 *	- start   - kernel start address of region
 *	- end     - kernel end address of region
 */
SYM_FUNC_START(__pi_dcache_inval_poc_nosync)
	__dcache_inval_poc_nosync		// same loop, no trailing barrier
	ret
SYM_FUNC_END(__pi_dcache_inval_poc_nosync)
SYM_FUNC_ALIAS(dcache_inval_poc_nosync, __pi_dcache_inval_poc_nosync)

/*
 *	dcache_clean_poc(start, end)
 *
 *	Ensure that any D-cache lines for the interval [start, end)
 *	are cleaned to the PoC.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_poc)
	dcache_by_line_op cvac, sy, x0, x1, x2, x3	// DC CVAC each line
	ret
SYM_FUNC_END(__pi_dcache_clean_poc)
SYM_FUNC_ALIAS(dcache_clean_poc, __pi_dcache_clean_poc)

/*
 *	dcache_clean_poc_nosync(start, end)
 *
 *	Issue the instructions of D-cache lines for the interval [start, end).
 *	Not necessarily cleaned to the PoC until an explicit dsb sy is issued
 *	later by the caller.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_poc_nosync)
	dcache_by_line_op_nosync cvac, x0, x1, x2, x3
	ret
SYM_FUNC_END(__pi_dcache_clean_poc_nosync)
SYM_FUNC_ALIAS(dcache_clean_poc_nosync, __pi_dcache_clean_poc_nosync)

/*
 *	dcache_clean_pop(start, end)
 *
 *	Ensure that any D-cache lines for the interval [start, end)
 *	are cleaned to the PoP.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_pop)
	alternative_if_not ARM64_HAS_DCPOP
	b	dcache_clean_poc		// no DC CVAP: fall back to a
	alternative_else_nop_endif		// clean to PoC instead
	dcache_by_line_op cvap, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END(__pi_dcache_clean_pop)
SYM_FUNC_ALIAS(dcache_clean_pop, __pi_dcache_clean_pop)