/* xref: /linux/arch/xtensa/include/asm/asmmacro.h (revision 34f7c6e7d4396090692a09789db231e12cb4762b) */
/*
 * include/asm-xtensa/asmmacro.h
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2005 Tensilica Inc.
 */
10 
11 #ifndef _XTENSA_ASMMACRO_H
12 #define _XTENSA_ASMMACRO_H
13 
14 #include <asm/core.h>
15 
/*
 * Some little helpers for loops. Use zero-overhead-loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *         ar	register initialized with the start address
 *	   at	scratch register used by macro
 *	   size	size immediate value
 *	   incr	increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *	   ar	register initialized with the start address
 *	   as	register initialized with the size
 *	   at	scratch register used by macro
 *	   incr_log2	increment [in log2]
 *	   mask_log2	mask [in log2]
 *	   cond		true condition (used in loop'cond')
 *	   ncond	false condition (used in b'ncond')
 *
 * __loop  as
 *	   restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *	   ar	start address (modified)
 *	   as	scratch register used by __loops/__loopi macros or
 *		end address used by __loopt macro
 *	   incr	increment
 */
44 
/*
 * loop for given size as immediate
 */

	.macro	__loopi ar, at, size, incr

#if XCHAL_HAVE_LOOPS
		/* \at = ceil(size / incr) iterations; start zero-overhead loop
		 * whose body ends at the 99: label (emitted by __endl/__endla) */
		movi	\at, ((\size + \incr - 1) / (\incr))
		loop	\at, 99f
#else
		/* no HW loops: \at = end address; 98: is the backward branch
		 * target used by the matching __endl/__endla
		 * NOTE(review): addi has a limited immediate range, so \size
		 * is assumed small here — confirm against callers */
		addi	\at, \ar, \size
		98:
#endif

	.endm
60 
/*
 * loop for given size in register
 */

	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond

#if XCHAL_HAVE_LOOPS
		/* convert byte size in \as into an iteration count in \at,
		 * rounding up when the per-iteration increment is > 1 byte */
		.ifgt \incr_log2 - 1
			addi	\at, \as, (1 << \incr_log2) - 1
			/* .ifnc against empty: was a mask_log2 arg supplied? */
			.ifnc \mask_log2,
				extui	\at, \at, \incr_log2, \mask_log2
			.else
				srli	\at, \at, \incr_log2
			.endif
		.endif
		/* loop / loopnez / loopgtz depending on \cond */
		loop\cond	\at, 99f
#else
		/* no HW loops: derive the iteration count (only needed when a
		 * mask or an early-exit condition was requested) */
		.ifnc \mask_log2,
			extui	\at, \as, \incr_log2, \mask_log2
		.else
			.ifnc \ncond,
				srli	\at, \as, \incr_log2
			.endif
		.endif
		/* skip the loop entirely when the false condition holds */
		.ifnc \ncond,
			b\ncond	\at, 99f

		.endif
		/* \at = end address for the bltu in __endl */
		.ifnc \mask_log2,
			slli	\at, \at, \incr_log2
			add	\at, \ar, \at
		.else
			add	\at, \ar, \as
		.endif
#endif
		98:

	.endm
99 
/*
 * loop from ar to as
 */

	.macro	__loopt	ar, as, at, incr_log2

#if XCHAL_HAVE_LOOPS
		/* \at = (\as - \ar) >> \incr_log2 iterations, rounded up */
		sub	\at, \as, \ar
		.ifgt	\incr_log2 - 1
			addi	\at, \at, (1 << \incr_log2) - 1
			srli	\at, \at, \incr_log2
		.endif
		loop	\at, 99f
#else
		/* \as already holds the end address; 98: marks the loop start
		 * for the backward branch in __endl/__endla */
		98:
#endif

	.endm
118 
/*
 * restart loop. registers must be unchanged
 */

	.macro	__loop	as

#if XCHAL_HAVE_LOOPS
		/* \as must still hold the count set up by __loops/__loopi */
		loop	\as, 99f
#else
		98:
#endif

	.endm
132 
/*
 * end of loop with no increment of the address.
 */

	.macro	__endl	ar, as
#if !XCHAL_HAVE_LOOPS
		/* branch back while current address \ar is below end address \as */
		bltu	\ar, \as, 98b
#endif
		/* 99: loop exit; HW-loop end target for the loop instruction */
		99:
	.endm
143 
/*
 * end of loop with increment of the address.
 */

	.macro	__endla	ar, as, incr
		/* advance the address register, then close the loop */
		addi	\ar, \ar, \incr
		__endl	\ar \as
	.endm
152 
/* Load or store instructions that may cause exceptions use the EX macro. */

/*
 * Record an exception-table entry pairing the address of the following
 * instruction (local label 97) with its fixup handler, then continue
 * emitting code in the previous section.
 */
#define EX(handler)				\
	.section __ex_table, "a";		\
	.word	97f, handler;			\
	.previous				\
97:
160 
161 
/*
 * Extract unaligned word that is split between two registers w0 and w1
 * into r regardless of machine endianness. SAR must be loaded with the
 * starting bit of the word (see __ssa8).
 */

	.macro __src_b	r, w0, w1
#ifdef __XTENSA_EB__
		/* big-endian: w0 holds the more-significant part */
		src	\r, \w0, \w1
#else
		/* little-endian: operand order is swapped */
		src	\r, \w1, \w0
#endif
	.endm
175 
/*
 * Load 2 lowest address bits of r into SAR for __src_b to extract unaligned
 * word starting at r from two registers loaded from consecutive aligned
 * addresses covering r regardless of machine endianness.
 *
 *      r   0   1   2   3
 * LE SAR   0   8  16  24
 * BE SAR  32  24  16   8
 */

	.macro __ssa8	r
#ifdef __XTENSA_EB__
		/* set shift amount for big-endian extraction */
		ssa8b	\r
#else
		/* set shift amount for little-endian extraction */
		ssa8l	\r
#endif
	.endm
193 
	/*
	 * do_nsau: \cnt = normalization shift amount of \val (nsau semantics).
	 * \tmp and \a are scratch registers, clobbered only on the software
	 * fallback path.
	 */
	.macro	do_nsau cnt, val, tmp, a
#if XCHAL_HAVE_NSA
	nsau	\cnt, \val
#else
	/* software fallback: narrow down to the highest non-zero byte
	 * (adding 16 / 8 to \cnt per skipped zero half/byte), then finish
	 * with an 8-bit table lookup. __nsau_data is defined elsewhere in
	 * the kernel — presumably a 256-entry per-byte table; confirm. */
	mov	\a, \val
	movi	\cnt, 0
	extui	\tmp, \a, 16, 16	/* upper halfword zero? */
	bnez	\tmp, 0f
	movi	\cnt, 16
	slli	\a, \a, 16
0:
	extui	\tmp, \a, 24, 8		/* top byte zero? */
	bnez	\tmp, 1f
	addi	\cnt, \cnt, 8
	slli	\a, \a, 8
1:
	movi	\tmp, __nsau_data
	extui	\a, \a, 24, 8		/* index table by the top byte */
	add	\tmp, \tmp, \a
	l8ui	\tmp, \tmp, 0
	add	\cnt, \cnt, \tmp
#endif /* !XCHAL_HAVE_NSA */
	.endm
217 
	/*
	 * do_abs: \dst = absolute value of \src.
	 * \tmp is clobbered only when the abs instruction is unavailable.
	 */
	.macro	do_abs dst, src, tmp
#if XCHAL_HAVE_ABS
	abs	\dst, \src
#else
	neg	\tmp, \src		/* \tmp = -\src */
	movgez	\tmp, \src, \src	/* if \src >= 0: \tmp = \src */
	mov	\dst, \tmp
#endif
	.endm
227 
#if defined(__XTENSA_WINDOWED_ABI__)

/* Assembly instructions for windowed kernel ABI. */
#define KABI_W
/* Assembly instructions for call0 kernel ABI (will be ignored). */
#define KABI_C0 #

/* Extra bytes added on top of every abi_entry frame (see abi_entry below).
 * NOTE(review): presumably the base save area of the windowed ABI — confirm. */
#define XTENSA_FRAME_SIZE_RESERVE	16
/* Stack bytes reserved for register-window spills (0 for call0 below). */
#define XTENSA_SPILL_STACK_RESERVE	32

/* Open a stack frame: round frame_size up to XTENSA_STACK_ALIGNMENT and
 * add the reserved area on top; 'entry' rotates the register window. */
#define abi_entry(frame_size) \
	entry sp, (XTENSA_FRAME_SIZE_RESERVE + \
		   (((frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		    -XTENSA_STACK_ALIGNMENT))
#define abi_entry_default abi_entry(0)

/* retw unwinds the register window; no explicit sp adjustment is emitted. */
#define abi_ret(frame_size) retw
#define abi_ret_default retw

	/* direct call */
#define abi_call call4
	/* indirect call */
#define abi_callx callx4
	/* outgoing call argument registers */
#define abi_arg0 a6
#define abi_arg1 a7
#define abi_arg2 a8
#define abi_arg3 a9
#define abi_arg4 a10
#define abi_arg5 a11
	/* return value */
#define abi_rv a6
	/* registers preserved across call */
#define abi_saved0 a2
#define abi_saved1 a3

	/* none of the above */
#define abi_tmp0 a4
#define abi_tmp1 a5

#elif defined(__XTENSA_CALL0_ABI__)

/* Assembly instructions for windowed kernel ABI (will be ignored). */
#define KABI_W #
/* Assembly instructions for call0 kernel ABI. */
#define KABI_C0

/* call0 ABI has no register windows, so nothing to spill. */
#define XTENSA_SPILL_STACK_RESERVE	0

#define abi_entry(frame_size) __abi_entry (frame_size)

	/* Allocate an alignment-rounded stack frame; emits nothing when
	 * frame_size is 0 (compare abi_entry_default below). */
	.macro	__abi_entry frame_size
	.ifgt \frame_size
	addi sp, sp, -(((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		       -XTENSA_STACK_ALIGNMENT)
	.endif
	.endm

#define abi_entry_default

#define abi_ret(frame_size) __abi_ret (frame_size)

	/* Free the frame allocated by __abi_entry (same rounding), then
	 * return with a plain ret. */
	.macro	__abi_ret frame_size
	.ifgt \frame_size
	addi sp, sp, (((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		      -XTENSA_STACK_ALIGNMENT)
	.endif
	ret
	.endm

#define abi_ret_default ret

	/* direct call */
#define abi_call call0
	/* indirect call */
#define abi_callx callx0
	/* outgoing call argument registers */
#define abi_arg0 a2
#define abi_arg1 a3
#define abi_arg2 a4
#define abi_arg3 a5
#define abi_arg4 a6
#define abi_arg5 a7
	/* return value */
#define abi_rv a2
	/* registers preserved across call */
#define abi_saved0 a12
#define abi_saved1 a13

	/* none of the above */
#define abi_tmp0 a8
#define abi_tmp1 a9

#else
#error Unsupported Xtensa ABI
#endif
324 
/* User-ABI line prefixes, analogous to KABI_W/KABI_C0 above: the prefix of
 * the non-selected ABI expands to '#', commenting the line out. */
#if defined(USER_SUPPORT_WINDOWED)
/* Assembly instructions for windowed user ABI. */
#define UABI_W
/* Assembly instructions for call0 user ABI (will be ignored). */
#define UABI_C0 #
#else
/* Assembly instructions for windowed user ABI (will be ignored). */
#define UABI_W #
/* Assembly instructions for call0 user ABI. */
#define UABI_C0
#endif
336 
/* Place exception-handler code in its own section ("ax": alloc + exec). */
#define __XTENSA_HANDLER	.section ".exception.text", "ax"
338 
339 #endif /* _XTENSA_ASMMACRO_H */
340