/*
 * arch/xtensa/kernel/align.S
 *
 * Handle unalignment and load/store exceptions.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2001 - 2005 Tensilica, Inc.
 * Copyright (C) 2014 Cadence Design Systems Inc.
 *
 * Rewritten by Chris Zankel <chris@zankel.net>
 *
 * Based on work from Joe Taylor <joe@tensilica.com, joetylr@yahoo.com>
 * and Marc Gauthier <marc@tensilica.com, marc@alumni.uwaterloo.ca>
 */

#include <linux/linkage.h>
#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/asmmacro.h>
#include <asm/processor.h>

#if XCHAL_UNALIGNED_LOAD_EXCEPTION || defined CONFIG_XTENSA_LOAD_STORE
#define LOAD_EXCEPTION_HANDLER
#endif

#if XCHAL_UNALIGNED_STORE_EXCEPTION || defined CONFIG_XTENSA_LOAD_STORE
#define STORE_EXCEPTION_HANDLER
#endif

#if defined LOAD_EXCEPTION_HANDLER || defined STORE_EXCEPTION_HANDLER
#define ANY_EXCEPTION_HANDLER
#endif

#if XCHAL_HAVE_WINDOWED && defined CONFIG_MMU
#define UNALIGNED_USER_EXCEPTION
#endif

/* Big and little endian 16-bit values are located in
 * different halves of a register.  HWORD_START helps to
 * abstract the notion of extracting a 16-bit value from a
 * register.
 * We also have to define new shifting instructions because
 * lsb and msb are on 'opposite' ends in a register for
 * different endian machines.
 *
 * Assume a memory region in ascending address:
 *	0 1 2 3|4 5 6 7
 *
 * When loading one word into a register, the content of that register is:
 *  LE	3 2 1 0, 7 6 5 4
 *  BE	0 1 2 3, 4 5 6 7
 *
 * Masking the bits of the higher/lower address means:
 *  LE	X X 0 0, 0 0 X X
 *  BE	0 0 X X, X X 0 0
 *
 * Shifting to higher/lower addresses, means:
 *  LE	shift left / shift right
 *  BE	shift right / shift left
 *
 * Extracting 16 bits from a 32 bit reg. value to higher/lower address means:
 *  LE	mask 0 0 X X / shift left
 *  BE	shift left / mask 0 0 X X
 */
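
/*
 * Illustrative C sketch of the technique used throughout this file
 * (a paraphrase for the reader, not part of the build): a value that
 * crosses a word boundary is assembled from the two aligned words
 * around it, with __ssa8/__src_b doing the shift-and-merge in
 * hardware:
 *
 *	u32 lo = *(u32 *)(addr & ~3);
 *	u32 hi = *(u32 *)((addr & ~3) + 4);
 *	u32 sh = 8 * (addr & 3);
 *	// LE: value = (lo >> sh) | (hi << (32 - sh));
 *	// BE: value = (lo << sh) | (hi >> (32 - sh));
 *	// (sh == 0 needs special-casing in C; the SRC instruction
 *	// handles it directly)
 */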

#if XCHAL_HAVE_BE

#define HWORD_START	16
#define	INSN_OP0	28
#define	INSN_T		24
#define	INSN_OP1	16

.macro __ssa8r	r;		ssa8l	\r;		.endm
.macro __sh	r, s;		srl	\r, \s;		.endm
.macro __sl	r, s;		sll	\r, \s;		.endm
.macro __exth	r, s;		extui	\r, \s, 0, 16;	.endm
.macro __extl	r, s;		slli	\r, \s, 16;	.endm

#else

#define HWORD_START	0
#define	INSN_OP0	0
#define	INSN_T		4
#define	INSN_OP1	12

.macro __ssa8r	r;		ssa8b	\r;		.endm
.macro __sh	r, s;		sll	\r, \s;		.endm
.macro __sl	r, s;		srl	\r, \s;		.endm
.macro __exth	r, s;		slli	\r, \s, 16;	.endm
.macro __extl	r, s;		extui	\r, \s, 0, 16;	.endm

#endif

/*
 *	xxxx xxxx = imm8 field
 *	     yyyy = imm4 field
 *	     ssss = s field
 *	     tttt = t field
 *
 *			 15		    0
 *		          -------------------
 *	L32I.N		  yyyy ssss tttt 1000
 *	S32I.N		  yyyy ssss tttt 1001
 *
 *	       23			    0
 *		-----------------------------
 *	L8UI	xxxx xxxx 0000 ssss tttt 0010
 *	L16UI	xxxx xxxx 0001 ssss tttt 0010
 *	L32I	xxxx xxxx 0010 ssss tttt 0010
 *	XXX	          0011 ssss tttt 0010
 *	XXX	          0100 ssss tttt 0010
 *	S16I	xxxx xxxx 0101 ssss tttt 0010
 *	S32I	xxxx xxxx 0110 ssss tttt 0010
 *	XXX	          0111 ssss tttt 0010
 *	XXX	          1000 ssss tttt 0010
 *	L16SI	xxxx xxxx 1001 ssss tttt 0010
 *	XXX	          1010           0010
 *	**L32AI	xxxx xxxx 1011 ssss tttt 0010 unsupported
 *	XXX	          1100           0010
 *	XXX	          1101           0010
 *	XXX	          1110           0010
 *	**S32RI	xxxx xxxx 1111 ssss tttt 0010 unsupported
 *		-----------------------------
 *                           ^         ^    ^
 *    sub-opcode (NIBBLE_R) -+         |    |
 *       t field (NIBBLE_T) -----------+    |
 *  major opcode (NIBBLE_OP0) --------------+
 */

#define OP0_L32I_N	0x8		/* load immediate narrow */
#define OP0_S32I_N	0x9		/* store immediate narrow */
#define OP0_LSAI	0x2		/* load/store */
#define OP1_SI_MASK	0x4		/* OP1 bit set for stores */
#define OP1_SI_BIT	2		/* OP1 bit number for stores */

#define OP1_L8UI	0x0
#define OP1_L32I	0x2
#define OP1_L16UI	0x1
#define OP1_L16SI	0x9
#define OP1_L32AI	0xb

#define OP1_S32I	0x6
#define OP1_S16I	0x5
#define OP1_S32RI	0xf
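
/*
 * Illustrative decode of the fields above in C (a sketch, not part of
 * the build; it assumes the instruction has already been fetched into
 * a register as done in .Lsave_and_load_instruction below):
 *
 *	op0 = (insn >> INSN_OP0) & 0xf;		// major opcode
 *	t   = (insn >> INSN_T)   & 0xf;		// source/target register
 *	op1 = (insn >> INSN_OP1) & 0xf;		// LSAI sub-opcode
 *	store = (op0 == OP0_LSAI) && (op1 & OP1_SI_MASK);
 */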

/*
 * Entry condition:
 *
 *   a0:	trashed, original value saved on stack (PT_AREG0)
 *   a1:	a1
 *   a2:	new stack pointer, original in DEPC
 *   a3:	a3
 *   depc:	a2, original value saved on stack (PT_DEPC)
 *   excsave_1:	dispatch table
 *
 *   PT_DEPC >= VALID_DOUBLE_EXCEPTION_ADDRESS: double exception, DEPC
 *	     <  VALID_DOUBLE_EXCEPTION_ADDRESS: regular exception
 */

	.literal_position
#ifdef CONFIG_XTENSA_LOAD_STORE
ENTRY(fast_load_store)

	call0	.Lsave_and_load_instruction

	/* Analyze the instruction (load or store?). */

	extui	a0, a4, INSN_OP0, 4	# get insn.op0 nibble

#if XCHAL_HAVE_DENSITY
	_beqi	a0, OP0_L32I_N, 1f	# L32I.N, jump
#endif
	bnei	a0, OP0_LSAI, .Linvalid_instruction
	/* 'store indicator bit' set, jump */
	bbsi.l	a4, OP1_SI_BIT + INSN_OP1, .Linvalid_instruction

1:
	movi	a3, ~3
	and	a3, a3, a8		# align memory address

	__ssa8	a8

#ifdef CONFIG_MMU
	/* l32e can't be used here even when it's available. */
	/* TODO access_ok(a3) could be used here */
	j	.Linvalid_instruction
#endif
	l32i	a5, a3, 0
	l32i	a6, a3, 4
	__src_b	a3, a5, a6		# a3 has the data word

#if XCHAL_HAVE_DENSITY
	addi	a7, a7, 2		# increment PC (assume 16-bit insn)
	_beqi	a0, OP0_L32I_N, .Lload_w	# l32i.n: jump
	addi	a7, a7, 1
#else
	addi	a7, a7, 3
#endif

	extui	a5, a4, INSN_OP1, 4
	_beqi	a5, OP1_L32I, .Lload_w
	bnei	a5, OP1_L8UI, .Lload16
	extui	a3, a3, 0, 8
	j	.Lload_w

ENDPROC(fast_load_store)
#endif

/*
 * Entry condition:
 *
 *   a0:	trashed, original value saved on stack (PT_AREG0)
 *   a1:	a1
 *   a2:	new stack pointer, original in DEPC
 *   a3:	a3
 *   depc:	a2, original value saved on stack (PT_DEPC)
 *   excsave_1:	dispatch table
 *
 *   PT_DEPC >= VALID_DOUBLE_EXCEPTION_ADDRESS: double exception, DEPC
 *	     <  VALID_DOUBLE_EXCEPTION_ADDRESS: regular exception
 */

#ifdef ANY_EXCEPTION_HANDLER
ENTRY(fast_unaligned)

	call0	.Lsave_and_load_instruction

	/* Analyze the instruction (load or store?). */

	extui	a5, a4, INSN_OP0, 4	# get insn.op0 nibble

#if XCHAL_HAVE_DENSITY
	_beqi	a5, OP0_L32I_N, .Lload	# L32I.N, jump
	addi	a6, a5, -OP0_S32I_N
	_beqz	a6, .Lstore		# S32I.N, do a store
#endif
	/* 'store indicator bit' not set, jump */
	_bbci.l	a4, OP1_SI_BIT + INSN_OP1, .Lload

#ifdef STORE_EXCEPTION_HANDLER

	/* Store: Jump to table entry to get the value in the source register. */

.Lstore:movi	a5, .Lstore_table	# table
	extui	a6, a4, INSN_T, 4	# get source register
	addx8	a5, a6, a5
	jx	a5			# jump into table
#endif
#ifdef LOAD_EXCEPTION_HANDLER

	/* Load: Load memory address. */

.Lload: movi	a3, ~3
	and	a3, a3, a8		# align memory address

	__ssa8	a8
#ifdef UNALIGNED_USER_EXCEPTION
	addi	a3, a3, 8
	l32e	a5, a3, -8
	l32e	a6, a3, -4
#else
	l32i	a5, a3, 0
	l32i	a6, a3, 4
#endif
	__src_b	a3, a5, a6		# a3 has the data word

#if XCHAL_HAVE_DENSITY
	addi	a7, a7, 2		# increment PC (assume 16-bit insn)

	extui	a5, a4, INSN_OP0, 4
	_beqi	a5, OP0_L32I_N, .Lload_w	# l32i.n: jump

	addi	a7, a7, 1
#else
	addi	a7, a7, 3
#endif

	extui	a5, a4, INSN_OP1, 4
	_beqi	a5, OP1_L32I, .Lload_w	# l32i: jump
#endif
#ifdef LOAD_EXCEPTION_HANDLER
.Lload16:
	extui	a3, a3, 0, 16		# extract lower 16 bits
	_beqi	a5, OP1_L16UI, .Lload_w
	addi	a5, a5, -OP1_L16SI
	_bnez	a5, .Linvalid_instruction

	/* sign extend value */
#if XCHAL_HAVE_SEXT
	sext	a3, a3, 15
#else
	slli	a3, a3, 16
	srai	a3, a3, 16
#endif

	/* Set target register. */

.Lload_w:
	extui	a4, a4, INSN_T, 4	# extract target register
	movi	a5, .Lload_table
	addx8	a4, a4, a5
	jx	a4			# jump to entry for target register
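
	/*
	 * Each table entry below is padded to 8 bytes (.align 8), so
	 * addx8 (addr = table + 8 * reg) indexes it directly from the
	 * register number; roughly, in C: goto *(load_table + 8 * t);
	 */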

	.align	8
.Lload_table:
	s32i	a3, a2, PT_AREG0;	_j .Lexit;	.align 8
	mov	a1, a3;			_j .Lexit;	.align 8 # fishy??
	s32i	a3, a2, PT_AREG2;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG3;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG4;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG5;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG6;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG7;	_j .Lexit;	.align 8
	s32i	a3, a2, PT_AREG8;	_j .Lexit;	.align 8
	mov	a9, a3		;	_j .Lexit;	.align 8
	mov	a10, a3		;	_j .Lexit;	.align 8
	mov	a11, a3		;	_j .Lexit;	.align 8
	mov	a12, a3		;	_j .Lexit;	.align 8
	mov	a13, a3		;	_j .Lexit;	.align 8
	mov	a14, a3		;	_j .Lexit;	.align 8
	mov	a15, a3		;	_j .Lexit;	.align 8
#endif
#ifdef STORE_EXCEPTION_HANDLER
.Lstore_table:
	l32i	a3, a2, PT_AREG0;	_j .Lstore_w;	.align 8
	mov	a3, a1;			_j .Lstore_w;	.align 8	# fishy??
	l32i	a3, a2, PT_AREG2;	_j .Lstore_w;	.align 8
	l32i	a3, a2, PT_AREG3;	_j .Lstore_w;	.align 8
	l32i	a3, a2, PT_AREG4;	_j .Lstore_w;	.align 8
	l32i	a3, a2, PT_AREG5;	_j .Lstore_w;	.align 8
	l32i	a3, a2, PT_AREG6;	_j .Lstore_w;	.align 8
	l32i	a3, a2, PT_AREG7;	_j .Lstore_w;	.align 8
	l32i	a3, a2, PT_AREG8;	_j .Lstore_w;	.align 8
	mov	a3, a9		;	_j .Lstore_w;	.align 8
	mov	a3, a10		;	_j .Lstore_w;	.align 8
	mov	a3, a11		;	_j .Lstore_w;	.align 8
	mov	a3, a12		;	_j .Lstore_w;	.align 8
	mov	a3, a13		;	_j .Lstore_w;	.align 8
	mov	a3, a14		;	_j .Lstore_w;	.align 8
	mov	a3, a15		;	_j .Lstore_w;	.align 8
#endif

	/* We cannot handle this exception. */

	.extern _kernel_exception
.Linvalid_instruction:

	movi	a4, 0
	rsr	a3, excsave1
	s32i	a4, a3, EXC_TABLE_FIXUP

	/* Restore a4...a8 and SAR, set SP, and jump to default exception. */

	l32i	a0, a2, PT_SAR
	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_AREG4
	wsr	a0, sar
	mov	a1, a2

	rsr	a0, ps
	bbsi.l	a0, PS_UM_BIT, 2f	# jump if user mode

	movi	a0, _kernel_exception
	jx	a0

2:	movi	a0, _user_exception
	jx	a0

#ifdef STORE_EXCEPTION_HANDLER

	# a7: instruction pointer, a4: instruction, a3: value
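
	/*
	 * The store is a masked read-modify-write of the two words
	 * around the target address.  Illustrative C sketch (not part
	 * of the build; lo_mask/hi_mask are the masks built with
	 * __src_b below):
	 *
	 *	u32 *p = (u32 *)(addr & ~3);
	 *	p[0] = (p[0] & lo_mask) | (value shifted toward word 0);
	 *	p[1] = (p[1] & hi_mask) | (value shifted toward word 1);
	 */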
.Lstore_w:
	movi	a6, 0			# mask: ffffffff:00000000

#if XCHAL_HAVE_DENSITY
	addi	a7, a7, 2		# incr. PC, assume 16-bit instruction

	extui	a5, a4, INSN_OP0, 4	# extract OP0
	addi	a5, a5, -OP0_S32I_N
	_beqz	a5, 1f			# s32i.n: jump

	addi	a7, a7, 1		# increment PC, 32-bit instruction
#else
	addi	a7, a7, 3		# increment PC, 32-bit instruction
#endif

	extui	a5, a4, INSN_OP1, 4	# extract OP1
	_beqi	a5, OP1_S32I, 1f	# jump if 32 bit store
	_bnei	a5, OP1_S16I, .Linvalid_instruction

	movi	a5, -1
	__extl	a3, a3			# get 16-bit value
	__exth	a6, a5			# get 16-bit mask ffffffff:ffff0000

	/* Get memory address */

1:
	movi	a4, ~3
	and	a4, a4, a8		# align memory address

	/* Insert value into memory */

	movi	a5, -1			# mask: ffffffff:XXXX0000
#ifdef UNALIGNED_USER_EXCEPTION
	addi	a4, a4, 8
#endif

	__ssa8r	a8
	__src_b	a8, a5, a6		# lo-mask  F..F0..0 (BE) 0..0F..F (LE)
	__src_b	a6, a6, a5		# hi-mask  0..0F..F (BE) F..F0..0 (LE)
#ifdef UNALIGNED_USER_EXCEPTION
	l32e	a5, a4, -8
#else
	l32i	a5, a4, 0		# load lower address word
#endif
	and	a5, a5, a8		# mask
	__sh	a8, a3			# shift value
	or	a5, a5, a8		# or with original value
#ifdef UNALIGNED_USER_EXCEPTION
	s32e	a5, a4, -8
	l32e	a8, a4, -4
#else
	s32i	a5, a4, 0		# store
	l32i	a8, a4, 4		# same for upper address word
#endif
	__sl	a5, a3
	and	a6, a8, a6
	or	a6, a6, a5
#ifdef UNALIGNED_USER_EXCEPTION
	s32e	a6, a4, -4
#else
	s32i	a6, a4, 4
#endif
#endif

.Lexit:
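	/*
	 * If the emulated instruction was the last instruction of a
	 * zero-overhead loop, the hardware loopback must be emulated
	 * as well.  Roughly, in C (a7 plays the role of pc here):
	 *
	 *	if (pc == LEND && LCOUNT != 0) {
	 *		LCOUNT--;
	 *		pc = LBEG;
	 *	}
	 */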
#if XCHAL_HAVE_LOOPS
	rsr	a4, lend		# check if we reached LEND
	bne	a7, a4, 1f
	rsr	a4, lcount		# and LCOUNT != 0
	beqz	a4, 1f
	addi	a4, a4, -1		# decrement LCOUNT and set
	rsr	a7, lbeg		# set PC to LBEGIN
	wsr	a4, lcount
#endif

1:	wsr	a7, epc1		# skip emulated instruction

	/* Update icount if we're single-stepping in userspace. */
	rsr	a4, icountlevel
	beqz	a4, 1f
	bgeui	a4, LOCKLEVEL + 1, 1f
	rsr	a4, icount
	addi	a4, a4, 1
	wsr	a4, icount
1:
	movi	a4, 0
	rsr	a3, excsave1
	s32i	a4, a3, EXC_TABLE_FIXUP

	/* Restore working registers */

	l32i	a0, a2, PT_SAR
	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_AREG4
	l32i	a3, a2, PT_AREG3

	/* restore SAR and return */

	wsr	a0, sar
	l32i	a0, a2, PT_AREG0
	l32i	a2, a2, PT_AREG2
	rfe

	.align	4
.Lsave_and_load_instruction:

	/* Save some working registers */

	s32i	a3, a2, PT_AREG3
	s32i	a4, a2, PT_AREG4
	s32i	a5, a2, PT_AREG5
	s32i	a6, a2, PT_AREG6
	s32i	a7, a2, PT_AREG7
	s32i	a8, a2, PT_AREG8

	rsr	a4, depc
	s32i	a4, a2, PT_AREG2

	rsr	a5, sar
	s32i	a5, a2, PT_SAR

	rsr	a3, excsave1
	movi	a4, fast_unaligned_fixup
	s32i	a4, a3, EXC_TABLE_FIXUP

	rsr	a8, excvaddr		# load unaligned memory address

	/* Now, identify one of the following load/store instructions.
	 *
	 * The only possible danger of a double exception on the
	 * following l32i instructions is kernel code in vmalloc
	 * memory. The processor was just executing at the EPC_1
	 * address, and indeed, already fetched the instruction.  That
	 * guarantees a TLB mapping, which hasn't been replaced by
	 * this unaligned exception handler that uses only static TLB
	 * mappings. However, high-level interrupt handlers might
	 * modify TLB entries, so for the generic case, we register a
	 * TABLE_FIXUP handler here, too.
	 */
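
	/*
	 * The instruction itself may straddle a word boundary, so the
	 * same two-word trick is applied to EPC1.  Illustrative C
	 * sketch (not part of the build):
	 *
	 *	u32 w0 = *(u32 *)(epc1 & ~3);
	 *	u32 w1 = *(u32 *)((epc1 & ~3) + 4);
	 *	insn = 24 bits of w0:w1, starting at byte (epc1 & 3);
	 */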

	/* a3...a8 saved on stack, a2 = SP */

	/* Extract the instruction that caused the unaligned access. */

	rsr	a7, epc1	# load exception address
	movi	a3, ~3
	and	a3, a3, a7	# mask lower bits

	l32i	a4, a3, 0	# load 2 words
	l32i	a5, a3, 4

	__ssa8	a7
	__src_b	a4, a4, a5	# a4 has the instruction

	ret

ENDPROC(fast_unaligned)

ENTRY(fast_unaligned_fixup)

	l32i	a2, a3, EXC_TABLE_DOUBLE_SAVE
	wsr	a3, excsave1

	l32i	a8, a2, PT_AREG8
	l32i	a7, a2, PT_AREG7
	l32i	a6, a2, PT_AREG6
	l32i	a5, a2, PT_AREG5
	l32i	a4, a2, PT_SAR
	l32i	a0, a2, PT_AREG2
	wsr	a4, sar
	wsr	a0, depc			# restore depc and a0
	l32i	a4, a2, PT_AREG4

	rsr	a0, exccause
	s32i	a0, a2, PT_DEPC			# mark as a regular exception

	rsr	a0, ps
	bbsi.l	a0, PS_UM_BIT, 1f		# jump if user mode

	rsr	a0, exccause
	addx4	a0, a0, a3			# find entry in table
	l32i	a0, a0, EXC_TABLE_FAST_KERNEL	# load handler
	l32i	a3, a2, PT_AREG3
	jx	a0
1:
	rsr	a0, exccause
	addx4	a0, a0, a3			# find entry in table
	l32i	a0, a0, EXC_TABLE_FAST_USER	# load handler
	l32i	a3, a2, PT_AREG3
	jx	a0

ENDPROC(fast_unaligned_fixup)
#endif