/*
 *  linux/arch/arm/vfp/vfphw.S
 *
 *  Copyright (C) 2004 ARM Limited.
 *  Written by Deep Blue Solutions Limited.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This code is called from the kernel's undefined instruction trap.
 * r9 holds the return address for successful handling.
 * lr holds the return address for unrecognised instructions.
 * r10 points at the start of the private FP workspace in the thread structure
 * sp points to a struct pt_regs (as defined in include/asm/proc/ptrace.h)
 */
#include <asm/thread_info.h>
#include <asm/vfpmacros.h>
#include "../kernel/entry-header.S"

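@ Debug printing helpers.  Each macro embeds its printk format string
@ inline: "add r0, pc, #4" points r0 at the .asciz string (in ARM state
@ pc reads as the address of the current instruction plus 8, so pc + 4
@ skips the two instructions that follow), and the branch over the
@ string resumes execution at the local label 1.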
	.macro	DBGSTR, str
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro  DBGSTR1, str, arg
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r1, \arg
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro  DBGSTR3, str, arg1, arg2, arg3
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r3, \arg3
	mov	r2, \arg2
	mov	r1, \arg1
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm


@ VFP hardware support entry point.
@
@  r0  = faulted instruction
@  r2  = faulted PC+4
@  r9  = successful return
@  r10 = vfp_state union
@  r11 = CPU number
@  lr  = failure return

ENTRY(vfp_support_entry)
	DBGSTR3	"instr %08x pc %08x state %p", r0, r2, r10

	VFPFMRX	r1, FPEXC		@ Is the VFP enabled?
	DBGSTR1	"fpexc %08x", r1
	tst	r1, #FPEXC_EN
	bne	look_for_VFP_exceptions	@ VFP is already enabled

	DBGSTR1 "enable %x", r10
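	@ last_VFP_context[] holds, per CPU, a pointer to the vfp_state
	@ whose contents currently live in the hardware registers (or
	@ NULL); comparing it with r10 is what makes the context switch
	@ lazy.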
	ldr	r3, last_VFP_context_address
	orr	r1, r1, #FPEXC_EN	@ user FPEXC has the enable bit set
	ldr	r4, [r3, r11, lsl #2]	@ last_VFP_context pointer
	bic	r5, r1, #FPEXC_EX	@ make sure exceptions are disabled
	cmp	r4, r10
	beq	check_for_exception	@ we are returning to the same
					@ process, so the registers are
					@ still there.  In this case, we do
					@ not want to drop a pending exception.

	VFPFMXR	FPEXC, r5		@ enable VFP, disable any pending
					@ exceptions, so we can get at the
					@ rest of it

#ifndef CONFIG_SMP
	@ Save out the current registers to the old thread state
	@ No need for SMP since this is not done lazily

	DBGSTR1	"save old state %p", r4
	cmp	r4, #0
	beq	no_old_VFP_process
	VFPFSTMIA r4, r5		@ save the working registers
	VFPFMRX	r5, FPSCR		@ current status
	tst	r1, #FPEXC_EX		@ is there additional state to save?
	beq	1f
	VFPFMRX	r6, FPINST		@ FPINST (only if FPEXC.EX is set)
	tst	r1, #FPEXC_FP2V		@ is there an FPINST2 to read?
	beq	1f
	VFPFMRX	r8, FPINST2		@ FPINST2 if needed (and present)
1:
	stmia	r4, {r1, r5, r6, r8}	@ save FPEXC, FPSCR, FPINST, FPINST2
					@ (VFPFSTMIA's writeback has left r4
					@ pointing just past the register dump,
					@ at these status words)
#endif

no_old_VFP_process:
	DBGSTR1	"load state %p", r10
	str	r10, [r3, r11, lsl #2]	@ update the last_VFP_context pointer
					@ Load the saved state back into the VFP
	VFPFLDMIA r10, r5		@ reload the working registers while
					@ FPEXC is in a safe state
	ldmia	r10, {r1, r5, r6, r8}	@ load FPEXC, FPSCR, FPINST, FPINST2
	tst	r1, #FPEXC_EX		@ is there additional state to restore?
	beq	1f
	VFPFMXR	FPINST, r6		@ restore FPINST (only if FPEXC.EX is set)
	tst	r1, #FPEXC_FP2V		@ is there an FPINST2 to write?
	beq	1f
	VFPFMXR	FPINST2, r8		@ FPINST2 if needed (and present)
1:
	VFPFMXR	FPSCR, r5		@ restore status

check_for_exception:
	tst	r1, #FPEXC_EX
	bne	process_exception	@ might as well handle the pending
					@ exception before retrying; branch
					@ out before setting an FPEXC that
					@ stops us reading stuff
	VFPFMXR	FPEXC, r1		@ restore FPEXC last
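	@ r2 still holds the fault PC + 4 from the trap entry; wind it
	@ back so the trapped instruction is re-executed on return.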
	sub	r2, r2, #4
	str	r2, [sp, #S_PC]		@ retry the instruction
	mov	pc, r9			@ we think we have handled things


look_for_VFP_exceptions:
	@ Check for synchronous or asynchronous exception
	tst	r1, #FPEXC_EX | FPEXC_DEX
	bne	process_exception
	@ On some implementations of the VFP subarch 1, setting FPSCR.IXE
	@ causes all the CDP instructions to be bounced synchronously without
	@ setting the FPEXC.EX bit
	VFPFMRX	r5, FPSCR
	tst	r5, #FPSCR_IXE
	bne	process_exception

	@ Fall through to hand on to the next handler - the coprocessor
	@ instruction is not recognised by the VFP

	DBGSTR	"not VFP"
	mov	pc, lr

process_exception:
	DBGSTR	"bounce"
	mov	r2, sp			@ nothing stacked - regdump is at TOS
	mov	lr, r9			@ setup for a return to the user code.

	@ Now call the C code to package up the bounce to the support code
	@   r0 holds the trigger instruction
	@   r1 holds the FPEXC value
	@   r2 pointer to register dump
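	@ This is a tail call: when VFP_bounce returns it does so through
	@ lr, which was set to r9 above, i.e. to the successful-return
	@ address supplied by the trap handler.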
	b	VFP_bounce		@ we have handled this - the support
					@ code will raise an exception if
					@ required. If not, the user code will
					@ retry the faulted instruction
ENDPROC(vfp_support_entry)

ENTRY(vfp_save_state)
	@ Save the current VFP state
	@ r0 - save location
	@ r1 - FPEXC
	DBGSTR1	"save VFP state %p", r0
	VFPFSTMIA r0, r2		@ save the working registers
	VFPFMRX	r2, FPSCR		@ current status
	tst	r1, #FPEXC_EX		@ is there additional state to save?
	beq	1f
	VFPFMRX	r3, FPINST		@ FPINST (only if FPEXC.EX is set)
	tst	r1, #FPEXC_FP2V		@ is there an FPINST2 to read?
	beq	1f
	VFPFMRX	r12, FPINST2		@ FPINST2 if needed (and present)
1:
	stmia	r0, {r1, r2, r3, r12}	@ save FPEXC, FPSCR, FPINST, FPINST2
	mov	pc, lr
ENDPROC(vfp_save_state)

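@ Address of the per-CPU last_VFP_context[] array, kept in a literal
@ word here so it can be loaded pc-relative above.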
last_VFP_context_address:
	.word	last_VFP_context

ENTRY(vfp_get_float)
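	@ r0 = single-precision register number (0..31); the value is
	@ returned in r0.  "add pc, pc, r0, lsl #3" indexes the table of
	@ 8-byte (two instruction) entries below: pc reads as the address
	@ of the add plus 8, which is exactly where the table starts, so
	@ the padding nop is skipped.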
	add	pc, pc, r0, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mrc	p10, 0, r0, c\dr, c0, 0	@ fmrs	r0, s0
	mov	pc, lr
	mrc	p10, 0, r0, c\dr, c0, 4	@ fmrs	r0, s1
	mov	pc, lr
	.endr
ENDPROC(vfp_get_float)

ENTRY(vfp_put_float)
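	@ Same computed-branch dispatch as vfp_get_float: r1 selects the
	@ single-precision register, r0 supplies the value to store.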
	add	pc, pc, r1, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mcr	p10, 0, r0, c\dr, c0, 0	@ fmsr	s0, r0
	mov	pc, lr
	mcr	p10, 0, r0, c\dr, c0, 4	@ fmsr	s1, r0
	mov	pc, lr
	.endr
ENDPROC(vfp_put_float)

ENTRY(vfp_get_double)
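	@ As above, but r0 selects a double register and the value is
	@ returned in r0/r1; each table entry is one fmrrd (or mrrc)
	@ plus a return.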
	add	pc, pc, r0, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	fmrrd	r0, r1, d\dr
	mov	pc, lr
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mrrc	p11, 3, r0, r1, c\dr	@ fmrrd	r0, r1, d\dr
	mov	pc, lr
	.endr
#endif

	@ virtual register 16 (or 32 if VFPv3) for compare with zero
	mov	r0, #0
	mov	r1, #0
	mov	pc, lr
ENDPROC(vfp_get_double)

ENTRY(vfp_put_double)
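	@ r0/r1 hold the double value and r2 selects the destination
	@ register; dispatch works as in vfp_get_double.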
	add	pc, pc, r2, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	fmdrr	d\dr, r0, r1
	mov	pc, lr
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mcrr	p11, 3, r0, r1, c\dr	@ fmdrr	d\dr, r0, r1
	mov	pc, lr
	.endr
#endif
ENDPROC(vfp_put_double)