/*
 *  linux/arch/arm/vfp/vfphw.S
 *
 *  Copyright (C) 2004 ARM Limited.
 *  Written by Deep Blue Solutions Limited.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This code is called from the kernel's undefined instruction trap.
 * r9 holds the return address for successful handling.
 * lr holds the return address for unrecognised instructions.
 * r10 points at the start of the private FP workspace in the thread structure
 * sp points to a struct pt_regs (as defined in include/asm/proc/ptrace.h)
 */
#include <asm/thread_info.h>
#include <asm/vfpmacros.h>
#include "../kernel/entry-header.S"

	.macro	DBGSTR, str
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz	"<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro	DBGSTR1, str, arg
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r1, \arg
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz	"<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro	DBGSTR3, str, arg1, arg2, arg3
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r3, \arg3
	mov	r2, \arg2
	mov	r1, \arg1
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz	"<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm


@ VFP hardware support entry point.
@
@  r0  = faulted instruction
@  r2  = faulted PC+4
@  r9  = successful return
@  r10 = vfp_state union
@  r11 = CPU number
@  lr  = failure return

ENTRY(vfp_support_entry)
	DBGSTR3	"instr %08x pc %08x state %p", r0, r2, r10

	VFPFMRX	r1, FPEXC		@ Is the VFP enabled?
	DBGSTR1	"fpexc %08x", r1
	tst	r1, #FPEXC_EN
	bne	look_for_VFP_exceptions	@ VFP is already enabled

	DBGSTR1	"enable %x", r10
	ldr	r3, last_VFP_context_address
	orr	r1, r1, #FPEXC_EN	@ user FPEXC has the enable bit set
	ldr	r4, [r3, r11, lsl #2]	@ last_VFP_context pointer
	bic	r5, r1, #FPEXC_EX	@ make sure exceptions are disabled
	cmp	r4, r10
	beq	check_for_exception	@ we are returning to the same
					@ process, so the registers are
					@ still there.  In this case, we do
					@ not want to drop a pending exception.

	VFPFMXR	FPEXC, r5		@ enable VFP, disable any pending
					@ exceptions, so we can get at the
					@ rest of it

#ifndef CONFIG_SMP
	@ Save out the current registers to the old thread state.
	@ No need for SMP since this is not done lazily.

	DBGSTR1	"save old state %p", r4
	cmp	r4, #0
	beq	no_old_VFP_process
	VFPFSTMIA r4, r5		@ save the working registers
	VFPFMRX	r5, FPSCR		@ current status
#ifndef CONFIG_CPU_FEROCEON
	tst	r1, #FPEXC_EX		@ is there additional state to save?
	beq	1f
	VFPFMRX	r6, FPINST		@ FPINST (only if FPEXC.EX is set)
	tst	r1, #FPEXC_FP2V		@ is there an FPINST2 to read?
	beq	1f
	VFPFMRX	r8, FPINST2		@ FPINST2 if needed (and present)
1:
#endif
	stmia	r4, {r1, r5, r6, r8}	@ save FPEXC, FPSCR, FPINST, FPINST2
					@ and point r4 at the word at the
					@ start of the register dump
#endif

no_old_VFP_process:
	DBGSTR1	"load state %p", r10
	str	r10, [r3, r11, lsl #2]	@ update the last_VFP_context pointer
					@ Load the saved state back into the VFP
	VFPFLDMIA r10, r5		@ reload the working registers while
					@ FPEXC is in a safe state
	ldmia	r10, {r1, r5, r6, r8}	@ load FPEXC, FPSCR, FPINST, FPINST2
#ifndef CONFIG_CPU_FEROCEON
	tst	r1, #FPEXC_EX		@ is there additional state to restore?
	beq	1f
	VFPFMXR	FPINST, r6		@ restore FPINST (only if FPEXC.EX is set)
	tst	r1, #FPEXC_FP2V		@ is there an FPINST2 to write?
	beq	1f
	VFPFMXR	FPINST2, r8		@ FPINST2 if needed (and present)
1:
#endif
	VFPFMXR	FPSCR, r5		@ restore status

check_for_exception:
	tst	r1, #FPEXC_EX
	bne	process_exception	@ might as well handle the pending
					@ exception before retrying; branch
					@ out before setting an FPEXC that
					@ stops us reading stuff
	VFPFMXR	FPEXC, r1		@ restore FPEXC last
	sub	r2, r2, #4
	str	r2, [sp, #S_PC]		@ retry the instruction
#ifdef CONFIG_PREEMPT
	get_thread_info r10
	ldr	r4, [r10, #TI_PREEMPT]	@ get preempt count
	sub	r11, r4, #1		@ decrement it
	str	r11, [r10, #TI_PREEMPT]
#endif
	mov	pc, r9			@ we think we have handled things


look_for_VFP_exceptions:
	@ Check for synchronous or asynchronous exception
	tst	r1, #FPEXC_EX | FPEXC_DEX
	bne	process_exception
	@ On some implementations of the VFP subarch 1, setting FPSCR.IXE
	@ causes all the CDP instructions to be bounced synchronously without
	@ setting the FPEXC.EX bit
	VFPFMRX	r5, FPSCR
	tst	r5, #FPSCR_IXE
	bne	process_exception

	@ Fall through and hand on to the next handler - the coprocessor
	@ instruction was not recognised by the VFP

	DBGSTR	"not VFP"
#ifdef CONFIG_PREEMPT
	get_thread_info r10
	ldr	r4, [r10, #TI_PREEMPT]	@ get preempt count
	sub	r11, r4, #1		@ decrement it
	str	r11, [r10, #TI_PREEMPT]
#endif
	mov	pc, lr

process_exception:
	DBGSTR	"bounce"
	mov	r2, sp			@ nothing stacked - regdump is at TOS
	mov	lr, r9			@ setup for a return to the user code.

	@ Now call the C code to package up the bounce to the support code
	@   r0 holds the trigger instruction
	@   r1 holds the FPEXC value
	@   r2 pointer to register dump
	b	VFP_bounce		@ we have handled this - the support
					@ code will raise an exception if
					@ required.  If not, the user code will
					@ retry the faulted instruction
ENDPROC(vfp_support_entry)
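
@ For reference: given the register assignments documented above for the
@ tail-call to VFP_bounce (r0 = trigger instruction, r1 = FPEXC value,
@ r2 = pointer to the register dump) and the AAPCS argument mapping, the
@ C handler is expected to have the shape sketched below. This is an
@ illustration of the calling contract only, not a definition; the
@ authoritative prototype lives with VFP_bounce itself in the VFP support
@ code (vfpmodule.c in mainline kernels of this vintage).
@
@	void VFP_bounce(u32 trigger, u32 fpexc, struct pt_regs *regs);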
ENTRY(vfp_save_state)
	@ Save the current VFP state
	@ r0 - save location
	@ r1 - FPEXC
	DBGSTR1	"save VFP state %p", r0
	VFPFSTMIA r0, r2		@ save the working registers
	VFPFMRX	r2, FPSCR		@ current status
	tst	r1, #FPEXC_EX		@ is there additional state to save?
	beq	1f
	VFPFMRX	r3, FPINST		@ FPINST (only if FPEXC.EX is set)
	tst	r1, #FPEXC_FP2V		@ is there an FPINST2 to read?
	beq	1f
	VFPFMRX	r12, FPINST2		@ FPINST2 if needed (and present)
1:
	stmia	r0, {r1, r2, r3, r12}	@ save FPEXC, FPSCR, FPINST, FPINST2
	mov	pc, lr
ENDPROC(vfp_save_state)

	.align
last_VFP_context_address:
	.word	last_VFP_context

	.macro	tbl_branch, base, tmp, shift
#ifdef CONFIG_THUMB2_KERNEL
	adr	\tmp, 1f
	add	\tmp, \tmp, \base, lsl \shift
	mov	pc, \tmp
#else
	add	pc, pc, \base, lsl \shift
	mov	r0, r0
#endif
1:
	.endm
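
@ How the dispatch tables below work: every .irp case is padded to
@ exactly 8 bytes (two ARM instructions) by ".org 1b + 8", so the entry
@ for register number N starts N * 8 bytes past the table base, and
@ tbl_branch with a shift of #3 lands on it directly. A worked example,
@ assuming the first vfp_get_float entry sits at address A:
@
@	r0 = 5  ->  target = A + (5 << 3) = A + 40, which holds
@	A + 40:	mrc	p10, 0, r0, c2, c0, 4	@ i.e. fmrs r0, s5
@	A + 44:	mov	pc, lr
@
@ In the ARM (non-Thumb) flavour, "add pc, pc, ..." reads pc as the
@ add's own address + 8, so the "mov r0, r0" filler is exactly the word
@ that this implicit +8 skips over; it is never executed.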
ENTRY(vfp_get_float)
	tbl_branch r0, r3, #3
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	mrc	p10, 0, r0, c\dr, c0, 0	@ fmrs	r0, s0
	mov	pc, lr
	.org	1b + 8
1:	mrc	p10, 0, r0, c\dr, c0, 4	@ fmrs	r0, s1
	mov	pc, lr
	.org	1b + 8
	.endr
ENDPROC(vfp_get_float)

ENTRY(vfp_put_float)
	tbl_branch r1, r3, #3
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	mcr	p10, 0, r0, c\dr, c0, 0	@ fmsr	r0, s0
	mov	pc, lr
	.org	1b + 8
1:	mcr	p10, 0, r0, c\dr, c0, 4	@ fmsr	r0, s1
	mov	pc, lr
	.org	1b + 8
	.endr
ENDPROC(vfp_put_float)

ENTRY(vfp_get_double)
	tbl_branch r0, r3, #3
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	fmrrd	r0, r1, d\dr
	mov	pc, lr
	.org	1b + 8
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	mrrc	p11, 3, r0, r1, c\dr	@ fmrrd	r0, r1, d\dr
	mov	pc, lr
	.org	1b + 8
	.endr
#endif

	@ virtual register 16 (or 32 if VFPv3) for compare with zero
	mov	r0, #0
	mov	r1, #0
	mov	pc, lr
ENDPROC(vfp_get_double)

ENTRY(vfp_put_double)
	tbl_branch r2, r3, #3
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	fmdrr	d\dr, r0, r1
	mov	pc, lr
	.org	1b + 8
	.endr
#ifdef CONFIG_VFPv3
	@ d16 - d31 registers
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
1:	mcrr	p11, 3, r0, r1, c\dr	@ fmdrr	r0, r1, d\dr
	mov	pc, lr
	.org	1b + 8
	.endr
#endif
ENDPROC(vfp_put_double)
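
@ For reference, the C-callable interface implied by the register usage
@ above (register number via tbl_branch's base register, 32-bit values
@ in r0, 64-bit values in the r0/r1 pair). A sketch of the expected
@ declarations only - the kernel keeps the real ones in the local vfp.h:
@
@	u32  vfp_get_float(unsigned int reg);
@	void vfp_put_float(u32 val, unsigned int reg);
@	u64  vfp_get_double(unsigned int reg);
@	void vfp_put_double(u64 val, unsigned int reg);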