/*
 * Copyright (C) 2013 Andrew Turner
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
25*a06918a3SAndrew Turner * 26*a06918a3SAndrew Turner */ 27*a06918a3SAndrew Turner 28*a06918a3SAndrew Turner#include <machine/asm.h> 29*a06918a3SAndrew Turner__FBSDID("$FreeBSD$"); 30*a06918a3SAndrew Turner 31*a06918a3SAndrew Turner#include "aeabi_vfp.h" 32*a06918a3SAndrew Turner 33*a06918a3SAndrew Turner.fpu vfp 34*a06918a3SAndrew Turner.syntax unified 35*a06918a3SAndrew Turner 36*a06918a3SAndrew Turner/* int __aeabi_fcmpeq(float, float) */ 37*a06918a3SAndrew TurnerAEABI_ENTRY(fcmpeq) 38*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 39*a06918a3SAndrew Turner vcmp.f32 s0, s1 40*a06918a3SAndrew Turner vmrs APSR_nzcv, fpscr 41*a06918a3SAndrew Turner movne r0, #0 42*a06918a3SAndrew Turner moveq r0, #1 43*a06918a3SAndrew Turner RET 44*a06918a3SAndrew TurnerAEABI_END(fcmpeq) 45*a06918a3SAndrew Turner 46*a06918a3SAndrew Turner/* int __aeabi_fcmplt(float, float) */ 47*a06918a3SAndrew TurnerAEABI_ENTRY(fcmplt) 48*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 49*a06918a3SAndrew Turner vcmp.f32 s0, s1 50*a06918a3SAndrew Turner vmrs APSR_nzcv, fpscr 51*a06918a3SAndrew Turner movcs r0, #0 52*a06918a3SAndrew Turner movlt r0, #1 53*a06918a3SAndrew Turner RET 54*a06918a3SAndrew TurnerAEABI_END(fcmplt) 55*a06918a3SAndrew Turner 56*a06918a3SAndrew Turner/* int __aeabi_fcmple(float, float) */ 57*a06918a3SAndrew TurnerAEABI_ENTRY(fcmple) 58*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 59*a06918a3SAndrew Turner vcmp.f32 s0, s1 60*a06918a3SAndrew Turner vmrs APSR_nzcv, fpscr 61*a06918a3SAndrew Turner movhi r0, #0 62*a06918a3SAndrew Turner movls r0, #1 63*a06918a3SAndrew Turner RET 64*a06918a3SAndrew TurnerAEABI_END(fcmple) 65*a06918a3SAndrew Turner 66*a06918a3SAndrew Turner/* int __aeabi_fcmpge(float, float) */ 67*a06918a3SAndrew TurnerAEABI_ENTRY(fcmpge) 68*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 69*a06918a3SAndrew Turner vcmp.f32 s0, s1 70*a06918a3SAndrew Turner vmrs APSR_nzcv, fpscr 71*a06918a3SAndrew Turner movlt r0, #0 72*a06918a3SAndrew Turner movge r0, #1 
73*a06918a3SAndrew Turner RET 74*a06918a3SAndrew TurnerAEABI_END(fcmpge) 75*a06918a3SAndrew Turner 76*a06918a3SAndrew Turner/* int __aeabi_fcmpgt(float, float) */ 77*a06918a3SAndrew TurnerAEABI_ENTRY(fcmpgt) 78*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 79*a06918a3SAndrew Turner vcmp.f32 s0, s1 80*a06918a3SAndrew Turner vmrs APSR_nzcv, fpscr 81*a06918a3SAndrew Turner movle r0, #0 82*a06918a3SAndrew Turner movgt r0, #1 83*a06918a3SAndrew Turner RET 84*a06918a3SAndrew TurnerAEABI_END(fcmpgt) 85*a06918a3SAndrew Turner 86*a06918a3SAndrew Turner/* int __aeabi_fcmpun(float, float) */ 87*a06918a3SAndrew TurnerAEABI_ENTRY(fcmpun) 88*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 89*a06918a3SAndrew Turner vcmp.f32 s0, s1 90*a06918a3SAndrew Turner vmrs APSR_nzcv, fpscr 91*a06918a3SAndrew Turner movvc r0, #0 92*a06918a3SAndrew Turner movvs r0, #1 93*a06918a3SAndrew Turner RET 94*a06918a3SAndrew TurnerAEABI_END(fcmpun) 95*a06918a3SAndrew Turner 96*a06918a3SAndrew Turner/* int __aeabi_f2iz(float) */ 97*a06918a3SAndrew TurnerAEABI_ENTRY(f2iz) 98*a06918a3SAndrew Turner LOAD_SREG(s0, r0) 99*a06918a3SAndrew Turner#if 0 100*a06918a3SAndrew Turner /* 101*a06918a3SAndrew Turner * This should be the correct instruction, but binutils incorrectly 102*a06918a3SAndrew Turner * encodes it as the version that used FPSCR to determine the rounding. 103*a06918a3SAndrew Turner * When binutils is fixed we can use this again. 
104*a06918a3SAndrew Turner */ 105*a06918a3SAndrew Turner vcvt.s32.f32 s0, s0 106*a06918a3SAndrew Turner#else 107*a06918a3SAndrew Turner ftosizs s0, s0 108*a06918a3SAndrew Turner#endif 109*a06918a3SAndrew Turner vmov r0, s0 110*a06918a3SAndrew Turner RET 111*a06918a3SAndrew TurnerAEABI_END(f2iz) 112*a06918a3SAndrew Turner 113*a06918a3SAndrew Turner/* double __aeabi_f2d(float) */ 114*a06918a3SAndrew TurnerAEABI_ENTRY(f2d) 115*a06918a3SAndrew Turner LOAD_SREG(s0, r0) 116*a06918a3SAndrew Turner vcvt.f64.f32 d0, s0 117*a06918a3SAndrew Turner UNLOAD_DREG(r0, r1, d0) 118*a06918a3SAndrew Turner RET 119*a06918a3SAndrew TurnerAEABI_END(f2d) 120*a06918a3SAndrew Turner 121*a06918a3SAndrew Turner/* float __aeabi_i2f(int) */ 122*a06918a3SAndrew TurnerAEABI_ENTRY(i2f) 123*a06918a3SAndrew Turner vmov s0, r0 124*a06918a3SAndrew Turner vcvt.f32.s32 s0, s0 125*a06918a3SAndrew Turner UNLOAD_SREG(r0, s0) 126*a06918a3SAndrew Turner RET 127*a06918a3SAndrew TurnerAEABI_END(i2f) 128*a06918a3SAndrew Turner 129*a06918a3SAndrew Turner/* float __aeabi_fadd(float, float) */ 130*a06918a3SAndrew TurnerAEABI_ENTRY(fadd) 131*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 132*a06918a3SAndrew Turner vadd.f32 s0, s0, s1 133*a06918a3SAndrew Turner UNLOAD_SREG(r0, s0) 134*a06918a3SAndrew Turner RET 135*a06918a3SAndrew TurnerAEABI_END(fadd) 136*a06918a3SAndrew Turner 137*a06918a3SAndrew Turner/* float __aeabi_fmul(float, float) */ 138*a06918a3SAndrew TurnerAEABI_ENTRY(fdiv) 139*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 140*a06918a3SAndrew Turner vdiv.f32 s0, s0, s1 141*a06918a3SAndrew Turner UNLOAD_SREG(r0, s0) 142*a06918a3SAndrew Turner RET 143*a06918a3SAndrew TurnerAEABI_END(fdiv) 144*a06918a3SAndrew Turner 145*a06918a3SAndrew Turner/* float __aeabi_fmul(float, float) */ 146*a06918a3SAndrew TurnerAEABI_ENTRY(fmul) 147*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 148*a06918a3SAndrew Turner vmul.f32 s0, s0, s1 149*a06918a3SAndrew Turner UNLOAD_SREG(r0, s0) 150*a06918a3SAndrew Turner RET 
151*a06918a3SAndrew TurnerAEABI_END(fmul) 152*a06918a3SAndrew Turner 153*a06918a3SAndrew Turner/* float __aeabi_fsub(float, float) */ 154*a06918a3SAndrew TurnerAEABI_ENTRY(fsub) 155*a06918a3SAndrew Turner LOAD_SREGS(s0, s1, r0, r1) 156*a06918a3SAndrew Turner vsub.f32 s0, s0, s1 157*a06918a3SAndrew Turner UNLOAD_SREG(r0, s0) 158*a06918a3SAndrew Turner RET 159*a06918a3SAndrew TurnerAEABI_END(fsub) 160*a06918a3SAndrew Turner 161