Home
last modified time | relevance | path

Searched refs: u_off (Results 1 – 10 of 10) sorted by relevance

/freebsd/contrib/arm-optimized-routines/math/aarch64/sve/
H A Dlog2f.c37 special_case (svuint32_t u_off, svfloat32_t p, svfloat32_t r2, svfloat32_t y, in special_case() argument
41 log2f, svreinterpret_f32 (svadd_x (svptrue_b32 (), u_off, data.off)), in special_case()
54 svuint32_t u_off = svreinterpret_u32 (x); in SV_NAME_F1() local
56 u_off = svsub_x (pg, u_off, d->off); in SV_NAME_F1()
57 svbool_t special = svcmpge (pg, svsub_x (pg, u_off, d->lower), Thresh); in SV_NAME_F1()
61 pg, svasr_x (pg, svreinterpret_s32 (u_off), 23)); /* Sign-extend. */ in SV_NAME_F1()
62 svuint32_t u = svand_x (pg, u_off, MantissaMask); in SV_NAME_F1()
81 return special_case (u_off, n, r, y, special); in SV_NAME_F1()
H A Dlogf.c38 special_case (svuint32_t u_off, svfloat32_t p, svfloat32_t r2, svfloat32_t y, in special_case() argument
42 logf, svreinterpret_f32 (svadd_x (svptrue_b32 (), u_off, data.off)), in special_case()
54 svuint32_t u_off = svreinterpret_u32 (x); in SV_NAME_F1() local
56 u_off = svsub_x (pg, u_off, d->off); in SV_NAME_F1()
57 svbool_t cmp = svcmpge (pg, svsub_x (pg, u_off, d->lower), Thresh); in SV_NAME_F1()
61 pg, svasr_x (pg, svreinterpret_s32 (u_off), 23)); /* Sign-extend. */ in SV_NAME_F1()
63 svuint32_t u = svand_x (pg, u_off, Mask); in SV_NAME_F1()
81 return special_case (u_off, p, r2, y, cmp); in SV_NAME_F1()
H A Dlog10f.c40 special_case (svuint32_t u_off, svfloat32_t p, svfloat32_t r2, svfloat32_t y, in special_case() argument
44 log10f, svreinterpret_f32 (svadd_x (svptrue_b32 (), u_off, data.off)), in special_case()
57 svuint32_t u_off = svreinterpret_u32 (x); in SV_NAME_F1() local
59 u_off = svsub_x (pg, u_off, d->off); in SV_NAME_F1()
60 svbool_t special = svcmpge (pg, svsub_x (pg, u_off, d->lower), Thres); in SV_NAME_F1()
64 pg, svasr_x (pg, svreinterpret_s32 (u_off), 23)); /* signextend. */ in SV_NAME_F1()
65 svuint32_t ix = svand_x (pg, u_off, MantissaMask); in SV_NAME_F1()
89 return special_case (u_off, hi, r2, y, special); in SV_NAME_F1()
/freebsd/contrib/arm-optimized-routines/math/aarch64/advsimd/
H A Dlog2f.c42 special_case (float32x4_t n, uint32x4_t u_off, float32x4_t p, float32x4_t r, in special_case() argument
46 return v_call_f32 (log2f, vreinterpretq_f32_u32 (vaddq_u32 (u_off, d->off)), in special_case()
62 uint32x4_t u_off = vreinterpretq_u32_f32 (x); in V_NAME_F1() local
65 u_off = vsubq_u32 (u_off, d->off); in V_NAME_F1()
67 vshrq_n_s32 (vreinterpretq_s32_u32 (u_off), 23)); /* signextend. */ in V_NAME_F1()
69 uint16x4_t special = vcge_u16 (vsubhn_u32 (u_off, d->offset_lower_bound), in V_NAME_F1()
72 uint32x4_t u = vaddq_u32 (vandq_u32 (u_off, d->mantissa_mask), d->off); in V_NAME_F1()
89 return special_case (n, u_off, p, r, special, d); in V_NAME_F1()
H A Dlog10f.c42 special_case (float32x4_t y, uint32x4_t u_off, float32x4_t p, float32x4_t r2, in special_case() argument
46 return v_call_f32 (log10f, vreinterpretq_f32_u32 (vaddq_u32 (u_off, d->off)), in special_case()
63 uint32x4_t u_off = vreinterpretq_u32_f32 (x); in V_NAME_F1() local
66 u_off = vsubq_u32 (u_off, d->off); in V_NAME_F1()
68 vshrq_n_s32 (vreinterpretq_s32_u32 (u_off), 23)); /* signextend. */ in V_NAME_F1()
70 uint16x4_t special = vcge_u16 (vsubhn_u32 (u_off, d->offset_lower_bound), in V_NAME_F1()
73 uint32x4_t u = vaddq_u32 (vandq_u32 (u_off, d->mantissa_mask), d->off); in V_NAME_F1()
93 return special_case (y, u_off, poly, r2, special, d); in V_NAME_F1()
H A Dlog.c59 special_case (float64x2_t hi, uint64x2_t u_off, float64x2_t y, float64x2_t r2, in special_case() argument
62 float64x2_t x = vreinterpretq_f64_u64 (vaddq_u64 (u_off, d->off)); in special_case()
78 uint64x2_t u_off = vsubq_u64 (u, d->off); in V_NAME_D1() local
83 int64x2_t k = vshrq_n_s64 (vreinterpretq_s64_u64 (u_off), 52); in V_NAME_D1()
84 uint64x2_t iz = vsubq_u64 (u, vandq_u64 (u_off, d->sign_exp_mask)); in V_NAME_D1()
87 struct entry e = lookup (u_off); in V_NAME_D1()
89 uint32x2_t special = vcge_u32 (vsubhn_u64 (u_off, d->offset_lower_bound), in V_NAME_D1()
109 return special_case (hi, u_off, y, r2, special, d); in V_NAME_D1()
H A Dlog2.c62 special_case (float64x2_t hi, uint64x2_t u_off, float64x2_t y, float64x2_t r2, in special_case() argument
65 float64x2_t x = vreinterpretq_f64_u64 (vaddq_u64 (u_off, d->off)); in special_case()
82 uint64x2_t u_off = vsubq_u64 (u, d->off); in V_NAME_D1() local
87 int64x2_t k = vshrq_n_s64 (vreinterpretq_s64_u64 (u_off), 52); in V_NAME_D1()
88 uint64x2_t iz = vsubq_u64 (u, vandq_u64 (u_off, d->sign_exp_mask)); in V_NAME_D1()
91 struct entry e = lookup (u_off); in V_NAME_D1()
93 uint32x2_t special = vcge_u32 (vsubhn_u64 (u_off, d->offset_lower_bound), in V_NAME_D1()
112 return special_case (hi, u_off, y, r2, special, d); in V_NAME_D1()
H A Dlog10.c63 special_case (float64x2_t hi, uint64x2_t u_off, float64x2_t y, float64x2_t r2, in special_case() argument
66 float64x2_t x = vreinterpretq_f64_u64 (vaddq_u64 (u_off, d->off)); in special_case()
84 uint64x2_t u_off = vsubq_u64 (u, d->off); in V_NAME_D1() local
89 int64x2_t k = vshrq_n_s64 (vreinterpretq_s64_u64 (u_off), 52); in V_NAME_D1()
90 uint64x2_t iz = vsubq_u64 (u, vandq_u64 (u_off, d->sign_exp_mask)); in V_NAME_D1()
93 struct entry e = lookup (u_off); in V_NAME_D1()
95 uint32x2_t special = vcge_u32 (vsubhn_u64 (u_off, d->offset_lower_bound), in V_NAME_D1()
120 return special_case (hi, u_off, y, r2, special, d); in V_NAME_D1()
H A Dlogf.c37 special_case (float32x4_t p, uint32x4_t u_off, float32x4_t y, float32x4_t r2, in special_case() argument
41 return v_call_f32 (logf, vreinterpretq_f32_u32 (vaddq_u32 (u_off, d->off)), in special_case()
53 uint32x4_t u_off = vsubq_u32 (vreinterpretq_u32_f32 (x), d->off); in V_NAME_F1() local
57 vshrq_n_s32 (vreinterpretq_s32_u32 (u_off), 23)); /* signextend. */ in V_NAME_F1()
58 uint16x4_t cmp = vcge_u16 (vsubhn_u32 (u_off, d->offset_lower_bound), in V_NAME_F1()
61 uint32x4_t u = vaddq_u32 (vandq_u32 (u_off, d->mantissa_mask), d->off); in V_NAME_F1()
77 return special_case (p, u_off, y, r2, cmp, d); in V_NAME_F1()
H A Dasinh.c102 uint64x2_t u_off = vsubq_u64 (u, d->off); in log_inline() local
104 int64x2_t k = vshrq_n_s64 (vreinterpretq_s64_u64 (u_off), 52); in log_inline()
105 uint64x2_t iz = vsubq_u64 (u, vandq_u64 (u_off, d->mask)); in log_inline()
108 struct entry e = lookup (u_off); in log_inline()