Lines Matching +full:1 +full:v2

108 		"1:	nopr	%%r7\n"  in fpu_lfpc_safe()
112 " jg 1b\n" in fpu_lfpc_safe()
114 EX_TABLE(1b, 2b) in fpu_lfpc_safe()
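The three hits above sit inside one exception-table construct: by the function name and label layout they look like fpu_lfpc_safe() from the s390 FPU inline-assembly header (arch/s390/include/asm/fpu-insn.h), which loads a floating-point control value that user space may have corrupted, and falls back to zero on a program check. A hedged reconstruction around the matched lines; everything outside the three hits (the operand lists, the fixup body, the instrument call) is an assumption based on the usual kernel pattern:

	static inline void fpu_lfpc_safe(unsigned int *fpc)
	{
		u32 tmp;

		instrument_read(fpc, sizeof(*fpc));	/* assumed, like the other helpers */
		asm volatile(
			"0:	lfpc	%[fpc]\n"
			"1:	nopr	%%r7\n"		/* hit: line 108 */
			".pushsection .fixup, \"ax\"\n"
			"2:	lghi	%[tmp],0\n"	/* bad FPC value: force it to zero */
			"	sfpc	%[tmp]\n"
			"	jg	1b\n"		/* hit: line 112 */
			".popsection\n"
			EX_TABLE(1b, 2b)		/* hit: line 114 */
			: [tmp] "=d" (tmp)
			: [fpc] "Q" (*fpc)
			: "memory");
	}

A specification exception from lfpc is suppressing, so the old PSW points at the nopr at label 1; EX_TABLE(1b, 2b) therefore redirects the fault to the out-of-line fixup at 2, which zeroes the FPC and jumps back.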
146 static __always_inline void fpu_vab(u8 v1, u8 v2, u8 v3) in fpu_vab() argument
148 asm volatile("VAB %[v1],%[v2],%[v3]" in fpu_vab()
150 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3) in fpu_vab()
154 static __always_inline void fpu_vcksm(u8 v1, u8 v2, u8 v3) in fpu_vcksm() argument
156 asm volatile("VCKSM %[v1],%[v2],%[v3]" in fpu_vcksm()
158 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3) in fpu_vcksm()
162 static __always_inline void fpu_vesravb(u8 v1, u8 v2, u8 v3) in fpu_vesravb() argument
164 asm volatile("VESRAVB %[v1],%[v2],%[v3]" in fpu_vesravb()
166 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3) in fpu_vesravb()
170 static __always_inline void fpu_vgfmag(u8 v1, u8 v2, u8 v3, u8 v4) in fpu_vgfmag() argument
172 asm volatile("VGFMAG %[v1],%[v2],%[v3],%[v4]" in fpu_vgfmag()
174 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3), [v4] "I" (v4) in fpu_vgfmag()
178 static __always_inline void fpu_vgfmg(u8 v1, u8 v2, u8 v3) in fpu_vgfmg() argument
180 asm volatile("VGFMG %[v1],%[v2],%[v3]" in fpu_vgfmg()
182 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3) in fpu_vgfmg()
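All five fragments in this block (fpu_vab, fpu_vcksm, fpu_vesravb, fpu_vgfmag, fpu_vgfmg) share one wrapper shape: the kernel is built without compiler vector-register allocation, so callers pass vector register numbers explicitly, and the "I" (immediate) constraints fold them into the opcode, which is why the helpers must be __always_inline with constant arguments. A hedged completion of fpu_vab showing the whole shape; the empty output list and the clobber are assumptions extrapolated from the matched lines:

	static __always_inline void fpu_vab(u8 v1, u8 v2, u8 v3)
	{
		/* v1/v2/v3 must be compile-time constants: "I" encodes them
		 * directly as the register fields of the VAB instruction.
		 */
		asm volatile("VAB	%[v1],%[v2],%[v3]"
			     :				/* no C-level outputs */
			     : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3)
			     : "memory");		/* assumed ordering clobber */
	}

The later fpu_vlr, fpu_vn, fpu_vperm, fpu_vsrlb, fpu_vupllf, and fpu_vx hits follow the same template with two to four register operands.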
192 " la 1,%[vxr]\n" in fpu_vl()
193 " VL %[v1],0,,1\n" in fpu_vl()
197 : "memory", "1"); in fpu_vl()
247 size = min(index + 1, sizeof(__vector128)); in fpu_vll()
250 " la 1,%[vxr]\n" in fpu_vll()
251 " VLL %[v1],%[index],0,1\n" in fpu_vll()
256 : "memory", "1"); in fpu_vll()
265 size = min(index + 1, sizeof(__vector128)); in fpu_vll()
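fpu_vll() appears twice (lines 247 and 265), presumably one variant per compiler/config path. VLL loads index + 1 bytes, so the matched min() clamps the instrumented access size to the 16-byte vector; the highest loaded byte offset is index, capped at sizeof(__vector128) - 1. A hedged completion of the la-based variant, with the same caveats as fpu_vl() above:

	static __always_inline void fpu_vll(u8 v1, u32 index, const void *vxr)
	{
		unsigned int size;

		size = min(index + 1, sizeof(__vector128));	/* VLL loads index + 1 bytes */
		instrument_read(vxr, size);
		asm volatile(
			"	la	1,%[vxr]\n"
			"	VLL	%[v1],%[index],0,1\n"	/* bytes 0..index into V(v1) */
			:
			: [vxr] "R" (*(u8 *)vxr),
			  [index] "d" (index),
			  [v1] "I" (v1)
			: "memory", "1");
	}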
281 unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128); \
283 __vector128 _v[(_v3) - (_v1) + 1]; \
288 " la 1,%[vxrs]\n" \
289 " VLM %[v1],%[v3],0,1\n" \
293 : "memory", "1"); \
294 (_v3) - (_v1) + 1; \
301 unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128); \
303 __vector128 _v[(_v3) - (_v1) + 1]; \
312 (_v3) - (_v1) + 1; \
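fpu_vlm() is a macro rather than a function because the register range also determines the size of the memory operand: the hits show the byte count ((_v3) - (_v1) + 1 vector registers), a local array used to give the constraint the right width, and the register count as the macro's result value. A hedged sketch assembled from the matched lines; the struct-typed pointer, the constraint letter, and the instrument_read() call are assumptions:

	#define fpu_vlm(_v1, _v3, _vxrs)					\
	({									\
		unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128);	\
		struct {							\
			__vector128 _v[(_v3) - (_v1) + 1];			\
		} *_v = (void *)(_vxrs);					\
										\
		instrument_read(_v, size);					\
		asm volatile(							\
			"	la	1,%[vxrs]\n"				\
			"	VLM	%[v1],%[v3],0,1\n"			\
			:							\
			: [vxrs] "Q" (*_v),					\
			  [v1] "I" (_v1), [v3] "I" (_v3)			\
			: "memory", "1");					\
		(_v3) - (_v1) + 1;						\
	})

The second fragment (lines 301-312) is presumably the variant for toolchains that can express the vector memory operand directly, without the la/GPR-1 detour, hence no "1" clobber among its hits.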
317 static __always_inline void fpu_vlr(u8 v1, u8 v2) in fpu_vlr() argument
319 asm volatile("VLR %[v1],%[v2]" in fpu_vlr()
321 : [v1] "I" (v1), [v2] "I" (v2) in fpu_vlr()
333 static __always_inline void fpu_vn(u8 v1, u8 v2, u8 v3) in fpu_vn() argument
335 asm volatile("VN %[v1],%[v2],%[v3]" in fpu_vn()
337 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3) in fpu_vn()
341 static __always_inline void fpu_vperm(u8 v1, u8 v2, u8 v3, u8 v4) in fpu_vperm() argument
343 asm volatile("VPERM %[v1],%[v2],%[v3],%[v4]" in fpu_vperm()
345 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3), [v4] "I" (v4) in fpu_vperm()
357 static __always_inline void fpu_vsrlb(u8 v1, u8 v2, u8 v3) in fpu_vsrlb() argument
359 asm volatile("VSRLB %[v1],%[v2],%[v3]" in fpu_vsrlb()
361 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3) in fpu_vsrlb()
371 " la 1,%[vxr]\n" in fpu_vst()
372 " VST %[v1],0,,1\n" in fpu_vst()
375 : "memory", "1"); in fpu_vst()
397 size = min(index + 1, sizeof(__vector128)); in fpu_vstl()
400 " la 1,%[vxr]\n" in fpu_vstl()
401 " VSTL %[v1],%[index],0,1\n" in fpu_vstl()
404 : "memory", "1"); in fpu_vstl()
413 size = min(index + 1, sizeof(__vector128)); in fpu_vstl()
427 unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128); \
429 __vector128 _v[(_v3) - (_v1) + 1]; \
434 " la 1,%[vxrs]\n" \
435 " VSTM %[v1],%[v3],0,1\n" \
438 : "memory", "1"); \
439 (_v3) - (_v1) + 1; \
446 unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128); \
448 __vector128 _v[(_v3) - (_v1) + 1]; \
456 (_v3) - (_v1) + 1; \
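The fpu_vstm() fragments mirror fpu_vlm(): identical size computation, sized array type, and register-count result, with VSTM storing the range and the array acting as the output operand. A hedged sketch of the la-based variant, under the same assumptions as fpu_vlm() above:

	#define fpu_vstm(_v1, _v3, _vxrs)					\
	({									\
		unsigned int size = ((_v3) - (_v1) + 1) * sizeof(__vector128);	\
		struct {							\
			__vector128 _v[(_v3) - (_v1) + 1];			\
		} *_v = (void *)(_vxrs);					\
										\
		instrument_write(_v, size);					\
		asm volatile(							\
			"	la	1,%[vxrs]\n"				\
			"	VSTM	%[v1],%[v3],0,1\n"			\
			: [vxrs] "=Q" (*_v)					\
			: [v1] "I" (_v1), [v3] "I" (_v3)			\
			: "memory", "1");					\
		(_v3) - (_v1) + 1;						\
	})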
461 static __always_inline void fpu_vupllf(u8 v1, u8 v2) in fpu_vupllf() argument
463 asm volatile("VUPLLF %[v1],%[v2]" in fpu_vupllf()
465 : [v1] "I" (v1), [v2] "I" (v2) in fpu_vupllf()
469 static __always_inline void fpu_vx(u8 v1, u8 v2, u8 v3) in fpu_vx() argument
471 asm volatile("VX %[v1],%[v2],%[v3]" in fpu_vx()
473 : [v1] "I" (v1), [v2] "I" (v2), [v3] "I" (v3) in fpu_vx()
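Taken together, the hits cover load, store, and register-to-register helpers, the building blocks of s390 kernel vector code such as checksum and CRC implementations. A hedged usage sketch: the kernel_fpu_begin()/kernel_fpu_end() bracketing that saves and restores vector registers around such code is part of the s390 FPU API, but the exact state declaration and flag names vary between kernel versions, so treat vxstate and KERNEL_VXR_V0V7 here as assumptions:

	#include <asm/fpu.h>	/* assumed location of the fpu_*() helpers */

	/* XOR two 16-byte blocks using vector registers V0-V2. */
	static void vx_xor_block(__vector128 *dst, const __vector128 *a,
				 const __vector128 *b)
	{
		struct kernel_fpu vxstate;	/* assumed declaration style */

		kernel_fpu_begin(&vxstate, KERNEL_VXR_V0V7);	/* make V0-V7 usable */
		fpu_vl(1, a);		/* V1 = *a */
		fpu_vl(2, b);		/* V2 = *b */
		fpu_vx(0, 1, 2);	/* V0 = V1 ^ V2 (the VX hit above) */
		fpu_vst(0, dst);	/* *dst = V0 */
		kernel_fpu_end(&vxstate, KERNEL_VXR_V0V7);
	}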