/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2011 Nexenta Systems, Inc.  All rights reserved.
 */
/*
 * Copyright 2006 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

	.file	"__vsinf.S"

#include "libm.h"

	RO_DATA
	.align	64
constants:
	.word	0xbfc55554,0x60000000
	.word	0x3f811077,0xe0000000
	.word	0xbf29956b,0x60000000
	.word	0x3ff00000,0x00000000
	.word	0xbfe00000,0x00000000
	.word	0x3fa55554,0xa0000000
	.word	0xbf56c0c1,0xe0000000
	.word	0x3ef99e24,0xe0000000
	.word	0x3fe45f30,0x6dc9c883
	.word	0x43380000,0x00000000
	.word	0x3ff921fb,0x54400000
	.word	0x3dd0b461,0x1a626331
	.word	0x3f490fdb,0
	.word	0x49c90fdb,0
	.word	0x7f800000,0
	.word	0x80000000,0

#define S0		0x0
#define S1		0x08
#define S2		0x10
#define one		0x18
#define mhalf		0x20
#define C0		0x28
#define C1		0x30
#define C2		0x38
#define invpio2		0x40
#define round		0x48
#define pio2_1		0x50
#define pio2_t		0x58
#define thresh1		0x60
#define thresh2		0x68
#define inf		0x70
#define signbit		0x78
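
! Rough decimal values of the constants above (offsets per the
! #defines); these annotations are descriptive only:
!   S0, S1, S2	~ -1/6, 1/120, -1/5040; x^3, x^5, x^7 coefficients
!		  of a polynomial approximation to sin(x)
!   one, mhalf	1.0 and -0.5
!   C0, C1, C2	~ 1/24, -1/720, 1/40320; x^4, x^6, x^8 coefficients
!		  of a polynomial approximation to cos(x)
!   invpio2	2/pi
!   round	2^52 + 2^51, used to round to the nearest integer
!   pio2_1	leading part of pi/2
!   pio2_t	trailing part of pi/2 (pi/2 - pio2_1)
!   thresh1	pi/4 in single precision
!   thresh2	2^19 pi in single precision
!   inf		single-precision infinity
!   signbit	single-precision sign bit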

! local storage indices

#define xsave		STACK_BIAS-0x8
#define ysave		STACK_BIAS-0x10
#define nsave		STACK_BIAS-0x14
#define sxsave		STACK_BIAS-0x18
#define sysave		STACK_BIAS-0x1c
#define junk		STACK_BIAS-0x20
#define n3		STACK_BIAS-0x24
#define n2		STACK_BIAS-0x28
#define n1		STACK_BIAS-0x2c
#define n0		STACK_BIAS-0x30
! sizeof temp storage - must be a multiple of 16 for V9
#define tmps		0x30

! register use

! i0  n
! i1  x
! i2  stridex
! i3  y
! i4  stridey
! i5  biguns

! l0  n0
! l1  n1
! l2  n2
! l3  n3
! l4
! l5
! l6
! l7

! the following are 64-bit registers in both V8+ and V9

! g1
! g5

! o0  py0
! o1  py1
! o2  py2
! o3  py3
! o4
! o5
! o7

! f0  x0
! f2  x1
! f4  x2
! f6  x3
! f8  thresh1 (pi/4)
! f10 y0
! f12 y1
! f14 y2
! f16 y3
! f18 thresh2 (2^19 pi)
! f20
! f22
! f24
! f26
! f28 signbit
! f30
! f32
! f34
! f36
! f38 inf
! f40 S0
! f42 S1
! f44 S2
! f46 one
! f48 mhalf
! f50 C0
! f52 C1
! f54 C2
! f56 invpio2
! f58 round
! f60 pio2_1
! f62 pio2_t

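! Per-element algorithm (an illustrative C-style sketch, not part of
! the original source; names follow the constants above):
!
!	if (|x| <= pi/4)			/* primary range */
!		result = x*(1 + z*(S0 + z*(S1 + z*S2))), z = x*x;
!	else if (|x| <= 2^19*pi) {		/* medium range */
!		n = nearest integer to x*invpio2;  /* via +/- round */
!		y = x - n*pio2_1 - n*pio2_t;  z = y*y;
!		t = (n & 1)? 1 - z/2 + z*z*(C0 + z*(C1 + z*C2))
!			   : y*(1 + z*(S0 + z*(S1 + z*S2)));
!		result = (n & 2)? -t : t;
!	} else
!		/* huge or non-finite: see .range0-.range3 below */
!
! The loop processes four elements at a time and is software-pipelined:
! each pass stores the results of the previous pass (the first pass
! stores into the scratch "junk" slot set up just below).
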
	ENTRY(__vsinf)
	save	%sp,-SA(MINFRAME)-tmps,%sp
	PIC_SETUP(l7)
	PIC_SET(l7,constants,l1)
	mov	%l1,%g1
	wr	%g0,0x82,%asi		! set %asi for non-faulting loads
#ifdef __sparcv9
	stx	%i1,[%fp+xsave]		! save arguments
	stx	%i3,[%fp+ysave]
#else
	st	%i1,[%fp+xsave]		! save arguments
	st	%i3,[%fp+ysave]
#endif
	st	%i0,[%fp+nsave]
	st	%i2,[%fp+sxsave]
	st	%i4,[%fp+sysave]
	mov	0,%i5			! biguns = 0
	ldd	[%g1+S0],%f40		! load constants
	ldd	[%g1+S1],%f42
	ldd	[%g1+S2],%f44
	ldd	[%g1+one],%f46
	ldd	[%g1+mhalf],%f48
	ldd	[%g1+C0],%f50
	ldd	[%g1+C1],%f52
	ldd	[%g1+C2],%f54
	ldd	[%g1+invpio2],%f56
	ldd	[%g1+round],%f58
	ldd	[%g1+pio2_1],%f60
	ldd	[%g1+pio2_t],%f62
	ldd	[%g1+thresh1],%f8
	ldd	[%g1+thresh2],%f18
	ldd	[%g1+inf],%f38
	ldd	[%g1+signbit],%f28
	sll	%i2,2,%i2		! scale strides
	sll	%i4,2,%i4
	fzero	%f10			! loop prologue
	add	%fp,junk,%o0
	fzero	%f12
	add	%fp,junk,%o1
	fzero	%f14
	add	%fp,junk,%o2
	fzero	%f16
	ba	.start
	add	%fp,junk,%o3

! 16-byte aligned
	.align	16
.start:
	ld	[%i1],%f0		! *x
	add	%i1,%i2,%i1		! x += stridex
	addcc	%i0,-1,%i0
	fdtos	%f10,%f10

	st	%f10,[%o0]
	mov	%i3,%o0			! py0 = y
	ble,pn	%icc,.last1
! delay slot
	add	%i3,%i4,%i3		! y += stridey

	ld	[%i1],%f2		! *x
	add	%i1,%i2,%i1		! x += stridex
	addcc	%i0,-1,%i0
	fdtos	%f12,%f12

	st	%f12,[%o1]
	mov	%i3,%o1			! py1 = y
	ble,pn	%icc,.last2
! delay slot
	add	%i3,%i4,%i3		! y += stridey

	ld	[%i1],%f4		! *x
	add	%i1,%i2,%i1		! x += stridex
	addcc	%i0,-1,%i0
	fdtos	%f14,%f14

	st	%f14,[%o2]
	mov	%i3,%o2			! py2 = y
	ble,pn	%icc,.last3
! delay slot
	add	%i3,%i4,%i3		! y += stridey

	ld	[%i1],%f6		! *x
	add	%i1,%i2,%i1		! x += stridex
	nop
	fdtos	%f16,%f16

	st	%f16,[%o3]
	mov	%i3,%o3			! py3 = y
	add	%i3,%i4,%i3		! y += stridey
.cont:
	fabsd	%f0,%f30

	fabsd	%f2,%f32

	fabsd	%f4,%f34

	fabsd	%f6,%f36
	fcmple32 %f30,%f18,%l0

	fcmple32 %f32,%f18,%l1

	fcmple32 %f34,%f18,%l2

	fcmple32 %f36,%f18,%l3
	nop

! 16-byte aligned
	andcc	%l0,2,%g0
	bz,pn	%icc,.range0		! branch if > 2^19 pi
! delay slot
	fcmple32 %f30,%f8,%l0

.check1:
	andcc	%l1,2,%g0
	bz,pn	%icc,.range1		! branch if > 2^19 pi
! delay slot
	fcmple32 %f32,%f8,%l1

.check2:
	andcc	%l2,2,%g0
	bz,pn	%icc,.range2		! branch if > 2^19 pi
! delay slot
	fcmple32 %f34,%f8,%l2

.check3:
	andcc	%l3,2,%g0
	bz,pn	%icc,.range3		! branch if > 2^19 pi
! delay slot
	fcmple32 %f36,%f8,%l3

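! Primary path: square all four arguments speculatively; if every
! |x| <= pi/4, evaluate the sine polynomial directly (the terms of the
! four elements are interleaved to hide latency), otherwise branch to
! .medium for argument reduction.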
.checkprimary:
	fsmuld	%f0,%f0,%f30
	fstod	%f0,%f0

	fsmuld	%f2,%f2,%f32
	fstod	%f2,%f2
	and	%l0,%l1,%o4

	fsmuld	%f4,%f4,%f34
	fstod	%f4,%f4

	fsmuld	%f6,%f6,%f36
	fstod	%f6,%f6
	and	%l2,%l3,%o5

	fmuld	%f30,%f44,%f10
	and	%o4,%o5,%o5

	fmuld	%f32,%f44,%f12
	andcc	%o5,2,%g0
	bz,pn	%icc,.medium		! branch if any argument is > pi/4
! delay slot
	nop

	fmuld	%f34,%f44,%f14

	fmuld	%f36,%f44,%f16

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10

	fmuld	%f32,%f12,%f12

	fmuld	%f34,%f14,%f14

	fmuld	%f36,%f16,%f16

	faddd	%f10,%f20,%f10

	faddd	%f12,%f22,%f12

	faddd	%f14,%f24,%f14

	faddd	%f16,%f26,%f16

	fmuld	%f0,%f10,%f10

	fmuld	%f2,%f12,%f12

	fmuld	%f4,%f14,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fmuld	%f6,%f16,%f16

	ba,pt	%icc,.end
! delay slot
	nop


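! Medium range: for each element compute n = nearest integer to
! x*2/pi (by adding and subtracting the 2^52+2^51 "round" constant,
! which leaves n in the low word), then reduce y = x - n*pio2_1 -
! n*pio2_t.  Bit 0 of each n selects the sine or cosine polynomial;
! the sixteen .caseN blocks below enumerate the four-element
! combinations.  Bit 1 of n supplies the sign, applied at the end by
! xor-ing the sign bit into the result.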
	.align	16
.medium:
	fmuld	%f0,%f56,%f10

	fmuld	%f2,%f56,%f12

	fmuld	%f4,%f56,%f14

	fmuld	%f6,%f56,%f16

	faddd	%f10,%f58,%f10
	st	%f11,[%fp+n0]

	faddd	%f12,%f58,%f12
	st	%f13,[%fp+n1]

	faddd	%f14,%f58,%f14
	st	%f15,[%fp+n2]

	faddd	%f16,%f58,%f16
	st	%f17,[%fp+n3]

	fsubd	%f10,%f58,%f10

	fsubd	%f12,%f58,%f12

	fsubd	%f14,%f58,%f14

	fsubd	%f16,%f58,%f16

	fmuld	%f10,%f60,%f20
	ld	[%fp+n0],%l0

	fmuld	%f12,%f60,%f22
	ld	[%fp+n1],%l1

	fmuld	%f14,%f60,%f24
	ld	[%fp+n2],%l2

	fmuld	%f16,%f60,%f26
	ld	[%fp+n3],%l3

	fsubd	%f0,%f20,%f0
	fmuld	%f10,%f62,%f30

	fsubd	%f2,%f22,%f2
	fmuld	%f12,%f62,%f32

	fsubd	%f4,%f24,%f4
	fmuld	%f14,%f62,%f34

	fsubd	%f6,%f26,%f6
	fmuld	%f16,%f62,%f36

	fsubd	%f0,%f30,%f0

	fsubd	%f2,%f32,%f2

	fsubd	%f4,%f34,%f4

	fsubd	%f6,%f36,%f6
	andcc	%l0,1,%g0

	fmuld	%f0,%f0,%f30
	bz,pn	%icc,.case8
! delay slot
	andcc	%l1,1,%g0

	fmuld	%f2,%f2,%f32
	bz,pn	%icc,.case4
! delay slot
	andcc	%l2,1,%g0

	fmuld	%f4,%f4,%f34
	bz,pn	%icc,.case2
! delay slot
	andcc	%l3,1,%g0

	fmuld	%f6,%f6,%f36
	bz,pn	%icc,.case1
! delay slot
	nop

!.case0:
	fmuld	%f30,%f54,%f10		! cos(x0)
	fzero	%f0

	fmuld	%f32,%f54,%f12		! cos(x1)
	fzero	%f2

	fmuld	%f34,%f54,%f14		! cos(x2)
	fzero	%f4

	fmuld	%f36,%f54,%f16		! cos(x3)
	fzero	%f6

	fmuld	%f30,%f48,%f20
	faddd	%f10,%f52,%f10

	fmuld	%f32,%f48,%f22
	faddd	%f12,%f52,%f12

	fmuld	%f34,%f48,%f24
	faddd	%f14,%f52,%f14

	fmuld	%f36,%f48,%f26
	faddd	%f16,%f52,%f16

	fmuld	%f30,%f10,%f10
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f12,%f12
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f14,%f14
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f16,%f16
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f30,%f30
	faddd	%f10,%f50,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f32,%f32
	faddd	%f12,%f50,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f34,%f34
	faddd	%f14,%f50,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f36,%f36
	faddd	%f16,%f50,%f16
	and	%l3,2,%o5

	fmuld	%f30,%f10,%f10
	fmovrdnz %g1,%f28,%f0

	fmuld	%f32,%f12,%f12
	fmovrdnz %g5,%f28,%f2

	fmuld	%f34,%f14,%f14
	fmovrdnz %o4,%f28,%f4

	fmuld	%f36,%f16,%f16
	fmovrdnz %o5,%f28,%f6

	faddd	%f10,%f20,%f10

	faddd	%f12,%f22,%f12

	faddd	%f14,%f24,%f14

	faddd	%f16,%f26,%f16

	fxor	%f10,%f0,%f10

	fxor	%f12,%f2,%f12

	fxor	%f14,%f4,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f6,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case1:
	fmuld	%f30,%f54,%f10		! cos(x0)
	fzero	%f0

	fmuld	%f32,%f54,%f12		! cos(x1)
	fzero	%f2

	fmuld	%f34,%f54,%f14		! cos(x2)
	fzero	%f4

	fmuld	%f36,%f44,%f16		! sin(x3)

	fmuld	%f30,%f48,%f20
	faddd	%f10,%f52,%f10

	fmuld	%f32,%f48,%f22
	faddd	%f12,%f52,%f12

	fmuld	%f34,%f48,%f24
	faddd	%f14,%f52,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f10,%f10
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f12,%f12
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f14,%f14
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f30,%f30
	faddd	%f10,%f50,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f32,%f32
	faddd	%f12,%f50,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f34,%f34
	faddd	%f14,%f50,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f16,%f16
	fzero	%f36

	fmuld	%f30,%f10,%f10
	fmovrdnz %g1,%f28,%f0

	fmuld	%f32,%f12,%f12
	fmovrdnz %g5,%f28,%f2

	fmuld	%f34,%f14,%f14
	fmovrdnz %o4,%f28,%f4

	faddd	%f16,%f26,%f16
	and	%l3,2,%o5

	faddd	%f10,%f20,%f10

	faddd	%f12,%f22,%f12

	faddd	%f14,%f24,%f14

	fmuld	%f6,%f16,%f16
	fmovrdnz %o5,%f28,%f36

	fxor	%f10,%f0,%f10

	fxor	%f12,%f2,%f12

	fxor	%f14,%f4,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f36,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case2:
	fmuld	%f6,%f6,%f36
	bz,pn	%icc,.case3
! delay slot
	nop

	fmuld	%f30,%f54,%f10		! cos(x0)
	fzero	%f0

	fmuld	%f32,%f54,%f12		! cos(x1)
	fzero	%f2

	fmuld	%f34,%f44,%f14		! sin(x2)

	fmuld	%f36,%f54,%f16		! cos(x3)
	fzero	%f6

	fmuld	%f30,%f48,%f20
	faddd	%f10,%f52,%f10

	fmuld	%f32,%f48,%f22
	faddd	%f12,%f52,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f48,%f26
	faddd	%f16,%f52,%f16

	fmuld	%f30,%f10,%f10
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f12,%f12
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f16,%f16
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f30,%f30
	faddd	%f10,%f50,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f32,%f32
	faddd	%f12,%f50,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f14,%f14
	fzero	%f34

	fmuld	%f36,%f36,%f36
	faddd	%f16,%f50,%f16
	and	%l3,2,%o5

	fmuld	%f30,%f10,%f10
	fmovrdnz %g1,%f28,%f0

	fmuld	%f32,%f12,%f12
	fmovrdnz %g5,%f28,%f2

	faddd	%f14,%f24,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f16,%f16
	fmovrdnz %o5,%f28,%f6

	faddd	%f10,%f20,%f10

	faddd	%f12,%f22,%f12

	fmuld	%f4,%f14,%f14
	fmovrdnz %o4,%f28,%f34

	faddd	%f16,%f26,%f16

	fxor	%f10,%f0,%f10

	fxor	%f12,%f2,%f12

	fxor	%f14,%f34,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f6,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case3:
	fmuld	%f30,%f54,%f10		! cos(x0)
	fzero	%f0

	fmuld	%f32,%f54,%f12		! cos(x1)
	fzero	%f2

	fmuld	%f34,%f44,%f14		! sin(x2)

	fmuld	%f36,%f44,%f16		! sin(x3)

	fmuld	%f30,%f48,%f20
	faddd	%f10,%f52,%f10

	fmuld	%f32,%f48,%f22
	faddd	%f12,%f52,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f10,%f10
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f12,%f12
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f30,%f30
	faddd	%f10,%f50,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f32,%f32
	faddd	%f12,%f50,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f14,%f14
	fzero	%f34

	fmuld	%f36,%f16,%f16
	fzero	%f36

	fmuld	%f30,%f10,%f10
	fmovrdnz %g1,%f28,%f0

	fmuld	%f32,%f12,%f12
	fmovrdnz %g5,%f28,%f2

	faddd	%f14,%f24,%f14
	and	%l2,2,%o4

	faddd	%f16,%f26,%f16
	and	%l3,2,%o5

	faddd	%f10,%f20,%f10

	faddd	%f12,%f22,%f12

	fmuld	%f4,%f14,%f14
	fmovrdnz %o4,%f28,%f34

	fmuld	%f6,%f16,%f16
	fmovrdnz %o5,%f28,%f36

	fxor	%f10,%f0,%f10

	fxor	%f12,%f2,%f12

	fxor	%f14,%f34,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f36,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case4:
	fmuld	%f4,%f4,%f34
	bz,pn	%icc,.case6
! delay slot
	andcc	%l3,1,%g0

	fmuld	%f6,%f6,%f36
	bz,pn	%icc,.case5
! delay slot
	nop

	fmuld	%f30,%f54,%f10		! cos(x0)
	fzero	%f0

	fmuld	%f32,%f44,%f12		! sin(x1)

	fmuld	%f34,%f54,%f14		! cos(x2)
	fzero	%f4

	fmuld	%f36,%f54,%f16		! cos(x3)
	fzero	%f6

	fmuld	%f30,%f48,%f20
	faddd	%f10,%f52,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f48,%f24
	faddd	%f14,%f52,%f14

	fmuld	%f36,%f48,%f26
	faddd	%f16,%f52,%f16

	fmuld	%f30,%f10,%f10
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f14,%f14
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f16,%f16
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f30,%f30
	faddd	%f10,%f50,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f12,%f12
	fzero	%f32

	fmuld	%f34,%f34,%f34
	faddd	%f14,%f50,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f36,%f36
	faddd	%f16,%f50,%f16
	and	%l3,2,%o5

	fmuld	%f30,%f10,%f10
	fmovrdnz %g1,%f28,%f0

	faddd	%f12,%f22,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f14,%f14
	fmovrdnz %o4,%f28,%f4

	fmuld	%f36,%f16,%f16
	fmovrdnz %o5,%f28,%f6

	faddd	%f10,%f20,%f10

	fmuld	%f2,%f12,%f12
	fmovrdnz %g5,%f28,%f32

	faddd	%f14,%f24,%f14

	faddd	%f16,%f26,%f16

	fxor	%f10,%f0,%f10

	fxor	%f12,%f32,%f12

	fxor	%f14,%f4,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f6,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case5:
	fmuld	%f30,%f54,%f10		! cos(x0)
	fzero	%f0

	fmuld	%f32,%f44,%f12		! sin(x1)

	fmuld	%f34,%f54,%f14		! cos(x2)
	fzero	%f4

	fmuld	%f36,%f44,%f16		! sin(x3)

	fmuld	%f30,%f48,%f20
	faddd	%f10,%f52,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f48,%f24
	faddd	%f14,%f52,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f10,%f10
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f14,%f14
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f30,%f30
	faddd	%f10,%f50,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f12,%f12
	fzero	%f32

	fmuld	%f34,%f34,%f34
	faddd	%f14,%f50,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f16,%f16
	fzero	%f36

	fmuld	%f30,%f10,%f10
	fmovrdnz %g1,%f28,%f0

	faddd	%f12,%f22,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f14,%f14
	fmovrdnz %o4,%f28,%f4

	faddd	%f16,%f26,%f16
	and	%l3,2,%o5

	faddd	%f10,%f20,%f10

	fmuld	%f2,%f12,%f12
	fmovrdnz %g5,%f28,%f32

	faddd	%f14,%f24,%f14

	fmuld	%f6,%f16,%f16
	fmovrdnz %o5,%f28,%f36

	fxor	%f10,%f0,%f10

	fxor	%f12,%f32,%f12

	fxor	%f14,%f4,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f36,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case6:
	fmuld	%f6,%f6,%f36
	bz,pn	%icc,.case7
! delay slot
	nop

	fmuld	%f30,%f54,%f10		! cos(x0)
	fzero	%f0

	fmuld	%f32,%f44,%f12		! sin(x1)

	fmuld	%f34,%f44,%f14		! sin(x2)

	fmuld	%f36,%f54,%f16		! cos(x3)
	fzero	%f6

	fmuld	%f30,%f48,%f20
	faddd	%f10,%f52,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f48,%f26
	faddd	%f16,%f52,%f16

	fmuld	%f30,%f10,%f10
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f16,%f16
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f30,%f30
	faddd	%f10,%f50,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f12,%f12
	fzero	%f32

	fmuld	%f34,%f14,%f14
	fzero	%f34

	fmuld	%f36,%f36,%f36
	faddd	%f16,%f50,%f16
	and	%l3,2,%o5

	fmuld	%f30,%f10,%f10
	fmovrdnz %g1,%f28,%f0

	faddd	%f12,%f22,%f12
	and	%l1,2,%g5

	faddd	%f14,%f24,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f16,%f16
	fmovrdnz %o5,%f28,%f6

	faddd	%f10,%f20,%f10

	fmuld	%f2,%f12,%f12
	fmovrdnz %g5,%f28,%f32

	fmuld	%f4,%f14,%f14
	fmovrdnz %o4,%f28,%f34

	faddd	%f16,%f26,%f16

	fxor	%f10,%f0,%f10

	fxor	%f12,%f32,%f12

	fxor	%f14,%f34,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f6,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case7:
	fmuld	%f30,%f54,%f10		! cos(x0)
	fzero	%f0

	fmuld	%f32,%f44,%f12		! sin(x1)

	fmuld	%f34,%f44,%f14		! sin(x2)

	fmuld	%f36,%f44,%f16		! sin(x3)

	fmuld	%f30,%f48,%f20
	faddd	%f10,%f52,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f10,%f10
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f30,%f30
	faddd	%f10,%f50,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f12,%f12
	fzero	%f32

	fmuld	%f34,%f14,%f14
	fzero	%f34

	fmuld	%f36,%f16,%f16
	fzero	%f36

	fmuld	%f30,%f10,%f10
	fmovrdnz %g1,%f28,%f0

	faddd	%f12,%f22,%f12
	and	%l1,2,%g5

	faddd	%f14,%f24,%f14
	and	%l2,2,%o4

	faddd	%f16,%f26,%f16
	and	%l3,2,%o5

	faddd	%f10,%f20,%f10

	fmuld	%f2,%f12,%f12
	fmovrdnz %g5,%f28,%f32

	fmuld	%f4,%f14,%f14
	fmovrdnz %o4,%f28,%f34

	fmuld	%f6,%f16,%f16
	fmovrdnz %o5,%f28,%f36

	fxor	%f10,%f0,%f10

	fxor	%f12,%f32,%f12

	fxor	%f14,%f34,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f36,%f16

	ba,pt	%icc,.end
! delay slot
	nop


	.align	16
.case8:
	fmuld	%f2,%f2,%f32
	bz,pn	%icc,.case12
! delay slot
	andcc	%l2,1,%g0

	fmuld	%f4,%f4,%f34
	bz,pn	%icc,.case10
! delay slot
	andcc	%l3,1,%g0

	fmuld	%f6,%f6,%f36
	bz,pn	%icc,.case9
! delay slot
	nop

	fmuld	%f30,%f44,%f10		! sin(x0)

	fmuld	%f32,%f54,%f12		! cos(x1)
	fzero	%f2

	fmuld	%f34,%f54,%f14		! cos(x2)
	fzero	%f4

	fmuld	%f36,%f54,%f16		! cos(x3)
	fzero	%f6

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f48,%f22
	faddd	%f12,%f52,%f12

	fmuld	%f34,%f48,%f24
	faddd	%f14,%f52,%f14

	fmuld	%f36,%f48,%f26
	faddd	%f16,%f52,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f12,%f12
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f14,%f14
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f16,%f16
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10
	fzero	%f30

	fmuld	%f32,%f32,%f32
	faddd	%f12,%f50,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f34,%f34
	faddd	%f14,%f50,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f36,%f36
	faddd	%f16,%f50,%f16
	and	%l3,2,%o5

	faddd	%f10,%f20,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f12,%f12
	fmovrdnz %g5,%f28,%f2

	fmuld	%f34,%f14,%f14
	fmovrdnz %o4,%f28,%f4

	fmuld	%f36,%f16,%f16
	fmovrdnz %o5,%f28,%f6

	fmuld	%f0,%f10,%f10
	fmovrdnz %g1,%f28,%f30

	faddd	%f12,%f22,%f12

	faddd	%f14,%f24,%f14

	faddd	%f16,%f26,%f16

	fxor	%f10,%f30,%f10

	fxor	%f12,%f2,%f12

	fxor	%f14,%f4,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f6,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case9:
	fmuld	%f30,%f44,%f10		! sin(x0)

	fmuld	%f32,%f54,%f12		! cos(x1)
	fzero	%f2

	fmuld	%f34,%f54,%f14		! cos(x2)
	fzero	%f4

	fmuld	%f36,%f44,%f16		! sin(x3)

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f48,%f22
	faddd	%f12,%f52,%f12

	fmuld	%f34,%f48,%f24
	faddd	%f14,%f52,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f12,%f12
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f14,%f14
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10
	fzero	%f30

	fmuld	%f32,%f32,%f32
	faddd	%f12,%f50,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f34,%f34
	faddd	%f14,%f50,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f16,%f16
	fzero	%f36

	faddd	%f10,%f20,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f12,%f12
	fmovrdnz %g5,%f28,%f2

	fmuld	%f34,%f14,%f14
	fmovrdnz %o4,%f28,%f4

	faddd	%f16,%f26,%f16
	and	%l3,2,%o5

	fmuld	%f0,%f10,%f10
	fmovrdnz %g1,%f28,%f30

	faddd	%f12,%f22,%f12

	faddd	%f14,%f24,%f14

	fmuld	%f6,%f16,%f16
	fmovrdnz %o5,%f28,%f36

	fxor	%f10,%f30,%f10

	fxor	%f12,%f2,%f12

	fxor	%f14,%f4,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f36,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case10:
	fmuld	%f6,%f6,%f36
	bz,pn	%icc,.case11
! delay slot
	nop

	fmuld	%f30,%f44,%f10		! sin(x0)

	fmuld	%f32,%f54,%f12		! cos(x1)
	fzero	%f2

	fmuld	%f34,%f44,%f14		! sin(x2)

	fmuld	%f36,%f54,%f16		! cos(x3)
	fzero	%f6

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f48,%f22
	faddd	%f12,%f52,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f48,%f26
	faddd	%f16,%f52,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f12,%f12
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f16,%f16
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10
	fzero	%f30

	fmuld	%f32,%f32,%f32
	faddd	%f12,%f50,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f14,%f14
	fzero	%f34

	fmuld	%f36,%f36,%f36
	faddd	%f16,%f50,%f16
	and	%l3,2,%o5

	faddd	%f10,%f20,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f12,%f12
	fmovrdnz %g5,%f28,%f2

	faddd	%f14,%f24,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f16,%f16
	fmovrdnz %o5,%f28,%f6

	fmuld	%f0,%f10,%f10
	fmovrdnz %g1,%f28,%f30

	faddd	%f12,%f22,%f12

	fmuld	%f4,%f14,%f14
	fmovrdnz %o4,%f28,%f34

	faddd	%f16,%f26,%f16

	fxor	%f10,%f30,%f10

	fxor	%f12,%f2,%f12

	fxor	%f14,%f34,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f6,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case11:
	fmuld	%f30,%f44,%f10		! sin(x0)

	fmuld	%f32,%f54,%f12		! cos(x1)
	fzero	%f2

	fmuld	%f34,%f44,%f14		! sin(x2)

	fmuld	%f36,%f44,%f16		! sin(x3)

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f48,%f22
	faddd	%f12,%f52,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f12,%f12
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10
	fzero	%f30

	fmuld	%f32,%f32,%f32
	faddd	%f12,%f50,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f14,%f14
	fzero	%f34

	fmuld	%f36,%f16,%f16
	fzero	%f36

	faddd	%f10,%f20,%f10
	and	%l0,2,%g1

	fmuld	%f32,%f12,%f12
	fmovrdnz %g5,%f28,%f2

	faddd	%f14,%f24,%f14
	and	%l2,2,%o4

	faddd	%f16,%f26,%f16
	and	%l3,2,%o5

	fmuld	%f0,%f10,%f10
	fmovrdnz %g1,%f28,%f30

	faddd	%f12,%f22,%f12

	fmuld	%f4,%f14,%f14
	fmovrdnz %o4,%f28,%f34

	fmuld	%f6,%f16,%f16
	fmovrdnz %o5,%f28,%f36

	fxor	%f10,%f30,%f10

	fxor	%f12,%f2,%f12

	fxor	%f14,%f34,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f36,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case12:
	fmuld	%f4,%f4,%f34
	bz,pn	%icc,.case14
! delay slot
	andcc	%l3,1,%g0

	fmuld	%f6,%f6,%f36
	bz,pn	%icc,.case13
! delay slot
	nop

	fmuld	%f30,%f44,%f10		! sin(x0)

	fmuld	%f32,%f44,%f12		! sin(x1)

	fmuld	%f34,%f54,%f14		! cos(x2)
	fzero	%f4

	fmuld	%f36,%f54,%f16		! cos(x3)
	fzero	%f6

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f48,%f24
	faddd	%f14,%f52,%f14

	fmuld	%f36,%f48,%f26
	faddd	%f16,%f52,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f14,%f14
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f16,%f16
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10
	fzero	%f30

	fmuld	%f32,%f12,%f12
	fzero	%f32

	fmuld	%f34,%f34,%f34
	faddd	%f14,%f50,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f36,%f36
	faddd	%f16,%f50,%f16
	and	%l3,2,%o5

	faddd	%f10,%f20,%f10
	and	%l0,2,%g1

	faddd	%f12,%f22,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f14,%f14
	fmovrdnz %o4,%f28,%f4

	fmuld	%f36,%f16,%f16
	fmovrdnz %o5,%f28,%f6

	fmuld	%f0,%f10,%f10
	fmovrdnz %g1,%f28,%f30

	fmuld	%f2,%f12,%f12
	fmovrdnz %g5,%f28,%f32

	faddd	%f14,%f24,%f14

	faddd	%f16,%f26,%f16

	fxor	%f10,%f30,%f10

	fxor	%f12,%f32,%f12

	fxor	%f14,%f4,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f6,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case13:
	fmuld	%f30,%f44,%f10		! sin(x0)

	fmuld	%f32,%f44,%f12		! sin(x1)

	fmuld	%f34,%f54,%f14		! cos(x2)
	fzero	%f4

	fmuld	%f36,%f44,%f16		! sin(x3)

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f48,%f24
	faddd	%f14,%f52,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f14,%f14
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10
	fzero	%f30

	fmuld	%f32,%f12,%f12
	fzero	%f32

	fmuld	%f34,%f34,%f34
	faddd	%f14,%f50,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f16,%f16
	fzero	%f36

	faddd	%f10,%f20,%f10
	and	%l0,2,%g1

	faddd	%f12,%f22,%f12
	and	%l1,2,%g5

	fmuld	%f34,%f14,%f14
	fmovrdnz %o4,%f28,%f4

	faddd	%f16,%f26,%f16
	and	%l3,2,%o5

	fmuld	%f0,%f10,%f10
	fmovrdnz %g1,%f28,%f30

	fmuld	%f2,%f12,%f12
	fmovrdnz %g5,%f28,%f32

	faddd	%f14,%f24,%f14

	fmuld	%f6,%f16,%f16
	fmovrdnz %o5,%f28,%f36

	fxor	%f10,%f30,%f10

	fxor	%f12,%f32,%f12

	fxor	%f14,%f4,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f36,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case14:
	fmuld	%f6,%f6,%f36
	bz,pn	%icc,.case15
! delay slot
	nop

	fmuld	%f30,%f44,%f10		! sin(x0)

	fmuld	%f32,%f44,%f12		! sin(x1)

	fmuld	%f34,%f44,%f14		! sin(x2)

	fmuld	%f36,%f54,%f16		! cos(x3)
	fzero	%f6

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f48,%f26
	faddd	%f16,%f52,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f16,%f16
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10
	fzero	%f30

	fmuld	%f32,%f12,%f12
	fzero	%f32

	fmuld	%f34,%f14,%f14
	fzero	%f34

	fmuld	%f36,%f36,%f36
	faddd	%f16,%f50,%f16
	and	%l3,2,%o5

	faddd	%f10,%f20,%f10
	and	%l0,2,%g1

	faddd	%f12,%f22,%f12
	and	%l1,2,%g5

	faddd	%f14,%f24,%f14
	and	%l2,2,%o4

	fmuld	%f36,%f16,%f16
	fmovrdnz %o5,%f28,%f6

	fmuld	%f0,%f10,%f10
	fmovrdnz %g1,%f28,%f30

	fmuld	%f2,%f12,%f12
	fmovrdnz %g5,%f28,%f32

	fmuld	%f4,%f14,%f14
	fmovrdnz %o4,%f28,%f34

	faddd	%f16,%f26,%f16

	fxor	%f10,%f30,%f10

	fxor	%f12,%f32,%f12

	fxor	%f14,%f34,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f6,%f16

	ba,pt	%icc,.end
! delay slot
	nop

	.align	16
.case15:
	fmuld	%f30,%f44,%f10		! sin(x0)

	fmuld	%f32,%f44,%f12		! sin(x1)

	fmuld	%f34,%f44,%f14		! sin(x2)

	fmuld	%f36,%f44,%f16		! sin(x3)

	fmuld	%f30,%f40,%f20
	faddd	%f10,%f42,%f10

	fmuld	%f32,%f40,%f22
	faddd	%f12,%f42,%f12

	fmuld	%f34,%f40,%f24
	faddd	%f14,%f42,%f14

	fmuld	%f36,%f40,%f26
	faddd	%f16,%f42,%f16

	fmuld	%f30,%f30,%f30
	faddd	%f20,%f46,%f20

	fmuld	%f32,%f32,%f32
	faddd	%f22,%f46,%f22

	fmuld	%f34,%f34,%f34
	faddd	%f24,%f46,%f24

	fmuld	%f36,%f36,%f36
	faddd	%f26,%f46,%f26

	fmuld	%f30,%f10,%f10
	fzero	%f30

	fmuld	%f32,%f12,%f12
	fzero	%f32

	fmuld	%f34,%f14,%f14
	fzero	%f34

	fmuld	%f36,%f16,%f16
	fzero	%f36

	faddd	%f10,%f20,%f10
	and	%l0,2,%g1

	faddd	%f12,%f22,%f12
	and	%l1,2,%g5

	faddd	%f14,%f24,%f14
	and	%l2,2,%o4

	faddd	%f16,%f26,%f16
	and	%l3,2,%o5

	fmuld	%f0,%f10,%f10
	fmovrdnz %g1,%f28,%f30

	fmuld	%f2,%f12,%f12
	fmovrdnz %g5,%f28,%f32

	fmuld	%f4,%f14,%f14
	fmovrdnz %o4,%f28,%f34

	fmuld	%f6,%f16,%f16
	fmovrdnz %o5,%f28,%f36

	fxor	%f10,%f30,%f10

	fxor	%f12,%f32,%f12

	fxor	%f14,%f34,%f14

	addcc	%i0,-1,%i0
	bg,pt	%icc,.start
! delay slot
	fxor	%f16,%f36,%f16

	ba,pt	%icc,.end
! delay slot
	nop


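! Loop epilogue: convert and store the last four results, then, if any
! element was flagged as huge (biguns set), reload the saved arguments
! and let __vlibm_vsin_bigf recompute those elements.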
	.align	32
.end:
	fdtos	%f10,%f10
	st	%f10,[%o0]
	fdtos	%f12,%f12
	st	%f12,[%o1]
	fdtos	%f14,%f14
	st	%f14,[%o2]
	fdtos	%f16,%f16
	tst	%i5			! check for huge arguments remaining
	be,pt	%icc,.exit
! delay slot
	st	%f16,[%o3]
#ifdef __sparcv9
	ldx	[%fp+xsave],%o1
	ldx	[%fp+ysave],%o3
#else
	ld	[%fp+xsave],%o1
	ld	[%fp+ysave],%o3
#endif
	ld	[%fp+nsave],%o0
	ld	[%fp+sxsave],%o2
	ld	[%fp+sysave],%o4
	sra	%o2,0,%o2		! sign-extend for V9
	call	__vlibm_vsin_bigf
	sra	%o4,0,%o4		! delay slot

.exit:
	ret
	restore


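! Loop tails: fewer than four elements remained.  Store the previous
! pass's pending results, substitute zero for the missing arguments,
! and point their result slots at the scratch "junk" area before
! rejoining the main loop at .cont.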
	.align	32
.last1:
	fdtos	%f12,%f12
	st	%f12,[%o1]
	fzeros	%f2
	add	%fp,junk,%o1
.last2:
	fdtos	%f14,%f14
	st	%f14,[%o2]
	fzeros	%f4
	add	%fp,junk,%o2
.last3:
	fdtos	%f16,%f16
	st	%f16,[%o3]
	fzeros	%f6
	ba,pt	%icc,.cont
! delay slot
	add	%fp,junk,%o3


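! Out-of-range handling, one block per lane.  A finite argument larger
! than 2^19 pi just sets biguns so __vlibm_vsin_bigf can handle it
! after the main loop; an infinity or NaN produces and stores x*0 now.
! In both cases the lane is refilled with the next element (or a zero
! dummy when the input is exhausted) and control rejoins the in-range
! checks.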
	.align	16
.range0:
	fcmpgt32 %f38,%f30,%l0
	andcc	%l0,2,%g0
	bnz,a,pt %icc,1f		! branch if finite
! delay slot, squashed if branch not taken
	mov	1,%i5			! set biguns
	fzeros	%f1
	fmuls	%f0,%f1,%f0
	st	%f0,[%o0]
1:
	addcc	%i0,-1,%i0
	ble,pn	%icc,1f
! delay slot
	nop
	ld	[%i1],%f0
	add	%i1,%i2,%i1
	mov	%i3,%o0
	add	%i3,%i4,%i3
	fabsd	%f0,%f30
	fcmple32 %f30,%f18,%l0
	andcc	%l0,2,%g0
	bz,pn	%icc,.range0
! delay slot
	nop
	ba,pt	%icc,.check1
! delay slot
	fcmple32 %f30,%f8,%l0
1:
	fzero	%f0			! set up dummy argument
	add	%fp,junk,%o0
	mov	2,%l0
	ba,pt	%icc,.check1
! delay slot
	fzero	%f30


	.align	16
.range1:
	fcmpgt32 %f38,%f32,%l1
	andcc	%l1,2,%g0
	bnz,a,pt %icc,1f		! branch if finite
! delay slot, squashed if branch not taken
	mov	1,%i5			! set biguns
	fzeros	%f3
	fmuls	%f2,%f3,%f2
	st	%f2,[%o1]
1:
	addcc	%i0,-1,%i0
	ble,pn	%icc,1f
! delay slot
	nop
	ld	[%i1],%f2
	add	%i1,%i2,%i1
	mov	%i3,%o1
	add	%i3,%i4,%i3
	fabsd	%f2,%f32
	fcmple32 %f32,%f18,%l1
	andcc	%l1,2,%g0
	bz,pn	%icc,.range1
! delay slot
	nop
	ba,pt	%icc,.check2
! delay slot
	fcmple32 %f32,%f8,%l1
1:
	fzero	%f2			! set up dummy argument
	add	%fp,junk,%o1
	mov	2,%l1
	ba,pt	%icc,.check2
! delay slot
	fzero	%f32


	.align	16
.range2:
	fcmpgt32 %f38,%f34,%l2
	andcc	%l2,2,%g0
	bnz,a,pt %icc,1f		! branch if finite
! delay slot, squashed if branch not taken
	mov	1,%i5			! set biguns
	fzeros	%f5
	fmuls	%f4,%f5,%f4
	st	%f4,[%o2]
1:
	addcc	%i0,-1,%i0
	ble,pn	%icc,1f
! delay slot
	nop
	ld	[%i1],%f4
	add	%i1,%i2,%i1
	mov	%i3,%o2
	add	%i3,%i4,%i3
	fabsd	%f4,%f34
	fcmple32 %f34,%f18,%l2
	andcc	%l2,2,%g0
	bz,pn	%icc,.range2
! delay slot
	nop
	ba,pt	%icc,.check3
! delay slot
	fcmple32 %f34,%f8,%l2
1:
	fzero	%f4			! set up dummy argument
	add	%fp,junk,%o2
	mov	2,%l2
	ba,pt	%icc,.check3
! delay slot
	fzero	%f34


	.align	16
.range3:
	fcmpgt32 %f38,%f36,%l3
	andcc	%l3,2,%g0
	bnz,a,pt %icc,1f		! branch if finite
! delay slot, squashed if branch not taken
	mov	1,%i5			! set biguns
	fzeros	%f7
	fmuls	%f6,%f7,%f6
	st	%f6,[%o3]
1:
	addcc	%i0,-1,%i0
	ble,pn	%icc,1f
! delay slot
	nop
	ld	[%i1],%f6
	add	%i1,%i2,%i1
	mov	%i3,%o3
	add	%i3,%i4,%i3
	fabsd	%f6,%f36
	fcmple32 %f36,%f18,%l3
	andcc	%l3,2,%g0
	bz,pn	%icc,.range3
! delay slot
	nop
	ba,pt	%icc,.checkprimary
! delay slot
	fcmple32 %f36,%f8,%l3
1:
	fzero	%f6			! set up dummy argument
	add	%fp,junk,%o3
	mov	2,%l3
	ba,pt	%icc,.checkprimary
! delay slot
	fzero	%f36

	SET_SIZE(__vsinf)
