/* Do not modify. This file is auto-generated from x25519-x86_64.pl. */
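#
# X25519 field-arithmetic primitives for x86_64 (CRYPTOGAMS).  The
# x25519_fe51_* routines work on elements of GF(2^255-19) stored as
# five 51-bit limbs, while the x25519_fe64_* routines use four 64-bit
# limbs and the BMI2/ADX instructions (mulx/adcx/adox) and are intended
# to be used only when x25519_fe64_eligible() reports support.
#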
.text

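# x25519_fe51_mul: multiply the radix-2^51 field elements at %rsi and
# %rdx and store the five-limb result at %rdi.  Partial products that
# wrap past 2^255 are scaled by 19 up front; the accumulated
# double-width limbs are then carried and reduced by the shared
# .Lreduce51 tail, which also performs the common epilogue.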
.globl	x25519_fe51_mul
.type	x25519_fe51_mul,@function
.align	32
x25519_fe51_mul:
.cfi_startproc
	pushq	%rbp
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbp,-16
	pushq	%rbx
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbx,-24
	pushq	%r12
.cfi_adjust_cfa_offset	8
.cfi_offset	%r12,-32
	pushq	%r13
.cfi_adjust_cfa_offset	8
.cfi_offset	%r13,-40
	pushq	%r14
.cfi_adjust_cfa_offset	8
.cfi_offset	%r14,-48
	pushq	%r15
.cfi_adjust_cfa_offset	8
.cfi_offset	%r15,-56
	leaq	-40(%rsp),%rsp
.cfi_adjust_cfa_offset	40
.Lfe51_mul_body:

	movq	0(%rsi),%rax
	movq	0(%rdx),%r11
	movq	8(%rdx),%r12
	movq	16(%rdx),%r13
	movq	24(%rdx),%rbp
	movq	32(%rdx),%r14

	movq	%rdi,32(%rsp)
	movq	%rax,%rdi
	mulq	%r11
	movq	%r11,0(%rsp)
	movq	%rax,%rbx
	movq	%rdi,%rax
	movq	%rdx,%rcx
	mulq	%r12
	movq	%r12,8(%rsp)
	movq	%rax,%r8
	movq	%rdi,%rax
	leaq	(%r14,%r14,8),%r15
	movq	%rdx,%r9
	mulq	%r13
	movq	%r13,16(%rsp)
	movq	%rax,%r10
	movq	%rdi,%rax
	leaq	(%r14,%r15,2),%rdi
	movq	%rdx,%r11
	mulq	%rbp
	movq	%rax,%r12
	movq	0(%rsi),%rax
	movq	%rdx,%r13
	mulq	%r14
	movq	%rax,%r14
	movq	8(%rsi),%rax
	movq	%rdx,%r15

	mulq	%rdi
	addq	%rax,%rbx
	movq	16(%rsi),%rax
	adcq	%rdx,%rcx
	mulq	%rdi
	addq	%rax,%r8
	movq	24(%rsi),%rax
	adcq	%rdx,%r9
	mulq	%rdi
	addq	%rax,%r10
	movq	32(%rsi),%rax
	adcq	%rdx,%r11
	mulq	%rdi
	imulq	$19,%rbp,%rdi
	addq	%rax,%r12
	movq	8(%rsi),%rax
	adcq	%rdx,%r13
	mulq	%rbp
	movq	16(%rsp),%rbp
	addq	%rax,%r14
	movq	16(%rsi),%rax
	adcq	%rdx,%r15

	mulq	%rdi
	addq	%rax,%rbx
	movq	24(%rsi),%rax
	adcq	%rdx,%rcx
	mulq	%rdi
	addq	%rax,%r8
	movq	32(%rsi),%rax
	adcq	%rdx,%r9
	mulq	%rdi
	imulq	$19,%rbp,%rdi
	addq	%rax,%r10
	movq	8(%rsi),%rax
	adcq	%rdx,%r11
	mulq	%rbp
	addq	%rax,%r12
	movq	16(%rsi),%rax
	adcq	%rdx,%r13
	mulq	%rbp
	movq	8(%rsp),%rbp
	addq	%rax,%r14
	movq	24(%rsi),%rax
	adcq	%rdx,%r15

	mulq	%rdi
	addq	%rax,%rbx
	movq	32(%rsi),%rax
	adcq	%rdx,%rcx
	mulq	%rdi
	addq	%rax,%r8
	movq	8(%rsi),%rax
	adcq	%rdx,%r9
	mulq	%rbp
	imulq	$19,%rbp,%rdi
	addq	%rax,%r10
	movq	16(%rsi),%rax
	adcq	%rdx,%r11
	mulq	%rbp
	addq	%rax,%r12
	movq	24(%rsi),%rax
	adcq	%rdx,%r13
	mulq	%rbp
	movq	0(%rsp),%rbp
	addq	%rax,%r14
	movq	32(%rsi),%rax
	adcq	%rdx,%r15

	mulq	%rdi
	addq	%rax,%rbx
	movq	8(%rsi),%rax
	adcq	%rdx,%rcx
	mulq	%rbp
	addq	%rax,%r8
	movq	16(%rsi),%rax
	adcq	%rdx,%r9
	mulq	%rbp
	addq	%rax,%r10
	movq	24(%rsi),%rax
	adcq	%rdx,%r11
	mulq	%rbp
	addq	%rax,%r12
	movq	32(%rsi),%rax
	adcq	%rdx,%r13
	mulq	%rbp
	addq	%rax,%r14
	adcq	%rdx,%r15

	movq	32(%rsp),%rdi
	jmp	.Lreduce51
.Lfe51_mul_epilogue:
.cfi_endproc
.size	x25519_fe51_mul,.-x25519_fe51_mul

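# x25519_fe51_sqr: square the radix-2^51 field element at %rsi into
# %rdi.  The symmetric cross products are computed once against doubled
# limbs, then control joins the shared .Lreduce51 tail.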
.globl	x25519_fe51_sqr
.type	x25519_fe51_sqr,@function
.align	32
x25519_fe51_sqr:
.cfi_startproc
	pushq	%rbp
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbp,-16
	pushq	%rbx
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbx,-24
	pushq	%r12
.cfi_adjust_cfa_offset	8
.cfi_offset	%r12,-32
	pushq	%r13
.cfi_adjust_cfa_offset	8
.cfi_offset	%r13,-40
	pushq	%r14
.cfi_adjust_cfa_offset	8
.cfi_offset	%r14,-48
	pushq	%r15
.cfi_adjust_cfa_offset	8
.cfi_offset	%r15,-56
	leaq	-40(%rsp),%rsp
.cfi_adjust_cfa_offset	40
.Lfe51_sqr_body:

	movq	0(%rsi),%rax
	movq	16(%rsi),%r15
	movq	32(%rsi),%rbp

	movq	%rdi,32(%rsp)
	leaq	(%rax,%rax,1),%r14
	mulq	%rax
	movq	%rax,%rbx
	movq	8(%rsi),%rax
	movq	%rdx,%rcx
	mulq	%r14
	movq	%rax,%r8
	movq	%r15,%rax
	movq	%r15,0(%rsp)
	movq	%rdx,%r9
	mulq	%r14
	movq	%rax,%r10
	movq	24(%rsi),%rax
	movq	%rdx,%r11
	imulq	$19,%rbp,%rdi
	mulq	%r14
	movq	%rax,%r12
	movq	%rbp,%rax
	movq	%rdx,%r13
	mulq	%r14
	movq	%rax,%r14
	movq	%rbp,%rax
	movq	%rdx,%r15

	mulq	%rdi
	addq	%rax,%r12
	movq	8(%rsi),%rax
	adcq	%rdx,%r13

	movq	24(%rsi),%rsi
	leaq	(%rax,%rax,1),%rbp
	mulq	%rax
	addq	%rax,%r10
	movq	0(%rsp),%rax
	adcq	%rdx,%r11
	mulq	%rbp
	addq	%rax,%r12
	movq	%rbp,%rax
	adcq	%rdx,%r13
	mulq	%rsi
	addq	%rax,%r14
	movq	%rbp,%rax
	adcq	%rdx,%r15
	imulq	$19,%rsi,%rbp
	mulq	%rdi
	addq	%rax,%rbx
	leaq	(%rsi,%rsi,1),%rax
	adcq	%rdx,%rcx

	mulq	%rdi
	addq	%rax,%r10
	movq	%rsi,%rax
	adcq	%rdx,%r11
	mulq	%rbp
	addq	%rax,%r8
	movq	0(%rsp),%rax
	adcq	%rdx,%r9

	leaq	(%rax,%rax,1),%rsi
	mulq	%rax
	addq	%rax,%r14
	movq	%rbp,%rax
	adcq	%rdx,%r15
	mulq	%rsi
	addq	%rax,%rbx
	movq	%rsi,%rax
	adcq	%rdx,%rcx
	mulq	%rdi
	addq	%rax,%r8
	adcq	%rdx,%r9

	movq	32(%rsp),%rdi
	jmp	.Lreduce51

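# .Lreduce51: shared carry propagation and epilogue for the fe51
# routines.  Each double-width limb is split at bit 51 and folded into
# the next one; overflow out of the top limb wraps around multiplied by
# 19, since 2^255 = 19 (mod 2^255-19).  %rdi holds the destination.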
.align	32
.Lreduce51:
	movq	$0x7ffffffffffff,%rbp

	movq	%r10,%rdx
	shrq	$51,%r10
	shlq	$13,%r11
	andq	%rbp,%rdx
	orq	%r10,%r11
	addq	%r11,%r12
	adcq	$0,%r13

	movq	%rbx,%rax
	shrq	$51,%rbx
	shlq	$13,%rcx
	andq	%rbp,%rax
	orq	%rbx,%rcx
	addq	%rcx,%r8
	adcq	$0,%r9

	movq	%r12,%rbx
	shrq	$51,%r12
	shlq	$13,%r13
	andq	%rbp,%rbx
	orq	%r12,%r13
	addq	%r13,%r14
	adcq	$0,%r15

	movq	%r8,%rcx
	shrq	$51,%r8
	shlq	$13,%r9
	andq	%rbp,%rcx
	orq	%r8,%r9
	addq	%r9,%rdx

	movq	%r14,%r10
	shrq	$51,%r14
	shlq	$13,%r15
	andq	%rbp,%r10
	orq	%r14,%r15

	leaq	(%r15,%r15,8),%r14
	leaq	(%r15,%r14,2),%r15
	addq	%r15,%rax

	movq	%rdx,%r8
	andq	%rbp,%rdx
	shrq	$51,%r8
	addq	%r8,%rbx

	movq	%rax,%r9
	andq	%rbp,%rax
	shrq	$51,%r9
	addq	%r9,%rcx

	movq	%rax,0(%rdi)
	movq	%rcx,8(%rdi)
	movq	%rdx,16(%rdi)
	movq	%rbx,24(%rdi)
	movq	%r10,32(%rdi)

	movq	40(%rsp),%r15
.cfi_restore	%r15
	movq	48(%rsp),%r14
.cfi_restore	%r14
	movq	56(%rsp),%r13
.cfi_restore	%r13
	movq	64(%rsp),%r12
.cfi_restore	%r12
	movq	72(%rsp),%rbx
.cfi_restore	%rbx
	movq	80(%rsp),%rbp
.cfi_restore	%rbp
	leaq	88(%rsp),%rsp
.cfi_adjust_cfa_offset	88
.Lfe51_sqr_epilogue:
	.byte	0xf3,0xc3
.cfi_endproc
.size	x25519_fe51_sqr,.-x25519_fe51_sqr

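# x25519_fe51_mul121666: multiply the radix-2^51 field element at %rsi
# by the curve constant 121666 = (486662+2)/4 and store the result at
# %rdi via the shared .Lreduce51 tail.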
.globl	x25519_fe51_mul121666
.type	x25519_fe51_mul121666,@function
.align	32
x25519_fe51_mul121666:
.cfi_startproc
	pushq	%rbp
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbp,-16
	pushq	%rbx
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbx,-24
	pushq	%r12
.cfi_adjust_cfa_offset	8
.cfi_offset	%r12,-32
	pushq	%r13
.cfi_adjust_cfa_offset	8
.cfi_offset	%r13,-40
	pushq	%r14
.cfi_adjust_cfa_offset	8
.cfi_offset	%r14,-48
	pushq	%r15
.cfi_adjust_cfa_offset	8
.cfi_offset	%r15,-56
	leaq	-40(%rsp),%rsp
.cfi_adjust_cfa_offset	40
.Lfe51_mul121666_body:
	movl	$121666,%eax

	mulq	0(%rsi)
	movq	%rax,%rbx
	movl	$121666,%eax
	movq	%rdx,%rcx
	mulq	8(%rsi)
	movq	%rax,%r8
	movl	$121666,%eax
	movq	%rdx,%r9
	mulq	16(%rsi)
	movq	%rax,%r10
	movl	$121666,%eax
	movq	%rdx,%r11
	mulq	24(%rsi)
	movq	%rax,%r12
	movl	$121666,%eax
	movq	%rdx,%r13
	mulq	32(%rsi)
	movq	%rax,%r14
	movq	%rdx,%r15

	jmp	.Lreduce51
.Lfe51_mul121666_epilogue:
.cfi_endproc
.size	x25519_fe51_mul121666,.-x25519_fe51_mul121666

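# x25519_fe64_eligible: return non-zero if the CPU advertises both BMI2
# (bit 8) and ADX (bit 19) in OPENSSL_ia32cap_P[2] (mask 0x80100), i.e.
# if the fe64 routines below can be used.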
.globl	x25519_fe64_eligible
.type	x25519_fe64_eligible,@function
.align	32
x25519_fe64_eligible:
.cfi_startproc
	movl	OPENSSL_ia32cap_P+8(%rip),%ecx
	xorl	%eax,%eax
	andl	$0x80100,%ecx
	cmpl	$0x80100,%ecx
	cmovel	%ecx,%eax
	.byte	0xf3,0xc3
.cfi_endproc
.size	x25519_fe64_eligible,.-x25519_fe64_eligible

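# x25519_fe64_mul: multiply the four-limb field elements at %rsi and
# %rdx into %rdi.  The 512-bit schoolbook product is built with
# mulx/adcx/adox dual carry chains and then folded modulo 2^255-19 by
# the shared .Lreduce64 tail (entered with %edx = 38).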
.globl	x25519_fe64_mul
.type	x25519_fe64_mul,@function
.align	32
x25519_fe64_mul:
.cfi_startproc
	pushq	%rbp
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbp,-16
	pushq	%rbx
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbx,-24
	pushq	%r12
.cfi_adjust_cfa_offset	8
.cfi_offset	%r12,-32
	pushq	%r13
.cfi_adjust_cfa_offset	8
.cfi_offset	%r13,-40
	pushq	%r14
.cfi_adjust_cfa_offset	8
.cfi_offset	%r14,-48
	pushq	%r15
.cfi_adjust_cfa_offset	8
.cfi_offset	%r15,-56
	pushq	%rdi
.cfi_adjust_cfa_offset	8
.cfi_offset	%rdi,-64
	leaq	-16(%rsp),%rsp
.cfi_adjust_cfa_offset	16
.Lfe64_mul_body:

	movq	%rdx,%rax
	movq	0(%rdx),%rbp
	movq	0(%rsi),%rdx
	movq	8(%rax),%rcx
	movq	16(%rax),%r14
	movq	24(%rax),%r15

	mulxq	%rbp,%r8,%rax
	xorl	%edi,%edi
	mulxq	%rcx,%r9,%rbx
	adcxq	%rax,%r9
	mulxq	%r14,%r10,%rax
	adcxq	%rbx,%r10
	mulxq	%r15,%r11,%r12
	movq	8(%rsi),%rdx
	adcxq	%rax,%r11
	movq	%r14,(%rsp)
	adcxq	%rdi,%r12

	mulxq	%rbp,%rax,%rbx
	adoxq	%rax,%r9
	adcxq	%rbx,%r10
	mulxq	%rcx,%rax,%rbx
	adoxq	%rax,%r10
	adcxq	%rbx,%r11
	mulxq	%r14,%rax,%rbx
	adoxq	%rax,%r11
	adcxq	%rbx,%r12
	mulxq	%r15,%rax,%r13
	movq	16(%rsi),%rdx
	adoxq	%rax,%r12
	adcxq	%rdi,%r13
	adoxq	%rdi,%r13

	mulxq	%rbp,%rax,%rbx
	adcxq	%rax,%r10
	adoxq	%rbx,%r11
	mulxq	%rcx,%rax,%rbx
	adcxq	%rax,%r11
	adoxq	%rbx,%r12
	mulxq	%r14,%rax,%rbx
	adcxq	%rax,%r12
	adoxq	%rbx,%r13
	mulxq	%r15,%rax,%r14
	movq	24(%rsi),%rdx
	adcxq	%rax,%r13
	adoxq	%rdi,%r14
	adcxq	%rdi,%r14

	mulxq	%rbp,%rax,%rbx
	adoxq	%rax,%r11
	adcxq	%rbx,%r12
	mulxq	%rcx,%rax,%rbx
	adoxq	%rax,%r12
	adcxq	%rbx,%r13
	mulxq	(%rsp),%rax,%rbx
	adoxq	%rax,%r13
	adcxq	%rbx,%r14
	mulxq	%r15,%rax,%r15
	movl	$38,%edx
	adoxq	%rax,%r14
	adcxq	%rdi,%r15
	adoxq	%rdi,%r15

	jmp	.Lreduce64
.Lfe64_mul_epilogue:
.cfi_endproc
.size	x25519_fe64_mul,.-x25519_fe64_mul

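# x25519_fe64_sqr: square the four-limb field element at %rsi into
# %rdi.  Off-diagonal products are computed once and doubled, the
# diagonal squares are added, and the result is folded by .Lreduce64.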
.globl	x25519_fe64_sqr
.type	x25519_fe64_sqr,@function
.align	32
x25519_fe64_sqr:
.cfi_startproc
	pushq	%rbp
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbp,-16
	pushq	%rbx
.cfi_adjust_cfa_offset	8
.cfi_offset	%rbx,-24
	pushq	%r12
.cfi_adjust_cfa_offset	8
.cfi_offset	%r12,-32
	pushq	%r13
.cfi_adjust_cfa_offset	8
.cfi_offset	%r13,-40
	pushq	%r14
.cfi_adjust_cfa_offset	8
.cfi_offset	%r14,-48
	pushq	%r15
.cfi_adjust_cfa_offset	8
.cfi_offset	%r15,-56
	pushq	%rdi
.cfi_adjust_cfa_offset	8
.cfi_offset	%rdi,-64
	leaq	-16(%rsp),%rsp
.cfi_adjust_cfa_offset	16
.Lfe64_sqr_body:

	movq	0(%rsi),%rdx
	movq	8(%rsi),%rcx
	movq	16(%rsi),%rbp
	movq	24(%rsi),%rsi


	mulxq	%rdx,%r8,%r15
	mulxq	%rcx,%r9,%rax
	xorl	%edi,%edi
	mulxq	%rbp,%r10,%rbx
	adcxq	%rax,%r10
	mulxq	%rsi,%r11,%r12
	movq	%rcx,%rdx
	adcxq	%rbx,%r11
	adcxq	%rdi,%r12


	mulxq	%rbp,%rax,%rbx
	adoxq	%rax,%r11
	adcxq	%rbx,%r12
	mulxq	%rsi,%rax,%r13
	movq	%rbp,%rdx
	adoxq	%rax,%r12
	adcxq	%rdi,%r13


	mulxq	%rsi,%rax,%r14
	movq	%rcx,%rdx
	adoxq	%rax,%r13
	adcxq	%rdi,%r14
	adoxq	%rdi,%r14

	adcxq	%r9,%r9
	adoxq	%r15,%r9
	adcxq	%r10,%r10
	mulxq	%rdx,%rax,%rbx
	movq	%rbp,%rdx
	adcxq	%r11,%r11
	adoxq	%rax,%r10
	adcxq	%r12,%r12
	adoxq	%rbx,%r11
	mulxq	%rdx,%rax,%rbx
	movq	%rsi,%rdx
	adcxq	%r13,%r13
	adoxq	%rax,%r12
	adcxq	%r14,%r14
	adoxq	%rbx,%r13
	mulxq	%rdx,%rax,%r15
	movl	$38,%edx
	adoxq	%rax,%r14
	adcxq	%rdi,%r15
	adoxq	%rdi,%r15
	jmp	.Lreduce64

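# .Lreduce64: shared reduction and epilogue for x25519_fe64_mul and
# x25519_fe64_sqr.  On entry %edx = 38 and %r8-%r15 hold the 512-bit
# product; the high four limbs are multiplied by 38 (2^256 = 38 mod
# 2^255-19) and folded into the low four, any remaining carry is folded
# the same way, and the result is stored at the output pointer the
# callers saved on the stack.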
.align	32
.Lreduce64:
	mulxq	%r12,%rax,%rbx
	adcxq	%rax,%r8
	adoxq	%rbx,%r9
	mulxq	%r13,%rax,%rbx
	adcxq	%rax,%r9
	adoxq	%rbx,%r10
	mulxq	%r14,%rax,%rbx
	adcxq	%rax,%r10
	adoxq	%rbx,%r11
	mulxq	%r15,%rax,%r12
	adcxq	%rax,%r11
	adoxq	%rdi,%r12
	adcxq	%rdi,%r12

	movq	16(%rsp),%rdi
	imulq	%rdx,%r12

	addq	%r12,%r8
	adcq	$0,%r9
	adcq	$0,%r10
	adcq	$0,%r11

	sbbq	%rax,%rax
	andq	$38,%rax

	addq	%rax,%r8
	movq	%r9,8(%rdi)
	movq	%r10,16(%rdi)
	movq	%r11,24(%rdi)
	movq	%r8,0(%rdi)

	movq	24(%rsp),%r15
.cfi_restore	%r15
	movq	32(%rsp),%r14
.cfi_restore	%r14
	movq	40(%rsp),%r13
.cfi_restore	%r13
	movq	48(%rsp),%r12
.cfi_restore	%r12
	movq	56(%rsp),%rbx
.cfi_restore	%rbx
	movq	64(%rsp),%rbp
.cfi_restore	%rbp
	leaq	72(%rsp),%rsp
.cfi_adjust_cfa_offset	88
.Lfe64_sqr_epilogue:
	.byte	0xf3,0xc3
.cfi_endproc
.size	x25519_fe64_sqr,.-x25519_fe64_sqr

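# x25519_fe64_mul121666: multiply the four-limb field element at %rsi
# by 121666 and reduce modulo 2^255-19, storing the result at %rdi.
# The reduction is inlined rather than shared with .Lreduce64.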
.globl	x25519_fe64_mul121666
.type	x25519_fe64_mul121666,@function
.align	32
x25519_fe64_mul121666:
.Lfe64_mul121666_body:
.cfi_startproc
	movl	$121666,%edx
	mulxq	0(%rsi),%r8,%rcx
	mulxq	8(%rsi),%r9,%rax
	addq	%rcx,%r9
	mulxq	16(%rsi),%r10,%rcx
	adcq	%rax,%r10
	mulxq	24(%rsi),%r11,%rax
	adcq	%rcx,%r11
	adcq	$0,%rax

	imulq	$38,%rax,%rax

	addq	%rax,%r8
	adcq	$0,%r9
	adcq	$0,%r10
	adcq	$0,%r11

	sbbq	%rax,%rax
	andq	$38,%rax

	addq	%rax,%r8
	movq	%r9,8(%rdi)
	movq	%r10,16(%rdi)
	movq	%r11,24(%rdi)
	movq	%r8,0(%rdi)

.Lfe64_mul121666_epilogue:
	.byte	0xf3,0xc3
.cfi_endproc
.size	x25519_fe64_mul121666,.-x25519_fe64_mul121666

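# x25519_fe64_add: add the four-limb field elements at %rsi and %rdx
# and store the sum at %rdi.  A carry out of the top limb is folded
# back as +38 (2^256 = 38 mod 2^255-19), so the result stays in the
# redundant four-limb form rather than being fully reduced.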
.globl	x25519_fe64_add
.type	x25519_fe64_add,@function
.align	32
x25519_fe64_add:
.Lfe64_add_body:
.cfi_startproc
	movq	0(%rsi),%r8
	movq	8(%rsi),%r9
	movq	16(%rsi),%r10
	movq	24(%rsi),%r11

	addq	0(%rdx),%r8
	adcq	8(%rdx),%r9
	adcq	16(%rdx),%r10
	adcq	24(%rdx),%r11

	sbbq	%rax,%rax
	andq	$38,%rax

	addq	%rax,%r8
	adcq	$0,%r9
	adcq	$0,%r10
	movq	%r9,8(%rdi)
	adcq	$0,%r11
	movq	%r10,16(%rdi)
	sbbq	%rax,%rax
	movq	%r11,24(%rdi)
	andq	$38,%rax

	addq	%rax,%r8
	movq	%r8,0(%rdi)

.Lfe64_add_epilogue:
	.byte	0xf3,0xc3
.cfi_endproc
.size	x25519_fe64_add,.-x25519_fe64_add

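# x25519_fe64_sub: subtract the four-limb field element at %rdx from
# the one at %rsi and store the difference at %rdi.  A borrow out of
# the top limb is folded back by subtracting 38, mirroring
# x25519_fe64_add.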
.globl	x25519_fe64_sub
.type	x25519_fe64_sub,@function
.align	32
x25519_fe64_sub:
.Lfe64_sub_body:
.cfi_startproc
	movq	0(%rsi),%r8
	movq	8(%rsi),%r9
	movq	16(%rsi),%r10
	movq	24(%rsi),%r11

	subq	0(%rdx),%r8
	sbbq	8(%rdx),%r9
	sbbq	16(%rdx),%r10
	sbbq	24(%rdx),%r11

	sbbq	%rax,%rax
	andq	$38,%rax

	subq	%rax,%r8
	sbbq	$0,%r9
	sbbq	$0,%r10
	movq	%r9,8(%rdi)
	sbbq	$0,%r11
	movq	%r10,16(%rdi)
	sbbq	%rax,%rax
	movq	%r11,24(%rdi)
	andq	$38,%rax

	subq	%rax,%r8
	movq	%r8,0(%rdi)

.Lfe64_sub_epilogue:
	.byte	0xf3,0xc3
.cfi_endproc
.size	x25519_fe64_sub,.-x25519_fe64_sub

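# x25519_fe64_tobytes: reduce the four-limb field element at %rsi to
# its canonical value modulo 2^255-19 and store it as 32 little-endian
# bytes at %rdi.  The add-19/conditionally-subtract-19 sequence below
# performs the final reduction without branching.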
.globl	x25519_fe64_tobytes
.type	x25519_fe64_tobytes,@function
.align	32
x25519_fe64_tobytes:
.Lfe64_to_body:
.cfi_startproc
	movq	0(%rsi),%r8
	movq	8(%rsi),%r9
	movq	16(%rsi),%r10
	movq	24(%rsi),%r11


	leaq	(%r11,%r11,1),%rax
	sarq	$63,%r11
	shrq	$1,%rax
	andq	$19,%r11
	addq	$19,%r11

	addq	%r11,%r8
	adcq	$0,%r9
	adcq	$0,%r10
	adcq	$0,%rax

	leaq	(%rax,%rax,1),%r11
	sarq	$63,%rax
	shrq	$1,%r11
	notq	%rax
	andq	$19,%rax

	subq	%rax,%r8
	sbbq	$0,%r9
	sbbq	$0,%r10
	sbbq	$0,%r11

	movq	%r8,0(%rdi)
	movq	%r9,8(%rdi)
	movq	%r10,16(%rdi)
	movq	%r11,24(%rdi)

.Lfe64_to_epilogue:
	.byte	0xf3,0xc3
.cfi_endproc
.size	x25519_fe64_tobytes,.-x25519_fe64_tobytes
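# "X25519 primitives for x86_64, CRYPTOGAMS by <appro@openssl.org>"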
.byte	88,50,53,53,49,57,32,112,114,105,109,105,116,105,118,101,115,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
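# GNU property note marking the object as compatible with Intel CET
# (GNU_PROPERTY_X86_FEATURE_1_AND: IBT and SHSTK).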
	.section ".note.gnu.property", "a"
	.p2align 3
	.long 1f - 0f
	.long 4f - 1f
	.long 5
0:
	# "GNU" encoded with .byte, since .asciz isn't supported
	# on Solaris.
	.byte 0x47
	.byte 0x4e
	.byte 0x55
	.byte 0
1:
	.p2align 3
	.long 0xc0000002
	.long 3f - 2f
2:
	.long 3
3:
	.p2align 3
4:
