xref: /freebsd/sys/crypto/openssl/amd64/aesni-sha256-x86_64.S (revision a03411e84728e9b267056fd31c7d1d9d1dc1b01e)
1/* Do not modify. This file is auto-generated from aesni-sha256-x86_64.pl. */
2.text
3
4
# aesni_cbc_sha256_enc — entry point / CPU-feature dispatcher for the
# stitched AESNI-CBC + SHA-256 implementations below.
# Calling with a NULL first argument (%rdi == 0) is a capability probe:
# it returns the low word of OPENSSL_ia32cap_P (or 1 / 0, see below)
# without encrypting anything.  Otherwise control tail-jumps to the
# best available variant.  NOTE(review): generated code — do not edit.
5.globl	aesni_cbc_sha256_enc
6.type	aesni_cbc_sha256_enc,@function
7.align	16
8aesni_cbc_sha256_enc:
9.cfi_startproc
10	leaq	OPENSSL_ia32cap_P(%rip),%r11
11	movl	$1,%eax
12	cmpq	$0,%rdi
13	je	.Lprobe
14	movl	0(%r11),%eax
15	movq	4(%r11),%r10
# bit 61 of the combined capability qword = SHA extensions -> SHAEXT path
16	btq	$61,%r10
17	jc	aesni_cbc_sha256_enc_shaext
18	movq	%r10,%r11
19	shrq	$32,%r11
20
# $2048 = bit 11 (XOP) of the second capability dword -> XOP path
21	testl	$2048,%r10d
22	jnz	aesni_cbc_sha256_enc_xop
# $296 = 0x128: AVX2+BMI1+BMI2 bits of the third dword must all be set
23	andl	$296,%r11d
24	cmpl	$296,%r11d
25	je	aesni_cbc_sha256_enc_avx2
# $268435456 = 1<<28 (AVX bit) -> AVX path
26	andl	$268435456,%r10d
27	jnz	aesni_cbc_sha256_enc_avx
# no accelerated path was compiled in; trap rather than fall through
28	ud2
29	xorl	%eax,%eax
30	cmpq	$0,%rdi
31	je	.Lprobe
32	ud2
.Lprobe:
# .byte 0xf3,0xc3 = rep ret (legacy AMD branch-predictor idiom)
34	.byte	0xf3,0xc3
35.cfi_endproc
36.size	aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc
37
38.align	64
39.type	K256,@object
# SHA-256 round constants (FIPS 180-4 K[0..63]).  Each 4-constant row is
# duplicated so the same table serves code that pre-adds constants to
# two 16-byte lanes at matching offsets.
40K256:
41.long	0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
42.long	0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
43.long	0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
44.long	0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
45.long	0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
46.long	0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
47.long	0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
48.long	0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
49.long	0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
50.long	0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
51.long	0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
52.long	0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
53.long	0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
54.long	0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
55.long	0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
56.long	0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
57.long	0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
58.long	0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
59.long	0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
60.long	0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
61.long	0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
62.long	0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
63.long	0xd192e819,0xd6990624,0xf40e3585,0x106aa070
64.long	0xd192e819,0xd6990624,0xf40e3585,0x106aa070
65.long	0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
66.long	0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
67.long	0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
68.long	0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
69.long	0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
70.long	0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
71.long	0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
72.long	0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
73
# K256+512: pshufb mask that byte-swaps each 32-bit lane (LE -> BE).
74.long	0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
75.long	0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
# K256+544 onward: byte-select masks indexed by (AES rounds - 9); the
# code loads xmm14/xmm13/xmm12 from here — presumably to splice the
# last AES round's output per key size; TODO confirm against the .pl.
76.long	0,0,0,0,   0,0,0,0,   -1,-1,-1,-1
77.long	0,0,0,0,   0,0,0,0
# ASCII credit string: "AESNI-CBC+SHA256 stitch for x86_64, CRYPTOGAMS
# by <appro@openssl.org>\0"
78.byte	65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
79.align	64
79.align	64
# aesni_cbc_sha256_enc_xop — AES-128/192/256-CBC encryption stitched with
# SHA-256 over the same data, XOP (AMD Bulldozer) variant.
# Args (SysV): rdi=in, rsi=out, rdx=64-byte blocks, rcx=AES key schedule,
# r8=IV, r9=SHA256 state, 8(%rsp)=in0 (buffer actually hashed) — per the
# CRYPTOGAMS stitch convention; TODO confirm against aesni-sha256-x86_64.pl.
# The .byte 143,232,120,194,… sequences are hand-encoded XOP rotate
# instructions (0x8F XOP escape); SHA rounds and vaesenc AES rounds are
# interleaved for throughput.  NOTE(review): generated code — do not edit.
80.type	aesni_cbc_sha256_enc_xop,@function
81.align	64
82aesni_cbc_sha256_enc_xop:
83.cfi_startproc
84.Lxop_shortcut:
# fetch 7th argument (in0) from the caller's stack; save rsp + callee-saved
85	movq	8(%rsp),%r10
86	movq	%rsp,%rax
87.cfi_def_cfa_register	%rax
88	pushq	%rbx
89.cfi_offset	%rbx,-16
90	pushq	%rbp
91.cfi_offset	%rbp,-24
92	pushq	%r12
93.cfi_offset	%r12,-32
94	pushq	%r13
95.cfi_offset	%r13,-40
96	pushq	%r14
97.cfi_offset	%r14,-48
98	pushq	%r15
99.cfi_offset	%r15,-56
# 64-byte-aligned frame: 0..63 = message schedule, 64+.. = pointer spills,
# 120(%rsp) = saved original rsp
100	subq	$128,%rsp
101	andq	$-64,%rsp
102
# rdx = in + blocks*64 (end pointer); out and in0 kept as offsets from in
103	shlq	$6,%rdx
104	subq	%rdi,%rsi
105	subq	%rdi,%r10
106	addq	%rdi,%rdx
107
108
109	movq	%rsi,64+8(%rsp)
110	movq	%rdx,64+16(%rsp)
111
112	movq	%r8,64+32(%rsp)
113	movq	%r9,64+40(%rsp)
114	movq	%r10,64+48(%rsp)
115	movq	%rax,120(%rsp)
116.cfi_escape	0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
117.Lprologue_xop:
118	vzeroall
119
# r12 = input ptr, rdi = key schedule biased by +128, r14 = AES rounds - 9
# (from offset 240 of the key schedule), xmm8 = IV / CBC chaining value
120	movq	%rdi,%r12
121	leaq	128(%rcx),%rdi
122	leaq	K256+544(%rip),%r13
123	movl	240-128(%rdi),%r14d
124	movq	%r9,%r15
125	movq	%r10,%rsi
126	vmovdqu	(%r8),%xmm8
127	subq	$9,%r14
128
# load the eight 32-bit SHA-256 state words a..h
129	movl	0(%r15),%eax
130	movl	4(%r15),%ebx
131	movl	8(%r15),%ecx
132	movl	12(%r15),%edx
133	movl	16(%r15),%r8d
134	movl	20(%r15),%r9d
135	movl	24(%r15),%r10d
136	movl	28(%r15),%r11d
137
# key-size dependent masks (indexed by r14 = rounds-9) and round key 0
138	vmovdqa	0(%r13,%r14,8),%xmm14
139	vmovdqa	16(%r13,%r14,8),%xmm13
140	vmovdqa	32(%r13,%r14,8),%xmm12
141	vmovdqu	0-128(%rdi),%xmm10
142	jmp	.Lloop_xop
143.align	16
# main loop: load one 64-byte message block, byteswap each 32-bit word to
# big-endian, pre-add the first 4 rows of K256 and spill to the frame
144.Lloop_xop:
145	vmovdqa	K256+512(%rip),%xmm7
146	vmovdqu	0(%rsi,%r12,1),%xmm0
147	vmovdqu	16(%rsi,%r12,1),%xmm1
148	vmovdqu	32(%rsi,%r12,1),%xmm2
149	vmovdqu	48(%rsi,%r12,1),%xmm3
150	vpshufb	%xmm7,%xmm0,%xmm0
151	leaq	K256(%rip),%rbp
152	vpshufb	%xmm7,%xmm1,%xmm1
153	vpshufb	%xmm7,%xmm2,%xmm2
154	vpaddd	0(%rbp),%xmm0,%xmm4
155	vpshufb	%xmm7,%xmm3,%xmm3
156	vpaddd	32(%rbp),%xmm1,%xmm5
157	vpaddd	64(%rbp),%xmm2,%xmm6
158	vpaddd	96(%rbp),%xmm3,%xmm7
159	vmovdqa	%xmm4,0(%rsp)
160	movl	%eax,%r14d
161	vmovdqa	%xmm5,16(%rsp)
162	movl	%ebx,%esi
163	vmovdqa	%xmm6,32(%rsp)
164	xorl	%ecx,%esi
165	vmovdqa	%xmm7,48(%rsp)
166	movl	%r8d,%r13d
167	jmp	.Lxop_00_47
168
169.align	16
# 16 SHA-256 rounds per pass with on-the-fly message-schedule expansion
# (sigma0/sigma1 via XOP rotates), one vaesenc AES round interleaved every
# few SHA rounds; rbp advances 128 bytes of K256 per pass
170.Lxop_00_47:
171	subq	$-32*4,%rbp
# load next plaintext block and XOR with round key 0 + previous ciphertext
172	vmovdqu	(%r12),%xmm9
173	movq	%r12,64+0(%rsp)
174	vpalignr	$4,%xmm0,%xmm1,%xmm4
175	rorl	$14,%r13d
176	movl	%r14d,%eax
177	vpalignr	$4,%xmm2,%xmm3,%xmm7
178	movl	%r9d,%r12d
179	xorl	%r8d,%r13d
180.byte	143,232,120,194,236,14
181	rorl	$9,%r14d
182	xorl	%r10d,%r12d
183	vpsrld	$3,%xmm4,%xmm4
184	rorl	$5,%r13d
185	xorl	%eax,%r14d
186	vpaddd	%xmm7,%xmm0,%xmm0
187	andl	%r8d,%r12d
188	vpxor	%xmm10,%xmm9,%xmm9
189	vmovdqu	16-128(%rdi),%xmm10
190	xorl	%r8d,%r13d
191	addl	0(%rsp),%r11d
192	movl	%eax,%r15d
193.byte	143,232,120,194,245,11
194	rorl	$11,%r14d
195	xorl	%r10d,%r12d
196	vpxor	%xmm5,%xmm4,%xmm4
197	xorl	%ebx,%r15d
198	rorl	$6,%r13d
199	addl	%r12d,%r11d
200	andl	%r15d,%esi
201.byte	143,232,120,194,251,13
202	xorl	%eax,%r14d
203	addl	%r13d,%r11d
204	vpxor	%xmm6,%xmm4,%xmm4
205	xorl	%ebx,%esi
206	addl	%r11d,%edx
207	vpsrld	$10,%xmm3,%xmm6
208	rorl	$2,%r14d
209	addl	%esi,%r11d
210	vpaddd	%xmm4,%xmm0,%xmm0
211	movl	%edx,%r13d
212	addl	%r11d,%r14d
213.byte	143,232,120,194,239,2
214	rorl	$14,%r13d
215	movl	%r14d,%r11d
216	vpxor	%xmm6,%xmm7,%xmm7
217	movl	%r8d,%r12d
218	xorl	%edx,%r13d
219	rorl	$9,%r14d
220	xorl	%r9d,%r12d
221	vpxor	%xmm5,%xmm7,%xmm7
222	rorl	$5,%r13d
223	xorl	%r11d,%r14d
224	andl	%edx,%r12d
225	vpxor	%xmm8,%xmm9,%xmm9
226	xorl	%edx,%r13d
227	vpsrldq	$8,%xmm7,%xmm7
228	addl	4(%rsp),%r10d
229	movl	%r11d,%esi
230	rorl	$11,%r14d
231	xorl	%r9d,%r12d
232	vpaddd	%xmm7,%xmm0,%xmm0
233	xorl	%eax,%esi
234	rorl	$6,%r13d
235	addl	%r12d,%r10d
236	andl	%esi,%r15d
237.byte	143,232,120,194,248,13
238	xorl	%r11d,%r14d
239	addl	%r13d,%r10d
240	vpsrld	$10,%xmm0,%xmm6
241	xorl	%eax,%r15d
242	addl	%r10d,%ecx
243.byte	143,232,120,194,239,2
244	rorl	$2,%r14d
245	addl	%r15d,%r10d
246	vpxor	%xmm6,%xmm7,%xmm7
247	movl	%ecx,%r13d
248	addl	%r10d,%r14d
249	rorl	$14,%r13d
250	movl	%r14d,%r10d
251	vpxor	%xmm5,%xmm7,%xmm7
252	movl	%edx,%r12d
253	xorl	%ecx,%r13d
254	rorl	$9,%r14d
255	xorl	%r8d,%r12d
256	vpslldq	$8,%xmm7,%xmm7
257	rorl	$5,%r13d
258	xorl	%r10d,%r14d
259	andl	%ecx,%r12d
260	vaesenc	%xmm10,%xmm9,%xmm9
261	vmovdqu	32-128(%rdi),%xmm10
262	xorl	%ecx,%r13d
263	vpaddd	%xmm7,%xmm0,%xmm0
264	addl	8(%rsp),%r9d
265	movl	%r10d,%r15d
266	rorl	$11,%r14d
267	xorl	%r8d,%r12d
268	vpaddd	0(%rbp),%xmm0,%xmm6
269	xorl	%r11d,%r15d
270	rorl	$6,%r13d
271	addl	%r12d,%r9d
272	andl	%r15d,%esi
273	xorl	%r10d,%r14d
274	addl	%r13d,%r9d
275	xorl	%r11d,%esi
276	addl	%r9d,%ebx
277	rorl	$2,%r14d
278	addl	%esi,%r9d
279	movl	%ebx,%r13d
280	addl	%r9d,%r14d
281	rorl	$14,%r13d
282	movl	%r14d,%r9d
283	movl	%ecx,%r12d
284	xorl	%ebx,%r13d
285	rorl	$9,%r14d
286	xorl	%edx,%r12d
287	rorl	$5,%r13d
288	xorl	%r9d,%r14d
289	andl	%ebx,%r12d
290	vaesenc	%xmm10,%xmm9,%xmm9
291	vmovdqu	48-128(%rdi),%xmm10
292	xorl	%ebx,%r13d
293	addl	12(%rsp),%r8d
294	movl	%r9d,%esi
295	rorl	$11,%r14d
296	xorl	%edx,%r12d
297	xorl	%r10d,%esi
298	rorl	$6,%r13d
299	addl	%r12d,%r8d
300	andl	%esi,%r15d
301	xorl	%r9d,%r14d
302	addl	%r13d,%r8d
303	xorl	%r10d,%r15d
304	addl	%r8d,%eax
305	rorl	$2,%r14d
306	addl	%r15d,%r8d
307	movl	%eax,%r13d
308	addl	%r8d,%r14d
309	vmovdqa	%xmm6,0(%rsp)
310	vpalignr	$4,%xmm1,%xmm2,%xmm4
311	rorl	$14,%r13d
312	movl	%r14d,%r8d
313	vpalignr	$4,%xmm3,%xmm0,%xmm7
314	movl	%ebx,%r12d
315	xorl	%eax,%r13d
316.byte	143,232,120,194,236,14
317	rorl	$9,%r14d
318	xorl	%ecx,%r12d
319	vpsrld	$3,%xmm4,%xmm4
320	rorl	$5,%r13d
321	xorl	%r8d,%r14d
322	vpaddd	%xmm7,%xmm1,%xmm1
323	andl	%eax,%r12d
324	vaesenc	%xmm10,%xmm9,%xmm9
325	vmovdqu	64-128(%rdi),%xmm10
326	xorl	%eax,%r13d
327	addl	16(%rsp),%edx
328	movl	%r8d,%r15d
329.byte	143,232,120,194,245,11
330	rorl	$11,%r14d
331	xorl	%ecx,%r12d
332	vpxor	%xmm5,%xmm4,%xmm4
333	xorl	%r9d,%r15d
334	rorl	$6,%r13d
335	addl	%r12d,%edx
336	andl	%r15d,%esi
337.byte	143,232,120,194,248,13
338	xorl	%r8d,%r14d
339	addl	%r13d,%edx
340	vpxor	%xmm6,%xmm4,%xmm4
341	xorl	%r9d,%esi
342	addl	%edx,%r11d
343	vpsrld	$10,%xmm0,%xmm6
344	rorl	$2,%r14d
345	addl	%esi,%edx
346	vpaddd	%xmm4,%xmm1,%xmm1
347	movl	%r11d,%r13d
348	addl	%edx,%r14d
349.byte	143,232,120,194,239,2
350	rorl	$14,%r13d
351	movl	%r14d,%edx
352	vpxor	%xmm6,%xmm7,%xmm7
353	movl	%eax,%r12d
354	xorl	%r11d,%r13d
355	rorl	$9,%r14d
356	xorl	%ebx,%r12d
357	vpxor	%xmm5,%xmm7,%xmm7
358	rorl	$5,%r13d
359	xorl	%edx,%r14d
360	andl	%r11d,%r12d
361	vaesenc	%xmm10,%xmm9,%xmm9
362	vmovdqu	80-128(%rdi),%xmm10
363	xorl	%r11d,%r13d
364	vpsrldq	$8,%xmm7,%xmm7
365	addl	20(%rsp),%ecx
366	movl	%edx,%esi
367	rorl	$11,%r14d
368	xorl	%ebx,%r12d
369	vpaddd	%xmm7,%xmm1,%xmm1
370	xorl	%r8d,%esi
371	rorl	$6,%r13d
372	addl	%r12d,%ecx
373	andl	%esi,%r15d
374.byte	143,232,120,194,249,13
375	xorl	%edx,%r14d
376	addl	%r13d,%ecx
377	vpsrld	$10,%xmm1,%xmm6
378	xorl	%r8d,%r15d
379	addl	%ecx,%r10d
380.byte	143,232,120,194,239,2
381	rorl	$2,%r14d
382	addl	%r15d,%ecx
383	vpxor	%xmm6,%xmm7,%xmm7
384	movl	%r10d,%r13d
385	addl	%ecx,%r14d
386	rorl	$14,%r13d
387	movl	%r14d,%ecx
388	vpxor	%xmm5,%xmm7,%xmm7
389	movl	%r11d,%r12d
390	xorl	%r10d,%r13d
391	rorl	$9,%r14d
392	xorl	%eax,%r12d
393	vpslldq	$8,%xmm7,%xmm7
394	rorl	$5,%r13d
395	xorl	%ecx,%r14d
396	andl	%r10d,%r12d
397	vaesenc	%xmm10,%xmm9,%xmm9
398	vmovdqu	96-128(%rdi),%xmm10
399	xorl	%r10d,%r13d
400	vpaddd	%xmm7,%xmm1,%xmm1
401	addl	24(%rsp),%ebx
402	movl	%ecx,%r15d
403	rorl	$11,%r14d
404	xorl	%eax,%r12d
405	vpaddd	32(%rbp),%xmm1,%xmm6
406	xorl	%edx,%r15d
407	rorl	$6,%r13d
408	addl	%r12d,%ebx
409	andl	%r15d,%esi
410	xorl	%ecx,%r14d
411	addl	%r13d,%ebx
412	xorl	%edx,%esi
413	addl	%ebx,%r9d
414	rorl	$2,%r14d
415	addl	%esi,%ebx
416	movl	%r9d,%r13d
417	addl	%ebx,%r14d
418	rorl	$14,%r13d
419	movl	%r14d,%ebx
420	movl	%r10d,%r12d
421	xorl	%r9d,%r13d
422	rorl	$9,%r14d
423	xorl	%r11d,%r12d
424	rorl	$5,%r13d
425	xorl	%ebx,%r14d
426	andl	%r9d,%r12d
427	vaesenc	%xmm10,%xmm9,%xmm9
428	vmovdqu	112-128(%rdi),%xmm10
429	xorl	%r9d,%r13d
430	addl	28(%rsp),%eax
431	movl	%ebx,%esi
432	rorl	$11,%r14d
433	xorl	%r11d,%r12d
434	xorl	%ecx,%esi
435	rorl	$6,%r13d
436	addl	%r12d,%eax
437	andl	%esi,%r15d
438	xorl	%ebx,%r14d
439	addl	%r13d,%eax
440	xorl	%ecx,%r15d
441	addl	%eax,%r8d
442	rorl	$2,%r14d
443	addl	%r15d,%eax
444	movl	%r8d,%r13d
445	addl	%eax,%r14d
446	vmovdqa	%xmm6,16(%rsp)
447	vpalignr	$4,%xmm2,%xmm3,%xmm4
448	rorl	$14,%r13d
449	movl	%r14d,%eax
450	vpalignr	$4,%xmm0,%xmm1,%xmm7
451	movl	%r9d,%r12d
452	xorl	%r8d,%r13d
453.byte	143,232,120,194,236,14
454	rorl	$9,%r14d
455	xorl	%r10d,%r12d
456	vpsrld	$3,%xmm4,%xmm4
457	rorl	$5,%r13d
458	xorl	%eax,%r14d
459	vpaddd	%xmm7,%xmm2,%xmm2
460	andl	%r8d,%r12d
461	vaesenc	%xmm10,%xmm9,%xmm9
462	vmovdqu	128-128(%rdi),%xmm10
463	xorl	%r8d,%r13d
464	addl	32(%rsp),%r11d
465	movl	%eax,%r15d
466.byte	143,232,120,194,245,11
467	rorl	$11,%r14d
468	xorl	%r10d,%r12d
469	vpxor	%xmm5,%xmm4,%xmm4
470	xorl	%ebx,%r15d
471	rorl	$6,%r13d
472	addl	%r12d,%r11d
473	andl	%r15d,%esi
474.byte	143,232,120,194,249,13
475	xorl	%eax,%r14d
476	addl	%r13d,%r11d
477	vpxor	%xmm6,%xmm4,%xmm4
478	xorl	%ebx,%esi
479	addl	%r11d,%edx
480	vpsrld	$10,%xmm1,%xmm6
481	rorl	$2,%r14d
482	addl	%esi,%r11d
483	vpaddd	%xmm4,%xmm2,%xmm2
484	movl	%edx,%r13d
485	addl	%r11d,%r14d
486.byte	143,232,120,194,239,2
487	rorl	$14,%r13d
488	movl	%r14d,%r11d
489	vpxor	%xmm6,%xmm7,%xmm7
490	movl	%r8d,%r12d
491	xorl	%edx,%r13d
492	rorl	$9,%r14d
493	xorl	%r9d,%r12d
494	vpxor	%xmm5,%xmm7,%xmm7
495	rorl	$5,%r13d
496	xorl	%r11d,%r14d
497	andl	%edx,%r12d
498	vaesenc	%xmm10,%xmm9,%xmm9
499	vmovdqu	144-128(%rdi),%xmm10
500	xorl	%edx,%r13d
501	vpsrldq	$8,%xmm7,%xmm7
502	addl	36(%rsp),%r10d
503	movl	%r11d,%esi
504	rorl	$11,%r14d
505	xorl	%r9d,%r12d
506	vpaddd	%xmm7,%xmm2,%xmm2
507	xorl	%eax,%esi
508	rorl	$6,%r13d
509	addl	%r12d,%r10d
510	andl	%esi,%r15d
511.byte	143,232,120,194,250,13
512	xorl	%r11d,%r14d
513	addl	%r13d,%r10d
514	vpsrld	$10,%xmm2,%xmm6
515	xorl	%eax,%r15d
516	addl	%r10d,%ecx
517.byte	143,232,120,194,239,2
518	rorl	$2,%r14d
519	addl	%r15d,%r10d
520	vpxor	%xmm6,%xmm7,%xmm7
521	movl	%ecx,%r13d
522	addl	%r10d,%r14d
523	rorl	$14,%r13d
524	movl	%r14d,%r10d
525	vpxor	%xmm5,%xmm7,%xmm7
526	movl	%edx,%r12d
527	xorl	%ecx,%r13d
528	rorl	$9,%r14d
529	xorl	%r8d,%r12d
530	vpslldq	$8,%xmm7,%xmm7
531	rorl	$5,%r13d
532	xorl	%r10d,%r14d
533	andl	%ecx,%r12d
534	vaesenc	%xmm10,%xmm9,%xmm9
535	vmovdqu	160-128(%rdi),%xmm10
536	xorl	%ecx,%r13d
537	vpaddd	%xmm7,%xmm2,%xmm2
538	addl	40(%rsp),%r9d
539	movl	%r10d,%r15d
540	rorl	$11,%r14d
541	xorl	%r8d,%r12d
542	vpaddd	64(%rbp),%xmm2,%xmm6
543	xorl	%r11d,%r15d
544	rorl	$6,%r13d
545	addl	%r12d,%r9d
546	andl	%r15d,%esi
547	xorl	%r10d,%r14d
548	addl	%r13d,%r9d
549	xorl	%r11d,%esi
550	addl	%r9d,%ebx
551	rorl	$2,%r14d
552	addl	%esi,%r9d
553	movl	%ebx,%r13d
554	addl	%r9d,%r14d
555	rorl	$14,%r13d
556	movl	%r14d,%r9d
557	movl	%ecx,%r12d
558	xorl	%ebx,%r13d
559	rorl	$9,%r14d
560	xorl	%edx,%r12d
561	rorl	$5,%r13d
562	xorl	%r9d,%r14d
563	andl	%ebx,%r12d
# speculative last AES round; the correct result is later selected from
# xmm11/xmm9 via the key-size masks (xmm12/xmm13/xmm14)
564	vaesenclast	%xmm10,%xmm9,%xmm11
565	vaesenc	%xmm10,%xmm9,%xmm9
566	vmovdqu	176-128(%rdi),%xmm10
567	xorl	%ebx,%r13d
568	addl	44(%rsp),%r8d
569	movl	%r9d,%esi
570	rorl	$11,%r14d
571	xorl	%edx,%r12d
572	xorl	%r10d,%esi
573	rorl	$6,%r13d
574	addl	%r12d,%r8d
575	andl	%esi,%r15d
576	xorl	%r9d,%r14d
577	addl	%r13d,%r8d
578	xorl	%r10d,%r15d
579	addl	%r8d,%eax
580	rorl	$2,%r14d
581	addl	%r15d,%r8d
582	movl	%eax,%r13d
583	addl	%r8d,%r14d
584	vmovdqa	%xmm6,32(%rsp)
585	vpalignr	$4,%xmm3,%xmm0,%xmm4
586	rorl	$14,%r13d
587	movl	%r14d,%r8d
588	vpalignr	$4,%xmm1,%xmm2,%xmm7
589	movl	%ebx,%r12d
590	xorl	%eax,%r13d
591.byte	143,232,120,194,236,14
592	rorl	$9,%r14d
593	xorl	%ecx,%r12d
594	vpsrld	$3,%xmm4,%xmm4
595	rorl	$5,%r13d
596	xorl	%r8d,%r14d
597	vpaddd	%xmm7,%xmm3,%xmm3
598	andl	%eax,%r12d
599	vpand	%xmm12,%xmm11,%xmm8
600	vaesenc	%xmm10,%xmm9,%xmm9
601	vmovdqu	192-128(%rdi),%xmm10
602	xorl	%eax,%r13d
603	addl	48(%rsp),%edx
604	movl	%r8d,%r15d
605.byte	143,232,120,194,245,11
606	rorl	$11,%r14d
607	xorl	%ecx,%r12d
608	vpxor	%xmm5,%xmm4,%xmm4
609	xorl	%r9d,%r15d
610	rorl	$6,%r13d
611	addl	%r12d,%edx
612	andl	%r15d,%esi
613.byte	143,232,120,194,250,13
614	xorl	%r8d,%r14d
615	addl	%r13d,%edx
616	vpxor	%xmm6,%xmm4,%xmm4
617	xorl	%r9d,%esi
618	addl	%edx,%r11d
619	vpsrld	$10,%xmm2,%xmm6
620	rorl	$2,%r14d
621	addl	%esi,%edx
622	vpaddd	%xmm4,%xmm3,%xmm3
623	movl	%r11d,%r13d
624	addl	%edx,%r14d
625.byte	143,232,120,194,239,2
626	rorl	$14,%r13d
627	movl	%r14d,%edx
628	vpxor	%xmm6,%xmm7,%xmm7
629	movl	%eax,%r12d
630	xorl	%r11d,%r13d
631	rorl	$9,%r14d
632	xorl	%ebx,%r12d
633	vpxor	%xmm5,%xmm7,%xmm7
634	rorl	$5,%r13d
635	xorl	%edx,%r14d
636	andl	%r11d,%r12d
637	vaesenclast	%xmm10,%xmm9,%xmm11
638	vaesenc	%xmm10,%xmm9,%xmm9
639	vmovdqu	208-128(%rdi),%xmm10
640	xorl	%r11d,%r13d
641	vpsrldq	$8,%xmm7,%xmm7
642	addl	52(%rsp),%ecx
643	movl	%edx,%esi
644	rorl	$11,%r14d
645	xorl	%ebx,%r12d
646	vpaddd	%xmm7,%xmm3,%xmm3
647	xorl	%r8d,%esi
648	rorl	$6,%r13d
649	addl	%r12d,%ecx
650	andl	%esi,%r15d
651.byte	143,232,120,194,251,13
652	xorl	%edx,%r14d
653	addl	%r13d,%ecx
654	vpsrld	$10,%xmm3,%xmm6
655	xorl	%r8d,%r15d
656	addl	%ecx,%r10d
657.byte	143,232,120,194,239,2
658	rorl	$2,%r14d
659	addl	%r15d,%ecx
660	vpxor	%xmm6,%xmm7,%xmm7
661	movl	%r10d,%r13d
662	addl	%ecx,%r14d
663	rorl	$14,%r13d
664	movl	%r14d,%ecx
665	vpxor	%xmm5,%xmm7,%xmm7
666	movl	%r11d,%r12d
667	xorl	%r10d,%r13d
668	rorl	$9,%r14d
669	xorl	%eax,%r12d
670	vpslldq	$8,%xmm7,%xmm7
671	rorl	$5,%r13d
672	xorl	%ecx,%r14d
673	andl	%r10d,%r12d
674	vpand	%xmm13,%xmm11,%xmm11
675	vaesenc	%xmm10,%xmm9,%xmm9
676	vmovdqu	224-128(%rdi),%xmm10
677	xorl	%r10d,%r13d
678	vpaddd	%xmm7,%xmm3,%xmm3
679	addl	56(%rsp),%ebx
680	movl	%ecx,%r15d
681	rorl	$11,%r14d
682	xorl	%eax,%r12d
683	vpaddd	96(%rbp),%xmm3,%xmm6
684	xorl	%edx,%r15d
685	rorl	$6,%r13d
686	addl	%r12d,%ebx
687	andl	%r15d,%esi
688	xorl	%ecx,%r14d
689	addl	%r13d,%ebx
690	xorl	%edx,%esi
691	addl	%ebx,%r9d
692	rorl	$2,%r14d
693	addl	%esi,%ebx
694	movl	%r9d,%r13d
695	addl	%ebx,%r14d
696	rorl	$14,%r13d
697	movl	%r14d,%ebx
698	movl	%r10d,%r12d
699	xorl	%r9d,%r13d
700	rorl	$9,%r14d
701	xorl	%r11d,%r12d
702	rorl	$5,%r13d
703	xorl	%ebx,%r14d
704	andl	%r9d,%r12d
705	vpor	%xmm11,%xmm8,%xmm8
706	vaesenclast	%xmm10,%xmm9,%xmm11
707	vmovdqu	0-128(%rdi),%xmm10
708	xorl	%r9d,%r13d
709	addl	60(%rsp),%eax
710	movl	%ebx,%esi
711	rorl	$11,%r14d
712	xorl	%r11d,%r12d
713	xorl	%ecx,%esi
714	rorl	$6,%r13d
715	addl	%r12d,%eax
716	andl	%esi,%r15d
717	xorl	%ebx,%r14d
718	addl	%r13d,%eax
719	xorl	%ecx,%r15d
720	addl	%eax,%r8d
721	rorl	$2,%r14d
722	addl	%r15d,%eax
723	movl	%r8d,%r13d
724	addl	%eax,%r14d
725	vmovdqa	%xmm6,48(%rsp)
# emit one ciphertext block (xmm8 = merged vaesenclast result), advance r12
726	movq	64+0(%rsp),%r12
727	vpand	%xmm14,%xmm11,%xmm11
728	movq	64+8(%rsp),%r15
729	vpor	%xmm11,%xmm8,%xmm8
730	vmovdqu	%xmm8,(%r15,%r12,1)
731	leaq	16(%r12),%r12
# byte flag just past the current 128-byte K256 chunk marks the last pass —
# presumably baked in by the generator; TODO confirm offset 131 semantics
732	cmpb	$0,131(%rbp)
733	jne	.Lxop_00_47
# final 16 rounds: schedule already in 0..63(%rsp), no message expansion;
# AES rounds for the next block remain interleaved
734	vmovdqu	(%r12),%xmm9
735	movq	%r12,64+0(%rsp)
736	rorl	$14,%r13d
737	movl	%r14d,%eax
738	movl	%r9d,%r12d
739	xorl	%r8d,%r13d
740	rorl	$9,%r14d
741	xorl	%r10d,%r12d
742	rorl	$5,%r13d
743	xorl	%eax,%r14d
744	andl	%r8d,%r12d
745	vpxor	%xmm10,%xmm9,%xmm9
746	vmovdqu	16-128(%rdi),%xmm10
747	xorl	%r8d,%r13d
748	addl	0(%rsp),%r11d
749	movl	%eax,%r15d
750	rorl	$11,%r14d
751	xorl	%r10d,%r12d
752	xorl	%ebx,%r15d
753	rorl	$6,%r13d
754	addl	%r12d,%r11d
755	andl	%r15d,%esi
756	xorl	%eax,%r14d
757	addl	%r13d,%r11d
758	xorl	%ebx,%esi
759	addl	%r11d,%edx
760	rorl	$2,%r14d
761	addl	%esi,%r11d
762	movl	%edx,%r13d
763	addl	%r11d,%r14d
764	rorl	$14,%r13d
765	movl	%r14d,%r11d
766	movl	%r8d,%r12d
767	xorl	%edx,%r13d
768	rorl	$9,%r14d
769	xorl	%r9d,%r12d
770	rorl	$5,%r13d
771	xorl	%r11d,%r14d
772	andl	%edx,%r12d
773	vpxor	%xmm8,%xmm9,%xmm9
774	xorl	%edx,%r13d
775	addl	4(%rsp),%r10d
776	movl	%r11d,%esi
777	rorl	$11,%r14d
778	xorl	%r9d,%r12d
779	xorl	%eax,%esi
780	rorl	$6,%r13d
781	addl	%r12d,%r10d
782	andl	%esi,%r15d
783	xorl	%r11d,%r14d
784	addl	%r13d,%r10d
785	xorl	%eax,%r15d
786	addl	%r10d,%ecx
787	rorl	$2,%r14d
788	addl	%r15d,%r10d
789	movl	%ecx,%r13d
790	addl	%r10d,%r14d
791	rorl	$14,%r13d
792	movl	%r14d,%r10d
793	movl	%edx,%r12d
794	xorl	%ecx,%r13d
795	rorl	$9,%r14d
796	xorl	%r8d,%r12d
797	rorl	$5,%r13d
798	xorl	%r10d,%r14d
799	andl	%ecx,%r12d
800	vaesenc	%xmm10,%xmm9,%xmm9
801	vmovdqu	32-128(%rdi),%xmm10
802	xorl	%ecx,%r13d
803	addl	8(%rsp),%r9d
804	movl	%r10d,%r15d
805	rorl	$11,%r14d
806	xorl	%r8d,%r12d
807	xorl	%r11d,%r15d
808	rorl	$6,%r13d
809	addl	%r12d,%r9d
810	andl	%r15d,%esi
811	xorl	%r10d,%r14d
812	addl	%r13d,%r9d
813	xorl	%r11d,%esi
814	addl	%r9d,%ebx
815	rorl	$2,%r14d
816	addl	%esi,%r9d
817	movl	%ebx,%r13d
818	addl	%r9d,%r14d
819	rorl	$14,%r13d
820	movl	%r14d,%r9d
821	movl	%ecx,%r12d
822	xorl	%ebx,%r13d
823	rorl	$9,%r14d
824	xorl	%edx,%r12d
825	rorl	$5,%r13d
826	xorl	%r9d,%r14d
827	andl	%ebx,%r12d
828	vaesenc	%xmm10,%xmm9,%xmm9
829	vmovdqu	48-128(%rdi),%xmm10
830	xorl	%ebx,%r13d
831	addl	12(%rsp),%r8d
832	movl	%r9d,%esi
833	rorl	$11,%r14d
834	xorl	%edx,%r12d
835	xorl	%r10d,%esi
836	rorl	$6,%r13d
837	addl	%r12d,%r8d
838	andl	%esi,%r15d
839	xorl	%r9d,%r14d
840	addl	%r13d,%r8d
841	xorl	%r10d,%r15d
842	addl	%r8d,%eax
843	rorl	$2,%r14d
844	addl	%r15d,%r8d
845	movl	%eax,%r13d
846	addl	%r8d,%r14d
847	rorl	$14,%r13d
848	movl	%r14d,%r8d
849	movl	%ebx,%r12d
850	xorl	%eax,%r13d
851	rorl	$9,%r14d
852	xorl	%ecx,%r12d
853	rorl	$5,%r13d
854	xorl	%r8d,%r14d
855	andl	%eax,%r12d
856	vaesenc	%xmm10,%xmm9,%xmm9
857	vmovdqu	64-128(%rdi),%xmm10
858	xorl	%eax,%r13d
859	addl	16(%rsp),%edx
860	movl	%r8d,%r15d
861	rorl	$11,%r14d
862	xorl	%ecx,%r12d
863	xorl	%r9d,%r15d
864	rorl	$6,%r13d
865	addl	%r12d,%edx
866	andl	%r15d,%esi
867	xorl	%r8d,%r14d
868	addl	%r13d,%edx
869	xorl	%r9d,%esi
870	addl	%edx,%r11d
871	rorl	$2,%r14d
872	addl	%esi,%edx
873	movl	%r11d,%r13d
874	addl	%edx,%r14d
875	rorl	$14,%r13d
876	movl	%r14d,%edx
877	movl	%eax,%r12d
878	xorl	%r11d,%r13d
879	rorl	$9,%r14d
880	xorl	%ebx,%r12d
881	rorl	$5,%r13d
882	xorl	%edx,%r14d
883	andl	%r11d,%r12d
884	vaesenc	%xmm10,%xmm9,%xmm9
885	vmovdqu	80-128(%rdi),%xmm10
886	xorl	%r11d,%r13d
887	addl	20(%rsp),%ecx
888	movl	%edx,%esi
889	rorl	$11,%r14d
890	xorl	%ebx,%r12d
891	xorl	%r8d,%esi
892	rorl	$6,%r13d
893	addl	%r12d,%ecx
894	andl	%esi,%r15d
895	xorl	%edx,%r14d
896	addl	%r13d,%ecx
897	xorl	%r8d,%r15d
898	addl	%ecx,%r10d
899	rorl	$2,%r14d
900	addl	%r15d,%ecx
901	movl	%r10d,%r13d
902	addl	%ecx,%r14d
903	rorl	$14,%r13d
904	movl	%r14d,%ecx
905	movl	%r11d,%r12d
906	xorl	%r10d,%r13d
907	rorl	$9,%r14d
908	xorl	%eax,%r12d
909	rorl	$5,%r13d
910	xorl	%ecx,%r14d
911	andl	%r10d,%r12d
912	vaesenc	%xmm10,%xmm9,%xmm9
913	vmovdqu	96-128(%rdi),%xmm10
914	xorl	%r10d,%r13d
915	addl	24(%rsp),%ebx
916	movl	%ecx,%r15d
917	rorl	$11,%r14d
918	xorl	%eax,%r12d
919	xorl	%edx,%r15d
920	rorl	$6,%r13d
921	addl	%r12d,%ebx
922	andl	%r15d,%esi
923	xorl	%ecx,%r14d
924	addl	%r13d,%ebx
925	xorl	%edx,%esi
926	addl	%ebx,%r9d
927	rorl	$2,%r14d
928	addl	%esi,%ebx
929	movl	%r9d,%r13d
930	addl	%ebx,%r14d
931	rorl	$14,%r13d
932	movl	%r14d,%ebx
933	movl	%r10d,%r12d
934	xorl	%r9d,%r13d
935	rorl	$9,%r14d
936	xorl	%r11d,%r12d
937	rorl	$5,%r13d
938	xorl	%ebx,%r14d
939	andl	%r9d,%r12d
940	vaesenc	%xmm10,%xmm9,%xmm9
941	vmovdqu	112-128(%rdi),%xmm10
942	xorl	%r9d,%r13d
943	addl	28(%rsp),%eax
944	movl	%ebx,%esi
945	rorl	$11,%r14d
946	xorl	%r11d,%r12d
947	xorl	%ecx,%esi
948	rorl	$6,%r13d
949	addl	%r12d,%eax
950	andl	%esi,%r15d
951	xorl	%ebx,%r14d
952	addl	%r13d,%eax
953	xorl	%ecx,%r15d
954	addl	%eax,%r8d
955	rorl	$2,%r14d
956	addl	%r15d,%eax
957	movl	%r8d,%r13d
958	addl	%eax,%r14d
959	rorl	$14,%r13d
960	movl	%r14d,%eax
961	movl	%r9d,%r12d
962	xorl	%r8d,%r13d
963	rorl	$9,%r14d
964	xorl	%r10d,%r12d
965	rorl	$5,%r13d
966	xorl	%eax,%r14d
967	andl	%r8d,%r12d
968	vaesenc	%xmm10,%xmm9,%xmm9
969	vmovdqu	128-128(%rdi),%xmm10
970	xorl	%r8d,%r13d
971	addl	32(%rsp),%r11d
972	movl	%eax,%r15d
973	rorl	$11,%r14d
974	xorl	%r10d,%r12d
975	xorl	%ebx,%r15d
976	rorl	$6,%r13d
977	addl	%r12d,%r11d
978	andl	%r15d,%esi
979	xorl	%eax,%r14d
980	addl	%r13d,%r11d
981	xorl	%ebx,%esi
982	addl	%r11d,%edx
983	rorl	$2,%r14d
984	addl	%esi,%r11d
985	movl	%edx,%r13d
986	addl	%r11d,%r14d
987	rorl	$14,%r13d
988	movl	%r14d,%r11d
989	movl	%r8d,%r12d
990	xorl	%edx,%r13d
991	rorl	$9,%r14d
992	xorl	%r9d,%r12d
993	rorl	$5,%r13d
994	xorl	%r11d,%r14d
995	andl	%edx,%r12d
996	vaesenc	%xmm10,%xmm9,%xmm9
997	vmovdqu	144-128(%rdi),%xmm10
998	xorl	%edx,%r13d
999	addl	36(%rsp),%r10d
1000	movl	%r11d,%esi
1001	rorl	$11,%r14d
1002	xorl	%r9d,%r12d
1003	xorl	%eax,%esi
1004	rorl	$6,%r13d
1005	addl	%r12d,%r10d
1006	andl	%esi,%r15d
1007	xorl	%r11d,%r14d
1008	addl	%r13d,%r10d
1009	xorl	%eax,%r15d
1010	addl	%r10d,%ecx
1011	rorl	$2,%r14d
1012	addl	%r15d,%r10d
1013	movl	%ecx,%r13d
1014	addl	%r10d,%r14d
1015	rorl	$14,%r13d
1016	movl	%r14d,%r10d
1017	movl	%edx,%r12d
1018	xorl	%ecx,%r13d
1019	rorl	$9,%r14d
1020	xorl	%r8d,%r12d
1021	rorl	$5,%r13d
1022	xorl	%r10d,%r14d
1023	andl	%ecx,%r12d
1024	vaesenc	%xmm10,%xmm9,%xmm9
1025	vmovdqu	160-128(%rdi),%xmm10
1026	xorl	%ecx,%r13d
1027	addl	40(%rsp),%r9d
1028	movl	%r10d,%r15d
1029	rorl	$11,%r14d
1030	xorl	%r8d,%r12d
1031	xorl	%r11d,%r15d
1032	rorl	$6,%r13d
1033	addl	%r12d,%r9d
1034	andl	%r15d,%esi
1035	xorl	%r10d,%r14d
1036	addl	%r13d,%r9d
1037	xorl	%r11d,%esi
1038	addl	%r9d,%ebx
1039	rorl	$2,%r14d
1040	addl	%esi,%r9d
1041	movl	%ebx,%r13d
1042	addl	%r9d,%r14d
1043	rorl	$14,%r13d
1044	movl	%r14d,%r9d
1045	movl	%ecx,%r12d
1046	xorl	%ebx,%r13d
1047	rorl	$9,%r14d
1048	xorl	%edx,%r12d
1049	rorl	$5,%r13d
1050	xorl	%r9d,%r14d
1051	andl	%ebx,%r12d
1052	vaesenclast	%xmm10,%xmm9,%xmm11
1053	vaesenc	%xmm10,%xmm9,%xmm9
1054	vmovdqu	176-128(%rdi),%xmm10
1055	xorl	%ebx,%r13d
1056	addl	44(%rsp),%r8d
1057	movl	%r9d,%esi
1058	rorl	$11,%r14d
1059	xorl	%edx,%r12d
1060	xorl	%r10d,%esi
1061	rorl	$6,%r13d
1062	addl	%r12d,%r8d
1063	andl	%esi,%r15d
1064	xorl	%r9d,%r14d
1065	addl	%r13d,%r8d
1066	xorl	%r10d,%r15d
1067	addl	%r8d,%eax
1068	rorl	$2,%r14d
1069	addl	%r15d,%r8d
1070	movl	%eax,%r13d
1071	addl	%r8d,%r14d
1072	rorl	$14,%r13d
1073	movl	%r14d,%r8d
1074	movl	%ebx,%r12d
1075	xorl	%eax,%r13d
1076	rorl	$9,%r14d
1077	xorl	%ecx,%r12d
1078	rorl	$5,%r13d
1079	xorl	%r8d,%r14d
1080	andl	%eax,%r12d
1081	vpand	%xmm12,%xmm11,%xmm8
1082	vaesenc	%xmm10,%xmm9,%xmm9
1083	vmovdqu	192-128(%rdi),%xmm10
1084	xorl	%eax,%r13d
1085	addl	48(%rsp),%edx
1086	movl	%r8d,%r15d
1087	rorl	$11,%r14d
1088	xorl	%ecx,%r12d
1089	xorl	%r9d,%r15d
1090	rorl	$6,%r13d
1091	addl	%r12d,%edx
1092	andl	%r15d,%esi
1093	xorl	%r8d,%r14d
1094	addl	%r13d,%edx
1095	xorl	%r9d,%esi
1096	addl	%edx,%r11d
1097	rorl	$2,%r14d
1098	addl	%esi,%edx
1099	movl	%r11d,%r13d
1100	addl	%edx,%r14d
1101	rorl	$14,%r13d
1102	movl	%r14d,%edx
1103	movl	%eax,%r12d
1104	xorl	%r11d,%r13d
1105	rorl	$9,%r14d
1106	xorl	%ebx,%r12d
1107	rorl	$5,%r13d
1108	xorl	%edx,%r14d
1109	andl	%r11d,%r12d
1110	vaesenclast	%xmm10,%xmm9,%xmm11
1111	vaesenc	%xmm10,%xmm9,%xmm9
1112	vmovdqu	208-128(%rdi),%xmm10
1113	xorl	%r11d,%r13d
1114	addl	52(%rsp),%ecx
1115	movl	%edx,%esi
1116	rorl	$11,%r14d
1117	xorl	%ebx,%r12d
1118	xorl	%r8d,%esi
1119	rorl	$6,%r13d
1120	addl	%r12d,%ecx
1121	andl	%esi,%r15d
1122	xorl	%edx,%r14d
1123	addl	%r13d,%ecx
1124	xorl	%r8d,%r15d
1125	addl	%ecx,%r10d
1126	rorl	$2,%r14d
1127	addl	%r15d,%ecx
1128	movl	%r10d,%r13d
1129	addl	%ecx,%r14d
1130	rorl	$14,%r13d
1131	movl	%r14d,%ecx
1132	movl	%r11d,%r12d
1133	xorl	%r10d,%r13d
1134	rorl	$9,%r14d
1135	xorl	%eax,%r12d
1136	rorl	$5,%r13d
1137	xorl	%ecx,%r14d
1138	andl	%r10d,%r12d
1139	vpand	%xmm13,%xmm11,%xmm11
1140	vaesenc	%xmm10,%xmm9,%xmm9
1141	vmovdqu	224-128(%rdi),%xmm10
1142	xorl	%r10d,%r13d
1143	addl	56(%rsp),%ebx
1144	movl	%ecx,%r15d
1145	rorl	$11,%r14d
1146	xorl	%eax,%r12d
1147	xorl	%edx,%r15d
1148	rorl	$6,%r13d
1149	addl	%r12d,%ebx
1150	andl	%r15d,%esi
1151	xorl	%ecx,%r14d
1152	addl	%r13d,%ebx
1153	xorl	%edx,%esi
1154	addl	%ebx,%r9d
1155	rorl	$2,%r14d
1156	addl	%esi,%ebx
1157	movl	%r9d,%r13d
1158	addl	%ebx,%r14d
1159	rorl	$14,%r13d
1160	movl	%r14d,%ebx
1161	movl	%r10d,%r12d
1162	xorl	%r9d,%r13d
1163	rorl	$9,%r14d
1164	xorl	%r11d,%r12d
1165	rorl	$5,%r13d
1166	xorl	%ebx,%r14d
1167	andl	%r9d,%r12d
1168	vpor	%xmm11,%xmm8,%xmm8
1169	vaesenclast	%xmm10,%xmm9,%xmm11
1170	vmovdqu	0-128(%rdi),%xmm10
1171	xorl	%r9d,%r13d
1172	addl	60(%rsp),%eax
1173	movl	%ebx,%esi
1174	rorl	$11,%r14d
1175	xorl	%r11d,%r12d
1176	xorl	%ecx,%esi
1177	rorl	$6,%r13d
1178	addl	%r12d,%eax
1179	andl	%esi,%r15d
1180	xorl	%ebx,%r14d
1181	addl	%r13d,%eax
1182	xorl	%ecx,%r15d
1183	addl	%eax,%r8d
1184	rorl	$2,%r14d
1185	addl	%r15d,%eax
1186	movl	%r8d,%r13d
1187	addl	%eax,%r14d
# block done: reload pointers, merge/store the last ciphertext block,
# and add this block's working variables into the SHA-256 state
1188	movq	64+0(%rsp),%r12
1189	movq	64+8(%rsp),%r13
1190	movq	64+40(%rsp),%r15
1191	movq	64+48(%rsp),%rsi
1192
1193	vpand	%xmm14,%xmm11,%xmm11
1194	movl	%r14d,%eax
1195	vpor	%xmm11,%xmm8,%xmm8
1196	vmovdqu	%xmm8,(%r12,%r13,1)
1197	leaq	16(%r12),%r12
1198
1199	addl	0(%r15),%eax
1200	addl	4(%r15),%ebx
1201	addl	8(%r15),%ecx
1202	addl	12(%r15),%edx
1203	addl	16(%r15),%r8d
1204	addl	20(%r15),%r9d
1205	addl	24(%r15),%r10d
1206	addl	28(%r15),%r11d
1207
# more input left? compare current pointer against saved end pointer
1208	cmpq	64+16(%rsp),%r12
1209
1210	movl	%eax,0(%r15)
1211	movl	%ebx,4(%r15)
1212	movl	%ecx,8(%r15)
1213	movl	%edx,12(%r15)
1214	movl	%r8d,16(%r15)
1215	movl	%r9d,20(%r15)
1216	movl	%r10d,24(%r15)
1217	movl	%r11d,28(%r15)
1218
1219	jb	.Lloop_xop
1220
# epilogue: write final CBC chaining value back to *ivp, scrub SIMD
# registers, restore callee-saved registers from the saved frame
1221	movq	64+32(%rsp),%r8
1222	movq	120(%rsp),%rsi
1223.cfi_def_cfa	%rsi,8
1224	vmovdqu	%xmm8,(%r8)
1225	vzeroall
1226	movq	-48(%rsi),%r15
1227.cfi_restore	%r15
1228	movq	-40(%rsi),%r14
1229.cfi_restore	%r14
1230	movq	-32(%rsi),%r13
1231.cfi_restore	%r13
1232	movq	-24(%rsi),%r12
1233.cfi_restore	%r12
1234	movq	-16(%rsi),%rbp
1235.cfi_restore	%rbp
1236	movq	-8(%rsi),%rbx
1237.cfi_restore	%rbx
1238	leaq	(%rsi),%rsp
1239.cfi_def_cfa_register	%rsp
1240.Lepilogue_xop:
# .byte 0xf3,0xc3 = rep ret
1241	.byte	0xf3,0xc3
1242.cfi_endproc
1243.size	aesni_cbc_sha256_enc_xop,.-aesni_cbc_sha256_enc_xop
1244.type	aesni_cbc_sha256_enc_avx,@function
1245.align	64
1246aesni_cbc_sha256_enc_avx:
1247.cfi_startproc
1248.Lavx_shortcut:
1249	movq	8(%rsp),%r10
1250	movq	%rsp,%rax
1251.cfi_def_cfa_register	%rax
1252	pushq	%rbx
1253.cfi_offset	%rbx,-16
1254	pushq	%rbp
1255.cfi_offset	%rbp,-24
1256	pushq	%r12
1257.cfi_offset	%r12,-32
1258	pushq	%r13
1259.cfi_offset	%r13,-40
1260	pushq	%r14
1261.cfi_offset	%r14,-48
1262	pushq	%r15
1263.cfi_offset	%r15,-56
1264	subq	$128,%rsp
1265	andq	$-64,%rsp
1266
1267	shlq	$6,%rdx
1268	subq	%rdi,%rsi
1269	subq	%rdi,%r10
1270	addq	%rdi,%rdx
1271
1272
1273	movq	%rsi,64+8(%rsp)
1274	movq	%rdx,64+16(%rsp)
1275
1276	movq	%r8,64+32(%rsp)
1277	movq	%r9,64+40(%rsp)
1278	movq	%r10,64+48(%rsp)
1279	movq	%rax,120(%rsp)
1280.cfi_escape	0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
1281.Lprologue_avx:
1282	vzeroall
1283
1284	movq	%rdi,%r12
1285	leaq	128(%rcx),%rdi
1286	leaq	K256+544(%rip),%r13
1287	movl	240-128(%rdi),%r14d
1288	movq	%r9,%r15
1289	movq	%r10,%rsi
1290	vmovdqu	(%r8),%xmm8
1291	subq	$9,%r14
1292
1293	movl	0(%r15),%eax
1294	movl	4(%r15),%ebx
1295	movl	8(%r15),%ecx
1296	movl	12(%r15),%edx
1297	movl	16(%r15),%r8d
1298	movl	20(%r15),%r9d
1299	movl	24(%r15),%r10d
1300	movl	28(%r15),%r11d
1301
1302	vmovdqa	0(%r13,%r14,8),%xmm14
1303	vmovdqa	16(%r13,%r14,8),%xmm13
1304	vmovdqa	32(%r13,%r14,8),%xmm12
1305	vmovdqu	0-128(%rdi),%xmm10
1306	jmp	.Lloop_avx
1307.align	16
1308.Lloop_avx:
1309	vmovdqa	K256+512(%rip),%xmm7
1310	vmovdqu	0(%rsi,%r12,1),%xmm0
1311	vmovdqu	16(%rsi,%r12,1),%xmm1
1312	vmovdqu	32(%rsi,%r12,1),%xmm2
1313	vmovdqu	48(%rsi,%r12,1),%xmm3
1314	vpshufb	%xmm7,%xmm0,%xmm0
1315	leaq	K256(%rip),%rbp
1316	vpshufb	%xmm7,%xmm1,%xmm1
1317	vpshufb	%xmm7,%xmm2,%xmm2
1318	vpaddd	0(%rbp),%xmm0,%xmm4
1319	vpshufb	%xmm7,%xmm3,%xmm3
1320	vpaddd	32(%rbp),%xmm1,%xmm5
1321	vpaddd	64(%rbp),%xmm2,%xmm6
1322	vpaddd	96(%rbp),%xmm3,%xmm7
1323	vmovdqa	%xmm4,0(%rsp)
1324	movl	%eax,%r14d
1325	vmovdqa	%xmm5,16(%rsp)
1326	movl	%ebx,%esi
1327	vmovdqa	%xmm6,32(%rsp)
1328	xorl	%ecx,%esi
1329	vmovdqa	%xmm7,48(%rsp)
1330	movl	%r8d,%r13d
1331	jmp	.Lavx_00_47
1332
1333.align	16
1334.Lavx_00_47:
1335	subq	$-32*4,%rbp
1336	vmovdqu	(%r12),%xmm9
1337	movq	%r12,64+0(%rsp)
1338	vpalignr	$4,%xmm0,%xmm1,%xmm4
1339	shrdl	$14,%r13d,%r13d
1340	movl	%r14d,%eax
1341	movl	%r9d,%r12d
1342	vpalignr	$4,%xmm2,%xmm3,%xmm7
1343	xorl	%r8d,%r13d
1344	shrdl	$9,%r14d,%r14d
1345	xorl	%r10d,%r12d
1346	vpsrld	$7,%xmm4,%xmm6
1347	shrdl	$5,%r13d,%r13d
1348	xorl	%eax,%r14d
1349	andl	%r8d,%r12d
1350	vpaddd	%xmm7,%xmm0,%xmm0
1351	vpxor	%xmm10,%xmm9,%xmm9
1352	vmovdqu	16-128(%rdi),%xmm10
1353	xorl	%r8d,%r13d
1354	addl	0(%rsp),%r11d
1355	movl	%eax,%r15d
1356	vpsrld	$3,%xmm4,%xmm7
1357	shrdl	$11,%r14d,%r14d
1358	xorl	%r10d,%r12d
1359	xorl	%ebx,%r15d
1360	vpslld	$14,%xmm4,%xmm5
1361	shrdl	$6,%r13d,%r13d
1362	addl	%r12d,%r11d
1363	andl	%r15d,%esi
1364	vpxor	%xmm6,%xmm7,%xmm4
1365	xorl	%eax,%r14d
1366	addl	%r13d,%r11d
1367	xorl	%ebx,%esi
1368	vpshufd	$250,%xmm3,%xmm7
1369	addl	%r11d,%edx
1370	shrdl	$2,%r14d,%r14d
1371	addl	%esi,%r11d
1372	vpsrld	$11,%xmm6,%xmm6
1373	movl	%edx,%r13d
1374	addl	%r11d,%r14d
1375	shrdl	$14,%r13d,%r13d
1376	vpxor	%xmm5,%xmm4,%xmm4
1377	movl	%r14d,%r11d
1378	movl	%r8d,%r12d
1379	xorl	%edx,%r13d
1380	vpslld	$11,%xmm5,%xmm5
1381	shrdl	$9,%r14d,%r14d
1382	xorl	%r9d,%r12d
1383	shrdl	$5,%r13d,%r13d
1384	vpxor	%xmm6,%xmm4,%xmm4
1385	xorl	%r11d,%r14d
1386	andl	%edx,%r12d
1387	vpxor	%xmm8,%xmm9,%xmm9
1388	xorl	%edx,%r13d
1389	vpsrld	$10,%xmm7,%xmm6
1390	addl	4(%rsp),%r10d
1391	movl	%r11d,%esi
1392	shrdl	$11,%r14d,%r14d
1393	vpxor	%xmm5,%xmm4,%xmm4
1394	xorl	%r9d,%r12d
1395	xorl	%eax,%esi
1396	shrdl	$6,%r13d,%r13d
1397	vpsrlq	$17,%xmm7,%xmm7
1398	addl	%r12d,%r10d
1399	andl	%esi,%r15d
1400	xorl	%r11d,%r14d
1401	vpaddd	%xmm4,%xmm0,%xmm0
1402	addl	%r13d,%r10d
1403	xorl	%eax,%r15d
1404	addl	%r10d,%ecx
1405	vpxor	%xmm7,%xmm6,%xmm6
1406	shrdl	$2,%r14d,%r14d
1407	addl	%r15d,%r10d
1408	movl	%ecx,%r13d
1409	vpsrlq	$2,%xmm7,%xmm7
1410	addl	%r10d,%r14d
1411	shrdl	$14,%r13d,%r13d
1412	movl	%r14d,%r10d
1413	vpxor	%xmm7,%xmm6,%xmm6
1414	movl	%edx,%r12d
1415	xorl	%ecx,%r13d
1416	shrdl	$9,%r14d,%r14d
1417	vpshufd	$132,%xmm6,%xmm6
1418	xorl	%r8d,%r12d
1419	shrdl	$5,%r13d,%r13d
1420	xorl	%r10d,%r14d
1421	vpsrldq	$8,%xmm6,%xmm6
1422	andl	%ecx,%r12d
1423	vaesenc	%xmm10,%xmm9,%xmm9
1424	vmovdqu	32-128(%rdi),%xmm10
1425	xorl	%ecx,%r13d
1426	addl	8(%rsp),%r9d
1427	vpaddd	%xmm6,%xmm0,%xmm0
1428	movl	%r10d,%r15d
1429	shrdl	$11,%r14d,%r14d
1430	xorl	%r8d,%r12d
1431	vpshufd	$80,%xmm0,%xmm7
1432	xorl	%r11d,%r15d
1433	shrdl	$6,%r13d,%r13d
1434	addl	%r12d,%r9d
1435	vpsrld	$10,%xmm7,%xmm6
1436	andl	%r15d,%esi
1437	xorl	%r10d,%r14d
1438	addl	%r13d,%r9d
1439	vpsrlq	$17,%xmm7,%xmm7
1440	xorl	%r11d,%esi
1441	addl	%r9d,%ebx
1442	shrdl	$2,%r14d,%r14d
1443	vpxor	%xmm7,%xmm6,%xmm6
1444	addl	%esi,%r9d
1445	movl	%ebx,%r13d
1446	addl	%r9d,%r14d
1447	vpsrlq	$2,%xmm7,%xmm7
1448	shrdl	$14,%r13d,%r13d
1449	movl	%r14d,%r9d
1450	movl	%ecx,%r12d
1451	vpxor	%xmm7,%xmm6,%xmm6
1452	xorl	%ebx,%r13d
1453	shrdl	$9,%r14d,%r14d
1454	xorl	%edx,%r12d
1455	vpshufd	$232,%xmm6,%xmm6
1456	shrdl	$5,%r13d,%r13d
1457	xorl	%r9d,%r14d
1458	andl	%ebx,%r12d
1459	vpslldq	$8,%xmm6,%xmm6
1460	vaesenc	%xmm10,%xmm9,%xmm9
1461	vmovdqu	48-128(%rdi),%xmm10
1462	xorl	%ebx,%r13d
1463	addl	12(%rsp),%r8d
1464	movl	%r9d,%esi
1465	vpaddd	%xmm6,%xmm0,%xmm0
1466	shrdl	$11,%r14d,%r14d
1467	xorl	%edx,%r12d
1468	xorl	%r10d,%esi
1469	vpaddd	0(%rbp),%xmm0,%xmm6
1470	shrdl	$6,%r13d,%r13d
1471	addl	%r12d,%r8d
1472	andl	%esi,%r15d
1473	xorl	%r9d,%r14d
1474	addl	%r13d,%r8d
1475	xorl	%r10d,%r15d
1476	addl	%r8d,%eax
1477	shrdl	$2,%r14d,%r14d
1478	addl	%r15d,%r8d
1479	movl	%eax,%r13d
1480	addl	%r8d,%r14d
1481	vmovdqa	%xmm6,0(%rsp)
1482	vpalignr	$4,%xmm1,%xmm2,%xmm4
1483	shrdl	$14,%r13d,%r13d
1484	movl	%r14d,%r8d
1485	movl	%ebx,%r12d
1486	vpalignr	$4,%xmm3,%xmm0,%xmm7
1487	xorl	%eax,%r13d
1488	shrdl	$9,%r14d,%r14d
1489	xorl	%ecx,%r12d
1490	vpsrld	$7,%xmm4,%xmm6
1491	shrdl	$5,%r13d,%r13d
1492	xorl	%r8d,%r14d
1493	andl	%eax,%r12d
1494	vpaddd	%xmm7,%xmm1,%xmm1
1495	vaesenc	%xmm10,%xmm9,%xmm9
1496	vmovdqu	64-128(%rdi),%xmm10
1497	xorl	%eax,%r13d
1498	addl	16(%rsp),%edx
1499	movl	%r8d,%r15d
1500	vpsrld	$3,%xmm4,%xmm7
1501	shrdl	$11,%r14d,%r14d
1502	xorl	%ecx,%r12d
1503	xorl	%r9d,%r15d
1504	vpslld	$14,%xmm4,%xmm5
1505	shrdl	$6,%r13d,%r13d
1506	addl	%r12d,%edx
1507	andl	%r15d,%esi
1508	vpxor	%xmm6,%xmm7,%xmm4
1509	xorl	%r8d,%r14d
1510	addl	%r13d,%edx
1511	xorl	%r9d,%esi
1512	vpshufd	$250,%xmm0,%xmm7
1513	addl	%edx,%r11d
1514	shrdl	$2,%r14d,%r14d
1515	addl	%esi,%edx
1516	vpsrld	$11,%xmm6,%xmm6
1517	movl	%r11d,%r13d
1518	addl	%edx,%r14d
1519	shrdl	$14,%r13d,%r13d
1520	vpxor	%xmm5,%xmm4,%xmm4
1521	movl	%r14d,%edx
1522	movl	%eax,%r12d
1523	xorl	%r11d,%r13d
1524	vpslld	$11,%xmm5,%xmm5
1525	shrdl	$9,%r14d,%r14d
1526	xorl	%ebx,%r12d
1527	shrdl	$5,%r13d,%r13d
1528	vpxor	%xmm6,%xmm4,%xmm4
1529	xorl	%edx,%r14d
1530	andl	%r11d,%r12d
1531	vaesenc	%xmm10,%xmm9,%xmm9
1532	vmovdqu	80-128(%rdi),%xmm10
1533	xorl	%r11d,%r13d
1534	vpsrld	$10,%xmm7,%xmm6
1535	addl	20(%rsp),%ecx
1536	movl	%edx,%esi
1537	shrdl	$11,%r14d,%r14d
1538	vpxor	%xmm5,%xmm4,%xmm4
1539	xorl	%ebx,%r12d
1540	xorl	%r8d,%esi
1541	shrdl	$6,%r13d,%r13d
1542	vpsrlq	$17,%xmm7,%xmm7
1543	addl	%r12d,%ecx
1544	andl	%esi,%r15d
1545	xorl	%edx,%r14d
1546	vpaddd	%xmm4,%xmm1,%xmm1
1547	addl	%r13d,%ecx
1548	xorl	%r8d,%r15d
1549	addl	%ecx,%r10d
1550	vpxor	%xmm7,%xmm6,%xmm6
1551	shrdl	$2,%r14d,%r14d
1552	addl	%r15d,%ecx
1553	movl	%r10d,%r13d
1554	vpsrlq	$2,%xmm7,%xmm7
1555	addl	%ecx,%r14d
1556	shrdl	$14,%r13d,%r13d
1557	movl	%r14d,%ecx
1558	vpxor	%xmm7,%xmm6,%xmm6
1559	movl	%r11d,%r12d
1560	xorl	%r10d,%r13d
1561	shrdl	$9,%r14d,%r14d
1562	vpshufd	$132,%xmm6,%xmm6
1563	xorl	%eax,%r12d
1564	shrdl	$5,%r13d,%r13d
1565	xorl	%ecx,%r14d
1566	vpsrldq	$8,%xmm6,%xmm6
1567	andl	%r10d,%r12d
1568	vaesenc	%xmm10,%xmm9,%xmm9
1569	vmovdqu	96-128(%rdi),%xmm10
1570	xorl	%r10d,%r13d
1571	addl	24(%rsp),%ebx
1572	vpaddd	%xmm6,%xmm1,%xmm1
1573	movl	%ecx,%r15d
1574	shrdl	$11,%r14d,%r14d
1575	xorl	%eax,%r12d
1576	vpshufd	$80,%xmm1,%xmm7
1577	xorl	%edx,%r15d
1578	shrdl	$6,%r13d,%r13d
1579	addl	%r12d,%ebx
1580	vpsrld	$10,%xmm7,%xmm6
1581	andl	%r15d,%esi
1582	xorl	%ecx,%r14d
1583	addl	%r13d,%ebx
1584	vpsrlq	$17,%xmm7,%xmm7
1585	xorl	%edx,%esi
1586	addl	%ebx,%r9d
1587	shrdl	$2,%r14d,%r14d
1588	vpxor	%xmm7,%xmm6,%xmm6
1589	addl	%esi,%ebx
1590	movl	%r9d,%r13d
1591	addl	%ebx,%r14d
1592	vpsrlq	$2,%xmm7,%xmm7
1593	shrdl	$14,%r13d,%r13d
1594	movl	%r14d,%ebx
1595	movl	%r10d,%r12d
1596	vpxor	%xmm7,%xmm6,%xmm6
1597	xorl	%r9d,%r13d
1598	shrdl	$9,%r14d,%r14d
1599	xorl	%r11d,%r12d
1600	vpshufd	$232,%xmm6,%xmm6
1601	shrdl	$5,%r13d,%r13d
1602	xorl	%ebx,%r14d
1603	andl	%r9d,%r12d
1604	vpslldq	$8,%xmm6,%xmm6
1605	vaesenc	%xmm10,%xmm9,%xmm9
1606	vmovdqu	112-128(%rdi),%xmm10
1607	xorl	%r9d,%r13d
1608	addl	28(%rsp),%eax
1609	movl	%ebx,%esi
1610	vpaddd	%xmm6,%xmm1,%xmm1
1611	shrdl	$11,%r14d,%r14d
1612	xorl	%r11d,%r12d
1613	xorl	%ecx,%esi
1614	vpaddd	32(%rbp),%xmm1,%xmm6
1615	shrdl	$6,%r13d,%r13d
1616	addl	%r12d,%eax
1617	andl	%esi,%r15d
1618	xorl	%ebx,%r14d
1619	addl	%r13d,%eax
1620	xorl	%ecx,%r15d
1621	addl	%eax,%r8d
1622	shrdl	$2,%r14d,%r14d
1623	addl	%r15d,%eax
1624	movl	%r8d,%r13d
1625	addl	%eax,%r14d
1626	vmovdqa	%xmm6,16(%rsp)
1627	vpalignr	$4,%xmm2,%xmm3,%xmm4
1628	shrdl	$14,%r13d,%r13d
1629	movl	%r14d,%eax
1630	movl	%r9d,%r12d
1631	vpalignr	$4,%xmm0,%xmm1,%xmm7
1632	xorl	%r8d,%r13d
1633	shrdl	$9,%r14d,%r14d
1634	xorl	%r10d,%r12d
1635	vpsrld	$7,%xmm4,%xmm6
1636	shrdl	$5,%r13d,%r13d
1637	xorl	%eax,%r14d
1638	andl	%r8d,%r12d
1639	vpaddd	%xmm7,%xmm2,%xmm2
1640	vaesenc	%xmm10,%xmm9,%xmm9
1641	vmovdqu	128-128(%rdi),%xmm10
1642	xorl	%r8d,%r13d
1643	addl	32(%rsp),%r11d
1644	movl	%eax,%r15d
1645	vpsrld	$3,%xmm4,%xmm7
1646	shrdl	$11,%r14d,%r14d
1647	xorl	%r10d,%r12d
1648	xorl	%ebx,%r15d
1649	vpslld	$14,%xmm4,%xmm5
1650	shrdl	$6,%r13d,%r13d
1651	addl	%r12d,%r11d
1652	andl	%r15d,%esi
1653	vpxor	%xmm6,%xmm7,%xmm4
1654	xorl	%eax,%r14d
1655	addl	%r13d,%r11d
1656	xorl	%ebx,%esi
1657	vpshufd	$250,%xmm1,%xmm7
1658	addl	%r11d,%edx
1659	shrdl	$2,%r14d,%r14d
1660	addl	%esi,%r11d
1661	vpsrld	$11,%xmm6,%xmm6
1662	movl	%edx,%r13d
1663	addl	%r11d,%r14d
1664	shrdl	$14,%r13d,%r13d
1665	vpxor	%xmm5,%xmm4,%xmm4
1666	movl	%r14d,%r11d
1667	movl	%r8d,%r12d
1668	xorl	%edx,%r13d
1669	vpslld	$11,%xmm5,%xmm5
1670	shrdl	$9,%r14d,%r14d
1671	xorl	%r9d,%r12d
1672	shrdl	$5,%r13d,%r13d
1673	vpxor	%xmm6,%xmm4,%xmm4
1674	xorl	%r11d,%r14d
1675	andl	%edx,%r12d
1676	vaesenc	%xmm10,%xmm9,%xmm9
1677	vmovdqu	144-128(%rdi),%xmm10
1678	xorl	%edx,%r13d
1679	vpsrld	$10,%xmm7,%xmm6
1680	addl	36(%rsp),%r10d
1681	movl	%r11d,%esi
1682	shrdl	$11,%r14d,%r14d
1683	vpxor	%xmm5,%xmm4,%xmm4
1684	xorl	%r9d,%r12d
1685	xorl	%eax,%esi
1686	shrdl	$6,%r13d,%r13d
1687	vpsrlq	$17,%xmm7,%xmm7
1688	addl	%r12d,%r10d
1689	andl	%esi,%r15d
1690	xorl	%r11d,%r14d
1691	vpaddd	%xmm4,%xmm2,%xmm2
1692	addl	%r13d,%r10d
1693	xorl	%eax,%r15d
1694	addl	%r10d,%ecx
1695	vpxor	%xmm7,%xmm6,%xmm6
1696	shrdl	$2,%r14d,%r14d
1697	addl	%r15d,%r10d
1698	movl	%ecx,%r13d
1699	vpsrlq	$2,%xmm7,%xmm7
1700	addl	%r10d,%r14d
1701	shrdl	$14,%r13d,%r13d
1702	movl	%r14d,%r10d
1703	vpxor	%xmm7,%xmm6,%xmm6
1704	movl	%edx,%r12d
1705	xorl	%ecx,%r13d
1706	shrdl	$9,%r14d,%r14d
1707	vpshufd	$132,%xmm6,%xmm6
1708	xorl	%r8d,%r12d
1709	shrdl	$5,%r13d,%r13d
1710	xorl	%r10d,%r14d
1711	vpsrldq	$8,%xmm6,%xmm6
1712	andl	%ecx,%r12d
1713	vaesenc	%xmm10,%xmm9,%xmm9
1714	vmovdqu	160-128(%rdi),%xmm10
1715	xorl	%ecx,%r13d
1716	addl	40(%rsp),%r9d
1717	vpaddd	%xmm6,%xmm2,%xmm2
1718	movl	%r10d,%r15d
1719	shrdl	$11,%r14d,%r14d
1720	xorl	%r8d,%r12d
1721	vpshufd	$80,%xmm2,%xmm7
1722	xorl	%r11d,%r15d
1723	shrdl	$6,%r13d,%r13d
1724	addl	%r12d,%r9d
1725	vpsrld	$10,%xmm7,%xmm6
1726	andl	%r15d,%esi
1727	xorl	%r10d,%r14d
1728	addl	%r13d,%r9d
1729	vpsrlq	$17,%xmm7,%xmm7
1730	xorl	%r11d,%esi
1731	addl	%r9d,%ebx
1732	shrdl	$2,%r14d,%r14d
1733	vpxor	%xmm7,%xmm6,%xmm6
1734	addl	%esi,%r9d
1735	movl	%ebx,%r13d
1736	addl	%r9d,%r14d
1737	vpsrlq	$2,%xmm7,%xmm7
1738	shrdl	$14,%r13d,%r13d
1739	movl	%r14d,%r9d
1740	movl	%ecx,%r12d
1741	vpxor	%xmm7,%xmm6,%xmm6
1742	xorl	%ebx,%r13d
1743	shrdl	$9,%r14d,%r14d
1744	xorl	%edx,%r12d
1745	vpshufd	$232,%xmm6,%xmm6
1746	shrdl	$5,%r13d,%r13d
1747	xorl	%r9d,%r14d
1748	andl	%ebx,%r12d
1749	vpslldq	$8,%xmm6,%xmm6
1750	vaesenclast	%xmm10,%xmm9,%xmm11
1751	vaesenc	%xmm10,%xmm9,%xmm9
1752	vmovdqu	176-128(%rdi),%xmm10
1753	xorl	%ebx,%r13d
1754	addl	44(%rsp),%r8d
1755	movl	%r9d,%esi
1756	vpaddd	%xmm6,%xmm2,%xmm2
1757	shrdl	$11,%r14d,%r14d
1758	xorl	%edx,%r12d
1759	xorl	%r10d,%esi
1760	vpaddd	64(%rbp),%xmm2,%xmm6
1761	shrdl	$6,%r13d,%r13d
1762	addl	%r12d,%r8d
1763	andl	%esi,%r15d
1764	xorl	%r9d,%r14d
1765	addl	%r13d,%r8d
1766	xorl	%r10d,%r15d
1767	addl	%r8d,%eax
1768	shrdl	$2,%r14d,%r14d
1769	addl	%r15d,%r8d
1770	movl	%eax,%r13d
1771	addl	%r8d,%r14d
1772	vmovdqa	%xmm6,32(%rsp)
1773	vpalignr	$4,%xmm3,%xmm0,%xmm4
1774	shrdl	$14,%r13d,%r13d
1775	movl	%r14d,%r8d
1776	movl	%ebx,%r12d
1777	vpalignr	$4,%xmm1,%xmm2,%xmm7
1778	xorl	%eax,%r13d
1779	shrdl	$9,%r14d,%r14d
1780	xorl	%ecx,%r12d
1781	vpsrld	$7,%xmm4,%xmm6
1782	shrdl	$5,%r13d,%r13d
1783	xorl	%r8d,%r14d
1784	andl	%eax,%r12d
1785	vpaddd	%xmm7,%xmm3,%xmm3
1786	vpand	%xmm12,%xmm11,%xmm8
1787	vaesenc	%xmm10,%xmm9,%xmm9
1788	vmovdqu	192-128(%rdi),%xmm10
1789	xorl	%eax,%r13d
1790	addl	48(%rsp),%edx
1791	movl	%r8d,%r15d
1792	vpsrld	$3,%xmm4,%xmm7
1793	shrdl	$11,%r14d,%r14d
1794	xorl	%ecx,%r12d
1795	xorl	%r9d,%r15d
1796	vpslld	$14,%xmm4,%xmm5
1797	shrdl	$6,%r13d,%r13d
1798	addl	%r12d,%edx
1799	andl	%r15d,%esi
1800	vpxor	%xmm6,%xmm7,%xmm4
1801	xorl	%r8d,%r14d
1802	addl	%r13d,%edx
1803	xorl	%r9d,%esi
1804	vpshufd	$250,%xmm2,%xmm7
1805	addl	%edx,%r11d
1806	shrdl	$2,%r14d,%r14d
1807	addl	%esi,%edx
1808	vpsrld	$11,%xmm6,%xmm6
1809	movl	%r11d,%r13d
1810	addl	%edx,%r14d
1811	shrdl	$14,%r13d,%r13d
1812	vpxor	%xmm5,%xmm4,%xmm4
1813	movl	%r14d,%edx
1814	movl	%eax,%r12d
1815	xorl	%r11d,%r13d
1816	vpslld	$11,%xmm5,%xmm5
1817	shrdl	$9,%r14d,%r14d
1818	xorl	%ebx,%r12d
1819	shrdl	$5,%r13d,%r13d
1820	vpxor	%xmm6,%xmm4,%xmm4
1821	xorl	%edx,%r14d
1822	andl	%r11d,%r12d
1823	vaesenclast	%xmm10,%xmm9,%xmm11
1824	vaesenc	%xmm10,%xmm9,%xmm9
1825	vmovdqu	208-128(%rdi),%xmm10
1826	xorl	%r11d,%r13d
1827	vpsrld	$10,%xmm7,%xmm6
1828	addl	52(%rsp),%ecx
1829	movl	%edx,%esi
1830	shrdl	$11,%r14d,%r14d
1831	vpxor	%xmm5,%xmm4,%xmm4
1832	xorl	%ebx,%r12d
1833	xorl	%r8d,%esi
1834	shrdl	$6,%r13d,%r13d
1835	vpsrlq	$17,%xmm7,%xmm7
1836	addl	%r12d,%ecx
1837	andl	%esi,%r15d
1838	xorl	%edx,%r14d
1839	vpaddd	%xmm4,%xmm3,%xmm3
1840	addl	%r13d,%ecx
1841	xorl	%r8d,%r15d
1842	addl	%ecx,%r10d
1843	vpxor	%xmm7,%xmm6,%xmm6
1844	shrdl	$2,%r14d,%r14d
1845	addl	%r15d,%ecx
1846	movl	%r10d,%r13d
1847	vpsrlq	$2,%xmm7,%xmm7
1848	addl	%ecx,%r14d
1849	shrdl	$14,%r13d,%r13d
1850	movl	%r14d,%ecx
1851	vpxor	%xmm7,%xmm6,%xmm6
1852	movl	%r11d,%r12d
1853	xorl	%r10d,%r13d
1854	shrdl	$9,%r14d,%r14d
1855	vpshufd	$132,%xmm6,%xmm6
1856	xorl	%eax,%r12d
1857	shrdl	$5,%r13d,%r13d
1858	xorl	%ecx,%r14d
1859	vpsrldq	$8,%xmm6,%xmm6
1860	andl	%r10d,%r12d
1861	vpand	%xmm13,%xmm11,%xmm11
1862	vaesenc	%xmm10,%xmm9,%xmm9
1863	vmovdqu	224-128(%rdi),%xmm10
1864	xorl	%r10d,%r13d
1865	addl	56(%rsp),%ebx
1866	vpaddd	%xmm6,%xmm3,%xmm3
1867	movl	%ecx,%r15d
1868	shrdl	$11,%r14d,%r14d
1869	xorl	%eax,%r12d
1870	vpshufd	$80,%xmm3,%xmm7
1871	xorl	%edx,%r15d
1872	shrdl	$6,%r13d,%r13d
1873	addl	%r12d,%ebx
1874	vpsrld	$10,%xmm7,%xmm6
1875	andl	%r15d,%esi
1876	xorl	%ecx,%r14d
1877	addl	%r13d,%ebx
1878	vpsrlq	$17,%xmm7,%xmm7
1879	xorl	%edx,%esi
1880	addl	%ebx,%r9d
1881	shrdl	$2,%r14d,%r14d
1882	vpxor	%xmm7,%xmm6,%xmm6
1883	addl	%esi,%ebx
1884	movl	%r9d,%r13d
1885	addl	%ebx,%r14d
1886	vpsrlq	$2,%xmm7,%xmm7
1887	shrdl	$14,%r13d,%r13d
1888	movl	%r14d,%ebx
1889	movl	%r10d,%r12d
1890	vpxor	%xmm7,%xmm6,%xmm6
1891	xorl	%r9d,%r13d
1892	shrdl	$9,%r14d,%r14d
1893	xorl	%r11d,%r12d
1894	vpshufd	$232,%xmm6,%xmm6
1895	shrdl	$5,%r13d,%r13d
1896	xorl	%ebx,%r14d
1897	andl	%r9d,%r12d
1898	vpslldq	$8,%xmm6,%xmm6
1899	vpor	%xmm11,%xmm8,%xmm8
1900	vaesenclast	%xmm10,%xmm9,%xmm11
1901	vmovdqu	0-128(%rdi),%xmm10
1902	xorl	%r9d,%r13d
1903	addl	60(%rsp),%eax
1904	movl	%ebx,%esi
1905	vpaddd	%xmm6,%xmm3,%xmm3
1906	shrdl	$11,%r14d,%r14d
1907	xorl	%r11d,%r12d
1908	xorl	%ecx,%esi
1909	vpaddd	96(%rbp),%xmm3,%xmm6
1910	shrdl	$6,%r13d,%r13d
1911	addl	%r12d,%eax
1912	andl	%esi,%r15d
1913	xorl	%ebx,%r14d
1914	addl	%r13d,%eax
1915	xorl	%ecx,%r15d
1916	addl	%eax,%r8d
1917	shrdl	$2,%r14d,%r14d
1918	addl	%r15d,%eax
1919	movl	%r8d,%r13d
1920	addl	%eax,%r14d
1921	vmovdqa	%xmm6,48(%rsp)
1922	movq	64+0(%rsp),%r12
1923	vpand	%xmm14,%xmm11,%xmm11
1924	movq	64+8(%rsp),%r15
1925	vpor	%xmm11,%xmm8,%xmm8
1926	vmovdqu	%xmm8,(%r15,%r12,1)
1927	leaq	16(%r12),%r12
1928	cmpb	$0,131(%rbp)
1929	jne	.Lavx_00_47
1930	vmovdqu	(%r12),%xmm9
1931	movq	%r12,64+0(%rsp)
1932	shrdl	$14,%r13d,%r13d
1933	movl	%r14d,%eax
1934	movl	%r9d,%r12d
1935	xorl	%r8d,%r13d
1936	shrdl	$9,%r14d,%r14d
1937	xorl	%r10d,%r12d
1938	shrdl	$5,%r13d,%r13d
1939	xorl	%eax,%r14d
1940	andl	%r8d,%r12d
1941	vpxor	%xmm10,%xmm9,%xmm9
1942	vmovdqu	16-128(%rdi),%xmm10
1943	xorl	%r8d,%r13d
1944	addl	0(%rsp),%r11d
1945	movl	%eax,%r15d
1946	shrdl	$11,%r14d,%r14d
1947	xorl	%r10d,%r12d
1948	xorl	%ebx,%r15d
1949	shrdl	$6,%r13d,%r13d
1950	addl	%r12d,%r11d
1951	andl	%r15d,%esi
1952	xorl	%eax,%r14d
1953	addl	%r13d,%r11d
1954	xorl	%ebx,%esi
1955	addl	%r11d,%edx
1956	shrdl	$2,%r14d,%r14d
1957	addl	%esi,%r11d
1958	movl	%edx,%r13d
1959	addl	%r11d,%r14d
1960	shrdl	$14,%r13d,%r13d
1961	movl	%r14d,%r11d
1962	movl	%r8d,%r12d
1963	xorl	%edx,%r13d
1964	shrdl	$9,%r14d,%r14d
1965	xorl	%r9d,%r12d
1966	shrdl	$5,%r13d,%r13d
1967	xorl	%r11d,%r14d
1968	andl	%edx,%r12d
1969	vpxor	%xmm8,%xmm9,%xmm9
1970	xorl	%edx,%r13d
1971	addl	4(%rsp),%r10d
1972	movl	%r11d,%esi
1973	shrdl	$11,%r14d,%r14d
1974	xorl	%r9d,%r12d
1975	xorl	%eax,%esi
1976	shrdl	$6,%r13d,%r13d
1977	addl	%r12d,%r10d
1978	andl	%esi,%r15d
1979	xorl	%r11d,%r14d
1980	addl	%r13d,%r10d
1981	xorl	%eax,%r15d
1982	addl	%r10d,%ecx
1983	shrdl	$2,%r14d,%r14d
1984	addl	%r15d,%r10d
1985	movl	%ecx,%r13d
1986	addl	%r10d,%r14d
1987	shrdl	$14,%r13d,%r13d
1988	movl	%r14d,%r10d
1989	movl	%edx,%r12d
1990	xorl	%ecx,%r13d
1991	shrdl	$9,%r14d,%r14d
1992	xorl	%r8d,%r12d
1993	shrdl	$5,%r13d,%r13d
1994	xorl	%r10d,%r14d
1995	andl	%ecx,%r12d
1996	vaesenc	%xmm10,%xmm9,%xmm9
1997	vmovdqu	32-128(%rdi),%xmm10
1998	xorl	%ecx,%r13d
1999	addl	8(%rsp),%r9d
2000	movl	%r10d,%r15d
2001	shrdl	$11,%r14d,%r14d
2002	xorl	%r8d,%r12d
2003	xorl	%r11d,%r15d
2004	shrdl	$6,%r13d,%r13d
2005	addl	%r12d,%r9d
2006	andl	%r15d,%esi
2007	xorl	%r10d,%r14d
2008	addl	%r13d,%r9d
2009	xorl	%r11d,%esi
2010	addl	%r9d,%ebx
2011	shrdl	$2,%r14d,%r14d
2012	addl	%esi,%r9d
2013	movl	%ebx,%r13d
2014	addl	%r9d,%r14d
2015	shrdl	$14,%r13d,%r13d
2016	movl	%r14d,%r9d
2017	movl	%ecx,%r12d
2018	xorl	%ebx,%r13d
2019	shrdl	$9,%r14d,%r14d
2020	xorl	%edx,%r12d
2021	shrdl	$5,%r13d,%r13d
2022	xorl	%r9d,%r14d
2023	andl	%ebx,%r12d
2024	vaesenc	%xmm10,%xmm9,%xmm9
2025	vmovdqu	48-128(%rdi),%xmm10
2026	xorl	%ebx,%r13d
2027	addl	12(%rsp),%r8d
2028	movl	%r9d,%esi
2029	shrdl	$11,%r14d,%r14d
2030	xorl	%edx,%r12d
2031	xorl	%r10d,%esi
2032	shrdl	$6,%r13d,%r13d
2033	addl	%r12d,%r8d
2034	andl	%esi,%r15d
2035	xorl	%r9d,%r14d
2036	addl	%r13d,%r8d
2037	xorl	%r10d,%r15d
2038	addl	%r8d,%eax
2039	shrdl	$2,%r14d,%r14d
2040	addl	%r15d,%r8d
2041	movl	%eax,%r13d
2042	addl	%r8d,%r14d
2043	shrdl	$14,%r13d,%r13d
2044	movl	%r14d,%r8d
2045	movl	%ebx,%r12d
2046	xorl	%eax,%r13d
2047	shrdl	$9,%r14d,%r14d
2048	xorl	%ecx,%r12d
2049	shrdl	$5,%r13d,%r13d
2050	xorl	%r8d,%r14d
2051	andl	%eax,%r12d
2052	vaesenc	%xmm10,%xmm9,%xmm9
2053	vmovdqu	64-128(%rdi),%xmm10
2054	xorl	%eax,%r13d
2055	addl	16(%rsp),%edx
2056	movl	%r8d,%r15d
2057	shrdl	$11,%r14d,%r14d
2058	xorl	%ecx,%r12d
2059	xorl	%r9d,%r15d
2060	shrdl	$6,%r13d,%r13d
2061	addl	%r12d,%edx
2062	andl	%r15d,%esi
2063	xorl	%r8d,%r14d
2064	addl	%r13d,%edx
2065	xorl	%r9d,%esi
2066	addl	%edx,%r11d
2067	shrdl	$2,%r14d,%r14d
2068	addl	%esi,%edx
2069	movl	%r11d,%r13d
2070	addl	%edx,%r14d
2071	shrdl	$14,%r13d,%r13d
2072	movl	%r14d,%edx
2073	movl	%eax,%r12d
2074	xorl	%r11d,%r13d
2075	shrdl	$9,%r14d,%r14d
2076	xorl	%ebx,%r12d
2077	shrdl	$5,%r13d,%r13d
2078	xorl	%edx,%r14d
2079	andl	%r11d,%r12d
2080	vaesenc	%xmm10,%xmm9,%xmm9
2081	vmovdqu	80-128(%rdi),%xmm10
2082	xorl	%r11d,%r13d
2083	addl	20(%rsp),%ecx
2084	movl	%edx,%esi
2085	shrdl	$11,%r14d,%r14d
2086	xorl	%ebx,%r12d
2087	xorl	%r8d,%esi
2088	shrdl	$6,%r13d,%r13d
2089	addl	%r12d,%ecx
2090	andl	%esi,%r15d
2091	xorl	%edx,%r14d
2092	addl	%r13d,%ecx
2093	xorl	%r8d,%r15d
2094	addl	%ecx,%r10d
2095	shrdl	$2,%r14d,%r14d
2096	addl	%r15d,%ecx
2097	movl	%r10d,%r13d
2098	addl	%ecx,%r14d
2099	shrdl	$14,%r13d,%r13d
2100	movl	%r14d,%ecx
2101	movl	%r11d,%r12d
2102	xorl	%r10d,%r13d
2103	shrdl	$9,%r14d,%r14d
2104	xorl	%eax,%r12d
2105	shrdl	$5,%r13d,%r13d
2106	xorl	%ecx,%r14d
2107	andl	%r10d,%r12d
2108	vaesenc	%xmm10,%xmm9,%xmm9
2109	vmovdqu	96-128(%rdi),%xmm10
2110	xorl	%r10d,%r13d
2111	addl	24(%rsp),%ebx
2112	movl	%ecx,%r15d
2113	shrdl	$11,%r14d,%r14d
2114	xorl	%eax,%r12d
2115	xorl	%edx,%r15d
2116	shrdl	$6,%r13d,%r13d
2117	addl	%r12d,%ebx
2118	andl	%r15d,%esi
2119	xorl	%ecx,%r14d
2120	addl	%r13d,%ebx
2121	xorl	%edx,%esi
2122	addl	%ebx,%r9d
2123	shrdl	$2,%r14d,%r14d
2124	addl	%esi,%ebx
2125	movl	%r9d,%r13d
2126	addl	%ebx,%r14d
2127	shrdl	$14,%r13d,%r13d
2128	movl	%r14d,%ebx
2129	movl	%r10d,%r12d
2130	xorl	%r9d,%r13d
2131	shrdl	$9,%r14d,%r14d
2132	xorl	%r11d,%r12d
2133	shrdl	$5,%r13d,%r13d
2134	xorl	%ebx,%r14d
2135	andl	%r9d,%r12d
2136	vaesenc	%xmm10,%xmm9,%xmm9
2137	vmovdqu	112-128(%rdi),%xmm10
2138	xorl	%r9d,%r13d
2139	addl	28(%rsp),%eax
2140	movl	%ebx,%esi
2141	shrdl	$11,%r14d,%r14d
2142	xorl	%r11d,%r12d
2143	xorl	%ecx,%esi
2144	shrdl	$6,%r13d,%r13d
2145	addl	%r12d,%eax
2146	andl	%esi,%r15d
2147	xorl	%ebx,%r14d
2148	addl	%r13d,%eax
2149	xorl	%ecx,%r15d
2150	addl	%eax,%r8d
2151	shrdl	$2,%r14d,%r14d
2152	addl	%r15d,%eax
2153	movl	%r8d,%r13d
2154	addl	%eax,%r14d
2155	shrdl	$14,%r13d,%r13d
2156	movl	%r14d,%eax
2157	movl	%r9d,%r12d
2158	xorl	%r8d,%r13d
2159	shrdl	$9,%r14d,%r14d
2160	xorl	%r10d,%r12d
2161	shrdl	$5,%r13d,%r13d
2162	xorl	%eax,%r14d
2163	andl	%r8d,%r12d
2164	vaesenc	%xmm10,%xmm9,%xmm9
2165	vmovdqu	128-128(%rdi),%xmm10
2166	xorl	%r8d,%r13d
2167	addl	32(%rsp),%r11d
2168	movl	%eax,%r15d
2169	shrdl	$11,%r14d,%r14d
2170	xorl	%r10d,%r12d
2171	xorl	%ebx,%r15d
2172	shrdl	$6,%r13d,%r13d
2173	addl	%r12d,%r11d
2174	andl	%r15d,%esi
2175	xorl	%eax,%r14d
2176	addl	%r13d,%r11d
2177	xorl	%ebx,%esi
2178	addl	%r11d,%edx
2179	shrdl	$2,%r14d,%r14d
2180	addl	%esi,%r11d
2181	movl	%edx,%r13d
2182	addl	%r11d,%r14d
2183	shrdl	$14,%r13d,%r13d
2184	movl	%r14d,%r11d
2185	movl	%r8d,%r12d
2186	xorl	%edx,%r13d
2187	shrdl	$9,%r14d,%r14d
2188	xorl	%r9d,%r12d
2189	shrdl	$5,%r13d,%r13d
2190	xorl	%r11d,%r14d
2191	andl	%edx,%r12d
2192	vaesenc	%xmm10,%xmm9,%xmm9
2193	vmovdqu	144-128(%rdi),%xmm10
2194	xorl	%edx,%r13d
2195	addl	36(%rsp),%r10d
2196	movl	%r11d,%esi
2197	shrdl	$11,%r14d,%r14d
2198	xorl	%r9d,%r12d
2199	xorl	%eax,%esi
2200	shrdl	$6,%r13d,%r13d
2201	addl	%r12d,%r10d
2202	andl	%esi,%r15d
2203	xorl	%r11d,%r14d
2204	addl	%r13d,%r10d
2205	xorl	%eax,%r15d
2206	addl	%r10d,%ecx
2207	shrdl	$2,%r14d,%r14d
2208	addl	%r15d,%r10d
2209	movl	%ecx,%r13d
2210	addl	%r10d,%r14d
2211	shrdl	$14,%r13d,%r13d
2212	movl	%r14d,%r10d
2213	movl	%edx,%r12d
2214	xorl	%ecx,%r13d
2215	shrdl	$9,%r14d,%r14d
2216	xorl	%r8d,%r12d
2217	shrdl	$5,%r13d,%r13d
2218	xorl	%r10d,%r14d
2219	andl	%ecx,%r12d
2220	vaesenc	%xmm10,%xmm9,%xmm9
2221	vmovdqu	160-128(%rdi),%xmm10
2222	xorl	%ecx,%r13d
2223	addl	40(%rsp),%r9d
2224	movl	%r10d,%r15d
2225	shrdl	$11,%r14d,%r14d
2226	xorl	%r8d,%r12d
2227	xorl	%r11d,%r15d
2228	shrdl	$6,%r13d,%r13d
2229	addl	%r12d,%r9d
2230	andl	%r15d,%esi
2231	xorl	%r10d,%r14d
2232	addl	%r13d,%r9d
2233	xorl	%r11d,%esi
2234	addl	%r9d,%ebx
2235	shrdl	$2,%r14d,%r14d
2236	addl	%esi,%r9d
2237	movl	%ebx,%r13d
2238	addl	%r9d,%r14d
2239	shrdl	$14,%r13d,%r13d
2240	movl	%r14d,%r9d
2241	movl	%ecx,%r12d
2242	xorl	%ebx,%r13d
2243	shrdl	$9,%r14d,%r14d
2244	xorl	%edx,%r12d
2245	shrdl	$5,%r13d,%r13d
2246	xorl	%r9d,%r14d
2247	andl	%ebx,%r12d
2248	vaesenclast	%xmm10,%xmm9,%xmm11
2249	vaesenc	%xmm10,%xmm9,%xmm9
2250	vmovdqu	176-128(%rdi),%xmm10
2251	xorl	%ebx,%r13d
2252	addl	44(%rsp),%r8d
2253	movl	%r9d,%esi
2254	shrdl	$11,%r14d,%r14d
2255	xorl	%edx,%r12d
2256	xorl	%r10d,%esi
2257	shrdl	$6,%r13d,%r13d
2258	addl	%r12d,%r8d
2259	andl	%esi,%r15d
2260	xorl	%r9d,%r14d
2261	addl	%r13d,%r8d
2262	xorl	%r10d,%r15d
2263	addl	%r8d,%eax
2264	shrdl	$2,%r14d,%r14d
2265	addl	%r15d,%r8d
2266	movl	%eax,%r13d
2267	addl	%r8d,%r14d
2268	shrdl	$14,%r13d,%r13d
2269	movl	%r14d,%r8d
2270	movl	%ebx,%r12d
2271	xorl	%eax,%r13d
2272	shrdl	$9,%r14d,%r14d
2273	xorl	%ecx,%r12d
2274	shrdl	$5,%r13d,%r13d
2275	xorl	%r8d,%r14d
2276	andl	%eax,%r12d
2277	vpand	%xmm12,%xmm11,%xmm8
2278	vaesenc	%xmm10,%xmm9,%xmm9
2279	vmovdqu	192-128(%rdi),%xmm10
2280	xorl	%eax,%r13d
2281	addl	48(%rsp),%edx
2282	movl	%r8d,%r15d
2283	shrdl	$11,%r14d,%r14d
2284	xorl	%ecx,%r12d
2285	xorl	%r9d,%r15d
2286	shrdl	$6,%r13d,%r13d
2287	addl	%r12d,%edx
2288	andl	%r15d,%esi
2289	xorl	%r8d,%r14d
2290	addl	%r13d,%edx
2291	xorl	%r9d,%esi
2292	addl	%edx,%r11d
2293	shrdl	$2,%r14d,%r14d
2294	addl	%esi,%edx
2295	movl	%r11d,%r13d
2296	addl	%edx,%r14d
2297	shrdl	$14,%r13d,%r13d
2298	movl	%r14d,%edx
2299	movl	%eax,%r12d
2300	xorl	%r11d,%r13d
2301	shrdl	$9,%r14d,%r14d
2302	xorl	%ebx,%r12d
2303	shrdl	$5,%r13d,%r13d
2304	xorl	%edx,%r14d
2305	andl	%r11d,%r12d
2306	vaesenclast	%xmm10,%xmm9,%xmm11
2307	vaesenc	%xmm10,%xmm9,%xmm9
2308	vmovdqu	208-128(%rdi),%xmm10
2309	xorl	%r11d,%r13d
2310	addl	52(%rsp),%ecx
2311	movl	%edx,%esi
2312	shrdl	$11,%r14d,%r14d
2313	xorl	%ebx,%r12d
2314	xorl	%r8d,%esi
2315	shrdl	$6,%r13d,%r13d
2316	addl	%r12d,%ecx
2317	andl	%esi,%r15d
2318	xorl	%edx,%r14d
2319	addl	%r13d,%ecx
2320	xorl	%r8d,%r15d
2321	addl	%ecx,%r10d
2322	shrdl	$2,%r14d,%r14d
2323	addl	%r15d,%ecx
2324	movl	%r10d,%r13d
2325	addl	%ecx,%r14d
2326	shrdl	$14,%r13d,%r13d
2327	movl	%r14d,%ecx
2328	movl	%r11d,%r12d
2329	xorl	%r10d,%r13d
2330	shrdl	$9,%r14d,%r14d
2331	xorl	%eax,%r12d
2332	shrdl	$5,%r13d,%r13d
2333	xorl	%ecx,%r14d
2334	andl	%r10d,%r12d
2335	vpand	%xmm13,%xmm11,%xmm11
2336	vaesenc	%xmm10,%xmm9,%xmm9
2337	vmovdqu	224-128(%rdi),%xmm10
2338	xorl	%r10d,%r13d
2339	addl	56(%rsp),%ebx
2340	movl	%ecx,%r15d
2341	shrdl	$11,%r14d,%r14d
2342	xorl	%eax,%r12d
2343	xorl	%edx,%r15d
2344	shrdl	$6,%r13d,%r13d
2345	addl	%r12d,%ebx
2346	andl	%r15d,%esi
2347	xorl	%ecx,%r14d
2348	addl	%r13d,%ebx
2349	xorl	%edx,%esi
2350	addl	%ebx,%r9d
2351	shrdl	$2,%r14d,%r14d
2352	addl	%esi,%ebx
2353	movl	%r9d,%r13d
2354	addl	%ebx,%r14d
2355	shrdl	$14,%r13d,%r13d
2356	movl	%r14d,%ebx
2357	movl	%r10d,%r12d
2358	xorl	%r9d,%r13d
2359	shrdl	$9,%r14d,%r14d
2360	xorl	%r11d,%r12d
2361	shrdl	$5,%r13d,%r13d
2362	xorl	%ebx,%r14d
2363	andl	%r9d,%r12d
2364	vpor	%xmm11,%xmm8,%xmm8
2365	vaesenclast	%xmm10,%xmm9,%xmm11
2366	vmovdqu	0-128(%rdi),%xmm10
2367	xorl	%r9d,%r13d
2368	addl	60(%rsp),%eax
2369	movl	%ebx,%esi
2370	shrdl	$11,%r14d,%r14d
2371	xorl	%r11d,%r12d
2372	xorl	%ecx,%esi
2373	shrdl	$6,%r13d,%r13d
2374	addl	%r12d,%eax
2375	andl	%esi,%r15d
2376	xorl	%ebx,%r14d
2377	addl	%r13d,%eax
2378	xorl	%ecx,%r15d
2379	addl	%eax,%r8d
2380	shrdl	$2,%r14d,%r14d
2381	addl	%r15d,%eax
2382	movl	%r8d,%r13d
2383	addl	%eax,%r14d
2384	movq	64+0(%rsp),%r12
2385	movq	64+8(%rsp),%r13
2386	movq	64+40(%rsp),%r15
2387	movq	64+48(%rsp),%rsi
2388
2389	vpand	%xmm14,%xmm11,%xmm11
2390	movl	%r14d,%eax
2391	vpor	%xmm11,%xmm8,%xmm8
2392	vmovdqu	%xmm8,(%r12,%r13,1)
2393	leaq	16(%r12),%r12
2394
2395	addl	0(%r15),%eax
2396	addl	4(%r15),%ebx
2397	addl	8(%r15),%ecx
2398	addl	12(%r15),%edx
2399	addl	16(%r15),%r8d
2400	addl	20(%r15),%r9d
2401	addl	24(%r15),%r10d
2402	addl	28(%r15),%r11d
2403
2404	cmpq	64+16(%rsp),%r12
2405
2406	movl	%eax,0(%r15)
2407	movl	%ebx,4(%r15)
2408	movl	%ecx,8(%r15)
2409	movl	%edx,12(%r15)
2410	movl	%r8d,16(%r15)
2411	movl	%r9d,20(%r15)
2412	movl	%r10d,24(%r15)
2413	movl	%r11d,28(%r15)
2414	jb	.Lloop_avx
2415
2416	movq	64+32(%rsp),%r8
2417	movq	120(%rsp),%rsi
2418.cfi_def_cfa	%rsi,8
2419	vmovdqu	%xmm8,(%r8)
2420	vzeroall
2421	movq	-48(%rsi),%r15
2422.cfi_restore	%r15
2423	movq	-40(%rsi),%r14
2424.cfi_restore	%r14
2425	movq	-32(%rsi),%r13
2426.cfi_restore	%r13
2427	movq	-24(%rsi),%r12
2428.cfi_restore	%r12
2429	movq	-16(%rsi),%rbp
2430.cfi_restore	%rbp
2431	movq	-8(%rsi),%rbx
2432.cfi_restore	%rbx
2433	leaq	(%rsi),%rsp
2434.cfi_def_cfa_register	%rsp
2435.Lepilogue_avx:
2436	.byte	0xf3,0xc3
2437.cfi_endproc
2438.size	aesni_cbc_sha256_enc_avx,.-aesni_cbc_sha256_enc_avx
2439.type	aesni_cbc_sha256_enc_avx2,@function
2440.align	64
2441aesni_cbc_sha256_enc_avx2:
2442.cfi_startproc
2443.Lavx2_shortcut:
2444	movq	8(%rsp),%r10
2445	movq	%rsp,%rax
2446.cfi_def_cfa_register	%rax
2447	pushq	%rbx
2448.cfi_offset	%rbx,-16
2449	pushq	%rbp
2450.cfi_offset	%rbp,-24
2451	pushq	%r12
2452.cfi_offset	%r12,-32
2453	pushq	%r13
2454.cfi_offset	%r13,-40
2455	pushq	%r14
2456.cfi_offset	%r14,-48
2457	pushq	%r15
2458.cfi_offset	%r15,-56
2459	subq	$576,%rsp
2460	andq	$-1024,%rsp
2461	addq	$448,%rsp
2462
2463	shlq	$6,%rdx
2464	subq	%rdi,%rsi
2465	subq	%rdi,%r10
2466	addq	%rdi,%rdx
2467
2468
2469
2470	movq	%rdx,64+16(%rsp)
2471
2472	movq	%r8,64+32(%rsp)
2473	movq	%r9,64+40(%rsp)
2474	movq	%r10,64+48(%rsp)
2475	movq	%rax,120(%rsp)
2476.cfi_escape	0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
2477.Lprologue_avx2:
2478	vzeroall
2479
2480	movq	%rdi,%r13
2481	vpinsrq	$1,%rsi,%xmm15,%xmm15
2482	leaq	128(%rcx),%rdi
2483	leaq	K256+544(%rip),%r12
2484	movl	240-128(%rdi),%r14d
2485	movq	%r9,%r15
2486	movq	%r10,%rsi
2487	vmovdqu	(%r8),%xmm8
2488	leaq	-9(%r14),%r14
2489
2490	vmovdqa	0(%r12,%r14,8),%xmm14
2491	vmovdqa	16(%r12,%r14,8),%xmm13
2492	vmovdqa	32(%r12,%r14,8),%xmm12
2493
2494	subq	$-64,%r13
2495	movl	0(%r15),%eax
2496	leaq	(%rsi,%r13,1),%r12
2497	movl	4(%r15),%ebx
2498	cmpq	%rdx,%r13
2499	movl	8(%r15),%ecx
2500	cmoveq	%rsp,%r12
2501	movl	12(%r15),%edx
2502	movl	16(%r15),%r8d
2503	movl	20(%r15),%r9d
2504	movl	24(%r15),%r10d
2505	movl	28(%r15),%r11d
2506	vmovdqu	0-128(%rdi),%xmm10
2507	jmp	.Loop_avx2
2508.align	16
2509.Loop_avx2:
2510	vmovdqa	K256+512(%rip),%ymm7
2511	vmovdqu	-64+0(%rsi,%r13,1),%xmm0
2512	vmovdqu	-64+16(%rsi,%r13,1),%xmm1
2513	vmovdqu	-64+32(%rsi,%r13,1),%xmm2
2514	vmovdqu	-64+48(%rsi,%r13,1),%xmm3
2515
2516	vinserti128	$1,(%r12),%ymm0,%ymm0
2517	vinserti128	$1,16(%r12),%ymm1,%ymm1
2518	vpshufb	%ymm7,%ymm0,%ymm0
2519	vinserti128	$1,32(%r12),%ymm2,%ymm2
2520	vpshufb	%ymm7,%ymm1,%ymm1
2521	vinserti128	$1,48(%r12),%ymm3,%ymm3
2522
2523	leaq	K256(%rip),%rbp
2524	vpshufb	%ymm7,%ymm2,%ymm2
2525	leaq	-64(%r13),%r13
2526	vpaddd	0(%rbp),%ymm0,%ymm4
2527	vpshufb	%ymm7,%ymm3,%ymm3
2528	vpaddd	32(%rbp),%ymm1,%ymm5
2529	vpaddd	64(%rbp),%ymm2,%ymm6
2530	vpaddd	96(%rbp),%ymm3,%ymm7
2531	vmovdqa	%ymm4,0(%rsp)
2532	xorl	%r14d,%r14d
2533	vmovdqa	%ymm5,32(%rsp)
2534
2535	movq	120(%rsp),%rsi
2536.cfi_def_cfa	%rsi,8
2537	leaq	-64(%rsp),%rsp
2538
2539
2540
2541	movq	%rsi,-8(%rsp)
2542.cfi_escape	0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2543	movl	%ebx,%esi
2544	vmovdqa	%ymm6,0(%rsp)
2545	xorl	%ecx,%esi
2546	vmovdqa	%ymm7,32(%rsp)
2547	movl	%r9d,%r12d
2548	subq	$-32*4,%rbp
2549	jmp	.Lavx2_00_47
2550
2551.align	16
2552.Lavx2_00_47:
2553	vmovdqu	(%r13),%xmm9
2554	vpinsrq	$0,%r13,%xmm15,%xmm15
2555	leaq	-64(%rsp),%rsp
2556.cfi_escape	0x0f,0x05,0x77,0x38,0x06,0x23,0x08
2557
2558	pushq	64-8(%rsp)
2559.cfi_escape	0x0f,0x05,0x77,0x00,0x06,0x23,0x08
2560	leaq	8(%rsp),%rsp
2561.cfi_escape	0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2562	vpalignr	$4,%ymm0,%ymm1,%ymm4
2563	addl	0+128(%rsp),%r11d
2564	andl	%r8d,%r12d
2565	rorxl	$25,%r8d,%r13d
2566	vpalignr	$4,%ymm2,%ymm3,%ymm7
2567	rorxl	$11,%r8d,%r15d
2568	leal	(%rax,%r14,1),%eax
2569	leal	(%r11,%r12,1),%r11d
2570	vpsrld	$7,%ymm4,%ymm6
2571	andnl	%r10d,%r8d,%r12d
2572	xorl	%r15d,%r13d
2573	rorxl	$6,%r8d,%r14d
2574	vpaddd	%ymm7,%ymm0,%ymm0
2575	leal	(%r11,%r12,1),%r11d
2576	xorl	%r14d,%r13d
2577	movl	%eax,%r15d
2578	vpsrld	$3,%ymm4,%ymm7
2579	rorxl	$22,%eax,%r12d
2580	leal	(%r11,%r13,1),%r11d
2581	xorl	%ebx,%r15d
2582	vpslld	$14,%ymm4,%ymm5
2583	rorxl	$13,%eax,%r14d
2584	rorxl	$2,%eax,%r13d
2585	leal	(%rdx,%r11,1),%edx
2586	vpxor	%ymm6,%ymm7,%ymm4
2587	andl	%r15d,%esi
2588	vpxor	%xmm10,%xmm9,%xmm9
2589	vmovdqu	16-128(%rdi),%xmm10
2590	xorl	%r12d,%r14d
2591	xorl	%ebx,%esi
2592	vpshufd	$250,%ymm3,%ymm7
2593	xorl	%r13d,%r14d
2594	leal	(%r11,%rsi,1),%r11d
2595	movl	%r8d,%r12d
2596	vpsrld	$11,%ymm6,%ymm6
2597	addl	4+128(%rsp),%r10d
2598	andl	%edx,%r12d
2599	rorxl	$25,%edx,%r13d
2600	vpxor	%ymm5,%ymm4,%ymm4
2601	rorxl	$11,%edx,%esi
2602	leal	(%r11,%r14,1),%r11d
2603	leal	(%r10,%r12,1),%r10d
2604	vpslld	$11,%ymm5,%ymm5
2605	andnl	%r9d,%edx,%r12d
2606	xorl	%esi,%r13d
2607	rorxl	$6,%edx,%r14d
2608	vpxor	%ymm6,%ymm4,%ymm4
2609	leal	(%r10,%r12,1),%r10d
2610	xorl	%r14d,%r13d
2611	movl	%r11d,%esi
2612	vpsrld	$10,%ymm7,%ymm6
2613	rorxl	$22,%r11d,%r12d
2614	leal	(%r10,%r13,1),%r10d
2615	xorl	%eax,%esi
2616	vpxor	%ymm5,%ymm4,%ymm4
2617	rorxl	$13,%r11d,%r14d
2618	rorxl	$2,%r11d,%r13d
2619	leal	(%rcx,%r10,1),%ecx
2620	vpsrlq	$17,%ymm7,%ymm7
2621	andl	%esi,%r15d
2622	vpxor	%xmm8,%xmm9,%xmm9
2623	xorl	%r12d,%r14d
2624	xorl	%eax,%r15d
2625	vpaddd	%ymm4,%ymm0,%ymm0
2626	xorl	%r13d,%r14d
2627	leal	(%r10,%r15,1),%r10d
2628	movl	%edx,%r12d
2629	vpxor	%ymm7,%ymm6,%ymm6
2630	addl	8+128(%rsp),%r9d
2631	andl	%ecx,%r12d
2632	rorxl	$25,%ecx,%r13d
2633	vpsrlq	$2,%ymm7,%ymm7
2634	rorxl	$11,%ecx,%r15d
2635	leal	(%r10,%r14,1),%r10d
2636	leal	(%r9,%r12,1),%r9d
2637	vpxor	%ymm7,%ymm6,%ymm6
2638	andnl	%r8d,%ecx,%r12d
2639	xorl	%r15d,%r13d
2640	rorxl	$6,%ecx,%r14d
2641	vpshufd	$132,%ymm6,%ymm6
2642	leal	(%r9,%r12,1),%r9d
2643	xorl	%r14d,%r13d
2644	movl	%r10d,%r15d
2645	vpsrldq	$8,%ymm6,%ymm6
2646	rorxl	$22,%r10d,%r12d
2647	leal	(%r9,%r13,1),%r9d
2648	xorl	%r11d,%r15d
2649	vpaddd	%ymm6,%ymm0,%ymm0
2650	rorxl	$13,%r10d,%r14d
2651	rorxl	$2,%r10d,%r13d
2652	leal	(%rbx,%r9,1),%ebx
2653	vpshufd	$80,%ymm0,%ymm7
2654	andl	%r15d,%esi
2655	vaesenc	%xmm10,%xmm9,%xmm9
2656	vmovdqu	32-128(%rdi),%xmm10
2657	xorl	%r12d,%r14d
2658	xorl	%r11d,%esi
2659	vpsrld	$10,%ymm7,%ymm6
2660	xorl	%r13d,%r14d
2661	leal	(%r9,%rsi,1),%r9d
2662	movl	%ecx,%r12d
2663	vpsrlq	$17,%ymm7,%ymm7
2664	addl	12+128(%rsp),%r8d
2665	andl	%ebx,%r12d
2666	rorxl	$25,%ebx,%r13d
2667	vpxor	%ymm7,%ymm6,%ymm6
2668	rorxl	$11,%ebx,%esi
2669	leal	(%r9,%r14,1),%r9d
2670	leal	(%r8,%r12,1),%r8d
2671	vpsrlq	$2,%ymm7,%ymm7
2672	andnl	%edx,%ebx,%r12d
2673	xorl	%esi,%r13d
2674	rorxl	$6,%ebx,%r14d
2675	vpxor	%ymm7,%ymm6,%ymm6
2676	leal	(%r8,%r12,1),%r8d
2677	xorl	%r14d,%r13d
2678	movl	%r9d,%esi
2679	vpshufd	$232,%ymm6,%ymm6
2680	rorxl	$22,%r9d,%r12d
2681	leal	(%r8,%r13,1),%r8d
2682	xorl	%r10d,%esi
2683	vpslldq	$8,%ymm6,%ymm6
2684	rorxl	$13,%r9d,%r14d
2685	rorxl	$2,%r9d,%r13d
2686	leal	(%rax,%r8,1),%eax
2687	vpaddd	%ymm6,%ymm0,%ymm0
2688	andl	%esi,%r15d
2689	vaesenc	%xmm10,%xmm9,%xmm9
2690	vmovdqu	48-128(%rdi),%xmm10
2691	xorl	%r12d,%r14d
2692	xorl	%r10d,%r15d
2693	vpaddd	0(%rbp),%ymm0,%ymm6
2694	xorl	%r13d,%r14d
2695	leal	(%r8,%r15,1),%r8d
2696	movl	%ebx,%r12d
2697	vmovdqa	%ymm6,0(%rsp)
2698	vpalignr	$4,%ymm1,%ymm2,%ymm4
2699	addl	32+128(%rsp),%edx
2700	andl	%eax,%r12d
2701	rorxl	$25,%eax,%r13d
2702	vpalignr	$4,%ymm3,%ymm0,%ymm7
2703	rorxl	$11,%eax,%r15d
2704	leal	(%r8,%r14,1),%r8d
2705	leal	(%rdx,%r12,1),%edx
2706	vpsrld	$7,%ymm4,%ymm6
2707	andnl	%ecx,%eax,%r12d
2708	xorl	%r15d,%r13d
2709	rorxl	$6,%eax,%r14d
2710	vpaddd	%ymm7,%ymm1,%ymm1
2711	leal	(%rdx,%r12,1),%edx
2712	xorl	%r14d,%r13d
2713	movl	%r8d,%r15d
2714	vpsrld	$3,%ymm4,%ymm7
2715	rorxl	$22,%r8d,%r12d
2716	leal	(%rdx,%r13,1),%edx
2717	xorl	%r9d,%r15d
2718	vpslld	$14,%ymm4,%ymm5
2719	rorxl	$13,%r8d,%r14d
2720	rorxl	$2,%r8d,%r13d
2721	leal	(%r11,%rdx,1),%r11d
2722	vpxor	%ymm6,%ymm7,%ymm4
2723	andl	%r15d,%esi
2724	vaesenc	%xmm10,%xmm9,%xmm9
2725	vmovdqu	64-128(%rdi),%xmm10
2726	xorl	%r12d,%r14d
2727	xorl	%r9d,%esi
2728	vpshufd	$250,%ymm0,%ymm7
2729	xorl	%r13d,%r14d
2730	leal	(%rdx,%rsi,1),%edx
2731	movl	%eax,%r12d
2732	vpsrld	$11,%ymm6,%ymm6
2733	addl	36+128(%rsp),%ecx
2734	andl	%r11d,%r12d
2735	rorxl	$25,%r11d,%r13d
2736	vpxor	%ymm5,%ymm4,%ymm4
2737	rorxl	$11,%r11d,%esi
2738	leal	(%rdx,%r14,1),%edx
2739	leal	(%rcx,%r12,1),%ecx
2740	vpslld	$11,%ymm5,%ymm5
2741	andnl	%ebx,%r11d,%r12d
2742	xorl	%esi,%r13d
2743	rorxl	$6,%r11d,%r14d
2744	vpxor	%ymm6,%ymm4,%ymm4
2745	leal	(%rcx,%r12,1),%ecx
2746	xorl	%r14d,%r13d
2747	movl	%edx,%esi
2748	vpsrld	$10,%ymm7,%ymm6
2749	rorxl	$22,%edx,%r12d
2750	leal	(%rcx,%r13,1),%ecx
2751	xorl	%r8d,%esi
2752	vpxor	%ymm5,%ymm4,%ymm4
2753	rorxl	$13,%edx,%r14d
2754	rorxl	$2,%edx,%r13d
2755	leal	(%r10,%rcx,1),%r10d
2756	vpsrlq	$17,%ymm7,%ymm7
2757	andl	%esi,%r15d
2758	vaesenc	%xmm10,%xmm9,%xmm9
2759	vmovdqu	80-128(%rdi),%xmm10
2760	xorl	%r12d,%r14d
2761	xorl	%r8d,%r15d
2762	vpaddd	%ymm4,%ymm1,%ymm1
2763	xorl	%r13d,%r14d
2764	leal	(%rcx,%r15,1),%ecx
2765	movl	%r11d,%r12d
2766	vpxor	%ymm7,%ymm6,%ymm6
2767	addl	40+128(%rsp),%ebx
2768	andl	%r10d,%r12d
2769	rorxl	$25,%r10d,%r13d
2770	vpsrlq	$2,%ymm7,%ymm7
2771	rorxl	$11,%r10d,%r15d
2772	leal	(%rcx,%r14,1),%ecx
2773	leal	(%rbx,%r12,1),%ebx
2774	vpxor	%ymm7,%ymm6,%ymm6
2775	andnl	%eax,%r10d,%r12d
2776	xorl	%r15d,%r13d
2777	rorxl	$6,%r10d,%r14d
2778	vpshufd	$132,%ymm6,%ymm6
2779	leal	(%rbx,%r12,1),%ebx
2780	xorl	%r14d,%r13d
2781	movl	%ecx,%r15d
2782	vpsrldq	$8,%ymm6,%ymm6
2783	rorxl	$22,%ecx,%r12d
2784	leal	(%rbx,%r13,1),%ebx
2785	xorl	%edx,%r15d
2786	vpaddd	%ymm6,%ymm1,%ymm1
2787	rorxl	$13,%ecx,%r14d
2788	rorxl	$2,%ecx,%r13d
2789	leal	(%r9,%rbx,1),%r9d
2790	vpshufd	$80,%ymm1,%ymm7
2791	andl	%r15d,%esi
2792	vaesenc	%xmm10,%xmm9,%xmm9
2793	vmovdqu	96-128(%rdi),%xmm10
2794	xorl	%r12d,%r14d
2795	xorl	%edx,%esi
2796	vpsrld	$10,%ymm7,%ymm6
2797	xorl	%r13d,%r14d
2798	leal	(%rbx,%rsi,1),%ebx
2799	movl	%r10d,%r12d
2800	vpsrlq	$17,%ymm7,%ymm7
2801	addl	44+128(%rsp),%eax
2802	andl	%r9d,%r12d
2803	rorxl	$25,%r9d,%r13d
2804	vpxor	%ymm7,%ymm6,%ymm6
2805	rorxl	$11,%r9d,%esi
2806	leal	(%rbx,%r14,1),%ebx
2807	leal	(%rax,%r12,1),%eax
2808	vpsrlq	$2,%ymm7,%ymm7
2809	andnl	%r11d,%r9d,%r12d
2810	xorl	%esi,%r13d
2811	rorxl	$6,%r9d,%r14d
2812	vpxor	%ymm7,%ymm6,%ymm6
2813	leal	(%rax,%r12,1),%eax
2814	xorl	%r14d,%r13d
2815	movl	%ebx,%esi
2816	vpshufd	$232,%ymm6,%ymm6
2817	rorxl	$22,%ebx,%r12d
2818	leal	(%rax,%r13,1),%eax
2819	xorl	%ecx,%esi
2820	vpslldq	$8,%ymm6,%ymm6
2821	rorxl	$13,%ebx,%r14d
2822	rorxl	$2,%ebx,%r13d
2823	leal	(%r8,%rax,1),%r8d
2824	vpaddd	%ymm6,%ymm1,%ymm1
2825	andl	%esi,%r15d
2826	vaesenc	%xmm10,%xmm9,%xmm9
2827	vmovdqu	112-128(%rdi),%xmm10
2828	xorl	%r12d,%r14d
2829	xorl	%ecx,%r15d
2830	vpaddd	32(%rbp),%ymm1,%ymm6
2831	xorl	%r13d,%r14d
2832	leal	(%rax,%r15,1),%eax
2833	movl	%r9d,%r12d
2834	vmovdqa	%ymm6,32(%rsp)
2835	leaq	-64(%rsp),%rsp
2836.cfi_escape	0x0f,0x05,0x77,0x38,0x06,0x23,0x08
2837
2838	pushq	64-8(%rsp)
2839.cfi_escape	0x0f,0x05,0x77,0x00,0x06,0x23,0x08
2840	leaq	8(%rsp),%rsp
2841.cfi_escape	0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2842	vpalignr	$4,%ymm2,%ymm3,%ymm4
2843	addl	0+128(%rsp),%r11d
2844	andl	%r8d,%r12d
2845	rorxl	$25,%r8d,%r13d
2846	vpalignr	$4,%ymm0,%ymm1,%ymm7
2847	rorxl	$11,%r8d,%r15d
2848	leal	(%rax,%r14,1),%eax
2849	leal	(%r11,%r12,1),%r11d
2850	vpsrld	$7,%ymm4,%ymm6
2851	andnl	%r10d,%r8d,%r12d
2852	xorl	%r15d,%r13d
2853	rorxl	$6,%r8d,%r14d
2854	vpaddd	%ymm7,%ymm2,%ymm2
2855	leal	(%r11,%r12,1),%r11d
2856	xorl	%r14d,%r13d
2857	movl	%eax,%r15d
2858	vpsrld	$3,%ymm4,%ymm7
2859	rorxl	$22,%eax,%r12d
2860	leal	(%r11,%r13,1),%r11d
2861	xorl	%ebx,%r15d
2862	vpslld	$14,%ymm4,%ymm5
2863	rorxl	$13,%eax,%r14d
2864	rorxl	$2,%eax,%r13d
2865	leal	(%rdx,%r11,1),%edx
2866	vpxor	%ymm6,%ymm7,%ymm4
2867	andl	%r15d,%esi
2868	vaesenc	%xmm10,%xmm9,%xmm9
2869	vmovdqu	128-128(%rdi),%xmm10
2870	xorl	%r12d,%r14d
2871	xorl	%ebx,%esi
2872	vpshufd	$250,%ymm1,%ymm7
2873	xorl	%r13d,%r14d
2874	leal	(%r11,%rsi,1),%r11d
2875	movl	%r8d,%r12d
2876	vpsrld	$11,%ymm6,%ymm6
2877	addl	4+128(%rsp),%r10d
2878	andl	%edx,%r12d
2879	rorxl	$25,%edx,%r13d
2880	vpxor	%ymm5,%ymm4,%ymm4
2881	rorxl	$11,%edx,%esi
2882	leal	(%r11,%r14,1),%r11d
2883	leal	(%r10,%r12,1),%r10d
2884	vpslld	$11,%ymm5,%ymm5
2885	andnl	%r9d,%edx,%r12d
2886	xorl	%esi,%r13d
2887	rorxl	$6,%edx,%r14d
2888	vpxor	%ymm6,%ymm4,%ymm4
2889	leal	(%r10,%r12,1),%r10d
2890	xorl	%r14d,%r13d
2891	movl	%r11d,%esi
2892	vpsrld	$10,%ymm7,%ymm6
2893	rorxl	$22,%r11d,%r12d
2894	leal	(%r10,%r13,1),%r10d
2895	xorl	%eax,%esi
2896	vpxor	%ymm5,%ymm4,%ymm4
2897	rorxl	$13,%r11d,%r14d
2898	rorxl	$2,%r11d,%r13d
2899	leal	(%rcx,%r10,1),%ecx
2900	vpsrlq	$17,%ymm7,%ymm7
2901	andl	%esi,%r15d
2902	vaesenc	%xmm10,%xmm9,%xmm9
2903	vmovdqu	144-128(%rdi),%xmm10
2904	xorl	%r12d,%r14d
2905	xorl	%eax,%r15d
2906	vpaddd	%ymm4,%ymm2,%ymm2
2907	xorl	%r13d,%r14d
2908	leal	(%r10,%r15,1),%r10d
2909	movl	%edx,%r12d
2910	vpxor	%ymm7,%ymm6,%ymm6
2911	addl	8+128(%rsp),%r9d
2912	andl	%ecx,%r12d
2913	rorxl	$25,%ecx,%r13d
2914	vpsrlq	$2,%ymm7,%ymm7
2915	rorxl	$11,%ecx,%r15d
2916	leal	(%r10,%r14,1),%r10d
2917	leal	(%r9,%r12,1),%r9d
2918	vpxor	%ymm7,%ymm6,%ymm6
2919	andnl	%r8d,%ecx,%r12d
2920	xorl	%r15d,%r13d
2921	rorxl	$6,%ecx,%r14d
2922	vpshufd	$132,%ymm6,%ymm6
2923	leal	(%r9,%r12,1),%r9d
2924	xorl	%r14d,%r13d
2925	movl	%r10d,%r15d
2926	vpsrldq	$8,%ymm6,%ymm6
2927	rorxl	$22,%r10d,%r12d
2928	leal	(%r9,%r13,1),%r9d
2929	xorl	%r11d,%r15d
2930	vpaddd	%ymm6,%ymm2,%ymm2
2931	rorxl	$13,%r10d,%r14d
2932	rorxl	$2,%r10d,%r13d
2933	leal	(%rbx,%r9,1),%ebx
2934	vpshufd	$80,%ymm2,%ymm7
2935	andl	%r15d,%esi
2936	vaesenc	%xmm10,%xmm9,%xmm9
2937	vmovdqu	160-128(%rdi),%xmm10
2938	xorl	%r12d,%r14d
2939	xorl	%r11d,%esi
2940	vpsrld	$10,%ymm7,%ymm6
2941	xorl	%r13d,%r14d
2942	leal	(%r9,%rsi,1),%r9d
2943	movl	%ecx,%r12d
2944	vpsrlq	$17,%ymm7,%ymm7
2945	addl	12+128(%rsp),%r8d
2946	andl	%ebx,%r12d
2947	rorxl	$25,%ebx,%r13d
2948	vpxor	%ymm7,%ymm6,%ymm6
2949	rorxl	$11,%ebx,%esi
2950	leal	(%r9,%r14,1),%r9d
2951	leal	(%r8,%r12,1),%r8d
2952	vpsrlq	$2,%ymm7,%ymm7
2953	andnl	%edx,%ebx,%r12d
2954	xorl	%esi,%r13d
2955	rorxl	$6,%ebx,%r14d
2956	vpxor	%ymm7,%ymm6,%ymm6
2957	leal	(%r8,%r12,1),%r8d
2958	xorl	%r14d,%r13d
2959	movl	%r9d,%esi
2960	vpshufd	$232,%ymm6,%ymm6
2961	rorxl	$22,%r9d,%r12d
2962	leal	(%r8,%r13,1),%r8d
2963	xorl	%r10d,%esi
2964	vpslldq	$8,%ymm6,%ymm6
2965	rorxl	$13,%r9d,%r14d
2966	rorxl	$2,%r9d,%r13d
2967	leal	(%rax,%r8,1),%eax
2968	vpaddd	%ymm6,%ymm2,%ymm2
2969	andl	%esi,%r15d
2970	vaesenclast	%xmm10,%xmm9,%xmm11
2971	vaesenc	%xmm10,%xmm9,%xmm9
2972	vmovdqu	176-128(%rdi),%xmm10
2973	xorl	%r12d,%r14d
2974	xorl	%r10d,%r15d
2975	vpaddd	64(%rbp),%ymm2,%ymm6
2976	xorl	%r13d,%r14d
2977	leal	(%r8,%r15,1),%r8d
2978	movl	%ebx,%r12d
2979	vmovdqa	%ymm6,0(%rsp)
2980	vpalignr	$4,%ymm3,%ymm0,%ymm4
2981	addl	32+128(%rsp),%edx
2982	andl	%eax,%r12d
2983	rorxl	$25,%eax,%r13d
2984	vpalignr	$4,%ymm1,%ymm2,%ymm7
2985	rorxl	$11,%eax,%r15d
2986	leal	(%r8,%r14,1),%r8d
2987	leal	(%rdx,%r12,1),%edx
2988	vpsrld	$7,%ymm4,%ymm6
2989	andnl	%ecx,%eax,%r12d
2990	xorl	%r15d,%r13d
2991	rorxl	$6,%eax,%r14d
2992	vpaddd	%ymm7,%ymm3,%ymm3
2993	leal	(%rdx,%r12,1),%edx
2994	xorl	%r14d,%r13d
2995	movl	%r8d,%r15d
2996	vpsrld	$3,%ymm4,%ymm7
2997	rorxl	$22,%r8d,%r12d
2998	leal	(%rdx,%r13,1),%edx
2999	xorl	%r9d,%r15d
3000	vpslld	$14,%ymm4,%ymm5
3001	rorxl	$13,%r8d,%r14d
3002	rorxl	$2,%r8d,%r13d
3003	leal	(%r11,%rdx,1),%r11d
3004	vpxor	%ymm6,%ymm7,%ymm4
3005	andl	%r15d,%esi
3006	vpand	%xmm12,%xmm11,%xmm8
3007	vaesenc	%xmm10,%xmm9,%xmm9
3008	vmovdqu	192-128(%rdi),%xmm10
3009	xorl	%r12d,%r14d
3010	xorl	%r9d,%esi
3011	vpshufd	$250,%ymm2,%ymm7
3012	xorl	%r13d,%r14d
3013	leal	(%rdx,%rsi,1),%edx
3014	movl	%eax,%r12d
3015	vpsrld	$11,%ymm6,%ymm6
3016	addl	36+128(%rsp),%ecx
3017	andl	%r11d,%r12d
3018	rorxl	$25,%r11d,%r13d
3019	vpxor	%ymm5,%ymm4,%ymm4
3020	rorxl	$11,%r11d,%esi
3021	leal	(%rdx,%r14,1),%edx
3022	leal	(%rcx,%r12,1),%ecx
3023	vpslld	$11,%ymm5,%ymm5
3024	andnl	%ebx,%r11d,%r12d
3025	xorl	%esi,%r13d
3026	rorxl	$6,%r11d,%r14d
3027	vpxor	%ymm6,%ymm4,%ymm4
3028	leal	(%rcx,%r12,1),%ecx
3029	xorl	%r14d,%r13d
3030	movl	%edx,%esi
3031	vpsrld	$10,%ymm7,%ymm6
3032	rorxl	$22,%edx,%r12d
3033	leal	(%rcx,%r13,1),%ecx
3034	xorl	%r8d,%esi
3035	vpxor	%ymm5,%ymm4,%ymm4
3036	rorxl	$13,%edx,%r14d
3037	rorxl	$2,%edx,%r13d
3038	leal	(%r10,%rcx,1),%r10d
3039	vpsrlq	$17,%ymm7,%ymm7
3040	andl	%esi,%r15d
3041	vaesenclast	%xmm10,%xmm9,%xmm11
3042	vaesenc	%xmm10,%xmm9,%xmm9
3043	vmovdqu	208-128(%rdi),%xmm10
3044	xorl	%r12d,%r14d
3045	xorl	%r8d,%r15d
3046	vpaddd	%ymm4,%ymm3,%ymm3
3047	xorl	%r13d,%r14d
3048	leal	(%rcx,%r15,1),%ecx
3049	movl	%r11d,%r12d
3050	vpxor	%ymm7,%ymm6,%ymm6
3051	addl	40+128(%rsp),%ebx
3052	andl	%r10d,%r12d
3053	rorxl	$25,%r10d,%r13d
3054	vpsrlq	$2,%ymm7,%ymm7
3055	rorxl	$11,%r10d,%r15d
3056	leal	(%rcx,%r14,1),%ecx
3057	leal	(%rbx,%r12,1),%ebx
3058	vpxor	%ymm7,%ymm6,%ymm6
3059	andnl	%eax,%r10d,%r12d
3060	xorl	%r15d,%r13d
3061	rorxl	$6,%r10d,%r14d
3062	vpshufd	$132,%ymm6,%ymm6
3063	leal	(%rbx,%r12,1),%ebx
3064	xorl	%r14d,%r13d
3065	movl	%ecx,%r15d
3066	vpsrldq	$8,%ymm6,%ymm6
3067	rorxl	$22,%ecx,%r12d
3068	leal	(%rbx,%r13,1),%ebx
3069	xorl	%edx,%r15d
3070	vpaddd	%ymm6,%ymm3,%ymm3
3071	rorxl	$13,%ecx,%r14d
3072	rorxl	$2,%ecx,%r13d
3073	leal	(%r9,%rbx,1),%r9d
3074	vpshufd	$80,%ymm3,%ymm7
3075	andl	%r15d,%esi
3076	vpand	%xmm13,%xmm11,%xmm11
3077	vaesenc	%xmm10,%xmm9,%xmm9
3078	vmovdqu	224-128(%rdi),%xmm10
3079	xorl	%r12d,%r14d
3080	xorl	%edx,%esi
3081	vpsrld	$10,%ymm7,%ymm6
3082	xorl	%r13d,%r14d
3083	leal	(%rbx,%rsi,1),%ebx
3084	movl	%r10d,%r12d
3085	vpsrlq	$17,%ymm7,%ymm7
3086	addl	44+128(%rsp),%eax
3087	andl	%r9d,%r12d
3088	rorxl	$25,%r9d,%r13d
3089	vpxor	%ymm7,%ymm6,%ymm6
3090	rorxl	$11,%r9d,%esi
3091	leal	(%rbx,%r14,1),%ebx
3092	leal	(%rax,%r12,1),%eax
3093	vpsrlq	$2,%ymm7,%ymm7
3094	andnl	%r11d,%r9d,%r12d
3095	xorl	%esi,%r13d
3096	rorxl	$6,%r9d,%r14d
3097	vpxor	%ymm7,%ymm6,%ymm6
3098	leal	(%rax,%r12,1),%eax
3099	xorl	%r14d,%r13d
3100	movl	%ebx,%esi
3101	vpshufd	$232,%ymm6,%ymm6
3102	rorxl	$22,%ebx,%r12d
3103	leal	(%rax,%r13,1),%eax
3104	xorl	%ecx,%esi
3105	vpslldq	$8,%ymm6,%ymm6
3106	rorxl	$13,%ebx,%r14d
3107	rorxl	$2,%ebx,%r13d
3108	leal	(%r8,%rax,1),%r8d
3109	vpaddd	%ymm6,%ymm3,%ymm3
3110	andl	%esi,%r15d
3111	vpor	%xmm11,%xmm8,%xmm8
3112	vaesenclast	%xmm10,%xmm9,%xmm11
3113	vmovdqu	0-128(%rdi),%xmm10
3114	xorl	%r12d,%r14d
3115	xorl	%ecx,%r15d
3116	vpaddd	96(%rbp),%ymm3,%ymm6
3117	xorl	%r13d,%r14d
3118	leal	(%rax,%r15,1),%eax
3119	movl	%r9d,%r12d
3120	vmovdqa	%ymm6,32(%rsp)
3121	vmovq	%xmm15,%r13
3122	vpextrq	$1,%xmm15,%r15
3123	vpand	%xmm14,%xmm11,%xmm11
3124	vpor	%xmm11,%xmm8,%xmm8
3125	vmovdqu	%xmm8,(%r15,%r13,1)
3126	leaq	16(%r13),%r13
3127	leaq	128(%rbp),%rbp
3128	cmpb	$0,3(%rbp)
3129	jne	.Lavx2_00_47
3130	vmovdqu	(%r13),%xmm9
3131	vpinsrq	$0,%r13,%xmm15,%xmm15
3132	addl	0+64(%rsp),%r11d
3133	andl	%r8d,%r12d
3134	rorxl	$25,%r8d,%r13d
3135	rorxl	$11,%r8d,%r15d
3136	leal	(%rax,%r14,1),%eax
3137	leal	(%r11,%r12,1),%r11d
3138	andnl	%r10d,%r8d,%r12d
3139	xorl	%r15d,%r13d
3140	rorxl	$6,%r8d,%r14d
3141	leal	(%r11,%r12,1),%r11d
3142	xorl	%r14d,%r13d
3143	movl	%eax,%r15d
3144	rorxl	$22,%eax,%r12d
3145	leal	(%r11,%r13,1),%r11d
3146	xorl	%ebx,%r15d
3147	rorxl	$13,%eax,%r14d
3148	rorxl	$2,%eax,%r13d
3149	leal	(%rdx,%r11,1),%edx
3150	andl	%r15d,%esi
3151	vpxor	%xmm10,%xmm9,%xmm9
3152	vmovdqu	16-128(%rdi),%xmm10
3153	xorl	%r12d,%r14d
3154	xorl	%ebx,%esi
3155	xorl	%r13d,%r14d
3156	leal	(%r11,%rsi,1),%r11d
3157	movl	%r8d,%r12d
3158	addl	4+64(%rsp),%r10d
3159	andl	%edx,%r12d
3160	rorxl	$25,%edx,%r13d
3161	rorxl	$11,%edx,%esi
3162	leal	(%r11,%r14,1),%r11d
3163	leal	(%r10,%r12,1),%r10d
3164	andnl	%r9d,%edx,%r12d
3165	xorl	%esi,%r13d
3166	rorxl	$6,%edx,%r14d
3167	leal	(%r10,%r12,1),%r10d
3168	xorl	%r14d,%r13d
3169	movl	%r11d,%esi
3170	rorxl	$22,%r11d,%r12d
3171	leal	(%r10,%r13,1),%r10d
3172	xorl	%eax,%esi
3173	rorxl	$13,%r11d,%r14d
3174	rorxl	$2,%r11d,%r13d
3175	leal	(%rcx,%r10,1),%ecx
3176	andl	%esi,%r15d
3177	vpxor	%xmm8,%xmm9,%xmm9
3178	xorl	%r12d,%r14d
3179	xorl	%eax,%r15d
3180	xorl	%r13d,%r14d
3181	leal	(%r10,%r15,1),%r10d
3182	movl	%edx,%r12d
3183	addl	8+64(%rsp),%r9d
3184	andl	%ecx,%r12d
3185	rorxl	$25,%ecx,%r13d
3186	rorxl	$11,%ecx,%r15d
3187	leal	(%r10,%r14,1),%r10d
3188	leal	(%r9,%r12,1),%r9d
3189	andnl	%r8d,%ecx,%r12d
3190	xorl	%r15d,%r13d
3191	rorxl	$6,%ecx,%r14d
3192	leal	(%r9,%r12,1),%r9d
3193	xorl	%r14d,%r13d
3194	movl	%r10d,%r15d
3195	rorxl	$22,%r10d,%r12d
3196	leal	(%r9,%r13,1),%r9d
3197	xorl	%r11d,%r15d
3198	rorxl	$13,%r10d,%r14d
3199	rorxl	$2,%r10d,%r13d
3200	leal	(%rbx,%r9,1),%ebx
3201	andl	%r15d,%esi
3202	vaesenc	%xmm10,%xmm9,%xmm9
3203	vmovdqu	32-128(%rdi),%xmm10
3204	xorl	%r12d,%r14d
3205	xorl	%r11d,%esi
3206	xorl	%r13d,%r14d
3207	leal	(%r9,%rsi,1),%r9d
3208	movl	%ecx,%r12d
3209	addl	12+64(%rsp),%r8d
3210	andl	%ebx,%r12d
3211	rorxl	$25,%ebx,%r13d
3212	rorxl	$11,%ebx,%esi
3213	leal	(%r9,%r14,1),%r9d
3214	leal	(%r8,%r12,1),%r8d
3215	andnl	%edx,%ebx,%r12d
3216	xorl	%esi,%r13d
3217	rorxl	$6,%ebx,%r14d
3218	leal	(%r8,%r12,1),%r8d
3219	xorl	%r14d,%r13d
3220	movl	%r9d,%esi
3221	rorxl	$22,%r9d,%r12d
3222	leal	(%r8,%r13,1),%r8d
3223	xorl	%r10d,%esi
3224	rorxl	$13,%r9d,%r14d
3225	rorxl	$2,%r9d,%r13d
3226	leal	(%rax,%r8,1),%eax
3227	andl	%esi,%r15d
3228	vaesenc	%xmm10,%xmm9,%xmm9
3229	vmovdqu	48-128(%rdi),%xmm10
3230	xorl	%r12d,%r14d
3231	xorl	%r10d,%r15d
3232	xorl	%r13d,%r14d
3233	leal	(%r8,%r15,1),%r8d
3234	movl	%ebx,%r12d
3235	addl	32+64(%rsp),%edx
3236	andl	%eax,%r12d
3237	rorxl	$25,%eax,%r13d
3238	rorxl	$11,%eax,%r15d
3239	leal	(%r8,%r14,1),%r8d
3240	leal	(%rdx,%r12,1),%edx
3241	andnl	%ecx,%eax,%r12d
3242	xorl	%r15d,%r13d
3243	rorxl	$6,%eax,%r14d
3244	leal	(%rdx,%r12,1),%edx
3245	xorl	%r14d,%r13d
3246	movl	%r8d,%r15d
3247	rorxl	$22,%r8d,%r12d
3248	leal	(%rdx,%r13,1),%edx
3249	xorl	%r9d,%r15d
3250	rorxl	$13,%r8d,%r14d
3251	rorxl	$2,%r8d,%r13d
3252	leal	(%r11,%rdx,1),%r11d
3253	andl	%r15d,%esi
3254	vaesenc	%xmm10,%xmm9,%xmm9
3255	vmovdqu	64-128(%rdi),%xmm10
3256	xorl	%r12d,%r14d
3257	xorl	%r9d,%esi
3258	xorl	%r13d,%r14d
3259	leal	(%rdx,%rsi,1),%edx
3260	movl	%eax,%r12d
3261	addl	36+64(%rsp),%ecx
3262	andl	%r11d,%r12d
3263	rorxl	$25,%r11d,%r13d
3264	rorxl	$11,%r11d,%esi
3265	leal	(%rdx,%r14,1),%edx
3266	leal	(%rcx,%r12,1),%ecx
3267	andnl	%ebx,%r11d,%r12d
3268	xorl	%esi,%r13d
3269	rorxl	$6,%r11d,%r14d
3270	leal	(%rcx,%r12,1),%ecx
3271	xorl	%r14d,%r13d
3272	movl	%edx,%esi
3273	rorxl	$22,%edx,%r12d
3274	leal	(%rcx,%r13,1),%ecx
3275	xorl	%r8d,%esi
3276	rorxl	$13,%edx,%r14d
3277	rorxl	$2,%edx,%r13d
3278	leal	(%r10,%rcx,1),%r10d
3279	andl	%esi,%r15d
3280	vaesenc	%xmm10,%xmm9,%xmm9
3281	vmovdqu	80-128(%rdi),%xmm10
3282	xorl	%r12d,%r14d
3283	xorl	%r8d,%r15d
3284	xorl	%r13d,%r14d
3285	leal	(%rcx,%r15,1),%ecx
3286	movl	%r11d,%r12d
3287	addl	40+64(%rsp),%ebx
3288	andl	%r10d,%r12d
3289	rorxl	$25,%r10d,%r13d
3290	rorxl	$11,%r10d,%r15d
3291	leal	(%rcx,%r14,1),%ecx
3292	leal	(%rbx,%r12,1),%ebx
3293	andnl	%eax,%r10d,%r12d
3294	xorl	%r15d,%r13d
3295	rorxl	$6,%r10d,%r14d
3296	leal	(%rbx,%r12,1),%ebx
3297	xorl	%r14d,%r13d
3298	movl	%ecx,%r15d
3299	rorxl	$22,%ecx,%r12d
3300	leal	(%rbx,%r13,1),%ebx
3301	xorl	%edx,%r15d
3302	rorxl	$13,%ecx,%r14d
3303	rorxl	$2,%ecx,%r13d
3304	leal	(%r9,%rbx,1),%r9d
3305	andl	%r15d,%esi
3306	vaesenc	%xmm10,%xmm9,%xmm9
3307	vmovdqu	96-128(%rdi),%xmm10
3308	xorl	%r12d,%r14d
3309	xorl	%edx,%esi
3310	xorl	%r13d,%r14d
3311	leal	(%rbx,%rsi,1),%ebx
3312	movl	%r10d,%r12d
3313	addl	44+64(%rsp),%eax
3314	andl	%r9d,%r12d
3315	rorxl	$25,%r9d,%r13d
3316	rorxl	$11,%r9d,%esi
3317	leal	(%rbx,%r14,1),%ebx
3318	leal	(%rax,%r12,1),%eax
3319	andnl	%r11d,%r9d,%r12d
3320	xorl	%esi,%r13d
3321	rorxl	$6,%r9d,%r14d
3322	leal	(%rax,%r12,1),%eax
3323	xorl	%r14d,%r13d
3324	movl	%ebx,%esi
3325	rorxl	$22,%ebx,%r12d
3326	leal	(%rax,%r13,1),%eax
3327	xorl	%ecx,%esi
3328	rorxl	$13,%ebx,%r14d
3329	rorxl	$2,%ebx,%r13d
3330	leal	(%r8,%rax,1),%r8d
3331	andl	%esi,%r15d
3332	vaesenc	%xmm10,%xmm9,%xmm9
3333	vmovdqu	112-128(%rdi),%xmm10
3334	xorl	%r12d,%r14d
3335	xorl	%ecx,%r15d
3336	xorl	%r13d,%r14d
3337	leal	(%rax,%r15,1),%eax
3338	movl	%r9d,%r12d
3339	addl	0(%rsp),%r11d
3340	andl	%r8d,%r12d
3341	rorxl	$25,%r8d,%r13d
3342	rorxl	$11,%r8d,%r15d
3343	leal	(%rax,%r14,1),%eax
3344	leal	(%r11,%r12,1),%r11d
3345	andnl	%r10d,%r8d,%r12d
3346	xorl	%r15d,%r13d
3347	rorxl	$6,%r8d,%r14d
3348	leal	(%r11,%r12,1),%r11d
3349	xorl	%r14d,%r13d
3350	movl	%eax,%r15d
3351	rorxl	$22,%eax,%r12d
3352	leal	(%r11,%r13,1),%r11d
3353	xorl	%ebx,%r15d
3354	rorxl	$13,%eax,%r14d
3355	rorxl	$2,%eax,%r13d
3356	leal	(%rdx,%r11,1),%edx
3357	andl	%r15d,%esi
3358	vaesenc	%xmm10,%xmm9,%xmm9
3359	vmovdqu	128-128(%rdi),%xmm10
3360	xorl	%r12d,%r14d
3361	xorl	%ebx,%esi
3362	xorl	%r13d,%r14d
3363	leal	(%r11,%rsi,1),%r11d
3364	movl	%r8d,%r12d
3365	addl	4(%rsp),%r10d
3366	andl	%edx,%r12d
3367	rorxl	$25,%edx,%r13d
3368	rorxl	$11,%edx,%esi
3369	leal	(%r11,%r14,1),%r11d
3370	leal	(%r10,%r12,1),%r10d
3371	andnl	%r9d,%edx,%r12d
3372	xorl	%esi,%r13d
3373	rorxl	$6,%edx,%r14d
3374	leal	(%r10,%r12,1),%r10d
3375	xorl	%r14d,%r13d
3376	movl	%r11d,%esi
3377	rorxl	$22,%r11d,%r12d
3378	leal	(%r10,%r13,1),%r10d
3379	xorl	%eax,%esi
3380	rorxl	$13,%r11d,%r14d
3381	rorxl	$2,%r11d,%r13d
3382	leal	(%rcx,%r10,1),%ecx
3383	andl	%esi,%r15d
3384	vaesenc	%xmm10,%xmm9,%xmm9
3385	vmovdqu	144-128(%rdi),%xmm10
3386	xorl	%r12d,%r14d
3387	xorl	%eax,%r15d
3388	xorl	%r13d,%r14d
3389	leal	(%r10,%r15,1),%r10d
3390	movl	%edx,%r12d
3391	addl	8(%rsp),%r9d
3392	andl	%ecx,%r12d
3393	rorxl	$25,%ecx,%r13d
3394	rorxl	$11,%ecx,%r15d
3395	leal	(%r10,%r14,1),%r10d
3396	leal	(%r9,%r12,1),%r9d
3397	andnl	%r8d,%ecx,%r12d
3398	xorl	%r15d,%r13d
3399	rorxl	$6,%ecx,%r14d
3400	leal	(%r9,%r12,1),%r9d
3401	xorl	%r14d,%r13d
3402	movl	%r10d,%r15d
3403	rorxl	$22,%r10d,%r12d
3404	leal	(%r9,%r13,1),%r9d
3405	xorl	%r11d,%r15d
3406	rorxl	$13,%r10d,%r14d
3407	rorxl	$2,%r10d,%r13d
3408	leal	(%rbx,%r9,1),%ebx
3409	andl	%r15d,%esi
3410	vaesenc	%xmm10,%xmm9,%xmm9
3411	vmovdqu	160-128(%rdi),%xmm10
3412	xorl	%r12d,%r14d
3413	xorl	%r11d,%esi
3414	xorl	%r13d,%r14d
3415	leal	(%r9,%rsi,1),%r9d
3416	movl	%ecx,%r12d
3417	addl	12(%rsp),%r8d
3418	andl	%ebx,%r12d
3419	rorxl	$25,%ebx,%r13d
3420	rorxl	$11,%ebx,%esi
3421	leal	(%r9,%r14,1),%r9d
3422	leal	(%r8,%r12,1),%r8d
3423	andnl	%edx,%ebx,%r12d
3424	xorl	%esi,%r13d
3425	rorxl	$6,%ebx,%r14d
3426	leal	(%r8,%r12,1),%r8d
3427	xorl	%r14d,%r13d
3428	movl	%r9d,%esi
3429	rorxl	$22,%r9d,%r12d
3430	leal	(%r8,%r13,1),%r8d
3431	xorl	%r10d,%esi
3432	rorxl	$13,%r9d,%r14d
3433	rorxl	$2,%r9d,%r13d
3434	leal	(%rax,%r8,1),%eax
3435	andl	%esi,%r15d
3436	vaesenclast	%xmm10,%xmm9,%xmm11
3437	vaesenc	%xmm10,%xmm9,%xmm9
3438	vmovdqu	176-128(%rdi),%xmm10
3439	xorl	%r12d,%r14d
3440	xorl	%r10d,%r15d
3441	xorl	%r13d,%r14d
3442	leal	(%r8,%r15,1),%r8d
3443	movl	%ebx,%r12d
3444	addl	32(%rsp),%edx
3445	andl	%eax,%r12d
3446	rorxl	$25,%eax,%r13d
3447	rorxl	$11,%eax,%r15d
3448	leal	(%r8,%r14,1),%r8d
3449	leal	(%rdx,%r12,1),%edx
3450	andnl	%ecx,%eax,%r12d
3451	xorl	%r15d,%r13d
3452	rorxl	$6,%eax,%r14d
3453	leal	(%rdx,%r12,1),%edx
3454	xorl	%r14d,%r13d
3455	movl	%r8d,%r15d
3456	rorxl	$22,%r8d,%r12d
3457	leal	(%rdx,%r13,1),%edx
3458	xorl	%r9d,%r15d
3459	rorxl	$13,%r8d,%r14d
3460	rorxl	$2,%r8d,%r13d
3461	leal	(%r11,%rdx,1),%r11d
3462	andl	%r15d,%esi
3463	vpand	%xmm12,%xmm11,%xmm8
3464	vaesenc	%xmm10,%xmm9,%xmm9
3465	vmovdqu	192-128(%rdi),%xmm10
3466	xorl	%r12d,%r14d
3467	xorl	%r9d,%esi
3468	xorl	%r13d,%r14d
3469	leal	(%rdx,%rsi,1),%edx
3470	movl	%eax,%r12d
3471	addl	36(%rsp),%ecx
3472	andl	%r11d,%r12d
3473	rorxl	$25,%r11d,%r13d
3474	rorxl	$11,%r11d,%esi
3475	leal	(%rdx,%r14,1),%edx
3476	leal	(%rcx,%r12,1),%ecx
3477	andnl	%ebx,%r11d,%r12d
3478	xorl	%esi,%r13d
3479	rorxl	$6,%r11d,%r14d
3480	leal	(%rcx,%r12,1),%ecx
3481	xorl	%r14d,%r13d
3482	movl	%edx,%esi
3483	rorxl	$22,%edx,%r12d
3484	leal	(%rcx,%r13,1),%ecx
3485	xorl	%r8d,%esi
3486	rorxl	$13,%edx,%r14d
3487	rorxl	$2,%edx,%r13d
3488	leal	(%r10,%rcx,1),%r10d
3489	andl	%esi,%r15d
3490	vaesenclast	%xmm10,%xmm9,%xmm11
3491	vaesenc	%xmm10,%xmm9,%xmm9
3492	vmovdqu	208-128(%rdi),%xmm10
3493	xorl	%r12d,%r14d
3494	xorl	%r8d,%r15d
3495	xorl	%r13d,%r14d
3496	leal	(%rcx,%r15,1),%ecx
3497	movl	%r11d,%r12d
3498	addl	40(%rsp),%ebx
3499	andl	%r10d,%r12d
3500	rorxl	$25,%r10d,%r13d
3501	rorxl	$11,%r10d,%r15d
3502	leal	(%rcx,%r14,1),%ecx
3503	leal	(%rbx,%r12,1),%ebx
3504	andnl	%eax,%r10d,%r12d
3505	xorl	%r15d,%r13d
3506	rorxl	$6,%r10d,%r14d
3507	leal	(%rbx,%r12,1),%ebx
3508	xorl	%r14d,%r13d
3509	movl	%ecx,%r15d
3510	rorxl	$22,%ecx,%r12d
3511	leal	(%rbx,%r13,1),%ebx
3512	xorl	%edx,%r15d
3513	rorxl	$13,%ecx,%r14d
3514	rorxl	$2,%ecx,%r13d
3515	leal	(%r9,%rbx,1),%r9d
3516	andl	%r15d,%esi
3517	vpand	%xmm13,%xmm11,%xmm11
3518	vaesenc	%xmm10,%xmm9,%xmm9
3519	vmovdqu	224-128(%rdi),%xmm10
3520	xorl	%r12d,%r14d
3521	xorl	%edx,%esi
3522	xorl	%r13d,%r14d
3523	leal	(%rbx,%rsi,1),%ebx
3524	movl	%r10d,%r12d
3525	addl	44(%rsp),%eax
3526	andl	%r9d,%r12d
3527	rorxl	$25,%r9d,%r13d
3528	rorxl	$11,%r9d,%esi
3529	leal	(%rbx,%r14,1),%ebx
3530	leal	(%rax,%r12,1),%eax
3531	andnl	%r11d,%r9d,%r12d
3532	xorl	%esi,%r13d
3533	rorxl	$6,%r9d,%r14d
3534	leal	(%rax,%r12,1),%eax
3535	xorl	%r14d,%r13d
3536	movl	%ebx,%esi
3537	rorxl	$22,%ebx,%r12d
3538	leal	(%rax,%r13,1),%eax
3539	xorl	%ecx,%esi
3540	rorxl	$13,%ebx,%r14d
3541	rorxl	$2,%ebx,%r13d
3542	leal	(%r8,%rax,1),%r8d
3543	andl	%esi,%r15d
3544	vpor	%xmm11,%xmm8,%xmm8
3545	vaesenclast	%xmm10,%xmm9,%xmm11
3546	vmovdqu	0-128(%rdi),%xmm10
3547	xorl	%r12d,%r14d
3548	xorl	%ecx,%r15d
3549	xorl	%r13d,%r14d
3550	leal	(%rax,%r15,1),%eax
3551	movl	%r9d,%r12d
3552	vpextrq	$1,%xmm15,%r12
3553	vmovq	%xmm15,%r13
3554	movq	552(%rsp),%r15
3555	addl	%r14d,%eax
3556	leaq	448(%rsp),%rbp
3557
3558	vpand	%xmm14,%xmm11,%xmm11
3559	vpor	%xmm11,%xmm8,%xmm8
3560	vmovdqu	%xmm8,(%r12,%r13,1)
3561	leaq	16(%r13),%r13
3562
3563	addl	0(%r15),%eax
3564	addl	4(%r15),%ebx
3565	addl	8(%r15),%ecx
3566	addl	12(%r15),%edx
3567	addl	16(%r15),%r8d
3568	addl	20(%r15),%r9d
3569	addl	24(%r15),%r10d
3570	addl	28(%r15),%r11d
3571
3572	movl	%eax,0(%r15)
3573	movl	%ebx,4(%r15)
3574	movl	%ecx,8(%r15)
3575	movl	%edx,12(%r15)
3576	movl	%r8d,16(%r15)
3577	movl	%r9d,20(%r15)
3578	movl	%r10d,24(%r15)
3579	movl	%r11d,28(%r15)
3580
3581	cmpq	80(%rbp),%r13
3582	je	.Ldone_avx2
3583
3584	xorl	%r14d,%r14d
3585	movl	%ebx,%esi
3586	movl	%r9d,%r12d
3587	xorl	%ecx,%esi
3588	jmp	.Lower_avx2
3589.align	16
3590.Lower_avx2:
3591	vmovdqu	(%r13),%xmm9
3592	vpinsrq	$0,%r13,%xmm15,%xmm15
3593	addl	0+16(%rbp),%r11d
3594	andl	%r8d,%r12d
3595	rorxl	$25,%r8d,%r13d
3596	rorxl	$11,%r8d,%r15d
3597	leal	(%rax,%r14,1),%eax
3598	leal	(%r11,%r12,1),%r11d
3599	andnl	%r10d,%r8d,%r12d
3600	xorl	%r15d,%r13d
3601	rorxl	$6,%r8d,%r14d
3602	leal	(%r11,%r12,1),%r11d
3603	xorl	%r14d,%r13d
3604	movl	%eax,%r15d
3605	rorxl	$22,%eax,%r12d
3606	leal	(%r11,%r13,1),%r11d
3607	xorl	%ebx,%r15d
3608	rorxl	$13,%eax,%r14d
3609	rorxl	$2,%eax,%r13d
3610	leal	(%rdx,%r11,1),%edx
3611	andl	%r15d,%esi
3612	vpxor	%xmm10,%xmm9,%xmm9
3613	vmovdqu	16-128(%rdi),%xmm10
3614	xorl	%r12d,%r14d
3615	xorl	%ebx,%esi
3616	xorl	%r13d,%r14d
3617	leal	(%r11,%rsi,1),%r11d
3618	movl	%r8d,%r12d
3619	addl	4+16(%rbp),%r10d
3620	andl	%edx,%r12d
3621	rorxl	$25,%edx,%r13d
3622	rorxl	$11,%edx,%esi
3623	leal	(%r11,%r14,1),%r11d
3624	leal	(%r10,%r12,1),%r10d
3625	andnl	%r9d,%edx,%r12d
3626	xorl	%esi,%r13d
3627	rorxl	$6,%edx,%r14d
3628	leal	(%r10,%r12,1),%r10d
3629	xorl	%r14d,%r13d
3630	movl	%r11d,%esi
3631	rorxl	$22,%r11d,%r12d
3632	leal	(%r10,%r13,1),%r10d
3633	xorl	%eax,%esi
3634	rorxl	$13,%r11d,%r14d
3635	rorxl	$2,%r11d,%r13d
3636	leal	(%rcx,%r10,1),%ecx
3637	andl	%esi,%r15d
3638	vpxor	%xmm8,%xmm9,%xmm9
3639	xorl	%r12d,%r14d
3640	xorl	%eax,%r15d
3641	xorl	%r13d,%r14d
3642	leal	(%r10,%r15,1),%r10d
3643	movl	%edx,%r12d
3644	addl	8+16(%rbp),%r9d
3645	andl	%ecx,%r12d
3646	rorxl	$25,%ecx,%r13d
3647	rorxl	$11,%ecx,%r15d
3648	leal	(%r10,%r14,1),%r10d
3649	leal	(%r9,%r12,1),%r9d
3650	andnl	%r8d,%ecx,%r12d
3651	xorl	%r15d,%r13d
3652	rorxl	$6,%ecx,%r14d
3653	leal	(%r9,%r12,1),%r9d
3654	xorl	%r14d,%r13d
3655	movl	%r10d,%r15d
3656	rorxl	$22,%r10d,%r12d
3657	leal	(%r9,%r13,1),%r9d
3658	xorl	%r11d,%r15d
3659	rorxl	$13,%r10d,%r14d
3660	rorxl	$2,%r10d,%r13d
3661	leal	(%rbx,%r9,1),%ebx
3662	andl	%r15d,%esi
3663	vaesenc	%xmm10,%xmm9,%xmm9
3664	vmovdqu	32-128(%rdi),%xmm10
3665	xorl	%r12d,%r14d
3666	xorl	%r11d,%esi
3667	xorl	%r13d,%r14d
3668	leal	(%r9,%rsi,1),%r9d
3669	movl	%ecx,%r12d
3670	addl	12+16(%rbp),%r8d
3671	andl	%ebx,%r12d
3672	rorxl	$25,%ebx,%r13d
3673	rorxl	$11,%ebx,%esi
3674	leal	(%r9,%r14,1),%r9d
3675	leal	(%r8,%r12,1),%r8d
3676	andnl	%edx,%ebx,%r12d
3677	xorl	%esi,%r13d
3678	rorxl	$6,%ebx,%r14d
3679	leal	(%r8,%r12,1),%r8d
3680	xorl	%r14d,%r13d
3681	movl	%r9d,%esi
3682	rorxl	$22,%r9d,%r12d
3683	leal	(%r8,%r13,1),%r8d
3684	xorl	%r10d,%esi
3685	rorxl	$13,%r9d,%r14d
3686	rorxl	$2,%r9d,%r13d
3687	leal	(%rax,%r8,1),%eax
3688	andl	%esi,%r15d
3689	vaesenc	%xmm10,%xmm9,%xmm9
3690	vmovdqu	48-128(%rdi),%xmm10
3691	xorl	%r12d,%r14d
3692	xorl	%r10d,%r15d
3693	xorl	%r13d,%r14d
3694	leal	(%r8,%r15,1),%r8d
3695	movl	%ebx,%r12d
3696	addl	32+16(%rbp),%edx
3697	andl	%eax,%r12d
3698	rorxl	$25,%eax,%r13d
3699	rorxl	$11,%eax,%r15d
3700	leal	(%r8,%r14,1),%r8d
3701	leal	(%rdx,%r12,1),%edx
3702	andnl	%ecx,%eax,%r12d
3703	xorl	%r15d,%r13d
3704	rorxl	$6,%eax,%r14d
3705	leal	(%rdx,%r12,1),%edx
3706	xorl	%r14d,%r13d
3707	movl	%r8d,%r15d
3708	rorxl	$22,%r8d,%r12d
3709	leal	(%rdx,%r13,1),%edx
3710	xorl	%r9d,%r15d
3711	rorxl	$13,%r8d,%r14d
3712	rorxl	$2,%r8d,%r13d
3713	leal	(%r11,%rdx,1),%r11d
3714	andl	%r15d,%esi
3715	vaesenc	%xmm10,%xmm9,%xmm9
3716	vmovdqu	64-128(%rdi),%xmm10
3717	xorl	%r12d,%r14d
3718	xorl	%r9d,%esi
3719	xorl	%r13d,%r14d
3720	leal	(%rdx,%rsi,1),%edx
3721	movl	%eax,%r12d
3722	addl	36+16(%rbp),%ecx
3723	andl	%r11d,%r12d
3724	rorxl	$25,%r11d,%r13d
3725	rorxl	$11,%r11d,%esi
3726	leal	(%rdx,%r14,1),%edx
3727	leal	(%rcx,%r12,1),%ecx
3728	andnl	%ebx,%r11d,%r12d
3729	xorl	%esi,%r13d
3730	rorxl	$6,%r11d,%r14d
3731	leal	(%rcx,%r12,1),%ecx
3732	xorl	%r14d,%r13d
3733	movl	%edx,%esi
3734	rorxl	$22,%edx,%r12d
3735	leal	(%rcx,%r13,1),%ecx
3736	xorl	%r8d,%esi
3737	rorxl	$13,%edx,%r14d
3738	rorxl	$2,%edx,%r13d
3739	leal	(%r10,%rcx,1),%r10d
3740	andl	%esi,%r15d
3741	vaesenc	%xmm10,%xmm9,%xmm9
3742	vmovdqu	80-128(%rdi),%xmm10
3743	xorl	%r12d,%r14d
3744	xorl	%r8d,%r15d
3745	xorl	%r13d,%r14d
3746	leal	(%rcx,%r15,1),%ecx
3747	movl	%r11d,%r12d
3748	addl	40+16(%rbp),%ebx
3749	andl	%r10d,%r12d
3750	rorxl	$25,%r10d,%r13d
3751	rorxl	$11,%r10d,%r15d
3752	leal	(%rcx,%r14,1),%ecx
3753	leal	(%rbx,%r12,1),%ebx
3754	andnl	%eax,%r10d,%r12d
3755	xorl	%r15d,%r13d
3756	rorxl	$6,%r10d,%r14d
3757	leal	(%rbx,%r12,1),%ebx
3758	xorl	%r14d,%r13d
3759	movl	%ecx,%r15d
3760	rorxl	$22,%ecx,%r12d
3761	leal	(%rbx,%r13,1),%ebx
3762	xorl	%edx,%r15d
3763	rorxl	$13,%ecx,%r14d
3764	rorxl	$2,%ecx,%r13d
3765	leal	(%r9,%rbx,1),%r9d
3766	andl	%r15d,%esi
3767	vaesenc	%xmm10,%xmm9,%xmm9
3768	vmovdqu	96-128(%rdi),%xmm10
3769	xorl	%r12d,%r14d
3770	xorl	%edx,%esi
3771	xorl	%r13d,%r14d
3772	leal	(%rbx,%rsi,1),%ebx
3773	movl	%r10d,%r12d
3774	addl	44+16(%rbp),%eax
3775	andl	%r9d,%r12d
3776	rorxl	$25,%r9d,%r13d
3777	rorxl	$11,%r9d,%esi
3778	leal	(%rbx,%r14,1),%ebx
3779	leal	(%rax,%r12,1),%eax
3780	andnl	%r11d,%r9d,%r12d
3781	xorl	%esi,%r13d
3782	rorxl	$6,%r9d,%r14d
3783	leal	(%rax,%r12,1),%eax
3784	xorl	%r14d,%r13d
3785	movl	%ebx,%esi
3786	rorxl	$22,%ebx,%r12d
3787	leal	(%rax,%r13,1),%eax
3788	xorl	%ecx,%esi
3789	rorxl	$13,%ebx,%r14d
3790	rorxl	$2,%ebx,%r13d
3791	leal	(%r8,%rax,1),%r8d
3792	andl	%esi,%r15d
3793	vaesenc	%xmm10,%xmm9,%xmm9
3794	vmovdqu	112-128(%rdi),%xmm10
3795	xorl	%r12d,%r14d
3796	xorl	%ecx,%r15d
3797	xorl	%r13d,%r14d
3798	leal	(%rax,%r15,1),%eax
3799	movl	%r9d,%r12d
3800	leaq	-64(%rbp),%rbp
3801	addl	0+16(%rbp),%r11d
3802	andl	%r8d,%r12d
3803	rorxl	$25,%r8d,%r13d
3804	rorxl	$11,%r8d,%r15d
3805	leal	(%rax,%r14,1),%eax
3806	leal	(%r11,%r12,1),%r11d
3807	andnl	%r10d,%r8d,%r12d
3808	xorl	%r15d,%r13d
3809	rorxl	$6,%r8d,%r14d
3810	leal	(%r11,%r12,1),%r11d
3811	xorl	%r14d,%r13d
3812	movl	%eax,%r15d
3813	rorxl	$22,%eax,%r12d
3814	leal	(%r11,%r13,1),%r11d
3815	xorl	%ebx,%r15d
3816	rorxl	$13,%eax,%r14d
3817	rorxl	$2,%eax,%r13d
3818	leal	(%rdx,%r11,1),%edx
3819	andl	%r15d,%esi
3820	vaesenc	%xmm10,%xmm9,%xmm9
3821	vmovdqu	128-128(%rdi),%xmm10
3822	xorl	%r12d,%r14d
3823	xorl	%ebx,%esi
3824	xorl	%r13d,%r14d
3825	leal	(%r11,%rsi,1),%r11d
3826	movl	%r8d,%r12d
3827	addl	4+16(%rbp),%r10d
3828	andl	%edx,%r12d
3829	rorxl	$25,%edx,%r13d
3830	rorxl	$11,%edx,%esi
3831	leal	(%r11,%r14,1),%r11d
3832	leal	(%r10,%r12,1),%r10d
3833	andnl	%r9d,%edx,%r12d
3834	xorl	%esi,%r13d
3835	rorxl	$6,%edx,%r14d
3836	leal	(%r10,%r12,1),%r10d
3837	xorl	%r14d,%r13d
3838	movl	%r11d,%esi
3839	rorxl	$22,%r11d,%r12d
3840	leal	(%r10,%r13,1),%r10d
3841	xorl	%eax,%esi
3842	rorxl	$13,%r11d,%r14d
3843	rorxl	$2,%r11d,%r13d
3844	leal	(%rcx,%r10,1),%ecx
3845	andl	%esi,%r15d
3846	vaesenc	%xmm10,%xmm9,%xmm9
3847	vmovdqu	144-128(%rdi),%xmm10
3848	xorl	%r12d,%r14d
3849	xorl	%eax,%r15d
3850	xorl	%r13d,%r14d
3851	leal	(%r10,%r15,1),%r10d
3852	movl	%edx,%r12d
3853	addl	8+16(%rbp),%r9d
3854	andl	%ecx,%r12d
3855	rorxl	$25,%ecx,%r13d
3856	rorxl	$11,%ecx,%r15d
3857	leal	(%r10,%r14,1),%r10d
3858	leal	(%r9,%r12,1),%r9d
3859	andnl	%r8d,%ecx,%r12d
3860	xorl	%r15d,%r13d
3861	rorxl	$6,%ecx,%r14d
3862	leal	(%r9,%r12,1),%r9d
3863	xorl	%r14d,%r13d
3864	movl	%r10d,%r15d
3865	rorxl	$22,%r10d,%r12d
3866	leal	(%r9,%r13,1),%r9d
3867	xorl	%r11d,%r15d
3868	rorxl	$13,%r10d,%r14d
3869	rorxl	$2,%r10d,%r13d
3870	leal	(%rbx,%r9,1),%ebx
3871	andl	%r15d,%esi
3872	vaesenc	%xmm10,%xmm9,%xmm9
3873	vmovdqu	160-128(%rdi),%xmm10
3874	xorl	%r12d,%r14d
3875	xorl	%r11d,%esi
3876	xorl	%r13d,%r14d
3877	leal	(%r9,%rsi,1),%r9d
3878	movl	%ecx,%r12d
3879	addl	12+16(%rbp),%r8d
3880	andl	%ebx,%r12d
3881	rorxl	$25,%ebx,%r13d
3882	rorxl	$11,%ebx,%esi
3883	leal	(%r9,%r14,1),%r9d
3884	leal	(%r8,%r12,1),%r8d
3885	andnl	%edx,%ebx,%r12d
3886	xorl	%esi,%r13d
3887	rorxl	$6,%ebx,%r14d
3888	leal	(%r8,%r12,1),%r8d
3889	xorl	%r14d,%r13d
3890	movl	%r9d,%esi
3891	rorxl	$22,%r9d,%r12d
3892	leal	(%r8,%r13,1),%r8d
3893	xorl	%r10d,%esi
3894	rorxl	$13,%r9d,%r14d
3895	rorxl	$2,%r9d,%r13d
3896	leal	(%rax,%r8,1),%eax
3897	andl	%esi,%r15d
3898	vaesenclast	%xmm10,%xmm9,%xmm11
3899	vaesenc	%xmm10,%xmm9,%xmm9
3900	vmovdqu	176-128(%rdi),%xmm10
3901	xorl	%r12d,%r14d
3902	xorl	%r10d,%r15d
3903	xorl	%r13d,%r14d
3904	leal	(%r8,%r15,1),%r8d
3905	movl	%ebx,%r12d
3906	addl	32+16(%rbp),%edx
3907	andl	%eax,%r12d
3908	rorxl	$25,%eax,%r13d
3909	rorxl	$11,%eax,%r15d
3910	leal	(%r8,%r14,1),%r8d
3911	leal	(%rdx,%r12,1),%edx
3912	andnl	%ecx,%eax,%r12d
3913	xorl	%r15d,%r13d
3914	rorxl	$6,%eax,%r14d
3915	leal	(%rdx,%r12,1),%edx
3916	xorl	%r14d,%r13d
3917	movl	%r8d,%r15d
3918	rorxl	$22,%r8d,%r12d
3919	leal	(%rdx,%r13,1),%edx
3920	xorl	%r9d,%r15d
3921	rorxl	$13,%r8d,%r14d
3922	rorxl	$2,%r8d,%r13d
3923	leal	(%r11,%rdx,1),%r11d
3924	andl	%r15d,%esi
3925	vpand	%xmm12,%xmm11,%xmm8
3926	vaesenc	%xmm10,%xmm9,%xmm9
3927	vmovdqu	192-128(%rdi),%xmm10
3928	xorl	%r12d,%r14d
3929	xorl	%r9d,%esi
3930	xorl	%r13d,%r14d
3931	leal	(%rdx,%rsi,1),%edx
3932	movl	%eax,%r12d
3933	addl	36+16(%rbp),%ecx
3934	andl	%r11d,%r12d
3935	rorxl	$25,%r11d,%r13d
3936	rorxl	$11,%r11d,%esi
3937	leal	(%rdx,%r14,1),%edx
3938	leal	(%rcx,%r12,1),%ecx
3939	andnl	%ebx,%r11d,%r12d
3940	xorl	%esi,%r13d
3941	rorxl	$6,%r11d,%r14d
3942	leal	(%rcx,%r12,1),%ecx
3943	xorl	%r14d,%r13d
3944	movl	%edx,%esi
3945	rorxl	$22,%edx,%r12d
3946	leal	(%rcx,%r13,1),%ecx
3947	xorl	%r8d,%esi
3948	rorxl	$13,%edx,%r14d
3949	rorxl	$2,%edx,%r13d
3950	leal	(%r10,%rcx,1),%r10d
3951	andl	%esi,%r15d
3952	vaesenclast	%xmm10,%xmm9,%xmm11
3953	vaesenc	%xmm10,%xmm9,%xmm9
3954	vmovdqu	208-128(%rdi),%xmm10
3955	xorl	%r12d,%r14d
3956	xorl	%r8d,%r15d
3957	xorl	%r13d,%r14d
3958	leal	(%rcx,%r15,1),%ecx
3959	movl	%r11d,%r12d
3960	addl	40+16(%rbp),%ebx
3961	andl	%r10d,%r12d
3962	rorxl	$25,%r10d,%r13d
3963	rorxl	$11,%r10d,%r15d
3964	leal	(%rcx,%r14,1),%ecx
3965	leal	(%rbx,%r12,1),%ebx
3966	andnl	%eax,%r10d,%r12d
3967	xorl	%r15d,%r13d
3968	rorxl	$6,%r10d,%r14d
3969	leal	(%rbx,%r12,1),%ebx
3970	xorl	%r14d,%r13d
3971	movl	%ecx,%r15d
3972	rorxl	$22,%ecx,%r12d
3973	leal	(%rbx,%r13,1),%ebx
3974	xorl	%edx,%r15d
3975	rorxl	$13,%ecx,%r14d
3976	rorxl	$2,%ecx,%r13d
3977	leal	(%r9,%rbx,1),%r9d
3978	andl	%r15d,%esi
3979	vpand	%xmm13,%xmm11,%xmm11
3980	vaesenc	%xmm10,%xmm9,%xmm9
3981	vmovdqu	224-128(%rdi),%xmm10
3982	xorl	%r12d,%r14d
3983	xorl	%edx,%esi
3984	xorl	%r13d,%r14d
3985	leal	(%rbx,%rsi,1),%ebx
3986	movl	%r10d,%r12d
3987	addl	44+16(%rbp),%eax
3988	andl	%r9d,%r12d
3989	rorxl	$25,%r9d,%r13d
3990	rorxl	$11,%r9d,%esi
3991	leal	(%rbx,%r14,1),%ebx
3992	leal	(%rax,%r12,1),%eax
3993	andnl	%r11d,%r9d,%r12d
3994	xorl	%esi,%r13d
3995	rorxl	$6,%r9d,%r14d
3996	leal	(%rax,%r12,1),%eax
3997	xorl	%r14d,%r13d
3998	movl	%ebx,%esi
3999	rorxl	$22,%ebx,%r12d
4000	leal	(%rax,%r13,1),%eax
4001	xorl	%ecx,%esi
4002	rorxl	$13,%ebx,%r14d
4003	rorxl	$2,%ebx,%r13d
4004	leal	(%r8,%rax,1),%r8d
4005	andl	%esi,%r15d
4006	vpor	%xmm11,%xmm8,%xmm8
4007	vaesenclast	%xmm10,%xmm9,%xmm11
4008	vmovdqu	0-128(%rdi),%xmm10
4009	xorl	%r12d,%r14d
4010	xorl	%ecx,%r15d
4011	xorl	%r13d,%r14d
4012	leal	(%rax,%r15,1),%eax
4013	movl	%r9d,%r12d
4014	vmovq	%xmm15,%r13
4015	vpextrq	$1,%xmm15,%r15
4016	vpand	%xmm14,%xmm11,%xmm11
4017	vpor	%xmm11,%xmm8,%xmm8
4018	leaq	-64(%rbp),%rbp
4019	vmovdqu	%xmm8,(%r15,%r13,1)
4020	leaq	16(%r13),%r13
4021	cmpq	%rsp,%rbp
4022	jae	.Lower_avx2
4023
4024	movq	552(%rsp),%r15
4025	leaq	64(%r13),%r13
4026	movq	560(%rsp),%rsi
4027	addl	%r14d,%eax
4028	leaq	448(%rsp),%rsp
4029
4030	addl	0(%r15),%eax
4031	addl	4(%r15),%ebx
4032	addl	8(%r15),%ecx
4033	addl	12(%r15),%edx
4034	addl	16(%r15),%r8d
4035	addl	20(%r15),%r9d
4036	addl	24(%r15),%r10d
4037	leaq	(%rsi,%r13,1),%r12
4038	addl	28(%r15),%r11d
4039
4040	cmpq	64+16(%rsp),%r13
4041
4042	movl	%eax,0(%r15)
4043	cmoveq	%rsp,%r12
4044	movl	%ebx,4(%r15)
4045	movl	%ecx,8(%r15)
4046	movl	%edx,12(%r15)
4047	movl	%r8d,16(%r15)
4048	movl	%r9d,20(%r15)
4049	movl	%r10d,24(%r15)
4050	movl	%r11d,28(%r15)
4051
4052	jbe	.Loop_avx2
4053	leaq	(%rsp),%rbp
4054
4055
4056.cfi_escape	0x0f,0x06,0x76,0xf8,0x00,0x06,0x23,0x08
4057
4058.Ldone_avx2:
4059	movq	64+32(%rbp),%r8
4060	movq	64+56(%rbp),%rsi
4061.cfi_def_cfa	%rsi,8
4062	vmovdqu	%xmm8,(%r8)
4063	vzeroall
4064	movq	-48(%rsi),%r15
4065.cfi_restore	%r15
4066	movq	-40(%rsi),%r14
4067.cfi_restore	%r14
4068	movq	-32(%rsi),%r13
4069.cfi_restore	%r13
4070	movq	-24(%rsi),%r12
4071.cfi_restore	%r12
4072	movq	-16(%rsi),%rbp
4073.cfi_restore	%rbp
4074	movq	-8(%rsi),%rbx
4075.cfi_restore	%rbx
4076	leaq	(%rsi),%rsp
4077.cfi_def_cfa_register	%rsp
4078.Lepilogue_avx2:
4079	.byte	0xf3,0xc3
4080.cfi_endproc
4081.size	aesni_cbc_sha256_enc_avx2,.-aesni_cbc_sha256_enc_avx2
#-----------------------------------------------------------------------
# aesni_cbc_sha256_enc_shaext -- SHA-NI + AES-NI "stitched" implementation,
# reached from the aesni_cbc_sha256_enc dispatcher at the top of this file
# when bit 61 of OPENSSL_ia32cap_P (SHA extensions) is set.
#
# NOTE(review): register roles below are inferred from their use inside
# this block; confirm against aesni-sha256-x86_64.pl:
#   %rdi    = AES-CBC input (read 64 bytes / four AES blocks per iteration)
#   %rsi    = AES-CBC output (converted to an out-minus-in delta below)
#   %rdx    = 64-byte block count (loop counter, decremented each pass)
#   %rcx    = AES key schedule; dword at 240(%rcx) is the round count
#   %r8     = 16-byte CBC IV, written back on exit
#   %r9     = SHA-256 state (two 16-byte halves), written back on exit
#   8(%rsp) = pointer to the data stream hashed by SHA-256 (7th argument)
#
# The SHA-NI instructions (sha256rnds2 / sha256msg1 / sha256msg2) and the
# pshufb/palignr forms touching xmm8-xmm15 are emitted as raw .byte
# sequences so the file assembles with toolchains that predate those
# mnemonics; each sequence is decoded in a comment beside it.
#-----------------------------------------------------------------------
.type	aesni_cbc_sha256_enc_shaext,@function
.align	32
aesni_cbc_sha256_enc_shaext:
.cfi_startproc
	movq	8(%rsp),%r10		# r10 = SHA-256 input pointer (7th argument)
	leaq	K256+128(%rip),%rax	# rax = K256+128; round constants addressed at -128..+384
	movdqu	(%r9),%xmm1		# SHA-256 state words 0..3
	movdqu	16(%r9),%xmm2		# SHA-256 state words 4..7
	movdqa	512-128(%rax),%xmm3	# byte-swap shuffle mask stored after K256 -- table data is outside this view, TODO confirm

	movl	240(%rcx),%r11d		# r11d = AES round count (10/12/14)
	subq	%rdi,%rsi		# rsi = out - in; stores below use (%rsi,%rdi,1)
	movups	(%rcx),%xmm15		# xmm15 = AES round key[0] (whitening key)
	movups	(%r8),%xmm6		# xmm6 = IV, becomes the running CBC block
	movups	16(%rcx),%xmm4		# xmm4 = AES round key[1]
	leaq	112(%rcx),%rcx		# bias key ptr: round keys now at -80(%rcx)..112(%rcx)

# Shuffle the eight state words into the two-register layout sha256rnds2
# operates on (ABEF/CDGH-style lane order per the Intel SHA-NI spec).
	pshufd	$0x1b,%xmm1,%xmm0
	pshufd	$0xb1,%xmm1,%xmm1
	pshufd	$0x1b,%xmm2,%xmm2
	movdqa	%xmm3,%xmm7		# xmm7 = saved copy of the mask (xmm3 is clobbered each loop)
.byte	102,15,58,15,202,8		# palignr $8,%xmm2,%xmm1
	punpcklqdq	%xmm0,%xmm2

	jmp	.Loop_shaext

.align	16
.Loop_shaext:
# One iteration: hash 64 bytes from r10 while CBC-encrypting the four
# 16-byte AES blocks at rdi, interleaving AES rounds with SHA rounds.
	movdqu	(%r10),%xmm10		# message words W[0..3]
	movdqu	16(%r10),%xmm11		# W[4..7]
	movdqu	32(%r10),%xmm12		# W[8..11]
.byte	102,68,15,56,0,211		# pshufb %xmm3,%xmm10 (byte-swap message words)
	movdqu	48(%r10),%xmm13		# W[12..15]

# SHA rounds 0-3: xmm0 = W+K; each sha256rnds2 performs two rounds from
# the low qword of xmm0, and pshufd $0x0e moves the high qword down.
	movdqa	0-128(%rax),%xmm0
	paddd	%xmm10,%xmm0
.byte	102,68,15,56,0,219		# pshufb %xmm3,%xmm11
	movdqa	%xmm2,%xmm9		# snapshot both state halves for the
	movdqa	%xmm1,%xmm8		# feed-forward addition at loop end
	movups	0(%rdi),%xmm14		# plaintext block 0
	xorps	%xmm15,%xmm14		# pre-whiten plaintext with round key[0]
	xorps	%xmm14,%xmm6		# CBC chain: xor into previous ciphertext/IV
	movups	-80(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movups	-64(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1

# SHA rounds 4-7
	movdqa	32-128(%rax),%xmm0
	paddd	%xmm11,%xmm0
.byte	102,68,15,56,0,227		# pshufb %xmm3,%xmm12
	leaq	64(%r10),%r10		# advance SHA input pointer
	movups	-48(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movups	-32(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1

# SHA rounds 8-11; message-schedule expansion for W[16..] begins here
	movdqa	64-128(%rax),%xmm0
	paddd	%xmm12,%xmm0
.byte	102,68,15,56,0,235		# pshufb %xmm3,%xmm13
.byte	69,15,56,204,211		# sha256msg1 %xmm11,%xmm10
	movups	-16(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm13,%xmm3
.byte	102,65,15,58,15,220,4		# palignr $4,%xmm12,%xmm3
	paddd	%xmm3,%xmm10
	movups	0(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1

# SHA rounds 12-15
	movdqa	96-128(%rax),%xmm0
	paddd	%xmm13,%xmm0
.byte	69,15,56,205,213		# sha256msg2 %xmm13,%xmm10
.byte	69,15,56,204,220		# sha256msg1 %xmm12,%xmm11
	movups	16(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movups	32(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movdqa	%xmm10,%xmm3
.byte	102,65,15,58,15,221,4		# palignr $4,%xmm13,%xmm3
	paddd	%xmm3,%xmm11
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
# SHA rounds 16-19; finish AES block 0
	movdqa	128-128(%rax),%xmm0
	paddd	%xmm10,%xmm0
.byte	69,15,56,205,218		# sha256msg2 %xmm10,%xmm11
.byte	69,15,56,204,229		# sha256msg1 %xmm13,%xmm12
	movups	48(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm11,%xmm3
.byte	102,65,15,58,15,218,4		# palignr $4,%xmm10,%xmm3
	paddd	%xmm3,%xmm12
	cmpl	$11,%r11d		# AES-128 (10 rounds)? skip the extra rounds
	jb	.Laesenclast1
	movups	64(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	80(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
	je	.Laesenclast1		# AES-192 (12 rounds)? done
	movups	96(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	112(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.Laesenclast1:
	aesenclast	%xmm5,%xmm6	# ciphertext block 0 complete
	movups	16-112(%rcx),%xmm4	# reload round key[1] (rcx is biased by +112)
	nop
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
	movups	16(%rdi),%xmm14		# plaintext block 1
	xorps	%xmm15,%xmm14
	movups	%xmm6,0(%rsi,%rdi,1)	# store ciphertext block 0
	xorps	%xmm14,%xmm6		# CBC chain into next block
	movups	-80(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
# SHA rounds 20-23
	movdqa	160-128(%rax),%xmm0
	paddd	%xmm11,%xmm0
.byte	69,15,56,205,227		# sha256msg2 %xmm11,%xmm12
.byte	69,15,56,204,234		# sha256msg1 %xmm10,%xmm13
	movups	-64(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm12,%xmm3
.byte	102,65,15,58,15,219,4		# palignr $4,%xmm11,%xmm3
	paddd	%xmm3,%xmm13
	movups	-48(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
# SHA rounds 24-27
	movdqa	192-128(%rax),%xmm0
	paddd	%xmm12,%xmm0
.byte	69,15,56,205,236		# sha256msg2 %xmm12,%xmm13
.byte	69,15,56,204,211		# sha256msg1 %xmm11,%xmm10
	movups	-32(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm13,%xmm3
.byte	102,65,15,58,15,220,4		# palignr $4,%xmm12,%xmm3
	paddd	%xmm3,%xmm10
	movups	-16(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
# SHA rounds 28-31
	movdqa	224-128(%rax),%xmm0
	paddd	%xmm13,%xmm0
.byte	69,15,56,205,213		# sha256msg2 %xmm13,%xmm10
.byte	69,15,56,204,220		# sha256msg1 %xmm12,%xmm11
	movups	0(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm10,%xmm3
.byte	102,65,15,58,15,221,4		# palignr $4,%xmm13,%xmm3
	paddd	%xmm3,%xmm11
	movups	16(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
# SHA rounds 32-35; finish AES block 1
	movdqa	256-128(%rax),%xmm0
	paddd	%xmm10,%xmm0
.byte	69,15,56,205,218		# sha256msg2 %xmm10,%xmm11
.byte	69,15,56,204,229		# sha256msg1 %xmm13,%xmm12
	movups	32(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm11,%xmm3
.byte	102,65,15,58,15,218,4		# palignr $4,%xmm10,%xmm3
	paddd	%xmm3,%xmm12
	movups	48(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
	cmpl	$11,%r11d		# AES-128/192/256 key-length tail, as above
	jb	.Laesenclast2
	movups	64(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	80(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
	je	.Laesenclast2
	movups	96(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	112(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.Laesenclast2:
	aesenclast	%xmm5,%xmm6	# ciphertext block 1 complete
	movups	16-112(%rcx),%xmm4	# reload round key[1]
	nop
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
	movups	32(%rdi),%xmm14		# plaintext block 2
	xorps	%xmm15,%xmm14
	movups	%xmm6,16(%rsi,%rdi,1)	# store ciphertext block 1
	xorps	%xmm14,%xmm6
	movups	-80(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
# SHA rounds 36-39
	movdqa	288-128(%rax),%xmm0
	paddd	%xmm11,%xmm0
.byte	69,15,56,205,227		# sha256msg2 %xmm11,%xmm12
.byte	69,15,56,204,234		# sha256msg1 %xmm10,%xmm13
	movups	-64(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm12,%xmm3
.byte	102,65,15,58,15,219,4		# palignr $4,%xmm11,%xmm3
	paddd	%xmm3,%xmm13
	movups	-48(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
# SHA rounds 40-43
	movdqa	320-128(%rax),%xmm0
	paddd	%xmm12,%xmm0
.byte	69,15,56,205,236		# sha256msg2 %xmm12,%xmm13
.byte	69,15,56,204,211		# sha256msg1 %xmm11,%xmm10
	movups	-32(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm13,%xmm3
.byte	102,65,15,58,15,220,4		# palignr $4,%xmm12,%xmm3
	paddd	%xmm3,%xmm10
	movups	-16(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
# SHA rounds 44-47
	movdqa	352-128(%rax),%xmm0
	paddd	%xmm13,%xmm0
.byte	69,15,56,205,213		# sha256msg2 %xmm13,%xmm10
.byte	69,15,56,204,220		# sha256msg1 %xmm12,%xmm11
	movups	0(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm10,%xmm3
.byte	102,65,15,58,15,221,4		# palignr $4,%xmm13,%xmm3
	paddd	%xmm3,%xmm11
	movups	16(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
# SHA rounds 48-51
	movdqa	384-128(%rax),%xmm0
	paddd	%xmm10,%xmm0
.byte	69,15,56,205,218		# sha256msg2 %xmm10,%xmm11
.byte	69,15,56,204,229		# sha256msg1 %xmm13,%xmm12
	movups	32(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm11,%xmm3
.byte	102,65,15,58,15,218,4		# palignr $4,%xmm10,%xmm3
	paddd	%xmm3,%xmm12
	movups	48(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1
# SHA rounds 52-55; finish AES block 2
	movdqa	416-128(%rax),%xmm0
	paddd	%xmm11,%xmm0
.byte	69,15,56,205,227		# sha256msg2 %xmm11,%xmm12
.byte	69,15,56,204,234		# sha256msg1 %xmm10,%xmm13
	cmpl	$11,%r11d		# AES-128/192/256 key-length tail, as above
	jb	.Laesenclast3
	movups	64(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	80(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
	je	.Laesenclast3
	movups	96(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	112(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.Laesenclast3:
	aesenclast	%xmm5,%xmm6	# ciphertext block 2 complete
	movups	16-112(%rcx),%xmm4	# reload round key[1]
	nop
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movdqa	%xmm12,%xmm3
.byte	102,65,15,58,15,219,4		# palignr $4,%xmm11,%xmm3
	paddd	%xmm3,%xmm13
	movups	48(%rdi),%xmm14		# plaintext block 3
	xorps	%xmm15,%xmm14
	movups	%xmm6,32(%rsi,%rdi,1)	# store ciphertext block 2
	xorps	%xmm14,%xmm6
	movups	-80(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
	movups	-64(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1

# SHA rounds 56-59
	movdqa	448-128(%rax),%xmm0
	paddd	%xmm12,%xmm0
.byte	69,15,56,205,236		# sha256msg2 %xmm12,%xmm13
	movdqa	%xmm7,%xmm3		# restore the byte-swap mask into xmm3 for the next iteration
	movups	-48(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movups	-32(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1

# SHA rounds 60-63; finish AES block 3
	movdqa	480-128(%rax),%xmm0
	paddd	%xmm13,%xmm0
	movups	-16(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
	movups	0(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
.byte	15,56,203,209		# sha256rnds2 %xmm1,%xmm2
	pshufd	$0x0e,%xmm0,%xmm0
	movups	16(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.byte	15,56,203,202		# sha256rnds2 %xmm2,%xmm1

	movups	32(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	48(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
	cmpl	$11,%r11d		# AES-128/192/256 key-length tail, as above
	jb	.Laesenclast4
	movups	64(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	80(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
	je	.Laesenclast4
	movups	96(%rcx),%xmm4
	aesenc	%xmm5,%xmm6
	movups	112(%rcx),%xmm5
	aesenc	%xmm4,%xmm6
.Laesenclast4:
	aesenclast	%xmm5,%xmm6	# ciphertext block 3 complete
	movups	16-112(%rcx),%xmm4	# reload round key[1] for the next iteration
	nop

	paddd	%xmm9,%xmm2		# feed-forward: add the state snapshot
	paddd	%xmm8,%xmm1		# taken at the top of the iteration

	decq	%rdx			# one 64-byte block done
	movups	%xmm6,48(%rsi,%rdi,1)	# store ciphertext block 3
	leaq	64(%rdi),%rdi		# advance AES input (output follows via the rsi delta)
	jnz	.Loop_shaext

# Undo the sha256rnds2 register layout and write the results back.
	pshufd	$0xb1,%xmm2,%xmm2
	pshufd	$0x1b,%xmm1,%xmm3
	pshufd	$0xb1,%xmm1,%xmm1
	punpckhqdq	%xmm2,%xmm1
.byte	102,15,58,15,211,8		# palignr $8,%xmm3,%xmm2

	movups	%xmm6,(%r8)		# final CBC block becomes the new IV
	movdqu	%xmm1,(%r9)		# store updated SHA-256 state words 0..3
	movdqu	%xmm2,16(%r9)		# store updated SHA-256 state words 4..7
	.byte	0xf3,0xc3		# rep ret (0xf3 0xc3) -- presumably the perlasm two-byte-ret idiom; confirm rationale in the generator
.cfi_endproc
.size	aesni_cbc_sha256_enc_shaext,.-aesni_cbc_sha256_enc_shaext
# ELF note marking this object's CET capabilities for the linker/loader.
# Layout per the ELF note format: namesz, descsz, type (5 =
# NT_GNU_PROPERTY_TYPE_0), name "GNU", then the property array.
	.section ".note.gnu.property", "a"
	.p2align 3
	.long 1f - 0f			# namesz
	.long 4f - 1f			# descsz
	.long 5				# type: NT_GNU_PROPERTY_TYPE_0
0:
	# "GNU" encoded with .byte, since .asciz isn't supported
	# on Solaris.
	.byte 0x47
	.byte 0x4e
	.byte 0x55
	.byte 0
1:
	.p2align 3
	.long 0xc0000002		# property type: GNU_PROPERTY_X86_FEATURE_1_AND (per x86-64 psABI) -- TODO confirm
	.long 3f - 2f			# property data size
2:
	.long 3				# feature bits 0|1: IBT | SHSTK -- NOTE(review): no endbr64 is visible at the entry points in this file; confirm the IBT claim is intentional
3:
	.p2align 3
4:
