1/* $FreeBSD$ */
2/* Do not modify. This file is auto-generated from aesni-sha256-x86_64.pl. */
3.text
4
5
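/*
 * Stitched AES-NI CBC-encrypt + SHA-256 entry point (generated from
 * OpenSSL's aesni-sha256-x86_64.pl).  Per the SysV AMD64 ABI: %rdi =
 * plaintext, %rsi = ciphertext out, %rdx = length in 64-byte blocks,
 * %rcx = AES key schedule, %r8 = IV, %r9 = SHA-256 state, and the
 * seventh, stack-passed argument points at the data fed to SHA-256.
 * A NULL first argument is a capability probe (returns 1); otherwise
 * OPENSSL_ia32cap_P selects the SHAEXT, XOP, AVX2 or AVX body below.
 */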
6.globl	aesni_cbc_sha256_enc
7.type	aesni_cbc_sha256_enc,@function
8.align	16
9aesni_cbc_sha256_enc:
10.cfi_startproc
11	leaq	OPENSSL_ia32cap_P(%rip),%r11
12	movl	$1,%eax
13	cmpq	$0,%rdi
14	je	.Lprobe
15	movl	0(%r11),%eax
16	movq	4(%r11),%r10
17	btq	$61,%r10
18	jc	aesni_cbc_sha256_enc_shaext
19	movq	%r10,%r11
20	shrq	$32,%r11
21
22	testl	$2048,%r10d
23	jnz	aesni_cbc_sha256_enc_xop
24	andl	$296,%r11d
25	cmpl	$296,%r11d
26	je	aesni_cbc_sha256_enc_avx2
27	andl	$268435456,%r10d
28	jnz	aesni_cbc_sha256_enc_avx
29	ud2
30	xorl	%eax,%eax
31	cmpq	$0,%rdi
32	je	.Lprobe
33	ud2
34.Lprobe:
35	.byte	0xf3,0xc3
36.cfi_endproc
37.size	aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc
38
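/*
 * K256: SHA-256 round constants with each 16-byte row duplicated so the
 * same table serves the 128-bit (XOP/AVX) and 256-bit (AVX2) message
 * schedules.  It is followed by the big-endian byte-swap mask used with
 * vpshufb (K256+512) and, at K256+544, zero/all-ones masks indexed by
 * (rounds-9)*8 that keep the vaesenclast result of the correct AES round
 * count (10, 12 or 14) as the ciphertext block.
 */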
39.align	64
40.type	K256,@object
41K256:
42.long	0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
43.long	0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
44.long	0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
45.long	0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
46.long	0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
47.long	0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
48.long	0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
49.long	0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
50.long	0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
51.long	0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
52.long	0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
53.long	0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
54.long	0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
55.long	0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
56.long	0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
57.long	0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
58.long	0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
59.long	0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
60.long	0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
61.long	0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
62.long	0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
63.long	0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
64.long	0xd192e819,0xd6990624,0xf40e3585,0x106aa070
65.long	0xd192e819,0xd6990624,0xf40e3585,0x106aa070
66.long	0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
67.long	0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
68.long	0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
69.long	0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
70.long	0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
71.long	0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
72.long	0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
73.long	0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
74
75.long	0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
76.long	0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
77.long	0,0,0,0,   0,0,0,0,   -1,-1,-1,-1
78.long	0,0,0,0,   0,0,0,0
79.byte	65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
80.align	64
81.type	aesni_cbc_sha256_enc_xop,@function
82.align	64
83aesni_cbc_sha256_enc_xop:
84.cfi_startproc
85.Lxop_shortcut:
86	movq	8(%rsp),%r10
87	movq	%rsp,%rax
88.cfi_def_cfa_register	%rax
89	pushq	%rbx
90.cfi_offset	%rbx,-16
91	pushq	%rbp
92.cfi_offset	%rbp,-24
93	pushq	%r12
94.cfi_offset	%r12,-32
95	pushq	%r13
96.cfi_offset	%r13,-40
97	pushq	%r14
98.cfi_offset	%r14,-48
99	pushq	%r15
100.cfi_offset	%r15,-56
101	subq	$128,%rsp
102	andq	$-64,%rsp
103
104	shlq	$6,%rdx
105	subq	%rdi,%rsi
106	subq	%rdi,%r10
107	addq	%rdi,%rdx
108
109
110	movq	%rsi,64+8(%rsp)
111	movq	%rdx,64+16(%rsp)
112
113	movq	%r8,64+32(%rsp)
114	movq	%r9,64+40(%rsp)
115	movq	%r10,64+48(%rsp)
116	movq	%rax,120(%rsp)
117.cfi_escape	0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
118.Lprologue_xop:
119	vzeroall
120
121	movq	%rdi,%r12
122	leaq	128(%rcx),%rdi
123	leaq	K256+544(%rip),%r13
124	movl	240-128(%rdi),%r14d
125	movq	%r9,%r15
126	movq	%r10,%rsi
127	vmovdqu	(%r8),%xmm8
128	subq	$9,%r14
129
130	movl	0(%r15),%eax
131	movl	4(%r15),%ebx
132	movl	8(%r15),%ecx
133	movl	12(%r15),%edx
134	movl	16(%r15),%r8d
135	movl	20(%r15),%r9d
136	movl	24(%r15),%r10d
137	movl	28(%r15),%r11d
138
139	vmovdqa	0(%r13,%r14,8),%xmm14
140	vmovdqa	16(%r13,%r14,8),%xmm13
141	vmovdqa	32(%r13,%r14,8),%xmm12
142	vmovdqu	0-128(%rdi),%xmm10
143	jmp	.Lloop_xop
144.align	16
145.Lloop_xop:
146	vmovdqa	K256+512(%rip),%xmm7
147	vmovdqu	0(%rsi,%r12,1),%xmm0
148	vmovdqu	16(%rsi,%r12,1),%xmm1
149	vmovdqu	32(%rsi,%r12,1),%xmm2
150	vmovdqu	48(%rsi,%r12,1),%xmm3
151	vpshufb	%xmm7,%xmm0,%xmm0
152	leaq	K256(%rip),%rbp
153	vpshufb	%xmm7,%xmm1,%xmm1
154	vpshufb	%xmm7,%xmm2,%xmm2
155	vpaddd	0(%rbp),%xmm0,%xmm4
156	vpshufb	%xmm7,%xmm3,%xmm3
157	vpaddd	32(%rbp),%xmm1,%xmm5
158	vpaddd	64(%rbp),%xmm2,%xmm6
159	vpaddd	96(%rbp),%xmm3,%xmm7
160	vmovdqa	%xmm4,0(%rsp)
161	movl	%eax,%r14d
162	vmovdqa	%xmm5,16(%rsp)
163	movl	%ebx,%esi
164	vmovdqa	%xmm6,32(%rsp)
165	xorl	%ecx,%esi
166	vmovdqa	%xmm7,48(%rsp)
167	movl	%r8d,%r13d
168	jmp	.Lxop_00_47
169
170.align	16
171.Lxop_00_47:
172	subq	$-32*4,%rbp
173	vmovdqu	(%r12),%xmm9
174	movq	%r12,64+0(%rsp)
175	vpalignr	$4,%xmm0,%xmm1,%xmm4
176	rorl	$14,%r13d
177	movl	%r14d,%eax
178	vpalignr	$4,%xmm2,%xmm3,%xmm7
179	movl	%r9d,%r12d
180	xorl	%r8d,%r13d
181.byte	143,232,120,194,236,14
182	rorl	$9,%r14d
183	xorl	%r10d,%r12d
184	vpsrld	$3,%xmm4,%xmm4
185	rorl	$5,%r13d
186	xorl	%eax,%r14d
187	vpaddd	%xmm7,%xmm0,%xmm0
188	andl	%r8d,%r12d
189	vpxor	%xmm10,%xmm9,%xmm9
190	vmovdqu	16-128(%rdi),%xmm10
191	xorl	%r8d,%r13d
192	addl	0(%rsp),%r11d
193	movl	%eax,%r15d
194.byte	143,232,120,194,245,11
195	rorl	$11,%r14d
196	xorl	%r10d,%r12d
197	vpxor	%xmm5,%xmm4,%xmm4
198	xorl	%ebx,%r15d
199	rorl	$6,%r13d
200	addl	%r12d,%r11d
201	andl	%r15d,%esi
202.byte	143,232,120,194,251,13
203	xorl	%eax,%r14d
204	addl	%r13d,%r11d
205	vpxor	%xmm6,%xmm4,%xmm4
206	xorl	%ebx,%esi
207	addl	%r11d,%edx
208	vpsrld	$10,%xmm3,%xmm6
209	rorl	$2,%r14d
210	addl	%esi,%r11d
211	vpaddd	%xmm4,%xmm0,%xmm0
212	movl	%edx,%r13d
213	addl	%r11d,%r14d
214.byte	143,232,120,194,239,2
215	rorl	$14,%r13d
216	movl	%r14d,%r11d
217	vpxor	%xmm6,%xmm7,%xmm7
218	movl	%r8d,%r12d
219	xorl	%edx,%r13d
220	rorl	$9,%r14d
221	xorl	%r9d,%r12d
222	vpxor	%xmm5,%xmm7,%xmm7
223	rorl	$5,%r13d
224	xorl	%r11d,%r14d
225	andl	%edx,%r12d
226	vpxor	%xmm8,%xmm9,%xmm9
227	xorl	%edx,%r13d
228	vpsrldq	$8,%xmm7,%xmm7
229	addl	4(%rsp),%r10d
230	movl	%r11d,%esi
231	rorl	$11,%r14d
232	xorl	%r9d,%r12d
233	vpaddd	%xmm7,%xmm0,%xmm0
234	xorl	%eax,%esi
235	rorl	$6,%r13d
236	addl	%r12d,%r10d
237	andl	%esi,%r15d
238.byte	143,232,120,194,248,13
239	xorl	%r11d,%r14d
240	addl	%r13d,%r10d
241	vpsrld	$10,%xmm0,%xmm6
242	xorl	%eax,%r15d
243	addl	%r10d,%ecx
244.byte	143,232,120,194,239,2
245	rorl	$2,%r14d
246	addl	%r15d,%r10d
247	vpxor	%xmm6,%xmm7,%xmm7
248	movl	%ecx,%r13d
249	addl	%r10d,%r14d
250	rorl	$14,%r13d
251	movl	%r14d,%r10d
252	vpxor	%xmm5,%xmm7,%xmm7
253	movl	%edx,%r12d
254	xorl	%ecx,%r13d
255	rorl	$9,%r14d
256	xorl	%r8d,%r12d
257	vpslldq	$8,%xmm7,%xmm7
258	rorl	$5,%r13d
259	xorl	%r10d,%r14d
260	andl	%ecx,%r12d
261	vaesenc	%xmm10,%xmm9,%xmm9
262	vmovdqu	32-128(%rdi),%xmm10
263	xorl	%ecx,%r13d
264	vpaddd	%xmm7,%xmm0,%xmm0
265	addl	8(%rsp),%r9d
266	movl	%r10d,%r15d
267	rorl	$11,%r14d
268	xorl	%r8d,%r12d
269	vpaddd	0(%rbp),%xmm0,%xmm6
270	xorl	%r11d,%r15d
271	rorl	$6,%r13d
272	addl	%r12d,%r9d
273	andl	%r15d,%esi
274	xorl	%r10d,%r14d
275	addl	%r13d,%r9d
276	xorl	%r11d,%esi
277	addl	%r9d,%ebx
278	rorl	$2,%r14d
279	addl	%esi,%r9d
280	movl	%ebx,%r13d
281	addl	%r9d,%r14d
282	rorl	$14,%r13d
283	movl	%r14d,%r9d
284	movl	%ecx,%r12d
285	xorl	%ebx,%r13d
286	rorl	$9,%r14d
287	xorl	%edx,%r12d
288	rorl	$5,%r13d
289	xorl	%r9d,%r14d
290	andl	%ebx,%r12d
291	vaesenc	%xmm10,%xmm9,%xmm9
292	vmovdqu	48-128(%rdi),%xmm10
293	xorl	%ebx,%r13d
294	addl	12(%rsp),%r8d
295	movl	%r9d,%esi
296	rorl	$11,%r14d
297	xorl	%edx,%r12d
298	xorl	%r10d,%esi
299	rorl	$6,%r13d
300	addl	%r12d,%r8d
301	andl	%esi,%r15d
302	xorl	%r9d,%r14d
303	addl	%r13d,%r8d
304	xorl	%r10d,%r15d
305	addl	%r8d,%eax
306	rorl	$2,%r14d
307	addl	%r15d,%r8d
308	movl	%eax,%r13d
309	addl	%r8d,%r14d
310	vmovdqa	%xmm6,0(%rsp)
311	vpalignr	$4,%xmm1,%xmm2,%xmm4
312	rorl	$14,%r13d
313	movl	%r14d,%r8d
314	vpalignr	$4,%xmm3,%xmm0,%xmm7
315	movl	%ebx,%r12d
316	xorl	%eax,%r13d
317.byte	143,232,120,194,236,14
318	rorl	$9,%r14d
319	xorl	%ecx,%r12d
320	vpsrld	$3,%xmm4,%xmm4
321	rorl	$5,%r13d
322	xorl	%r8d,%r14d
323	vpaddd	%xmm7,%xmm1,%xmm1
324	andl	%eax,%r12d
325	vaesenc	%xmm10,%xmm9,%xmm9
326	vmovdqu	64-128(%rdi),%xmm10
327	xorl	%eax,%r13d
328	addl	16(%rsp),%edx
329	movl	%r8d,%r15d
330.byte	143,232,120,194,245,11
331	rorl	$11,%r14d
332	xorl	%ecx,%r12d
333	vpxor	%xmm5,%xmm4,%xmm4
334	xorl	%r9d,%r15d
335	rorl	$6,%r13d
336	addl	%r12d,%edx
337	andl	%r15d,%esi
338.byte	143,232,120,194,248,13
339	xorl	%r8d,%r14d
340	addl	%r13d,%edx
341	vpxor	%xmm6,%xmm4,%xmm4
342	xorl	%r9d,%esi
343	addl	%edx,%r11d
344	vpsrld	$10,%xmm0,%xmm6
345	rorl	$2,%r14d
346	addl	%esi,%edx
347	vpaddd	%xmm4,%xmm1,%xmm1
348	movl	%r11d,%r13d
349	addl	%edx,%r14d
350.byte	143,232,120,194,239,2
351	rorl	$14,%r13d
352	movl	%r14d,%edx
353	vpxor	%xmm6,%xmm7,%xmm7
354	movl	%eax,%r12d
355	xorl	%r11d,%r13d
356	rorl	$9,%r14d
357	xorl	%ebx,%r12d
358	vpxor	%xmm5,%xmm7,%xmm7
359	rorl	$5,%r13d
360	xorl	%edx,%r14d
361	andl	%r11d,%r12d
362	vaesenc	%xmm10,%xmm9,%xmm9
363	vmovdqu	80-128(%rdi),%xmm10
364	xorl	%r11d,%r13d
365	vpsrldq	$8,%xmm7,%xmm7
366	addl	20(%rsp),%ecx
367	movl	%edx,%esi
368	rorl	$11,%r14d
369	xorl	%ebx,%r12d
370	vpaddd	%xmm7,%xmm1,%xmm1
371	xorl	%r8d,%esi
372	rorl	$6,%r13d
373	addl	%r12d,%ecx
374	andl	%esi,%r15d
375.byte	143,232,120,194,249,13
376	xorl	%edx,%r14d
377	addl	%r13d,%ecx
378	vpsrld	$10,%xmm1,%xmm6
379	xorl	%r8d,%r15d
380	addl	%ecx,%r10d
381.byte	143,232,120,194,239,2
382	rorl	$2,%r14d
383	addl	%r15d,%ecx
384	vpxor	%xmm6,%xmm7,%xmm7
385	movl	%r10d,%r13d
386	addl	%ecx,%r14d
387	rorl	$14,%r13d
388	movl	%r14d,%ecx
389	vpxor	%xmm5,%xmm7,%xmm7
390	movl	%r11d,%r12d
391	xorl	%r10d,%r13d
392	rorl	$9,%r14d
393	xorl	%eax,%r12d
394	vpslldq	$8,%xmm7,%xmm7
395	rorl	$5,%r13d
396	xorl	%ecx,%r14d
397	andl	%r10d,%r12d
398	vaesenc	%xmm10,%xmm9,%xmm9
399	vmovdqu	96-128(%rdi),%xmm10
400	xorl	%r10d,%r13d
401	vpaddd	%xmm7,%xmm1,%xmm1
402	addl	24(%rsp),%ebx
403	movl	%ecx,%r15d
404	rorl	$11,%r14d
405	xorl	%eax,%r12d
406	vpaddd	32(%rbp),%xmm1,%xmm6
407	xorl	%edx,%r15d
408	rorl	$6,%r13d
409	addl	%r12d,%ebx
410	andl	%r15d,%esi
411	xorl	%ecx,%r14d
412	addl	%r13d,%ebx
413	xorl	%edx,%esi
414	addl	%ebx,%r9d
415	rorl	$2,%r14d
416	addl	%esi,%ebx
417	movl	%r9d,%r13d
418	addl	%ebx,%r14d
419	rorl	$14,%r13d
420	movl	%r14d,%ebx
421	movl	%r10d,%r12d
422	xorl	%r9d,%r13d
423	rorl	$9,%r14d
424	xorl	%r11d,%r12d
425	rorl	$5,%r13d
426	xorl	%ebx,%r14d
427	andl	%r9d,%r12d
428	vaesenc	%xmm10,%xmm9,%xmm9
429	vmovdqu	112-128(%rdi),%xmm10
430	xorl	%r9d,%r13d
431	addl	28(%rsp),%eax
432	movl	%ebx,%esi
433	rorl	$11,%r14d
434	xorl	%r11d,%r12d
435	xorl	%ecx,%esi
436	rorl	$6,%r13d
437	addl	%r12d,%eax
438	andl	%esi,%r15d
439	xorl	%ebx,%r14d
440	addl	%r13d,%eax
441	xorl	%ecx,%r15d
442	addl	%eax,%r8d
443	rorl	$2,%r14d
444	addl	%r15d,%eax
445	movl	%r8d,%r13d
446	addl	%eax,%r14d
447	vmovdqa	%xmm6,16(%rsp)
448	vpalignr	$4,%xmm2,%xmm3,%xmm4
449	rorl	$14,%r13d
450	movl	%r14d,%eax
451	vpalignr	$4,%xmm0,%xmm1,%xmm7
452	movl	%r9d,%r12d
453	xorl	%r8d,%r13d
454.byte	143,232,120,194,236,14
455	rorl	$9,%r14d
456	xorl	%r10d,%r12d
457	vpsrld	$3,%xmm4,%xmm4
458	rorl	$5,%r13d
459	xorl	%eax,%r14d
460	vpaddd	%xmm7,%xmm2,%xmm2
461	andl	%r8d,%r12d
462	vaesenc	%xmm10,%xmm9,%xmm9
463	vmovdqu	128-128(%rdi),%xmm10
464	xorl	%r8d,%r13d
465	addl	32(%rsp),%r11d
466	movl	%eax,%r15d
467.byte	143,232,120,194,245,11
468	rorl	$11,%r14d
469	xorl	%r10d,%r12d
470	vpxor	%xmm5,%xmm4,%xmm4
471	xorl	%ebx,%r15d
472	rorl	$6,%r13d
473	addl	%r12d,%r11d
474	andl	%r15d,%esi
475.byte	143,232,120,194,249,13
476	xorl	%eax,%r14d
477	addl	%r13d,%r11d
478	vpxor	%xmm6,%xmm4,%xmm4
479	xorl	%ebx,%esi
480	addl	%r11d,%edx
481	vpsrld	$10,%xmm1,%xmm6
482	rorl	$2,%r14d
483	addl	%esi,%r11d
484	vpaddd	%xmm4,%xmm2,%xmm2
485	movl	%edx,%r13d
486	addl	%r11d,%r14d
487.byte	143,232,120,194,239,2
488	rorl	$14,%r13d
489	movl	%r14d,%r11d
490	vpxor	%xmm6,%xmm7,%xmm7
491	movl	%r8d,%r12d
492	xorl	%edx,%r13d
493	rorl	$9,%r14d
494	xorl	%r9d,%r12d
495	vpxor	%xmm5,%xmm7,%xmm7
496	rorl	$5,%r13d
497	xorl	%r11d,%r14d
498	andl	%edx,%r12d
499	vaesenc	%xmm10,%xmm9,%xmm9
500	vmovdqu	144-128(%rdi),%xmm10
501	xorl	%edx,%r13d
502	vpsrldq	$8,%xmm7,%xmm7
503	addl	36(%rsp),%r10d
504	movl	%r11d,%esi
505	rorl	$11,%r14d
506	xorl	%r9d,%r12d
507	vpaddd	%xmm7,%xmm2,%xmm2
508	xorl	%eax,%esi
509	rorl	$6,%r13d
510	addl	%r12d,%r10d
511	andl	%esi,%r15d
512.byte	143,232,120,194,250,13
513	xorl	%r11d,%r14d
514	addl	%r13d,%r10d
515	vpsrld	$10,%xmm2,%xmm6
516	xorl	%eax,%r15d
517	addl	%r10d,%ecx
518.byte	143,232,120,194,239,2
519	rorl	$2,%r14d
520	addl	%r15d,%r10d
521	vpxor	%xmm6,%xmm7,%xmm7
522	movl	%ecx,%r13d
523	addl	%r10d,%r14d
524	rorl	$14,%r13d
525	movl	%r14d,%r10d
526	vpxor	%xmm5,%xmm7,%xmm7
527	movl	%edx,%r12d
528	xorl	%ecx,%r13d
529	rorl	$9,%r14d
530	xorl	%r8d,%r12d
531	vpslldq	$8,%xmm7,%xmm7
532	rorl	$5,%r13d
533	xorl	%r10d,%r14d
534	andl	%ecx,%r12d
535	vaesenc	%xmm10,%xmm9,%xmm9
536	vmovdqu	160-128(%rdi),%xmm10
537	xorl	%ecx,%r13d
538	vpaddd	%xmm7,%xmm2,%xmm2
539	addl	40(%rsp),%r9d
540	movl	%r10d,%r15d
541	rorl	$11,%r14d
542	xorl	%r8d,%r12d
543	vpaddd	64(%rbp),%xmm2,%xmm6
544	xorl	%r11d,%r15d
545	rorl	$6,%r13d
546	addl	%r12d,%r9d
547	andl	%r15d,%esi
548	xorl	%r10d,%r14d
549	addl	%r13d,%r9d
550	xorl	%r11d,%esi
551	addl	%r9d,%ebx
552	rorl	$2,%r14d
553	addl	%esi,%r9d
554	movl	%ebx,%r13d
555	addl	%r9d,%r14d
556	rorl	$14,%r13d
557	movl	%r14d,%r9d
558	movl	%ecx,%r12d
559	xorl	%ebx,%r13d
560	rorl	$9,%r14d
561	xorl	%edx,%r12d
562	rorl	$5,%r13d
563	xorl	%r9d,%r14d
564	andl	%ebx,%r12d
565	vaesenclast	%xmm10,%xmm9,%xmm11
566	vaesenc	%xmm10,%xmm9,%xmm9
567	vmovdqu	176-128(%rdi),%xmm10
568	xorl	%ebx,%r13d
569	addl	44(%rsp),%r8d
570	movl	%r9d,%esi
571	rorl	$11,%r14d
572	xorl	%edx,%r12d
573	xorl	%r10d,%esi
574	rorl	$6,%r13d
575	addl	%r12d,%r8d
576	andl	%esi,%r15d
577	xorl	%r9d,%r14d
578	addl	%r13d,%r8d
579	xorl	%r10d,%r15d
580	addl	%r8d,%eax
581	rorl	$2,%r14d
582	addl	%r15d,%r8d
583	movl	%eax,%r13d
584	addl	%r8d,%r14d
585	vmovdqa	%xmm6,32(%rsp)
586	vpalignr	$4,%xmm3,%xmm0,%xmm4
587	rorl	$14,%r13d
588	movl	%r14d,%r8d
589	vpalignr	$4,%xmm1,%xmm2,%xmm7
590	movl	%ebx,%r12d
591	xorl	%eax,%r13d
592.byte	143,232,120,194,236,14
593	rorl	$9,%r14d
594	xorl	%ecx,%r12d
595	vpsrld	$3,%xmm4,%xmm4
596	rorl	$5,%r13d
597	xorl	%r8d,%r14d
598	vpaddd	%xmm7,%xmm3,%xmm3
599	andl	%eax,%r12d
600	vpand	%xmm12,%xmm11,%xmm8
601	vaesenc	%xmm10,%xmm9,%xmm9
602	vmovdqu	192-128(%rdi),%xmm10
603	xorl	%eax,%r13d
604	addl	48(%rsp),%edx
605	movl	%r8d,%r15d
606.byte	143,232,120,194,245,11
607	rorl	$11,%r14d
608	xorl	%ecx,%r12d
609	vpxor	%xmm5,%xmm4,%xmm4
610	xorl	%r9d,%r15d
611	rorl	$6,%r13d
612	addl	%r12d,%edx
613	andl	%r15d,%esi
614.byte	143,232,120,194,250,13
615	xorl	%r8d,%r14d
616	addl	%r13d,%edx
617	vpxor	%xmm6,%xmm4,%xmm4
618	xorl	%r9d,%esi
619	addl	%edx,%r11d
620	vpsrld	$10,%xmm2,%xmm6
621	rorl	$2,%r14d
622	addl	%esi,%edx
623	vpaddd	%xmm4,%xmm3,%xmm3
624	movl	%r11d,%r13d
625	addl	%edx,%r14d
626.byte	143,232,120,194,239,2
627	rorl	$14,%r13d
628	movl	%r14d,%edx
629	vpxor	%xmm6,%xmm7,%xmm7
630	movl	%eax,%r12d
631	xorl	%r11d,%r13d
632	rorl	$9,%r14d
633	xorl	%ebx,%r12d
634	vpxor	%xmm5,%xmm7,%xmm7
635	rorl	$5,%r13d
636	xorl	%edx,%r14d
637	andl	%r11d,%r12d
638	vaesenclast	%xmm10,%xmm9,%xmm11
639	vaesenc	%xmm10,%xmm9,%xmm9
640	vmovdqu	208-128(%rdi),%xmm10
641	xorl	%r11d,%r13d
642	vpsrldq	$8,%xmm7,%xmm7
643	addl	52(%rsp),%ecx
644	movl	%edx,%esi
645	rorl	$11,%r14d
646	xorl	%ebx,%r12d
647	vpaddd	%xmm7,%xmm3,%xmm3
648	xorl	%r8d,%esi
649	rorl	$6,%r13d
650	addl	%r12d,%ecx
651	andl	%esi,%r15d
652.byte	143,232,120,194,251,13
653	xorl	%edx,%r14d
654	addl	%r13d,%ecx
655	vpsrld	$10,%xmm3,%xmm6
656	xorl	%r8d,%r15d
657	addl	%ecx,%r10d
658.byte	143,232,120,194,239,2
659	rorl	$2,%r14d
660	addl	%r15d,%ecx
661	vpxor	%xmm6,%xmm7,%xmm7
662	movl	%r10d,%r13d
663	addl	%ecx,%r14d
664	rorl	$14,%r13d
665	movl	%r14d,%ecx
666	vpxor	%xmm5,%xmm7,%xmm7
667	movl	%r11d,%r12d
668	xorl	%r10d,%r13d
669	rorl	$9,%r14d
670	xorl	%eax,%r12d
671	vpslldq	$8,%xmm7,%xmm7
672	rorl	$5,%r13d
673	xorl	%ecx,%r14d
674	andl	%r10d,%r12d
675	vpand	%xmm13,%xmm11,%xmm11
676	vaesenc	%xmm10,%xmm9,%xmm9
677	vmovdqu	224-128(%rdi),%xmm10
678	xorl	%r10d,%r13d
679	vpaddd	%xmm7,%xmm3,%xmm3
680	addl	56(%rsp),%ebx
681	movl	%ecx,%r15d
682	rorl	$11,%r14d
683	xorl	%eax,%r12d
684	vpaddd	96(%rbp),%xmm3,%xmm6
685	xorl	%edx,%r15d
686	rorl	$6,%r13d
687	addl	%r12d,%ebx
688	andl	%r15d,%esi
689	xorl	%ecx,%r14d
690	addl	%r13d,%ebx
691	xorl	%edx,%esi
692	addl	%ebx,%r9d
693	rorl	$2,%r14d
694	addl	%esi,%ebx
695	movl	%r9d,%r13d
696	addl	%ebx,%r14d
697	rorl	$14,%r13d
698	movl	%r14d,%ebx
699	movl	%r10d,%r12d
700	xorl	%r9d,%r13d
701	rorl	$9,%r14d
702	xorl	%r11d,%r12d
703	rorl	$5,%r13d
704	xorl	%ebx,%r14d
705	andl	%r9d,%r12d
706	vpor	%xmm11,%xmm8,%xmm8
707	vaesenclast	%xmm10,%xmm9,%xmm11
708	vmovdqu	0-128(%rdi),%xmm10
709	xorl	%r9d,%r13d
710	addl	60(%rsp),%eax
711	movl	%ebx,%esi
712	rorl	$11,%r14d
713	xorl	%r11d,%r12d
714	xorl	%ecx,%esi
715	rorl	$6,%r13d
716	addl	%r12d,%eax
717	andl	%esi,%r15d
718	xorl	%ebx,%r14d
719	addl	%r13d,%eax
720	xorl	%ecx,%r15d
721	addl	%eax,%r8d
722	rorl	$2,%r14d
723	addl	%r15d,%eax
724	movl	%r8d,%r13d
725	addl	%eax,%r14d
726	vmovdqa	%xmm6,48(%rsp)
727	movq	64+0(%rsp),%r12
728	vpand	%xmm14,%xmm11,%xmm11
729	movq	64+8(%rsp),%r15
730	vpor	%xmm11,%xmm8,%xmm8
731	vmovdqu	%xmm8,(%r15,%r12,1)
732	leaq	16(%r12),%r12
733	cmpb	$0,131(%rbp)
734	jne	.Lxop_00_47
735	vmovdqu	(%r12),%xmm9
736	movq	%r12,64+0(%rsp)
737	rorl	$14,%r13d
738	movl	%r14d,%eax
739	movl	%r9d,%r12d
740	xorl	%r8d,%r13d
741	rorl	$9,%r14d
742	xorl	%r10d,%r12d
743	rorl	$5,%r13d
744	xorl	%eax,%r14d
745	andl	%r8d,%r12d
746	vpxor	%xmm10,%xmm9,%xmm9
747	vmovdqu	16-128(%rdi),%xmm10
748	xorl	%r8d,%r13d
749	addl	0(%rsp),%r11d
750	movl	%eax,%r15d
751	rorl	$11,%r14d
752	xorl	%r10d,%r12d
753	xorl	%ebx,%r15d
754	rorl	$6,%r13d
755	addl	%r12d,%r11d
756	andl	%r15d,%esi
757	xorl	%eax,%r14d
758	addl	%r13d,%r11d
759	xorl	%ebx,%esi
760	addl	%r11d,%edx
761	rorl	$2,%r14d
762	addl	%esi,%r11d
763	movl	%edx,%r13d
764	addl	%r11d,%r14d
765	rorl	$14,%r13d
766	movl	%r14d,%r11d
767	movl	%r8d,%r12d
768	xorl	%edx,%r13d
769	rorl	$9,%r14d
770	xorl	%r9d,%r12d
771	rorl	$5,%r13d
772	xorl	%r11d,%r14d
773	andl	%edx,%r12d
774	vpxor	%xmm8,%xmm9,%xmm9
775	xorl	%edx,%r13d
776	addl	4(%rsp),%r10d
777	movl	%r11d,%esi
778	rorl	$11,%r14d
779	xorl	%r9d,%r12d
780	xorl	%eax,%esi
781	rorl	$6,%r13d
782	addl	%r12d,%r10d
783	andl	%esi,%r15d
784	xorl	%r11d,%r14d
785	addl	%r13d,%r10d
786	xorl	%eax,%r15d
787	addl	%r10d,%ecx
788	rorl	$2,%r14d
789	addl	%r15d,%r10d
790	movl	%ecx,%r13d
791	addl	%r10d,%r14d
792	rorl	$14,%r13d
793	movl	%r14d,%r10d
794	movl	%edx,%r12d
795	xorl	%ecx,%r13d
796	rorl	$9,%r14d
797	xorl	%r8d,%r12d
798	rorl	$5,%r13d
799	xorl	%r10d,%r14d
800	andl	%ecx,%r12d
801	vaesenc	%xmm10,%xmm9,%xmm9
802	vmovdqu	32-128(%rdi),%xmm10
803	xorl	%ecx,%r13d
804	addl	8(%rsp),%r9d
805	movl	%r10d,%r15d
806	rorl	$11,%r14d
807	xorl	%r8d,%r12d
808	xorl	%r11d,%r15d
809	rorl	$6,%r13d
810	addl	%r12d,%r9d
811	andl	%r15d,%esi
812	xorl	%r10d,%r14d
813	addl	%r13d,%r9d
814	xorl	%r11d,%esi
815	addl	%r9d,%ebx
816	rorl	$2,%r14d
817	addl	%esi,%r9d
818	movl	%ebx,%r13d
819	addl	%r9d,%r14d
820	rorl	$14,%r13d
821	movl	%r14d,%r9d
822	movl	%ecx,%r12d
823	xorl	%ebx,%r13d
824	rorl	$9,%r14d
825	xorl	%edx,%r12d
826	rorl	$5,%r13d
827	xorl	%r9d,%r14d
828	andl	%ebx,%r12d
829	vaesenc	%xmm10,%xmm9,%xmm9
830	vmovdqu	48-128(%rdi),%xmm10
831	xorl	%ebx,%r13d
832	addl	12(%rsp),%r8d
833	movl	%r9d,%esi
834	rorl	$11,%r14d
835	xorl	%edx,%r12d
836	xorl	%r10d,%esi
837	rorl	$6,%r13d
838	addl	%r12d,%r8d
839	andl	%esi,%r15d
840	xorl	%r9d,%r14d
841	addl	%r13d,%r8d
842	xorl	%r10d,%r15d
843	addl	%r8d,%eax
844	rorl	$2,%r14d
845	addl	%r15d,%r8d
846	movl	%eax,%r13d
847	addl	%r8d,%r14d
848	rorl	$14,%r13d
849	movl	%r14d,%r8d
850	movl	%ebx,%r12d
851	xorl	%eax,%r13d
852	rorl	$9,%r14d
853	xorl	%ecx,%r12d
854	rorl	$5,%r13d
855	xorl	%r8d,%r14d
856	andl	%eax,%r12d
857	vaesenc	%xmm10,%xmm9,%xmm9
858	vmovdqu	64-128(%rdi),%xmm10
859	xorl	%eax,%r13d
860	addl	16(%rsp),%edx
861	movl	%r8d,%r15d
862	rorl	$11,%r14d
863	xorl	%ecx,%r12d
864	xorl	%r9d,%r15d
865	rorl	$6,%r13d
866	addl	%r12d,%edx
867	andl	%r15d,%esi
868	xorl	%r8d,%r14d
869	addl	%r13d,%edx
870	xorl	%r9d,%esi
871	addl	%edx,%r11d
872	rorl	$2,%r14d
873	addl	%esi,%edx
874	movl	%r11d,%r13d
875	addl	%edx,%r14d
876	rorl	$14,%r13d
877	movl	%r14d,%edx
878	movl	%eax,%r12d
879	xorl	%r11d,%r13d
880	rorl	$9,%r14d
881	xorl	%ebx,%r12d
882	rorl	$5,%r13d
883	xorl	%edx,%r14d
884	andl	%r11d,%r12d
885	vaesenc	%xmm10,%xmm9,%xmm9
886	vmovdqu	80-128(%rdi),%xmm10
887	xorl	%r11d,%r13d
888	addl	20(%rsp),%ecx
889	movl	%edx,%esi
890	rorl	$11,%r14d
891	xorl	%ebx,%r12d
892	xorl	%r8d,%esi
893	rorl	$6,%r13d
894	addl	%r12d,%ecx
895	andl	%esi,%r15d
896	xorl	%edx,%r14d
897	addl	%r13d,%ecx
898	xorl	%r8d,%r15d
899	addl	%ecx,%r10d
900	rorl	$2,%r14d
901	addl	%r15d,%ecx
902	movl	%r10d,%r13d
903	addl	%ecx,%r14d
904	rorl	$14,%r13d
905	movl	%r14d,%ecx
906	movl	%r11d,%r12d
907	xorl	%r10d,%r13d
908	rorl	$9,%r14d
909	xorl	%eax,%r12d
910	rorl	$5,%r13d
911	xorl	%ecx,%r14d
912	andl	%r10d,%r12d
913	vaesenc	%xmm10,%xmm9,%xmm9
914	vmovdqu	96-128(%rdi),%xmm10
915	xorl	%r10d,%r13d
916	addl	24(%rsp),%ebx
917	movl	%ecx,%r15d
918	rorl	$11,%r14d
919	xorl	%eax,%r12d
920	xorl	%edx,%r15d
921	rorl	$6,%r13d
922	addl	%r12d,%ebx
923	andl	%r15d,%esi
924	xorl	%ecx,%r14d
925	addl	%r13d,%ebx
926	xorl	%edx,%esi
927	addl	%ebx,%r9d
928	rorl	$2,%r14d
929	addl	%esi,%ebx
930	movl	%r9d,%r13d
931	addl	%ebx,%r14d
932	rorl	$14,%r13d
933	movl	%r14d,%ebx
934	movl	%r10d,%r12d
935	xorl	%r9d,%r13d
936	rorl	$9,%r14d
937	xorl	%r11d,%r12d
938	rorl	$5,%r13d
939	xorl	%ebx,%r14d
940	andl	%r9d,%r12d
941	vaesenc	%xmm10,%xmm9,%xmm9
942	vmovdqu	112-128(%rdi),%xmm10
943	xorl	%r9d,%r13d
944	addl	28(%rsp),%eax
945	movl	%ebx,%esi
946	rorl	$11,%r14d
947	xorl	%r11d,%r12d
948	xorl	%ecx,%esi
949	rorl	$6,%r13d
950	addl	%r12d,%eax
951	andl	%esi,%r15d
952	xorl	%ebx,%r14d
953	addl	%r13d,%eax
954	xorl	%ecx,%r15d
955	addl	%eax,%r8d
956	rorl	$2,%r14d
957	addl	%r15d,%eax
958	movl	%r8d,%r13d
959	addl	%eax,%r14d
960	rorl	$14,%r13d
961	movl	%r14d,%eax
962	movl	%r9d,%r12d
963	xorl	%r8d,%r13d
964	rorl	$9,%r14d
965	xorl	%r10d,%r12d
966	rorl	$5,%r13d
967	xorl	%eax,%r14d
968	andl	%r8d,%r12d
969	vaesenc	%xmm10,%xmm9,%xmm9
970	vmovdqu	128-128(%rdi),%xmm10
971	xorl	%r8d,%r13d
972	addl	32(%rsp),%r11d
973	movl	%eax,%r15d
974	rorl	$11,%r14d
975	xorl	%r10d,%r12d
976	xorl	%ebx,%r15d
977	rorl	$6,%r13d
978	addl	%r12d,%r11d
979	andl	%r15d,%esi
980	xorl	%eax,%r14d
981	addl	%r13d,%r11d
982	xorl	%ebx,%esi
983	addl	%r11d,%edx
984	rorl	$2,%r14d
985	addl	%esi,%r11d
986	movl	%edx,%r13d
987	addl	%r11d,%r14d
988	rorl	$14,%r13d
989	movl	%r14d,%r11d
990	movl	%r8d,%r12d
991	xorl	%edx,%r13d
992	rorl	$9,%r14d
993	xorl	%r9d,%r12d
994	rorl	$5,%r13d
995	xorl	%r11d,%r14d
996	andl	%edx,%r12d
997	vaesenc	%xmm10,%xmm9,%xmm9
998	vmovdqu	144-128(%rdi),%xmm10
999	xorl	%edx,%r13d
1000	addl	36(%rsp),%r10d
1001	movl	%r11d,%esi
1002	rorl	$11,%r14d
1003	xorl	%r9d,%r12d
1004	xorl	%eax,%esi
1005	rorl	$6,%r13d
1006	addl	%r12d,%r10d
1007	andl	%esi,%r15d
1008	xorl	%r11d,%r14d
1009	addl	%r13d,%r10d
1010	xorl	%eax,%r15d
1011	addl	%r10d,%ecx
1012	rorl	$2,%r14d
1013	addl	%r15d,%r10d
1014	movl	%ecx,%r13d
1015	addl	%r10d,%r14d
1016	rorl	$14,%r13d
1017	movl	%r14d,%r10d
1018	movl	%edx,%r12d
1019	xorl	%ecx,%r13d
1020	rorl	$9,%r14d
1021	xorl	%r8d,%r12d
1022	rorl	$5,%r13d
1023	xorl	%r10d,%r14d
1024	andl	%ecx,%r12d
1025	vaesenc	%xmm10,%xmm9,%xmm9
1026	vmovdqu	160-128(%rdi),%xmm10
1027	xorl	%ecx,%r13d
1028	addl	40(%rsp),%r9d
1029	movl	%r10d,%r15d
1030	rorl	$11,%r14d
1031	xorl	%r8d,%r12d
1032	xorl	%r11d,%r15d
1033	rorl	$6,%r13d
1034	addl	%r12d,%r9d
1035	andl	%r15d,%esi
1036	xorl	%r10d,%r14d
1037	addl	%r13d,%r9d
1038	xorl	%r11d,%esi
1039	addl	%r9d,%ebx
1040	rorl	$2,%r14d
1041	addl	%esi,%r9d
1042	movl	%ebx,%r13d
1043	addl	%r9d,%r14d
1044	rorl	$14,%r13d
1045	movl	%r14d,%r9d
1046	movl	%ecx,%r12d
1047	xorl	%ebx,%r13d
1048	rorl	$9,%r14d
1049	xorl	%edx,%r12d
1050	rorl	$5,%r13d
1051	xorl	%r9d,%r14d
1052	andl	%ebx,%r12d
1053	vaesenclast	%xmm10,%xmm9,%xmm11
1054	vaesenc	%xmm10,%xmm9,%xmm9
1055	vmovdqu	176-128(%rdi),%xmm10
1056	xorl	%ebx,%r13d
1057	addl	44(%rsp),%r8d
1058	movl	%r9d,%esi
1059	rorl	$11,%r14d
1060	xorl	%edx,%r12d
1061	xorl	%r10d,%esi
1062	rorl	$6,%r13d
1063	addl	%r12d,%r8d
1064	andl	%esi,%r15d
1065	xorl	%r9d,%r14d
1066	addl	%r13d,%r8d
1067	xorl	%r10d,%r15d
1068	addl	%r8d,%eax
1069	rorl	$2,%r14d
1070	addl	%r15d,%r8d
1071	movl	%eax,%r13d
1072	addl	%r8d,%r14d
1073	rorl	$14,%r13d
1074	movl	%r14d,%r8d
1075	movl	%ebx,%r12d
1076	xorl	%eax,%r13d
1077	rorl	$9,%r14d
1078	xorl	%ecx,%r12d
1079	rorl	$5,%r13d
1080	xorl	%r8d,%r14d
1081	andl	%eax,%r12d
1082	vpand	%xmm12,%xmm11,%xmm8
1083	vaesenc	%xmm10,%xmm9,%xmm9
1084	vmovdqu	192-128(%rdi),%xmm10
1085	xorl	%eax,%r13d
1086	addl	48(%rsp),%edx
1087	movl	%r8d,%r15d
1088	rorl	$11,%r14d
1089	xorl	%ecx,%r12d
1090	xorl	%r9d,%r15d
1091	rorl	$6,%r13d
1092	addl	%r12d,%edx
1093	andl	%r15d,%esi
1094	xorl	%r8d,%r14d
1095	addl	%r13d,%edx
1096	xorl	%r9d,%esi
1097	addl	%edx,%r11d
1098	rorl	$2,%r14d
1099	addl	%esi,%edx
1100	movl	%r11d,%r13d
1101	addl	%edx,%r14d
1102	rorl	$14,%r13d
1103	movl	%r14d,%edx
1104	movl	%eax,%r12d
1105	xorl	%r11d,%r13d
1106	rorl	$9,%r14d
1107	xorl	%ebx,%r12d
1108	rorl	$5,%r13d
1109	xorl	%edx,%r14d
1110	andl	%r11d,%r12d
1111	vaesenclast	%xmm10,%xmm9,%xmm11
1112	vaesenc	%xmm10,%xmm9,%xmm9
1113	vmovdqu	208-128(%rdi),%xmm10
1114	xorl	%r11d,%r13d
1115	addl	52(%rsp),%ecx
1116	movl	%edx,%esi
1117	rorl	$11,%r14d
1118	xorl	%ebx,%r12d
1119	xorl	%r8d,%esi
1120	rorl	$6,%r13d
1121	addl	%r12d,%ecx
1122	andl	%esi,%r15d
1123	xorl	%edx,%r14d
1124	addl	%r13d,%ecx
1125	xorl	%r8d,%r15d
1126	addl	%ecx,%r10d
1127	rorl	$2,%r14d
1128	addl	%r15d,%ecx
1129	movl	%r10d,%r13d
1130	addl	%ecx,%r14d
1131	rorl	$14,%r13d
1132	movl	%r14d,%ecx
1133	movl	%r11d,%r12d
1134	xorl	%r10d,%r13d
1135	rorl	$9,%r14d
1136	xorl	%eax,%r12d
1137	rorl	$5,%r13d
1138	xorl	%ecx,%r14d
1139	andl	%r10d,%r12d
1140	vpand	%xmm13,%xmm11,%xmm11
1141	vaesenc	%xmm10,%xmm9,%xmm9
1142	vmovdqu	224-128(%rdi),%xmm10
1143	xorl	%r10d,%r13d
1144	addl	56(%rsp),%ebx
1145	movl	%ecx,%r15d
1146	rorl	$11,%r14d
1147	xorl	%eax,%r12d
1148	xorl	%edx,%r15d
1149	rorl	$6,%r13d
1150	addl	%r12d,%ebx
1151	andl	%r15d,%esi
1152	xorl	%ecx,%r14d
1153	addl	%r13d,%ebx
1154	xorl	%edx,%esi
1155	addl	%ebx,%r9d
1156	rorl	$2,%r14d
1157	addl	%esi,%ebx
1158	movl	%r9d,%r13d
1159	addl	%ebx,%r14d
1160	rorl	$14,%r13d
1161	movl	%r14d,%ebx
1162	movl	%r10d,%r12d
1163	xorl	%r9d,%r13d
1164	rorl	$9,%r14d
1165	xorl	%r11d,%r12d
1166	rorl	$5,%r13d
1167	xorl	%ebx,%r14d
1168	andl	%r9d,%r12d
1169	vpor	%xmm11,%xmm8,%xmm8
1170	vaesenclast	%xmm10,%xmm9,%xmm11
1171	vmovdqu	0-128(%rdi),%xmm10
1172	xorl	%r9d,%r13d
1173	addl	60(%rsp),%eax
1174	movl	%ebx,%esi
1175	rorl	$11,%r14d
1176	xorl	%r11d,%r12d
1177	xorl	%ecx,%esi
1178	rorl	$6,%r13d
1179	addl	%r12d,%eax
1180	andl	%esi,%r15d
1181	xorl	%ebx,%r14d
1182	addl	%r13d,%eax
1183	xorl	%ecx,%r15d
1184	addl	%eax,%r8d
1185	rorl	$2,%r14d
1186	addl	%r15d,%eax
1187	movl	%r8d,%r13d
1188	addl	%eax,%r14d
1189	movq	64+0(%rsp),%r12
1190	movq	64+8(%rsp),%r13
1191	movq	64+40(%rsp),%r15
1192	movq	64+48(%rsp),%rsi
1193
1194	vpand	%xmm14,%xmm11,%xmm11
1195	movl	%r14d,%eax
1196	vpor	%xmm11,%xmm8,%xmm8
1197	vmovdqu	%xmm8,(%r12,%r13,1)
1198	leaq	16(%r12),%r12
1199
1200	addl	0(%r15),%eax
1201	addl	4(%r15),%ebx
1202	addl	8(%r15),%ecx
1203	addl	12(%r15),%edx
1204	addl	16(%r15),%r8d
1205	addl	20(%r15),%r9d
1206	addl	24(%r15),%r10d
1207	addl	28(%r15),%r11d
1208
1209	cmpq	64+16(%rsp),%r12
1210
1211	movl	%eax,0(%r15)
1212	movl	%ebx,4(%r15)
1213	movl	%ecx,8(%r15)
1214	movl	%edx,12(%r15)
1215	movl	%r8d,16(%r15)
1216	movl	%r9d,20(%r15)
1217	movl	%r10d,24(%r15)
1218	movl	%r11d,28(%r15)
1219
1220	jb	.Lloop_xop
1221
1222	movq	64+32(%rsp),%r8
1223	movq	120(%rsp),%rsi
1224.cfi_def_cfa	%rsi,8
1225	vmovdqu	%xmm8,(%r8)
1226	vzeroall
1227	movq	-48(%rsi),%r15
1228.cfi_restore	%r15
1229	movq	-40(%rsi),%r14
1230.cfi_restore	%r14
1231	movq	-32(%rsi),%r13
1232.cfi_restore	%r13
1233	movq	-24(%rsi),%r12
1234.cfi_restore	%r12
1235	movq	-16(%rsi),%rbp
1236.cfi_restore	%rbp
1237	movq	-8(%rsi),%rbx
1238.cfi_restore	%rbx
1239	leaq	(%rsi),%rsp
1240.cfi_def_cfa_register	%rsp
1241.Lepilogue_xop:
1242	.byte	0xf3,0xc3
1243.cfi_endproc
1244.size	aesni_cbc_sha256_enc_xop,.-aesni_cbc_sha256_enc_xop
1245.type	aesni_cbc_sha256_enc_avx,@function
1246.align	64
1247aesni_cbc_sha256_enc_avx:
1248.cfi_startproc
1249.Lavx_shortcut:
1250	movq	8(%rsp),%r10
1251	movq	%rsp,%rax
1252.cfi_def_cfa_register	%rax
1253	pushq	%rbx
1254.cfi_offset	%rbx,-16
1255	pushq	%rbp
1256.cfi_offset	%rbp,-24
1257	pushq	%r12
1258.cfi_offset	%r12,-32
1259	pushq	%r13
1260.cfi_offset	%r13,-40
1261	pushq	%r14
1262.cfi_offset	%r14,-48
1263	pushq	%r15
1264.cfi_offset	%r15,-56
1265	subq	$128,%rsp
1266	andq	$-64,%rsp
1267
1268	shlq	$6,%rdx
1269	subq	%rdi,%rsi
1270	subq	%rdi,%r10
1271	addq	%rdi,%rdx
1272
1273
1274	movq	%rsi,64+8(%rsp)
1275	movq	%rdx,64+16(%rsp)
1276
1277	movq	%r8,64+32(%rsp)
1278	movq	%r9,64+40(%rsp)
1279	movq	%r10,64+48(%rsp)
1280	movq	%rax,120(%rsp)
1281.cfi_escape	0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
1282.Lprologue_avx:
1283	vzeroall
1284
1285	movq	%rdi,%r12
1286	leaq	128(%rcx),%rdi
1287	leaq	K256+544(%rip),%r13
1288	movl	240-128(%rdi),%r14d
1289	movq	%r9,%r15
1290	movq	%r10,%rsi
1291	vmovdqu	(%r8),%xmm8
1292	subq	$9,%r14
1293
1294	movl	0(%r15),%eax
1295	movl	4(%r15),%ebx
1296	movl	8(%r15),%ecx
1297	movl	12(%r15),%edx
1298	movl	16(%r15),%r8d
1299	movl	20(%r15),%r9d
1300	movl	24(%r15),%r10d
1301	movl	28(%r15),%r11d
1302
1303	vmovdqa	0(%r13,%r14,8),%xmm14
1304	vmovdqa	16(%r13,%r14,8),%xmm13
1305	vmovdqa	32(%r13,%r14,8),%xmm12
1306	vmovdqu	0-128(%rdi),%xmm10
1307	jmp	.Lloop_avx
1308.align	16
1309.Lloop_avx:
1310	vmovdqa	K256+512(%rip),%xmm7
1311	vmovdqu	0(%rsi,%r12,1),%xmm0
1312	vmovdqu	16(%rsi,%r12,1),%xmm1
1313	vmovdqu	32(%rsi,%r12,1),%xmm2
1314	vmovdqu	48(%rsi,%r12,1),%xmm3
1315	vpshufb	%xmm7,%xmm0,%xmm0
1316	leaq	K256(%rip),%rbp
1317	vpshufb	%xmm7,%xmm1,%xmm1
1318	vpshufb	%xmm7,%xmm2,%xmm2
1319	vpaddd	0(%rbp),%xmm0,%xmm4
1320	vpshufb	%xmm7,%xmm3,%xmm3
1321	vpaddd	32(%rbp),%xmm1,%xmm5
1322	vpaddd	64(%rbp),%xmm2,%xmm6
1323	vpaddd	96(%rbp),%xmm3,%xmm7
1324	vmovdqa	%xmm4,0(%rsp)
1325	movl	%eax,%r14d
1326	vmovdqa	%xmm5,16(%rsp)
1327	movl	%ebx,%esi
1328	vmovdqa	%xmm6,32(%rsp)
1329	xorl	%ecx,%esi
1330	vmovdqa	%xmm7,48(%rsp)
1331	movl	%r8d,%r13d
1332	jmp	.Lavx_00_47
1333
1334.align	16
1335.Lavx_00_47:
1336	subq	$-32*4,%rbp
1337	vmovdqu	(%r12),%xmm9
1338	movq	%r12,64+0(%rsp)
1339	vpalignr	$4,%xmm0,%xmm1,%xmm4
1340	shrdl	$14,%r13d,%r13d
1341	movl	%r14d,%eax
1342	movl	%r9d,%r12d
1343	vpalignr	$4,%xmm2,%xmm3,%xmm7
1344	xorl	%r8d,%r13d
1345	shrdl	$9,%r14d,%r14d
1346	xorl	%r10d,%r12d
1347	vpsrld	$7,%xmm4,%xmm6
1348	shrdl	$5,%r13d,%r13d
1349	xorl	%eax,%r14d
1350	andl	%r8d,%r12d
1351	vpaddd	%xmm7,%xmm0,%xmm0
1352	vpxor	%xmm10,%xmm9,%xmm9
1353	vmovdqu	16-128(%rdi),%xmm10
1354	xorl	%r8d,%r13d
1355	addl	0(%rsp),%r11d
1356	movl	%eax,%r15d
1357	vpsrld	$3,%xmm4,%xmm7
1358	shrdl	$11,%r14d,%r14d
1359	xorl	%r10d,%r12d
1360	xorl	%ebx,%r15d
1361	vpslld	$14,%xmm4,%xmm5
1362	shrdl	$6,%r13d,%r13d
1363	addl	%r12d,%r11d
1364	andl	%r15d,%esi
1365	vpxor	%xmm6,%xmm7,%xmm4
1366	xorl	%eax,%r14d
1367	addl	%r13d,%r11d
1368	xorl	%ebx,%esi
1369	vpshufd	$250,%xmm3,%xmm7
1370	addl	%r11d,%edx
1371	shrdl	$2,%r14d,%r14d
1372	addl	%esi,%r11d
1373	vpsrld	$11,%xmm6,%xmm6
1374	movl	%edx,%r13d
1375	addl	%r11d,%r14d
1376	shrdl	$14,%r13d,%r13d
1377	vpxor	%xmm5,%xmm4,%xmm4
1378	movl	%r14d,%r11d
1379	movl	%r8d,%r12d
1380	xorl	%edx,%r13d
1381	vpslld	$11,%xmm5,%xmm5
1382	shrdl	$9,%r14d,%r14d
1383	xorl	%r9d,%r12d
1384	shrdl	$5,%r13d,%r13d
1385	vpxor	%xmm6,%xmm4,%xmm4
1386	xorl	%r11d,%r14d
1387	andl	%edx,%r12d
1388	vpxor	%xmm8,%xmm9,%xmm9
1389	xorl	%edx,%r13d
1390	vpsrld	$10,%xmm7,%xmm6
1391	addl	4(%rsp),%r10d
1392	movl	%r11d,%esi
1393	shrdl	$11,%r14d,%r14d
1394	vpxor	%xmm5,%xmm4,%xmm4
1395	xorl	%r9d,%r12d
1396	xorl	%eax,%esi
1397	shrdl	$6,%r13d,%r13d
1398	vpsrlq	$17,%xmm7,%xmm7
1399	addl	%r12d,%r10d
1400	andl	%esi,%r15d
1401	xorl	%r11d,%r14d
1402	vpaddd	%xmm4,%xmm0,%xmm0
1403	addl	%r13d,%r10d
1404	xorl	%eax,%r15d
1405	addl	%r10d,%ecx
1406	vpxor	%xmm7,%xmm6,%xmm6
1407	shrdl	$2,%r14d,%r14d
1408	addl	%r15d,%r10d
1409	movl	%ecx,%r13d
1410	vpsrlq	$2,%xmm7,%xmm7
1411	addl	%r10d,%r14d
1412	shrdl	$14,%r13d,%r13d
1413	movl	%r14d,%r10d
1414	vpxor	%xmm7,%xmm6,%xmm6
1415	movl	%edx,%r12d
1416	xorl	%ecx,%r13d
1417	shrdl	$9,%r14d,%r14d
1418	vpshufd	$132,%xmm6,%xmm6
1419	xorl	%r8d,%r12d
1420	shrdl	$5,%r13d,%r13d
1421	xorl	%r10d,%r14d
1422	vpsrldq	$8,%xmm6,%xmm6
1423	andl	%ecx,%r12d
1424	vaesenc	%xmm10,%xmm9,%xmm9
1425	vmovdqu	32-128(%rdi),%xmm10
1426	xorl	%ecx,%r13d
1427	addl	8(%rsp),%r9d
1428	vpaddd	%xmm6,%xmm0,%xmm0
1429	movl	%r10d,%r15d
1430	shrdl	$11,%r14d,%r14d
1431	xorl	%r8d,%r12d
1432	vpshufd	$80,%xmm0,%xmm7
1433	xorl	%r11d,%r15d
1434	shrdl	$6,%r13d,%r13d
1435	addl	%r12d,%r9d
1436	vpsrld	$10,%xmm7,%xmm6
1437	andl	%r15d,%esi
1438	xorl	%r10d,%r14d
1439	addl	%r13d,%r9d
1440	vpsrlq	$17,%xmm7,%xmm7
1441	xorl	%r11d,%esi
1442	addl	%r9d,%ebx
1443	shrdl	$2,%r14d,%r14d
1444	vpxor	%xmm7,%xmm6,%xmm6
1445	addl	%esi,%r9d
1446	movl	%ebx,%r13d
1447	addl	%r9d,%r14d
1448	vpsrlq	$2,%xmm7,%xmm7
1449	shrdl	$14,%r13d,%r13d
1450	movl	%r14d,%r9d
1451	movl	%ecx,%r12d
1452	vpxor	%xmm7,%xmm6,%xmm6
1453	xorl	%ebx,%r13d
1454	shrdl	$9,%r14d,%r14d
1455	xorl	%edx,%r12d
1456	vpshufd	$232,%xmm6,%xmm6
1457	shrdl	$5,%r13d,%r13d
1458	xorl	%r9d,%r14d
1459	andl	%ebx,%r12d
1460	vpslldq	$8,%xmm6,%xmm6
1461	vaesenc	%xmm10,%xmm9,%xmm9
1462	vmovdqu	48-128(%rdi),%xmm10
1463	xorl	%ebx,%r13d
1464	addl	12(%rsp),%r8d
1465	movl	%r9d,%esi
1466	vpaddd	%xmm6,%xmm0,%xmm0
1467	shrdl	$11,%r14d,%r14d
1468	xorl	%edx,%r12d
1469	xorl	%r10d,%esi
1470	vpaddd	0(%rbp),%xmm0,%xmm6
1471	shrdl	$6,%r13d,%r13d
1472	addl	%r12d,%r8d
1473	andl	%esi,%r15d
1474	xorl	%r9d,%r14d
1475	addl	%r13d,%r8d
1476	xorl	%r10d,%r15d
1477	addl	%r8d,%eax
1478	shrdl	$2,%r14d,%r14d
1479	addl	%r15d,%r8d
1480	movl	%eax,%r13d
1481	addl	%r8d,%r14d
1482	vmovdqa	%xmm6,0(%rsp)
1483	vpalignr	$4,%xmm1,%xmm2,%xmm4
1484	shrdl	$14,%r13d,%r13d
1485	movl	%r14d,%r8d
1486	movl	%ebx,%r12d
1487	vpalignr	$4,%xmm3,%xmm0,%xmm7
1488	xorl	%eax,%r13d
1489	shrdl	$9,%r14d,%r14d
1490	xorl	%ecx,%r12d
1491	vpsrld	$7,%xmm4,%xmm6
1492	shrdl	$5,%r13d,%r13d
1493	xorl	%r8d,%r14d
1494	andl	%eax,%r12d
1495	vpaddd	%xmm7,%xmm1,%xmm1
1496	vaesenc	%xmm10,%xmm9,%xmm9
1497	vmovdqu	64-128(%rdi),%xmm10
1498	xorl	%eax,%r13d
1499	addl	16(%rsp),%edx
1500	movl	%r8d,%r15d
1501	vpsrld	$3,%xmm4,%xmm7
1502	shrdl	$11,%r14d,%r14d
1503	xorl	%ecx,%r12d
1504	xorl	%r9d,%r15d
1505	vpslld	$14,%xmm4,%xmm5
1506	shrdl	$6,%r13d,%r13d
1507	addl	%r12d,%edx
1508	andl	%r15d,%esi
1509	vpxor	%xmm6,%xmm7,%xmm4
1510	xorl	%r8d,%r14d
1511	addl	%r13d,%edx
1512	xorl	%r9d,%esi
1513	vpshufd	$250,%xmm0,%xmm7
1514	addl	%edx,%r11d
1515	shrdl	$2,%r14d,%r14d
1516	addl	%esi,%edx
1517	vpsrld	$11,%xmm6,%xmm6
1518	movl	%r11d,%r13d
1519	addl	%edx,%r14d
1520	shrdl	$14,%r13d,%r13d
1521	vpxor	%xmm5,%xmm4,%xmm4
1522	movl	%r14d,%edx
1523	movl	%eax,%r12d
1524	xorl	%r11d,%r13d
1525	vpslld	$11,%xmm5,%xmm5
1526	shrdl	$9,%r14d,%r14d
1527	xorl	%ebx,%r12d
1528	shrdl	$5,%r13d,%r13d
1529	vpxor	%xmm6,%xmm4,%xmm4
1530	xorl	%edx,%r14d
1531	andl	%r11d,%r12d
1532	vaesenc	%xmm10,%xmm9,%xmm9
1533	vmovdqu	80-128(%rdi),%xmm10
1534	xorl	%r11d,%r13d
1535	vpsrld	$10,%xmm7,%xmm6
1536	addl	20(%rsp),%ecx
1537	movl	%edx,%esi
1538	shrdl	$11,%r14d,%r14d
1539	vpxor	%xmm5,%xmm4,%xmm4
1540	xorl	%ebx,%r12d
1541	xorl	%r8d,%esi
1542	shrdl	$6,%r13d,%r13d
1543	vpsrlq	$17,%xmm7,%xmm7
1544	addl	%r12d,%ecx
1545	andl	%esi,%r15d
1546	xorl	%edx,%r14d
1547	vpaddd	%xmm4,%xmm1,%xmm1
1548	addl	%r13d,%ecx
1549	xorl	%r8d,%r15d
1550	addl	%ecx,%r10d
1551	vpxor	%xmm7,%xmm6,%xmm6
1552	shrdl	$2,%r14d,%r14d
1553	addl	%r15d,%ecx
1554	movl	%r10d,%r13d
1555	vpsrlq	$2,%xmm7,%xmm7
1556	addl	%ecx,%r14d
1557	shrdl	$14,%r13d,%r13d
1558	movl	%r14d,%ecx
1559	vpxor	%xmm7,%xmm6,%xmm6
1560	movl	%r11d,%r12d
1561	xorl	%r10d,%r13d
1562	shrdl	$9,%r14d,%r14d
1563	vpshufd	$132,%xmm6,%xmm6
1564	xorl	%eax,%r12d
1565	shrdl	$5,%r13d,%r13d
1566	xorl	%ecx,%r14d
1567	vpsrldq	$8,%xmm6,%xmm6
1568	andl	%r10d,%r12d
1569	vaesenc	%xmm10,%xmm9,%xmm9
1570	vmovdqu	96-128(%rdi),%xmm10
1571	xorl	%r10d,%r13d
1572	addl	24(%rsp),%ebx
1573	vpaddd	%xmm6,%xmm1,%xmm1
1574	movl	%ecx,%r15d
1575	shrdl	$11,%r14d,%r14d
1576	xorl	%eax,%r12d
1577	vpshufd	$80,%xmm1,%xmm7
1578	xorl	%edx,%r15d
1579	shrdl	$6,%r13d,%r13d
1580	addl	%r12d,%ebx
1581	vpsrld	$10,%xmm7,%xmm6
1582	andl	%r15d,%esi
1583	xorl	%ecx,%r14d
1584	addl	%r13d,%ebx
1585	vpsrlq	$17,%xmm7,%xmm7
1586	xorl	%edx,%esi
1587	addl	%ebx,%r9d
1588	shrdl	$2,%r14d,%r14d
1589	vpxor	%xmm7,%xmm6,%xmm6
1590	addl	%esi,%ebx
1591	movl	%r9d,%r13d
1592	addl	%ebx,%r14d
1593	vpsrlq	$2,%xmm7,%xmm7
1594	shrdl	$14,%r13d,%r13d
1595	movl	%r14d,%ebx
1596	movl	%r10d,%r12d
1597	vpxor	%xmm7,%xmm6,%xmm6
1598	xorl	%r9d,%r13d
1599	shrdl	$9,%r14d,%r14d
1600	xorl	%r11d,%r12d
1601	vpshufd	$232,%xmm6,%xmm6
1602	shrdl	$5,%r13d,%r13d
1603	xorl	%ebx,%r14d
1604	andl	%r9d,%r12d
1605	vpslldq	$8,%xmm6,%xmm6
1606	vaesenc	%xmm10,%xmm9,%xmm9
1607	vmovdqu	112-128(%rdi),%xmm10
1608	xorl	%r9d,%r13d
1609	addl	28(%rsp),%eax
1610	movl	%ebx,%esi
1611	vpaddd	%xmm6,%xmm1,%xmm1
1612	shrdl	$11,%r14d,%r14d
1613	xorl	%r11d,%r12d
1614	xorl	%ecx,%esi
1615	vpaddd	32(%rbp),%xmm1,%xmm6
1616	shrdl	$6,%r13d,%r13d
1617	addl	%r12d,%eax
1618	andl	%esi,%r15d
1619	xorl	%ebx,%r14d
1620	addl	%r13d,%eax
1621	xorl	%ecx,%r15d
1622	addl	%eax,%r8d
1623	shrdl	$2,%r14d,%r14d
1624	addl	%r15d,%eax
1625	movl	%r8d,%r13d
1626	addl	%eax,%r14d
1627	vmovdqa	%xmm6,16(%rsp)
1628	vpalignr	$4,%xmm2,%xmm3,%xmm4
1629	shrdl	$14,%r13d,%r13d
1630	movl	%r14d,%eax
1631	movl	%r9d,%r12d
1632	vpalignr	$4,%xmm0,%xmm1,%xmm7
1633	xorl	%r8d,%r13d
1634	shrdl	$9,%r14d,%r14d
1635	xorl	%r10d,%r12d
1636	vpsrld	$7,%xmm4,%xmm6
1637	shrdl	$5,%r13d,%r13d
1638	xorl	%eax,%r14d
1639	andl	%r8d,%r12d
1640	vpaddd	%xmm7,%xmm2,%xmm2
1641	vaesenc	%xmm10,%xmm9,%xmm9
1642	vmovdqu	128-128(%rdi),%xmm10
1643	xorl	%r8d,%r13d
1644	addl	32(%rsp),%r11d
1645	movl	%eax,%r15d
1646	vpsrld	$3,%xmm4,%xmm7
1647	shrdl	$11,%r14d,%r14d
1648	xorl	%r10d,%r12d
1649	xorl	%ebx,%r15d
1650	vpslld	$14,%xmm4,%xmm5
1651	shrdl	$6,%r13d,%r13d
1652	addl	%r12d,%r11d
1653	andl	%r15d,%esi
1654	vpxor	%xmm6,%xmm7,%xmm4
1655	xorl	%eax,%r14d
1656	addl	%r13d,%r11d
1657	xorl	%ebx,%esi
1658	vpshufd	$250,%xmm1,%xmm7
1659	addl	%r11d,%edx
1660	shrdl	$2,%r14d,%r14d
1661	addl	%esi,%r11d
1662	vpsrld	$11,%xmm6,%xmm6
1663	movl	%edx,%r13d
1664	addl	%r11d,%r14d
1665	shrdl	$14,%r13d,%r13d
1666	vpxor	%xmm5,%xmm4,%xmm4
1667	movl	%r14d,%r11d
1668	movl	%r8d,%r12d
1669	xorl	%edx,%r13d
1670	vpslld	$11,%xmm5,%xmm5
1671	shrdl	$9,%r14d,%r14d
1672	xorl	%r9d,%r12d
1673	shrdl	$5,%r13d,%r13d
1674	vpxor	%xmm6,%xmm4,%xmm4
1675	xorl	%r11d,%r14d
1676	andl	%edx,%r12d
1677	vaesenc	%xmm10,%xmm9,%xmm9
1678	vmovdqu	144-128(%rdi),%xmm10
1679	xorl	%edx,%r13d
1680	vpsrld	$10,%xmm7,%xmm6
1681	addl	36(%rsp),%r10d
1682	movl	%r11d,%esi
1683	shrdl	$11,%r14d,%r14d
1684	vpxor	%xmm5,%xmm4,%xmm4
1685	xorl	%r9d,%r12d
1686	xorl	%eax,%esi
1687	shrdl	$6,%r13d,%r13d
1688	vpsrlq	$17,%xmm7,%xmm7
1689	addl	%r12d,%r10d
1690	andl	%esi,%r15d
1691	xorl	%r11d,%r14d
1692	vpaddd	%xmm4,%xmm2,%xmm2
1693	addl	%r13d,%r10d
1694	xorl	%eax,%r15d
1695	addl	%r10d,%ecx
1696	vpxor	%xmm7,%xmm6,%xmm6
1697	shrdl	$2,%r14d,%r14d
1698	addl	%r15d,%r10d
1699	movl	%ecx,%r13d
1700	vpsrlq	$2,%xmm7,%xmm7
1701	addl	%r10d,%r14d
1702	shrdl	$14,%r13d,%r13d
1703	movl	%r14d,%r10d
1704	vpxor	%xmm7,%xmm6,%xmm6
1705	movl	%edx,%r12d
1706	xorl	%ecx,%r13d
1707	shrdl	$9,%r14d,%r14d
1708	vpshufd	$132,%xmm6,%xmm6
1709	xorl	%r8d,%r12d
1710	shrdl	$5,%r13d,%r13d
1711	xorl	%r10d,%r14d
1712	vpsrldq	$8,%xmm6,%xmm6
1713	andl	%ecx,%r12d
1714	vaesenc	%xmm10,%xmm9,%xmm9
1715	vmovdqu	160-128(%rdi),%xmm10
1716	xorl	%ecx,%r13d
1717	addl	40(%rsp),%r9d
1718	vpaddd	%xmm6,%xmm2,%xmm2
1719	movl	%r10d,%r15d
1720	shrdl	$11,%r14d,%r14d
1721	xorl	%r8d,%r12d
1722	vpshufd	$80,%xmm2,%xmm7
1723	xorl	%r11d,%r15d
1724	shrdl	$6,%r13d,%r13d
1725	addl	%r12d,%r9d
1726	vpsrld	$10,%xmm7,%xmm6
1727	andl	%r15d,%esi
1728	xorl	%r10d,%r14d
1729	addl	%r13d,%r9d
1730	vpsrlq	$17,%xmm7,%xmm7
1731	xorl	%r11d,%esi
1732	addl	%r9d,%ebx
1733	shrdl	$2,%r14d,%r14d
1734	vpxor	%xmm7,%xmm6,%xmm6
1735	addl	%esi,%r9d
1736	movl	%ebx,%r13d
1737	addl	%r9d,%r14d
1738	vpsrlq	$2,%xmm7,%xmm7
1739	shrdl	$14,%r13d,%r13d
1740	movl	%r14d,%r9d
1741	movl	%ecx,%r12d
1742	vpxor	%xmm7,%xmm6,%xmm6
1743	xorl	%ebx,%r13d
1744	shrdl	$9,%r14d,%r14d
1745	xorl	%edx,%r12d
1746	vpshufd	$232,%xmm6,%xmm6
1747	shrdl	$5,%r13d,%r13d
1748	xorl	%r9d,%r14d
1749	andl	%ebx,%r12d
1750	vpslldq	$8,%xmm6,%xmm6
1751	vaesenclast	%xmm10,%xmm9,%xmm11
1752	vaesenc	%xmm10,%xmm9,%xmm9
1753	vmovdqu	176-128(%rdi),%xmm10
1754	xorl	%ebx,%r13d
1755	addl	44(%rsp),%r8d
1756	movl	%r9d,%esi
1757	vpaddd	%xmm6,%xmm2,%xmm2
1758	shrdl	$11,%r14d,%r14d
1759	xorl	%edx,%r12d
1760	xorl	%r10d,%esi
1761	vpaddd	64(%rbp),%xmm2,%xmm6
1762	shrdl	$6,%r13d,%r13d
1763	addl	%r12d,%r8d
1764	andl	%esi,%r15d
1765	xorl	%r9d,%r14d
1766	addl	%r13d,%r8d
1767	xorl	%r10d,%r15d
1768	addl	%r8d,%eax
1769	shrdl	$2,%r14d,%r14d
1770	addl	%r15d,%r8d
1771	movl	%eax,%r13d
1772	addl	%r8d,%r14d
1773	vmovdqa	%xmm6,32(%rsp)
1774	vpalignr	$4,%xmm3,%xmm0,%xmm4
1775	shrdl	$14,%r13d,%r13d
1776	movl	%r14d,%r8d
1777	movl	%ebx,%r12d
1778	vpalignr	$4,%xmm1,%xmm2,%xmm7
1779	xorl	%eax,%r13d
1780	shrdl	$9,%r14d,%r14d
1781	xorl	%ecx,%r12d
1782	vpsrld	$7,%xmm4,%xmm6
1783	shrdl	$5,%r13d,%r13d
1784	xorl	%r8d,%r14d
1785	andl	%eax,%r12d
1786	vpaddd	%xmm7,%xmm3,%xmm3
1787	vpand	%xmm12,%xmm11,%xmm8
1788	vaesenc	%xmm10,%xmm9,%xmm9
1789	vmovdqu	192-128(%rdi),%xmm10
1790	xorl	%eax,%r13d
1791	addl	48(%rsp),%edx
1792	movl	%r8d,%r15d
1793	vpsrld	$3,%xmm4,%xmm7
1794	shrdl	$11,%r14d,%r14d
1795	xorl	%ecx,%r12d
1796	xorl	%r9d,%r15d
1797	vpslld	$14,%xmm4,%xmm5
1798	shrdl	$6,%r13d,%r13d
1799	addl	%r12d,%edx
1800	andl	%r15d,%esi
1801	vpxor	%xmm6,%xmm7,%xmm4
1802	xorl	%r8d,%r14d
1803	addl	%r13d,%edx
1804	xorl	%r9d,%esi
1805	vpshufd	$250,%xmm2,%xmm7
1806	addl	%edx,%r11d
1807	shrdl	$2,%r14d,%r14d
1808	addl	%esi,%edx
1809	vpsrld	$11,%xmm6,%xmm6
1810	movl	%r11d,%r13d
1811	addl	%edx,%r14d
1812	shrdl	$14,%r13d,%r13d
1813	vpxor	%xmm5,%xmm4,%xmm4
1814	movl	%r14d,%edx
1815	movl	%eax,%r12d
1816	xorl	%r11d,%r13d
1817	vpslld	$11,%xmm5,%xmm5
1818	shrdl	$9,%r14d,%r14d
1819	xorl	%ebx,%r12d
1820	shrdl	$5,%r13d,%r13d
1821	vpxor	%xmm6,%xmm4,%xmm4
1822	xorl	%edx,%r14d
1823	andl	%r11d,%r12d
1824	vaesenclast	%xmm10,%xmm9,%xmm11
1825	vaesenc	%xmm10,%xmm9,%xmm9
1826	vmovdqu	208-128(%rdi),%xmm10
1827	xorl	%r11d,%r13d
1828	vpsrld	$10,%xmm7,%xmm6
1829	addl	52(%rsp),%ecx
1830	movl	%edx,%esi
1831	shrdl	$11,%r14d,%r14d
1832	vpxor	%xmm5,%xmm4,%xmm4
1833	xorl	%ebx,%r12d
1834	xorl	%r8d,%esi
1835	shrdl	$6,%r13d,%r13d
1836	vpsrlq	$17,%xmm7,%xmm7
1837	addl	%r12d,%ecx
1838	andl	%esi,%r15d
1839	xorl	%edx,%r14d
1840	vpaddd	%xmm4,%xmm3,%xmm3
1841	addl	%r13d,%ecx
1842	xorl	%r8d,%r15d
1843	addl	%ecx,%r10d
1844	vpxor	%xmm7,%xmm6,%xmm6
1845	shrdl	$2,%r14d,%r14d
1846	addl	%r15d,%ecx
1847	movl	%r10d,%r13d
1848	vpsrlq	$2,%xmm7,%xmm7
1849	addl	%ecx,%r14d
1850	shrdl	$14,%r13d,%r13d
1851	movl	%r14d,%ecx
1852	vpxor	%xmm7,%xmm6,%xmm6
1853	movl	%r11d,%r12d
1854	xorl	%r10d,%r13d
1855	shrdl	$9,%r14d,%r14d
1856	vpshufd	$132,%xmm6,%xmm6
1857	xorl	%eax,%r12d
1858	shrdl	$5,%r13d,%r13d
1859	xorl	%ecx,%r14d
1860	vpsrldq	$8,%xmm6,%xmm6
1861	andl	%r10d,%r12d
1862	vpand	%xmm13,%xmm11,%xmm11
1863	vaesenc	%xmm10,%xmm9,%xmm9
1864	vmovdqu	224-128(%rdi),%xmm10
1865	xorl	%r10d,%r13d
1866	addl	56(%rsp),%ebx
1867	vpaddd	%xmm6,%xmm3,%xmm3
1868	movl	%ecx,%r15d
1869	shrdl	$11,%r14d,%r14d
1870	xorl	%eax,%r12d
1871	vpshufd	$80,%xmm3,%xmm7
1872	xorl	%edx,%r15d
1873	shrdl	$6,%r13d,%r13d
1874	addl	%r12d,%ebx
1875	vpsrld	$10,%xmm7,%xmm6
1876	andl	%r15d,%esi
1877	xorl	%ecx,%r14d
1878	addl	%r13d,%ebx
1879	vpsrlq	$17,%xmm7,%xmm7
1880	xorl	%edx,%esi
1881	addl	%ebx,%r9d
1882	shrdl	$2,%r14d,%r14d
1883	vpxor	%xmm7,%xmm6,%xmm6
1884	addl	%esi,%ebx
1885	movl	%r9d,%r13d
1886	addl	%ebx,%r14d
1887	vpsrlq	$2,%xmm7,%xmm7
1888	shrdl	$14,%r13d,%r13d
1889	movl	%r14d,%ebx
1890	movl	%r10d,%r12d
1891	vpxor	%xmm7,%xmm6,%xmm6
1892	xorl	%r9d,%r13d
1893	shrdl	$9,%r14d,%r14d
1894	xorl	%r11d,%r12d
1895	vpshufd	$232,%xmm6,%xmm6
1896	shrdl	$5,%r13d,%r13d
1897	xorl	%ebx,%r14d
1898	andl	%r9d,%r12d
1899	vpslldq	$8,%xmm6,%xmm6
1900	vpor	%xmm11,%xmm8,%xmm8
1901	vaesenclast	%xmm10,%xmm9,%xmm11
1902	vmovdqu	0-128(%rdi),%xmm10
1903	xorl	%r9d,%r13d
1904	addl	60(%rsp),%eax
1905	movl	%ebx,%esi
1906	vpaddd	%xmm6,%xmm3,%xmm3
1907	shrdl	$11,%r14d,%r14d
1908	xorl	%r11d,%r12d
1909	xorl	%ecx,%esi
1910	vpaddd	96(%rbp),%xmm3,%xmm6
1911	shrdl	$6,%r13d,%r13d
1912	addl	%r12d,%eax
1913	andl	%esi,%r15d
1914	xorl	%ebx,%r14d
1915	addl	%r13d,%eax
1916	xorl	%ecx,%r15d
1917	addl	%eax,%r8d
1918	shrdl	$2,%r14d,%r14d
1919	addl	%r15d,%eax
1920	movl	%r8d,%r13d
1921	addl	%eax,%r14d
1922	vmovdqa	%xmm6,48(%rsp)
1923	movq	64+0(%rsp),%r12
1924	vpand	%xmm14,%xmm11,%xmm11
1925	movq	64+8(%rsp),%r15
1926	vpor	%xmm11,%xmm8,%xmm8
1927	vmovdqu	%xmm8,(%r15,%r12,1)
1928	leaq	16(%r12),%r12
1929	cmpb	$0,131(%rbp)
1930	jne	.Lavx_00_47
1931	vmovdqu	(%r12),%xmm9
1932	movq	%r12,64+0(%rsp)
1933	shrdl	$14,%r13d,%r13d
1934	movl	%r14d,%eax
1935	movl	%r9d,%r12d
1936	xorl	%r8d,%r13d
1937	shrdl	$9,%r14d,%r14d
1938	xorl	%r10d,%r12d
1939	shrdl	$5,%r13d,%r13d
1940	xorl	%eax,%r14d
1941	andl	%r8d,%r12d
1942	vpxor	%xmm10,%xmm9,%xmm9
1943	vmovdqu	16-128(%rdi),%xmm10
1944	xorl	%r8d,%r13d
1945	addl	0(%rsp),%r11d
1946	movl	%eax,%r15d
1947	shrdl	$11,%r14d,%r14d
1948	xorl	%r10d,%r12d
1949	xorl	%ebx,%r15d
1950	shrdl	$6,%r13d,%r13d
1951	addl	%r12d,%r11d
1952	andl	%r15d,%esi
1953	xorl	%eax,%r14d
1954	addl	%r13d,%r11d
1955	xorl	%ebx,%esi
1956	addl	%r11d,%edx
1957	shrdl	$2,%r14d,%r14d
1958	addl	%esi,%r11d
1959	movl	%edx,%r13d
1960	addl	%r11d,%r14d
1961	shrdl	$14,%r13d,%r13d
1962	movl	%r14d,%r11d
1963	movl	%r8d,%r12d
1964	xorl	%edx,%r13d
1965	shrdl	$9,%r14d,%r14d
1966	xorl	%r9d,%r12d
1967	shrdl	$5,%r13d,%r13d
1968	xorl	%r11d,%r14d
1969	andl	%edx,%r12d
1970	vpxor	%xmm8,%xmm9,%xmm9
1971	xorl	%edx,%r13d
1972	addl	4(%rsp),%r10d
1973	movl	%r11d,%esi
1974	shrdl	$11,%r14d,%r14d
1975	xorl	%r9d,%r12d
1976	xorl	%eax,%esi
1977	shrdl	$6,%r13d,%r13d
1978	addl	%r12d,%r10d
1979	andl	%esi,%r15d
1980	xorl	%r11d,%r14d
1981	addl	%r13d,%r10d
1982	xorl	%eax,%r15d
1983	addl	%r10d,%ecx
1984	shrdl	$2,%r14d,%r14d
1985	addl	%r15d,%r10d
1986	movl	%ecx,%r13d
1987	addl	%r10d,%r14d
1988	shrdl	$14,%r13d,%r13d
1989	movl	%r14d,%r10d
1990	movl	%edx,%r12d
1991	xorl	%ecx,%r13d
1992	shrdl	$9,%r14d,%r14d
1993	xorl	%r8d,%r12d
1994	shrdl	$5,%r13d,%r13d
1995	xorl	%r10d,%r14d
1996	andl	%ecx,%r12d
1997	vaesenc	%xmm10,%xmm9,%xmm9
1998	vmovdqu	32-128(%rdi),%xmm10
1999	xorl	%ecx,%r13d
2000	addl	8(%rsp),%r9d
2001	movl	%r10d,%r15d
2002	shrdl	$11,%r14d,%r14d
2003	xorl	%r8d,%r12d
2004	xorl	%r11d,%r15d
2005	shrdl	$6,%r13d,%r13d
2006	addl	%r12d,%r9d
2007	andl	%r15d,%esi
2008	xorl	%r10d,%r14d
2009	addl	%r13d,%r9d
2010	xorl	%r11d,%esi
2011	addl	%r9d,%ebx
2012	shrdl	$2,%r14d,%r14d
2013	addl	%esi,%r9d
2014	movl	%ebx,%r13d
2015	addl	%r9d,%r14d
2016	shrdl	$14,%r13d,%r13d
2017	movl	%r14d,%r9d
2018	movl	%ecx,%r12d
2019	xorl	%ebx,%r13d
2020	shrdl	$9,%r14d,%r14d
2021	xorl	%edx,%r12d
2022	shrdl	$5,%r13d,%r13d
2023	xorl	%r9d,%r14d
2024	andl	%ebx,%r12d
2025	vaesenc	%xmm10,%xmm9,%xmm9
2026	vmovdqu	48-128(%rdi),%xmm10
2027	xorl	%ebx,%r13d
2028	addl	12(%rsp),%r8d
2029	movl	%r9d,%esi
2030	shrdl	$11,%r14d,%r14d
2031	xorl	%edx,%r12d
2032	xorl	%r10d,%esi
2033	shrdl	$6,%r13d,%r13d
2034	addl	%r12d,%r8d
2035	andl	%esi,%r15d
2036	xorl	%r9d,%r14d
2037	addl	%r13d,%r8d
2038	xorl	%r10d,%r15d
2039	addl	%r8d,%eax
2040	shrdl	$2,%r14d,%r14d
2041	addl	%r15d,%r8d
2042	movl	%eax,%r13d
2043	addl	%r8d,%r14d
2044	shrdl	$14,%r13d,%r13d
2045	movl	%r14d,%r8d
2046	movl	%ebx,%r12d
2047	xorl	%eax,%r13d
2048	shrdl	$9,%r14d,%r14d
2049	xorl	%ecx,%r12d
2050	shrdl	$5,%r13d,%r13d
2051	xorl	%r8d,%r14d
2052	andl	%eax,%r12d
2053	vaesenc	%xmm10,%xmm9,%xmm9
2054	vmovdqu	64-128(%rdi),%xmm10
2055	xorl	%eax,%r13d
2056	addl	16(%rsp),%edx
2057	movl	%r8d,%r15d
2058	shrdl	$11,%r14d,%r14d
2059	xorl	%ecx,%r12d
2060	xorl	%r9d,%r15d
2061	shrdl	$6,%r13d,%r13d
2062	addl	%r12d,%edx
2063	andl	%r15d,%esi
2064	xorl	%r8d,%r14d
2065	addl	%r13d,%edx
2066	xorl	%r9d,%esi
2067	addl	%edx,%r11d
2068	shrdl	$2,%r14d,%r14d
2069	addl	%esi,%edx
2070	movl	%r11d,%r13d
2071	addl	%edx,%r14d
2072	shrdl	$14,%r13d,%r13d
2073	movl	%r14d,%edx
2074	movl	%eax,%r12d
2075	xorl	%r11d,%r13d
2076	shrdl	$9,%r14d,%r14d
2077	xorl	%ebx,%r12d
2078	shrdl	$5,%r13d,%r13d
2079	xorl	%edx,%r14d
2080	andl	%r11d,%r12d
2081	vaesenc	%xmm10,%xmm9,%xmm9
2082	vmovdqu	80-128(%rdi),%xmm10
2083	xorl	%r11d,%r13d
2084	addl	20(%rsp),%ecx
2085	movl	%edx,%esi
2086	shrdl	$11,%r14d,%r14d
2087	xorl	%ebx,%r12d
2088	xorl	%r8d,%esi
2089	shrdl	$6,%r13d,%r13d
2090	addl	%r12d,%ecx
2091	andl	%esi,%r15d
2092	xorl	%edx,%r14d
2093	addl	%r13d,%ecx
2094	xorl	%r8d,%r15d
2095	addl	%ecx,%r10d
2096	shrdl	$2,%r14d,%r14d
2097	addl	%r15d,%ecx
2098	movl	%r10d,%r13d
2099	addl	%ecx,%r14d
2100	shrdl	$14,%r13d,%r13d
2101	movl	%r14d,%ecx
2102	movl	%r11d,%r12d
2103	xorl	%r10d,%r13d
2104	shrdl	$9,%r14d,%r14d
2105	xorl	%eax,%r12d
2106	shrdl	$5,%r13d,%r13d
2107	xorl	%ecx,%r14d
2108	andl	%r10d,%r12d
2109	vaesenc	%xmm10,%xmm9,%xmm9
2110	vmovdqu	96-128(%rdi),%xmm10
2111	xorl	%r10d,%r13d
2112	addl	24(%rsp),%ebx
2113	movl	%ecx,%r15d
2114	shrdl	$11,%r14d,%r14d
2115	xorl	%eax,%r12d
2116	xorl	%edx,%r15d
2117	shrdl	$6,%r13d,%r13d
2118	addl	%r12d,%ebx
2119	andl	%r15d,%esi
2120	xorl	%ecx,%r14d
2121	addl	%r13d,%ebx
2122	xorl	%edx,%esi
2123	addl	%ebx,%r9d
2124	shrdl	$2,%r14d,%r14d
2125	addl	%esi,%ebx
2126	movl	%r9d,%r13d
2127	addl	%ebx,%r14d
2128	shrdl	$14,%r13d,%r13d
2129	movl	%r14d,%ebx
2130	movl	%r10d,%r12d
2131	xorl	%r9d,%r13d
2132	shrdl	$9,%r14d,%r14d
2133	xorl	%r11d,%r12d
2134	shrdl	$5,%r13d,%r13d
2135	xorl	%ebx,%r14d
2136	andl	%r9d,%r12d
2137	vaesenc	%xmm10,%xmm9,%xmm9
2138	vmovdqu	112-128(%rdi),%xmm10
2139	xorl	%r9d,%r13d
2140	addl	28(%rsp),%eax
2141	movl	%ebx,%esi
2142	shrdl	$11,%r14d,%r14d
2143	xorl	%r11d,%r12d
2144	xorl	%ecx,%esi
2145	shrdl	$6,%r13d,%r13d
2146	addl	%r12d,%eax
2147	andl	%esi,%r15d
2148	xorl	%ebx,%r14d
2149	addl	%r13d,%eax
2150	xorl	%ecx,%r15d
2151	addl	%eax,%r8d
2152	shrdl	$2,%r14d,%r14d
2153	addl	%r15d,%eax
2154	movl	%r8d,%r13d
2155	addl	%eax,%r14d
2156	shrdl	$14,%r13d,%r13d
2157	movl	%r14d,%eax
2158	movl	%r9d,%r12d
2159	xorl	%r8d,%r13d
2160	shrdl	$9,%r14d,%r14d
2161	xorl	%r10d,%r12d
2162	shrdl	$5,%r13d,%r13d
2163	xorl	%eax,%r14d
2164	andl	%r8d,%r12d
2165	vaesenc	%xmm10,%xmm9,%xmm9
2166	vmovdqu	128-128(%rdi),%xmm10
2167	xorl	%r8d,%r13d
2168	addl	32(%rsp),%r11d
2169	movl	%eax,%r15d
2170	shrdl	$11,%r14d,%r14d
2171	xorl	%r10d,%r12d
2172	xorl	%ebx,%r15d
2173	shrdl	$6,%r13d,%r13d
2174	addl	%r12d,%r11d
2175	andl	%r15d,%esi
2176	xorl	%eax,%r14d
2177	addl	%r13d,%r11d
2178	xorl	%ebx,%esi
2179	addl	%r11d,%edx
2180	shrdl	$2,%r14d,%r14d
2181	addl	%esi,%r11d
2182	movl	%edx,%r13d
2183	addl	%r11d,%r14d
2184	shrdl	$14,%r13d,%r13d
2185	movl	%r14d,%r11d
2186	movl	%r8d,%r12d
2187	xorl	%edx,%r13d
2188	shrdl	$9,%r14d,%r14d
2189	xorl	%r9d,%r12d
2190	shrdl	$5,%r13d,%r13d
2191	xorl	%r11d,%r14d
2192	andl	%edx,%r12d
2193	vaesenc	%xmm10,%xmm9,%xmm9
2194	vmovdqu	144-128(%rdi),%xmm10
2195	xorl	%edx,%r13d
2196	addl	36(%rsp),%r10d
2197	movl	%r11d,%esi
2198	shrdl	$11,%r14d,%r14d
2199	xorl	%r9d,%r12d
2200	xorl	%eax,%esi
2201	shrdl	$6,%r13d,%r13d
2202	addl	%r12d,%r10d
2203	andl	%esi,%r15d
2204	xorl	%r11d,%r14d
2205	addl	%r13d,%r10d
2206	xorl	%eax,%r15d
2207	addl	%r10d,%ecx
2208	shrdl	$2,%r14d,%r14d
2209	addl	%r15d,%r10d
2210	movl	%ecx,%r13d
2211	addl	%r10d,%r14d
2212	shrdl	$14,%r13d,%r13d
2213	movl	%r14d,%r10d
2214	movl	%edx,%r12d
2215	xorl	%ecx,%r13d
2216	shrdl	$9,%r14d,%r14d
2217	xorl	%r8d,%r12d
2218	shrdl	$5,%r13d,%r13d
2219	xorl	%r10d,%r14d
2220	andl	%ecx,%r12d
2221	vaesenc	%xmm10,%xmm9,%xmm9
2222	vmovdqu	160-128(%rdi),%xmm10
2223	xorl	%ecx,%r13d
2224	addl	40(%rsp),%r9d
2225	movl	%r10d,%r15d
2226	shrdl	$11,%r14d,%r14d
2227	xorl	%r8d,%r12d
2228	xorl	%r11d,%r15d
2229	shrdl	$6,%r13d,%r13d
2230	addl	%r12d,%r9d
2231	andl	%r15d,%esi
2232	xorl	%r10d,%r14d
2233	addl	%r13d,%r9d
2234	xorl	%r11d,%esi
2235	addl	%r9d,%ebx
2236	shrdl	$2,%r14d,%r14d
2237	addl	%esi,%r9d
2238	movl	%ebx,%r13d
2239	addl	%r9d,%r14d
2240	shrdl	$14,%r13d,%r13d
2241	movl	%r14d,%r9d
2242	movl	%ecx,%r12d
2243	xorl	%ebx,%r13d
2244	shrdl	$9,%r14d,%r14d
2245	xorl	%edx,%r12d
2246	shrdl	$5,%r13d,%r13d
2247	xorl	%r9d,%r14d
2248	andl	%ebx,%r12d
2249	vaesenclast	%xmm10,%xmm9,%xmm11
2250	vaesenc	%xmm10,%xmm9,%xmm9
2251	vmovdqu	176-128(%rdi),%xmm10
2252	xorl	%ebx,%r13d
2253	addl	44(%rsp),%r8d
2254	movl	%r9d,%esi
2255	shrdl	$11,%r14d,%r14d
2256	xorl	%edx,%r12d
2257	xorl	%r10d,%esi
2258	shrdl	$6,%r13d,%r13d
2259	addl	%r12d,%r8d
2260	andl	%esi,%r15d
2261	xorl	%r9d,%r14d
2262	addl	%r13d,%r8d
2263	xorl	%r10d,%r15d
2264	addl	%r8d,%eax
2265	shrdl	$2,%r14d,%r14d
2266	addl	%r15d,%r8d
2267	movl	%eax,%r13d
2268	addl	%r8d,%r14d
2269	shrdl	$14,%r13d,%r13d
2270	movl	%r14d,%r8d
2271	movl	%ebx,%r12d
2272	xorl	%eax,%r13d
2273	shrdl	$9,%r14d,%r14d
2274	xorl	%ecx,%r12d
2275	shrdl	$5,%r13d,%r13d
2276	xorl	%r8d,%r14d
2277	andl	%eax,%r12d
2278	vpand	%xmm12,%xmm11,%xmm8
2279	vaesenc	%xmm10,%xmm9,%xmm9
2280	vmovdqu	192-128(%rdi),%xmm10
2281	xorl	%eax,%r13d
2282	addl	48(%rsp),%edx
2283	movl	%r8d,%r15d
2284	shrdl	$11,%r14d,%r14d
2285	xorl	%ecx,%r12d
2286	xorl	%r9d,%r15d
2287	shrdl	$6,%r13d,%r13d
2288	addl	%r12d,%edx
2289	andl	%r15d,%esi
2290	xorl	%r8d,%r14d
2291	addl	%r13d,%edx
2292	xorl	%r9d,%esi
2293	addl	%edx,%r11d
2294	shrdl	$2,%r14d,%r14d
2295	addl	%esi,%edx
2296	movl	%r11d,%r13d
2297	addl	%edx,%r14d
2298	shrdl	$14,%r13d,%r13d
2299	movl	%r14d,%edx
2300	movl	%eax,%r12d
2301	xorl	%r11d,%r13d
2302	shrdl	$9,%r14d,%r14d
2303	xorl	%ebx,%r12d
2304	shrdl	$5,%r13d,%r13d
2305	xorl	%edx,%r14d
2306	andl	%r11d,%r12d
2307	vaesenclast	%xmm10,%xmm9,%xmm11
2308	vaesenc	%xmm10,%xmm9,%xmm9
2309	vmovdqu	208-128(%rdi),%xmm10
2310	xorl	%r11d,%r13d
2311	addl	52(%rsp),%ecx
2312	movl	%edx,%esi
2313	shrdl	$11,%r14d,%r14d
2314	xorl	%ebx,%r12d
2315	xorl	%r8d,%esi
2316	shrdl	$6,%r13d,%r13d
2317	addl	%r12d,%ecx
2318	andl	%esi,%r15d
2319	xorl	%edx,%r14d
2320	addl	%r13d,%ecx
2321	xorl	%r8d,%r15d
2322	addl	%ecx,%r10d
2323	shrdl	$2,%r14d,%r14d
2324	addl	%r15d,%ecx
2325	movl	%r10d,%r13d
2326	addl	%ecx,%r14d
2327	shrdl	$14,%r13d,%r13d
2328	movl	%r14d,%ecx
2329	movl	%r11d,%r12d
2330	xorl	%r10d,%r13d
2331	shrdl	$9,%r14d,%r14d
2332	xorl	%eax,%r12d
2333	shrdl	$5,%r13d,%r13d
2334	xorl	%ecx,%r14d
2335	andl	%r10d,%r12d
2336	vpand	%xmm13,%xmm11,%xmm11
2337	vaesenc	%xmm10,%xmm9,%xmm9
2338	vmovdqu	224-128(%rdi),%xmm10
2339	xorl	%r10d,%r13d
2340	addl	56(%rsp),%ebx
2341	movl	%ecx,%r15d
2342	shrdl	$11,%r14d,%r14d
2343	xorl	%eax,%r12d
2344	xorl	%edx,%r15d
2345	shrdl	$6,%r13d,%r13d
2346	addl	%r12d,%ebx
2347	andl	%r15d,%esi
2348	xorl	%ecx,%r14d
2349	addl	%r13d,%ebx
2350	xorl	%edx,%esi
2351	addl	%ebx,%r9d
2352	shrdl	$2,%r14d,%r14d
2353	addl	%esi,%ebx
2354	movl	%r9d,%r13d
2355	addl	%ebx,%r14d
2356	shrdl	$14,%r13d,%r13d
2357	movl	%r14d,%ebx
2358	movl	%r10d,%r12d
2359	xorl	%r9d,%r13d
2360	shrdl	$9,%r14d,%r14d
2361	xorl	%r11d,%r12d
2362	shrdl	$5,%r13d,%r13d
2363	xorl	%ebx,%r14d
2364	andl	%r9d,%r12d
2365	vpor	%xmm11,%xmm8,%xmm8
2366	vaesenclast	%xmm10,%xmm9,%xmm11
2367	vmovdqu	0-128(%rdi),%xmm10
2368	xorl	%r9d,%r13d
2369	addl	60(%rsp),%eax
2370	movl	%ebx,%esi
2371	shrdl	$11,%r14d,%r14d
2372	xorl	%r11d,%r12d
2373	xorl	%ecx,%esi
2374	shrdl	$6,%r13d,%r13d
2375	addl	%r12d,%eax
2376	andl	%esi,%r15d
2377	xorl	%ebx,%r14d
2378	addl	%r13d,%eax
2379	xorl	%ecx,%r15d
2380	addl	%eax,%r8d
2381	shrdl	$2,%r14d,%r14d
2382	addl	%r15d,%eax
2383	movl	%r8d,%r13d
2384	addl	%eax,%r14d
2385	movq	64+0(%rsp),%r12
2386	movq	64+8(%rsp),%r13
2387	movq	64+40(%rsp),%r15
2388	movq	64+48(%rsp),%rsi
2389
2390	vpand	%xmm14,%xmm11,%xmm11
2391	movl	%r14d,%eax
2392	vpor	%xmm11,%xmm8,%xmm8
2393	vmovdqu	%xmm8,(%r12,%r13,1)
2394	leaq	16(%r12),%r12
2395
2396	addl	0(%r15),%eax
2397	addl	4(%r15),%ebx
2398	addl	8(%r15),%ecx
2399	addl	12(%r15),%edx
2400	addl	16(%r15),%r8d
2401	addl	20(%r15),%r9d
2402	addl	24(%r15),%r10d
2403	addl	28(%r15),%r11d
2404
2405	cmpq	64+16(%rsp),%r12
2406
2407	movl	%eax,0(%r15)
2408	movl	%ebx,4(%r15)
2409	movl	%ecx,8(%r15)
2410	movl	%edx,12(%r15)
2411	movl	%r8d,16(%r15)
2412	movl	%r9d,20(%r15)
2413	movl	%r10d,24(%r15)
2414	movl	%r11d,28(%r15)
2415	jb	.Lloop_avx
2416
2417	movq	64+32(%rsp),%r8
2418	movq	120(%rsp),%rsi
2419.cfi_def_cfa	%rsi,8
2420	vmovdqu	%xmm8,(%r8)
2421	vzeroall
2422	movq	-48(%rsi),%r15
2423.cfi_restore	%r15
2424	movq	-40(%rsi),%r14
2425.cfi_restore	%r14
2426	movq	-32(%rsi),%r13
2427.cfi_restore	%r13
2428	movq	-24(%rsi),%r12
2429.cfi_restore	%r12
2430	movq	-16(%rsi),%rbp
2431.cfi_restore	%rbp
2432	movq	-8(%rsi),%rbx
2433.cfi_restore	%rbx
2434	leaq	(%rsi),%rsp
2435.cfi_def_cfa_register	%rsp
2436.Lepilogue_avx:
2437	.byte	0xf3,0xc3
2438.cfi_endproc
2439.size	aesni_cbc_sha256_enc_avx,.-aesni_cbc_sha256_enc_avx
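/* AVX2/BMI2 code path of the AESNI-CBC+SHA256 stitch.  The SHA-256
   message schedule for two consecutive 64-byte blocks is carried in
   the two 128-bit lanes of %ymm0-%ymm3, the round function is built
   from rorx/andn, and a single AES-CBC stream is advanced with
   vaesenc on %xmm9.  The vaesenclast/vpand/vpor sequences blend the
   candidate final-round outputs through the masks loaded from
   K256+544, so the 10-, 12- and 14-round AES key schedules share one
   branch-free code path. */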
2440.type	aesni_cbc_sha256_enc_avx2,@function
2441.align	64
2442aesni_cbc_sha256_enc_avx2:
2443.cfi_startproc
2444.Lavx2_shortcut:
2445	movq	8(%rsp),%r10
2446	movq	%rsp,%rax
2447.cfi_def_cfa_register	%rax
2448	pushq	%rbx
2449.cfi_offset	%rbx,-16
2450	pushq	%rbp
2451.cfi_offset	%rbp,-24
2452	pushq	%r12
2453.cfi_offset	%r12,-32
2454	pushq	%r13
2455.cfi_offset	%r13,-40
2456	pushq	%r14
2457.cfi_offset	%r14,-48
2458	pushq	%r15
2459.cfi_offset	%r15,-56
2460	subq	$576,%rsp
2461	andq	$-1024,%rsp
2462	addq	$448,%rsp
2463
2464	shlq	$6,%rdx
2465	subq	%rdi,%rsi
2466	subq	%rdi,%r10
2467	addq	%rdi,%rdx
2468
2469
2470
2471	movq	%rdx,64+16(%rsp)
2472
2473	movq	%r8,64+32(%rsp)
2474	movq	%r9,64+40(%rsp)
2475	movq	%r10,64+48(%rsp)
2476	movq	%rax,120(%rsp)
2477.cfi_escape	0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
2478.Lprologue_avx2:
2479	vzeroall
2480
2481	movq	%rdi,%r13
2482	vpinsrq	$1,%rsi,%xmm15,%xmm15
2483	leaq	128(%rcx),%rdi
2484	leaq	K256+544(%rip),%r12
2485	movl	240-128(%rdi),%r14d
2486	movq	%r9,%r15
2487	movq	%r10,%rsi
2488	vmovdqu	(%r8),%xmm8
2489	leaq	-9(%r14),%r14
2490
2491	vmovdqa	0(%r12,%r14,8),%xmm14
2492	vmovdqa	16(%r12,%r14,8),%xmm13
2493	vmovdqa	32(%r12,%r14,8),%xmm12
2494
2495	subq	$-64,%r13
2496	movl	0(%r15),%eax
2497	leaq	(%rsi,%r13,1),%r12
2498	movl	4(%r15),%ebx
2499	cmpq	%rdx,%r13
2500	movl	8(%r15),%ecx
2501	cmoveq	%rsp,%r12
2502	movl	12(%r15),%edx
2503	movl	16(%r15),%r8d
2504	movl	20(%r15),%r9d
2505	movl	24(%r15),%r10d
2506	movl	28(%r15),%r11d
2507	vmovdqu	0-128(%rdi),%xmm10
2508	jmp	.Loop_avx2
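/* .Loop_avx2: load two consecutive 64-byte blocks of the hashed input
   into the low and high lanes of %ymm0-%ymm3 (the final iteration
   pairs the last block with scratch from the stack), byte-swap them
   with the mask at K256+512, and pre-add the K256 constants for
   rounds 0-15 onto the stack. */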
2509.align	16
2510.Loop_avx2:
2511	vmovdqa	K256+512(%rip),%ymm7
2512	vmovdqu	-64+0(%rsi,%r13,1),%xmm0
2513	vmovdqu	-64+16(%rsi,%r13,1),%xmm1
2514	vmovdqu	-64+32(%rsi,%r13,1),%xmm2
2515	vmovdqu	-64+48(%rsi,%r13,1),%xmm3
2516
2517	vinserti128	$1,(%r12),%ymm0,%ymm0
2518	vinserti128	$1,16(%r12),%ymm1,%ymm1
2519	vpshufb	%ymm7,%ymm0,%ymm0
2520	vinserti128	$1,32(%r12),%ymm2,%ymm2
2521	vpshufb	%ymm7,%ymm1,%ymm1
2522	vinserti128	$1,48(%r12),%ymm3,%ymm3
2523
2524	leaq	K256(%rip),%rbp
2525	vpshufb	%ymm7,%ymm2,%ymm2
2526	leaq	-64(%r13),%r13
2527	vpaddd	0(%rbp),%ymm0,%ymm4
2528	vpshufb	%ymm7,%ymm3,%ymm3
2529	vpaddd	32(%rbp),%ymm1,%ymm5
2530	vpaddd	64(%rbp),%ymm2,%ymm6
2531	vpaddd	96(%rbp),%ymm3,%ymm7
2532	vmovdqa	%ymm4,0(%rsp)
2533	xorl	%r14d,%r14d
2534	vmovdqa	%ymm5,32(%rsp)
2535
2536	movq	120(%rsp),%rsi
2537.cfi_def_cfa	%rsi,8
2538	leaq	-64(%rsp),%rsp
2539
2540
2541
2542	movq	%rsi,-8(%rsp)
2543.cfi_escape	0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2544	movl	%ebx,%esi
2545	vmovdqa	%ymm6,0(%rsp)
2546	xorl	%ecx,%esi
2547	vmovdqa	%ymm7,32(%rsp)
2548	movl	%r9d,%r12d
2549	subq	$-32*4,%rbp
2550	jmp	.Lavx2_00_47
2551
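/* .Lavx2_00_47: rounds 0-47.  Each pass expands one %ymm worth of the
   message schedule (the vpalignr/vpsrld/vpslld/vpsrlq/vpshufd chains
   compute sigma0/sigma1), runs the matching rorx/andn SHA-256 rounds,
   saves the next W+K row on the stack, and steps the AES-CBC rounds
   through the key schedule addressed by %rdi. */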
2552.align	16
2553.Lavx2_00_47:
2554	vmovdqu	(%r13),%xmm9
2555	vpinsrq	$0,%r13,%xmm15,%xmm15
2556	leaq	-64(%rsp),%rsp
2557.cfi_escape	0x0f,0x05,0x77,0x38,0x06,0x23,0x08
2558
2559	pushq	64-8(%rsp)
2560.cfi_escape	0x0f,0x05,0x77,0x00,0x06,0x23,0x08
2561	leaq	8(%rsp),%rsp
2562.cfi_escape	0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2563	vpalignr	$4,%ymm0,%ymm1,%ymm4
2564	addl	0+128(%rsp),%r11d
2565	andl	%r8d,%r12d
2566	rorxl	$25,%r8d,%r13d
2567	vpalignr	$4,%ymm2,%ymm3,%ymm7
2568	rorxl	$11,%r8d,%r15d
2569	leal	(%rax,%r14,1),%eax
2570	leal	(%r11,%r12,1),%r11d
2571	vpsrld	$7,%ymm4,%ymm6
2572	andnl	%r10d,%r8d,%r12d
2573	xorl	%r15d,%r13d
2574	rorxl	$6,%r8d,%r14d
2575	vpaddd	%ymm7,%ymm0,%ymm0
2576	leal	(%r11,%r12,1),%r11d
2577	xorl	%r14d,%r13d
2578	movl	%eax,%r15d
2579	vpsrld	$3,%ymm4,%ymm7
2580	rorxl	$22,%eax,%r12d
2581	leal	(%r11,%r13,1),%r11d
2582	xorl	%ebx,%r15d
2583	vpslld	$14,%ymm4,%ymm5
2584	rorxl	$13,%eax,%r14d
2585	rorxl	$2,%eax,%r13d
2586	leal	(%rdx,%r11,1),%edx
2587	vpxor	%ymm6,%ymm7,%ymm4
2588	andl	%r15d,%esi
2589	vpxor	%xmm10,%xmm9,%xmm9
2590	vmovdqu	16-128(%rdi),%xmm10
2591	xorl	%r12d,%r14d
2592	xorl	%ebx,%esi
2593	vpshufd	$250,%ymm3,%ymm7
2594	xorl	%r13d,%r14d
2595	leal	(%r11,%rsi,1),%r11d
2596	movl	%r8d,%r12d
2597	vpsrld	$11,%ymm6,%ymm6
2598	addl	4+128(%rsp),%r10d
2599	andl	%edx,%r12d
2600	rorxl	$25,%edx,%r13d
2601	vpxor	%ymm5,%ymm4,%ymm4
2602	rorxl	$11,%edx,%esi
2603	leal	(%r11,%r14,1),%r11d
2604	leal	(%r10,%r12,1),%r10d
2605	vpslld	$11,%ymm5,%ymm5
2606	andnl	%r9d,%edx,%r12d
2607	xorl	%esi,%r13d
2608	rorxl	$6,%edx,%r14d
2609	vpxor	%ymm6,%ymm4,%ymm4
2610	leal	(%r10,%r12,1),%r10d
2611	xorl	%r14d,%r13d
2612	movl	%r11d,%esi
2613	vpsrld	$10,%ymm7,%ymm6
2614	rorxl	$22,%r11d,%r12d
2615	leal	(%r10,%r13,1),%r10d
2616	xorl	%eax,%esi
2617	vpxor	%ymm5,%ymm4,%ymm4
2618	rorxl	$13,%r11d,%r14d
2619	rorxl	$2,%r11d,%r13d
2620	leal	(%rcx,%r10,1),%ecx
2621	vpsrlq	$17,%ymm7,%ymm7
2622	andl	%esi,%r15d
2623	vpxor	%xmm8,%xmm9,%xmm9
2624	xorl	%r12d,%r14d
2625	xorl	%eax,%r15d
2626	vpaddd	%ymm4,%ymm0,%ymm0
2627	xorl	%r13d,%r14d
2628	leal	(%r10,%r15,1),%r10d
2629	movl	%edx,%r12d
2630	vpxor	%ymm7,%ymm6,%ymm6
2631	addl	8+128(%rsp),%r9d
2632	andl	%ecx,%r12d
2633	rorxl	$25,%ecx,%r13d
2634	vpsrlq	$2,%ymm7,%ymm7
2635	rorxl	$11,%ecx,%r15d
2636	leal	(%r10,%r14,1),%r10d
2637	leal	(%r9,%r12,1),%r9d
2638	vpxor	%ymm7,%ymm6,%ymm6
2639	andnl	%r8d,%ecx,%r12d
2640	xorl	%r15d,%r13d
2641	rorxl	$6,%ecx,%r14d
2642	vpshufd	$132,%ymm6,%ymm6
2643	leal	(%r9,%r12,1),%r9d
2644	xorl	%r14d,%r13d
2645	movl	%r10d,%r15d
2646	vpsrldq	$8,%ymm6,%ymm6
2647	rorxl	$22,%r10d,%r12d
2648	leal	(%r9,%r13,1),%r9d
2649	xorl	%r11d,%r15d
2650	vpaddd	%ymm6,%ymm0,%ymm0
2651	rorxl	$13,%r10d,%r14d
2652	rorxl	$2,%r10d,%r13d
2653	leal	(%rbx,%r9,1),%ebx
2654	vpshufd	$80,%ymm0,%ymm7
2655	andl	%r15d,%esi
2656	vaesenc	%xmm10,%xmm9,%xmm9
2657	vmovdqu	32-128(%rdi),%xmm10
2658	xorl	%r12d,%r14d
2659	xorl	%r11d,%esi
2660	vpsrld	$10,%ymm7,%ymm6
2661	xorl	%r13d,%r14d
2662	leal	(%r9,%rsi,1),%r9d
2663	movl	%ecx,%r12d
2664	vpsrlq	$17,%ymm7,%ymm7
2665	addl	12+128(%rsp),%r8d
2666	andl	%ebx,%r12d
2667	rorxl	$25,%ebx,%r13d
2668	vpxor	%ymm7,%ymm6,%ymm6
2669	rorxl	$11,%ebx,%esi
2670	leal	(%r9,%r14,1),%r9d
2671	leal	(%r8,%r12,1),%r8d
2672	vpsrlq	$2,%ymm7,%ymm7
2673	andnl	%edx,%ebx,%r12d
2674	xorl	%esi,%r13d
2675	rorxl	$6,%ebx,%r14d
2676	vpxor	%ymm7,%ymm6,%ymm6
2677	leal	(%r8,%r12,1),%r8d
2678	xorl	%r14d,%r13d
2679	movl	%r9d,%esi
2680	vpshufd	$232,%ymm6,%ymm6
2681	rorxl	$22,%r9d,%r12d
2682	leal	(%r8,%r13,1),%r8d
2683	xorl	%r10d,%esi
2684	vpslldq	$8,%ymm6,%ymm6
2685	rorxl	$13,%r9d,%r14d
2686	rorxl	$2,%r9d,%r13d
2687	leal	(%rax,%r8,1),%eax
2688	vpaddd	%ymm6,%ymm0,%ymm0
2689	andl	%esi,%r15d
2690	vaesenc	%xmm10,%xmm9,%xmm9
2691	vmovdqu	48-128(%rdi),%xmm10
2692	xorl	%r12d,%r14d
2693	xorl	%r10d,%r15d
2694	vpaddd	0(%rbp),%ymm0,%ymm6
2695	xorl	%r13d,%r14d
2696	leal	(%r8,%r15,1),%r8d
2697	movl	%ebx,%r12d
2698	vmovdqa	%ymm6,0(%rsp)
2699	vpalignr	$4,%ymm1,%ymm2,%ymm4
2700	addl	32+128(%rsp),%edx
2701	andl	%eax,%r12d
2702	rorxl	$25,%eax,%r13d
2703	vpalignr	$4,%ymm3,%ymm0,%ymm7
2704	rorxl	$11,%eax,%r15d
2705	leal	(%r8,%r14,1),%r8d
2706	leal	(%rdx,%r12,1),%edx
2707	vpsrld	$7,%ymm4,%ymm6
2708	andnl	%ecx,%eax,%r12d
2709	xorl	%r15d,%r13d
2710	rorxl	$6,%eax,%r14d
2711	vpaddd	%ymm7,%ymm1,%ymm1
2712	leal	(%rdx,%r12,1),%edx
2713	xorl	%r14d,%r13d
2714	movl	%r8d,%r15d
2715	vpsrld	$3,%ymm4,%ymm7
2716	rorxl	$22,%r8d,%r12d
2717	leal	(%rdx,%r13,1),%edx
2718	xorl	%r9d,%r15d
2719	vpslld	$14,%ymm4,%ymm5
2720	rorxl	$13,%r8d,%r14d
2721	rorxl	$2,%r8d,%r13d
2722	leal	(%r11,%rdx,1),%r11d
2723	vpxor	%ymm6,%ymm7,%ymm4
2724	andl	%r15d,%esi
2725	vaesenc	%xmm10,%xmm9,%xmm9
2726	vmovdqu	64-128(%rdi),%xmm10
2727	xorl	%r12d,%r14d
2728	xorl	%r9d,%esi
2729	vpshufd	$250,%ymm0,%ymm7
2730	xorl	%r13d,%r14d
2731	leal	(%rdx,%rsi,1),%edx
2732	movl	%eax,%r12d
2733	vpsrld	$11,%ymm6,%ymm6
2734	addl	36+128(%rsp),%ecx
2735	andl	%r11d,%r12d
2736	rorxl	$25,%r11d,%r13d
2737	vpxor	%ymm5,%ymm4,%ymm4
2738	rorxl	$11,%r11d,%esi
2739	leal	(%rdx,%r14,1),%edx
2740	leal	(%rcx,%r12,1),%ecx
2741	vpslld	$11,%ymm5,%ymm5
2742	andnl	%ebx,%r11d,%r12d
2743	xorl	%esi,%r13d
2744	rorxl	$6,%r11d,%r14d
2745	vpxor	%ymm6,%ymm4,%ymm4
2746	leal	(%rcx,%r12,1),%ecx
2747	xorl	%r14d,%r13d
2748	movl	%edx,%esi
2749	vpsrld	$10,%ymm7,%ymm6
2750	rorxl	$22,%edx,%r12d
2751	leal	(%rcx,%r13,1),%ecx
2752	xorl	%r8d,%esi
2753	vpxor	%ymm5,%ymm4,%ymm4
2754	rorxl	$13,%edx,%r14d
2755	rorxl	$2,%edx,%r13d
2756	leal	(%r10,%rcx,1),%r10d
2757	vpsrlq	$17,%ymm7,%ymm7
2758	andl	%esi,%r15d
2759	vaesenc	%xmm10,%xmm9,%xmm9
2760	vmovdqu	80-128(%rdi),%xmm10
2761	xorl	%r12d,%r14d
2762	xorl	%r8d,%r15d
2763	vpaddd	%ymm4,%ymm1,%ymm1
2764	xorl	%r13d,%r14d
2765	leal	(%rcx,%r15,1),%ecx
2766	movl	%r11d,%r12d
2767	vpxor	%ymm7,%ymm6,%ymm6
2768	addl	40+128(%rsp),%ebx
2769	andl	%r10d,%r12d
2770	rorxl	$25,%r10d,%r13d
2771	vpsrlq	$2,%ymm7,%ymm7
2772	rorxl	$11,%r10d,%r15d
2773	leal	(%rcx,%r14,1),%ecx
2774	leal	(%rbx,%r12,1),%ebx
2775	vpxor	%ymm7,%ymm6,%ymm6
2776	andnl	%eax,%r10d,%r12d
2777	xorl	%r15d,%r13d
2778	rorxl	$6,%r10d,%r14d
2779	vpshufd	$132,%ymm6,%ymm6
2780	leal	(%rbx,%r12,1),%ebx
2781	xorl	%r14d,%r13d
2782	movl	%ecx,%r15d
2783	vpsrldq	$8,%ymm6,%ymm6
2784	rorxl	$22,%ecx,%r12d
2785	leal	(%rbx,%r13,1),%ebx
2786	xorl	%edx,%r15d
2787	vpaddd	%ymm6,%ymm1,%ymm1
2788	rorxl	$13,%ecx,%r14d
2789	rorxl	$2,%ecx,%r13d
2790	leal	(%r9,%rbx,1),%r9d
2791	vpshufd	$80,%ymm1,%ymm7
2792	andl	%r15d,%esi
2793	vaesenc	%xmm10,%xmm9,%xmm9
2794	vmovdqu	96-128(%rdi),%xmm10
2795	xorl	%r12d,%r14d
2796	xorl	%edx,%esi
2797	vpsrld	$10,%ymm7,%ymm6
2798	xorl	%r13d,%r14d
2799	leal	(%rbx,%rsi,1),%ebx
2800	movl	%r10d,%r12d
2801	vpsrlq	$17,%ymm7,%ymm7
2802	addl	44+128(%rsp),%eax
2803	andl	%r9d,%r12d
2804	rorxl	$25,%r9d,%r13d
2805	vpxor	%ymm7,%ymm6,%ymm6
2806	rorxl	$11,%r9d,%esi
2807	leal	(%rbx,%r14,1),%ebx
2808	leal	(%rax,%r12,1),%eax
2809	vpsrlq	$2,%ymm7,%ymm7
2810	andnl	%r11d,%r9d,%r12d
2811	xorl	%esi,%r13d
2812	rorxl	$6,%r9d,%r14d
2813	vpxor	%ymm7,%ymm6,%ymm6
2814	leal	(%rax,%r12,1),%eax
2815	xorl	%r14d,%r13d
2816	movl	%ebx,%esi
2817	vpshufd	$232,%ymm6,%ymm6
2818	rorxl	$22,%ebx,%r12d
2819	leal	(%rax,%r13,1),%eax
2820	xorl	%ecx,%esi
2821	vpslldq	$8,%ymm6,%ymm6
2822	rorxl	$13,%ebx,%r14d
2823	rorxl	$2,%ebx,%r13d
2824	leal	(%r8,%rax,1),%r8d
2825	vpaddd	%ymm6,%ymm1,%ymm1
2826	andl	%esi,%r15d
2827	vaesenc	%xmm10,%xmm9,%xmm9
2828	vmovdqu	112-128(%rdi),%xmm10
2829	xorl	%r12d,%r14d
2830	xorl	%ecx,%r15d
2831	vpaddd	32(%rbp),%ymm1,%ymm6
2832	xorl	%r13d,%r14d
2833	leal	(%rax,%r15,1),%eax
2834	movl	%r9d,%r12d
2835	vmovdqa	%ymm6,32(%rsp)
2836	leaq	-64(%rsp),%rsp
2837.cfi_escape	0x0f,0x05,0x77,0x38,0x06,0x23,0x08
2838
2839	pushq	64-8(%rsp)
2840.cfi_escape	0x0f,0x05,0x77,0x00,0x06,0x23,0x08
2841	leaq	8(%rsp),%rsp
2842.cfi_escape	0x0f,0x05,0x77,0x78,0x06,0x23,0x08
2843	vpalignr	$4,%ymm2,%ymm3,%ymm4
2844	addl	0+128(%rsp),%r11d
2845	andl	%r8d,%r12d
2846	rorxl	$25,%r8d,%r13d
2847	vpalignr	$4,%ymm0,%ymm1,%ymm7
2848	rorxl	$11,%r8d,%r15d
2849	leal	(%rax,%r14,1),%eax
2850	leal	(%r11,%r12,1),%r11d
2851	vpsrld	$7,%ymm4,%ymm6
2852	andnl	%r10d,%r8d,%r12d
2853	xorl	%r15d,%r13d
2854	rorxl	$6,%r8d,%r14d
2855	vpaddd	%ymm7,%ymm2,%ymm2
2856	leal	(%r11,%r12,1),%r11d
2857	xorl	%r14d,%r13d
2858	movl	%eax,%r15d
2859	vpsrld	$3,%ymm4,%ymm7
2860	rorxl	$22,%eax,%r12d
2861	leal	(%r11,%r13,1),%r11d
2862	xorl	%ebx,%r15d
2863	vpslld	$14,%ymm4,%ymm5
2864	rorxl	$13,%eax,%r14d
2865	rorxl	$2,%eax,%r13d
2866	leal	(%rdx,%r11,1),%edx
2867	vpxor	%ymm6,%ymm7,%ymm4
2868	andl	%r15d,%esi
2869	vaesenc	%xmm10,%xmm9,%xmm9
2870	vmovdqu	128-128(%rdi),%xmm10
2871	xorl	%r12d,%r14d
2872	xorl	%ebx,%esi
2873	vpshufd	$250,%ymm1,%ymm7
2874	xorl	%r13d,%r14d
2875	leal	(%r11,%rsi,1),%r11d
2876	movl	%r8d,%r12d
2877	vpsrld	$11,%ymm6,%ymm6
2878	addl	4+128(%rsp),%r10d
2879	andl	%edx,%r12d
2880	rorxl	$25,%edx,%r13d
2881	vpxor	%ymm5,%ymm4,%ymm4
2882	rorxl	$11,%edx,%esi
2883	leal	(%r11,%r14,1),%r11d
2884	leal	(%r10,%r12,1),%r10d
2885	vpslld	$11,%ymm5,%ymm5
2886	andnl	%r9d,%edx,%r12d
2887	xorl	%esi,%r13d
2888	rorxl	$6,%edx,%r14d
2889	vpxor	%ymm6,%ymm4,%ymm4
2890	leal	(%r10,%r12,1),%r10d
2891	xorl	%r14d,%r13d
2892	movl	%r11d,%esi
2893	vpsrld	$10,%ymm7,%ymm6
2894	rorxl	$22,%r11d,%r12d
2895	leal	(%r10,%r13,1),%r10d
2896	xorl	%eax,%esi
2897	vpxor	%ymm5,%ymm4,%ymm4
2898	rorxl	$13,%r11d,%r14d
2899	rorxl	$2,%r11d,%r13d
2900	leal	(%rcx,%r10,1),%ecx
2901	vpsrlq	$17,%ymm7,%ymm7
2902	andl	%esi,%r15d
2903	vaesenc	%xmm10,%xmm9,%xmm9
2904	vmovdqu	144-128(%rdi),%xmm10
2905	xorl	%r12d,%r14d
2906	xorl	%eax,%r15d
2907	vpaddd	%ymm4,%ymm2,%ymm2
2908	xorl	%r13d,%r14d
2909	leal	(%r10,%r15,1),%r10d
2910	movl	%edx,%r12d
2911	vpxor	%ymm7,%ymm6,%ymm6
2912	addl	8+128(%rsp),%r9d
2913	andl	%ecx,%r12d
2914	rorxl	$25,%ecx,%r13d
2915	vpsrlq	$2,%ymm7,%ymm7
2916	rorxl	$11,%ecx,%r15d
2917	leal	(%r10,%r14,1),%r10d
2918	leal	(%r9,%r12,1),%r9d
2919	vpxor	%ymm7,%ymm6,%ymm6
2920	andnl	%r8d,%ecx,%r12d
2921	xorl	%r15d,%r13d
2922	rorxl	$6,%ecx,%r14d
2923	vpshufd	$132,%ymm6,%ymm6
2924	leal	(%r9,%r12,1),%r9d
2925	xorl	%r14d,%r13d
2926	movl	%r10d,%r15d
2927	vpsrldq	$8,%ymm6,%ymm6
2928	rorxl	$22,%r10d,%r12d
2929	leal	(%r9,%r13,1),%r9d
2930	xorl	%r11d,%r15d
2931	vpaddd	%ymm6,%ymm2,%ymm2
2932	rorxl	$13,%r10d,%r14d
2933	rorxl	$2,%r10d,%r13d
2934	leal	(%rbx,%r9,1),%ebx
2935	vpshufd	$80,%ymm2,%ymm7
2936	andl	%r15d,%esi
2937	vaesenc	%xmm10,%xmm9,%xmm9
2938	vmovdqu	160-128(%rdi),%xmm10
2939	xorl	%r12d,%r14d
2940	xorl	%r11d,%esi
2941	vpsrld	$10,%ymm7,%ymm6
2942	xorl	%r13d,%r14d
2943	leal	(%r9,%rsi,1),%r9d
2944	movl	%ecx,%r12d
2945	vpsrlq	$17,%ymm7,%ymm7
2946	addl	12+128(%rsp),%r8d
2947	andl	%ebx,%r12d
2948	rorxl	$25,%ebx,%r13d
2949	vpxor	%ymm7,%ymm6,%ymm6
2950	rorxl	$11,%ebx,%esi
2951	leal	(%r9,%r14,1),%r9d
2952	leal	(%r8,%r12,1),%r8d
2953	vpsrlq	$2,%ymm7,%ymm7
2954	andnl	%edx,%ebx,%r12d
2955	xorl	%esi,%r13d
2956	rorxl	$6,%ebx,%r14d
2957	vpxor	%ymm7,%ymm6,%ymm6
2958	leal	(%r8,%r12,1),%r8d
2959	xorl	%r14d,%r13d
2960	movl	%r9d,%esi
2961	vpshufd	$232,%ymm6,%ymm6
2962	rorxl	$22,%r9d,%r12d
2963	leal	(%r8,%r13,1),%r8d
2964	xorl	%r10d,%esi
2965	vpslldq	$8,%ymm6,%ymm6
2966	rorxl	$13,%r9d,%r14d
2967	rorxl	$2,%r9d,%r13d
2968	leal	(%rax,%r8,1),%eax
2969	vpaddd	%ymm6,%ymm2,%ymm2
2970	andl	%esi,%r15d
2971	vaesenclast	%xmm10,%xmm9,%xmm11
2972	vaesenc	%xmm10,%xmm9,%xmm9
2973	vmovdqu	176-128(%rdi),%xmm10
2974	xorl	%r12d,%r14d
2975	xorl	%r10d,%r15d
2976	vpaddd	64(%rbp),%ymm2,%ymm6
2977	xorl	%r13d,%r14d
2978	leal	(%r8,%r15,1),%r8d
2979	movl	%ebx,%r12d
2980	vmovdqa	%ymm6,0(%rsp)
2981	vpalignr	$4,%ymm3,%ymm0,%ymm4
2982	addl	32+128(%rsp),%edx
2983	andl	%eax,%r12d
2984	rorxl	$25,%eax,%r13d
2985	vpalignr	$4,%ymm1,%ymm2,%ymm7
2986	rorxl	$11,%eax,%r15d
2987	leal	(%r8,%r14,1),%r8d
2988	leal	(%rdx,%r12,1),%edx
2989	vpsrld	$7,%ymm4,%ymm6
2990	andnl	%ecx,%eax,%r12d
2991	xorl	%r15d,%r13d
2992	rorxl	$6,%eax,%r14d
2993	vpaddd	%ymm7,%ymm3,%ymm3
2994	leal	(%rdx,%r12,1),%edx
2995	xorl	%r14d,%r13d
2996	movl	%r8d,%r15d
2997	vpsrld	$3,%ymm4,%ymm7
2998	rorxl	$22,%r8d,%r12d
2999	leal	(%rdx,%r13,1),%edx
3000	xorl	%r9d,%r15d
3001	vpslld	$14,%ymm4,%ymm5
3002	rorxl	$13,%r8d,%r14d
3003	rorxl	$2,%r8d,%r13d
3004	leal	(%r11,%rdx,1),%r11d
3005	vpxor	%ymm6,%ymm7,%ymm4
3006	andl	%r15d,%esi
3007	vpand	%xmm12,%xmm11,%xmm8
3008	vaesenc	%xmm10,%xmm9,%xmm9
3009	vmovdqu	192-128(%rdi),%xmm10
3010	xorl	%r12d,%r14d
3011	xorl	%r9d,%esi
3012	vpshufd	$250,%ymm2,%ymm7
3013	xorl	%r13d,%r14d
3014	leal	(%rdx,%rsi,1),%edx
3015	movl	%eax,%r12d
3016	vpsrld	$11,%ymm6,%ymm6
3017	addl	36+128(%rsp),%ecx
3018	andl	%r11d,%r12d
3019	rorxl	$25,%r11d,%r13d
3020	vpxor	%ymm5,%ymm4,%ymm4
3021	rorxl	$11,%r11d,%esi
3022	leal	(%rdx,%r14,1),%edx
3023	leal	(%rcx,%r12,1),%ecx
3024	vpslld	$11,%ymm5,%ymm5
3025	andnl	%ebx,%r11d,%r12d
3026	xorl	%esi,%r13d
3027	rorxl	$6,%r11d,%r14d
3028	vpxor	%ymm6,%ymm4,%ymm4
3029	leal	(%rcx,%r12,1),%ecx
3030	xorl	%r14d,%r13d
3031	movl	%edx,%esi
3032	vpsrld	$10,%ymm7,%ymm6
3033	rorxl	$22,%edx,%r12d
3034	leal	(%rcx,%r13,1),%ecx
3035	xorl	%r8d,%esi
3036	vpxor	%ymm5,%ymm4,%ymm4
3037	rorxl	$13,%edx,%r14d
3038	rorxl	$2,%edx,%r13d
3039	leal	(%r10,%rcx,1),%r10d
3040	vpsrlq	$17,%ymm7,%ymm7
3041	andl	%esi,%r15d
3042	vaesenclast	%xmm10,%xmm9,%xmm11
3043	vaesenc	%xmm10,%xmm9,%xmm9
3044	vmovdqu	208-128(%rdi),%xmm10
3045	xorl	%r12d,%r14d
3046	xorl	%r8d,%r15d
3047	vpaddd	%ymm4,%ymm3,%ymm3
3048	xorl	%r13d,%r14d
3049	leal	(%rcx,%r15,1),%ecx
3050	movl	%r11d,%r12d
3051	vpxor	%ymm7,%ymm6,%ymm6
3052	addl	40+128(%rsp),%ebx
3053	andl	%r10d,%r12d
3054	rorxl	$25,%r10d,%r13d
3055	vpsrlq	$2,%ymm7,%ymm7
3056	rorxl	$11,%r10d,%r15d
3057	leal	(%rcx,%r14,1),%ecx
3058	leal	(%rbx,%r12,1),%ebx
3059	vpxor	%ymm7,%ymm6,%ymm6
3060	andnl	%eax,%r10d,%r12d
3061	xorl	%r15d,%r13d
3062	rorxl	$6,%r10d,%r14d
3063	vpshufd	$132,%ymm6,%ymm6
3064	leal	(%rbx,%r12,1),%ebx
3065	xorl	%r14d,%r13d
3066	movl	%ecx,%r15d
3067	vpsrldq	$8,%ymm6,%ymm6
3068	rorxl	$22,%ecx,%r12d
3069	leal	(%rbx,%r13,1),%ebx
3070	xorl	%edx,%r15d
3071	vpaddd	%ymm6,%ymm3,%ymm3
3072	rorxl	$13,%ecx,%r14d
3073	rorxl	$2,%ecx,%r13d
3074	leal	(%r9,%rbx,1),%r9d
3075	vpshufd	$80,%ymm3,%ymm7
3076	andl	%r15d,%esi
3077	vpand	%xmm13,%xmm11,%xmm11
3078	vaesenc	%xmm10,%xmm9,%xmm9
3079	vmovdqu	224-128(%rdi),%xmm10
3080	xorl	%r12d,%r14d
3081	xorl	%edx,%esi
3082	vpsrld	$10,%ymm7,%ymm6
3083	xorl	%r13d,%r14d
3084	leal	(%rbx,%rsi,1),%ebx
3085	movl	%r10d,%r12d
3086	vpsrlq	$17,%ymm7,%ymm7
3087	addl	44+128(%rsp),%eax
3088	andl	%r9d,%r12d
3089	rorxl	$25,%r9d,%r13d
3090	vpxor	%ymm7,%ymm6,%ymm6
3091	rorxl	$11,%r9d,%esi
3092	leal	(%rbx,%r14,1),%ebx
3093	leal	(%rax,%r12,1),%eax
3094	vpsrlq	$2,%ymm7,%ymm7
3095	andnl	%r11d,%r9d,%r12d
3096	xorl	%esi,%r13d
3097	rorxl	$6,%r9d,%r14d
3098	vpxor	%ymm7,%ymm6,%ymm6
3099	leal	(%rax,%r12,1),%eax
3100	xorl	%r14d,%r13d
3101	movl	%ebx,%esi
3102	vpshufd	$232,%ymm6,%ymm6
3103	rorxl	$22,%ebx,%r12d
3104	leal	(%rax,%r13,1),%eax
3105	xorl	%ecx,%esi
3106	vpslldq	$8,%ymm6,%ymm6
3107	rorxl	$13,%ebx,%r14d
3108	rorxl	$2,%ebx,%r13d
3109	leal	(%r8,%rax,1),%r8d
3110	vpaddd	%ymm6,%ymm3,%ymm3
3111	andl	%esi,%r15d
3112	vpor	%xmm11,%xmm8,%xmm8
3113	vaesenclast	%xmm10,%xmm9,%xmm11
3114	vmovdqu	0-128(%rdi),%xmm10
3115	xorl	%r12d,%r14d
3116	xorl	%ecx,%r15d
3117	vpaddd	96(%rbp),%ymm3,%ymm6
3118	xorl	%r13d,%r14d
3119	leal	(%rax,%r15,1),%eax
3120	movl	%r9d,%r12d
3121	vmovdqa	%ymm6,32(%rsp)
3122	vmovq	%xmm15,%r13
3123	vpextrq	$1,%xmm15,%r15
3124	vpand	%xmm14,%xmm11,%xmm11
3125	vpor	%xmm11,%xmm8,%xmm8
3126	vmovdqu	%xmm8,(%r15,%r13,1)
3127	leaq	16(%r13),%r13
3128	leaq	128(%rbp),%rbp
3129	cmpb	$0,3(%rbp)
3130	jne	.Lavx2_00_47
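/* Rounds 48-63: the full W+K schedule is already on the stack, so
   only the scalar SHA-256 rounds remain, interleaved with the closing
   AES rounds and the masked selection and store of the finished
   ciphertext block. */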
3131	vmovdqu	(%r13),%xmm9
3132	vpinsrq	$0,%r13,%xmm15,%xmm15
3133	addl	0+64(%rsp),%r11d
3134	andl	%r8d,%r12d
3135	rorxl	$25,%r8d,%r13d
3136	rorxl	$11,%r8d,%r15d
3137	leal	(%rax,%r14,1),%eax
3138	leal	(%r11,%r12,1),%r11d
3139	andnl	%r10d,%r8d,%r12d
3140	xorl	%r15d,%r13d
3141	rorxl	$6,%r8d,%r14d
3142	leal	(%r11,%r12,1),%r11d
3143	xorl	%r14d,%r13d
3144	movl	%eax,%r15d
3145	rorxl	$22,%eax,%r12d
3146	leal	(%r11,%r13,1),%r11d
3147	xorl	%ebx,%r15d
3148	rorxl	$13,%eax,%r14d
3149	rorxl	$2,%eax,%r13d
3150	leal	(%rdx,%r11,1),%edx
3151	andl	%r15d,%esi
3152	vpxor	%xmm10,%xmm9,%xmm9
3153	vmovdqu	16-128(%rdi),%xmm10
3154	xorl	%r12d,%r14d
3155	xorl	%ebx,%esi
3156	xorl	%r13d,%r14d
3157	leal	(%r11,%rsi,1),%r11d
3158	movl	%r8d,%r12d
3159	addl	4+64(%rsp),%r10d
3160	andl	%edx,%r12d
3161	rorxl	$25,%edx,%r13d
3162	rorxl	$11,%edx,%esi
3163	leal	(%r11,%r14,1),%r11d
3164	leal	(%r10,%r12,1),%r10d
3165	andnl	%r9d,%edx,%r12d
3166	xorl	%esi,%r13d
3167	rorxl	$6,%edx,%r14d
3168	leal	(%r10,%r12,1),%r10d
3169	xorl	%r14d,%r13d
3170	movl	%r11d,%esi
3171	rorxl	$22,%r11d,%r12d
3172	leal	(%r10,%r13,1),%r10d
3173	xorl	%eax,%esi
3174	rorxl	$13,%r11d,%r14d
3175	rorxl	$2,%r11d,%r13d
3176	leal	(%rcx,%r10,1),%ecx
3177	andl	%esi,%r15d
3178	vpxor	%xmm8,%xmm9,%xmm9
3179	xorl	%r12d,%r14d
3180	xorl	%eax,%r15d
3181	xorl	%r13d,%r14d
3182	leal	(%r10,%r15,1),%r10d
3183	movl	%edx,%r12d
3184	addl	8+64(%rsp),%r9d
3185	andl	%ecx,%r12d
3186	rorxl	$25,%ecx,%r13d
3187	rorxl	$11,%ecx,%r15d
3188	leal	(%r10,%r14,1),%r10d
3189	leal	(%r9,%r12,1),%r9d
3190	andnl	%r8d,%ecx,%r12d
3191	xorl	%r15d,%r13d
3192	rorxl	$6,%ecx,%r14d
3193	leal	(%r9,%r12,1),%r9d
3194	xorl	%r14d,%r13d
3195	movl	%r10d,%r15d
3196	rorxl	$22,%r10d,%r12d
3197	leal	(%r9,%r13,1),%r9d
3198	xorl	%r11d,%r15d
3199	rorxl	$13,%r10d,%r14d
3200	rorxl	$2,%r10d,%r13d
3201	leal	(%rbx,%r9,1),%ebx
3202	andl	%r15d,%esi
3203	vaesenc	%xmm10,%xmm9,%xmm9
3204	vmovdqu	32-128(%rdi),%xmm10
3205	xorl	%r12d,%r14d
3206	xorl	%r11d,%esi
3207	xorl	%r13d,%r14d
3208	leal	(%r9,%rsi,1),%r9d
3209	movl	%ecx,%r12d
3210	addl	12+64(%rsp),%r8d
3211	andl	%ebx,%r12d
3212	rorxl	$25,%ebx,%r13d
3213	rorxl	$11,%ebx,%esi
3214	leal	(%r9,%r14,1),%r9d
3215	leal	(%r8,%r12,1),%r8d
3216	andnl	%edx,%ebx,%r12d
3217	xorl	%esi,%r13d
3218	rorxl	$6,%ebx,%r14d
3219	leal	(%r8,%r12,1),%r8d
3220	xorl	%r14d,%r13d
3221	movl	%r9d,%esi
3222	rorxl	$22,%r9d,%r12d
3223	leal	(%r8,%r13,1),%r8d
3224	xorl	%r10d,%esi
3225	rorxl	$13,%r9d,%r14d
3226	rorxl	$2,%r9d,%r13d
3227	leal	(%rax,%r8,1),%eax
3228	andl	%esi,%r15d
3229	vaesenc	%xmm10,%xmm9,%xmm9
3230	vmovdqu	48-128(%rdi),%xmm10
3231	xorl	%r12d,%r14d
3232	xorl	%r10d,%r15d
3233	xorl	%r13d,%r14d
3234	leal	(%r8,%r15,1),%r8d
3235	movl	%ebx,%r12d
3236	addl	32+64(%rsp),%edx
3237	andl	%eax,%r12d
3238	rorxl	$25,%eax,%r13d
3239	rorxl	$11,%eax,%r15d
3240	leal	(%r8,%r14,1),%r8d
3241	leal	(%rdx,%r12,1),%edx
3242	andnl	%ecx,%eax,%r12d
3243	xorl	%r15d,%r13d
3244	rorxl	$6,%eax,%r14d
3245	leal	(%rdx,%r12,1),%edx
3246	xorl	%r14d,%r13d
3247	movl	%r8d,%r15d
3248	rorxl	$22,%r8d,%r12d
3249	leal	(%rdx,%r13,1),%edx
3250	xorl	%r9d,%r15d
3251	rorxl	$13,%r8d,%r14d
3252	rorxl	$2,%r8d,%r13d
3253	leal	(%r11,%rdx,1),%r11d
3254	andl	%r15d,%esi
3255	vaesenc	%xmm10,%xmm9,%xmm9
3256	vmovdqu	64-128(%rdi),%xmm10
3257	xorl	%r12d,%r14d
3258	xorl	%r9d,%esi
3259	xorl	%r13d,%r14d
3260	leal	(%rdx,%rsi,1),%edx
3261	movl	%eax,%r12d
3262	addl	36+64(%rsp),%ecx
3263	andl	%r11d,%r12d
3264	rorxl	$25,%r11d,%r13d
3265	rorxl	$11,%r11d,%esi
3266	leal	(%rdx,%r14,1),%edx
3267	leal	(%rcx,%r12,1),%ecx
3268	andnl	%ebx,%r11d,%r12d
3269	xorl	%esi,%r13d
3270	rorxl	$6,%r11d,%r14d
3271	leal	(%rcx,%r12,1),%ecx
3272	xorl	%r14d,%r13d
3273	movl	%edx,%esi
3274	rorxl	$22,%edx,%r12d
3275	leal	(%rcx,%r13,1),%ecx
3276	xorl	%r8d,%esi
3277	rorxl	$13,%edx,%r14d
3278	rorxl	$2,%edx,%r13d
3279	leal	(%r10,%rcx,1),%r10d
3280	andl	%esi,%r15d
3281	vaesenc	%xmm10,%xmm9,%xmm9
3282	vmovdqu	80-128(%rdi),%xmm10
3283	xorl	%r12d,%r14d
3284	xorl	%r8d,%r15d
3285	xorl	%r13d,%r14d
3286	leal	(%rcx,%r15,1),%ecx
3287	movl	%r11d,%r12d
3288	addl	40+64(%rsp),%ebx
3289	andl	%r10d,%r12d
3290	rorxl	$25,%r10d,%r13d
3291	rorxl	$11,%r10d,%r15d
3292	leal	(%rcx,%r14,1),%ecx
3293	leal	(%rbx,%r12,1),%ebx
3294	andnl	%eax,%r10d,%r12d
3295	xorl	%r15d,%r13d
3296	rorxl	$6,%r10d,%r14d
3297	leal	(%rbx,%r12,1),%ebx
3298	xorl	%r14d,%r13d
3299	movl	%ecx,%r15d
3300	rorxl	$22,%ecx,%r12d
3301	leal	(%rbx,%r13,1),%ebx
3302	xorl	%edx,%r15d
3303	rorxl	$13,%ecx,%r14d
3304	rorxl	$2,%ecx,%r13d
3305	leal	(%r9,%rbx,1),%r9d
3306	andl	%r15d,%esi
3307	vaesenc	%xmm10,%xmm9,%xmm9
3308	vmovdqu	96-128(%rdi),%xmm10
3309	xorl	%r12d,%r14d
3310	xorl	%edx,%esi
3311	xorl	%r13d,%r14d
3312	leal	(%rbx,%rsi,1),%ebx
3313	movl	%r10d,%r12d
3314	addl	44+64(%rsp),%eax
3315	andl	%r9d,%r12d
3316	rorxl	$25,%r9d,%r13d
3317	rorxl	$11,%r9d,%esi
3318	leal	(%rbx,%r14,1),%ebx
3319	leal	(%rax,%r12,1),%eax
3320	andnl	%r11d,%r9d,%r12d
3321	xorl	%esi,%r13d
3322	rorxl	$6,%r9d,%r14d
3323	leal	(%rax,%r12,1),%eax
3324	xorl	%r14d,%r13d
3325	movl	%ebx,%esi
3326	rorxl	$22,%ebx,%r12d
3327	leal	(%rax,%r13,1),%eax
3328	xorl	%ecx,%esi
3329	rorxl	$13,%ebx,%r14d
3330	rorxl	$2,%ebx,%r13d
3331	leal	(%r8,%rax,1),%r8d
3332	andl	%esi,%r15d
3333	vaesenc	%xmm10,%xmm9,%xmm9
3334	vmovdqu	112-128(%rdi),%xmm10
3335	xorl	%r12d,%r14d
3336	xorl	%ecx,%r15d
3337	xorl	%r13d,%r14d
3338	leal	(%rax,%r15,1),%eax
3339	movl	%r9d,%r12d
3340	addl	0(%rsp),%r11d
3341	andl	%r8d,%r12d
3342	rorxl	$25,%r8d,%r13d
3343	rorxl	$11,%r8d,%r15d
3344	leal	(%rax,%r14,1),%eax
3345	leal	(%r11,%r12,1),%r11d
3346	andnl	%r10d,%r8d,%r12d
3347	xorl	%r15d,%r13d
3348	rorxl	$6,%r8d,%r14d
3349	leal	(%r11,%r12,1),%r11d
3350	xorl	%r14d,%r13d
3351	movl	%eax,%r15d
3352	rorxl	$22,%eax,%r12d
3353	leal	(%r11,%r13,1),%r11d
3354	xorl	%ebx,%r15d
3355	rorxl	$13,%eax,%r14d
3356	rorxl	$2,%eax,%r13d
3357	leal	(%rdx,%r11,1),%edx
3358	andl	%r15d,%esi
3359	vaesenc	%xmm10,%xmm9,%xmm9
3360	vmovdqu	128-128(%rdi),%xmm10
3361	xorl	%r12d,%r14d
3362	xorl	%ebx,%esi
3363	xorl	%r13d,%r14d
3364	leal	(%r11,%rsi,1),%r11d
3365	movl	%r8d,%r12d
3366	addl	4(%rsp),%r10d
3367	andl	%edx,%r12d
3368	rorxl	$25,%edx,%r13d
3369	rorxl	$11,%edx,%esi
3370	leal	(%r11,%r14,1),%r11d
3371	leal	(%r10,%r12,1),%r10d
3372	andnl	%r9d,%edx,%r12d
3373	xorl	%esi,%r13d
3374	rorxl	$6,%edx,%r14d
3375	leal	(%r10,%r12,1),%r10d
3376	xorl	%r14d,%r13d
3377	movl	%r11d,%esi
3378	rorxl	$22,%r11d,%r12d
3379	leal	(%r10,%r13,1),%r10d
3380	xorl	%eax,%esi
3381	rorxl	$13,%r11d,%r14d
3382	rorxl	$2,%r11d,%r13d
3383	leal	(%rcx,%r10,1),%ecx
3384	andl	%esi,%r15d
3385	vaesenc	%xmm10,%xmm9,%xmm9
3386	vmovdqu	144-128(%rdi),%xmm10
3387	xorl	%r12d,%r14d
3388	xorl	%eax,%r15d
3389	xorl	%r13d,%r14d
3390	leal	(%r10,%r15,1),%r10d
3391	movl	%edx,%r12d
3392	addl	8(%rsp),%r9d
3393	andl	%ecx,%r12d
3394	rorxl	$25,%ecx,%r13d
3395	rorxl	$11,%ecx,%r15d
3396	leal	(%r10,%r14,1),%r10d
3397	leal	(%r9,%r12,1),%r9d
3398	andnl	%r8d,%ecx,%r12d
3399	xorl	%r15d,%r13d
3400	rorxl	$6,%ecx,%r14d
3401	leal	(%r9,%r12,1),%r9d
3402	xorl	%r14d,%r13d
3403	movl	%r10d,%r15d
3404	rorxl	$22,%r10d,%r12d
3405	leal	(%r9,%r13,1),%r9d
3406	xorl	%r11d,%r15d
3407	rorxl	$13,%r10d,%r14d
3408	rorxl	$2,%r10d,%r13d
3409	leal	(%rbx,%r9,1),%ebx
3410	andl	%r15d,%esi
3411	vaesenc	%xmm10,%xmm9,%xmm9
3412	vmovdqu	160-128(%rdi),%xmm10
3413	xorl	%r12d,%r14d
3414	xorl	%r11d,%esi
3415	xorl	%r13d,%r14d
3416	leal	(%r9,%rsi,1),%r9d
3417	movl	%ecx,%r12d
3418	addl	12(%rsp),%r8d
3419	andl	%ebx,%r12d
3420	rorxl	$25,%ebx,%r13d
3421	rorxl	$11,%ebx,%esi
3422	leal	(%r9,%r14,1),%r9d
3423	leal	(%r8,%r12,1),%r8d
3424	andnl	%edx,%ebx,%r12d
3425	xorl	%esi,%r13d
3426	rorxl	$6,%ebx,%r14d
3427	leal	(%r8,%r12,1),%r8d
3428	xorl	%r14d,%r13d
3429	movl	%r9d,%esi
3430	rorxl	$22,%r9d,%r12d
3431	leal	(%r8,%r13,1),%r8d
3432	xorl	%r10d,%esi
3433	rorxl	$13,%r9d,%r14d
3434	rorxl	$2,%r9d,%r13d
3435	leal	(%rax,%r8,1),%eax
3436	andl	%esi,%r15d
3437	vaesenclast	%xmm10,%xmm9,%xmm11
3438	vaesenc	%xmm10,%xmm9,%xmm9
3439	vmovdqu	176-128(%rdi),%xmm10
3440	xorl	%r12d,%r14d
3441	xorl	%r10d,%r15d
3442	xorl	%r13d,%r14d
3443	leal	(%r8,%r15,1),%r8d
3444	movl	%ebx,%r12d
3445	addl	32(%rsp),%edx
3446	andl	%eax,%r12d
3447	rorxl	$25,%eax,%r13d
3448	rorxl	$11,%eax,%r15d
3449	leal	(%r8,%r14,1),%r8d
3450	leal	(%rdx,%r12,1),%edx
3451	andnl	%ecx,%eax,%r12d
3452	xorl	%r15d,%r13d
3453	rorxl	$6,%eax,%r14d
3454	leal	(%rdx,%r12,1),%edx
3455	xorl	%r14d,%r13d
3456	movl	%r8d,%r15d
3457	rorxl	$22,%r8d,%r12d
3458	leal	(%rdx,%r13,1),%edx
3459	xorl	%r9d,%r15d
3460	rorxl	$13,%r8d,%r14d
3461	rorxl	$2,%r8d,%r13d
3462	leal	(%r11,%rdx,1),%r11d
3463	andl	%r15d,%esi
3464	vpand	%xmm12,%xmm11,%xmm8
3465	vaesenc	%xmm10,%xmm9,%xmm9
3466	vmovdqu	192-128(%rdi),%xmm10
3467	xorl	%r12d,%r14d
3468	xorl	%r9d,%esi
3469	xorl	%r13d,%r14d
3470	leal	(%rdx,%rsi,1),%edx
3471	movl	%eax,%r12d
3472	addl	36(%rsp),%ecx
3473	andl	%r11d,%r12d
3474	rorxl	$25,%r11d,%r13d
3475	rorxl	$11,%r11d,%esi
3476	leal	(%rdx,%r14,1),%edx
3477	leal	(%rcx,%r12,1),%ecx
3478	andnl	%ebx,%r11d,%r12d
3479	xorl	%esi,%r13d
3480	rorxl	$6,%r11d,%r14d
3481	leal	(%rcx,%r12,1),%ecx
3482	xorl	%r14d,%r13d
3483	movl	%edx,%esi
3484	rorxl	$22,%edx,%r12d
3485	leal	(%rcx,%r13,1),%ecx
3486	xorl	%r8d,%esi
3487	rorxl	$13,%edx,%r14d
3488	rorxl	$2,%edx,%r13d
3489	leal	(%r10,%rcx,1),%r10d
3490	andl	%esi,%r15d
3491	vaesenclast	%xmm10,%xmm9,%xmm11
3492	vaesenc	%xmm10,%xmm9,%xmm9
3493	vmovdqu	208-128(%rdi),%xmm10
3494	xorl	%r12d,%r14d
3495	xorl	%r8d,%r15d
3496	xorl	%r13d,%r14d
3497	leal	(%rcx,%r15,1),%ecx
3498	movl	%r11d,%r12d
3499	addl	40(%rsp),%ebx
3500	andl	%r10d,%r12d
3501	rorxl	$25,%r10d,%r13d
3502	rorxl	$11,%r10d,%r15d
3503	leal	(%rcx,%r14,1),%ecx
3504	leal	(%rbx,%r12,1),%ebx
3505	andnl	%eax,%r10d,%r12d
3506	xorl	%r15d,%r13d
3507	rorxl	$6,%r10d,%r14d
3508	leal	(%rbx,%r12,1),%ebx
3509	xorl	%r14d,%r13d
3510	movl	%ecx,%r15d
3511	rorxl	$22,%ecx,%r12d
3512	leal	(%rbx,%r13,1),%ebx
3513	xorl	%edx,%r15d
3514	rorxl	$13,%ecx,%r14d
3515	rorxl	$2,%ecx,%r13d
3516	leal	(%r9,%rbx,1),%r9d
3517	andl	%r15d,%esi
3518	vpand	%xmm13,%xmm11,%xmm11
3519	vaesenc	%xmm10,%xmm9,%xmm9
3520	vmovdqu	224-128(%rdi),%xmm10
3521	xorl	%r12d,%r14d
3522	xorl	%edx,%esi
3523	xorl	%r13d,%r14d
3524	leal	(%rbx,%rsi,1),%ebx
3525	movl	%r10d,%r12d
3526	addl	44(%rsp),%eax
3527	andl	%r9d,%r12d
3528	rorxl	$25,%r9d,%r13d
3529	rorxl	$11,%r9d,%esi
3530	leal	(%rbx,%r14,1),%ebx
3531	leal	(%rax,%r12,1),%eax
3532	andnl	%r11d,%r9d,%r12d
3533	xorl	%esi,%r13d
3534	rorxl	$6,%r9d,%r14d
3535	leal	(%rax,%r12,1),%eax
3536	xorl	%r14d,%r13d
3537	movl	%ebx,%esi
3538	rorxl	$22,%ebx,%r12d
3539	leal	(%rax,%r13,1),%eax
3540	xorl	%ecx,%esi
3541	rorxl	$13,%ebx,%r14d
3542	rorxl	$2,%ebx,%r13d
3543	leal	(%r8,%rax,1),%r8d
3544	andl	%esi,%r15d
3545	vpor	%xmm11,%xmm8,%xmm8
3546	vaesenclast	%xmm10,%xmm9,%xmm11
3547	vmovdqu	0-128(%rdi),%xmm10
3548	xorl	%r12d,%r14d
3549	xorl	%ecx,%r15d
3550	xorl	%r13d,%r14d
3551	leal	(%rax,%r15,1),%eax
3552	movl	%r9d,%r12d
3553	vpextrq	$1,%xmm15,%r12
3554	vmovq	%xmm15,%r13
3555	movq	552(%rsp),%r15
3556	addl	%r14d,%eax
3557	leaq	448(%rsp),%rbp
3558
3559	vpand	%xmm14,%xmm11,%xmm11
3560	vpor	%xmm11,%xmm8,%xmm8
3561	vmovdqu	%xmm8,(%r12,%r13,1)
3562	leaq	16(%r13),%r13
3563
3564	addl	0(%r15),%eax
3565	addl	4(%r15),%ebx
3566	addl	8(%r15),%ecx
3567	addl	12(%r15),%edx
3568	addl	16(%r15),%r8d
3569	addl	20(%r15),%r9d
3570	addl	24(%r15),%r10d
3571	addl	28(%r15),%r11d
3572
3573	movl	%eax,0(%r15)
3574	movl	%ebx,4(%r15)
3575	movl	%ecx,8(%r15)
3576	movl	%edx,12(%r15)
3577	movl	%r8d,16(%r15)
3578	movl	%r9d,20(%r15)
3579	movl	%r10d,24(%r15)
3580	movl	%r11d,28(%r15)
3581
3582	cmpq	80(%rbp),%r13
3583	je	.Ldone_avx2
3584
3585	xorl	%r14d,%r14d
3586	movl	%ebx,%esi
3587	movl	%r9d,%r12d
3588	xorl	%ecx,%esi
3589	jmp	.Lower_avx2
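/* .Lower_avx2: second pass over the stacked schedule.  The 16(%rbp)
   offsets read the upper-lane W+K values, i.e. those of the second
   block of each pair, walking %rbp back down toward %rsp while the
   AES-CBC stream keeps encrypting the next input block. */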
3590.align	16
3591.Lower_avx2:
3592	vmovdqu	(%r13),%xmm9
3593	vpinsrq	$0,%r13,%xmm15,%xmm15
3594	addl	0+16(%rbp),%r11d
3595	andl	%r8d,%r12d
3596	rorxl	$25,%r8d,%r13d
3597	rorxl	$11,%r8d,%r15d
3598	leal	(%rax,%r14,1),%eax
3599	leal	(%r11,%r12,1),%r11d
3600	andnl	%r10d,%r8d,%r12d
3601	xorl	%r15d,%r13d
3602	rorxl	$6,%r8d,%r14d
3603	leal	(%r11,%r12,1),%r11d
3604	xorl	%r14d,%r13d
3605	movl	%eax,%r15d
3606	rorxl	$22,%eax,%r12d
3607	leal	(%r11,%r13,1),%r11d
3608	xorl	%ebx,%r15d
3609	rorxl	$13,%eax,%r14d
3610	rorxl	$2,%eax,%r13d
3611	leal	(%rdx,%r11,1),%edx
3612	andl	%r15d,%esi
3613	vpxor	%xmm10,%xmm9,%xmm9
3614	vmovdqu	16-128(%rdi),%xmm10
3615	xorl	%r12d,%r14d
3616	xorl	%ebx,%esi
3617	xorl	%r13d,%r14d
3618	leal	(%r11,%rsi,1),%r11d
3619	movl	%r8d,%r12d
3620	addl	4+16(%rbp),%r10d
3621	andl	%edx,%r12d
3622	rorxl	$25,%edx,%r13d
3623	rorxl	$11,%edx,%esi
3624	leal	(%r11,%r14,1),%r11d
3625	leal	(%r10,%r12,1),%r10d
3626	andnl	%r9d,%edx,%r12d
3627	xorl	%esi,%r13d
3628	rorxl	$6,%edx,%r14d
3629	leal	(%r10,%r12,1),%r10d
3630	xorl	%r14d,%r13d
3631	movl	%r11d,%esi
3632	rorxl	$22,%r11d,%r12d
3633	leal	(%r10,%r13,1),%r10d
3634	xorl	%eax,%esi
3635	rorxl	$13,%r11d,%r14d
3636	rorxl	$2,%r11d,%r13d
3637	leal	(%rcx,%r10,1),%ecx
3638	andl	%esi,%r15d
3639	vpxor	%xmm8,%xmm9,%xmm9
3640	xorl	%r12d,%r14d
3641	xorl	%eax,%r15d
3642	xorl	%r13d,%r14d
3643	leal	(%r10,%r15,1),%r10d
3644	movl	%edx,%r12d
3645	addl	8+16(%rbp),%r9d
3646	andl	%ecx,%r12d
3647	rorxl	$25,%ecx,%r13d
3648	rorxl	$11,%ecx,%r15d
3649	leal	(%r10,%r14,1),%r10d
3650	leal	(%r9,%r12,1),%r9d
3651	andnl	%r8d,%ecx,%r12d
3652	xorl	%r15d,%r13d
3653	rorxl	$6,%ecx,%r14d
3654	leal	(%r9,%r12,1),%r9d
3655	xorl	%r14d,%r13d
3656	movl	%r10d,%r15d
3657	rorxl	$22,%r10d,%r12d
3658	leal	(%r9,%r13,1),%r9d
3659	xorl	%r11d,%r15d
3660	rorxl	$13,%r10d,%r14d
3661	rorxl	$2,%r10d,%r13d
3662	leal	(%rbx,%r9,1),%ebx
3663	andl	%r15d,%esi
3664	vaesenc	%xmm10,%xmm9,%xmm9
3665	vmovdqu	32-128(%rdi),%xmm10
3666	xorl	%r12d,%r14d
3667	xorl	%r11d,%esi
3668	xorl	%r13d,%r14d
3669	leal	(%r9,%rsi,1),%r9d
3670	movl	%ecx,%r12d
3671	addl	12+16(%rbp),%r8d
3672	andl	%ebx,%r12d
3673	rorxl	$25,%ebx,%r13d
3674	rorxl	$11,%ebx,%esi
3675	leal	(%r9,%r14,1),%r9d
3676	leal	(%r8,%r12,1),%r8d
3677	andnl	%edx,%ebx,%r12d
3678	xorl	%esi,%r13d
3679	rorxl	$6,%ebx,%r14d
3680	leal	(%r8,%r12,1),%r8d
3681	xorl	%r14d,%r13d
3682	movl	%r9d,%esi
3683	rorxl	$22,%r9d,%r12d
3684	leal	(%r8,%r13,1),%r8d
3685	xorl	%r10d,%esi
3686	rorxl	$13,%r9d,%r14d
3687	rorxl	$2,%r9d,%r13d
3688	leal	(%rax,%r8,1),%eax
3689	andl	%esi,%r15d
3690	vaesenc	%xmm10,%xmm9,%xmm9
3691	vmovdqu	48-128(%rdi),%xmm10
3692	xorl	%r12d,%r14d
3693	xorl	%r10d,%r15d
3694	xorl	%r13d,%r14d
3695	leal	(%r8,%r15,1),%r8d
3696	movl	%ebx,%r12d
3697	addl	32+16(%rbp),%edx
3698	andl	%eax,%r12d
3699	rorxl	$25,%eax,%r13d
3700	rorxl	$11,%eax,%r15d
3701	leal	(%r8,%r14,1),%r8d
3702	leal	(%rdx,%r12,1),%edx
3703	andnl	%ecx,%eax,%r12d
3704	xorl	%r15d,%r13d
3705	rorxl	$6,%eax,%r14d
3706	leal	(%rdx,%r12,1),%edx
3707	xorl	%r14d,%r13d
3708	movl	%r8d,%r15d
3709	rorxl	$22,%r8d,%r12d
3710	leal	(%rdx,%r13,1),%edx
3711	xorl	%r9d,%r15d
3712	rorxl	$13,%r8d,%r14d
3713	rorxl	$2,%r8d,%r13d
3714	leal	(%r11,%rdx,1),%r11d
3715	andl	%r15d,%esi
3716	vaesenc	%xmm10,%xmm9,%xmm9
3717	vmovdqu	64-128(%rdi),%xmm10
3718	xorl	%r12d,%r14d
3719	xorl	%r9d,%esi
3720	xorl	%r13d,%r14d
3721	leal	(%rdx,%rsi,1),%edx
3722	movl	%eax,%r12d
3723	addl	36+16(%rbp),%ecx
3724	andl	%r11d,%r12d
3725	rorxl	$25,%r11d,%r13d
3726	rorxl	$11,%r11d,%esi
3727	leal	(%rdx,%r14,1),%edx
3728	leal	(%rcx,%r12,1),%ecx
3729	andnl	%ebx,%r11d,%r12d
3730	xorl	%esi,%r13d
3731	rorxl	$6,%r11d,%r14d
3732	leal	(%rcx,%r12,1),%ecx
3733	xorl	%r14d,%r13d
3734	movl	%edx,%esi
3735	rorxl	$22,%edx,%r12d
3736	leal	(%rcx,%r13,1),%ecx
3737	xorl	%r8d,%esi
3738	rorxl	$13,%edx,%r14d
3739	rorxl	$2,%edx,%r13d
3740	leal	(%r10,%rcx,1),%r10d
3741	andl	%esi,%r15d
3742	vaesenc	%xmm10,%xmm9,%xmm9
3743	vmovdqu	80-128(%rdi),%xmm10
3744	xorl	%r12d,%r14d
3745	xorl	%r8d,%r15d
3746	xorl	%r13d,%r14d
3747	leal	(%rcx,%r15,1),%ecx
3748	movl	%r11d,%r12d
3749	addl	40+16(%rbp),%ebx
3750	andl	%r10d,%r12d
3751	rorxl	$25,%r10d,%r13d
3752	rorxl	$11,%r10d,%r15d
3753	leal	(%rcx,%r14,1),%ecx
3754	leal	(%rbx,%r12,1),%ebx
3755	andnl	%eax,%r10d,%r12d
3756	xorl	%r15d,%r13d
3757	rorxl	$6,%r10d,%r14d
3758	leal	(%rbx,%r12,1),%ebx
3759	xorl	%r14d,%r13d
3760	movl	%ecx,%r15d
3761	rorxl	$22,%ecx,%r12d
3762	leal	(%rbx,%r13,1),%ebx
3763	xorl	%edx,%r15d
3764	rorxl	$13,%ecx,%r14d
3765	rorxl	$2,%ecx,%r13d
3766	leal	(%r9,%rbx,1),%r9d
3767	andl	%r15d,%esi
3768	vaesenc	%xmm10,%xmm9,%xmm9
3769	vmovdqu	96-128(%rdi),%xmm10
3770	xorl	%r12d,%r14d
3771	xorl	%edx,%esi
3772	xorl	%r13d,%r14d
3773	leal	(%rbx,%rsi,1),%ebx
3774	movl	%r10d,%r12d
3775	addl	44+16(%rbp),%eax
3776	andl	%r9d,%r12d
3777	rorxl	$25,%r9d,%r13d
3778	rorxl	$11,%r9d,%esi
3779	leal	(%rbx,%r14,1),%ebx
3780	leal	(%rax,%r12,1),%eax
3781	andnl	%r11d,%r9d,%r12d
3782	xorl	%esi,%r13d
3783	rorxl	$6,%r9d,%r14d
3784	leal	(%rax,%r12,1),%eax
3785	xorl	%r14d,%r13d
3786	movl	%ebx,%esi
3787	rorxl	$22,%ebx,%r12d
3788	leal	(%rax,%r13,1),%eax
3789	xorl	%ecx,%esi
3790	rorxl	$13,%ebx,%r14d
3791	rorxl	$2,%ebx,%r13d
3792	leal	(%r8,%rax,1),%r8d
3793	andl	%esi,%r15d
3794	vaesenc	%xmm10,%xmm9,%xmm9
3795	vmovdqu	112-128(%rdi),%xmm10
3796	xorl	%r12d,%r14d
3797	xorl	%ecx,%r15d
3798	xorl	%r13d,%r14d
3799	leal	(%rax,%r15,1),%eax
3800	movl	%r9d,%r12d
3801	leaq	-64(%rbp),%rbp
3802	addl	0+16(%rbp),%r11d
3803	andl	%r8d,%r12d
3804	rorxl	$25,%r8d,%r13d
3805	rorxl	$11,%r8d,%r15d
3806	leal	(%rax,%r14,1),%eax
3807	leal	(%r11,%r12,1),%r11d
3808	andnl	%r10d,%r8d,%r12d
3809	xorl	%r15d,%r13d
3810	rorxl	$6,%r8d,%r14d
3811	leal	(%r11,%r12,1),%r11d
3812	xorl	%r14d,%r13d
3813	movl	%eax,%r15d
3814	rorxl	$22,%eax,%r12d
3815	leal	(%r11,%r13,1),%r11d
3816	xorl	%ebx,%r15d
3817	rorxl	$13,%eax,%r14d
3818	rorxl	$2,%eax,%r13d
3819	leal	(%rdx,%r11,1),%edx
3820	andl	%r15d,%esi
3821	vaesenc	%xmm10,%xmm9,%xmm9
3822	vmovdqu	128-128(%rdi),%xmm10
3823	xorl	%r12d,%r14d
3824	xorl	%ebx,%esi
3825	xorl	%r13d,%r14d
3826	leal	(%r11,%rsi,1),%r11d
3827	movl	%r8d,%r12d
3828	addl	4+16(%rbp),%r10d
3829	andl	%edx,%r12d
3830	rorxl	$25,%edx,%r13d
3831	rorxl	$11,%edx,%esi
3832	leal	(%r11,%r14,1),%r11d
3833	leal	(%r10,%r12,1),%r10d
3834	andnl	%r9d,%edx,%r12d
3835	xorl	%esi,%r13d
3836	rorxl	$6,%edx,%r14d
3837	leal	(%r10,%r12,1),%r10d
3838	xorl	%r14d,%r13d
3839	movl	%r11d,%esi
3840	rorxl	$22,%r11d,%r12d
3841	leal	(%r10,%r13,1),%r10d
3842	xorl	%eax,%esi
3843	rorxl	$13,%r11d,%r14d
3844	rorxl	$2,%r11d,%r13d
3845	leal	(%rcx,%r10,1),%ecx
3846	andl	%esi,%r15d
3847	vaesenc	%xmm10,%xmm9,%xmm9
3848	vmovdqu	144-128(%rdi),%xmm10
3849	xorl	%r12d,%r14d
3850	xorl	%eax,%r15d
3851	xorl	%r13d,%r14d
3852	leal	(%r10,%r15,1),%r10d
3853	movl	%edx,%r12d
3854	addl	8+16(%rbp),%r9d
3855	andl	%ecx,%r12d
3856	rorxl	$25,%ecx,%r13d
3857	rorxl	$11,%ecx,%r15d
3858	leal	(%r10,%r14,1),%r10d
3859	leal	(%r9,%r12,1),%r9d
3860	andnl	%r8d,%ecx,%r12d
3861	xorl	%r15d,%r13d
3862	rorxl	$6,%ecx,%r14d
3863	leal	(%r9,%r12,1),%r9d
3864	xorl	%r14d,%r13d
3865	movl	%r10d,%r15d
3866	rorxl	$22,%r10d,%r12d
3867	leal	(%r9,%r13,1),%r9d
3868	xorl	%r11d,%r15d
3869	rorxl	$13,%r10d,%r14d
3870	rorxl	$2,%r10d,%r13d
3871	leal	(%rbx,%r9,1),%ebx
3872	andl	%r15d,%esi
3873	vaesenc	%xmm10,%xmm9,%xmm9
3874	vmovdqu	160-128(%rdi),%xmm10
3875	xorl	%r12d,%r14d
3876	xorl	%r11d,%esi
3877	xorl	%r13d,%r14d
3878	leal	(%r9,%rsi,1),%r9d
3879	movl	%ecx,%r12d
3880	addl	12+16(%rbp),%r8d
3881	andl	%ebx,%r12d
3882	rorxl	$25,%ebx,%r13d
3883	rorxl	$11,%ebx,%esi
3884	leal	(%r9,%r14,1),%r9d
3885	leal	(%r8,%r12,1),%r8d
3886	andnl	%edx,%ebx,%r12d
3887	xorl	%esi,%r13d
3888	rorxl	$6,%ebx,%r14d
3889	leal	(%r8,%r12,1),%r8d
3890	xorl	%r14d,%r13d
3891	movl	%r9d,%esi
3892	rorxl	$22,%r9d,%r12d
3893	leal	(%r8,%r13,1),%r8d
3894	xorl	%r10d,%esi
3895	rorxl	$13,%r9d,%r14d
3896	rorxl	$2,%r9d,%r13d
3897	leal	(%rax,%r8,1),%eax
3898	andl	%esi,%r15d
3899	vaesenclast	%xmm10,%xmm9,%xmm11
3900	vaesenc	%xmm10,%xmm9,%xmm9
3901	vmovdqu	176-128(%rdi),%xmm10
3902	xorl	%r12d,%r14d
3903	xorl	%r10d,%r15d
3904	xorl	%r13d,%r14d
3905	leal	(%r8,%r15,1),%r8d
3906	movl	%ebx,%r12d
3907	addl	32+16(%rbp),%edx
3908	andl	%eax,%r12d
3909	rorxl	$25,%eax,%r13d
3910	rorxl	$11,%eax,%r15d
3911	leal	(%r8,%r14,1),%r8d
3912	leal	(%rdx,%r12,1),%edx
3913	andnl	%ecx,%eax,%r12d
3914	xorl	%r15d,%r13d
3915	rorxl	$6,%eax,%r14d
3916	leal	(%rdx,%r12,1),%edx
3917	xorl	%r14d,%r13d
3918	movl	%r8d,%r15d
3919	rorxl	$22,%r8d,%r12d
3920	leal	(%rdx,%r13,1),%edx
3921	xorl	%r9d,%r15d
3922	rorxl	$13,%r8d,%r14d
3923	rorxl	$2,%r8d,%r13d
3924	leal	(%r11,%rdx,1),%r11d
3925	andl	%r15d,%esi
3926	vpand	%xmm12,%xmm11,%xmm8
3927	vaesenc	%xmm10,%xmm9,%xmm9
3928	vmovdqu	192-128(%rdi),%xmm10
3929	xorl	%r12d,%r14d
3930	xorl	%r9d,%esi
3931	xorl	%r13d,%r14d
3932	leal	(%rdx,%rsi,1),%edx
3933	movl	%eax,%r12d
3934	addl	36+16(%rbp),%ecx
3935	andl	%r11d,%r12d
3936	rorxl	$25,%r11d,%r13d
3937	rorxl	$11,%r11d,%esi
3938	leal	(%rdx,%r14,1),%edx
3939	leal	(%rcx,%r12,1),%ecx
3940	andnl	%ebx,%r11d,%r12d
3941	xorl	%esi,%r13d
3942	rorxl	$6,%r11d,%r14d
3943	leal	(%rcx,%r12,1),%ecx
3944	xorl	%r14d,%r13d
3945	movl	%edx,%esi
3946	rorxl	$22,%edx,%r12d
3947	leal	(%rcx,%r13,1),%ecx
3948	xorl	%r8d,%esi
3949	rorxl	$13,%edx,%r14d
3950	rorxl	$2,%edx,%r13d
3951	leal	(%r10,%rcx,1),%r10d
3952	andl	%esi,%r15d
3953	vaesenclast	%xmm10,%xmm9,%xmm11
3954	vaesenc	%xmm10,%xmm9,%xmm9
3955	vmovdqu	208-128(%rdi),%xmm10
3956	xorl	%r12d,%r14d
3957	xorl	%r8d,%r15d
3958	xorl	%r13d,%r14d
3959	leal	(%rcx,%r15,1),%ecx
3960	movl	%r11d,%r12d
3961	addl	40+16(%rbp),%ebx
3962	andl	%r10d,%r12d
3963	rorxl	$25,%r10d,%r13d
3964	rorxl	$11,%r10d,%r15d
3965	leal	(%rcx,%r14,1),%ecx
3966	leal	(%rbx,%r12,1),%ebx
3967	andnl	%eax,%r10d,%r12d
3968	xorl	%r15d,%r13d
3969	rorxl	$6,%r10d,%r14d
3970	leal	(%rbx,%r12,1),%ebx
3971	xorl	%r14d,%r13d
3972	movl	%ecx,%r15d
3973	rorxl	$22,%ecx,%r12d
3974	leal	(%rbx,%r13,1),%ebx
3975	xorl	%edx,%r15d
3976	rorxl	$13,%ecx,%r14d
3977	rorxl	$2,%ecx,%r13d
3978	leal	(%r9,%rbx,1),%r9d
3979	andl	%r15d,%esi
3980	vpand	%xmm13,%xmm11,%xmm11
3981	vaesenc	%xmm10,%xmm9,%xmm9
3982	vmovdqu	224-128(%rdi),%xmm10
3983	xorl	%r12d,%r14d
3984	xorl	%edx,%esi
3985	xorl	%r13d,%r14d
3986	leal	(%rbx,%rsi,1),%ebx
3987	movl	%r10d,%r12d
3988	addl	44+16(%rbp),%eax
3989	andl	%r9d,%r12d
3990	rorxl	$25,%r9d,%r13d
3991	rorxl	$11,%r9d,%esi
3992	leal	(%rbx,%r14,1),%ebx
3993	leal	(%rax,%r12,1),%eax
3994	andnl	%r11d,%r9d,%r12d
3995	xorl	%esi,%r13d
3996	rorxl	$6,%r9d,%r14d
3997	leal	(%rax,%r12,1),%eax
3998	xorl	%r14d,%r13d
3999	movl	%ebx,%esi
4000	rorxl	$22,%ebx,%r12d
4001	leal	(%rax,%r13,1),%eax
4002	xorl	%ecx,%esi
4003	rorxl	$13,%ebx,%r14d
4004	rorxl	$2,%ebx,%r13d
4005	leal	(%r8,%rax,1),%r8d
4006	andl	%esi,%r15d
4007	vpor	%xmm11,%xmm8,%xmm8
4008	vaesenclast	%xmm10,%xmm9,%xmm11
4009	vmovdqu	0-128(%rdi),%xmm10
4010	xorl	%r12d,%r14d
4011	xorl	%ecx,%r15d
4012	xorl	%r13d,%r14d
4013	leal	(%rax,%r15,1),%eax
4014	movl	%r9d,%r12d
4015	vmovq	%xmm15,%r13
4016	vpextrq	$1,%xmm15,%r15
4017	vpand	%xmm14,%xmm11,%xmm11
4018	vpor	%xmm11,%xmm8,%xmm8
4019	leaq	-64(%rbp),%rbp
4020	vmovdqu	%xmm8,(%r15,%r13,1)
4021	leaq	16(%r13),%r13
4022	cmpq	%rsp,%rbp
4023	jae	.Lower_avx2
4024
4025	movq	552(%rsp),%r15
4026	leaq	64(%r13),%r13
4027	movq	560(%rsp),%rsi
4028	addl	%r14d,%eax
4029	leaq	448(%rsp),%rsp
4030
4031	addl	0(%r15),%eax
4032	addl	4(%r15),%ebx
4033	addl	8(%r15),%ecx
4034	addl	12(%r15),%edx
4035	addl	16(%r15),%r8d
4036	addl	20(%r15),%r9d
4037	addl	24(%r15),%r10d
4038	leaq	(%rsi,%r13,1),%r12
4039	addl	28(%r15),%r11d
4040
4041	cmpq	64+16(%rsp),%r13
4042
4043	movl	%eax,0(%r15)
4044	cmoveq	%rsp,%r12
4045	movl	%ebx,4(%r15)
4046	movl	%ecx,8(%r15)
4047	movl	%edx,12(%r15)
4048	movl	%r8d,16(%r15)
4049	movl	%r9d,20(%r15)
4050	movl	%r10d,24(%r15)
4051	movl	%r11d,28(%r15)
4052
4053	jbe	.Loop_avx2
4054	leaq	(%rsp),%rbp
4055
4056
4057.cfi_escape	0x0f,0x06,0x76,0xf8,0x00,0x06,0x23,0x08
4058
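/* .Ldone_avx2: write the CBC chaining value (the last ciphertext
   block in %xmm8) back to the IV buffer, clear the vector registers
   and restore the callee-saved registers. */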
4059.Ldone_avx2:
4060	movq	64+32(%rbp),%r8
4061	movq	64+56(%rbp),%rsi
4062.cfi_def_cfa	%rsi,8
4063	vmovdqu	%xmm8,(%r8)
4064	vzeroall
4065	movq	-48(%rsi),%r15
4066.cfi_restore	%r15
4067	movq	-40(%rsi),%r14
4068.cfi_restore	%r14
4069	movq	-32(%rsi),%r13
4070.cfi_restore	%r13
4071	movq	-24(%rsi),%r12
4072.cfi_restore	%r12
4073	movq	-16(%rsi),%rbp
4074.cfi_restore	%rbp
4075	movq	-8(%rsi),%rbx
4076.cfi_restore	%rbx
4077	leaq	(%rsi),%rsp
4078.cfi_def_cfa_register	%rsp
4079.Lepilogue_avx2:
4080	.byte	0xf3,0xc3
4081.cfi_endproc
4082.size	aesni_cbc_sha256_enc_avx2,.-aesni_cbc_sha256_enc_avx2
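/* SHA extension (SHA-NI) code path.  The SHA-256 rounds are issued as
   sha256rnds2/sha256msg1/sha256msg2, emitted here as raw .byte
   sequences, and interleaved with aesenc for the CBC stream; every
   .Loop_shaext iteration hashes and encrypts one 64-byte block. */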
4083.type	aesni_cbc_sha256_enc_shaext,@function
4084.align	32
4085aesni_cbc_sha256_enc_shaext:
4086.cfi_startproc
4087	movq	8(%rsp),%r10
4088	leaq	K256+128(%rip),%rax
4089	movdqu	(%r9),%xmm1
4090	movdqu	16(%r9),%xmm2
4091	movdqa	512-128(%rax),%xmm3
4092
4093	movl	240(%rcx),%r11d
4094	subq	%rdi,%rsi
4095	movups	(%rcx),%xmm15
4096	movups	(%r8),%xmm6
4097	movups	16(%rcx),%xmm4
4098	leaq	112(%rcx),%rcx
4099
4100	pshufd	$0x1b,%xmm1,%xmm0
4101	pshufd	$0xb1,%xmm1,%xmm1
4102	pshufd	$0x1b,%xmm2,%xmm2
4103	movdqa	%xmm3,%xmm7
4104.byte	102,15,58,15,202,8
4105	punpcklqdq	%xmm0,%xmm2
4106
4107	jmp	.Loop_shaext
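/* Within each block the cmpl $11,%r11d ladders pick the 10-, 12- or
   14-round AES tail to match the key length before aesenclast. */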
4108
4109.align	16
4110.Loop_shaext:
4111	movdqu	(%r10),%xmm10
4112	movdqu	16(%r10),%xmm11
4113	movdqu	32(%r10),%xmm12
4114.byte	102,68,15,56,0,211
4115	movdqu	48(%r10),%xmm13
4116
4117	movdqa	0-128(%rax),%xmm0
4118	paddd	%xmm10,%xmm0
4119.byte	102,68,15,56,0,219
4120	movdqa	%xmm2,%xmm9
4121	movdqa	%xmm1,%xmm8
4122	movups	0(%rdi),%xmm14
4123	xorps	%xmm15,%xmm14
4124	xorps	%xmm14,%xmm6
4125	movups	-80(%rcx),%xmm5
4126	aesenc	%xmm4,%xmm6
4127.byte	15,56,203,209
4128	pshufd	$0x0e,%xmm0,%xmm0
4129	movups	-64(%rcx),%xmm4
4130	aesenc	%xmm5,%xmm6
4131.byte	15,56,203,202
4132
4133	movdqa	32-128(%rax),%xmm0
4134	paddd	%xmm11,%xmm0
4135.byte	102,68,15,56,0,227
4136	leaq	64(%r10),%r10
4137	movups	-48(%rcx),%xmm5
4138	aesenc	%xmm4,%xmm6
4139.byte	15,56,203,209
4140	pshufd	$0x0e,%xmm0,%xmm0
4141	movups	-32(%rcx),%xmm4
4142	aesenc	%xmm5,%xmm6
4143.byte	15,56,203,202
4144
4145	movdqa	64-128(%rax),%xmm0
4146	paddd	%xmm12,%xmm0
4147.byte	102,68,15,56,0,235
4148.byte	69,15,56,204,211
4149	movups	-16(%rcx),%xmm5
4150	aesenc	%xmm4,%xmm6
4151.byte	15,56,203,209
4152	pshufd	$0x0e,%xmm0,%xmm0
4153	movdqa	%xmm13,%xmm3
4154.byte	102,65,15,58,15,220,4
4155	paddd	%xmm3,%xmm10
4156	movups	0(%rcx),%xmm4
4157	aesenc	%xmm5,%xmm6
4158.byte	15,56,203,202
4159
4160	movdqa	96-128(%rax),%xmm0
4161	paddd	%xmm13,%xmm0
4162.byte	69,15,56,205,213
4163.byte	69,15,56,204,220
4164	movups	16(%rcx),%xmm5
4165	aesenc	%xmm4,%xmm6
4166.byte	15,56,203,209
4167	pshufd	$0x0e,%xmm0,%xmm0
4168	movups	32(%rcx),%xmm4
4169	aesenc	%xmm5,%xmm6
4170	movdqa	%xmm10,%xmm3
4171.byte	102,65,15,58,15,221,4
4172	paddd	%xmm3,%xmm11
4173.byte	15,56,203,202
4174	movdqa	128-128(%rax),%xmm0
4175	paddd	%xmm10,%xmm0
4176.byte	69,15,56,205,218
4177.byte	69,15,56,204,229
4178	movups	48(%rcx),%xmm5
4179	aesenc	%xmm4,%xmm6
4180.byte	15,56,203,209
4181	pshufd	$0x0e,%xmm0,%xmm0
4182	movdqa	%xmm11,%xmm3
4183.byte	102,65,15,58,15,218,4
4184	paddd	%xmm3,%xmm12
4185	cmpl	$11,%r11d
4186	jb	.Laesenclast1
4187	movups	64(%rcx),%xmm4
4188	aesenc	%xmm5,%xmm6
4189	movups	80(%rcx),%xmm5
4190	aesenc	%xmm4,%xmm6
4191	je	.Laesenclast1
4192	movups	96(%rcx),%xmm4
4193	aesenc	%xmm5,%xmm6
4194	movups	112(%rcx),%xmm5
4195	aesenc	%xmm4,%xmm6
4196.Laesenclast1:
4197	aesenclast	%xmm5,%xmm6
4198	movups	16-112(%rcx),%xmm4
4199	nop
4200.byte	15,56,203,202
4201	movups	16(%rdi),%xmm14
4202	xorps	%xmm15,%xmm14
4203	movups	%xmm6,0(%rsi,%rdi,1)
4204	xorps	%xmm14,%xmm6
4205	movups	-80(%rcx),%xmm5
4206	aesenc	%xmm4,%xmm6
4207	movdqa	160-128(%rax),%xmm0
4208	paddd	%xmm11,%xmm0
4209.byte	69,15,56,205,227
4210.byte	69,15,56,204,234
4211	movups	-64(%rcx),%xmm4
4212	aesenc	%xmm5,%xmm6
4213.byte	15,56,203,209
4214	pshufd	$0x0e,%xmm0,%xmm0
4215	movdqa	%xmm12,%xmm3
4216.byte	102,65,15,58,15,219,4
4217	paddd	%xmm3,%xmm13
4218	movups	-48(%rcx),%xmm5
4219	aesenc	%xmm4,%xmm6
4220.byte	15,56,203,202
4221	movdqa	192-128(%rax),%xmm0
4222	paddd	%xmm12,%xmm0
4223.byte	69,15,56,205,236
4224.byte	69,15,56,204,211
4225	movups	-32(%rcx),%xmm4
4226	aesenc	%xmm5,%xmm6
4227.byte	15,56,203,209
4228	pshufd	$0x0e,%xmm0,%xmm0
4229	movdqa	%xmm13,%xmm3
4230.byte	102,65,15,58,15,220,4
4231	paddd	%xmm3,%xmm10
4232	movups	-16(%rcx),%xmm5
4233	aesenc	%xmm4,%xmm6
4234.byte	15,56,203,202
4235	movdqa	224-128(%rax),%xmm0
4236	paddd	%xmm13,%xmm0
4237.byte	69,15,56,205,213
4238.byte	69,15,56,204,220
4239	movups	0(%rcx),%xmm4
4240	aesenc	%xmm5,%xmm6
4241.byte	15,56,203,209
4242	pshufd	$0x0e,%xmm0,%xmm0
4243	movdqa	%xmm10,%xmm3
4244.byte	102,65,15,58,15,221,4
4245	paddd	%xmm3,%xmm11
4246	movups	16(%rcx),%xmm5
4247	aesenc	%xmm4,%xmm6
4248.byte	15,56,203,202
4249	movdqa	256-128(%rax),%xmm0
4250	paddd	%xmm10,%xmm0
4251.byte	69,15,56,205,218
4252.byte	69,15,56,204,229
4253	movups	32(%rcx),%xmm4
4254	aesenc	%xmm5,%xmm6
4255.byte	15,56,203,209
4256	pshufd	$0x0e,%xmm0,%xmm0
4257	movdqa	%xmm11,%xmm3
4258.byte	102,65,15,58,15,218,4
4259	paddd	%xmm3,%xmm12
4260	movups	48(%rcx),%xmm5
4261	aesenc	%xmm4,%xmm6
4262	cmpl	$11,%r11d
4263	jb	.Laesenclast2
4264	movups	64(%rcx),%xmm4
4265	aesenc	%xmm5,%xmm6
4266	movups	80(%rcx),%xmm5
4267	aesenc	%xmm4,%xmm6
4268	je	.Laesenclast2
4269	movups	96(%rcx),%xmm4
4270	aesenc	%xmm5,%xmm6
4271	movups	112(%rcx),%xmm5
4272	aesenc	%xmm4,%xmm6
4273.Laesenclast2:
4274	aesenclast	%xmm5,%xmm6
4275	movups	16-112(%rcx),%xmm4
4276	nop
4277.byte	15,56,203,202
4278	movups	32(%rdi),%xmm14
4279	xorps	%xmm15,%xmm14
4280	movups	%xmm6,16(%rsi,%rdi,1)
4281	xorps	%xmm14,%xmm6
4282	movups	-80(%rcx),%xmm5
4283	aesenc	%xmm4,%xmm6
4284	movdqa	288-128(%rax),%xmm0
4285	paddd	%xmm11,%xmm0
4286.byte	69,15,56,205,227
4287.byte	69,15,56,204,234
4288	movups	-64(%rcx),%xmm4
4289	aesenc	%xmm5,%xmm6
4290.byte	15,56,203,209
4291	pshufd	$0x0e,%xmm0,%xmm0
4292	movdqa	%xmm12,%xmm3
4293.byte	102,65,15,58,15,219,4
4294	paddd	%xmm3,%xmm13
4295	movups	-48(%rcx),%xmm5
4296	aesenc	%xmm4,%xmm6
4297.byte	15,56,203,202
4298	movdqa	320-128(%rax),%xmm0
4299	paddd	%xmm12,%xmm0
4300.byte	69,15,56,205,236
4301.byte	69,15,56,204,211
4302	movups	-32(%rcx),%xmm4
4303	aesenc	%xmm5,%xmm6
4304.byte	15,56,203,209
4305	pshufd	$0x0e,%xmm0,%xmm0
4306	movdqa	%xmm13,%xmm3
4307.byte	102,65,15,58,15,220,4
4308	paddd	%xmm3,%xmm10
4309	movups	-16(%rcx),%xmm5
4310	aesenc	%xmm4,%xmm6
4311.byte	15,56,203,202
4312	movdqa	352-128(%rax),%xmm0
4313	paddd	%xmm13,%xmm0
4314.byte	69,15,56,205,213
4315.byte	69,15,56,204,220
4316	movups	0(%rcx),%xmm4
4317	aesenc	%xmm5,%xmm6
4318.byte	15,56,203,209
4319	pshufd	$0x0e,%xmm0,%xmm0
4320	movdqa	%xmm10,%xmm3
4321.byte	102,65,15,58,15,221,4
4322	paddd	%xmm3,%xmm11
4323	movups	16(%rcx),%xmm5
4324	aesenc	%xmm4,%xmm6
4325.byte	15,56,203,202
4326	movdqa	384-128(%rax),%xmm0
4327	paddd	%xmm10,%xmm0
4328.byte	69,15,56,205,218
4329.byte	69,15,56,204,229
4330	movups	32(%rcx),%xmm4
4331	aesenc	%xmm5,%xmm6
4332.byte	15,56,203,209
4333	pshufd	$0x0e,%xmm0,%xmm0
4334	movdqa	%xmm11,%xmm3
4335.byte	102,65,15,58,15,218,4
4336	paddd	%xmm3,%xmm12
4337	movups	48(%rcx),%xmm5
4338	aesenc	%xmm4,%xmm6
4339.byte	15,56,203,202
4340	movdqa	416-128(%rax),%xmm0
4341	paddd	%xmm11,%xmm0
4342.byte	69,15,56,205,227
4343.byte	69,15,56,204,234
4344	cmpl	$11,%r11d
4345	jb	.Laesenclast3
4346	movups	64(%rcx),%xmm4
4347	aesenc	%xmm5,%xmm6
4348	movups	80(%rcx),%xmm5
4349	aesenc	%xmm4,%xmm6
4350	je	.Laesenclast3
4351	movups	96(%rcx),%xmm4
4352	aesenc	%xmm5,%xmm6
4353	movups	112(%rcx),%xmm5
4354	aesenc	%xmm4,%xmm6
4355.Laesenclast3:
4356	aesenclast	%xmm5,%xmm6
4357	movups	16-112(%rcx),%xmm4
4358	nop
4359.byte	15,56,203,209
4360	pshufd	$0x0e,%xmm0,%xmm0
4361	movdqa	%xmm12,%xmm3
4362.byte	102,65,15,58,15,219,4
4363	paddd	%xmm3,%xmm13
4364	movups	48(%rdi),%xmm14
4365	xorps	%xmm15,%xmm14
4366	movups	%xmm6,32(%rsi,%rdi,1)
4367	xorps	%xmm14,%xmm6
4368	movups	-80(%rcx),%xmm5
4369	aesenc	%xmm4,%xmm6
4370	movups	-64(%rcx),%xmm4
4371	aesenc	%xmm5,%xmm6
4372.byte	15,56,203,202
4373
4374	movdqa	448-128(%rax),%xmm0
4375	paddd	%xmm12,%xmm0
4376.byte	69,15,56,205,236
4377	movdqa	%xmm7,%xmm3
4378	movups	-48(%rcx),%xmm5
4379	aesenc	%xmm4,%xmm6
4380.byte	15,56,203,209
4381	pshufd	$0x0e,%xmm0,%xmm0
4382	movups	-32(%rcx),%xmm4
4383	aesenc	%xmm5,%xmm6
4384.byte	15,56,203,202
4385
4386	movdqa	480-128(%rax),%xmm0
4387	paddd	%xmm13,%xmm0
4388	movups	-16(%rcx),%xmm5
4389	aesenc	%xmm4,%xmm6
4390	movups	0(%rcx),%xmm4
4391	aesenc	%xmm5,%xmm6
4392.byte	15,56,203,209
4393	pshufd	$0x0e,%xmm0,%xmm0
4394	movups	16(%rcx),%xmm5
4395	aesenc	%xmm4,%xmm6
4396.byte	15,56,203,202
4397
4398	movups	32(%rcx),%xmm4
4399	aesenc	%xmm5,%xmm6
4400	movups	48(%rcx),%xmm5
4401	aesenc	%xmm4,%xmm6
4402	cmpl	$11,%r11d
4403	jb	.Laesenclast4
4404	movups	64(%rcx),%xmm4
4405	aesenc	%xmm5,%xmm6
4406	movups	80(%rcx),%xmm5
4407	aesenc	%xmm4,%xmm6
4408	je	.Laesenclast4
4409	movups	96(%rcx),%xmm4
4410	aesenc	%xmm5,%xmm6
4411	movups	112(%rcx),%xmm5
4412	aesenc	%xmm4,%xmm6
4413.Laesenclast4:
4414	aesenclast	%xmm5,%xmm6
4415	movups	16-112(%rcx),%xmm4
4416	nop
4417
4418	paddd	%xmm9,%xmm2
4419	paddd	%xmm8,%xmm1
4420
4421	decq	%rdx
4422	movups	%xmm6,48(%rsi,%rdi,1)
4423	leaq	64(%rdi),%rdi
4424	jnz	.Loop_shaext
4425
4426	pshufd	$0xb1,%xmm2,%xmm2
4427	pshufd	$0x1b,%xmm1,%xmm3
4428	pshufd	$0xb1,%xmm1,%xmm1
4429	punpckhqdq	%xmm2,%xmm1
4430.byte	102,15,58,15,211,8
4431
4432	movups	%xmm6,(%r8)
4433	movdqu	%xmm1,(%r9)
4434	movdqu	%xmm2,16(%r9)
4435	.byte	0xf3,0xc3
4436.cfi_endproc
4437.size	aesni_cbc_sha256_enc_shaext,.-aesni_cbc_sha256_enc_shaext
4438