Lines Matching +full:1 +full:- +full:16
1 /* Do not modify. This file is auto-generated from aesni-x86.pl. */
6 .align 16
20 movups 16(%edx),%xmm1
27 leal 16(%edx),%edx
35 .size aesni_encrypt,.-.L_aesni_encrypt_begin
38 .align 16
52 movups 16(%edx),%xmm1
59 leal 16(%edx),%edx
67 .size aesni_decrypt,.-.L_aesni_decrypt_begin
69 .align 16
78 movups 16(%edx),%xmm1
82 leal 32(%edx,%ecx,1),%edx
84 addl $16,%ecx
88 movups (%edx,%ecx,1),%xmm1
92 movups -16(%edx,%ecx,1),%xmm0
99 .size _aesni_encrypt2,.-_aesni_encrypt2
101 .align 16
110 movups 16(%edx),%xmm1
114 leal 32(%edx,%ecx,1),%edx
116 addl $16,%ecx
120 movups (%edx,%ecx,1),%xmm1
124 movups -16(%edx,%ecx,1),%xmm0
131 .size _aesni_decrypt2,.-_aesni_decrypt2
133 .align 16
142 movups 16(%edx),%xmm1
147 leal 32(%edx,%ecx,1),%edx
149 addl $16,%ecx
154 movups (%edx,%ecx,1),%xmm1
159 movups -16(%edx,%ecx,1),%xmm0
168 .size _aesni_encrypt3,.-_aesni_encrypt3
170 .align 16
179 movups 16(%edx),%xmm1
184 leal 32(%edx,%ecx,1),%edx
186 addl $16,%ecx
191 movups (%edx,%ecx,1),%xmm1
196 movups -16(%edx,%ecx,1),%xmm0
205 .size _aesni_decrypt3,.-_aesni_decrypt3
207 .align 16
215 movups 16(%edx),%xmm1
222 leal 32(%edx,%ecx,1),%edx
225 addl $16,%ecx
231 movups (%edx,%ecx,1),%xmm1
237 movups -16(%edx,%ecx,1),%xmm0
248 .size _aesni_encrypt4,.-_aesni_encrypt4
250 .align 16
258 movups 16(%edx),%xmm1
265 leal 32(%edx,%ecx,1),%edx
268 addl $16,%ecx
274 movups (%edx,%ecx,1),%xmm1
280 movups -16(%edx,%ecx,1),%xmm0
291 .size _aesni_decrypt4,.-_aesni_decrypt4
293 .align 16
302 movups 16(%edx),%xmm1
310 leal 32(%edx,%ecx,1),%edx
314 movups (%edx,%ecx,1),%xmm0
315 addl $16,%ecx
317 .align 16
327 movups (%edx,%ecx,1),%xmm1
335 movups -16(%edx,%ecx,1),%xmm0
350 .size _aesni_encrypt6,.-_aesni_encrypt6
352 .align 16
361 movups 16(%edx),%xmm1
369 leal 32(%edx,%ecx,1),%edx
373 movups (%edx,%ecx,1),%xmm0
374 addl $16,%ecx
376 .align 16
386 movups (%edx,%ecx,1),%xmm1
394 movups -16(%edx,%ecx,1),%xmm0
409 .size _aesni_decrypt6,.-_aesni_decrypt6
412 .align 16
429 andl $-16,%eax
439 movdqu 16(%esi),%xmm3
447 .align 16
451 movups %xmm3,16(%edi)
452 movdqu 16(%esi),%xmm3
470 movups %xmm3,16(%edi)
482 movups 16(%esi),%xmm3
493 movups %xmm3,16(%edi)
498 .align 16
501 movups 16(%edx),%xmm1
508 leal 16(%edx),%edx
513 .align 16
517 movups %xmm3,16(%edi)
519 .align 16
523 movups %xmm3,16(%edi)
526 .align 16
530 movups %xmm3,16(%edi)
534 .align 16
541 movdqu 16(%esi),%xmm3
549 .align 16
553 movups %xmm3,16(%edi)
554 movdqu 16(%esi),%xmm3
572 movups %xmm3,16(%edi)
584 movups 16(%esi),%xmm3
595 movups %xmm3,16(%edi)
600 .align 16
603 movups 16(%edx),%xmm1
610 leal 16(%edx),%edx
615 .align 16
619 movups %xmm3,16(%edi)
621 .align 16
625 movups %xmm3,16(%edi)
628 .align 16
632 movups %xmm3,16(%edi)
649 .size aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
652 .align 16
672 andl $-16,%esp
681 movl $1,%ebx
683 movl %ebx,16(%esp)
688 movl $16,%ebx
692 leal 32(%edx,%ecx,1),%edx
700 movups 16(%ebp),%xmm1
707 movups (%edx,%ecx,1),%xmm1
711 movups -16(%edx,%ecx,1),%xmm0
715 paddq 16(%esp),%xmm7
719 leal 16(%esi),%esi
724 leal 16(%edi),%edi
742 .size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
745 .align 16
765 andl $-16,%esp
774 movl $1,%ebx
776 movl %ebx,16(%esp)
786 movups 16(%edx),%xmm1
793 leal 16(%edx),%edx
797 movl $16,%ecx
799 paddq 16(%esp),%xmm7
800 leal 16(%esi),%esi
802 leal 32(%ebp,%ebx,1),%edx
805 .align 16
810 leal 16(%edi),%edi
812 subl $1,%eax
816 movups 16(%ebp),%xmm1
824 movups (%edx,%ecx,1),%xmm1
828 movups -16(%edx,%ecx,1),%xmm0
831 paddq 16(%esp),%xmm7
836 leal 16(%esi),%esi
838 .align 16
843 movups 16(%edx),%xmm1
851 leal 16(%edx),%edx
870 .size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
873 .align 16
892 andl $-16,%esp
894 cmpl $1,%eax
903 movl %ecx,16(%esp)
918 .byte 102,15,58,34,195,1
920 .byte 102,15,58,34,205,1
936 movl $16,%ebx
940 leal 32(%edx,%ecx,1),%edx
943 .align 16
952 movups 16(%ebp),%xmm1
967 movups 16(%esi),%xmm0
972 movdqa 16(%esp),%xmm0
975 movups %xmm3,16(%edi)
1022 movups 16(%esi),%xmm0
1032 movups %xmm3,16(%edi)
1037 .align 16
1043 movups 16(%edx),%xmm1
1050 leal 16(%edx),%edx
1057 .align 16
1061 movups 16(%esi),%xmm6
1065 movups %xmm3,16(%edi)
1067 .align 16
1071 movups 16(%esi),%xmm6
1077 movups %xmm3,16(%edi)
1080 .align 16
1084 movups 16(%esi),%xmm7
1091 movups %xmm3,16(%edi)
1113 .size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
1116 .align 16
1133 movups 16(%edx),%xmm1
1140 leal 16(%edx),%edx
1150 andl $-16,%esp
1153 movl $1,104(%esp)
1161 andl $-16,%eax
1167 movl $16,%ebx
1169 leal 32(%edx,%ecx,1),%edx
1171 .align 16
1182 movdqa %xmm1,16(%esp)
1209 movdqu 16(%esi),%xmm3
1223 movups 16(%ebp),%xmm1
1224 pxor 16(%esp),%xmm3
1241 xorps 16(%esp),%xmm3
1244 movups %xmm3,16(%edi)
1295 movdqa %xmm6,16(%esp)
1304 movdqu 16(%esi),%xmm3
1308 pxor 16(%esp),%xmm3
1318 xorps 16(%esp),%xmm3
1322 movups %xmm3,16(%edi)
1329 .align 16
1332 leal 16(%esi),%esi
1335 movups 16(%edx),%xmm1
1342 leal 16(%edx),%edx
1347 leal 16(%edi),%edi
1350 .align 16
1354 movups 16(%esi),%xmm3
1362 movups %xmm3,16(%edi)
1366 .align 16
1370 movups 16(%esi),%xmm3
1381 movups %xmm3,16(%edi)
1386 .align 16
1390 movups 16(%esi),%xmm3
1395 xorps 16(%esp),%xmm3
1400 xorps 16(%esp),%xmm3
1404 movups %xmm3,16(%edi)
1410 .align 16
1418 .align 16
1432 movzbl -16(%edi),%edx
1433 leal 1(%esi),%esi
1434 movb %cl,-16(%edi)
1436 leal 1(%edi),%edi
1437 subl $1,%eax
1442 movups -16(%edi),%xmm2
1445 movups 16(%edx),%xmm1
1452 leal 16(%edx),%edx
1456 movups %xmm2,-16(%edi)
1463 movdqa %xmm0,16(%esp)
1478 .size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
1481 .align 16
1498 movups 16(%edx),%xmm1
1505 leal 16(%edx),%edx
1514 andl $-16,%esp
1522 movl $1,104(%esp)
1533 andl $-16,%eax
1537 movl $16,%ebx
1539 leal 32(%edx,%ecx,1),%edx
1541 .align 16
1552 movdqa %xmm1,16(%esp)
1579 movdqu 16(%esi),%xmm3
1593 movups 16(%ebp),%xmm1
1594 pxor 16(%esp),%xmm3
1611 xorps 16(%esp),%xmm3
1614 movups %xmm3,16(%edi)
1665 movdqa %xmm6,16(%esp)
1674 movdqu 16(%esi),%xmm3
1678 pxor 16(%esp),%xmm3
1688 xorps 16(%esp),%xmm3
1692 movups %xmm3,16(%edi)
1699 .align 16
1702 leal 16(%esi),%esi
1705 movups 16(%edx),%xmm1
1712 leal 16(%edx),%edx
1717 leal 16(%edi),%edi
1720 .align 16
1724 movups 16(%esi),%xmm3
1732 movups %xmm3,16(%edi)
1736 .align 16
1740 movups 16(%esi),%xmm3
1751 movups %xmm3,16(%edi)
1756 .align 16
1760 movups 16(%esi),%xmm3
1765 xorps 16(%esp),%xmm3
1770 xorps 16(%esp),%xmm3
1774 movups %xmm3,16(%edi)
1780 .align 16
1787 .align 16
1813 movups 16(%edx),%xmm1
1820 leal 16(%edx),%edx
1826 movzbl 16(%esi),%ecx
1828 leal 1(%esi),%esi
1830 movb %dl,16(%edi)
1831 leal 1(%edi),%edi
1832 subl $1,%eax
1840 movups 16(%edx),%xmm1
1847 leal 16(%edx),%edx
1858 movdqa %xmm0,16(%esp)
1873 .size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
1876 .align 16
1900 andl $-16,%esp
1903 leal -96(%esi,%eax,1),%eax
1908 testl $1,%ebp
1911 addl $1,%ebp
1913 movdqu (%ebx,%eax,1),%xmm7
1916 leal 16(%esi),%esi
1922 movups 16(%edx),%xmm1
1929 leal 16(%edx),%edx
1935 movups %xmm2,-16(%edi,%esi,1)
1941 movl $16,%edi
1944 leal 32(%edx,%ecx,1),%edx
1951 leal 1(%ebp),%ecx
1962 movdqu (%ebx,%ecx,1),%xmm3
1965 movdqu (%ebx,%eax,1),%xmm5
1967 movdqu (%ebx,%edi,1),%xmm7
1972 movdqa %xmm3,16(%esp)
1980 movups -48(%edx,%ecx,1),%xmm0
1982 movdqu 16(%esi),%xmm3
2001 movups -32(%edx,%ecx,1),%xmm1
2003 pxor 16(%esp),%xmm3
2008 movups -16(%edx,%ecx,1),%xmm0
2020 pxor 16(%esp),%xmm3
2026 movdqu %xmm2,-96(%edi,%esi,1)
2027 movdqu %xmm3,-80(%edi,%esi,1)
2028 movdqu %xmm4,-64(%edi,%esi,1)
2029 movdqu %xmm5,-48(%edi,%esi,1)
2030 movdqu %xmm6,-32(%edi,%esi,1)
2031 movdqu %xmm7,-16(%edi,%esi,1)
2044 leal 1(%ebp),%ecx
2051 movdqu (%ebx,%ecx,1),%xmm3
2054 movdqu (%ebx,%eax,1),%xmm5
2060 movdqa %xmm3,16(%esp)
2067 movups -48(%edx,%ecx,1),%xmm0
2069 movdqu 16(%esi),%xmm3
2085 movups -32(%edx,%ecx,1),%xmm1
2087 pxor 16(%esp),%xmm3
2091 movups -16(%edx,%ecx,1),%xmm0
2102 pxor 16(%esp),%xmm3
2107 movdqu %xmm2,(%edi,%esi,1)
2108 movdqu %xmm3,16(%edi,%esi,1)
2109 movdqu %xmm4,32(%edi,%esi,1)
2110 movdqu %xmm5,48(%edi,%esi,1)
2111 movdqu %xmm6,64(%edi,%esi,1)
2113 .align 16
2125 movups 16(%edx),%xmm1
2132 leal 16(%edx),%edx
2138 movups %xmm2,(%edi,%esi,1)
2140 .align 16
2142 leal 1(%ebp),%ecx
2147 movdqu (%ebx,%ecx,1),%xmm7
2149 movdqu 16(%esi),%xmm3
2164 movups %xmm2,(%edi,%esi,1)
2165 movups %xmm3,16(%edi,%esi,1)
2167 .align 16
2169 leal 1(%ebp),%ecx
2174 movdqu (%ebx,%ecx,1),%xmm6
2177 movdqu 16(%esi),%xmm3
2197 movups %xmm2,(%edi,%esi,1)
2198 movups %xmm3,16(%edi,%esi,1)
2199 movups %xmm4,32(%edi,%esi,1)
2201 .align 16
2203 leal 1(%ebp),%ecx
2211 movdqu (%ebx,%ecx,1),%xmm5
2213 movdqu (%ebx,%eax,1),%xmm7
2217 movdqu 16(%esi),%xmm3
2221 movdqa %xmm5,16(%esp)
2228 pxor 16(%esp),%xmm3
2237 xorps 16(%esp),%xmm3
2239 movups %xmm2,(%edi,%esi,1)
2241 movups %xmm3,16(%edi,%esi,1)
2243 movups %xmm4,32(%edi,%esi,1)
2245 movups %xmm5,48(%edi,%esi,1)
2252 movdqa %xmm2,16(%esp)
2273 .size aesni_ocb_encrypt,.-.L_aesni_ocb_encrypt_begin
2276 .align 16
2300 andl $-16,%esp
2303 leal -96(%esi,%eax,1),%eax
2308 testl $1,%ebp
2311 addl $1,%ebp
2313 movdqu (%ebx,%eax,1),%xmm7
2316 leal 16(%esi),%esi
2321 movups 16(%edx),%xmm1
2328 leal 16(%edx),%edx
2335 movups %xmm2,-16(%edi,%esi,1)
2341 movl $16,%edi
2344 leal 32(%edx,%ecx,1),%edx
2351 leal 1(%ebp),%ecx
2362 movdqu (%ebx,%ecx,1),%xmm3
2365 movdqu (%ebx,%eax,1),%xmm5
2367 movdqu (%ebx,%edi,1),%xmm7
2372 movdqa %xmm3,16(%esp)
2380 movups -48(%edx,%ecx,1),%xmm0
2382 movdqu 16(%esi),%xmm3
2395 movups -32(%edx,%ecx,1),%xmm1
2397 pxor 16(%esp),%xmm3
2402 movups -16(%edx,%ecx,1),%xmm0
2415 pxor 16(%esp),%xmm3
2421 movdqu %xmm2,-96(%edi,%esi,1)
2423 movdqu %xmm3,-80(%edi,%esi,1)
2425 movdqu %xmm4,-64(%edi,%esi,1)
2427 movdqu %xmm5,-48(%edi,%esi,1)
2429 movdqu %xmm6,-32(%edi,%esi,1)
2431 movdqu %xmm7,-16(%edi,%esi,1)
2444 leal 1(%ebp),%ecx
2451 movdqu (%ebx,%ecx,1),%xmm3
2454 movdqu (%ebx,%eax,1),%xmm5
2460 movdqa %xmm3,16(%esp)
2467 movups -48(%edx,%ecx,1),%xmm0
2469 movdqu 16(%esi),%xmm3
2480 movups -32(%edx,%ecx,1),%xmm1
2482 pxor 16(%esp),%xmm3
2486 movups -16(%edx,%ecx,1),%xmm0
2498 pxor 16(%esp),%xmm3
2503 movdqu %xmm2,(%edi,%esi,1)
2505 movdqu %xmm3,16(%edi,%esi,1)
2507 movdqu %xmm4,32(%edi,%esi,1)
2509 movdqu %xmm5,48(%edi,%esi,1)
2511 movdqu %xmm6,64(%edi,%esi,1)
2513 .align 16
2524 movups 16(%edx),%xmm1
2531 leal 16(%edx),%edx
2538 movups %xmm2,(%edi,%esi,1)
2540 .align 16
2542 leal 1(%ebp),%ecx
2547 movdqu (%ebx,%ecx,1),%xmm7
2549 movdqu 16(%esi),%xmm3
2562 movups %xmm2,(%edi,%esi,1)
2564 movups %xmm3,16(%edi,%esi,1)
2567 .align 16
2569 leal 1(%ebp),%ecx
2574 movdqu (%ebx,%ecx,1),%xmm6
2577 movdqu 16(%esi),%xmm3
2593 movups %xmm2,(%edi,%esi,1)
2596 movups %xmm3,16(%edi,%esi,1)
2598 movups %xmm4,32(%edi,%esi,1)
2601 .align 16
2603 leal 1(%ebp),%ecx
2611 movdqu (%ebx,%ecx,1),%xmm5
2613 movdqu (%ebx,%eax,1),%xmm7
2617 movdqu 16(%esi),%xmm3
2621 movdqa %xmm5,16(%esp)
2627 pxor 16(%esp),%xmm3
2634 xorps 16(%esp),%xmm3
2636 movups %xmm2,(%edi,%esi,1)
2639 movups %xmm3,16(%edi,%esi,1)
2642 movups %xmm4,32(%edi,%esi,1)
2644 movups %xmm5,48(%edi,%esi,1)
2652 movdqa %xmm2,16(%esp)
2673 .size aesni_ocb_decrypt,.-.L_aesni_ocb_decrypt_begin
2676 .align 16
2693 andl $-16,%ebx
2703 movl %ebx,16(%esp)
2707 cmpl $16,%eax
2709 subl $16,%eax
2711 .align 16
2714 leal 16(%esi),%esi
2716 movups 16(%edx),%xmm1
2724 leal 16(%edx),%edx
2730 leal 16(%edi),%edi
2731 subl $16,%eax
2733 addl $16,%eax
2741 movl $16,%ecx
2745 leal -16(%edi),%edi
2750 .align 16
2757 .align 16
2761 leal 16(%edi),%edi
2764 movdqu 16(%esi),%xmm3
2771 movups 16(%esi),%xmm0
2783 movups %xmm3,16(%edi)
2798 leal 16(%edi),%edi
2802 cmpl $16,%eax
2804 movups 16(%esi),%xmm3
2820 movups 16(%esi),%xmm0
2830 movups %xmm3,16(%edi)
2841 .align 16
2844 movups 16(%edx),%xmm1
2851 leal 16(%edx),%edx
2856 subl $16,%eax
2858 .align 16
2866 leal 16(%edi),%edi
2870 .align 16
2879 movups %xmm3,16(%edi)
2885 .align 16
2888 movups 16(%esi),%xmm1
2895 movups %xmm3,16(%edi)
2905 .align 16
2917 .align 16
2921 movl $16,%ecx
2927 movl 16(%esp),%esp
2939 .size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
2941 .align 16
2957 leal .Lkey_const-.L112pic(%ebx),%ebx
2958 leal OPENSSL_ia32cap_P-.Lkey_const(%ebx),%ebp
2962 leal 16(%edx),%edx
2970 .align 16
2975 movups %xmm0,-16(%edx)
2976 .byte 102,15,58,223,200,1
2984 .byte 102,15,58,223,200,16
2999 .align 16
3002 leal 16(%edx),%edx
3004 shufps $16,%xmm0,%xmm4
3011 .align 16
3017 movdqu %xmm0,-16(%edx)
3021 pslld $1,%xmm4
3022 leal 16(%edx),%edx
3031 movdqu %xmm0,-16(%edx)
3038 pslld $1,%xmm4
3059 movdqu %xmm0,16(%edx)
3063 .align 16
3065 movq 16(%eax),%xmm2
3069 movups %xmm0,-16(%edx)
3070 .byte 102,15,58,223,202,1
3078 .byte 102,15,58,223,202,16
3089 .align 16
3092 leal 16(%edx),%edx
3093 .align 16
3097 shufps $16,%xmm0,%xmm4
3109 .align 16
3115 movups %xmm3,16(%edx)
3118 .align 16
3120 movdqa 16(%ebx),%xmm5
3123 movdqu %xmm0,-16(%edx)
3129 pslld $1,%xmm4
3144 movdqu %xmm0,-16(%edx)
3150 .align 16
3152 movups 16(%eax),%xmm2
3153 leal 16(%edx),%edx
3157 movups %xmm0,-32(%edx)
3158 movups %xmm2,-16(%edx)
3159 .byte 102,15,58,223,202,1
3161 .byte 102,15,58,223,200,1
3175 .byte 102,15,58,223,202,16
3177 .byte 102,15,58,223,200,16
3186 movl %ecx,16(%edx)
3189 .align 16
3192 leal 16(%edx),%edx
3194 shufps $16,%xmm0,%xmm4
3201 .align 16
3204 leal 16(%edx),%edx
3205 shufps $16,%xmm2,%xmm4
3212 .align 16
3217 movdqu %xmm0,-32(%edx)
3219 movdqu %xmm2,-16(%edx)
3230 pslld $1,%xmm4
3246 movdqu %xmm2,16(%edx)
3252 movl %ecx,16(%edx)
3266 movl $-1,%eax
3273 movl $-2,%eax
3277 .size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key
3280 .align 16
3293 .size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
3296 .align 16
3312 leal 16(%edx,%ecx,1),%eax
3317 leal 16(%edx),%edx
3318 leal -16(%eax),%eax
3324 leal 16(%edx),%edx
3325 leal -16(%eax),%eax
3326 movups %xmm0,16(%eax)
3327 movups %xmm1,-16(%edx)
3338 .size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
3343 .long 1,1,1,1
3349 .comm OPENSSL_ia32cap_P,16,4
3353 .long 1f - 0f
3354 .long 4f - 1f
3358 1:
3361 .long 3f - 2f
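
The matches above are dominated by the AES-NI round-key walk: movups 16(%edx),%xmm1 loads the next round key, leal 16(%edx),%edx advances the schedule pointer, and the .byte 102,15,58,223,... sequences are hand-encoded aeskeygenassist instructions in the key-setup routines. As a rough C-intrinsics sketch of that pattern only (not part of the generated file; the names aes_encrypt_block, aes128_expand_step, rk and rounds are hypothetical):

#include <immintrin.h>
#include <stdint.h>

/* Sketch: encrypt one 16-byte block with an already-expanded schedule of
 * rounds+1 round keys, mirroring the movups/leal walk seen above.
 * Build with -maes. */
static void aes_encrypt_block(const uint8_t in[16], uint8_t out[16],
                              const __m128i *rk, int rounds)
{
    __m128i b = _mm_loadu_si128((const __m128i *)in);
    b = _mm_xor_si128(b, rk[0]);              /* whitening key */
    for (int i = 1; i < rounds; i++)
        b = _mm_aesenc_si128(b, rk[i]);       /* movups 16(%edx),%xmm1 ; aesenc */
    b = _mm_aesenclast_si128(b, rk[rounds]);  /* last round uses aesenclast */
    _mm_storeu_si128((__m128i *)out, b);
}

/* One AES-128 key-expansion step in the classic Intel intrinsics form;
 * the file reaches the same result with aeskeygenassist
 * (.byte 102,15,58,223,...) plus shufps mixing. */
static __m128i aes128_expand_step(__m128i key, __m128i kga)
{
    kga = _mm_shuffle_epi32(kga, _MM_SHUFFLE(3, 3, 3, 3));
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4));
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4));
    key = _mm_xor_si128(key, _mm_slli_si128(key, 4));
    return _mm_xor_si128(key, kga);
}
/* e.g. rk[1] = aes128_expand_step(rk[0], _mm_aeskeygenassist_si128(rk[0], 0x01)); */

The assembly itself avoids a per-key-size loop body: leal 32(%edx,%ecx,1),%edx plus addl $16,%ecx run the key index from a negative offset up toward zero, so the (%edx,%ecx,1) and -16(%edx,%ecx,1) loads serve 10-, 12- and 14-round schedules with a single loop.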