| /linux/arch/x86/crypto/ |
| aria-aesni-avx-asm_64.S |
      889  inpack16_post(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
      893  %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
      895  aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
      899  %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
      901  aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
      905  %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
      907  aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
      911  %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
      913  aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
      917  %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
      [all …]
|
| aes-gcm-aesni-x86_64.S |
      504   .set GFPOLY, %xmm5
      585   .set H_POW1_X64, %xmm5 // H^1 * x^64
      711   .set GHASH_ACC, %xmm5 // GHASH accumulator; in main loop also
      1016  .set H_POW1, %xmm5 // H^1
|
| aes-gcm-vaes-avx512.S |
      297   .set GFPOLY_XMM, %xmm5
      516   .set GHASH_ACC_XMM, %xmm5
      722   .set GHASHDATA1_XMM, %xmm5
      1050  .set BSWAP_MASK, %xmm5
|
| nh-sse2-x86_64.S | 18  #define K1 %xmm5
|
| nh-avx2-x86_64.S | 20  #define K1_XMM %xmm5
|
| aes-gcm-vaes-avx2.S |
      237   .set H_INC_XMM, %xmm5
      517   .set GHASH_ACC_XMM, %xmm5
      728   .set LO_XMM, %xmm5
      1035  .set BSWAP_MASK, %xmm5
|
| cast6-avx-x86_64-asm_64.S | 43  #define RB2 %xmm5
|
| twofish-avx-x86_64-asm_64.S | 43  #define RB2 %xmm5
|
| sm4-aesni-avx2-asm_64.S | 54  #define RTMP2x %xmm5
|
| sm4-aesni-avx-asm_64.S | 27  #define RTMP2 %xmm5
|
| sm3-avx-asm_64.S | 122  #define W5 %xmm5
|
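Apart from the aria entry, which passes raw register names as macro arguments, every hit shown above in this directory binds %xmm5 to a role name before use, either with a cpp #define (these .S files go through the C preprocessor when built) or with the assembler's .set directive, and the rest of the file then refers only to the alias. A minimal standalone sketch of both styles under the GNU toolchain, AT&T syntax; the alias names and the demo routine are hypothetical, not taken from any of the files above:

      /* Style 1: preprocessor alias, as in "#define K1 %xmm5" above. */
      #define ROUND_KEY %xmm5          /* hypothetical role name for %xmm5 */

      /* Style 2: assembler-level alias, as in ".set GFPOLY, %xmm5" above. */
      .set TMP_VEC, %xmm5              /* hypothetical role name; here both aliases happen to name %xmm5 */

      .text
      .globl alias_demo
      alias_demo:
          movdqu  (%rdi), ROUND_KEY    /* load 16 bytes through the #define alias */
          pxor    ROUND_KEY, %xmm0     /* the alias is an ordinary register operand */
          movdqa  %xmm0, TMP_VEC       /* the .set alias expands the same way */
          movdqu  TMP_VEC, (%rdi)
          ret

Both spellings assemble to the same instructions; which one a given file uses is a local style choice.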
| /linux/lib/crypto/x86/ |
| chacha-ssse3-x86_64.S |
      39   movdqa ROT16(%rip),%xmm5
      45   pshufb %xmm5,%xmm3
      78   pshufb %xmm5,%xmm3
      252  movq 0x10(%rdi),%xmm5
      253  pshufd $0x00,%xmm5,%xmm4
      254  pshufd $0x55,%xmm5,%xmm5
      292  paddd %xmm5,%xmm0
      318  pxor %xmm9,%xmm5
      319  movdqa %xmm5,%xmm0
      321  psrld $20,%xmm5
      [all …]
|
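In the chacha-ssse3 entry above, the first three hits are the rotate-by-16 trick: %xmm5 holds a byte-shuffle control loaded from ROT16, and pshufb with that control rotates every 32-bit lane left by 16, replacing a shift/shift/or sequence with one instruction. A standalone sketch of the idiom; the mask layout follows from the definition of rotl32 by 16, but the label and routine name are mine, not the kernel's:

      .section .rodata
      .align 16
      rot16_mask:                       /* output byte i takes input byte (i & ~3) + ((i + 2) & 3) */
          .byte  2,  3,  0,  1,  6,  7,  4,  5
          .byte 10, 11,  8,  9, 14, 15, 12, 13

      .text
      .globl rotl16_lanes               /* hypothetical: rotate each u32 lane of the block at (%rdi) by 16 */
      rotl16_lanes:
          movdqa  rot16_mask(%rip), %xmm5   /* shuffle control lives in %xmm5, as in the entry above */
          movdqu  (%rdi), %xmm3             /* four 32-bit words */
          pshufb  %xmm5, %xmm3              /* SSSE3 byte shuffle == rotl32(x, 16) per lane */
          movdqu  %xmm3, (%rdi)
          ret

The rotate by 8 can be handled the same way with a different control byte pattern; the 12- and 7-bit rotates in ChaCha are not byte-aligned and still need the shift/or form (the psrld $20 hit above is part of the 12-bit rotate).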
| blake2s-core.S |
      94   movd (DATA,%rax,4),%xmm5
      99   punpckldq %xmm5,%xmm4
      113  movd (DATA,%rax,4),%xmm5
      120  punpckldq %xmm6,%xmm5
      122  punpcklqdq %xmm7,%xmm5
      123  paddd %xmm5,%xmm0
      143  movd (DATA,%rax,4),%xmm5
      145  punpckldq %xmm5,%xmm4
      162  movd (DATA,%rax,4),%xmm5
      166  punpckldq %xmm6,%xmm5
      [all …]
|
| sha512-avx-asm.S |
      174  vmovdqu W_t(idx), %xmm5         # XMM5 = W[t-15]
      178  vpsrlq $1, %xmm5, %xmm6         # XMM6 = W[t-15]>>1
      188  vpsrlq $8, %xmm5, %xmm7         # XMM7 = W[t-15]>>8
      197  vpsrlq $7, %xmm5, %xmm8         # XMM8 = W[t-15]>>7
      206  vpsllq $(64-1), %xmm5, %xmm9    # XMM9 = W[t-15]<<63
      234  vpsllq $(64-8), %xmm5, %xmm5    # XMM5 = W[t-15]<<56
      237  vpxor %xmm5, %xmm6, %xmm6       # XMM6 = s0(W[t-15])
|
| sha512-ssse3-asm.S |
      179  movdqu W_t(idx), %xmm5   # XMM5 = W[t-15]
      182  movdqa %xmm5, %xmm3      # XMM3 = W[t-15]
      194  pxor %xmm5, %xmm3        # XMM3 = (W[t-15] >> 1) ^ W[t-15]
      206  pxor %xmm5, %xmm3        # XMM3 = (((W[t-15]>>1)^W[t-15])>>6)^W[t-15]
      218  movdqa %xmm5, %xmm4      # XMM4 = W[t-15]
      231  pxor %xmm5, %xmm4        # XMM4 = (W[t-15]<<7)^W[t-15]
|
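Both SHA-512 entries above are the message-schedule term s0(W[t-15]) = ROTR1(x) ^ ROTR8(x) ^ SHR7(x), with %xmm5 holding x = W[t-15]. There is no packed 64-bit rotate before AVX-512, so each ROTRn has to be built from shifts and XORs: the AVX file as (x >> n) ^ (x << (64-n)), the SSSE3 file through an equivalent refactoring. A compact sketch of the same arithmetic in isolation, using the AVX three-operand forms; the routine name and the register allocation beyond %xmm5 are mine:

      .text
      .globl sha512_sigma0_pair        /* hypothetical: s0() of the two u64 words at (%rdi), stored to (%rsi) */
      sha512_sigma0_pair:
          vmovdqu (%rdi), %xmm5        /* %xmm5 = x = W[t-15] lanes, as in the entries above */
          vpsrlq  $1,  %xmm5, %xmm6    /* x >> 1 */
          vpsllq  $63, %xmm5, %xmm9    /* x << 63 */
          vpxor   %xmm9, %xmm6, %xmm6  /* ROTR1(x) */
          vpsrlq  $8,  %xmm5, %xmm7    /* x >> 8 */
          vpsllq  $56, %xmm5, %xmm9    /* x << 56 */
          vpxor   %xmm9, %xmm7, %xmm7  /* ROTR8(x) */
          vpsrlq  $7,  %xmm5, %xmm8    /* SHR7(x) */
          vpxor   %xmm7, %xmm6, %xmm6
          vpxor   %xmm8, %xmm6, %xmm6  /* s0(x) = ROTR1 ^ ROTR8 ^ SHR7 */
          vmovdqu %xmm6, (%rsi)
          ret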
| sha1-ni-asm.S | 67  #define MSG2 %xmm5
|
| sha256-ni-asm.S |
      69   #define MSG2 %xmm5
      207  #define TMP_A %xmm5
|
| chacha-avx512vl-x86_64.S |
      392  vextracti128 $1,%ymm10,%xmm5
      415  vmovdqa %xmm5,%xmm10
|
| sha1-ssse3-and-avx.S | 54  #define W16 %xmm5
|
| chacha-avx2-x86_64.S |
      458  vextracti128 $1,%ymm10,%xmm5
      481  vmovdqa %xmm5,%xmm10
|
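In both AVX2-family ChaCha entries above, %xmm5 is only a staging register: vextracti128 $1 copies the upper 128-bit lane of a ymm register into it, and the value is then moved on with vmovdqa. The extraction idiom in isolation; the routine name and the surrounding loads are mine, not the kernel's:

      .text
      .globl ymm_high_lane             /* hypothetical: return the high 128 bits of the 256-bit vector at (%rdi) */
      ymm_high_lane:
          vmovdqu (%rdi), %ymm10       /* full 256-bit vector */
          vextracti128 $1, %ymm10, %xmm5   /* %xmm5 = bits 255:128, as in the entries above */
          vmovdqa %xmm5, %xmm0         /* SysV ABI: vector return value in %xmm0 */
          vzeroupper                   /* avoid AVX/SSE transition penalties after using ymm state */
          ret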
| sha256-avx-asm.S | 81  X1 = %xmm5
|
| sha256-ssse3-asm.S | 75  X1 = %xmm5
|
| /linux/arch/x86/entry/vdso/ |
| vgetrandom-chacha.S | 37  .set copy0, %xmm5
|
| /linux/lib/crc/x86/ |
| crc-pclmul-template.S | 391  _fold_vec_final 16, %xmm0, %xmm1, CONSTS_XMM, BSWAP_MASK_XMM, %xmm4, %xmm5
|
| /linux/include/hyperv/ |
| hvhdk.h | 83  struct hv_u128 xmm5;  (member)
|