
Searched refs:xmm4 (Results 1 – 25 of 30) sorted by relevance


/linux/lib/crypto/x86/
blake2s-core.S
92 movd (DATA,%rax,4),%xmm4
99 punpckldq %xmm5,%xmm4
101 punpcklqdq %xmm6,%xmm4
102 paddd %xmm4,%xmm0
119 movd (DATA,%rax,4),%xmm4
121 punpckldq %xmm4,%xmm7
141 movd (DATA,%rax,4),%xmm4
145 punpckldq %xmm5,%xmm4
146 punpcklqdq %xmm4,%xmm6
160 movd (DATA,%rax,4),%xmm4
[all …]
chacha-ssse3-x86_64.S
38 movdqa ROT8(%rip),%xmm4
58 pshufb %xmm4,%xmm3
91 pshufb %xmm4,%xmm3
139 movdqu 0x00(%rdx),%xmm4
140 pxor %xmm4,%xmm0
253 pshufd $0x00,%xmm5,%xmm4
286 paddd %xmm4,%xmm0
311 pxor %xmm8,%xmm4
312 movdqa %xmm4,%xmm0
314 psrld $20,%xmm4
[all …]
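In the chacha-ssse3-x86_64.S hits above, %xmm4 first holds the ROT8 byte-shuffle mask used by pshufb and later appears to hold a state word that psrld $20 (paired with a pslld $12 elsewhere) rotates by 12 bits; both are 32-bit rotates inside the ChaCha quarter-round. As a point of reference, a minimal plain-C sketch of that quarter-round (names are illustrative, not the kernel's API):

#include <stdint.h>

static inline uint32_t rotl32(uint32_t v, int n)
{
        return (v << n) | (v >> (32 - n));
}

/* One ChaCha quarter-round: the rotl-by-16/8 steps are what the pshufb
 * byte-shuffle masks implement, the rotl-by-12/7 steps are what the
 * pslld/psrld shift pairs implement. */
static void chacha_quarter_round(uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d)
{
        *a += *b; *d ^= *a; *d = rotl32(*d, 16);
        *c += *d; *b ^= *c; *b = rotl32(*b, 12);
        *a += *b; *d ^= *a; *d = rotl32(*d, 8);
        *c += *d; *b ^= *c; *b = rotl32(*b, 7);
}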
sha512-avx-asm.S
172 vmovdqa W_t(idx), %xmm4 # XMM4 = W[t-2]
176 vpsrlq $61, %xmm4, %xmm0 # XMM0 = W[t-2]>>61
181 vpsrlq $19, %xmm4, %xmm1 # XMM1 = W[t-2]>>19
190 vpsrlq $6, %xmm4, %xmm2 # XMM2 = W[t-2]>>6
200 vpsllq $(64-61), %xmm4, %xmm3 # XMM3 = W[t-2]<<3
217 vpsllq $(64-19), %xmm4, %xmm4 # XMM4 = W[t-2]<<25
220 vpxor %xmm4, %xmm0, %xmm0 # XMM0 = W[t-2]>>61 ^ W[t-2]>>19 ^
polyval-pclmul-avx.S
95 vpclmulqdq $0x11, (16*\i)(KEY_POWERS), %xmm0, %xmm4
98 vpxor %xmm4, HI, HI
sha512-ssse3-asm.S
218 movdqa %xmm5, %xmm4 # XMM4 = W[t-15]
225 psllq $(64-1)-(64-8), %xmm4 # XMM4 = W[t-15] << 7
231 pxor %xmm5, %xmm4 # XMM4 = (W[t-15]<<7)^W[t-15]
237 psllq $(64-8), %xmm4 # XMM4 = ((W[t-15]<<7)^W[t-15])<<56
246 pxor %xmm4, %xmm3 # XMM3 = s0(W[t-15])
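The sha512-avx-asm.S and sha512-ssse3-asm.S hits above are building the SHA-512 message-schedule sigma functions out of shift pairs, since those instruction sets have no 64-bit vector rotate: each rotr(x, n) is composed from a right shift by n and a complementary left shift by 64-n, which is what the paired vpsrlq/vpsllq (and psrlq/psllq) instructions are doing. A minimal C sketch of the two functions for comparison (function names are illustrative, not the kernel's):

#include <stdint.h>

static inline uint64_t rotr64(uint64_t x, int n)
{
        return (x >> n) | (x << (64 - n));
}

/* s0(W[t-15]) = rotr(x,1) ^ rotr(x,8) ^ (x >> 7), computed in sha512-ssse3-asm.S */
static inline uint64_t sha512_s0(uint64_t x)
{
        return rotr64(x, 1) ^ rotr64(x, 8) ^ (x >> 7);
}

/* s1(W[t-2]) = rotr(x,19) ^ rotr(x,61) ^ (x >> 6), computed in sha512-avx-asm.S */
static inline uint64_t sha512_s1(uint64_t x)
{
        return rotr64(x, 19) ^ rotr64(x, 61) ^ (x >> 6);
}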
sha1-ni-asm.S
66 #define MSG1 %xmm4
sha256-ni-asm.S
68 #define MSG1 %xmm4
206 #define STATE1_B %xmm4
chacha-avx512vl-x86_64.S
385 vextracti128 $1,%ymm10,%xmm4
409 vmovdqa %xmm4,%xmm10
sha1-ssse3-and-avx.S
53 #define W12 %xmm4
chacha-avx2-x86_64.S
451 vextracti128 $1,%ymm10,%xmm4
475 vmovdqa %xmm4,%xmm10
sha256-avx-asm.S
80 X0 = %xmm4
/linux/arch/x86/crypto/
aria-aesni-avx-asm_64.S
889 inpack16_post(%xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
893 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
895 aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
899 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
901 aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
905 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
907 aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
911 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
913 aria_fe(%xmm1, %xmm0, %xmm3, %xmm2, %xmm4, %xmm5, %xmm6, %xmm7,
917 %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7,
[all …]
aes-gcm-aesni-x86_64.S
503 .set H_POW1_X64, %xmm4 // H^1 * x^64
584 .set H_POW1, %xmm4 // H^1
710 .set MI, %xmm4 // Middle part of unreduced product
1015 .set GHASH_ACC, %xmm4
aes-gcm-vaes-avx512.S
294 .set H_INC_XMM, %xmm4
514 .set BSWAP_MASK_XMM, %xmm4
720 .set GHASHDATA0_XMM, %xmm4
1049 .set GFPOLY, %xmm4
nh-sse2-x86_64.S
17 #define K0 %xmm4
nh-avx2-x86_64.S
18 #define K0_XMM %xmm4
aes-gcm-vaes-avx2.S
515 .set MI_XMM, %xmm4
724 .set TMP2_XMM, %xmm4
1034 .set GFPOLY, %xmm4
cast6-avx-x86_64-asm_64.S
42 #define RA2 %xmm4
twofish-avx-x86_64-asm_64.S
42 #define RA2 %xmm4
sm4-aesni-avx2-asm_64.S
53 #define RTMP1x %xmm4
sm4-aesni-avx-asm_64.S
26 #define RTMP1 %xmm4
sm3-avx-asm_64.S
121 #define W4 %xmm4
/linux/lib/crc/x86/
crc-pclmul-template.S
391 _fold_vec_final 16, %xmm0, %xmm1, CONSTS_XMM, BSWAP_MASK_XMM, %xmm4, %xmm5
427 %xmm0, %xmm0, unaligned_mem_tmp=%xmm4
433 movdqa %xmm0, %xmm4
436 movdqa %xmm4, %xmm0
444 _fold_vec %xmm0, %xmm1, CONSTS_XMM, %xmm4
/linux/arch/x86/entry/vdso/
vgetrandom-chacha.S
36 .set state3, %xmm4
/linux/include/hyperv/
hvhdk.h
82 struct hv_u128 xmm4; member
