Home
last modified time | relevance | path

Searched refs:XTMP4 (Results 1 – 5 of 5) sorted by relevance

/linux/arch/x86/crypto/
H A D  sha256-ssse3-asm.S  84  XTMP4 = %xmm8 define
195 movdqa XTMP3, XTMP4 # XTMP4 = W[-15]
212 psrld $3, XTMP4 # XTMP4 = W[-15] >> 3
220 pxor XTMP4, XTMP1 # XTMP1 = s0
237 movdqa XTMP2, XTMP4 # XTMP4 = W[-2] {BBAA}
248 psrld $10, XTMP4 # XTMP4 = W[-2] >> 10 {BBAA}
257 pxor XTMP2, XTMP4 # XTMP4 = s1 {xBxA}
261 pshufb SHUF_00BA, XTMP4 # XTMP4 = s1 {00BA}
265 paddd XTMP4, XTMP0 # XTMP0 = {..., ..., W[1], W[0]}
H A D  sha256-avx-asm.S  90  XTMP4 = %xmm8 define
204 vpsrld $3, XTMP1, XTMP4 # XTMP4 = W[-15] >> 3
220 vpxor XTMP4, XTMP3, XTMP1 # XTMP1 = s0
240 vpsrld $10, XTMP2, XTMP4 # XTMP4 = W[-2] >> 10 {BBAA}
254 vpxor XTMP2, XTMP4, XTMP4 # XTMP4 = s1 {xBxA}
258 vpshufb SHUF_00BA, XTMP4, XTMP4 # XTMP4 = s1 {00BA}
262 vpaddd XTMP4, XTMP0, XTMP0 # XTMP0 = {..., ..., W[1], W[0]}
H A D  sha256-avx2-asm.S  83  XTMP4 = %ymm8 define
213 vpsrld $3, XTMP1, XTMP4 # XTMP4 = W[-15] >> 3
240 vpxor XTMP4, XTMP3, XTMP1 # XTMP1 = s0
250 vpsrld $10, XTMP2, XTMP4 # XTMP4 = W[-2] >> 10 {BBAA}
280 vpxor XTMP2, XTMP4, XTMP4 # XTMP4 = s1 {xBxA}
283 vpshufb SHUF_00BA, XTMP4, XTMP4 # XTMP4 = s1 {00BA}
286 vpaddd XTMP4, XTMP0, XTMP0 # XTMP0 = {..., ..., W[1], W[0]}
H A D  sm3-avx-asm_64.S  128  #define XTMP4 %xmm10 macro
249 vpxor XTMP0, XTMP1, XTMP4; \
254 vmovdqa XTMP4, IW_W1W2_ADDR(0, 0); \
309 vpshufd $0b10111111, w4, XTMP4; \
310 vpalignr $12, XTMP4, w5, XTMP4; \
311 vmovdqa XTMP4, XW_W1_ADDR((round), 0); \
313 vpxor w0, XTMP4, XTMP1; \
/linux/arch/arm64/crypto/
H A D  sm3-neon-core.S  80  #define XTMP4 v18 macro
205 eor XTMP4.16b, XTMP1.16b, XTMP0.16b;
211 st1 {XTMP4.16b}, [addr0]; \
280 shl XTMP4.4s, XTMP0.4s, #23;
286 sri XTMP4.4s, XTMP0.4s, #(32-23);
293 eor w0.16b, w0.16b, XTMP4.16b;
557 clear_vec(XTMP4)