
Searched refs:_mm_xor_si128 (Results 1 – 25 of 38) sorted by relevance


/freebsd/contrib/bearssl/src/hash/
ghash_pclmul.c
149 kx = _mm_xor_si128(kw, _mm_shuffle_epi32(kw, 0x0E)); \
158 kx = _mm_xor_si128(k0, k1); \
182 x1 = _mm_xor_si128( \
184 _mm_xor_si128( \
185 _mm_xor_si128( \
188 _mm_xor_si128( \
191 x2 = _mm_xor_si128( \
192 _mm_xor_si128( \
195 _mm_xor_si128( \
198 x0 = _mm_xor_si128( \
[all …]
/freebsd/sys/crypto/aesni/
aesni_ghash.c
117 tmp4 = _mm_xor_si128(tmp4, tmp5); in gfmul()
120 tmp3 = _mm_xor_si128(tmp3, tmp5); in gfmul()
121 tmp6 = _mm_xor_si128(tmp6, tmp4); in gfmul()
139 tmp7 = _mm_xor_si128(tmp7, tmp8); in gfmul()
140 tmp7 = _mm_xor_si128(tmp7, tmp9); in gfmul()
143 tmp3 = _mm_xor_si128(tmp3, tmp7); in gfmul()
148 tmp2 = _mm_xor_si128(tmp2, tmp4); in gfmul()
149 tmp2 = _mm_xor_si128(tmp2, tmp5); in gfmul()
150 tmp2 = _mm_xor_si128(tmp2, tmp8); in gfmul()
151 tmp3 = _mm_xor_si128(tmp3, tmp2); in gfmul()
[all …]
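
The gfmul() lines above (and the ghash_pclmul.c macro before them) use _mm_xor_si128 to fold PCLMULQDQ partial products for GHASH. For reference, a minimal sketch of the 128x128 -> 256-bit carry-less multiply that such XOR chains combine, assuming SSE2 and PCLMUL support (clmul_128 is a hypothetical helper, not code from either file):

#include <emmintrin.h>   /* _mm_xor_si128, byte shifts */
#include <wmmintrin.h>   /* _mm_clmulepi64_si128; compile with -mpclmul */

/* Sketch: carry-less multiply of two 128-bit values into a 256-bit
 * product (lo, hi), using the four-multiply schoolbook split. */
static void
clmul_128(__m128i a, __m128i b, __m128i *lo, __m128i *hi)
{
	__m128i p00 = _mm_clmulepi64_si128(a, b, 0x00);  /* a.lo * b.lo */
	__m128i p11 = _mm_clmulepi64_si128(a, b, 0x11);  /* a.hi * b.hi */
	__m128i mid = _mm_xor_si128(
	    _mm_clmulepi64_si128(a, b, 0x10),            /* a.lo * b.hi */
	    _mm_clmulepi64_si128(a, b, 0x01));           /* a.hi * b.lo */

	/* Fold the middle partial product into the two halves. */
	*lo = _mm_xor_si128(p00, _mm_slli_si128(mid, 8));
	*hi = _mm_xor_si128(p11, _mm_srli_si128(mid, 8));
}

The remaining XOR chains in gfmul() then reduce this 256-bit product modulo the GHASH polynomial x^128 + x^7 + x^2 + x + 1.
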
intel_sha1.c
114 msg0 = _mm_xor_si128(msg0, msg2); in intel_sha1_step()
124 msg1 = _mm_xor_si128(msg1, msg3); in intel_sha1_step()
132 msg2 = _mm_xor_si128(msg2, msg0); in intel_sha1_step()
140 msg3 = _mm_xor_si128(msg3, msg1); in intel_sha1_step()
148 msg0 = _mm_xor_si128(msg0, msg2); in intel_sha1_step()
156 msg1 = _mm_xor_si128(msg1, msg3); in intel_sha1_step()
164 msg2 = _mm_xor_si128(msg2, msg0); in intel_sha1_step()
172 msg3 = _mm_xor_si128(msg3, msg1); in intel_sha1_step()
180 msg0 = _mm_xor_si128(msg0, msg2); in intel_sha1_step()
188 msg1 = _mm_xor_si128(msg1, msg3); in intel_sha1_step()
[all …]
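
intel_sha1.c drives the SHA extensions; the explicit XORs above mix the block of words eight positions back into the schedule between _mm_sha1msg1_epu32 and _mm_sha1msg2_epu32. A hedged sketch of one four-word schedule update (sha1_sched is a hypothetical name; compile with -msha):

#include <immintrin.h>   /* SHA-NI intrinsics */

/* Sketch: compute the next four SHA-1 message words from the previous
 * sixteen (held as four __m128i), mirroring the msgN update pattern above. */
static __m128i
sha1_sched(__m128i w0, __m128i w1, __m128i w2, __m128i w3)
{
	__m128i t = _mm_sha1msg1_epu32(w0, w1);  /* partial W[t-16]^W[t-14] terms */
	t = _mm_xor_si128(t, w2);                /* the explicit XOR seen above   */
	return _mm_sha1msg2_epu32(t, w3);        /* finish and rotate by 1        */
}
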
/freebsd/sys/contrib/libsodium/src/libsodium/crypto_stream/salsa20/xmm6int/
u0.h
19 diag3 = _mm_xor_si128(diag3, a0);
21 diag3 = _mm_xor_si128(diag3, b0);
28 diag2 = _mm_xor_si128(diag2, a1);
30 diag2 = _mm_xor_si128(diag2, b1);
37 diag1 = _mm_xor_si128(diag1, a2);
39 diag1 = _mm_xor_si128(diag1, b2);
46 diag0 = _mm_xor_si128(diag0, a3);
48 diag0 = _mm_xor_si128(diag0, b3);
55 diag1 = _mm_xor_si128(diag1, a4);
57 diag1 = _mm_xor_si128(diag1, b4);
[all …]
u1.h
20 diag3 = _mm_xor_si128(diag3, a0);
22 diag3 = _mm_xor_si128(diag3, b0);
29 diag2 = _mm_xor_si128(diag2, a1);
31 diag2 = _mm_xor_si128(diag2, b1);
38 diag1 = _mm_xor_si128(diag1, a2);
40 diag1 = _mm_xor_si128(diag1, b2);
47 diag0 = _mm_xor_si128(diag0, a3);
49 diag0 = _mm_xor_si128(diag0, b3);
56 diag1 = _mm_xor_si128(diag1, a4);
58 diag1 = _mm_xor_si128(diag1, b4);
[all …]
u4.h
109 z4 = _mm_xor_si128(z4, y4);
111 z4 = _mm_xor_si128(z4, r4);
117 z9 = _mm_xor_si128(z9, y9);
119 z9 = _mm_xor_si128(z9, r9);
125 z8 = _mm_xor_si128(z8, y8);
127 z8 = _mm_xor_si128(z8, r8);
133 z13 = _mm_xor_si128(z13, y13);
135 z13 = _mm_xor_si128(z13, r13);
141 z12 = _mm_xor_si128(z12, y12);
143 z12 = _mm_xor_si128(z12, r12);
[all …]
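
In these Salsa20 headers each step is x ^= ROTL32(a + b, c), with the rotation split across two XORs (one with the left-shifted sum, one with the right-shifted sum), which is why every diag register above is XORed twice in a row. A minimal SSE2 sketch of one step with the fixed rotation count 7 (salsa_step7 is an illustrative name):

#include <emmintrin.h>   /* SSE2 */

/* Sketch: one Salsa20 column operation, x ^= ROTL32(a + b, 7),
 * with the rotate expressed as two XORs of shifted copies. */
static __m128i
salsa_step7(__m128i x, __m128i a, __m128i b)
{
	__m128i s = _mm_add_epi32(a, b);
	x = _mm_xor_si128(x, _mm_slli_epi32(s, 7));   /* low part of the rotate  */
	x = _mm_xor_si128(x, _mm_srli_epi32(s, 25));  /* high part (32 - 7)      */
	return x;
}
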
/freebsd/sys/contrib/libsodium/src/libsodium/crypto_stream/chacha20/dolbeau/
u1.h
20 x_3 = _mm_xor_si128(x_3, x_0);
24 x_1 = _mm_xor_si128(x_1, x_2);
29 x_1 = _mm_xor_si128(x_1, t_1);
32 x_3 = _mm_xor_si128(x_3, x_0);
38 x_1 = _mm_xor_si128(x_1, x_2);
44 x_1 = _mm_xor_si128(x_1, t_1);
47 x_3 = _mm_xor_si128(x_3, x_0);
51 x_1 = _mm_xor_si128(x_1, x_2);
56 x_1 = _mm_xor_si128(x_1, t_1);
59 x_3 = _mm_xor_si128(x_3, x_0);
[all …]
u0.h
19 x_3 = _mm_xor_si128(x_3, x_0);
23 x_1 = _mm_xor_si128(x_1, x_2);
28 x_1 = _mm_xor_si128(x_1, t_1);
31 x_3 = _mm_xor_si128(x_3, x_0);
37 x_1 = _mm_xor_si128(x_1, x_2);
43 x_1 = _mm_xor_si128(x_1, t_1);
46 x_3 = _mm_xor_si128(x_3, x_0);
50 x_1 = _mm_xor_si128(x_1, x_2);
55 x_1 = _mm_xor_si128(x_1, t_1);
58 x_3 = _mm_xor_si128(x_3, x_0);
[all …]
u4.h
9 t_##A = _mm_xor_si128(x_##D, x_##A); \
12 t_##C = _mm_xor_si128(x_##B, x_##C); \
15 t_##A = _mm_xor_si128(x_##D, x_##A); \
18 t_##C = _mm_xor_si128(x_##B, x_##C); \
140 t0 = _mm_xor_si128(x_##A, _mm_loadu_si128((__m128i*) (m + 0))); \
142 t1 = _mm_xor_si128(x_##B, _mm_loadu_si128((__m128i*) (m + 64))); \
144 t2 = _mm_xor_si128(x_##C, _mm_loadu_si128((__m128i*) (m + 128))); \
146 t3 = _mm_xor_si128(x_##D, _mm_loadu_si128((__m128i*) (m + 192))); \
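
These ChaCha20 headers implement the standard add/XOR/rotate quarter-round; the files use pshufb for the 16- and 8-bit rotations where SSSE3 is available, but a plain SSE2 sketch of the whole quarter-round looks like this (ROTL32 and QUARTERROUND here are illustrative macros, not the files' own):

#include <emmintrin.h>   /* SSE2 */

#define ROTL32(v, c) _mm_or_si128(_mm_slli_epi32((v), (c)), \
                                  _mm_srli_epi32((v), 32 - (c)))

/* Sketch: one ChaCha20 quarter-round over four row vectors. */
#define QUARTERROUND(a, b, c, d)               \
	do {                                   \
		(a) = _mm_add_epi32((a), (b)); \
		(d) = _mm_xor_si128((d), (a)); \
		(d) = ROTL32((d), 16);         \
		(c) = _mm_add_epi32((c), (d)); \
		(b) = _mm_xor_si128((b), (c)); \
		(b) = ROTL32((b), 12);         \
		(a) = _mm_add_epi32((a), (b)); \
		(d) = _mm_xor_si128((d), (a)); \
		(d) = ROTL32((d), 8);          \
		(c) = _mm_add_epi32((c), (d)); \
		(b) = _mm_xor_si128((b), (c)); \
		(b) = ROTL32((b), 7);          \
	} while (0)

The t0..t3 lines at the end of u4.h are the other use of the intrinsic here: XORing the finished keystream into the message.
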
/freebsd/contrib/bearssl/src/symcipher/
aes_x86ni.c
53 k = _mm_xor_si128(k, _mm_slli_si128(k, 4)); in expand_step128()
54 k = _mm_xor_si128(k, _mm_slli_si128(k, 4)); in expand_step128()
55 k = _mm_xor_si128(k, _mm_slli_si128(k, 4)); in expand_step128()
57 return _mm_xor_si128(k, k2); in expand_step128()
68 *t1 = _mm_xor_si128(*t1, t4); in expand_step192()
70 *t1 = _mm_xor_si128(*t1, t4); in expand_step192()
72 *t1 = _mm_xor_si128(*t1, t4); in expand_step192()
73 *t1 = _mm_xor_si128(*t1, *t2); in expand_step192()
76 *t3 = _mm_xor_si128(*t3, t4); in expand_step192()
77 *t3 = _mm_xor_si128(*t3, *t2); in expand_step192()
[all …]
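
In aes_x86ni.c, the three shift-and-XOR lines of expand_step128() compute the running XOR of the previous round key's four words before mixing in the AESKEYGENASSIST output. A sketch of the equivalent AES-128 key-schedule step (aes128_expand and AES128_EXPAND_R1 are hypothetical names; compile with -maes, and the round constant must be a compile-time constant):

#include <emmintrin.h>
#include <wmmintrin.h>   /* _mm_aeskeygenassist_si128 */

/* Sketch: one AES-128 key-schedule step.  'k' is the previous round key,
 * 'kg' the AESKEYGENASSIST result with its SubWord/RotWord word broadcast
 * to all lanes. */
static __m128i
aes128_expand(__m128i k, __m128i kg)
{
	k = _mm_xor_si128(k, _mm_slli_si128(k, 4));
	k = _mm_xor_si128(k, _mm_slli_si128(k, 4));
	k = _mm_xor_si128(k, _mm_slli_si128(k, 4));
	return _mm_xor_si128(k, kg);
}

/* Usage for round constant 0x01: */
#define AES128_EXPAND_R1(k) aes128_expand((k), \
	_mm_shuffle_epi32(_mm_aeskeygenassist_si128((k), 0x01), 0xFF))
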
chacha20_sse2.c
105 s3 = _mm_xor_si128(s3, s0); in br_chacha20_sse2_run()
111 s1 = _mm_xor_si128(s1, s2); in br_chacha20_sse2_run()
117 s3 = _mm_xor_si128(s3, s0); in br_chacha20_sse2_run()
123 s1 = _mm_xor_si128(s1, s2); in br_chacha20_sse2_run()
138 s3 = _mm_xor_si128(s3, s0); in br_chacha20_sse2_run()
144 s1 = _mm_xor_si128(s1, s2); in br_chacha20_sse2_run()
150 s3 = _mm_xor_si128(s3, s0); in br_chacha20_sse2_run()
156 s1 = _mm_xor_si128(s1, s2); in br_chacha20_sse2_run()
205 b0 = _mm_xor_si128(b0, s0); in br_chacha20_sse2_run()
206 b1 = _mm_xor_si128(b1, s1); in br_chacha20_sse2_run()
[all …]
aes_x86ni_ctrcbc.c
103 x0 = _mm_xor_si128(x0, sk[0]); in br_aes_x86ni_ctrcbc_ctr()
104 x1 = _mm_xor_si128(x1, sk[0]); in br_aes_x86ni_ctrcbc_ctr()
105 x2 = _mm_xor_si128(x2, sk[0]); in br_aes_x86ni_ctrcbc_ctr()
106 x3 = _mm_xor_si128(x3, sk[0]); in br_aes_x86ni_ctrcbc_ctr()
184 x0 = _mm_xor_si128(x0, in br_aes_x86ni_ctrcbc_ctr()
186 x1 = _mm_xor_si128(x1, in br_aes_x86ni_ctrcbc_ctr()
188 x2 = _mm_xor_si128(x2, in br_aes_x86ni_ctrcbc_ctr()
190 x3 = _mm_xor_si128(x3, in br_aes_x86ni_ctrcbc_ctr()
274 x = _mm_xor_si128(_mm_loadu_si128((void *)buf), ivx); in br_aes_x86ni_ctrcbc_mac()
275 x = _mm_xor_si128(x, sk[0]); in br_aes_x86ni_ctrcbc_mac()
[all …]
aes_x86ni_cbcdec.c
95 x0 = _mm_xor_si128(x0, sk[0]); in br_aes_x86ni_cbcdec_run()
96 x1 = _mm_xor_si128(x1, sk[0]); in br_aes_x86ni_cbcdec_run()
97 x2 = _mm_xor_si128(x2, sk[0]); in br_aes_x86ni_cbcdec_run()
98 x3 = _mm_xor_si128(x3, sk[0]); in br_aes_x86ni_cbcdec_run()
175 x0 = _mm_xor_si128(x0, ivx); in br_aes_x86ni_cbcdec_run()
176 x1 = _mm_xor_si128(x1, e0); in br_aes_x86ni_cbcdec_run()
177 x2 = _mm_xor_si128(x2, e1); in br_aes_x86ni_cbcdec_run()
178 x3 = _mm_xor_si128(x3, e2); in br_aes_x86ni_cbcdec_run()
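
aes_x86ni_cbcdec.c undoes CBC chaining by XORing each decrypted block with the previous ciphertext block (ivx, e0, e1, e2 above), on top of the usual sk[0] whitening. A single-block sketch, assuming AES-128 with round keys already run through AESIMC for the equivalent inverse cipher (cbc_dec_block is a hypothetical name; compile with -maes):

#include <wmmintrin.h>   /* AES-NI */

/* Sketch: decrypt one CBC block and XOR in the previous ciphertext. */
static __m128i
cbc_dec_block(__m128i ct, __m128i prev_ct, const __m128i rk[11])
{
	__m128i x = _mm_xor_si128(ct, rk[0]);
	for (int i = 1; i < 10; i++)
		x = _mm_aesdec_si128(x, rk[i]);
	x = _mm_aesdeclast_si128(x, rk[10]);
	return _mm_xor_si128(x, prev_ct);
}
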
aes_x86ni_ctr.c
75 x0 = _mm_xor_si128(x0, sk[0]); in br_aes_x86ni_ctr_run()
76 x1 = _mm_xor_si128(x1, sk[0]); in br_aes_x86ni_ctr_run()
77 x2 = _mm_xor_si128(x2, sk[0]); in br_aes_x86ni_ctr_run()
78 x3 = _mm_xor_si128(x3, sk[0]); in br_aes_x86ni_ctr_run()
156 x0 = _mm_xor_si128(x0, in br_aes_x86ni_ctr_run()
158 x1 = _mm_xor_si128(x1, in br_aes_x86ni_ctr_run()
160 x2 = _mm_xor_si128(x2, in br_aes_x86ni_ctr_run()
162 x3 = _mm_xor_si128(x3, in br_aes_x86ni_ctr_run()
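
In aes_x86ni_ctr.c, _mm_xor_si128 serves two purposes: the initial AddRoundKey (x ^= sk[0]) and XORing the resulting keystream into the data. A single-block AES-128 CTR sketch (ctr_block is a hypothetical name; compile with -maes):

#include <wmmintrin.h>   /* AES-NI */

/* Sketch: encrypt one counter block with AES-128 and XOR the resulting
 * keystream into the data block (CTR mode). */
static __m128i
ctr_block(__m128i ctr, __m128i data, const __m128i rk[11])
{
	__m128i x = _mm_xor_si128(ctr, rk[0]);   /* initial AddRoundKey */
	for (int i = 1; i < 10; i++)
		x = _mm_aesenc_si128(x, rk[i]);
	x = _mm_aesenclast_si128(x, rk[10]);
	return _mm_xor_si128(data, x);           /* keystream XOR */
}
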
/freebsd/sys/contrib/libsodium/src/libsodium/crypto_aead/aes256gcm/aesni/
aead_aes256gcm_aesni.c
74 X0 = _mm_xor_si128(X0, X3); \ in aesni_key256_expand()
76 X0 = _mm_xor_si128(_mm_xor_si128(X0, X3), X1); \ in aesni_key256_expand()
83 X2 = _mm_xor_si128(X2, X3); \ in aesni_key256_expand()
85 X2 = _mm_xor_si128(_mm_xor_si128(X2, X3), X1); \ in aesni_key256_expand()
103 __m128i temp = _mm_xor_si128(nv, rkeys[0]); in aesni_encrypt1()
142 temp##a = _mm_xor_si128(nv##a, rkeys[0])
225 A = _mm_xor_si128(A, C); in addmul()
230 tmp10 = _mm_xor_si128(tmp4, tmp5); in addmul()
233 tmp15 = _mm_xor_si128(tmp3, tmp13); in addmul()
234 tmp17 = _mm_xor_si128(tmp6, tmp11); in addmul()
[all …]
/freebsd/sys/contrib/libsodium/src/libsodium/crypto_generichash/blake2b/ref/
blake2b-compress-sse41.c
62 row4l = _mm_xor_si128(LOADU(&blake2b_IV[4]), LOADU(&S->t[0])); in blake2b_compress_sse41()
63 row4h = _mm_xor_si128(LOADU(&blake2b_IV[6]), LOADU(&S->f[0])); in blake2b_compress_sse41()
76 row1l = _mm_xor_si128(row3l, row1l); in blake2b_compress_sse41()
77 row1h = _mm_xor_si128(row3h, row1h); in blake2b_compress_sse41()
78 STOREU(&S->h[0], _mm_xor_si128(LOADU(&S->h[0]), row1l)); in blake2b_compress_sse41()
79 STOREU(&S->h[2], _mm_xor_si128(LOADU(&S->h[2]), row1h)); in blake2b_compress_sse41()
80 row2l = _mm_xor_si128(row4l, row2l); in blake2b_compress_sse41()
81 row2h = _mm_xor_si128(row4h, row2h); in blake2b_compress_sse41()
82 STOREU(&S->h[4], _mm_xor_si128(LOADU(&S->h[4]), row2l)); in blake2b_compress_sse41()
83 STOREU(&S->h[6], _mm_xor_si128(LOADU(&S->h[6]), row2h)); in blake2b_compress_sse41()
blake2b-compress-ssse3.c
65 row4l = _mm_xor_si128(LOADU(&blake2b_IV[4]), LOADU(&S->t[0])); in blake2b_compress_ssse3()
66 row4h = _mm_xor_si128(LOADU(&blake2b_IV[6]), LOADU(&S->f[0])); in blake2b_compress_ssse3()
79 row1l = _mm_xor_si128(row3l, row1l); in blake2b_compress_ssse3()
80 row1h = _mm_xor_si128(row3h, row1h); in blake2b_compress_ssse3()
81 STOREU(&S->h[0], _mm_xor_si128(LOADU(&S->h[0]), row1l)); in blake2b_compress_ssse3()
82 STOREU(&S->h[2], _mm_xor_si128(LOADU(&S->h[2]), row1h)); in blake2b_compress_ssse3()
83 row2l = _mm_xor_si128(row4l, row2l); in blake2b_compress_ssse3()
84 row2h = _mm_xor_si128(row4h, row2h); in blake2b_compress_ssse3()
85 STOREU(&S->h[4], _mm_xor_si128(LOADU(&S->h[4]), row2l)); in blake2b_compress_ssse3()
86 STOREU(&S->h[6], _mm_xor_si128(LOADU(&S->h[6]), row2h)); in blake2b_compress_ssse3()
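
Both blake2b-compress files end with the BLAKE2 feed-forward: the chained state is XORed with the two halves of the working vector. The STOREU/_mm_xor_si128 lines above are the vectorized form of the scalar update below (sketch only):

#include <stdint.h>

/* Sketch: BLAKE2b feed-forward in scalar form. */
static void
blake2b_feedforward(uint64_t h[8], const uint64_t v[16])
{
	for (int i = 0; i < 8; i++)
		h[i] ^= v[i] ^ v[i + 8];
}
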
blake2b-compress-sse41.h
16 ? _mm_xor_si128(_mm_srli_epi64((x), -(c)), \
18 : _mm_xor_si128(_mm_srli_epi64((x), -(c)), \
25 row4l = _mm_xor_si128(row4l, row1l); \
26 row4h = _mm_xor_si128(row4h, row1h); \
34 row2l = _mm_xor_si128(row2l, row3l); \
35 row2h = _mm_xor_si128(row2h, row3h); \
44 row4l = _mm_xor_si128(row4l, row1l); \
45 row4h = _mm_xor_si128(row4h, row1h); \
53 row2l = _mm_xor_si128(row2l, row3l); \
54 row2h = _mm_xor_si128(row2h, row3h); \
blake2b-compress-ssse3.h
16 ? _mm_xor_si128(_mm_srli_epi64((x), -(c)), \
18 : _mm_xor_si128(_mm_srli_epi64((x), -(c)), \
25 row4l = _mm_xor_si128(row4l, row1l); \
26 row4h = _mm_xor_si128(row4h, row1h); \
34 row2l = _mm_xor_si128(row2l, row3l); \
35 row2h = _mm_xor_si128(row2h, row3h); \
44 row4l = _mm_xor_si128(row4l, row1l); \
45 row4h = _mm_xor_si128(row4h, row1h); \
53 row2l = _mm_xor_si128(row2l, row3l); \
54 row2h = _mm_xor_si128(row2h, row3h); \
/freebsd/sys/contrib/libsodium/src/libsodium/crypto_pwhash/argon2/
blamka-round-ssse3.h
19 ? _mm_xor_si128(_mm_srli_epi64((x), -(c)), \
21 : _mm_xor_si128(_mm_srli_epi64((x), -(c)), \
36 D0 = _mm_xor_si128(D0, A0); \
37 D1 = _mm_xor_si128(D1, A1); \
45 B0 = _mm_xor_si128(B0, C0); \
46 B1 = _mm_xor_si128(B1, C1); \
57 D0 = _mm_xor_si128(D0, A0); \
58 D1 = _mm_xor_si128(D1, A1); \
66 B0 = _mm_xor_si128(B0, C0); \
67 B1 = _mm_xor_si128(B1, C1); \
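
blamka-round-ssse3.h combines BLAKE2-style XOR/rotate mixing with Argon2's fBlaMka primitive, which replaces plain addition with x + y + 2 * lo32(x) * lo32(y). A per-lane sketch of that primitive, written from the Argon2 specification rather than copied from the file (fblamka is an illustrative name):

#include <emmintrin.h>   /* SSE2 */

/* Sketch: fBlaMka per 64-bit lane; _mm_mul_epu32 multiplies the low
 * 32 bits of each lane, and z + z supplies the factor of two. */
static __m128i
fblamka(__m128i x, __m128i y)
{
	const __m128i z = _mm_mul_epu32(x, y);
	return _mm_add_epi64(_mm_add_epi64(x, y), _mm_add_epi64(z, z));
}
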
argon2-fill-block-ssse3.c
45 block_XY[i] = state[i] = _mm_xor_si128( in fill_block()
62 state[i] = _mm_xor_si128(state[i], block_XY[i]); in fill_block()
75 state[i] = _mm_xor_si128( in fill_block_with_xor()
77 block_XY[i] = _mm_xor_si128( in fill_block_with_xor()
94 state[i] = _mm_xor_si128(state[i], block_XY[i]); in fill_block_with_xor()
/freebsd/sys/contrib/libsodium/src/libsodium/crypto_pwhash/scryptsalsa208sha256/sse/
pwhash_scryptsalsa208sha256_sse.c
55 out = _mm_xor_si128(out, _mm_roti_epi32(_mm_add_epi32(in1, in2), s));
60 out = _mm_xor_si128(out, _mm_slli_epi32(T, s)); \
61 out = _mm_xor_si128(out, _mm_srli_epi32(T, 32 - s)); \
93 __m128i Y0 = X0 = _mm_xor_si128(X0, (in)[0]); \
94 __m128i Y1 = X1 = _mm_xor_si128(X1, (in)[1]); \
95 __m128i Y2 = X2 = _mm_xor_si128(X2, (in)[2]); \
96 __m128i Y3 = X3 = _mm_xor_si128(X3, (in)[3]); \
151 X0 = _mm_xor_si128(X0, (in)[0]); \
152 X1 = _mm_xor_si128(X1, (in)[1]); \
153 X2 = _mm_xor_si128(X2, (in)[2]); \
[all …]
/freebsd/sys/contrib/libb2/
blake2b-round.h
36 : (-(c) == 63) ? _mm_xor_si128(_mm_srli_epi64((x), -(c)), _mm_add_epi64((x), (x))) \
37 : _mm_xor_si128(_mm_srli_epi64((x), -(c)), _mm_slli_epi64((x), 64-(-(c))))
39 #define _mm_roti_epi64(r, c) _mm_xor_si128(_mm_srli_epi64( (r), -(c) ),_mm_slli_epi64( (r), 64-(-(c…
51 row4l = _mm_xor_si128(row4l, row1l); \
52 row4h = _mm_xor_si128(row4h, row1h); \
60 row2l = _mm_xor_si128(row2l, row3l); \
61 row2h = _mm_xor_si128(row2h, row3h); \
70 row4l = _mm_xor_si128(row4l, row1l); \
71 row4h = _mm_xor_si128(row4h, row1h); \
79 row2l = _mm_xor_si128(row2l, row3l); \
[all …]
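
libb2's blake2b-round.h emulates XOP's _mm_roti_epi64 with XORs of shifted copies, plus special cases (byte shuffles for some counts, an add for the rotate-by-63 case since x + x == x << 1). The generic fallback and that special case reduce to the following (ROTR64 and ROTR64_63 are illustrative macro names, SSE2 only):

#include <emmintrin.h>   /* SSE2 */

/* Sketch: 64-bit rotate-right by a constant, built from two shifts and an XOR. */
#define ROTR64(x, c) _mm_xor_si128(_mm_srli_epi64((x), (c)), \
                                   _mm_slli_epi64((x), 64 - (c)))

/* Sketch: rotate-right by 63, using x + x in place of x << 1. */
#define ROTR64_63(x) _mm_xor_si128(_mm_srli_epi64((x), 63), \
                                   _mm_add_epi64((x), (x)))
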
blake2s-round.h
35 : _mm_xor_si128(_mm_srli_epi32( (r), -(c) ),_mm_slli_epi32( (r), 32-(-(c)) )) )
37 #define _mm_roti_epi32(r, c) _mm_xor_si128(_mm_srli_epi32( (r), -(c) ),_mm_slli_epi32( (r), 32-(-(c…
46 row4 = _mm_xor_si128( row4, row1 ); \
49 row2 = _mm_xor_si128( row2, row3 ); \
54 row4 = _mm_xor_si128( row4, row1 ); \
57 row2 = _mm_xor_si128( row2, row3 ); \
blake2b.c
332 row4l = _mm_xor_si128( LOADU( &blake2b_IV[4] ), LOADU( &S->t[0] ) ); in blake2b_compress()
333 row4h = _mm_xor_si128( LOADU( &blake2b_IV[6] ), LOADU( &S->f[0] ) ); in blake2b_compress()
346 row1l = _mm_xor_si128( row3l, row1l ); in blake2b_compress()
347 row1h = _mm_xor_si128( row3h, row1h ); in blake2b_compress()
348 STOREU( &S->h[0], _mm_xor_si128( LOADU( &S->h[0] ), row1l ) ); in blake2b_compress()
349 STOREU( &S->h[2], _mm_xor_si128( LOADU( &S->h[2] ), row1h ) ); in blake2b_compress()
350 row2l = _mm_xor_si128( row4l, row2l ); in blake2b_compress()
351 row2h = _mm_xor_si128( row4h, row2h ); in blake2b_compress()
352 STOREU( &S->h[4], _mm_xor_si128( LOADU( &S->h[4] ), row2l ) ); in blake2b_compress()
353 STOREU( &S->h[6], _mm_xor_si128( LOADU( &S->h[6] ), row2h ) ); in blake2b_compress()
