Lines Matching +full:4 +full:x2 (full-text search for lines containing both the tokens "4" and "x2"; every hit below falls inside br_aes_x86ni_ctr_run())
65 sk[u] = _mm_loadu_si128((void *)(ctx->skey.skni + (u << 4))); in br_aes_x86ni_ctr_run()
69 __m128i x0, x1, x2, x3; in br_aes_x86ni_ctr_run() local
73 x2 = _mm_insert_epi32(ivx, br_bswap32(cc + 2), 3); in br_aes_x86ni_ctr_run()
77 x2 = _mm_xor_si128(x2, sk[0]); in br_aes_x86ni_ctr_run()
81 x2 = _mm_aesenc_si128(x2, sk[1]); in br_aes_x86ni_ctr_run()
85 x2 = _mm_aesenc_si128(x2, sk[2]); in br_aes_x86ni_ctr_run()
89 x2 = _mm_aesenc_si128(x2, sk[3]); in br_aes_x86ni_ctr_run()
91 x0 = _mm_aesenc_si128(x0, sk[4]); in br_aes_x86ni_ctr_run()
92 x1 = _mm_aesenc_si128(x1, sk[4]); in br_aes_x86ni_ctr_run()
93 x2 = _mm_aesenc_si128(x2, sk[4]); in br_aes_x86ni_ctr_run()
94 x3 = _mm_aesenc_si128(x3, sk[4]); in br_aes_x86ni_ctr_run()
97 x2 = _mm_aesenc_si128(x2, sk[5]); in br_aes_x86ni_ctr_run()
101 x2 = _mm_aesenc_si128(x2, sk[6]); in br_aes_x86ni_ctr_run()
105 x2 = _mm_aesenc_si128(x2, sk[7]); in br_aes_x86ni_ctr_run()
109 x2 = _mm_aesenc_si128(x2, sk[8]); in br_aes_x86ni_ctr_run()
113 x2 = _mm_aesenc_si128(x2, sk[9]); in br_aes_x86ni_ctr_run()
118 x2 = _mm_aesenclast_si128(x2, sk[10]); in br_aes_x86ni_ctr_run()
123 x2 = _mm_aesenc_si128(x2, sk[10]); in br_aes_x86ni_ctr_run()
127 x2 = _mm_aesenc_si128(x2, sk[11]); in br_aes_x86ni_ctr_run()
131 x2 = _mm_aesenclast_si128(x2, sk[12]); in br_aes_x86ni_ctr_run()
136 x2 = _mm_aesenc_si128(x2, sk[10]); in br_aes_x86ni_ctr_run()
140 x2 = _mm_aesenc_si128(x2, sk[11]); in br_aes_x86ni_ctr_run()
144 x2 = _mm_aesenc_si128(x2, sk[12]); in br_aes_x86ni_ctr_run()
148 x2 = _mm_aesenc_si128(x2, sk[13]); in br_aes_x86ni_ctr_run()
152 x2 = _mm_aesenclast_si128(x2, sk[14]); in br_aes_x86ni_ctr_run()
160 x2 = _mm_xor_si128(x2, in br_aes_x86ni_ctr_run()
166 _mm_storeu_si128((void *)(buf + 32), x2); in br_aes_x86ni_ctr_run()
170 cc += 4; in br_aes_x86ni_ctr_run()
176 _mm_storeu_si128((void *)(tmp + 32), x2); in br_aes_x86ni_ctr_run()
181 cc += (uint32_t)len >> 4; in br_aes_x86ni_ctr_run()
194 4,
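All of the hits above come from br_aes_x86ni_ctr_run(), BearSSL's AES-NI CTR-mode routine, which is why "4" and "x2" keep co-occurring: the main loop keystreams four counter blocks (x0..x3) per iteration, advances the counter by 4 (line 170), and x2 is the third of those blocks (buffer offset +32). Each block gets the 32-bit counter inserted big-endian into the top lane of the IV, is XORed with round key sk[0], passes through _mm_aesenc_si128 for the middle rounds (9, 11 or 13 of them depending on key size, hence the three branches ending with sk[10], sk[12] and sk[14]), finishes with _mm_aesenclast_si128, and is finally XORed into the data buffer.

The sketch below is not the BearSSL source; it is a minimal reconstruction of that structure, assuming AES-128 (round keys sk[0..10] already expanded) and exactly one full 64-byte chunk. The helper name ctr4_aes128_chunk and the local bswap32() are made up for illustration and stand in for BearSSL's internals (br_bswap32, the ctx->skey.skni schedule, the partial-chunk path via tmp[64] seen at lines 176/181). Build with -msse4.1 -maes or equivalent.

/* Hypothetical 4-way AES-128 CTR chunk, mirroring the matched lines. */
#include <stdint.h>
#include <immintrin.h>

static inline uint32_t
bswap32(uint32_t x)
{
	return (x << 24)
		| ((x & 0x0000FF00) << 8)
		| ((x >> 8) & 0x0000FF00)
		| (x >> 24);
}

static void
ctr4_aes128_chunk(const __m128i sk[11], __m128i ivx,
	uint32_t cc, unsigned char *buf)
{
	__m128i x0, x1, x2, x3;
	int i;

	/*
	 * Four counter blocks: the big-endian counter goes into the
	 * top 32-bit lane of the IV (line 73 for x2).
	 */
	x0 = _mm_insert_epi32(ivx, (int)bswap32(cc + 0), 3);
	x1 = _mm_insert_epi32(ivx, (int)bswap32(cc + 1), 3);
	x2 = _mm_insert_epi32(ivx, (int)bswap32(cc + 2), 3);
	x3 = _mm_insert_epi32(ivx, (int)bswap32(cc + 3), 3);

	/* Initial AddRoundKey (line 77 for x2). */
	x0 = _mm_xor_si128(x0, sk[0]);
	x1 = _mm_xor_si128(x1, sk[0]);
	x2 = _mm_xor_si128(x2, sk[0]);
	x3 = _mm_xor_si128(x3, sk[0]);

	/*
	 * Nine middle rounds for AES-128; the real code unrolls these
	 * and also handles the 11- and 13-round AES-192/256 cases.
	 */
	for (i = 1; i <= 9; i ++) {
		x0 = _mm_aesenc_si128(x0, sk[i]);
		x1 = _mm_aesenc_si128(x1, sk[i]);
		x2 = _mm_aesenc_si128(x2, sk[i]);
		x3 = _mm_aesenc_si128(x3, sk[i]);
	}

	/* Final round (line 118 for x2 in the 10-round case). */
	x0 = _mm_aesenclast_si128(x0, sk[10]);
	x1 = _mm_aesenclast_si128(x1, sk[10]);
	x2 = _mm_aesenclast_si128(x2, sk[10]);
	x3 = _mm_aesenclast_si128(x3, sk[10]);

	/* XOR the keystream into the data (lines 160/166 for x2). */
	x0 = _mm_xor_si128(x0, _mm_loadu_si128((void *)(buf +  0)));
	x1 = _mm_xor_si128(x1, _mm_loadu_si128((void *)(buf + 16)));
	x2 = _mm_xor_si128(x2, _mm_loadu_si128((void *)(buf + 32)));
	x3 = _mm_xor_si128(x3, _mm_loadu_si128((void *)(buf + 48)));
	_mm_storeu_si128((void *)(buf +  0), x0);
	_mm_storeu_si128((void *)(buf + 16), x1);
	_mm_storeu_si128((void *)(buf + 32), x2);
	_mm_storeu_si128((void *)(buf + 48), x3);
}

Interleaving four independent blocks keeps the AESENC pipeline busy, which is why the real function processes 64 bytes per iteration and only falls back to a tmp[64] buffer (line 176) and a byte-wise XOR for a final partial chunk, advancing cc by len >> 4 (line 181).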