Lines matching full:t4 (whole-word), grouped by enclosing function:
In expand_step192():

    __m128i t4;                            /* local declaration */
    /* ...non-matching lines elided... */
    t4 = _mm_slli_si128(*t1, 0x4);
    *t1 = _mm_xor_si128(*t1, t4);
    t4 = _mm_slli_si128(t4, 0x4);
    *t1 = _mm_xor_si128(*t1, t4);
    t4 = _mm_slli_si128(t4, 0x4);
    *t1 = _mm_xor_si128(*t1, t4);
    /* ...non-matching lines elided... */
    t4 = _mm_slli_si128(*t3, 0x4);
    *t3 = _mm_xor_si128(*t3, t4);
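Within expand_step192(), the three _mm_slli_si128/_mm_xor_si128 pairs applied to *t1 compute a running (prefix) XOR across the register's four 32-bit words: after the chain, word i holds w0 ^ w1 ^ ... ^ wi, which is exactly the serial word dependency of the AES key schedule, done with byte shifts instead of a loop. A minimal self-contained check of that claim (illustrative only, not from the matched file; build with e.g. cc -msse2):

    #include <emmintrin.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    int
    main(void)
    {
        uint32_t w[4] = { 0x11111111, 0x22222222, 0x44444444, 0x88888888 };
        uint32_t out[4], ref[4];
        __m128i t1, t4;
        int i;

        t1 = _mm_loadu_si128((const __m128i *)w);
        t4 = _mm_slli_si128(t1, 0x4);      /* shift a zero word into lane 0 */
        t1 = _mm_xor_si128(t1, t4);
        t4 = _mm_slli_si128(t4, 0x4);
        t1 = _mm_xor_si128(t1, t4);
        t4 = _mm_slli_si128(t4, 0x4);
        t1 = _mm_xor_si128(t1, t4);
        _mm_storeu_si128((__m128i *)out, t1);

        ref[0] = w[0];                     /* scalar prefix-XOR reference */
        for (i = 1; i < 4; i++)
            ref[i] = ref[i - 1] ^ w[i];
        printf("%s\n", memcmp(out, ref, sizeof ref) == 0 ? "match" : "MISMATCH");
        return 0;
    }

The same three-step chain reappears below in both 256-bit helpers, applied to *t1 or *t3.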
In expand_step256_1():

    __m128i t4;                            /* local declaration */
    /* ...non-matching lines elided... */
    t4 = _mm_slli_si128(*t1, 0x4);
    *t1 = _mm_xor_si128(*t1, t4);
    t4 = _mm_slli_si128(t4, 0x4);
    *t1 = _mm_xor_si128(*t1, t4);
    t4 = _mm_slli_si128(t4, 0x4);
    *t1 = _mm_xor_si128(*t1, t4);
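The matches for expand_step256_1() show only the t4 chain on *t1; the elided lines presumably broadcast the aeskeygenassist result (lane 3 carries RotWord(SubWord(w3)) ^ rcon) and fold it in at the end, as in Intel's published AES-256 key-expansion helpers. A hedged reconstruction under that assumption (the shuffle and the final XOR are inferred, not among the matches):

    static inline void
    expand_step256_1(__m128i *t1, __m128i *t2)
    {
        __m128i t4;

        /* assumed: broadcast lane 3 of _mm_aeskeygenassist_si128's
         * output, i.e. RotWord(SubWord(w3)) ^ rcon, to all lanes */
        *t2 = _mm_shuffle_epi32(*t2, 0xFF);
        t4 = _mm_slli_si128(*t1, 0x4);     /* prefix-XOR chain, as matched */
        *t1 = _mm_xor_si128(*t1, t4);
        t4 = _mm_slli_si128(t4, 0x4);
        *t1 = _mm_xor_si128(*t1, t4);
        t4 = _mm_slli_si128(t4, 0x4);
        *t1 = _mm_xor_si128(*t1, t4);
        *t1 = _mm_xor_si128(*t1, *t2);     /* assumed final mix */
    }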
In expand_step256_2():

    __m128i t2, t4;                        /* local declarations */
    /* ...non-matching line elided... */
    t4 = _mm_aeskeygenassist_si128(*t1, 0x0);
    t2 = _mm_shuffle_epi32(t4, 0xAA);
    t4 = _mm_slli_si128(*t3, 0x4);
    *t3 = _mm_xor_si128(*t3, t4);
    t4 = _mm_slli_si128(t4, 0x4);
    *t3 = _mm_xor_si128(*t3, t4);
    t4 = _mm_slli_si128(t4, 0x4);
    *t3 = _mm_xor_si128(*t3, t4);
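expand_step256_2() differs in that it generates its own assist word with rcon 0x0: the even half of each AES-256 round pair needs SubWord only, with no rotation and no round constant. _mm_aeskeygenassist_si128(*t1, 0x0) leaves SubWord(word 3 of *t1) in lane 2, and the 0xAA shuffle broadcasts that lane into t2; the match list likely stops one line short of a closing *t3 ^= t2. For context, a hypothetical driver (illustrative sketch only, assuming the helper signatures above and that closing XOR; expand256_sketch is an invented name; build with e.g. cc -msse2 -maes) would interleave the two helpers per round pair:

    #include <wmmintrin.h>    /* _mm_aeskeygenassist_si128 */

    /* Expand a 256-bit key into the first six round keys. */
    static void
    expand256_sketch(const unsigned char *key, __m128i *rk)
    {
        __m128i t1, t2, t3;

        t1 = _mm_loadu_si128((const __m128i *)key);
        t3 = _mm_loadu_si128((const __m128i *)(key + 16));
        rk[0] = t1;
        rk[1] = t3;

        t2 = _mm_aeskeygenassist_si128(t3, 0x01);   /* rcon, round pair 1 */
        expand_step256_1(&t1, &t2);
        rk[2] = t1;
        expand_step256_2(&t1, &t3);
        rk[3] = t3;

        t2 = _mm_aeskeygenassist_si128(t3, 0x02);   /* rcon, round pair 2 */
        expand_step256_1(&t1, &t2);
        rk[4] = t1;
        expand_step256_2(&t1, &t3);
        rk[5] = t3;
        /* ...continue with rcon 0x04, 0x08, 0x10, 0x20; the last round
         * key (rk[14]) needs only expand_step256_1 with rcon 0x40. */
    }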