/freebsd/contrib/bearssl/src/symcipher/

aes_x86ni.c
      64  __m128i t4;                        in expand_step192() local
      67  t4 = _mm_slli_si128(*t1, 0x4);     in expand_step192()
      68  *t1 = _mm_xor_si128(*t1, t4);      in expand_step192()
      69  t4 = _mm_slli_si128(t4, 0x4);      in expand_step192()
      70  *t1 = _mm_xor_si128(*t1, t4);      in expand_step192()
      71  t4 = _mm_slli_si128(t4, 0x4);      in expand_step192()
      72  *t1 = _mm_xor_si128(*t1, t4);      in expand_step192()
      75  t4 = _mm_slli_si128(*t3, 0x4);     in expand_step192()
      76  *t3 = _mm_xor_si128(*t3, t4);      in expand_step192()
      84  __m128i t4;                        in expand_step256_1() local
          [all …]
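The aes_x86ni.c hits above show the usual AES-NI key-expansion idiom: three _mm_slli_si128 / _mm_xor_si128 pairs turn the four 32-bit lanes of the key register into their running XOR before the RotWord/SubWord word is folded in. A minimal scalar sketch of that lane arithmetic (an illustration, not the BearSSL code itself):

#include <stdint.h>
#include <stdio.h>

/*
 * Illustrative scalar equivalent of the slli/xor pattern in
 * expand_step192(): three shift-and-XOR steps leave each 32-bit lane
 * holding the XOR of itself and all lower lanes (a prefix XOR), which
 * is the running combination the AES key schedule needs.
 */
static void
prefix_xor_lanes(uint32_t w[4])
{
	w[1] ^= w[0];
	w[2] ^= w[1];
	w[3] ^= w[2];
}

int
main(void)
{
	uint32_t w[4] = { 0x1, 0x2, 0x4, 0x8 };

	prefix_xor_lanes(w);
	printf("%08x %08x %08x %08x\n", w[0], w[1], w[2], w[3]);
	/* prints: 00000001 00000003 00000007 0000000f */
	return (0);
}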
/freebsd/crypto/openssl/crypto/bn/asm/

ppc64-mont.pl
     143  $t4="r28";
     299  extrdi $t4,$t7,16,48
     303  std $t4,`$FRAME+32`($sp)
     312  lwz $t4,`4^$LITTLE_ENDIAN`($np)     ; load n[0] as 32-bit word pair
     327  mullw $t4,$a0,$t1                   ; mulld ap[0]*bp[0]
     343  mullw $t0,$t4,$n0                   ; mulld tp[0]*n0
     344  mulhwu $t1,$t4,$n0
     346  mullw $t3,$t4,$n1
     350  extrwi $t4,$t0,16,16
     354  std $t4,`$FRAME+32`($sp)            ; yes, std in 32-bit build
          [all …]
/freebsd/crypto/openssl/crypto/ec/asm/

ecp_sm2p256-armv8.pl
      26  my ($t4,$t5,$t6,$t7,$t8)=map("x$_",(15..17,19,20));
      42  adc $t4,xzr,xzr
      61  sbcs $t4,$t4,xzr
      89  sbc $t4,xzr,xzr
     108  tst $t4,$t4
     272  adcs $t4,xzr,xzr
     288  sbcs $t4,$t4,xzr
     294  eor $t4,$t4,$t4
     303  adcs $t4,xzr,xzr
     319  sbcs $t4,$t4,xzr
          [all …]

ecp_nistp521-ppc64.pl
     150  my ($t1, $t2, $t3, $t4) = ("v33", "v34", "v44", "v54");
     189  xxpermdi $t4,$in2[1],$in2[0],0b00
     190  vmsumudm $out[3],$t3,$t4,$out[3]
     194  xxpermdi $t4,$in2[2],$in2[1],0b00
     195  vmsumudm $out[4],$t3,$t4,$out[4]
     200  xxpermdi $t4,$in2[3],$in2[2],0b00
     201  vmsumudm $out[5],$t3,$t4,$out[5]
     205  xxpermdi $t4,$in2[4],$in2[3],0b00
     206  vmsumudm $out[6],$t3,$t4,$out[6]
     210  xxpermdi $t4,$in2[5],$in2[4],0b00
          [all …]

ecp_nistz256-x86_64.pl
     119  my ($t0,$t1,$t2,$t3,$t4)=("%rax","%rdx","%rcx","%r12","%r13");
     136  xor $t4,$t4
     147  adc \$0, $t4
     155  sbb \$0, $t4
     198  xor $t4, $t4
     205  adc \$0, $t4
     213  cmovz $a_ptr, $t4
     227  shl \$63, $t4
     229  or $t4, $a3
     261  xor $t4, $t4
          [all …]

ecp_nistz256-armv8.pl
    1370  my ($ord2,$ord3,$ordk,$t4) = map("x$_",(21..24));
    1411  mul $t4,$acc0,$ordk
    1434  lsl $t0,$t4,#32
    1435  subs $acc2,$acc2,$t4
    1436  lsr $t1,$t4,#32
    1442  umulh $t1,$ord0,$t4
    1443  mul $t2,$ord1,$t4
    1444  umulh $t3,$ord1,$t4
    1455  adcs $acc2,$acc3,$t4
    1456  adcs $acc3,$acc4,$t4
          [all …]

ecp_nistz256-sparcv9.pl
     101  my ($t0,$t1,$t2,$t3,$t4,$t5,$t6,$t7)=(map("%o$_",(0..5)),"%g4","%g5");
     193  ld [$ap+16],$t4
     201  mulx $t4,$bi,$t4
     209  srlx $t4,32,@acc[5]
     224  addccc @acc[4],$t4,@acc[4]
     226  ld [$ap+16],$t4
     286  mulx $t4,$bi,$t4
     297  add @acc[4],$t4,$t4
     300  srlx $t4,32,@acc[5]
     312  addccc @acc[4],$t4,@acc[4]
          [all …]

ecp_nistz256-ppc64.pl
    1733  my ($ordk,$ord0,$ord1,$t4) = map("r$_",(18..21));
    1796  mulld $t4,$acc0,$ordk
    1819  sldi $t0,$t4,32
    1820  subfc $acc2,$t4,$acc2
    1821  srdi $t1,$t4,32
    1827  mulhdu $t1,$ord0,$t4
    1828  mulld $t2,$ord1,$t4
    1829  mulhdu $t3,$ord1,$t4
    1840  adde $acc2,$acc3,$t4
    1841  adde $acc3,$acc4,$t4
          [all …]
/freebsd/contrib/bearssl/src/ec/

ec_p256_m64.c
     263  uint64_t x, f, t0, t1, t2, t3, t4;                    in f256_montymul() local
     295  t4 = (uint64_t)(z >> 64);                             in f256_montymul()
     312  z = t4 + (z >> 64);                                   in f256_montymul()
     314  t4 = (uint64_t)(z >> 64);                             in f256_montymul()
     329  t4 += (uint64_t)(z >> 64);                            in f256_montymul()
     350  z = (unsigned __int128)t0 + t4;                       in f256_montymul()
     352  z = (unsigned __int128)t1 - (t4 << 32) + (z >> 64);   in f256_montymul()
     356  t3 = t3 - (uint64_t)(z >> 127) - t4 + (t4 << 32);     in f256_montymul()
     365  uint64_t x, f, t0, t1, t2, t3, t4;                    in f256_montymul()
     413  t4 = _addcarry_u64(0, t3, f, &t3);                    in f256_montymul()
          [all …]
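The f256_montymul() lines above belong to a 4x64-bit Montgomery multiplication in the P-256 field, with t4 holding the high (fifth) limb of intermediate results. A rough sketch of the underlying REDC idea at toy size (32-bit modulus, R = 2^32; the modulus and driver values are hypothetical demo inputs, not P-256 constants):

#include <stdint.h>
#include <stdio.h>

/* -p^-1 mod 2^32 by Newton iteration; p must be odd. */
static uint32_t
ninv32(uint32_t p)
{
	uint32_t x = p;		/* p*p == 1 (mod 8): correct to 3 bits */

	x *= 2 - p * x;		/* 6 bits */
	x *= 2 - p * x;		/* 12 bits */
	x *= 2 - p * x;		/* 24 bits */
	x *= 2 - p * x;		/* 48 >= 32 bits */
	return (0U - x);
}

/* REDC: returns x / 2^32 mod p, assuming x < p * 2^32 and p < 2^31. */
static uint32_t
redc32(uint64_t x, uint32_t p, uint32_t n0)
{
	uint32_t m = (uint32_t)x * n0;	/* makes x + m*p divisible by 2^32 */
	uint64_t t = (x + (uint64_t)m * p) >> 32;

	return (t >= p ? (uint32_t)(t - p) : (uint32_t)t);
}

int
main(void)
{
	uint32_t p = 0x7FFFFFFFu;	/* demo modulus (2^31 - 1) */
	uint32_t n0 = ninv32(p);
	uint32_t R2 = (uint32_t)((((1ULL << 32) % p) * ((1ULL << 32) % p)) % p);
	uint32_t a = 123456789, b = 987654321;
	uint32_t am = redc32((uint64_t)a * R2, p, n0);	/* a*R mod p */
	uint32_t bm = redc32((uint64_t)b * R2, p, n0);	/* b*R mod p */
	uint32_t cm = redc32((uint64_t)am * bm, p, n0);	/* a*b*R mod p */
	uint32_t c = redc32(cm, p, n0);			/* a*b mod p */

	printf("%u (expect %u)\n", c, (uint32_t)((uint64_t)a * b % p));
	return (0);
}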
ec_c25519_m64.c
     174  uint64_t t0, t1, t2, t3, t4, cc;                 in f255_sub() local
     191  t4 = 1 + (uint64_t)(z >> 64);                    in f255_sub()
     199  cc = (38 & -t4) + (19 & -(t3 >> 63));            in f255_sub()
     218  uint64_t t0, t1, t2, t3, t4;                     in f255_sub()
     225  (void)_subborrow_u64(k, 1, 0, &t4);              in f255_sub()
     231  (void)_subborrow_u64(k, t4, 0, &t4);             in f255_sub()
     239  t4 = (38 & -t4) + (19 & -(t3 >> 63));            in f255_sub()
     241  k = _addcarry_u64(0, t0, t4, &d[0]);             in f255_sub()
     258  uint64_t t0, t1, t2, t3, t4, t5, t6, t7, th;     in f255_mul() local
     271  t4 = (uint64_t)(z >> 64);                        in f255_mul()
          [all …]
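The "(38 & -t4) + (19 & -(t3 >> 63))" lines rely on the identities 2^255 == 19 and 2^256 == 38 (mod 2^255 - 19): anything that overflows past bit 255 can be folded back in as a small multiple of 19. A minimal sketch of that partial reduction, assuming four little-endian 64-bit limbs d[0..3] and a hypothetical carry word carry256 (expected to be 0 or 1):

#include <stdint.h>

/*
 * Partial reduction modulo p = 2^255 - 19 (illustration only).  The
 * value is carry256 * 2^256 + d, with d held in four 64-bit limbs.
 * Since 2^256 == 38 and 2^255 == 19 (mod p), both the external carry
 * and bit 255 of d fold back into the low limb as multiples of 19.
 */
static void
fold_carry_25519(uint64_t d[4], uint64_t carry256)
{
	unsigned __int128 z;
	uint64_t top = d[3] >> 63;		/* bit 255 of the value */
	uint64_t extra = 38 * carry256 + 19 * top;

	d[3] &= 0x7FFFFFFFFFFFFFFFull;		/* clear bit 255 */
	z = (unsigned __int128)d[0] + extra;
	d[0] = (uint64_t)z;
	z = (unsigned __int128)d[1] + (uint64_t)(z >> 64);
	d[1] = (uint64_t)z;
	z = (unsigned __int128)d[2] + (uint64_t)(z >> 64);
	d[2] = (uint64_t)z;
	d[3] += (uint64_t)(z >> 64);
}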
ec_p256_m62.c
     777  uint64_t t1[5], t2[5], t3[5], t4[5];                            in p256_double() local
     815  f256_montymul(t4, P->y, P->z);                                  in p256_double()
     816  f256_add(P->z, t4, t4);                                         in p256_double()
     825  f256_montysquare(t4, t3);                                       in p256_double()
     826  f256_add(t4, t4, t4);                                           in p256_double()
     827  f256_sub(P->y, P->y, t4);                                       in p256_double()
     879  uint64_t t1[5], t2[5], t3[5], t4[5], t5[5], t6[5], t7[5], tt;   in p256_add() local
     887  f256_montymul(t4, P2->z, t3);                                   in p256_add()
     888  f256_montymul(t3, P1->y, t4);                                   in p256_add()
     893  f256_montysquare(t4, P1->z);                                    in p256_add()
          [all …]

ec_p256_m31.c
     762  uint32_t t1[9], t2[9], t3[9], t4[9];                            in p256_double() local
     800  mul_f256(t4, Q->y, Q->z);                                       in p256_double()
     801  add_f256(Q->z, t4, t4);                                         in p256_double()
     809  square_f256(t4, t3);                                            in p256_double()
     810  add_f256(t4, t4, t4);                                           in p256_double()
     811  sub_f256(Q->y, Q->y, t4);                                       in p256_double()
     861  uint32_t t1[9], t2[9], t3[9], t4[9], t5[9], t6[9], t7[9];       in p256_add() local
     870  mul_f256(t4, P2->z, t3);                                        in p256_add()
     871  mul_f256(t3, P1->y, t4);                                        in p256_add()
     876  square_f256(t4, P1->z);                                         in p256_add()
          [all …]

ec_prime_i15.c
     183  #define t4 9                                                    macro
     253  MMUL(t4, Py, Pz),
     254  MSET(Pz, t4),
     255  MADD(Pz, t4),
     263  MMUL(t4, t3, t3),
     264  MSUB(Py, t4),
     265  MSUB(Py, t4),
     326  MMUL(t4, P2z, t3),
     327  MMUL(t3, P1y, t4),
     332  MMUL(t4, P1z, P1z),
          [all …]

ec_prime_i31.c
     182  #define t4 9                                                    macro
     252  MMUL(t4, Py, Pz),
     253  MSET(Pz, t4),
     254  MADD(Pz, t4),
     262  MMUL(t4, t3, t3),
     263  MSUB(Py, t4),
     264  MSUB(Py, t4),
     325  MMUL(t4, P2z, t3),
     326  MMUL(t3, P1y, t4),
     331  MMUL(t4, P1z, P1z),
          [all …]

ec_p256_m15.c
    1357  uint32_t t1[20], t2[20], t3[20], t4[20];                        in p256_double() local
    1412  mul_f256(t4, Q->y, Q->z);                                       in p256_double()
    1414  Q->z[i] = t4[i] << 1;                                           in p256_double()
    1428  square_f256(t4, t3);                                            in p256_double()
    1430  Q->y[i] += (F256[i] << 2) - (t4[i] << 1);                       in p256_double()
    1483  uint32_t t1[20], t2[20], t3[20], t4[20], t5[20], t6[20], t7[20];  in p256_add() local
    1492  mul_f256(t4, P2->z, t3);                                        in p256_add()
    1493  mul_f256(t3, P1->y, t4);                                        in p256_add()
    1498  square_f256(t4, P1->z);                                         in p256_add()
    1499  mul_f256(t2, P2->x, t4);                                        in p256_add()
          [all …]
/freebsd/sys/riscv/riscv/

locore.S
     154  li t4, (PTE_KERN)
     155  or t4, t4, s8                /* t4 |= pte bits */
     157  or t6, t4, s2
     175  li t4, PTE_V
     177  or t6, t4, t5
     187  srli t4, s9, L2_SHIFT        /* Div physmem base by 2 MiB */
     189  add t3, t4, t2
     193  slli t2, t4, PTE_PPN1_S      /* << PTE_PPN1_S */
     198  addi t4, t4, 1
     199  bltu t4, t3, 1b
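In these locore.S lines t4 carries a physical page number while the boot code builds Sv39 level-2 (2 MiB) page-table entries: the physical base is shifted down by L2_SHIFT, shifted back up into the PTE's PPN[1] field, and OR'd with the permission bits. A rough C sketch of that construction, with the constants taken as assumptions from the Sv39 layout (PPN field at PTE bit 10, so PPN[1] lands at bit 19) rather than quoted from the FreeBSD headers:

#include <stdint.h>

/* Assumed Sv39 constants; see sys/riscv/include/pte.h for the real ones. */
#define L2_SHIFT	21			/* 2 MiB megapages */
#define PTE_PPN1_S	19			/* PPN[1] position in a PTE */
#define PTE_V		(1ul << 0)
#define PTE_R		(1ul << 1)
#define PTE_W		(1ul << 2)
#define PTE_A		(1ul << 6)
#define PTE_D		(1ul << 7)
#define PTE_KERN	(PTE_V | PTE_R | PTE_W | PTE_A | PTE_D)

/*
 * Fill npages consecutive L2 entries with 2 MiB leaf mappings starting
 * at physical address pa_base (table indexing simplified for the sketch).
 */
static void
map_2mib_range(uint64_t *l2, uint64_t pa_base, uint64_t npages)
{
	uint64_t ppn1 = pa_base >> L2_SHIFT;	/* physmem base / 2 MiB */

	for (uint64_t i = 0; i < npages; i++)
		l2[i] = ((ppn1 + i) << PTE_PPN1_S) | PTE_KERN;
}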
/freebsd/crypto/openssl/crypto/aes/asm/

aes-mips.pl
     125  my ($t0,$t1,$t2,$t3,$t4,$t5,$t6,$t7,$t8,$t9,$t10,$t11) = map("\$$_",(12..23));
     164  lwxs $t4,$i0($Tbl)           # Te2[s2>>8]
     187  rotr $t4,$t4,16
     192  xor $t0,$t4
     193  lwxs $t4,$i0($Tbl)           # Te0[s0>>24]
     215  xor $t0,$t4
     247  ext $t4,$s2,8,8
     254  $PTR_INS $i0,$t4,2,8
     299  lw $t4,0($i0)                # Te2[s2>>8]
     321  lw $t4,0($i0)                # Te2[s2>>8]
          [all …]
/freebsd/share/misc/

scsi_modes
      62  {Queue Algorithm Modifier} t4
      85  {Reserved} *t4
      90  {Reserved} *t4
      91  {Initial Command Priority} t4
     125  {Reserved} *t4
     192  {Demand Retention Priority} t4
     193  {Write Retention Priority} t4
     206  {Reserved} *t4
     226  {Reserved} *t4
     227  {SPC} t4
          [all …]
/freebsd/crypto/openssl/crypto/sha/asm/

sha256-armv4.pl
      65  $inp="r1"; $t4="r1";
      92  str $inp,[sp,#17*4]               @ make room for $t4
     109  str $inp,[sp,#17*4]               @ make room for $t4
     141  ldr $t4,[sp,#`($i+15)%16`*4]      @ from future BODY_16_xx
     158  @ ldr $t4,[sp,#`($i+14)%16`*4]
     161  mov $t2,$t4,ror#$sigma1[0]
     163  eor $t2,$t2,$t4,ror#$sigma1[1]
     166  eor $t2,$t2,$t4,lsr#$sigma1[2]    @ sigma1(X[i+14])
     167  ldr $t4,[sp,#`($i+9)%16`*4]
     173  add $t1,$t1,$t4                   @ X[i]
          [all …]
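Here t4 holds message-schedule words: the ror/lsr sequence computes sigma1(X[i+14]) and the later load folds X[i+9] into X[i]. For reference (this is the standard formula being unrolled, not the OpenSSL code), the SHA-256 schedule update on a rolling 16-word window looks like this:

#include <stdint.h>

static inline uint32_t
rotr32(uint32_t x, unsigned n)
{
	return ((x >> n) | (x << (32 - n)));
}

/* small sigma functions from the SHA-256 specification */
static inline uint32_t
sigma0(uint32_t x)
{
	return (rotr32(x, 7) ^ rotr32(x, 18) ^ (x >> 3));
}

static inline uint32_t
sigma1(uint32_t x)
{
	return (rotr32(x, 17) ^ rotr32(x, 19) ^ (x >> 10));
}

/* X[] is the rolling 16-word schedule; call with i >= 16 */
static uint32_t
schedule_word(uint32_t X[16], unsigned i)
{
	X[i % 16] += sigma1(X[(i + 14) % 16]) + X[(i + 9) % 16] +
	    sigma0(X[(i + 1) % 16]);
	return (X[i % 16]);
}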
/freebsd/sys/riscv/vmm/

vmm_switch.S
      70  la t4, .Lswitch_return
      77  csrrw t4, stvec, t4
      84  sd t4, (HYP_H_STVEC)(a0)
      94  ld t4, (HYP_G_T + 4 * 8)(a0)
     134  sd t4, (HYP_G_T + 4 * 8)(a0)
     161  ld t4, (HYP_H_HSTATUS)(a0)
     168  csrrw t4, hstatus, t4
     174  sd t4, (HYP_G_HSTATUS)(a0)
/freebsd/contrib/llvm-project/openmp/runtime/src/

z_Linux_asm.S
    1886  mv t4, a4
    1902  ld a2, 0(t4)
    1906  ld a3, 8(t4)
    1910  ld a4, 16(t4)
    1914  ld a5, 24(t4)
    1918  ld a6, 32(t4)
    1922  ld a7, 40(t4)
    1925  addi t4, t4, 48
    1929  ld t2, 0(t4)
    1931  addi t4, t4, 8
          [all …]
/freebsd/contrib/bearssl/src/kdf/

shake.c
      66  uint64_t t0, t1, t2, t3, t4;       in process_block() local
     126  t4 = tt0 ^ tt2;                    in process_block()
     148  A[ 4] = A[ 4] ^ t4;                in process_block()
     149  A[ 9] = A[ 9] ^ t4;                in process_block()
     150  A[14] = A[14] ^ t4;                in process_block()
     151  A[19] = A[19] ^ t4;                in process_block()
     152  A[24] = A[24] ^ t4;                in process_block()
     307  t4 = tt0 ^ tt2;                    in process_block()
     329  A[24] = A[24] ^ t4;                in process_block()
     330  A[22] = A[22] ^ t4;                in process_block()
          [all …]
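In shake.c, t0..t4 are the per-column correction words of Keccak's theta step; t4 is XORed into the five lanes of column 4 (A[4], A[9], ..., A[24]). The reference formulation of that step (an illustration, not the unrolled BearSSL code) is:

#include <stdint.h>

static inline uint64_t
rotl64(uint64_t x, unsigned n)
{
	return ((x << n) | (x >> (64 - n)));
}

/*
 * Keccak theta: each column parity C[x] is combined into a correction
 * word D[x] = C[x-1] ^ rotl(C[x+1], 1), which is then XORed into every
 * lane of column x (the role t0..t4 play in process_block()).
 */
static void
keccak_theta(uint64_t A[25])
{
	uint64_t C[5], D[5];
	int x, y;

	for (x = 0; x < 5; x++)
		C[x] = A[x] ^ A[x + 5] ^ A[x + 10] ^ A[x + 15] ^ A[x + 20];
	for (x = 0; x < 5; x++)
		D[x] = C[(x + 4) % 5] ^ rotl64(C[(x + 1) % 5], 1);
	for (x = 0; x < 5; x++)
		for (y = 0; y < 5; y++)
			A[x + 5 * y] ^= D[x];
}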
/freebsd/lib/libc/riscv/string/

memchr.S
     117  not t4, t3
     121  and t4, t4, t1
     123  and t3, t3, t4

memset.S
      65  srli t4, t3, 1
      66  sub t2, t2, t4
/freebsd/crypto/openssl/crypto/ec/

ecp_smpl.c
    1494  BIGNUM *t1, *t2, *t3, *t4, *t5 = NULL;                   in ossl_ec_GFp_simple_ladder_pre() local
    1499  t4 = r->X;                                               in ossl_ec_GFp_simple_ladder_pre()
    1504  || !BN_mod_sub_quick(t4, t3, group->a, group->field)     in ossl_ec_GFp_simple_ladder_pre()
    1505  || !group->meth->field_sqr(group, t4, t4, ctx)           in ossl_ec_GFp_simple_ladder_pre()
    1509  || !BN_mod_sub_quick(r->X, t4, t5, group->field)         in ossl_ec_GFp_simple_ladder_pre()
    1565  BIGNUM *t0, *t1, *t2, *t3, *t4, *t5, *t6 = NULL;         in ossl_ec_GFp_simple_ladder_step() local
    1572  t4 = BN_CTX_get(ctx);                                    in ossl_ec_GFp_simple_ladder_step()
    1579  || !group->meth->field_mul(group, t4, r->X, s->Z, ctx)   in ossl_ec_GFp_simple_ladder_step()
    1583  || !BN_mod_add_quick(t6, t3, t4, group->field)           in ossl_ec_GFp_simple_ladder_step()
    1589  || !BN_mod_sub_quick(t3, t4, t3, group->field)           in ossl_ec_GFp_simple_ladder_step()
          [all …]