Cross-reference hits for the identifier t0 in the Linux source tree:

/linux/include/crypto/

aria.h
    337  static inline u32 aria_m(u32 t0)  in aria_m()
    343  static inline void aria_sbox_layer1_with_pre_diff(u32 *t0, u32 *t1, u32 *t2, …  in aria_sbox_layer1_with_pre_diff()
    365  static inline void aria_sbox_layer2_with_pre_diff(u32 *t0, u32 *t1, u32 *t2, …  in aria_sbox_layer2_with_pre_diff()
    387  static inline void aria_diff_word(u32 *t0, u32 *t1, u32 *t2, u32 *t3)  in aria_diff_word()
    407  static inline void aria_add_round_key(u32 *rk, u32 *t0, u32 *t1, u32 *t2, …  in aria_add_round_key()
    416  static inline void aria_subst_diff_odd(u32 *t0, u32 *t1, u32 *t2, u32 *t3)  in aria_subst_diff_odd()
    425  static inline void aria_subst_diff_even(u32 *t0, u32 *t1, u32 *t2, u32 *t3)  in aria_subst_diff_even()
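
These helpers are ARIA's substitution and diffusion building blocks over four 32-bit words. As a minimal sketch of the word-diffusion idea behind a helper like aria_m() (the rotation amounts below are an assumption for illustration, not read from aria.h), a diffusion step XORs a word with rotated copies of itself so each output byte depends on several input bytes:

    #include <stdint.h>

    /* Rotate a 32-bit word right by n bits (0 < n < 32). */
    static inline uint32_t rotr32(uint32_t x, unsigned int n)
    {
        return (x >> n) | (x << (32 - n));
    }

    /* Illustrative diffusion step: mix each byte of t0 with bytes at
     * other positions. The 8/16-bit rotation amounts are assumptions
     * made for this sketch. */
    static inline uint32_t diffuse_word(uint32_t t0)
    {
        return t0 ^ rotr32(t0, 8) ^ rotr32(t0, 16);
    }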

/linux/arch/sparc/lib/

memcpy.S
    20  #define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \  (argument)
    34  #define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \  (argument)
    44  #define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \  (argument)
    52  #define MOVE_LASTALIGNCHUNK(src, dst, offset, t0, t1, t2, t3) \  (argument)
    58  #define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \  (argument)
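
These macros unroll the copy loop: each invocation moves one fixed-size chunk through a batch of scratch registers (t0..t7) so all loads can issue before the stores. A minimal C rendering of the same dataflow, assuming a 32-byte chunk of eight 32-bit words (a sketch, not a transcription of the .S file):

    #include <stddef.h>
    #include <stdint.h>

    /* Load eight words into temporaries, then store them all: the
     * load-everything-then-store-everything schedule the assembly
     * macros express. */
    static inline void move_bigchunk(const uint32_t *src, uint32_t *dst,
                                     size_t off)
    {
        uint32_t t0 = src[off + 0], t1 = src[off + 1];
        uint32_t t2 = src[off + 2], t3 = src[off + 3];
        uint32_t t4 = src[off + 4], t5 = src[off + 5];
        uint32_t t6 = src[off + 6], t7 = src[off + 7];

        dst[off + 0] = t0; dst[off + 1] = t1;
        dst[off + 2] = t2; dst[off + 3] = t3;
        dst[off + 4] = t4; dst[off + 5] = t5;
        dst[off + 6] = t6; dst[off + 7] = t7;
    }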

copy_user.S
    68  #define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \  (argument)
    83  #define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \  (argument)
    101  #define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \  (argument)
    116  #define MOVE_HALFCHUNK(src, dst, offset, t0, t1, t2, t3) \  (argument)
    127  #define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \  (argument)
    141  #define MOVE_LAST_SHORTCHUNK(src, dst, offset, t0, t1) \  (argument)

checksum_32.S
    20  #define CSUM_BIGCHUNK(buf, offset, sum, t0, t1, t2, t3, t4, t5) \  (argument)
    34  #define CSUM_LASTCHUNK(buf, offset, sum, t0, t1, t2, t3) \  (argument)
    164  #define CSUMCOPY_BIGCHUNK_ALIGNED(src, dst, sum, off, t0, t1, t2, t3, t4, t5, t6, t7) \  (argument)
    186  #define CSUMCOPY_BIGCHUNK(src, dst, sum, off, t0, t1, t2, t3, t4, t5, t6, t7) \  (argument)
    209  #define CSUMCOPY_LASTCHUNK(src, dst, sum, off, t0, t1, t2, t3) \  (argument)

blockops.S
    28  #define MIRROR_BLOCK(dst, src, offset, t0, t1, t2, t3, t4, t5, t6, t7) \  (argument)

/linux/lib/crypto/

poly1305-donna64.c
    16  u64 t0, t1;  in poly1305_core_setkey() (local)
    61  u64 t0, t1;  in poly1305_core_blocks() (local)
    117  u64 t0, t1;  in poly1305_core_emit() (local)
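
In the donna64 implementation, t0/t1 hold 64-bit halves of 128-bit values while they are split into or rebuilt from unsaturated limbs. The first such use, loading and clamping the r half of the key, looks roughly like this sketch (the clamp masks are the standard RFC 8439 constants; the helper names are made up for the example):

    #include <stdint.h>
    #include <string.h>

    /* Load a 64-bit little-endian word from an unaligned buffer;
     * this memcpy form assumes a little-endian host. */
    static uint64_t le64_load(const uint8_t *p)
    {
        uint64_t v;

        memcpy(&v, p, sizeof(v));
        return v;
    }

    /* Clamp the r half of a poly1305 key per RFC 8439: clear the top
     * four bits of bytes 3, 7, 11, 15 and the low two bits of bytes
     * 4, 8, 12. */
    static void poly1305_load_r(const uint8_t raw_key[16],
                                uint64_t *r_lo, uint64_t *r_hi)
    {
        uint64_t t0 = le64_load(raw_key + 0);
        uint64_t t1 = le64_load(raw_key + 8);

        *r_lo = t0 & 0x0ffffffc0fffffffULL;
        *r_hi = t1 & 0x0ffffffc0ffffffcULL;
    }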

/linux/crypto/

camellia_generic.c
    336  #define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) ({ \  (argument)
    540  u32 il, ir, t0, t1, w0, w1;  in camellia_setup128() (local)
    647  u32 il, ir, t0, t1, w0, w1; /* temporary variables */  in camellia_setup256() (local)
    811  #define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) ({ \  (argument)
    845  u32 il, ir, t0, t1; /* temporary variables */  in camellia_do_encrypt() (local)
    900  u32 il, ir, t0, t1; /* temporary variables */  in camellia_do_decrypt() (local)

seed.c
    333  u32 i, t0, t1, x1, x2, x3, x4;  in seed_set_key() (local)
    369  u32 x1, x2, x3, x4, t0, t1;  in seed_encrypt() (local)
    407  u32 x1, x2, x3, x4, t0, t1;  in seed_decrypt() (local)
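
In both Camellia and SEED these t0/t1 pairs are classic Feistel scratch values: the round function's output is computed into temporaries and then folded across the block halves. A generic sketch of that dataflow (round_f() is a placeholder standing in for SEED's G-function chain or Camellia's F, not either cipher's real round):

    #include <stdint.h>

    /* Placeholder round function; a real cipher substitutes S-boxes
     * and a diffusion layer here. */
    static uint32_t round_f(uint32_t x, uint32_t subkey)
    {
        return (x + subkey) * 2654435761u;  /* illustrative mixing */
    }

    /* One Feistel round: run the right half through F into
     * temporaries, XOR into the left half, then swap halves. */
    static void feistel_round(uint32_t *l, uint32_t *r,
                              uint32_t k0, uint32_t k1)
    {
        uint32_t t0 = round_f(*r ^ k0, k1);
        uint32_t t1 = *l ^ t0;

        *l = *r;
        *r = t1;
    }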

/linux/arch/x86/crypto/

twofish-avx-x86_64-asm_64.S
    86  #define lookup_32bit(t0, t1, t2, t3, src, dst, interleave_op, il_reg) \  (argument)
    104  #define G(gi1, gi2, x, t0, t1, t2, t3) \  (argument)
    204  #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \  (argument)
    215  #define inpack_blocks(x0, x1, x2, x3, wkey, t0, t1, t2) \  (argument)
    223  #define outunpack_blocks(x0, x1, x2, x3, wkey, t0, t1, t2) \  (argument)
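
transpose_4x4, here and in the serpent/cast6 files below, turns four vector registers of four 32-bit lanes into their transpose, so lane i of every register ends up gathered in register i; the asm does it with unpack shuffles and three scratch registers. The scalar equivalent, ignoring that scheduling concern:

    #include <stdint.h>

    /* Transpose a 4x4 matrix of 32-bit words stored as four rows.
     * The vector version achieves this with punpck{l,h}dq/qdq and
     * temporaries t0..t2; here a single scratch word suffices. */
    static void transpose_4x4(uint32_t x0[4], uint32_t x1[4],
                              uint32_t x2[4], uint32_t x3[4])
    {
        uint32_t *rows[4] = { x0, x1, x2, x3 };

        for (int i = 0; i < 4; i++) {
            for (int j = i + 1; j < 4; j++) {
                uint32_t t0 = rows[i][j];

                rows[i][j] = rows[j][i];
                rows[j][i] = t0;
            }
        }
    }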

serpent-sse2-i586-asm_32.S
    453  #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \  (argument)
    468  #define read_blocks(in, x0, x1, x2, x3, t0, t1, t2) \  (argument)
    476  #define write_blocks(out, x0, x1, x2, x3, t0, t1, t2) \  (argument)
    484  #define xor_blocks(out, x0, x1, x2, x3, t0, t1, t2) \  (argument)

cast6-avx-x86_64-asm_64.S
    190  #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \  (argument)
    201  #define inpack_blocks(x0, x1, x2, x3, t0, t1, t2, rmask) \  (argument)
    209  #define outunpack_blocks(x0, x1, x2, x3, t0, t1, t2, rmask) \  (argument)

serpent-sse2-x86_64-asm_64.S
    575  #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \  (argument)
    590  #define read_blocks(in, x0, x1, x2, x3, t0, t1, t2) \  (argument)
    598  #define write_blocks(out, x0, x1, x2, x3, t0, t1, t2) \  (argument)
    606  #define xor_blocks(out, x0, x1, x2, x3, t0, t1, t2) \  (argument)

aes-gcm-aesni-x86_64.S
    332  .macro _ghash_mul_noreduce a, a_xored, b, lo, mi, hi, t0
    353  .macro _ghash_reduce lo, mi, hi, dst, t0

glue_helper-asm-avx2.S
    28  #define store_cbc_16way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7, t0) \  (argument)

/linux/arch/parisc/include/asm/

checksum.h
    30  unsigned long t0, t1, t2;  in ip_fast_csum() (local)
    115  unsigned long t0, t1, t2, t3;  in csum_ipv6_magic() (local)
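
ip_fast_csum() computes the IPv4 header checksum; the t0..t2 locals hold partial sums so successive adds can overlap in the inline assembly. A portable C sketch of the same ones'-complement computation (not the parisc asm):

    #include <stdint.h>

    /* Ones'-complement checksum over an IPv4 header of ihl 32-bit
     * words (ihl >= 5). Returns the value for the checksum field. */
    static uint16_t ip_fast_csum_sketch(const uint8_t *iph, unsigned int ihl)
    {
        uint32_t sum = 0;

        for (unsigned int i = 0; i < ihl * 4; i += 2)
            sum += (uint32_t)iph[i] << 8 | iph[i + 1];

        /* Fold carries back into the low 16 bits. */
        while (sum >> 16)
            sum = (sum & 0xffff) + (sum >> 16);

        return (uint16_t)~sum;
    }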

/linux/kernel/

kallsyms_selftest.c
    158  u64 t0, t1, t;  in lookup_name() (local)
    212  u64 t0, t1;  in test_perf_kallsyms_on_each_symbol() (local)
    245  u64 t0, t1;  in test_perf_kallsyms_on_each_match_symbol() (local)
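
The selftest's t0/t1 pairs bracket the operation under test with two timestamps and report the difference. The same pattern in self-contained userspace form (the kernel code reads its own time source rather than clock_gettime()):

    #include <stdint.h>
    #include <stdio.h>
    #include <time.h>

    static uint64_t now_ns(void)
    {
        struct timespec ts;

        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
    }

    static void workload(void) { /* stand-in for the measured call */ }

    int main(void)
    {
        uint64_t t0 = now_ns();
        workload();
        uint64_t t1 = now_ns();

        printf("took %llu ns\n", (unsigned long long)(t1 - t0));
        return 0;
    }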

/linux/arch/powerpc/crypto/

aes-spe-modes.S
    125  #define ENDIAN_SWAP(t0, t1, s0, s1) \  (argument)
    133  #define GF128_MUL(d0, d1, d2, d3, t0) \  (argument)
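
GF128_MUL's shape, four data words plus one scratch register, matches the XTS tweak-doubling step: multiply the 128-bit tweak by x in GF(2^128). A hedged C sketch of that standard operation (little-endian byte convention with the 0x87 reduction; assumed from the macro's role, not transcribed from the .S file):

    #include <stdint.h>

    /* Multiply a 16-byte XTS tweak by x in GF(2^128): shift left by
     * one bit across the bytes and, if a bit falls off the top, fold
     * it back in with the reduction constant 0x87. */
    static void gf128_mul_x(uint8_t t[16])
    {
        uint8_t carry = 0;

        for (int i = 0; i < 16; i++) {
            uint8_t next = t[i] >> 7;

            t[i] = (uint8_t)(t[i] << 1) | carry;
            carry = next;
        }
        if (carry)
            t[0] ^= 0x87;
    }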

/linux/arch/loongarch/lib/

delay.c
    14  u64 t0 = get_cycles();  in __delay() (local)
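
Both this __delay() and the riscv one below spin on a cycle counter: sample it once into t0, then busy-wait until it has advanced far enough. A self-contained sketch of the pattern with x86-64 stand-ins for get_cycles()/cpu_relax() (the loop body is the common idiom, not either port's exact code):

    #include <stdint.h>

    typedef uint64_t u64;

    /* Stand-in for the kernel's get_cycles(): read the x86 TSC. */
    static inline u64 get_cycles(void)
    {
        u64 lo, hi;

        __asm__ volatile("rdtsc" : "=a"(lo), "=d"(hi));
        return (hi << 32) | lo;
    }

    /* Stand-in for cpu_relax(): hint that we are spinning. */
    static inline void cpu_relax(void)
    {
        __asm__ volatile("pause");
    }

    /* Spin until the counter has advanced by 'cycles'; the unsigned
     * subtraction stays correct even if the counter wraps. */
    static void delay_cycles(u64 cycles)
    {
        u64 t0 = get_cycles();

        while (get_cycles() - t0 < cycles)
            cpu_relax();
    }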

/linux/arch/alpha/include/uapi/asm/

swab.h
    27  __u64 t0, t1, t2, t3;  in __arch_swab32() (local)
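
__arch_swab32() byte-swaps a 32-bit value through four temporaries; Alpha expresses it with extract/insert byte instructions. The portable equivalent of that dataflow:

    #include <stdint.h>

    /* Byte-swap a 32-bit word: pull each byte into its own
     * temporary, shifted to its mirrored position, then merge. */
    static inline uint32_t swab32_sketch(uint32_t x)
    {
        uint32_t t0 = (x & 0x000000ffu) << 24;
        uint32_t t1 = (x & 0x0000ff00u) << 8;
        uint32_t t2 = (x & 0x00ff0000u) >> 8;
        uint32_t t3 = (x & 0xff000000u) >> 24;

        return t0 | t1 | t2 | t3;
    }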

/linux/arch/riscv/lib/

delay.c
    78  u64 t0 = get_cycles();  in __delay() (local)

/linux/arch/sparc/mm/

tsunami.S
    87  #define MIRROR_BLOCK(dst, src, offset, t0, t1, t2, t3) \  (argument)

/linux/include/soc/arc/

timers.h
    29  unsigned int pad2:15, rtsc:1, pad1:5, rtc:1, t1:1, t0:1, ver:8;  (member)
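
Here t0 and t1 are single-bit fields of an ARC build-configuration register, reporting whether TIMER0 and TIMER1 exist. A runnable sketch of decoding such a register through a bitfield (the field order below is rearranged for a little-endian host and the sample value is invented; real bitfield layout is compiler- and endian-dependent):

    #include <stdint.h>
    #include <stdio.h>

    /* Bitfield view of a timer build-config word, in the spirit of
     * the ARC header: version in the low byte, then presence bits. */
    struct bcr_timer {
        uint32_t ver:8, t0:1, t1:1, rtc:1, pad1:5, rtsc:1, pad2:15;
    };

    union bcr_timer_word {
        uint32_t raw;
        struct bcr_timer f;
    };

    int main(void)
    {
        union bcr_timer_word bcr = { .raw = 0x0000031fu }; /* sample */

        printf("ver=%u timer0=%u timer1=%u rtc=%u\n",
               bcr.f.ver, bcr.f.t0, bcr.f.t1, bcr.f.rtc);
        return 0;
    }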

/linux/drivers/gpu/drm/vmwgfx/

vmwgfx_mksstat.h
    95  const u64 t0;  (member)

/linux/arch/x86/boot/

tty.c
    120  int t0, t1;  in getchar_timeout() (local)

/linux/arch/loongarch/include/asm/

regdef.h
    20  #define t0 $r12 /* caller saved */  (macro)