| /kernel/linux/linux-6.6/include/crypto/ |
| D | aria.h |
|   343 | static inline void aria_sbox_layer1_with_pre_diff(u32 *t0, u32 *t1, u32 *t2, | in aria_sbox_layer1_with_pre_diff() |
|   365 | static inline void aria_sbox_layer2_with_pre_diff(u32 *t0, u32 *t1, u32 *t2, | in aria_sbox_layer2_with_pre_diff() |
|   387 | static inline void aria_diff_word(u32 *t0, u32 *t1, u32 *t2, u32 *t3) | in aria_diff_word() |
|   399 | static inline void aria_diff_byte(u32 *t1, u32 *t2, u32 *t3) | in aria_diff_byte() |
|   407 | static inline void aria_add_round_key(u32 *rk, u32 *t0, u32 *t1, u32 *t2, | in aria_add_round_key() |
|   416 | static inline void aria_subst_diff_odd(u32 *t0, u32 *t1, u32 *t2, u32 *t3) | in aria_subst_diff_odd() |
|   425 | static inline void aria_subst_diff_even(u32 *t0, u32 *t1, u32 *t2, u32 *t3) | in aria_subst_diff_even() |
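In aria.h the four u32 pointers t0..t3 carry the 128-bit ARIA state, so every helper listed above takes them together: aria_add_round_key() XORs a round key into the state and the diff/subst helpers mix the words. A minimal C sketch of that shape, assuming the signatures shown above; the helper bodies and the exact XOR schedule are illustrative stand-ins, not the kernel code:

```c
#include <stdint.h>
#include <stdio.h>

/* Illustrative only: XOR in a 128-bit round key, then mix the four state
 * words with XORs.  This is the general shape of aria_add_round_key() and
 * aria_diff_word(); the kernel's exact XOR schedule may differ. */
static void add_round_key(const uint32_t *rk, uint32_t *t0, uint32_t *t1,
                          uint32_t *t2, uint32_t *t3)
{
	*t0 ^= rk[0];
	*t1 ^= rk[1];
	*t2 ^= rk[2];
	*t3 ^= rk[3];
}

static void diff_word(uint32_t *t0, uint32_t *t1, uint32_t *t2, uint32_t *t3)
{
	*t1 ^= *t2;
	*t2 ^= *t3;
	*t0 ^= *t1;
	*t3 ^= *t1;
	*t2 ^= *t0;
	*t1 ^= *t2;
}

int main(void)
{
	uint32_t rk[4] = { 0x01234567, 0x89abcdef, 0xfedcba98, 0x76543210 };
	uint32_t t0 = 1, t1 = 2, t2 = 3, t3 = 4;

	add_round_key(rk, &t0, &t1, &t2, &t3);
	diff_word(&t0, &t1, &t2, &t3);
	printf("%08x %08x %08x %08x\n", (unsigned)t0, (unsigned)t1,
	       (unsigned)t2, (unsigned)t3);
	return 0;
}
```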
|
| /kernel/linux/linux-6.6/arch/sparc/lib/ |
| D | memcpy.S |
|   20 | #define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   34 | #define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   44 | #define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \ | argument |
|   52 | #define MOVE_LASTALIGNCHUNK(src, dst, offset, t0, t1, t2, t3) \ | argument |
|   58 | #define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \ | argument |
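t0..t7 in these macros are scratch registers used to move one large chunk per iteration: load eight words, then store eight words, with the *LASTCHUNK/*SHORTCHUNK variants mopping up the tail. A minimal C sketch of that unrolled pattern, assuming 4-byte-aligned buffers and 32-bit words (the assembly may use doubleword loads):

```c
#include <stdint.h>
#include <string.h>

/* Sketch of the MOVE_BIGCHUNK idea: copy 32 bytes at a time through eight
 * temporaries so loads and stores can be grouped, then fall back to a
 * generic tail copy (the role of the *LASTCHUNK/*SHORTCHUNK macros).
 * Assumes src and dst are 4-byte aligned. */
static void copy_bigchunks(void *dst, const void *src, size_t len)
{
	uint32_t *d = dst;
	const uint32_t *s = src;

	while (len >= 32) {
		uint32_t t0 = s[0], t1 = s[1], t2 = s[2], t3 = s[3];
		uint32_t t4 = s[4], t5 = s[5], t6 = s[6], t7 = s[7];

		d[0] = t0; d[1] = t1; d[2] = t2; d[3] = t3;
		d[4] = t4; d[5] = t5; d[6] = t6; d[7] = t7;
		s += 8;
		d += 8;
		len -= 32;
	}
	memcpy(d, s, len);	/* remaining tail */
}

int main(void)
{
	uint32_t src[16], dst[16];

	for (int i = 0; i < 16; i++)
		src[i] = (uint32_t)i;
	copy_bigchunks(dst, src, sizeof(src));
	return dst[15] == 15 ? 0 : 1;
}
```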
|
| D | checksum_32.S |
|   20 | #define CSUM_BIGCHUNK(buf, offset, sum, t0, t1, t2, t3, t4, t5) \ | argument |
|   34 | #define CSUM_LASTCHUNK(buf, offset, sum, t0, t1, t2, t3) \ | argument |
|   164 | #define CSUMCOPY_BIGCHUNK_ALIGNED(src, dst, sum, off, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   186 | #define CSUMCOPY_BIGCHUNK(src, dst, sum, off, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   209 | #define CSUMCOPY_LASTCHUNK(src, dst, sum, off, t0, t1, t2, t3) \ | argument |
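CSUM_BIGCHUNK folds a block of words into the running checksum through the temporaries t0..t5 using add-with-carry chains; the CSUMCOPY_* variants do the same while also copying. A portable sketch of the underlying ones'-complement accumulation, using a wide accumulator and an explicit end-around-carry fold instead of carry flags (the calling convention here is illustrative, not csum_partial()'s):

```c
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Sum 32-bit words into a wide accumulator, then fold the carries back in.
 * Assumes a 4-byte aligned buffer whose length is a multiple of 4; a real
 * csum routine also handles odd lengths and returns an unfolded partial. */
static uint32_t csum_partial_words(const uint32_t *buf, size_t nwords,
				   uint32_t sum)
{
	uint64_t acc = sum;

	for (size_t i = 0; i < nwords; i++)
		acc += buf[i];

	/* End-around-carry fold, what the addxcc chains do implicitly. */
	acc = (acc & 0xffffffffu) + (acc >> 32);
	acc = (acc & 0xffffffffu) + (acc >> 32);

	return (uint32_t)acc;
}

int main(void)
{
	uint32_t words[4] = { 0x11111111, 0x22222222, 0x33333333, 0xffffffff };

	printf("%08x\n", (unsigned)csum_partial_words(words, 4, 0));
	return 0;
}
```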
|
| D | copy_user.S |
|   68 | #define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   83 | #define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   101 | #define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \ | argument |
|   116 | #define MOVE_HALFCHUNK(src, dst, offset, t0, t1, t2, t3) \ | argument |
|   127 | #define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \ | argument |
|   141 | #define MOVE_LAST_SHORTCHUNK(src, dst, offset, t0, t1) \ | argument |
|
| /kernel/linux/linux-5.10/arch/sparc/lib/ |
| D | memcpy.S |
|   19 | #define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   33 | #define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   43 | #define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \ | argument |
|   51 | #define MOVE_LASTALIGNCHUNK(src, dst, offset, t0, t1, t2, t3) \ | argument |
|   57 | #define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \ | argument |
|
| D | checksum_32.S |
|   20 | #define CSUM_BIGCHUNK(buf, offset, sum, t0, t1, t2, t3, t4, t5) \ | argument |
|   34 | #define CSUM_LASTCHUNK(buf, offset, sum, t0, t1, t2, t3) \ | argument |
|   171 | #define CSUMCOPY_BIGCHUNK_ALIGNED(src, dst, sum, off, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   193 | #define CSUMCOPY_BIGCHUNK(src, dst, sum, off, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   216 | #define CSUMCOPY_LASTCHUNK(src, dst, sum, off, t0, t1, t2, t3) \ | argument |
|
| D | copy_user.S |
|   69 | #define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   83 | #define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \ | argument |
|   93 | #define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \ | argument |
|   101 | #define MOVE_HALFCHUNK(src, dst, offset, t0, t1, t2, t3) \ | argument |
|   111 | #define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \ | argument |
|
| /kernel/linux/linux-6.6/lib/crypto/ |
| D | poly1305-donna64.c |
|   16 | u64 t0, t1; | in poly1305_core_setkey() local |
|   61 | u64 t0, t1; | in poly1305_core_blocks() local |
|   117 | u64 t0, t1; | in poly1305_core_emit() local |
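In poly1305-donna64.c, t0 and t1 hold the two little-endian 64-bit halves of the 16-byte "r" part of the key (and, in the other functions, of each message block) before they are split into 44/44/42-bit limbs. A standalone sketch of the setkey step under that layout; the clamp constants follow RFC 8439 and the limb split follows the public donna64 description, so treat the exact masks as an assumption rather than a quote of the kernel file:

```c
#include <stdint.h>
#include <stdio.h>

/* Portable little-endian 64-bit load (stand-in for get_unaligned_le64()). */
static uint64_t le64(const uint8_t *p)
{
	uint64_t v = 0;
	for (int i = 7; i >= 0; i--)
		v = (v << 8) | p[i];
	return v;
}

/* Sketch of a donna64-style setkey: t0/t1 are the two LE halves of the
 * 16-byte r key, clamped per RFC 8439 and split into 44+44+42-bit limbs. */
static void setkey_sketch(const uint8_t raw_key[16], uint64_t r[3])
{
	uint64_t t0 = le64(raw_key + 0);
	uint64_t t1 = le64(raw_key + 8);

	t0 &= 0x0ffffffc0fffffffULL;	/* clamp low half  */
	t1 &= 0x0ffffffc0ffffffcULL;	/* clamp high half */

	r[0] = t0 & 0xfffffffffffULL;				/* bits  0..43  */
	r[1] = ((t0 >> 44) | (t1 << 20)) & 0xfffffffffffULL;	/* bits 44..87  */
	r[2] = (t1 >> 24) & 0x3ffffffffffULL;			/* bits 88..127 */
}

int main(void)
{
	uint8_t key[16] = { 1 };
	uint64_t r[3];

	setkey_sketch(key, r);
	printf("%llx %llx %llx\n", (unsigned long long)r[0],
	       (unsigned long long)r[1], (unsigned long long)r[2]);
	return 0;
}
```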
|
| /kernel/linux/linux-5.10/arch/openrisc/kernel/ |
| D | entry.S |
|   27 | #define DISABLE_INTERRUPTS(t1,t2) \ | argument |
|   34 | #define ENABLE_INTERRUPTS(t1) \ | argument |
|   58 | #define TRACE_IRQS_SAVE(t1,trace_op) \ | argument |
|
| /kernel/linux/linux-5.10/lib/crypto/ |
| D | poly1305-donna64.c |
|   18 | u64 t0, t1; | in poly1305_core_setkey() local |
|   63 | u64 t0, t1; | in poly1305_core_blocks() local |
|   119 | u64 t0, t1; | in poly1305_core_emit() local |
|
| /kernel/linux/linux-6.6/arch/openrisc/kernel/ |
| D | entry.S |
|   27 | #define DISABLE_INTERRUPTS(t1,t2) \ | argument |
|   34 | #define ENABLE_INTERRUPTS(t1) \ | argument |
|   58 | #define TRACE_IRQS_SAVE(t1,trace_op) \ | argument |
|
| /kernel/linux/linux-6.6/arch/x86/include/asm/ |
| D | syscall_wrapper.h |
|   63 | #define SYSCALL_PT_ARG6(m, t1, t2, t3, t4, t5, t6) \ | argument |
|   65 | #define SYSCALL_PT_ARG5(m, t1, t2, t3, t4, t5) \ | argument |
|   67 | #define SYSCALL_PT_ARG4(m, t1, t2, t3, t4) \ | argument |
|   69 | #define SYSCALL_PT_ARG3(m, t1, t2, t3) \ | argument |
|   71 | #define SYSCALL_PT_ARG2(m, t1, t2) \ | argument |
|   73 | #define SYSCALL_PT_ARG1(m, t1) m(t1, (regs->bx)) | argument |
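Here t1..t6 are the declared argument types of a syscall: each SYSCALL_PT_ARGn applies the macro m to its last (type, register) pair and recurses into the n-1 case, bottoming out at SYSCALL_PT_ARG1(m, t1), which pairs t1 with regs->bx. A simplified, self-contained cascade in the same spirit; the struct regs, its field names and the CAST_ARG macro below are stand-ins, not the kernel's pt_regs or wrapper macros:

```c
#include <stdio.h>

/* Hypothetical register frame standing in for struct pt_regs. */
struct regs { long bx, cx, dx, si, di, bp; };

/* Arity-recursive mapping of typed syscall arguments onto register slots:
 * PT_ARGn handles its last slot and delegates the rest to PT_ARGn-1. */
#define PT_ARG1(m, t1)                 m(t1, (regs->bx))
#define PT_ARG2(m, t1, t2)             PT_ARG1(m, t1), m(t2, (regs->cx))
#define PT_ARG3(m, t1, t2, t3)         PT_ARG2(m, t1, t2), m(t3, (regs->dx))
#define PT_ARG4(m, t1, t2, t3, t4)     PT_ARG3(m, t1, t2, t3), m(t4, (regs->si))

/* One possible m: cast the saved register to the declared argument type. */
#define CAST_ARG(type, reg)            ((type)(reg))

static long demo_sys_pwrite(int fd, const char *buf, unsigned long count,
			    long offset)
{
	printf("fd=%d count=%lu offset=%ld buf=%p\n", fd, count, offset,
	       (const void *)buf);
	return 0;
}

static long dispatch(struct regs *regs)
{
	/* Expands to demo_sys_pwrite((int)(regs->bx), (const char *)(regs->cx),
	 * (unsigned long)(regs->dx), (long)(regs->si)). */
	return demo_sys_pwrite(PT_ARG4(CAST_ARG, int, const char *,
				       unsigned long, long));
}

int main(void)
{
	struct regs r = { .bx = 3, .cx = 0, .dx = 42, .si = 1024 };

	return (int)dispatch(&r);
}
```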
|
| /kernel/linux/linux-5.10/arch/arm64/crypto/ |
| D | aes-neonbs-core.S |
|   86 | .macro mul_gf4, x0, x1, y0, y1, t0, t1 |
|   96 | .macro mul_gf4_n_gf4, x0, x1, y0, y1, t0, x2, x3, y2, y3, t1 |
|   341 | .macro swapmove_2x, a0, b0, a1, b1, n, mask, t0, t1 |
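swapmove_2x exchanges bit groups between registers under a mask using t0/t1 as scratch; it is the basic primitive for bit-slicing the AES state, after which mul_gf4 and friends do the field arithmetic on the slices. A scalar C sketch of the classic swapmove step (the NEON macro performs the same exchange on vector registers, two pairs at a time):

```c
#include <stdint.h>
#include <stdio.h>

/* swapmove: exchange, between a and b, the bit groups selected by mask,
 * with a's groups taken n bits higher than b's. */
static void swapmove(uint32_t *a, uint32_t *b, unsigned int n, uint32_t mask)
{
	uint32_t t = ((*a >> n) ^ *b) & mask;

	*b ^= t;
	*a ^= t << n;
}

int main(void)
{
	uint32_t a = 0x12345678, b = 0x9abcdef0;

	/* Swap each byte's high nibble of a with the low nibble of b;
	 * prints "a2c4e608 91b3d5f7". */
	swapmove(&a, &b, 4, 0x0f0f0f0f);
	printf("%08x %08x\n", (unsigned)a, (unsigned)b);
	return 0;
}
```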
|
| /kernel/linux/linux-6.6/arch/arm64/crypto/ |
| D | aes-neonbs-core.S |
|   87 | .macro mul_gf4, x0, x1, y0, y1, t0, t1 |
|   97 | .macro mul_gf4_n_gf4, x0, x1, y0, y1, t0, x2, x3, y2, y3, t1 |
|   342 | .macro swapmove_2x, a0, b0, a1, b1, n, mask, t0, t1 |
|
| /kernel/linux/linux-5.10/arch/arm/crypto/ |
| D | aes-neonbs-core.S |
|   141 | .macro mul_gf4, x0, x1, y0, y1, t0, t1 |
|   151 | .macro mul_gf4_n_gf4, x0, x1, y0, y1, t0, x2, x3, y2, y3, t1 |
|   401 | .macro swapmove_2x, a0, b0, a1, b1, n, mask, t0, t1 |
|
| /kernel/linux/linux-6.6/include/net/netfilter/ |
| D | nf_conntrack_tuple.h |
|   127 | static inline bool __nf_ct_tuple_src_equal(const struct nf_conntrack_tuple *t1, | in __nf_ct_tuple_src_equal() |
|   135 | static inline bool __nf_ct_tuple_dst_equal(const struct nf_conntrack_tuple *t1, | in __nf_ct_tuple_dst_equal() |
|   143 | static inline bool nf_ct_tuple_equal(const struct nf_conntrack_tuple *t1, | in nf_ct_tuple_equal() |
|   159 | nf_ct_tuple_src_mask_cmp(const struct nf_conntrack_tuple *t1, | in nf_ct_tuple_src_mask_cmp() |
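t1 and t2 in these helpers are the two conntrack tuples being compared; full tuple equality is simply source-side equality ANDed with destination-side equality, and the *_mask_cmp variant additionally applies a mask. A self-contained sketch of that composition with a simplified tuple layout (the struct below is a stand-in, not struct nf_conntrack_tuple):

```c
#include <stdbool.h>
#include <stdint.h>

/* Simplified stand-in for struct nf_conntrack_tuple: one address/port pair
 * per direction plus the L3/L4 protocol numbers. */
struct tuple {
	struct { uint32_t addr; uint16_t port; } src;
	struct { uint32_t addr; uint16_t port; uint8_t protonum; } dst;
	uint16_t l3num;
};

static bool tuple_src_equal(const struct tuple *t1, const struct tuple *t2)
{
	return t1->src.addr == t2->src.addr &&
	       t1->src.port == t2->src.port &&
	       t1->l3num == t2->l3num;
}

static bool tuple_dst_equal(const struct tuple *t1, const struct tuple *t2)
{
	return t1->dst.addr == t2->dst.addr &&
	       t1->dst.port == t2->dst.port &&
	       t1->dst.protonum == t2->dst.protonum;
}

/* Full tuple equality is the conjunction of the two half comparisons,
 * which is the shape of nf_ct_tuple_equal(). */
static bool tuple_equal(const struct tuple *t1, const struct tuple *t2)
{
	return tuple_src_equal(t1, t2) && tuple_dst_equal(t1, t2);
}

int main(void)
{
	struct tuple a = { .src = { 0x0a000001, 1234 },
			   .dst = { 0x0a000002, 80, 6 }, .l3num = 2 };
	struct tuple b = a;

	return tuple_equal(&a, &b) ? 0 : 1;
}
```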
|
| /kernel/linux/linux-5.10/include/net/netfilter/ |
| D | nf_conntrack_tuple.h |
|   124 | static inline bool __nf_ct_tuple_src_equal(const struct nf_conntrack_tuple *t1, | in __nf_ct_tuple_src_equal() |
|   132 | static inline bool __nf_ct_tuple_dst_equal(const struct nf_conntrack_tuple *t1, | in __nf_ct_tuple_dst_equal() |
|   140 | static inline bool nf_ct_tuple_equal(const struct nf_conntrack_tuple *t1, | in nf_ct_tuple_equal() |
|   156 | nf_ct_tuple_src_mask_cmp(const struct nf_conntrack_tuple *t1, | in nf_ct_tuple_src_mask_cmp() |
|
| /kernel/linux/linux-6.6/crypto/ |
| D | camellia_generic.c |
|   336 | #define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) ({ \ | argument |
|   540 | u32 il, ir, t0, t1, w0, w1; | in camellia_setup128() local |
|   647 | u32 il, ir, t0, t1, w0, w1; /* temporary variables */ | in camellia_setup256() local |
|   811 | #define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) ({ \ | argument |
|   845 | u32 il, ir, t0, t1; /* temporary variables */ | in camellia_do_encrypt() local |
|   900 | u32 il, ir, t0, t1; /* temporary variables */ | in camellia_do_decrypt() local |
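CAMELLIA_F is the cipher's keyed F-function: it mixes a 64-bit half (xl, xr) with a 64-bit subkey (kl, kr) into (yl, yr) using il/ir/t0/t1 as scratch, and the encrypt/decrypt paths apply it in a Feistel pattern. A generic sketch of that Feistel wiring with an abstract F; the F body below is a placeholder mix, not Camellia's S-boxes and P-layer:

```c
#include <stdint.h>
#include <stdio.h>

/* Placeholder F-function: any keyed mix of a 64-bit half into 64 bits.
 * Camellia's real F applies per-byte S-boxes and a linear P-layer here. */
static void F(uint32_t xl, uint32_t xr, uint32_t kl, uint32_t kr,
	      uint32_t *yl, uint32_t *yr)
{
	uint32_t t0 = xl ^ kl;
	uint32_t t1 = xr ^ kr;

	*yl = (t0 << 7 | t0 >> 25) ^ t1;
	*yr = (t1 << 11 | t1 >> 21) ^ *yl;
}

/* One pair of Feistel rounds: F of one half is XORed into the other half,
 * then the roles swap. */
static void feistel_round_pair(uint32_t *ll, uint32_t *lr,
			       uint32_t *rl, uint32_t *rr,
			       const uint32_t k[4])
{
	uint32_t t0, t1;

	F(*ll, *lr, k[0], k[1], &t0, &t1);
	*rl ^= t0;
	*rr ^= t1;

	F(*rl, *rr, k[2], k[3], &t0, &t1);
	*ll ^= t0;
	*lr ^= t1;
}

int main(void)
{
	uint32_t ll = 1, lr = 2, rl = 3, rr = 4;
	const uint32_t k[4] = { 0x01234567, 0x89abcdef, 0xfedcba98, 0x76543210 };

	feistel_round_pair(&ll, &lr, &rl, &rr, k);
	printf("%08x %08x %08x %08x\n", (unsigned)ll, (unsigned)lr,
	       (unsigned)rl, (unsigned)rr);
	return 0;
}
```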
|
| /kernel/linux/linux-5.10/crypto/ |
| D | camellia_generic.c |
|   344 | #define CAMELLIA_F(xl, xr, kl, kr, yl, yr, il, ir, t0, t1) ({ \ | argument |
|   548 | u32 il, ir, t0, t1, w0, w1; | in camellia_setup128() local |
|   655 | u32 il, ir, t0, t1, w0, w1; /* temporary variables */ | in camellia_setup256() local |
|   819 | #define CAMELLIA_FLS(ll, lr, rl, rr, kll, klr, krl, krr, t0, t1, t2, t3) ({ \ | argument |
|   853 | u32 il, ir, t0, t1; /* temporary variables */ | in camellia_do_encrypt() local |
|   908 | u32 il, ir, t0, t1; /* temporary variables */ | in camellia_do_decrypt() local |
|
| /kernel/linux/linux-6.6/arch/powerpc/crypto/ |
| D | aes-spe-keys.S |
|   39 | #define LS_BOX(r, t1, t2) \ | argument |
|   55 | #define GF8_MUL(out, in, t1, t2) \ | argument |
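LS_BOX runs the bytes of r through the AES S-box and GF8_MUL advances a value by a factor of {02} in GF(2^8) (the "xtime" step used for round constants), with t1/t2 as scratch registers. A C sketch of the standard branch-free xtime trick on a packed word; whether the PowerPC macro operates on one byte or a packed word is an assumption here, but the field arithmetic is the same:

```c
#include <stdint.h>
#include <stdio.h>

/* xtime: multiply each byte of a packed 32-bit word by {02} in GF(2^8)
 * modulo the AES polynomial x^8 + x^4 + x^3 + x + 1 (0x11b).  Bytes whose
 * high bit was set get reduced by XORing in 0x1b after the shift. */
static uint32_t xtime_packed(uint32_t in)
{
	uint32_t hi = in & 0x80808080u;		/* high bit of each byte */

	return ((in & 0x7f7f7f7fu) << 1) ^ ((hi >> 7) * 0x1bu);
}

int main(void)
{
	/* 0x80*2 reduces to 0x1b, 0x01*2 = 0x02, 0x40*2 = 0x80, 0x53*2 = 0xa6 */
	printf("%08x\n", (unsigned)xtime_packed(0x80014053u));
	return 0;
}
```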
|
| /kernel/linux/linux-5.10/arch/powerpc/crypto/ |
| D | aes-spe-keys.S |
|   39 | #define LS_BOX(r, t1, t2) \ | argument |
|   55 | #define GF8_MUL(out, in, t1, t2) \ | argument |
|
| /kernel/linux/linux-6.6/tools/testing/selftests/kvm/x86_64/ |
| D | hyperv_clock.c |
|   53 | u64 tsc_freq, r1, r2, t1, t2; | in check_tsc_msr_rdtsc() local |
|   86 | u64 r1, r2, t1, t2; | in check_tsc_msr_tsc_page() local |
|   178 | u64 tsc_freq, r1, r2, t1, t2; | in host_check_tsc_msr_rdtsc() local |
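In these selftest functions r1/r2 are two raw TSC samples and t1/t2 the matching reference-clock samples; the test checks that the TSC delta divided by the reference delta agrees with the advertised TSC frequency. A host-side sketch of that cross-check using CLOCK_MONOTONIC and the __rdtsc() intrinsic as stand-ins; the real test reads Hyper-V MSRs from guest code and applies its own tolerance:

```c
#include <stdint.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h>
#include <x86intrin.h>

static uint64_t ns_now(void)
{
	struct timespec ts;

	clock_gettime(CLOCK_MONOTONIC, &ts);
	return (uint64_t)ts.tv_sec * 1000000000ull + ts.tv_nsec;
}

int main(void)
{
	/* r1/r2: raw TSC samples; t1/t2: reference clock samples. */
	uint64_t t1 = ns_now();
	uint64_t r1 = __rdtsc();

	usleep(100 * 1000);			/* let both clocks advance */

	uint64_t t2 = ns_now();
	uint64_t r2 = __rdtsc();

	/* Estimated TSC frequency in Hz from the two deltas; a test like
	 * check_tsc_msr_rdtsc() would compare this against the advertised
	 * frequency and fail if they differ by more than a small tolerance. */
	double est_freq = (double)(r2 - r1) / (double)(t2 - t1) * 1e9;

	printf("estimated TSC frequency: %.0f Hz\n", est_freq);
	return 0;
}
```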
|
| /kernel/linux/linux-6.6/arch/arm/crypto/ |
| D | aes-neonbs-core.S |
|   141 | .macro mul_gf4, x0, x1, y0, y1, t0, t1 |
|   151 | .macro mul_gf4_n_gf4, x0, x1, y0, y1, t0, x2, x3, y2, y3, t1 |
|   401 | .macro swapmove_2x, a0, b0, a1, b1, n, mask, t0, t1 |
|
| /kernel/linux/linux-5.10/arch/x86/crypto/ |
| D | twofish-avx-x86_64-asm_64.S |
|   91 | #define lookup_32bit(t0, t1, t2, t3, src, dst, interleave_op, il_reg) \ | argument |
|   109 | #define G(gi1, gi2, x, t0, t1, t2, t3) \ | argument |
|   209 | #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ | argument |
|   220 | #define inpack_blocks(x0, x1, x2, x3, wkey, t0, t1, t2) \ | argument |
|   228 | #define outunpack_blocks(x0, x1, x2, x3, wkey, t0, t1, t2) \ | argument |
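transpose_4x4 re-slices four 128-bit registers, each holding one block, so that the same 32-bit word of all four parallel blocks ends up in one register, using t0..t2 as scratch; inpack/outunpack then add the whitening key. A plain C sketch of the same 4x4 word transpose (the AVX macro does this with unpack/shuffle instructions rather than element swaps):

```c
#include <stdint.h>
#include <stdio.h>

/* Transpose a 4x4 matrix of 32-bit words in place: row i of the result is
 * column i of the input, i.e. word i of each of the four parallel blocks. */
static void transpose_4x4(uint32_t m[4][4])
{
	for (int i = 0; i < 4; i++) {
		for (int j = i + 1; j < 4; j++) {
			uint32_t t = m[i][j];

			m[i][j] = m[j][i];
			m[j][i] = t;
		}
	}
}

int main(void)
{
	uint32_t m[4][4];

	for (int i = 0; i < 4; i++)
		for (int j = 0; j < 4; j++)
			m[i][j] = (uint32_t)(i * 4 + j);

	transpose_4x4(m);
	for (int i = 0; i < 4; i++)
		printf("%u %u %u %u\n", (unsigned)m[i][0], (unsigned)m[i][1],
		       (unsigned)m[i][2], (unsigned)m[i][3]);
	return 0;
}
```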
|
| /kernel/linux/linux-6.6/arch/x86/crypto/ |
| D | twofish-avx-x86_64-asm_64.S |
|   86 | #define lookup_32bit(t0, t1, t2, t3, src, dst, interleave_op, il_reg) \ | argument |
|   104 | #define G(gi1, gi2, x, t0, t1, t2, t3) \ | argument |
|   204 | #define transpose_4x4(x0, x1, x2, x3, t0, t1, t2) \ | argument |
|   215 | #define inpack_blocks(x0, x1, x2, x3, wkey, t0, t1, t2) \ | argument |
|   223 | #define outunpack_blocks(x0, x1, x2, x3, wkey, t0, t1, t2) \ | argument |
|