/arch/x86/net/
  bpf_jit_comp.c
      34  #define EMIT3(b1, b2, b3) EMIT((b1) + ((b2) << 8) + ((b3) << 16), 3)
      35  #define EMIT4(b1, b2, b3, b4) EMIT((b1) + ((b2) << 8) + ((b3) << 16) + ((b4) << 24), 4)
      41  #define EMIT3_off32(b1, b2, b3, off) \
      42  do { EMIT3(b1, b2, b3); EMIT(off, 4); } while (0)
      43  #define EMIT4_off32(b1, b2, b3, b4, off) \
      44  do { EMIT4(b1, b2, b3, b4); EMIT(off, 4); } while (0)
     301  u8 b1, b2, b3;   in emit_mov_imm32() local
     312  b3 = 0xC0;   in emit_mov_imm32()
     313  EMIT3_off32(b1, b2, add_1reg(b3, dst_reg), imm32);   in emit_mov_imm32()
     325  b3 = 0xC0;   in emit_mov_imm32()
     [all …]
  bpf_jit_comp32.c
      68  #define EMIT3(b1, b2, b3) EMIT((b1) + ((b2) << 8) + ((b3) << 16), 3)
      69  #define EMIT4(b1, b2, b3, b4) \
      70      EMIT((b1) + ((b2) << 8) + ((b3) << 16) + ((b4) << 24), 4)
      76  #define EMIT3_off32(b1, b2, b3, off) \
      77  do { EMIT3(b1, b2, b3); EMIT(off, 4); } while (0)
      78  #define EMIT4_off32(b1, b2, b3, b4, off) \
      79  do { EMIT4(b1, b2, b3, b4); EMIT(off, 4); } while (0)
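Both JIT files build x86 instructions by packing opcode bytes into one integer, least-significant byte first, and letting EMIT() copy the low len bytes into the JIT image, so b1 is emitted first, then b2, then b3 (and b4). Below is a minimal user-space sketch of that packing arithmetic; emit_bytes(), image and pos are illustrative stand-ins, not the kernel's EMIT() plumbing.

#include <stdint.h>
#include <stdio.h>

/* Copy the low 'len' bytes of the packed value, lowest byte first. */
static void emit_bytes(uint8_t *image, size_t *pos, uint32_t packed, int len)
{
	for (int i = 0; i < len; i++)
		image[(*pos)++] = (packed >> (8 * i)) & 0xff;
}

/* Same packing arithmetic as the kernel macro; 'image'/'pos' come from scope. */
#define EMIT3(b1, b2, b3) \
	emit_bytes(image, &pos, (b1) + ((b2) << 8) + ((b3) << 16), 3)

int main(void)
{
	uint8_t image[16];
	size_t pos = 0;

	EMIT3(0xC7, 0xC0, 0x2A);	/* example bytes: "mov eax, imm32" starts c7 c0 */

	for (size_t i = 0; i < pos; i++)
		printf("%02x ", image[i]);	/* prints: c7 c0 2a */
	printf("\n");
	return 0;
}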
/arch/arm/crypto/
  aes-neonbs-core.S
      85  .macro in_bs_ch, b0, b1, b2, b3, b4, b5, b6, b7
      88  veor \b3, \b3, \b0
      91  veor \b6, \b6, \b3
      92  veor \b3, \b3, \b7
      94  veor \b3, \b3, \b4
      97  veor \b3, \b3, \b1
     101  .macro out_bs_ch, b0, b1, b2, b3, b4, b5, b6, b7
     108  veor \b5, \b5, \b3
     109  veor \b3, \b3, \b7
     115  .macro inv_in_bs_ch, b6, b1, b2, b4, b7, b0, b3, b5
     [all …]
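In the bitsliced AES code each of b0..b7 is a whole NEON register holding one bit position of several AES states processed in parallel, so the basis-change macros are plain XOR chains between those bit planes. The sketch below reproduces only the b3-related steps visible in the truncated listing (the real in_bs_ch macro updates the other planes too); the function name and the uint64_t stand-in for a 128-bit register are illustrative.

#include <stdint.h>

typedef uint64_t bitplane;	/* stand-in for one 128-bit NEON register */

/* Only the b3-related XOR steps listed above, expressed on bit planes. */
static void in_bs_ch_b3_steps(bitplane *b0, bitplane *b1, bitplane *b3,
			      bitplane *b4, bitplane *b6, bitplane *b7)
{
	*b3 ^= *b0;	/* veor b3, b3, b0 */
	*b6 ^= *b3;	/* veor b6, b6, b3 */
	*b3 ^= *b7;	/* veor b3, b3, b7 */
	*b3 ^= *b4;	/* veor b3, b3, b4 */
	*b3 ^= *b1;	/* veor b3, b3, b1 */
}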
  ghash-ce-core.S
      93  .macro __pmull_p64, rd, rn, rm, b1, b2, b3, b4
     107  .macro __pmull_p8, rq, ad, bd, b1=t4l, b2=t3l, b3=t4l, b4=t3l
     122  .ifc \b3, t4l
     131  vmull.p8 t4q, \ad, \b3   @ I = A*B3
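__pmull_p8 is the fallback used when the 64-bit polynomial multiply instruction is unavailable: it assembles the product from vmull.p8, which performs eight independent 8-bit carry-less (GF(2)[x]) multiplications, each yielding a 16-bit result. A scalar sketch of one such carry-less multiply, with an illustrative function name:

#include <stdint.h>

/*
 * 8-bit x 8-bit carry-less (polynomial) multiply, the scalar equivalent
 * of a single vmull.p8 lane: partial products are combined with XOR
 * instead of addition, so there are no carries between bit positions.
 */
static uint16_t clmul8(uint8_t a, uint8_t b)
{
	uint16_t r = 0;

	for (int i = 0; i < 8; i++)
		if (b & (1u << i))
			r ^= (uint16_t)a << i;
	return r;
}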
/arch/arm64/crypto/
  aes-neonbs-core.S
      25  .macro in_bs_ch, b0, b1, b2, b3, b4, b5, b6, b7
      28  eor \b3, \b3, \b0
      31  eor \b6, \b6, \b3
      32  eor \b3, \b3, \b7
      34  eor \b3, \b3, \b4
      37  eor \b3, \b3, \b1
      41  .macro out_bs_ch, b0, b1, b2, b3, b4, b5, b6, b7
      48  eor \b5, \b5, \b3
      49  eor \b3, \b3, \b7
      55  .macro inv_in_bs_ch, b6, b1, b2, b4, b7, b0, b3, b5
      [all …]
/arch/arm/include/asm/
  xor.h
      32  : "=r" (src), "=r" (b1), "=r" (b2), "=r" (b3), "=r" (b4) \
      34  __XOR(a1, b1); __XOR(a2, b2); __XOR(a3, b3); __XOR(a4, b4)
      56  register unsigned int b3 __asm__("ip");   in xor_arm4regs_2()
      77  register unsigned int b3 __asm__("ip");   in xor_arm4regs_3()
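These are the RAID xor-blocks helpers; the ARM variants pin a1..a4 and b1..b4 to fixed registers (b3 lives in ip) so each loop iteration can load, XOR and store four words with ldm/stm. Functionally the two-source version just XORs one buffer into another; the sketch below shows that behaviour portably and is not the kernel routine itself.

#include <stddef.h>

/*
 * Portable sketch of what xor_arm4regs_2() computes: XOR the words of
 * p2 into p1, four words per iteration. The real routine keeps a1..a4
 * and b1..b4 in fixed registers and uses ldmia/stmia.
 */
static void xor_blocks_2(size_t bytes, unsigned long *p1,
			 const unsigned long *p2)
{
	size_t lines = bytes / (4 * sizeof(unsigned long));

	while (lines--) {
		p1[0] ^= p2[0];
		p1[1] ^= p2[1];
		p1[2] ^= p2[2];
		p1[3] ^= p2[3];
		p1 += 4;
		p2 += 4;
	}
}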
/arch/c6x/lib/
  strasgi_64plus.S
      25  ret .s2 b3
  strasgi.S
      64  || ret .s2 b3
/arch/c6x/kernel/
  signal.c
      44  COPY(b0); COPY(b1); COPY(b2); COPY(b3); COPY(b5); COPY(b7); COPY(b9);   in restore_sigcontext()
     109  COPY(b0); COPY(b1); COPY(b2); COPY(b3); COPY(b5); COPY(b7); COPY(b9);   in setup_sigcontext()
     191  regs->b3 = (unsigned long) retcode;   in setup_rt_frame()
  asm-offsets.c
      70  OFFSET(REGS_B3, pt_regs, b3);   in foo()
  traps.c
      37  pr_err("A3: %08lx B3: %08lx\n", regs->a3, regs->b3);   in show_regs()
/arch/powerpc/crypto/
  aes-tab-4k.S
      45  .long R(41, ad, ad, ec), R(b3, d4, d4, 67)
      70  .long R(b7, d6, d6, 61), R(7d, b3, b3, ce)
     146  .long R(2b, 98, 98, b3), R(22, 11, 11, 33)
     240  .long R(2b, b3, 16, 6c), R(a9, 70, b9, 99)
     253  .long R(6f, d5, 2d, a9), R(cf, 25, 12, b3)
     274  .long R(b3, 67, 1d, 5a), R(92, db, d2, 52)
     288  .long R(39, a8, 01, 71), R(08, 0c, b3, de)
/arch/ia64/include/uapi/asm/
  ptrace.h
     195  unsigned long b3;   member
/arch/arm/vdso/
  Makefile
      65  last=`echo $$buildid | cut -b3-`; \
/arch/s390/net/
  bpf_jit_comp.c
     160  #define EMIT4_RRF(op, b1, b2, b3) \
     162      _EMIT4(op | reg_high(b3) << 8 | reg(b1, b2)); \
     165      REG_SET_SEEN(b3); \
     218  #define EMIT6_DISP_LH(op1, op2, b1, b2, b3, disp) \
     221      reg_high(b3) << 8, op2, disp); \
     224      REG_SET_SEEN(b3); \
/arch/c6x/include/uapi/asm/
  ptrace.h
     135  REG_PAIR(b3, b2);
/arch/ia64/kernel/
  entry.h
      58  .spillsp b2,SW(B2)+16+(off); .spillsp b3,SW(B3)+16+(off); \
  relocate_kernel.S
     223  mov r5=b3
/arch/sparc/vdso/
  Makefile
     131  last=`echo $$buildid | cut -b3-`; \
/arch/arm/boot/dts/
  kirkwood-b3.dts
      22  compatible = "excito,b3", "marvell,kirkwood-88f6281", "marvell,kirkwood";
/arch/x86/kernel/
  uprobes.c
      46  #define W(row, b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, ba, bb, bc, bd, be, bf)\
      47  (((b0##UL << 0x0)|(b1##UL << 0x1)|(b2##UL << 0x2)|(b3##UL << 0x3) | \
/arch/x86/kernel/kprobes/
  core.c
      61  #define W(row, b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, ba, bb, bc, bd, be, bf)\
      62  (((b0##UL << 0x0)|(b1##UL << 0x1)|(b2##UL << 0x2)|(b3##UL << 0x3) | \
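In both files W() packs a row of sixteen 0/1 flags, one per low nibble of an opcode byte, into a 16-bit group inside a per-opcode bitmap, so classifying an opcode is a single bit test. Below is a sketch of that packing and lookup pattern; the table contents and the opcode_is_flagged() helper are illustrative, not the kernel's actual tables.

#include <stdbool.h>
#include <stdint.h>

/*
 * W(row, b0..bf) packs sixteen flags into bits 0..15 and shifts them to
 * the row's position, so a 256-entry opcode table fits in eight 32-bit
 * words (two rows OR'ed together per word).
 */
#define W(row, b0, b1, b2, b3, b4, b5, b6, b7,				\
	       b8, b9, ba, bb, bc, bd, be, bf)				\
	(((b0##UL << 0x0)|(b1##UL << 0x1)|(b2##UL << 0x2)|(b3##UL << 0x3) | \
	  (b4##UL << 0x4)|(b5##UL << 0x5)|(b6##UL << 0x6)|(b7##UL << 0x7) | \
	  (b8##UL << 0x8)|(b9##UL << 0x9)|(ba##UL << 0xa)|(bb##UL << 0xb) | \
	  (bc##UL << 0xc)|(bd##UL << 0xd)|(be##UL << 0xe)|(bf##UL << 0xf)) \
	 << (row % 32))

/* Illustrative table: only opcodes 0x03 and 0x13 are flagged. */
static const uint32_t example_table[256 / 32] = {
	W(0x00, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) |
	W(0x10, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
	/* remaining words default to 0 */
};

static bool opcode_is_flagged(uint8_t opcode)
{
	return example_table[opcode >> 5] & (1u << (opcode & 31));
}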
/arch/x86/crypto/
  camellia-aesni-avx-asm_64.S
     434  b3, c3, d3, st0, st1) \
     438  transpose_4x4(b0, b1, b2, b3, d2, d3); \
     454  vpshufb a0, b3, b3; \
     477  transpose_4x4(a3, b3, c3, d3, b0, b1); \
  camellia-aesni-avx2-asm_64.S
     468  a3, b3, c3, d3, st0, st1) \
     472  transpose_4x4(b0, b1, b2, b3, d2, d3); \
     488  vpshufb a0, b3, b3; \
     511  transpose_4x4(a3, b3, c3, d3, b0, b1); \
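transpose_4x4 appears to reorder a 4x4 block of 32-bit lanes spread across four vector registers (via the dword/qword unpack instructions) so that data laid out per-block becomes laid out per-lane before the vpshufb table lookups. A scalar sketch of the same data movement on a plain array, purely illustrative:

#include <stdint.h>

/*
 * Sketch of a 4x4 transpose: r[i][j] and r[j][i] swap, so lane j of
 * register i ends up holding what used to be lane i of register j.
 * The asm macro performs the same movement with vpunpck{l,h}dq and
 * vpunpck{l,h}qdq plus two scratch registers.
 */
static void transpose_4x4(uint32_t r[4][4])
{
	for (int i = 0; i < 4; i++)
		for (int j = i + 1; j < 4; j++) {
			uint32_t tmp = r[i][j];

			r[i][j] = r[j][i];
			r[j][i] = tmp;
		}
}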
/arch/x86/entry/vdso/
  Makefile
     197  last=`echo $$buildid | cut -b3-`; \