/arch/metag/tbx/

D | tbisoft.S
      75  ADD A0FrP,A0StP,#0
      83  ADD D1Re0,D1Re0,#LO($LSwitchExit)
      87  ADD D0FrT,A0StP,#TBICTX_AX-TBICTX_DX /* Address AX save area */
      93  ADD A0StP,A0StP,#(TBICTX_AX_REGS*8) /* Reserve AX save space */
     102  ADD D1RtP,D1Ar1,#TBICTX_AX
     105  ADD A0.2,D1Ar1,#TBICTX_DX+(8*5)
     132  ADD D1RtP,D1Ar1,#TBICTX_AX
     156  ADD A1LbP,A1LbP,#LO(__exit)
     179  ADD A0FrP,A0StP,#0
     182  ADD A1LbP,A1LbP,#LO(__exit)
          [all …]
|
D | tbipcx.S
      69  ADD A0FrP,A0StP,#0
      73  ADD D0FrT,D0FrT,#LO(___TBIBoingRTI+4)
      76  ADD D1Ar1,D1Ar1,#7 /* PRIV system stack here */
      86  ADD D1Ar5,D1Ar5,#LO(___TBIBoingExit)
      87  ADD A0.3,A0.2,#TBICTX_DX /* DX Save area */
      90  ADD D0Ar6,D0Ar6,D0Ar2 /* Add in PRIV bit if requested */
      92  ADD D0FrT,A0.2,#TBICTX_AX /* Address AX save area */
     109  ADD A0.2,A0.2,#LO($Lpcx_target)
     127  ADD D1Re0,A0StP,#TBICTX_AX /* Address AX save area */
     140  ADD A1.2,A0StP,#TBICTX_DX+(8*1) /* Address DX.1 save area */
          [all …]
|
D | tbitimer.S
      94  ADD D1Re0,D1Ar3,D1Ar5 /* to 64-bit signed extend time */
     117  ADD D0Re0,D0Ar2,D0Ar6 /* Regenerate new value = result */
     121  ADD D1Ar3,D1Ar3,D1Re0 /* ... real timer ... */
     153  ADD D1Ar3,D1Ar3,D1Re0 /* ... real timer ... */
     180  ADD A1LbP,A1LbP,#LO(___TBITimes)
|
D | tbictx.S
      60  ADD A0FrP,A0StP,#0 /* can make sub-calls */
     191  ADD A0.2,A0.2,#(8*18+4*16)
     215  ADD A0.2,D1Ar1,#TBICTX_BYTES /* Dump CBUF state after TBICTX */
     314  ADD A0.2,A0.2,#(2*8)
     316  ADD D0Re0,D0Re0,#(2*4)
     330  ADD A0.2,A0.2,#(8*8)
     359  ADD A0.2,A0.2,#(8*8+4*16)
|
D | tbidefr.S
      87  ADD D0Re0,D0Re0,#TBI_fnSigs
     165  ADD D0Re0,D0Re0,#TBI_fnSigs
|
D | tbicore.S
      40  ADD A1LbP,A1LbP,#LO(___pTBISegs)
      58  ADD A1LbP,A1LbP,#LO(___pTBISegs)
|
D | tbiroot.S
      48  ADD A1LbP,A1LbP,#LO(___pTBIs)
|
D | tbictxfpu.S
      48  ADD D0Ar4, D0Ar4, #LO(METAC_CORE_ID)
|
/arch/c6x/lib/

D | csum_64plus.S
      54  || ADD .L1 A16,A9,A9
      57  || ADD .L2 B8,B9,B9
      60  || ADD .L1X A9,B9,A9 ; add csum from a and b sides
      77  ADD .L1 A16,A9,A9
      80  || ADD .L1 A8,A9,A9
      96  ADD .S1 A8,A9,A9
      98  || ADD .S1 A0,A9,A9
     106  ADD .S1 A7,A9,A9
     110  || ADD .S1 A0,A9,A9
     130  ADD .S1 A7,A9,A9
          [all …]
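Aside: the matches above show the classic two-accumulator Internet-checksum pattern, with halfwords summed into two independent registers (A9 and B9, on the C6x A and B datapaths) and merged at the end ("add csum from a and b sides"). A minimal C sketch of that shape with the usual end-around-carry fold; the function names are illustrative, not from the kernel:

    #include <stddef.h>
    #include <stdint.h>

    /* Fold a 32-bit partial sum down to the final 16-bit ones'-complement
     * checksum: end-around carry twice, then invert. */
    static uint16_t csum_fold(uint32_t sum)
    {
            sum = (sum & 0xffff) + (sum >> 16);
            sum = (sum & 0xffff) + (sum >> 16);
            return (uint16_t)~sum;
    }

    /* Two accumulators, merged once at the end, like A9/B9 above. */
    static uint16_t csum_sketch(const uint16_t *buf, size_t nwords)
    {
            uint64_t a = 0, b = 0;
            size_t i;

            for (i = 0; i + 1 < nwords; i += 2) {
                    a += buf[i];            /* "a side" */
                    b += buf[i + 1];        /* "b side" */
            }
            if (i < nwords)
                    a += buf[i];

            a += b;                         /* add csum from a and b sides */
            while (a >> 32)                 /* fold 64-bit overflow back in */
                    a = (a & 0xffffffff) + (a >> 32);
            return csum_fold((uint32_t)a);
    }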
|
/arch/mips/cavium-octeon/

D | octeon-memcpy.S
      90  #define ADD daddu    macro
     201  ADD src, src, 16*NBYTES
     203  ADD dst, dst, 16*NBYTES
     250  ADD src, src, 8*NBYTES
     252  ADD dst, dst, 8*NBYTES
     269  ADD src, src, 4*NBYTES
     271  ADD dst, dst, 4*NBYTES
     287  ADD src, src, NBYTES
     289  ADD dst, dst, NBYTES
     297  ADD src, src, NBYTES
          [all …]
|
/arch/mips/lib/

D | memcpy.S
     155  #define ADD daddu    macro
     192  #define ADD addu    macro
     332  ADD src, src, 8*NBYTES
     333  ADD dst, dst, 8*NBYTES
     361  ADD src, src, 4*NBYTES
     368  ADD dst, dst, 4*NBYTES
     380  ADD src, src, NBYTES
     384  ADD dst, dst, NBYTES
     402  ADD t1, dst, len # t1 is just past last byte of dst
     423  ADD t2, zero, NBYTES
          [all …]
|
D | csum_partial.S
      45  #define ADD daddu    macro
      52  #define ADD addu    macro
      62  ADD sum, reg; \
      64  ADD sum, v1; \
     394  #define ADD daddu    macro
     414  #define ADD addu    macro
     508  ADD src, src, 8*NBYTES
     526  ADD dst, dst, 8*NBYTES
     529  ADD len, 8*NBYTES # revert len (see above)
     548  ADD src, src, 4*NBYTES
          [all …]
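The paired defines in memcpy.S and csum_partial.S (and in octeon-memcpy.S above) are the usual MIPS width switch: the loops are written once against a single ADD mnemonic, which the preprocessor maps to the 64-bit daddu when the routine moves doublewords and to the 32-bit addu otherwise. A sketch of the idiom, assuming the USE_DOUBLE guard that memcpy.S uses for this selection:

    /* Width-selection idiom behind the matches above. USE_DOUBLE is the
     * guard memcpy.S uses; treat the exact condition as an assumption. */
    #ifdef USE_DOUBLE
    #define ADD     daddu   /* 64-bit add: pointers advance 8 bytes/reg */
    #define NBYTES  8
    #else
    #define ADD     addu    /* 32-bit add: pointers advance 4 bytes/reg */
    #define NBYTES  4
    #endif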
|
/arch/metag/kernel/

D | head.S
      20  ADD D0Re0,D0Re0,#LO(___pTBIs)
      23  ADD D0Re0,D0Re0,#LO(___pTBISegs)
      36  ADD A0StP,A0StP,#LO(_init_thread_union)
      37  ADD A0StP,A0StP,#THREAD_INFO_SIZE
      61  ADD A0StP,A0StP,#LO(_secondary_data_stack)
      63  ADD A0StP,A0StP,#THREAD_INFO_SIZE
|
D | ftrace_stub.S
      42  ADD D0Re0,D0Re0,#LO(_ftrace_trace_function)
      45  ADD D1Re0,D1Re0,#LO(_ftrace_stub)
|
D | user_gateway.S
      41  ADD D1Ar1,D1Ar1,#LO(USER_GATEWAY_PAGE + USER_GATEWAY_TLS)
|
/arch/metag/lib/

D | memmove.S
      20  ADD D0Ar4, D0Ar4, #1
      26  ADD D0Ar2, D1Re0, D1Ar3
      27  ADD D1Ar1, D1Ar1, D1Ar3
      83  ADD D0Ar4, D0Ar4, #7
     122  ADD D1Re0, D1Re0, D1.5
     129  ADD D0Re0, D0Re0, D0.5
     141  ADD A1.2, A1.2, #8
     158  ADD D1Re0, D1Re0, D1.5
     166  ADD D0Re0, D0Re0, D0.5
     177  ADD A1.2, A1.2, #8
          [all …]
|
D | muldi3.S
      21  ADD D1Re0,D1Re0,D0Re0
      32  ADD D1Re0,D1Re0,D1Ar5
      35  ADD D1Re0,D1Re0,D0Ar6
      42  ADD D1Re0,D1Re0,D1Ar5
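__muldi3 is the libgcc-style 64 x 64 -> 64 multiply built from 32-bit products; the repeated additions into D1Re0 above accumulate the two cross products into the high word. The decomposition as a C sketch (function name illustrative):

    #include <stdint.h>

    /* a*b mod 2^64 from 32-bit halves: only the low product needs all
     * 64 bits; the cross products land in the high word, which is what
     * the ADDs into D1Re0 accumulate. */
    uint64_t muldi3_sketch(uint64_t a, uint64_t b)
    {
            uint32_t al = (uint32_t)a, ah = (uint32_t)(a >> 32);
            uint32_t bl = (uint32_t)b, bh = (uint32_t)(b >> 32);

            uint64_t lo = (uint64_t)al * bl;        /* full 32x32 -> 64 */
            uint32_t hi = (uint32_t)(lo >> 32) + ah * bl + al * bh;

            return ((uint64_t)hi << 32) | (uint32_t)lo;
    }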
|
D | ip_fast_csum.S
      26  ADD D0Re0,D0Re0,D0Ar4
      28  ADD D0Re0,D0Re0,D0Ar4
|
D | div64.S
      27  ADD D1Ar5,D1Ar3,D1Ar3
      36  ADD D1Re0,D1Re0,D1Re0
      52  ADD D1Ar5,D1Ar5,D1Re0
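In div64.S the ADD of a register to itself is how a shift-left-by-one is spelled: the divisor is doubled up toward the dividend, then quotient bits are shifted in while subtracting. A C sketch of that restoring shift-and-subtract loop, assuming this is the algorithm behind the matched lines (the doubling ADDs are what the matches show):

    #include <stdint.h>

    /* Unsigned 64/64 division by align-then-subtract; d != 0 assumed.
     * d += d and q += q mirror the ADD Rx,Rx,Rx doubling above. */
    uint64_t udiv64_sketch(uint64_t n, uint64_t d)
    {
            uint64_t q = 0, bit = 1;

            while (d < n && !(d >> 63)) {
                    d += d;         /* ADD d,d,d : align divisor */
                    bit += bit;
            }
            while (bit) {
                    q += q;         /* ADD q,q,q : make room for next bit */
                    if (n >= d) {
                            n -= d;
                            q |= 1;
                    }
                    d >>= 1;
                    bit >>= 1;
            }
            return q;
    }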
|
D | memcpy.S
      43  ADD D1Ar5, D1Ar5, #1 ! dest is aligned when D1Ar5 reaches #8
     124  ADD D0Re0, D0Re0, D1Re0
     128  ADD D1Re0, D1Re0, D0Ar2
     148  ADD D0Re0, D0Re0, D1Ar1
     155  ADD D1Re0, D1Re0, D0FrT
     182  ADD D1Ar5, D0Ar4, D0Ar6
|
D | memset.S
      16  ADD A0.2,D0Ar2,D0Re0 ! Duplicate byte value into 4 (A0.2)
      68  ADD D1Ar1,D1Ar1,D1Ar5 ! Advance pointer to end of area
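The "Duplicate byte value into 4" comment is the standard memset preamble: broadcast the fill byte across a full register so each store writes four (or eight) bytes at once. The same step in C:

    #include <stdint.h>

    /* Replicate a fill byte into every lane of a 32-bit word, as the
     * memset.S comment describes; equivalent to c * 0x01010101u. */
    static inline uint32_t broadcast_byte(uint8_t c)
    {
            uint32_t v = c;

            v |= v << 8;    /* 0x000000cc -> 0x0000cccc */
            v |= v << 16;   /* 0x0000cccc -> 0xcccccccc */
            return v;
    }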
|
/arch/sparc/net/

D | bpf_jit_comp_32.c
      73  #define ADD F3(2, 0x00)    macro
     280  *prog++ = (ADD | RS1(R1) | RS2(R2) | RD(R3))
     283  *prog++ = (ADD | IMMED | RS1(R1) | S13(IMM) | RD(R3))
     295  *prog++ = (ADD | IMMED | RS1(SP) | S13(SZ) | RD(SP))
     400  emit_alu_X(ADD);    in bpf_jit_compile()
     403  emit_alu_K(ADD, K);    in bpf_jit_compile()
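F3(2, 0x00) composes the SPARC format-3 opcode for ADD (op = 2, op3 = 0), and lines 280-295 OR the register and immediate fields straight into the 32-bit instruction word. A self-contained C sketch of that composition, with field positions per the SPARC architecture manual; the macro bodies here are reconstructed for illustration and mirror the names used in the JIT:

    #include <stdint.h>
    #include <stdio.h>

    #define OP(x)    ((uint32_t)(x) << 30)   /* bits 31:30 */
    #define OP3(x)   ((uint32_t)(x) << 19)   /* bits 24:19 */
    #define F3(x, y) (OP(x) | OP3(y))
    #define RD(x)    ((uint32_t)(x) << 25)   /* bits 29:25 */
    #define RS1(x)   ((uint32_t)(x) << 14)   /* bits 18:14 */
    #define RS2(x)   ((uint32_t)(x))         /* bits  4:0 */
    #define IMMED    (1u << 13)              /* i bit: immediate form */
    #define S13(x)   ((uint32_t)(x) & 0x1fff)

    #define ADD F3(2, 0x00)

    int main(void)
    {
            /* add %g1, %g2, %g3 -- register form, as at line 280 */
            printf("%08x\n", ADD | RS1(1) | RS2(2) | RD(3));
            /* add %sp, 8, %sp -- immediate form, as at line 295 (%sp = r14) */
            printf("%08x\n", ADD | IMMED | RS1(14) | S13(8) | RD(14));
            return 0;
    }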
|
D | bpf_jit_comp_64.c
     145  #define ADD F3(2, 0x00)    macro
     855  emit(ADD | IMMED | RS1(FP) | S13(STACK_BIAS) | RD(vfp), ctx);    in build_prologue()
     899  emit_alu_K(ADD, tmp, 1, ctx);    in emit_tail_call()
     904  emit_alu(ADD, bpf_array, tmp, ctx);    in emit_tail_call()
     945  emit_alu(ADD, src, dst, ctx);    in build_insn()
    1088  emit_alu3_K(ADD, SP, STACK_BIAS + 128, tmp, ctx);    in build_insn()
    1105  emit_alu_K(ADD, dst, imm, ctx);    in build_insn()
    1412  emit_alu3(ADD, dst, tmp, tmp, ctx);    in build_insn()
    1415  emit_alu3(ADD, tmp2, src, tmp3, ctx);    in build_insn()
    1435  emit_alu3(ADD, dst, tmp, tmp, ctx);    in build_insn()
          [all …]
|
/arch/arm64/net/

D | bpf_jit.h
     112  #define A64_ADD_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD)
     156  #define A64_ADD(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, ADD)
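A64_ADD_I routes through the arm64 add/sub-immediate instruction class, whose layout is fixed by ARMv8. A small C sketch of the ADD-immediate word that the macro's ADD variant selects; this is the raw architectural composition for reference, whereas the kernel builds it through its insn-generation helpers:

    #include <stdint.h>
    #include <stdio.h>

    /* ADD (immediate): sf | op=0 S=0 | 10001 | sh=0 | imm12 | Rn | Rd */
    static uint32_t a64_add_imm(int sf, int rd, int rn, unsigned imm12)
    {
            return ((uint32_t)sf << 31)      /* 0: 32-bit Wd, 1: 64-bit Xd */
                 | (0x11u << 24)             /* class bits 10001, op=ADD, S=0 */
                 | ((imm12 & 0xfffu) << 10)
                 | ((uint32_t)rn << 5)
                 | (uint32_t)rd;
    }

    int main(void)
    {
            printf("%08x\n", a64_add_imm(1, 0, 1, 16));  /* add x0, x1, #16 */
            return 0;
    }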
|
/arch/c6x/kernel/

D | entry.S
      68  ADD .D1X SP,-8,A15
     215  ADD .S1X 8,SP,A4
|