/arch/metag/tbx/

tbisoft.S
      75  ADD A0FrP,A0StP,#0
      83  ADD D1Re0,D1Re0,#LO($LSwitchExit)
      87  ADD D0FrT,A0StP,#TBICTX_AX-TBICTX_DX /* Address AX save area */
      93  ADD A0StP,A0StP,#(TBICTX_AX_REGS*8) /* Reserve AX save space */
     102  ADD D1RtP,D1Ar1,#TBICTX_AX
     105  ADD A0.2,D1Ar1,#TBICTX_DX+(8*5)
     132  ADD D1RtP,D1Ar1,#TBICTX_AX
     156  ADD A1LbP,A1LbP,#LO(__exit)
     179  ADD A0FrP,A0StP,#0
     182  ADD A1LbP,A1LbP,#LO(__exit)
          [all …]
tbipcx.S
      69  ADD A0FrP,A0StP,#0
      73  ADD D0FrT,D0FrT,#LO(___TBIBoingRTI+4)
      76  ADD D1Ar1,D1Ar1,#7 /* PRIV system stack here */
      86  ADD D1Ar5,D1Ar5,#LO(___TBIBoingExit)
      87  ADD A0.3,A0.2,#TBICTX_DX /* DX Save area */
      90  ADD D0Ar6,D0Ar6,D0Ar2 /* Add in PRIV bit if requested */
      92  ADD D0FrT,A0.2,#TBICTX_AX /* Address AX save area */
     109  ADD A0.2,A0.2,#LO($Lpcx_target)
     127  ADD D1Re0,A0StP,#TBICTX_AX /* Address AX save area */
     140  ADD A1.2,A0StP,#TBICTX_DX+(8*1) /* Address DX.1 save area */
          [all …]
tbitimer.S
      94  ADD D1Re0,D1Ar3,D1Ar5 /* to 64-bit signed extend time */
     117  ADD D0Re0,D0Ar2,D0Ar6 /* Regenerate new value = result */
     121  ADD D1Ar3,D1Ar3,D1Re0 /* ... real timer ... */
     153  ADD D1Ar3,D1Ar3,D1Re0 /* ... real timer ... */
     180  ADD A1LbP,A1LbP,#LO(___TBITimes)
tbictx.S
      60  ADD A0FrP,A0StP,#0 /* can make sub-calls */
     191  ADD A0.2,A0.2,#(8*18+4*16)
     215  ADD A0.2,D1Ar1,#TBICTX_BYTES /* Dump CBUF state after TBICTX */
     314  ADD A0.2,A0.2,#(2*8)
     316  ADD D0Re0,D0Re0,#(2*4)
     330  ADD A0.2,A0.2,#(8*8)
     359  ADD A0.2,A0.2,#(8*8+4*16)
tbidefr.S
      87  ADD D0Re0,D0Re0,#TBI_fnSigs
     165  ADD D0Re0,D0Re0,#TBI_fnSigs
tbicore.S
      40  ADD A1LbP,A1LbP,#LO(___pTBISegs)
      58  ADD A1LbP,A1LbP,#LO(___pTBISegs)
tbiroot.S
      48  ADD A1LbP,A1LbP,#LO(___pTBIs)
tbictxfpu.S
      48  ADD D0Ar4, D0Ar4, #LO(METAC_CORE_ID)
/arch/mips/cavium-octeon/

octeon-memcpy.S
      89  #define ADD daddu            (macro)
     208  ADD src, src, 16*NBYTES
     210  ADD dst, dst, 16*NBYTES
     257  ADD src, src, 8*NBYTES
     259  ADD dst, dst, 8*NBYTES
     276  ADD src, src, 4*NBYTES
     278  ADD dst, dst, 4*NBYTES
     294  ADD src, src, NBYTES
     296  ADD dst, dst, NBYTES
     304  ADD src, src, NBYTES
          [all …]
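The octeon-memcpy.S hits above show the usual unrolled-copy shape: after each group of loads and stores, ADD (aliased to the 64-bit daddu on line 89) advances the source and destination pointers past the whole block that was just copied. A minimal C sketch of that structure, purely illustrative (the helper name is made up, and the real routine also handles alignment, partial words and fault recovery):

    #include <stddef.h>
    #include <stdint.h>

    /* Copy 16 words per iteration, then step both pointers past the
     * block, mirroring the "ADD src, src, 16*NBYTES" lines above. */
    static void copy_blocks(uint64_t *dst, const uint64_t *src, size_t nwords)
    {
        size_t i;

        while (nwords >= 16) {
            for (i = 0; i < 16; i++)
                dst[i] = src[i];
            src += 16;              /* ADD src, src, 16*NBYTES */
            dst += 16;              /* ADD dst, dst, 16*NBYTES */
            nwords -= 16;
        }
        while (nwords--)
            *dst++ = *src++;
    }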
/arch/c6x/lib/

csum_64plus.S
      54  || ADD .L1 A16,A9,A9
      57  || ADD .L2 B8,B9,B9
      60  || ADD .L1X A9,B9,A9 ; add csum from a and b sides
      77  ADD .L1 A16,A9,A9
      80  || ADD .L1 A8,A9,A9
      96  ADD .S1 A8,A9,A9
      98  || ADD .S1 A0,A9,A9
     106  ADD .S1 A7,A9,A9
     110  || ADD .S1 A0,A9,A9
     130  ADD .S1 A7,A9,A9
          [all …]
/arch/mips/lib/

memcpy.S
     151  #define ADD daddu            (macro)
     188  #define ADD addu             (macro)
     328  ADD src, src, 8*NBYTES
     329  ADD dst, dst, 8*NBYTES
     357  ADD src, src, 4*NBYTES
     364  ADD dst, dst, 4*NBYTES
     376  ADD src, src, NBYTES
     380  ADD dst, dst, NBYTES
     398  ADD t1, dst, len # t1 is just past last byte of dst
     419  ADD t2, zero, NBYTES
          [all …]
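memcpy.S defines ADD twice (lines 151 and 188) so the same copy loop assembles with either 64-bit or 32-bit pointer arithmetic. A sketch of that aliasing idiom, assuming the choice hangs off a build-time symbol such as USE_DOUBLE (the exact guard in the kernel source may differ):

    /* Width-selection idiom suggested by the two defines above;
     * the guard name is an assumption, not copied from memcpy.S. */
    #ifdef USE_DOUBLE
    #define ADD     daddu   /* 64-bit adds for pointers and lengths */
    #define NBYTES  8
    #else
    #define ADD     addu    /* 32-bit adds for pointers and lengths */
    #define NBYTES  4
    #endif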
csum_partial.S
      44  #define ADD daddu            (macro)
      51  #define ADD addu             (macro)
      61  ADD sum, reg; \
      63  ADD sum, v1; \
     392  #define ADD daddu            (macro)
     412  #define ADD addu             (macro)
     505  ADD src, src, 8*NBYTES
     523  ADD dst, dst, 8*NBYTES
     526  ADD len, 8*NBYTES # revert len (see above)
     545  ADD src, src, 4*NBYTES
          [all …]
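csum_partial accumulates 16-bit chunks into a wider register (the "ADD sum, reg" hits above), which only works because the final result is folded back down with end-around carries. That fold step, as a standalone C sketch rather than the kernel's own helpers:

    #include <stdint.h>

    /* Fold a 32-bit accumulator into a 16-bit ones'-complement checksum. */
    static uint16_t csum_fold32(uint32_t sum)
    {
        sum = (sum & 0xffff) + (sum >> 16);   /* fold the carries once...          */
        sum = (sum & 0xffff) + (sum >> 16);   /* ...and once more for the new carry */
        return (uint16_t)~sum;
    }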
/arch/metag/kernel/

head.S
      19  ADD D0Re0,D0Re0,#LO(___pTBIs)
      22  ADD D0Re0,D0Re0,#LO(___pTBISegs)
      35  ADD A0StP,A0StP,#LO(_init_thread_union)
      36  ADD A0StP,A0StP,#THREAD_INFO_SIZE
      60  ADD A0StP,A0StP,#LO(_secondary_data_stack)
      62  ADD A0StP,A0StP,#THREAD_INFO_SIZE
ftrace_stub.S
      42  ADD D0Re0,D0Re0,#LO(_ftrace_trace_function)
      45  ADD D1Re0,D1Re0,#LO(_ftrace_stub)
user_gateway.S
      40  ADD D1Ar1,D1Ar1,#LO(USER_GATEWAY_PAGE + USER_GATEWAY_TLS)
/arch/metag/lib/

memmove.S
      19  ADD D0Ar4, D0Ar4, #1
      25  ADD D0Ar2, D1Re0, D1Ar3
      26  ADD D1Ar1, D1Ar1, D1Ar3
      82  ADD D0Ar4, D0Ar4, #7
     121  ADD D1Re0, D1Re0, D1.5
     128  ADD D0Re0, D0Re0, D0.5
     140  ADD A1.2, A1.2, #8
     157  ADD D1Re0, D1Re0, D1.5
     165  ADD D0Re0, D0Re0, D0.5
     176  ADD A1.2, A1.2, #8
          [all …]
muldi3.S
      20  ADD D1Re0,D1Re0,D0Re0
      31  ADD D1Re0,D1Re0,D1Ar5
      34  ADD D1Re0,D1Re0,D0Ar6
      41  ADD D1Re0,D1Re0,D1Ar5
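muldi3.S builds a 64-bit product out of 32-bit partial products; the ADDs above are the steps that merge the cross terms into the high half. The arithmetic being implemented, written out in C (this is the math, not a transcription of the metag assembly):

    #include <stdint.h>

    /* a*b mod 2^64 = lo(a)*lo(b) + ((hi(a)*lo(b) + lo(a)*hi(b)) << 32);
     * the hi(a)*hi(b) term only affects bits above 2^64 and is dropped. */
    static uint64_t muldi3_sketch(uint64_t a, uint64_t b)
    {
        uint32_t alo = (uint32_t)a, ahi = (uint32_t)(a >> 32);
        uint32_t blo = (uint32_t)b, bhi = (uint32_t)(b >> 32);
        uint64_t cross = (uint64_t)ahi * blo + (uint64_t)alo * bhi;

        return (uint64_t)alo * blo + (cross << 32);
    }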
ip_fast_csum.S
      25  ADD D0Re0,D0Re0,D0Ar4
      27  ADD D0Re0,D0Re0,D0Ar4
div64.S
      26  ADD D1Ar5,D1Ar3,D1Ar3
      35  ADD D1Re0,D1Re0,D1Re0
      51  ADD D1Ar5,D1Ar5,D1Re0
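The repeated "ADD x,x,x" hits in div64.S are register doublings, the scaling step of a shift-and-subtract divider. A generic C version of that scheme, shown only to illustrate the double/compare/subtract structure (it is not a transcription of the metag routine, which is hand-scheduled and handles signs separately):

    #include <stdint.h>

    /* Unsigned 64/64 long division by doubling the divisor and then
     * subtracting the scaled copies back out.  Assumes d != 0. */
    static uint64_t udiv64_sketch(uint64_t n, uint64_t d, uint64_t *rem)
    {
        uint64_t q = 0, bit = 1;

        while (d < n && !(d & (1ULL << 63))) {
            d <<= 1;                /* ADD d,d,d: double the divisor */
            bit <<= 1;
        }
        while (bit) {
            if (n >= d) {
                n -= d;
                q |= bit;
            }
            d >>= 1;
            bit >>= 1;
        }
        *rem = n;
        return q;
    }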
memcpy.S
      42  ADD D1Ar5, D1Ar5, #1 ! dest is aligned when D1Ar5 reaches #8
     123  ADD D0Re0, D0Re0, D1Re0
     127  ADD D1Re0, D1Re0, D0Ar2
     147  ADD D0Re0, D0Re0, D1Ar1
     154  ADD D1Re0, D1Re0, D0FrT
     181  ADD D1Ar5, D0Ar4, D0Ar6
memset.S
      15  ADD A0.2,D0Ar2,D0Re0 ! Duplicate byte value into 4 (A0.2)
      67  ADD D1Ar1,D1Ar1,D1Ar5 ! Advance pointer to end of area
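The line-15 comment above ("Duplicate byte value into 4") is the usual memset preparation step: replicate the fill byte into every byte lane of a register so whole words can be stored per iteration. The same trick in C, for a 32-bit register like the one used here:

    #include <stdint.h>

    /* 0x2a -> 0x2a2a2a2a: fill byte copied into all four byte lanes. */
    static uint32_t spread_byte(uint8_t c)
    {
        return (uint32_t)c * 0x01010101u;
    }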
/arch/sparc/net/

bpf_jit_comp.c
      96  #define ADD F3(2, 0x00)            (macro)
     317  *prog++ = (ADD | RS1(R1) | RS2(R2) | RD(R3))
     320  *prog++ = (ADD | IMMED | RS1(R1) | S13(IMM) | RD(R3))
     332  *prog++ = (ADD | IMMED | RS1(SP) | S13(SZ) | RD(SP))
     437  emit_alu_X(ADD);            (in bpf_jit_compile())
     440  emit_alu_K(ADD, K);         (in bpf_jit_compile())
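In bpf_jit_comp.c, ADD is an instruction template (line 96, F3(2, 0x00)) that gets OR-ed with register and immediate fields and written straight into the JIT buffer ("*prog++ = ..."). A sketch of that field packing for a SPARC format-3 ALU op, using the bit positions from the SPARC V8 layout; the macro bodies and the emitter below are illustrative reconstructions, not copies of the kernel's definitions:

    #include <stdint.h>

    /* Format 3: op[31:30] rd[29:25] op3[24:19] rs1[18:14] i[13] rs2[4:0] or simm13[12:0] */
    #define F3(op, op3)   (((uint32_t)(op) << 30) | ((uint32_t)(op3) << 19))
    #define RD(r)         ((uint32_t)(r) << 25)
    #define RS1(r)        ((uint32_t)(r) << 14)
    #define RS2(r)        ((uint32_t)(r))
    #define IMMED         (1u << 13)
    #define S13(imm)      ((uint32_t)(imm) & 0x1fff)
    #define ADD           F3(2, 0x00)        /* add rs1, rs2/simm13 -> rd */

    /* Emit "add %rs1, %rs2, %rd" into a JIT buffer (illustrative emitter). */
    static void emit_add_reg(uint32_t **prog, int rd, int rs1, int rs2)
    {
        *(*prog)++ = ADD | RS1(rs1) | RS2(rs2) | RD(rd);
    }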
/arch/arm64/net/

bpf_jit.h
      90  #define A64_ADD_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD)
     134  #define A64_ADD(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, ADD)
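Here ADD is only the opcode selector handed to generic add/subtract builder macros, one for the immediate form and one for the shifted-register form. For a feel of what such a builder packs together, this is the A64 "ADD (immediate), LSL #0" layout in C; the kernel composes the word through its own A64_* helpers, so this is an independent illustration:

    #include <stdint.h>

    /* sf[31] 0[30] 0[29] 100010[28:23] 0[22] imm12[21:10] Rn[9:5] Rd[4:0]
     * e.g. a64_add_imm(1, 0, 1, 1) == 0x91000420 == "add x0, x1, #1". */
    static uint32_t a64_add_imm(int sf, int rd, int rn, uint32_t imm12)
    {
        return ((uint32_t)sf << 31) | (0x22u << 23) |
               ((imm12 & 0xfffu) << 10) | ((uint32_t)rn << 5) | (uint32_t)rd;
    }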
/arch/c6x/kernel/

entry.S
      68  ADD .D1X SP,-8,A15
     215  ADD .S1X 8,SP,A4
/arch/x86/lib/

x86-opcode-map.txt
      36  00: ADD Eb,Gb
      37  01: ADD Ev,Gv
      38  02: ADD Gb,Eb
      39  03: ADD Gv,Ev
      40  04: ADD AL,Ib
      41  05: ADD rAX,Iz
     809  0: ADD
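In the opcode map's notation (borrowed from the Intel SDM), E is a ModRM r/m operand, G the ModRM reg field, b a byte operand, v a word/dword/qword operand by operand size, and Ib/Iz are immediates; the "809  0: ADD" hit is most likely ADD's slot in one of the opcode-extension group tables (Grp1's /0 entry is ADD). Two hand-assembled byte sequences for the one-byte forms above, given only as illustrations (they are not taken from the kernel):

    /* 00 /r : ADD Eb,Gb -> "add %bl,%al" (ModRM 0xd8: mod=11, reg=BL, rm=AL) */
    static const unsigned char add_eb_gb[] = { 0x00, 0xd8 };

    /* 04 ib : ADD AL,Ib -> "add $0x2a,%al" */
    static const unsigned char add_al_ib[] = { 0x04, 0x2a };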