
Lines Matching +full:src +full:- +full:2

17 #include <asm/asm-offsets.h>
22 #define src a1
28 * memcpy copies len bytes from src to dst and sets v0 to dst.
30 * - src and dst don't overlap
31 * - src is readable
32 * - dst is writable
35 * __copy_user copies up to len bytes from src to dst and sets a2 (len) to
37 * __copy_user assumes that src and dst don't overlap, and that the call is
40 * - src is readable (no exceptions when reading src)
42 * - dst is writable (no exceptions when writing dst)
43 * __copy_user uses a non-standard calling convention; see
57 * 1- AT contains the address of the byte just past the end of the source
59 * 2- src_entry <= src < AT, and
60 * 3- (dst - src) == (dst_entry - src_entry),
64 * (2) is met by incrementing src by the number of bytes copied
65 * (3) is met by not doing loads between a pair of increments of dst and src
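
The invariants above exist so the load exception handlers can do their fixup with simple pointer arithmetic. A minimal C sketch of that arithmetic (function names are illustrative, not from the source):

    #include <stddef.h>

    /* Invariant (2): src_entry <= src < AT, so the bytes not yet
     * copied are exactly AT - src. */
    size_t bytes_uncopied(const unsigned char *at, const unsigned char *src)
    {
        return (size_t)(at - src);
    }

    /* Invariant (3): (dst - src) == (dst_entry - src_entry), so the
     * current dst can always be recovered from src alone. */
    unsigned char *current_dst(unsigned char *dst_entry,
                               const unsigned char *src_entry,
                               const unsigned char *src)
    {
        return dst_entry + (src - src_entry);
    }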
81 * Only on the 64-bit kernel can we make use of 64-bit registers.
133 #define REST(unit) (FIRST(unit)+NBYTES-1)
136 #define ADDRMASK (NBYTES-1)
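
In C terms, and assuming FIRST(unit) expands to (unit)*NBYTES as the REST() definition implies, these macros index whole register-sized units and test alignment (a sketch, with NBYTES fixed at 8 for the 64-bit case):

    #define NBYTES      8                          /* 64-bit registers */
    #define FIRST(unit) ((unit) * NBYTES)          /* assumed expansion */
    #define REST(unit)  (FIRST(unit) + NBYTES - 1) /* a unit's last byte */
    #define ADDRMASK    (NBYTES - 1)

    /* Mirrors "and t0, src, ADDRMASK": nonzero iff src is not
     * NBYTES-aligned. */
    int src_unaligned(unsigned long src)
    {
        return (src & ADDRMASK) != 0;
    }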
149 LEAF(memcpy) /* a0=dst a1=src a2=len */
156 * Note: dst & src may be unaligned, len may be 0
163 pref 0, 0(src)
166 and t0, src, ADDRMASK # Check if src unaligned
178 pref 0, 128(src) # We must not prefetch invalid addresses
181 2: pref 0, 256(src) # We must not prefetch invalid addresses
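
Both prefetches sit inside a loop whose length checks guarantee enough source bytes remain, so they never run ahead of the buffer. A rough C analogue of that bounded-prefetch pattern, using GCC's __builtin_prefetch and an assumed 256-byte lookahead:

    #include <stddef.h>

    #define PREF_AHEAD 256   /* matches "pref 0, 256(src)" above */

    void copy_with_bounded_prefetch(unsigned char *dst,
                                    const unsigned char *src, size_t len)
    {
        const unsigned char *end = src + len;
        while (src < end) {
            /* Only prefetch addresses known to be inside the source
             * buffer: "we must not prefetch invalid addresses". */
            if (src + PREF_AHEAD < end)
                __builtin_prefetch(src + PREF_AHEAD, 0 /* read */);
            *dst++ = *src++;
        }
    }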
185 EXC( LOAD t0, UNIT(0)(src), l_exc)
186 EXC( LOAD t1, UNIT(1)(src), l_exc_copy)
187 EXC( LOAD t2, UNIT(2)(src), l_exc_copy)
188 EXC( LOAD t3, UNIT(3)(src), l_exc_copy)
192 EXC( STORE t2, UNIT(2)(dst), s_exc_p14u)
194 EXC( LOAD t0, UNIT(4)(src), l_exc_copy)
195 EXC( LOAD t1, UNIT(5)(src), l_exc_copy)
196 EXC( LOAD t2, UNIT(6)(src), l_exc_copy)
197 EXC( LOAD t3, UNIT(7)(src), l_exc_copy)
201 ADD src, src, 16*NBYTES
204 EXC( LOAD t0, UNIT(-8)(src), l_exc_copy_rewind16)
205 EXC( LOAD t1, UNIT(-7)(src), l_exc_copy_rewind16)
206 EXC( LOAD t2, UNIT(-6)(src), l_exc_copy_rewind16)
207 EXC( LOAD t3, UNIT(-5)(src), l_exc_copy_rewind16)
208 EXC( STORE t0, UNIT(-8)(dst), s_exc_p8u)
209 EXC( STORE t1, UNIT(-7)(dst), s_exc_p7u)
210 EXC( STORE t2, UNIT(-6)(dst), s_exc_p6u)
211 EXC( STORE t3, UNIT(-5)(dst), s_exc_p5u)
212 EXC( LOAD t0, UNIT(-4)(src), l_exc_copy_rewind16)
213 EXC( LOAD t1, UNIT(-3)(src), l_exc_copy_rewind16)
214 EXC( LOAD t2, UNIT(-2)(src), l_exc_copy_rewind16)
215 EXC( LOAD t3, UNIT(-1)(src), l_exc_copy_rewind16)
216 EXC( STORE t0, UNIT(-4)(dst), s_exc_p4u)
217 EXC( STORE t1, UNIT(-3)(dst), s_exc_p3u)
218 EXC( STORE t2, UNIT(-2)(dst), s_exc_p2u)
219 EXC( STORE t3, UNIT(-1)(dst), s_exc_p1u)
221 beqz t0, 2b
233 EXC( LOAD t0, UNIT(0)(src), l_exc)
234 EXC( LOAD t1, UNIT(1)(src), l_exc_copy)
235 EXC( LOAD t2, UNIT(2)(src), l_exc_copy)
236 EXC( LOAD t3, UNIT(3)(src), l_exc_copy)
240 EXC( STORE t2, UNIT(2)(dst), s_exc_p6u)
242 EXC( LOAD t0, UNIT(4)(src), l_exc_copy)
243 EXC( LOAD t1, UNIT(5)(src), l_exc_copy)
244 EXC( LOAD t2, UNIT(6)(src), l_exc_copy)
245 EXC( LOAD t3, UNIT(7)(src), l_exc_copy)
250 ADD src, src, 8*NBYTES
260 EXC( LOAD t0, UNIT(0)(src), l_exc)
261 EXC( LOAD t1, UNIT(1)(src), l_exc_copy)
262 EXC( LOAD t2, UNIT(2)(src), l_exc_copy)
263 EXC( LOAD t3, UNIT(3)(src), l_exc_copy)
267 EXC( STORE t2, UNIT(2)(dst), s_exc_p2u)
269 ADD src, src, 4*NBYTES
283 EXC( LOAD t0, 0(src), l_exc)
287 ADD src, src, NBYTES
291 # 2) Copy NBYTES, then check length again
293 EXC( LOAD t0, 0(src), l_exc)
297 ADD src, src, NBYTES
303 EXC( LOAD t0, 0(src), l_exc)
305 ADD src, src, NBYTES
308 EXC( STORE t0, -8(dst), s_exc_p1u)
312 SRL t0, len, LOG_NBYTES+2 # +2 for 4 units/iter
314 and rem, len, (4*NBYTES-1) # rem = len % 4*NBYTES
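
The shift and mask split len into whole 4*NBYTES iterations plus a remainder; equivalently in C (LOG_NBYTES assumed to be 3 for NBYTES = 8):

    #define NBYTES     8
    #define LOG_NBYTES 3

    void split_len(unsigned long len,
                   unsigned long *iters, unsigned long *rem)
    {
        *iters = len >> (LOG_NBYTES + 2);   /* SRL t0, len, LOG_NBYTES+2 */
        *rem   = len & (4 * NBYTES - 1);    /* and rem, len, (4*NBYTES-1) */
    }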
320 * are to the same unit (unless src is aligned, but it's not).
322 EXC( LDFIRST t0, FIRST(0)(src), l_exc)
323 EXC( LDFIRST t1, FIRST(1)(src), l_exc_copy)
325 EXC( LDREST t0, REST(0)(src), l_exc_copy)
326 EXC( LDREST t1, REST(1)(src), l_exc_copy)
327 EXC( LDFIRST t2, FIRST(2)(src), l_exc_copy)
328 EXC( LDFIRST t3, FIRST(3)(src), l_exc_copy)
329 EXC( LDREST t2, REST(2)(src), l_exc_copy)
330 EXC( LDREST t3, REST(3)(src), l_exc_copy)
331 ADD src, src, 4*NBYTES
334 EXC( STORE t2, UNIT(2)(dst), s_exc_p2u)
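
LDFIRST/LDREST are the MIPS unaligned-load pair (LWL/LWR, or LDL/LDR for 64-bit units): two aligned accesses that together assemble one unaligned unit, which is why loading FIRST(N+1) before REST(N) is harmless. A portable C stand-in that only sketches the net effect:

    #include <stdint.h>
    #include <string.h>

    /* Behavioral stand-in for one LDFIRST/LDREST pair: fetch an
     * unaligned 64-bit unit.  The assembly uses two aligned partial
     * loads; memcpy() lets the compiler pick a safe equivalent. */
    uint64_t load_unaligned_u64(const void *p)
    {
        uint64_t v;
        memcpy(&v, p, sizeof v);
        return v;
    }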
341 and rem, len, NBYTES-1 # rem = len % NBYTES
345 EXC( LDFIRST t0, FIRST(0)(src), l_exc)
346 EXC( LDREST t0, REST(0)(src), l_exc_copy)
349 ADD src, src, NBYTES
359 EXC( lb t0, N(src), l_exc); \
366 COPY_BYTE(2)
370 EXC( lb t0, NBYTES-2(src), l_exc)
373 EXC( sb t0, NBYTES-2(dst), s_exc_p1)
380 /* Rewind src and dst by 16*NBYTES for l_exc_copy */
381 SUB src, src, 16*NBYTES
385 * Copy bytes from src until faulting load address (or until a
392 * Assumes src < THREAD_BUADDR($28)
397 EXC( lb t1, 0(src), l_exc)
398 ADD src, src, 1
399 sb t1, 0(dst) # can't fault -- we're copy_from_user
400 bne src, t0, 1b
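
After rewinding, l_exc_copy re-copies byte by byte up to the faulting address saved in THREAD_BUADDR. A C sketch of that recovery loop (how buaddr is fetched from $28 is outside this sketch):

    /* Mirrors "lb t1, 0(src); sb t1, 0(dst); bne src, t0, 1b". */
    void copy_until_fault(unsigned char *dst, const unsigned char *src,
                          const unsigned char *buaddr)
    {
        while (src != buaddr) {
            *dst++ = *src++;   /* stores can't fault: copy_from_user */
        }
        /* The uncopied length is then derived from AT and buaddr by
         * the rest of the handler. */
    }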
429 SEXC(2)
444 sltu t0, a1, t0 # dst + len <= src -> memcpy
445 sltu t1, a0, t1 # dst >= src + len -> memcpy
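
These two sltu results are combined to decide whether memmove can simply tail-call memcpy; in C terms (a sketch, names illustrative):

    #include <stddef.h>

    /* The ranges overlap iff src < dst + len AND dst < src + len;
     * when either test fails, a plain forward memcpy is safe. */
    int needs_reverse_copy(const unsigned char *dst,
                           const unsigned char *src, size_t len)
    {
        return (src < dst + len) && (dst < src + len);
    }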
453 LEAF(__rmemcpy) /* a0=dst a1=src a2=len */
455 beqz t0, r_end_bytes_up # src >= dst
458 ADD a1, a2 # src = src + len
461 lb t0, -1(a1)
463 sb t0, -1(a0)
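
__rmemcpy's descending loop, sketched in C (the matching bias of dst is not among the lines shown, and the forward r_end_bytes_up path for src >= dst is omitted here):

    #include <stddef.h>

    /* Reverse byte copy: bias both pointers to one past the end, then
     * copy backwards so an overlapping tail of src is read before it
     * is overwritten. */
    void rmemcpy_sketch(unsigned char *dst, const unsigned char *src,
                        size_t len)
    {
        dst += len;
        src += len;                 /* ADD a1, a2  # src = src + len */
        while (len--)
            *--dst = *--src;        /* lb t0, -1(a1); sb t0, -1(a0) */
    }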