/external/arm-optimized-routines/string/aarch64/ |
D | strcpy-mte.S
      17: #define srcin x1   (macro)
      60: bic src, srcin, 15
      65: lsl shift, srcin, 2
      82: sub tmp, src, srcin
      87: ldr dataq, [srcin]
      88: ldr dataq2, [srcin, tmp]
     104: ldr data1, [srcin]
     105: ldr data2, [srcin, tmp]
     115: ldr dataw1, [srcin]
     116: ldr dataw2, [srcin, tmp]
     [all …]
|
D | strlen.S
      16: #define srcin x0   (macro)
      78: and tmp1, srcin, MIN_PAGE_SIZE - 1
      83: ldp data1, data2, [srcin]
     116: ldp data1, data2, [srcin, 16]
     142: bic src, srcin, 31
     155: sub len, src, srcin
     179: bic src, srcin, 31
     191: lsl shift, srcin, 1
|
D | memchr-mte.S
      16: #define srcin x0   (macro)
      49: bic src, srcin, 15
      56: lsl shift, srcin, 2
      65: add result, srcin, synd, lsr 2
      71: sub tmp, src, srcin
     100: add tmp, srcin, cntin
|
D | strnlen.S
      16: #define srcin x0   (macro)
      47: bic src, srcin, 15
      53: lsl shift, srcin, 2
      68: sub tmp, src, srcin
      97: sub result, src, srcin
|
D | strlen-mte.S
      16: #define srcin x0   (macro)
      43: bic src, srcin, 15
      48: lsl shift, srcin, 2
      70: sub result, src, srcin
|
D | strchrnul-mte.S
      16: #define srcin x0   (macro)
      45: bic src, srcin, 15
      52: lsl tmp2, srcin, 2
      61: add result, srcin, tmp1, lsr 2
|
D | strchr-mte.S
      16: #define srcin x0   (macro)
      47: bic src, srcin, 15
      59: lsl tmp3, srcin, 2
      71: add result, srcin, tmp1, lsr 2
|
D | strcpy.S
      23: #define srcin x1   (macro)
      95: and tmp2, srcin, #(MIN_PAGE_SIZE - 1)
      97: and to_align, srcin, #15
     107: ldp data1, data2, [srcin]
     197: sub src, srcin, to_align
     250: bic src, srcin, #15
|
D | strrchr-mte.S
      16: #define srcin x0   (macro)
      48: bic src, srcin, 15
      52: tst srcin, 15
      63: lsl shift, srcin, 2
|
D | memrchr.S
      16: #define srcin x0   (macro)
      50: add end, srcin, cntin
     108: cmp tmp, srcin
|
D | strchrnul.S
      17: #define srcin x0   (macro)
      56: bic src, srcin, #31   /* Work with aligned 32-byte hunks. */
      58: ands tmp1, srcin, #31
|
D | memchr.S
      17: #define srcin x0   (macro)
      62: bic src, srcin, #31
      64: ands soff, srcin, #31
|
D | strchr.S
      17: #define srcin x0   (macro)
      61: bic src, srcin, #31   /* Work with aligned 32-byte hunks. */
      63: ands tmp1, srcin, #31
|
D | strrchr.S
      17: #define srcin x0   (macro)
      65: bic src, srcin, #31   /* Work with aligned 32-byte hunks. */
      68: ands tmp1, srcin, #31
|
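
Most of the aarch64 entries above repeat the same pattern around srcin: align the load address down (bic src, srcin, 15), build a per-byte match syndrome, then shift away the bytes that precede the real start (lsl shift, srcin, 2 suggests 4 syndrome bits per byte, matching add result, srcin, synd, lsr 2 in memchr-mte.S). The following C fragment is a minimal scalar sketch of that arithmetic only; the 4-bits-per-byte syndrome layout, the name first_match_in_16, and the scalar loop standing in for the vector compare are assumptions, not code from these files.

/* Hedged sketch, not code from the routines above: a scalar model of
 *   bic src, srcin, 15       -> align the load address down to 16 bytes
 *   lsl shift, srcin, 2      -> 4 syndrome bits per input byte
 *   add result, srcin, synd, lsr 2
 * All names and the syndrome layout here are assumptions. */
#include <stdint.h>
#include <stdio.h>

static const char *first_match_in_16(const char *srcin, char c)
{
    /* Align down so the 16-byte read stays inside srcin's 16-byte granule. */
    const char *src = (const char *)((uintptr_t)srcin & ~(uintptr_t)15);

    /* Scalar stand-in for the vector compare: 4 syndrome bits per byte. */
    uint64_t synd = 0;
    for (int i = 0; i < 16; i++)
        if (src[i] == c)
            synd |= (uint64_t)0xF << (4 * i);

    /* Discard syndrome bits for the (srcin & 15) bytes before the start. */
    synd >>= ((uintptr_t)srcin & 15) * 4;
    if (synd == 0)
        return NULL;                       /* no match in this first block */

    /* Trailing-zero count / 4 gives the byte offset from srcin. */
    return srcin + (__builtin_ctzll(synd) >> 2);    /* GCC/Clang builtin */
}

int main(void)
{
    _Alignas(16) char buf[16] = "abcdefXhij";
    /* Start two bytes in; the match at buf[6] is still reported correctly. */
    printf("%td\n", first_match_in_16(buf + 2, 'X') - buf);    /* prints 6 */
    return 0;
}

The sub result, src, srcin / sub len, src, srcin lines in strnlen.S and strlen-mte.S appear to be the length-returning counterpart of the same idea, converting the aligned cursor back into an offset from the original pointer.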
/external/llvm-project/libc/AOR_v20.02/string/aarch64/ |
D | strlen.S
      21: #define srcin x0   (macro)
      78: and tmp1, srcin, MIN_PAGE_SIZE - 1
      82: ldp data1, data2, [srcin]
     114: bic src, srcin, 15
     155: sub len, src, srcin
     188: bic src, srcin, 15
     190: lsl tmp1, srcin, 3
     202: tst srcin, 8
|
D | strlen-mte.S
      17: #define srcin x0   (macro)
      65: bic src, srcin, 15   /* Align down to 16 bytes. */
      68: lsl offset, srcin, 3
      80: tbnz srcin, 3, L(skip_first_8_bytes)
      93: and tmp2, srcin, 7   /* Bytes to ignore. */
     157: sub len, src, srcin
|
D | strnlen.S
      17: #define srcin x0   (macro)
      55: bic src, srcin, #15
      56: ands tmp1, srcin, #15
      93: sub len, src, srcin
|
D | strcpy.S
      24: #define srcin x1   (macro)
      94: and tmp2, srcin, #(MIN_PAGE_SIZE - 1)
      96: and to_align, srcin, #15
     106: ldp data1, data2, [srcin]
     196: sub src, srcin, to_align
     249: bic src, srcin, #15
|
D | strchrnul.S
      18: #define srcin x0   (macro)
      56: bic src, srcin, #31   /* Work with aligned 32-byte hunks. */
      58: ands tmp1, srcin, #31
|
D | strchr-mte.S
      18: #define srcin x0   (macro)
      62: bic src, srcin, #15   /* Work with aligned 16-byte chunks. */
      64: ands tmp1, srcin, #15
|
D | memchr.S
      18: #define srcin x0   (macro)
      61: bic src, srcin, #31
      63: ands soff, srcin, #31
|
D | strchr.S
      18: #define srcin x0   (macro)
      61: bic src, srcin, #31   /* Work with aligned 32-byte hunks. */
      63: ands tmp1, srcin, #31
|
D | strrchr.S
      18: #define srcin x0   (macro)
      65: bic src, srcin, #31   /* Work with aligned 32-byte hunks. */
      68: ands tmp1, srcin, #31
|
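
The strlen.S and strcpy.S entries in both trees also mask srcin with MIN_PAGE_SIZE - 1 shortly before the unaligned ldp data1, data2, [srcin]. A plausible reading, sketched below, is that the unaligned 16-byte load is only issued when it cannot run past the end of the page containing srcin; MIN_PAGE_SIZE = 4096, the 16-byte threshold, and the name safe_to_load_16 are assumptions for illustration, not symbols from these files.

/* Hedged sketch of the page-cross guard implied by
 *   and tmp2, srcin, #(MIN_PAGE_SIZE - 1)   ...   ldp data1, data2, [srcin]
 * MIN_PAGE_SIZE, the threshold, and safe_to_load_16 are assumptions. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MIN_PAGE_SIZE 4096

/* True when a 16-byte load at srcin stays within srcin's page, so an
 * unaligned ldp cannot fault on an unmapped following page. */
static bool safe_to_load_16(const void *srcin)
{
    uintptr_t off = (uintptr_t)srcin & (MIN_PAGE_SIZE - 1);
    return off <= MIN_PAGE_SIZE - 16;
}

int main(void)
{
    char buf[64];
    printf("page offset %#lx: %s\n",
           (unsigned long)((uintptr_t)buf & (MIN_PAGE_SIZE - 1)),
           safe_to_load_16(buf) ? "unaligned 16-byte load is safe"
                                : "take the page-cross path");
    return 0;
}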
/external/llvm-project/libc/AOR_v20.02/string/arm/ |
D | strlen-armv6t2.S
      32: #define srcin r0   (macro)
      45: pld [srcin, #0]
      47: bic src, srcin, #7
      49: ands tmp1, srcin, #7   /* (8 - bytes) to alignment. */
|
/external/arm-optimized-routines/string/arm/ |
D | strlen-armv6t2.S
      31: #define srcin r0   (macro)
      44: pld [srcin, #0]
      46: bic src, srcin, #7
      48: ands tmp1, srcin, #7   /* (8 - bytes) to alignment. */
|