/bionic/libc/arch-x86_64/string/
sse2-strlen-slm.S:
    76: pmovmskb %xmm0, %edx
    77: test %edx, %edx
    89: pmovmskb %xmm0, %edx
    90: and %r10d, %edx
    98: pmovmskb %xmm0, %edx
    99: test %edx, %edx
    103: pmovmskb %xmm1, %edx
    104: test %edx, %edx
    108: pmovmskb %xmm2, %edx
    109: test %edx, %edx
    [all …]
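
These matches are the heart of the SSE2 null-byte scan: pcmpeqb marks the NUL lanes, pmovmskb compresses the lane results into a 16-bit mask in %edx, and test decides whether the block held a terminator. A minimal 64-bit sketch of that loop follows; it is not the bionic routine, the function name is made up, and the pointer is assumed 16-byte aligned so the real file's unaligned-head handling can be omitted.

        .text
        .globl  strlen_sse2_sketch
        .type   strlen_sse2_sketch, @function
    strlen_sse2_sketch:                 /* size_t strlen_sse2_sketch(const char *s); s assumed 16-byte aligned */
        mov     %rdi, %rax              /* remember the start of the string */
    1:  pxor    %xmm0, %xmm0            /* 16 zero bytes to compare against */
        pcmpeqb (%rdi), %xmm0           /* 0xff in every lane that holds a NUL */
        pmovmskb %xmm0, %edx            /* collapse the 16 lane results into a bit mask */
        test    %edx, %edx              /* any NUL in this 16-byte block? */
        jnz     2f
        add     $16, %rdi
        jmp     1b
    2:  bsf     %edx, %edx              /* offset of the first NUL inside the block */
        sub     %rax, %rdi              /* bytes scanned in whole blocks */
        lea     (%rdi, %rdx), %rax      /* total length */
        ret
        .size   strlen_sse2_sketch, .-strlen_sse2_sketch
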
ssse3-strcmp-slm.S:
    106: pmovmskb %xmm1, %edx
    107: sub $0xffff, %edx /* if first 16 bytes are same, edx == 0xffff */
    125: mov $0xffff, %edx /* for equivalent offset */
    132: mov %edx, %r8d /* r8d is offset flag for exit tail */
    157: shr %cl, %edx /* adjust 0xffff for offset */
    159: sub %r9d, %edx
    182: pmovmskb %xmm1, %edx
    183: sub $0xffff, %edx
    197: pmovmskb %xmm1, %edx
    198: sub $0xffff, %edx
    [all …]
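
The repeated `sub $0xffff, %edx` works because pcmpeqb on two identical 16-byte blocks makes pmovmskb produce exactly 0xffff, so the subtraction leaves zero only when every byte pair matched. A small stand-alone illustration of that check (hypothetical helper name, both pointers assumed 16-byte aligned; not the bionic code):

        .text
        .globl  blocks_equal_16
        .type   blocks_equal_16, @function
    blocks_equal_16:                    /* int blocks_equal_16(const void *a, const void *b) */
        movdqa  (%rdi), %xmm1           /* 16 bytes of a */
        pcmpeqb (%rsi), %xmm1           /* 0xff in every lane where a and b agree */
        pmovmskb %xmm1, %edx            /* one mask bit per byte lane */
        xor     %eax, %eax
        sub     $0xffff, %edx           /* zero iff all 16 lanes matched */
        sete    %al                     /* return 1 when the blocks are identical */
        ret
        .size   blocks_equal_16, .-blocks_equal_16
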
/bionic/libc/arch-x86/string/
sse2-memset-slm.S:
    77: add %ecx, %edx; \
    108: movl %eax, %edx
    110: or %edx, %eax
    111: movl DST(%esp), %edx
    119: movl %eax, (%edx)
    120: movl %eax, -4(%edx, %ecx)
    123: movl %eax, 4(%edx)
    124: movl %eax, -8(%edx, %ecx)
    130: movw %ax, (%edx)
    131: movw %ax, -2(%edx, %ecx)
    [all …]
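
Two idioms are visible in these matches: broadcasting the fill byte into every byte of %eax with shift/or steps, and covering a short region with one store at the start plus an overlapping store anchored at the end. A minimal 32-bit cdecl sketch of both, under made-up assumptions (hypothetical helper memset_4to8(void *dst, int c, size_t n) with 4 <= n <= 8; not the bionic code):

        .text
        .globl  memset_4to8
        .type   memset_4to8, @function
    memset_4to8:
        movzbl  8(%esp), %eax        /* fill byte c */
        movl    %eax, %edx
        shll    $8, %eax
        orl     %edx, %eax           /* eax = 0x0000cccc */
        movl    %eax, %edx
        shll    $16, %eax
        orl     %edx, %eax           /* eax = 0xcccccccc: byte broadcast to a dword */
        movl    4(%esp), %edx        /* dst */
        movl    12(%esp), %ecx       /* n */
        movl    %eax, (%edx)         /* first 4 bytes */
        movl    %eax, -4(%edx, %ecx) /* last 4 bytes; overlaps the first store when n < 8 */
        ret
        .size   memset_4to8, .-memset_4to8
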
sse2-memset-atom.S:
    77: add %ecx, %edx; \
    102: movl %eax, %edx
    104: or %edx, %eax
    105: movl DST(%esp), %edx
    152: movl %eax, -28(%edx)
    154: movl %eax, -24(%edx)
    156: movl %eax, -20(%edx)
    158: movl %eax, -16(%edx)
    160: movl %eax, -12(%edx)
    162: movl %eax, -8(%edx)
    [all …]
sse2-wcslen-atom.S:
    66: mov STR(%esp), %edx
    68: cmpl $0, (%edx)
    70: cmpl $0, 4(%edx)
    72: cmpl $0, 8(%edx)
    74: cmpl $0, 12(%edx)
    76: cmpl $0, 16(%edx)
    78: cmpl $0, 20(%edx)
    80: cmpl $0, 24(%edx)
    82: cmpl $0, 28(%edx)
    87: lea 32(%edx), %eax
    [all …]
ssse3-strlcpy-atom.S:
    39: add %esi, %edx
    59: movl %eax, (%edx)
    75: movlpd %xmm0, (%edx)
    95: movlpd %xmm0, (%edx)
    97: movl %eax, 8(%edx)
    114: movlpd %xmm0, (%edx)
    115: movlpd %xmm1, 8(%edx)
    127: add %esi, %edx
    216: add %esi, %edx
    235: movb %bh, 3(%edx)
    [all …]
sse2-strlen-atom.S:
    102: mov STR(%esp), %edx
    111: cmpb $0, (%edx)
    113: cmpb $0, 1(%edx)
    115: cmpb $0, 2(%edx)
    117: cmpb $0, 3(%edx)
    125: cmpb $0, 4(%edx)
    127: cmpb $0, 5(%edx)
    129: cmpb $0, 6(%edx)
    131: cmpb $0, 7(%edx)
    139: cmpb $0, 8(%edx)
    [all …]
ssse3-memcpy-atom.S:
    136: movl DEST(%esp), %edx
    139: cmp %eax, %edx
    149: cmp %eax, %edx
    163: add %ecx, %edx
    177: movlpd %xmm0, (%edx)
    178: movlpd %xmm1, 8(%edx)
    183: movl %edx, %edi
    184: and $-16, %edx
    185: add $16, %edx
    186: sub %edx, %edi
    [all …]
ssse3-strlcat-atom.S:
    86: mov DST(%esp), %edx
    121: mov DST + 4(%esp), %edx
    124: add %eax, %edx
    173: add %esi, %edx
    193: movl %eax, (%edx)
    209: movlpd %xmm0, (%edx)
    229: movlpd %xmm0, (%edx)
    231: movl %eax, 8(%edx)
    248: movlpd %xmm0, (%edx)
    249: movlpd %xmm1, 8(%edx)
    [all …]
ssse3-wcscpy-atom.S:
    92: mov STR1(%esp), %edx
    105: mov %edx, %edi
    115: movdqu %xmm1, (%edx)
    123: mov %edx, %eax
    124: lea 16(%edx), %edx
    125: and $-16, %edx
    126: sub %edx, %eax
    143: movaps %xmm1, (%edx)
    152: movaps %xmm2, (%edx, %esi)
    161: movaps %xmm3, (%edx, %esi)
    [all …]
ssse3-strcpy-atom.S:
    100: # define SAVE_RESULT(n) lea n(%edx), %eax
    101: # define SAVE_RESULT_TAIL(n) lea n(%edx), %eax
    104: # define SAVE_RESULT_TAIL(n) movl %edx, %eax
    123: mov STR1(%esp), %edx
    178: mov %edx, %edi
    197: movlpd %xmm1, (%edx)
    201: movlpd %xmm1, 8(%edx)
    213: mov %edx, %eax
    214: lea 16(%edx), %edx
    215: and $-16, %edx
    [all …]
ssse3-memcmp-atom.S:
    119: movl BLK2(%esp), %edx
    128: add %ecx, %edx
    139: cmp (%edx), %cl
    160: movdqu (%edx), %xmm0
    162: movl %edx, %esi
    164: pmovmskb %xmm3, %edx
    167: sub $0xffff, %edx
    170: mov %edi, %edx
    171: and $0xf, %edx
    172: xor %edx, %edi
    [all …]
sse2-strcpy-slm.S:
    159: pmovmskb %xmm1, %edx
    160: shr %cl, %edx
    170: test %edx, %edx
    174: pmovmskb %xmm0, %edx
    184: test %edx, %edx
    191: mov %edi, %edx
    193: and $15, %edx
    203: pmovmskb %xmm0, %edx
    208: test %edx, %edx
    211: test %edx, %edx
    [all …]
sse2-memmove-slm.S:
    102: movl DEST(%esp), %edx
    105: cmp %eax, %edx
    120: movdqu %xmm0, (%edx)
    121: movdqu %xmm1, -16(%edx, %ecx)
    133: movdqu %xmm0, (%edx)
    134: movdqu %xmm1, 16(%edx)
    135: movdqu %xmm2, -16(%edx, %ecx)
    136: movdqu %xmm3, -32(%edx, %ecx)
    152: movdqu %xmm0, (%edx)
    153: movdqu %xmm1, 16(%edx)
    [all …]
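
The paired stores at (%edx) and -16(%edx, %ecx) show the short-length strategy: load the first and last chunks of the source before storing anything, so overlapping source and destination ranges are handled without a direction check. A minimal 32-bit cdecl sketch under made-up assumptions (hypothetical helper copy_16to32(void *dst, const void *src, size_t n) with 16 <= n <= 32; not the bionic code):

        .text
        .globl  copy_16to32
        .type   copy_16to32, @function
    copy_16to32:
        movl    4(%esp), %edx            /* dst */
        movl    8(%esp), %eax            /* src */
        movl    12(%esp), %ecx           /* n   */
        movdqu  (%eax), %xmm0            /* first 16 bytes of the source */
        movdqu  -16(%eax, %ecx), %xmm1   /* last 16 bytes (may overlap the first) */
        movdqu  %xmm0, (%edx)            /* both loads are done, so the stores are safe */
        movdqu  %xmm1, -16(%edx, %ecx)
        ret
        .size   copy_16to32, .-copy_16to32
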
ssse3-strcat-atom.S:
    107: mov %edi, %edx
    114: lea (%edi, %eax), %edx
    173: add %esi, %edx
    194: movlpd %xmm0, (%edx)
    216: movlpd %xmm0, (%edx)
    217: movlpd %xmm1, 8(%edx)
    223: movb %bh, 1(%edx)
    226: movb %al, (%edx)
    232: movb %bh, 2(%edx)
    235: movw %ax, (%edx)
    [all …]
sse4-memcmp-slm.S:
    136: movl BLK2(%esp), %edx
    161: add %ecx, %edx
    169: cmpb (%edx), %bl
    173: cmpb 1(%edx), %bl
    180: cmpb 2(%edx), %bl
    187: cmpb 3(%edx), %bl
    194: cmpb 4(%edx), %bl
    201: cmpb 5(%edx), %bl
    208: cmpb 6(%edx), %bl
    234: cmp (%edx), %ecx
    [all …]
sse2-wcscmp-atom.S:
    92: mov STR1(%esp), %edx
    96: cmp %ecx, (%edx)
    102: cmp %ecx, 4(%edx)
    108: cmp %ecx, 8(%edx)
    114: cmp %ecx, 12(%edx)
    121: add $16, %edx
    124: mov %edx, %edi
    129: and $63, %edx /* edi alignment in cache line */
    132: cmp $16, %edx
    134: cmp $32, %edx
    [all …]
sse2-memrchr-atom.S:
    90: mov LEN(%esp), %edx
    92: test %edx, %edx
    94: sub $16, %edx
    98: add %edx, %ecx
    115: add $16, %edx
    117: sub %eax, %edx
    122: sub $64, %edx
    150: sub $64, %edx
    183: add $64, %edx
    185: sub %eax, %edx
    [all …]
sse2-wcschr-atom.S:
    105: pmovmskb %xmm2, %edx
    107: or %eax, %edx
    122: pmovmskb %xmm2, %edx
    125: sarl %cl, %edx
    133: test %edx, %edx
    151: test %edx, %edx
    163: pmovmskb %xmm2, %edx
    165: or %eax, %edx
    172: pmovmskb %xmm2, %edx
    174: or %eax, %edx
    [all …]
ssse3-strcmp-atom.S:
    120: movl STR1(%esp), %edx
    130: cmpb %cl, (%edx)
    136: cmpb %cl, 1(%edx)
    142: cmpb %cl, 2(%edx)
    148: cmpb %cl, 3(%edx)
    154: cmpb %cl, 4(%edx)
    160: cmpb %cl, 5(%edx)
    166: cmpb %cl, 6(%edx)
    172: cmpb %cl, 7(%edx)
    177: add $8, %edx
    [all …]
sse2-memchr-atom.S:
    94: mov LEN(%esp), %edx
    95: test %edx, %edx
    113: sub $16, %edx
    118: add %ecx, %edx
    119: sub $64, %edx
    134: lea -16(%edx), %edx
    135: add %ecx, %edx
    138: sub $64, %edx
    172: sub $64, %edx
    207: add %ecx, %edx
    [all …]
/bionic/libc/arch-x86/generic/string/
strcmp.S:
    17: movl 0x08(%esp),%edx
    22: incl %edx
    26: cmpb %cl,(%edx) /* chars match??? */
    29: incl %edx
    33: cmpb %cl,(%edx)
    36: incl %edx
    40: cmpb %cl,(%edx)
    43: incl %edx
    47: cmpb %cl,(%edx)
    50: incl %edx
    [all …]
strcat.S:
    25: movl 12(%esp),%edx /* src address */
    37: L1: movb (%edx),%al /* unroll loop, but not too much */
    41: movb 1(%edx),%al
    45: movb 2(%edx),%al
    49: movb 3(%edx),%al
    53: movb 4(%edx),%al
    57: movb 5(%edx),%al
    61: movb 6(%edx),%al
    65: movb 7(%edx),%al
    67: addl $8,%edx
strncmp.S:
    19: movl 16(%esp),%edx
    20: testl %edx,%edx
    26: decl %edx
    36: decl %edx
    46: decl %edx
    56: decl %edx
    66: decl %edx
    76: decl %edx
    86: decl %edx
    96: decl %edx
/bionic/libc/arch-x86/bionic/
setjmp.S:
    118: movl 0(%esp),%edx
    124: m_mangle_register %edx, _JB_EDX
    140: movl 4(%esp),%edx
    143: m_calculate_checksum %eax, %edx
    144: xorl (_JB_CHECKSUM * 4)(%edx), %eax
    148: movl (_JB_SIGFLAG * 4)(%edx), %eax
    153: leal (_JB_SIGMASK * 4)(%edx),%eax
    164: movl 4(%esp),%edx
    168: movl (_JB_SIGFLAG * 4)(%edx),%ecx
    173: movl (_JB_ESP * 4)(%edx),%edi
    [all …]
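
The m_mangle_register and m_calculate_checksum lines reflect bionic's hardening of jmp_buf contents: saved registers are XOR-mangled with a per-process cookie and the buffer carries a checksum that longjmp verifies. A rough 32-bit, non-PIC sketch of just the mangling idea (the helper name, cookie symbol, and constant below are made up; bionic randomizes its cookie at process start):

        .data
    setjmp_cookie:                      /* stand-in value; the real cookie is randomized at startup */
        .long   0x5ca1ab1e
        .text
        .globl  mangle_and_store
        .type   mangle_and_store, @function
    mangle_and_store:                   /* void mangle_and_store(uint32_t value, uint32_t *slot) */
        movl    4(%esp), %eax           /* register value to protect */
        movl    8(%esp), %ecx           /* destination slot inside the jmp_buf */
        xorl    setjmp_cookie, %eax     /* fold in the per-process secret */
        movl    %eax, (%ecx)            /* a forged buffer now restores a useless value */
        ret
        .size   mangle_and_store, .-mangle_and_store
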