/bionic/libc/arch-x86_64/string/
sse2-strlen-slm.S:
  76: pmovmskb %xmm0, %edx
  77: test %edx, %edx
  89: pmovmskb %xmm0, %edx
  90: and %r10d, %edx
  98: pmovmskb %xmm0, %edx
  99: test %edx, %edx
  103: pmovmskb %xmm1, %edx
  104: test %edx, %edx
  108: pmovmskb %xmm2, %edx
  109: test %edx, %edx
  [all …]

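The pmovmskb/test pairs above are the core of the SSE2 zero-byte scan: compare a 16-byte block against zero, collapse the comparison to a 16-bit mask, and branch as soon as the mask is non-zero. A minimal C sketch of that technique (SSE2 intrinsics plus the GCC/Clang __builtin_ctz; an illustration of the pattern, not the bionic routine itself):

    #include <emmintrin.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Sketch: scan 16 aligned bytes at a time for the terminating NUL. */
    static size_t strlen_sse2_sketch(const char *s) {
        const __m128i zero = _mm_setzero_si128();
        /* Align down to 16 bytes and discard mask bits for bytes before s. */
        const char *p = (const char *)((uintptr_t)s & ~(uintptr_t)15);
        unsigned mask = (unsigned)_mm_movemask_epi8(              /* pmovmskb */
            _mm_cmpeq_epi8(_mm_load_si128((const __m128i *)p), zero));
        mask >>= (uintptr_t)s & 15;
        if (mask)                                                 /* test %edx, %edx */
            return (size_t)__builtin_ctz(mask);
        for (;;) {
            p += 16;
            mask = (unsigned)_mm_movemask_epi8(
                _mm_cmpeq_epi8(_mm_load_si128((const __m128i *)p), zero));
            if (mask)
                return (size_t)(p - s) + (size_t)__builtin_ctz(mask);
        }
    }

Reading the whole aligned block that contains s never crosses a page boundary, which is why discarding the leading mask bits is safe.
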
/bionic/libc/arch-x86/atom/string/
sse2-wcslen-atom.S:
  66: mov STR(%esp), %edx
  68: cmpl $0, (%edx)
  70: cmpl $0, 4(%edx)
  72: cmpl $0, 8(%edx)
  74: cmpl $0, 12(%edx)
  76: cmpl $0, 16(%edx)
  78: cmpl $0, 20(%edx)
  80: cmpl $0, 24(%edx)
  82: cmpl $0, 28(%edx)
  87: lea 32(%edx), %eax
  [all …]

sse2-memset-atom.S:
  115: add %ecx, %edx; \
  137: add %ecx, %edx; \
  157: movl %eax, %edx
  159: or %edx, %eax
  161: movl DEST(%esp), %edx
  208: movl %eax, -28(%edx)
  210: movl %eax, -24(%edx)
  212: movl %eax, -20(%edx)
  214: movl %eax, -16(%edx)
  216: movl %eax, -12(%edx)
  [all …]

ssse3-strlcpy-atom.S:
  39: add %esi, %edx
  59: movl %eax, (%edx)
  75: movlpd %xmm0, (%edx)
  95: movlpd %xmm0, (%edx)
  97: movl %eax, 8(%edx)
  114: movlpd %xmm0, (%edx)
  115: movlpd %xmm1, 8(%edx)
  127: add %esi, %edx
  216: add %esi, %edx
  235: movb %bh, 3(%edx)
  [all …]

sse2-strlen-atom.S:
  102: mov STR(%esp), %edx
  111: cmpb $0, (%edx)
  113: cmpb $0, 1(%edx)
  115: cmpb $0, 2(%edx)
  117: cmpb $0, 3(%edx)
  125: cmpb $0, 4(%edx)
  127: cmpb $0, 5(%edx)
  129: cmpb $0, 6(%edx)
  131: cmpb $0, 7(%edx)
  139: cmpb $0, 8(%edx)
  [all …]

ssse3-strlcat-atom.S:
  86: mov DST(%esp), %edx
  121: mov DST + 4(%esp), %edx
  124: add %eax, %edx
  173: add %esi, %edx
  193: movl %eax, (%edx)
  209: movlpd %xmm0, (%edx)
  229: movlpd %xmm0, (%edx)
  231: movl %eax, 8(%edx)
  248: movlpd %xmm0, (%edx)
  249: movlpd %xmm1, 8(%edx)
  [all …]

ssse3-wcscpy-atom.S:
  92: mov STR1(%esp), %edx
  105: mov %edx, %edi
  115: movdqu %xmm1, (%edx)
  123: mov %edx, %eax
  124: lea 16(%edx), %edx
  125: and $-16, %edx
  126: sub %edx, %eax
  143: movaps %xmm1, (%edx)
  152: movaps %xmm2, (%edx, %esi)
  161: movaps %xmm3, (%edx, %esi)
  [all …]

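The mov/lea/and/sub run at lines 123-126 rounds the destination up to the next 16-byte boundary and keeps the signed distance back to the original pointer. The same arithmetic in C (a sketch; the names here are illustrative):

    #include <stddef.h>
    #include <stdint.h>

    /* "lea 16(%edx),%edx; and $-16,%edx" always advances to the next
       16-byte boundary, even if dst is already aligned; "sub %edx,%eax"
       leaves the (negative) offset from that boundary back to dst. */
    static char *round_up_16(char *dst, ptrdiff_t *back) {
        char *aligned = (char *)(((uintptr_t)dst + 16) & ~(uintptr_t)15);
        *back = dst - aligned;   /* always in [-16, -1] */
        return aligned;
    }
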
ssse3-memcpy-atom.S:
  141: movl DEST(%esp), %edx
  144: cmp %eax, %edx
  154: cmp %eax, %edx
  168: add %ecx, %edx
  182: movlpd %xmm0, (%edx)
  183: movlpd %xmm1, 8(%edx)
  188: movl %edx, %edi
  189: and $-16, %edx
  190: add $16, %edx
  191: sub %edx, %edi
  [all …]

ssse3-strcpy-atom.S:
  100: # define SAVE_RESULT(n) lea n(%edx), %eax
  101: # define SAVE_RESULT_TAIL(n) lea n(%edx), %eax
  104: # define SAVE_RESULT_TAIL(n) movl %edx, %eax
  123: mov STR1(%esp), %edx
  178: mov %edx, %edi
  197: movlpd %xmm1, (%edx)
  201: movlpd %xmm1, 8(%edx)
  213: mov %edx, %eax
  214: lea 16(%edx), %edx
  215: and $-16, %edx
  [all …]

ssse3-memcmp-atom.S:
  119: movl BLK2(%esp), %edx
  128: add %ecx, %edx
  139: cmp (%edx), %cl
  160: movdqu (%edx), %xmm0
  162: movl %edx, %esi
  164: pmovmskb %xmm3, %edx
  167: sub $0xffff, %edx
  170: mov %edi, %edx
  171: and $0xf, %edx
  172: xor %edx, %edi
  [all …]

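The "pmovmskb %xmm3, %edx; sub $0xffff, %edx" pair is the usual 16-byte equality test: the movemask of a byte-wise compare is 0xffff exactly when all 16 bytes match, so subtracting 0xffff leaves zero only for equal blocks. A C sketch of that check (illustration only, using GCC/Clang __builtin_ctz; the real routine also deals with alignment and the remaining length):

    #include <emmintrin.h>

    /* Return -1 if two 16-byte blocks are equal, otherwise the index of
       the first differing byte (mask == 0xffff means "all bytes equal"). */
    static int first_diff_16(const unsigned char *a, const unsigned char *b) {
        __m128i va = _mm_loadu_si128((const __m128i *)a);
        __m128i vb = _mm_loadu_si128((const __m128i *)b);
        unsigned mask = (unsigned)_mm_movemask_epi8(_mm_cmpeq_epi8(va, vb));
        if (mask == 0xffffu)
            return -1;
        return __builtin_ctz(~mask & 0xffffu);
    }
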
ssse3-strcat-atom.S:
  107: mov %edi, %edx
  114: lea (%edi, %eax), %edx
  173: add %esi, %edx
  194: movlpd %xmm0, (%edx)
  216: movlpd %xmm0, (%edx)
  217: movlpd %xmm1, 8(%edx)
  223: movb %bh, 1(%edx)
  226: movb %al, (%edx)
  232: movb %bh, 2(%edx)
  235: movw %ax, (%edx)
  [all …]

sse2-wcscmp-atom.S:
  92: mov STR1(%esp), %edx
  96: cmp %ecx, (%edx)
  102: cmp %ecx, 4(%edx)
  108: cmp %ecx, 8(%edx)
  114: cmp %ecx, 12(%edx)
  121: add $16, %edx
  124: mov %edx, %edi
  129: and $63, %edx /* edi alignment in cache line */
  132: cmp $16, %edx
  134: cmp $32, %edx
  [all …]

/bionic/libc/arch-x86/silvermont/string/
sse2-memset-slm.S:
  119: add %ecx, %edx; \
  141: add %ecx, %edx; \
  163: movl %eax, %edx
  165: or %edx, %eax
  167: movl DEST(%esp), %edx
  175: movl %eax, (%edx)
  176: movl %eax, -4(%edx, %ecx)
  179: movl %eax, 4(%edx)
  180: movl %eax, -8(%edx, %ecx)
  186: movw %ax, (%edx)
  [all …]

sse2-memcpy-slm.S:
  101: movl DEST(%esp), %edx
  103: cmp %eax, %edx
  115: movdqu %xmm0, (%edx)
  116: movdqu %xmm1, -16(%edx, %ecx)
  122: movdqu %xmm0, 16(%edx)
  123: movdqu %xmm1, -32(%edx, %ecx)
  131: movdqu %xmm0, 32(%edx)
  132: movdqu %xmm1, 48(%edx)
  133: movdqu %xmm2, -48(%edx, %ecx)
  134: movdqu %xmm3, -64(%edx, %ecx)
  [all …]

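The paired stores at (%edx) and -16(%edx, %ecx) (and their 32/48/64-byte variants) are the overlapping head/tail trick for small copies: one block is written at the start of the destination and one at dst + n - 16, so every length in a size bucket is covered without a byte loop. A sketch of the 16-to-32-byte bucket (illustration only, assuming the caller has already checked the length):

    #include <emmintrin.h>
    #include <stddef.h>

    /* Copy n bytes where 16 <= n <= 32: both loads happen before the
       stores, and the two 16-byte stores may overlap in the middle. */
    static void copy_16_to_32(void *dst, const void *src, size_t n) {
        __m128i head = _mm_loadu_si128((const __m128i *)src);
        __m128i tail = _mm_loadu_si128(
            (const __m128i *)((const char *)src + n - 16));
        _mm_storeu_si128((__m128i *)dst, head);                    /* movdqu %xmm0, (%edx) */
        _mm_storeu_si128((__m128i *)((char *)dst + n - 16), tail); /* movdqu %xmm1, -16(%edx, %ecx) */
    }
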
sse2-strcpy-slm.S:
  159: pmovmskb %xmm1, %edx
  160: shr %cl, %edx
  170: test %edx, %edx
  174: pmovmskb %xmm0, %edx
  184: test %edx, %edx
  191: mov %edi, %edx
  193: and $15, %edx
  203: pmovmskb %xmm0, %edx
  208: test %edx, %edx
  211: test %edx, %edx
  [all …]

sse2-memmove-slm.S:
  107: movl DEST(%esp), %edx
  110: cmp %eax, %edx
  125: movdqu %xmm0, (%edx)
  126: movdqu %xmm1, -16(%edx, %ecx)
  138: movdqu %xmm0, (%edx)
  139: movdqu %xmm1, 16(%edx)
  140: movdqu %xmm2, -16(%edx, %ecx)
  141: movdqu %xmm3, -32(%edx, %ecx)
  157: movdqu %xmm0, (%edx)
  158: movdqu %xmm1, 16(%edx)
  [all …]

sse4-memcmp-slm.S:
  136: movl BLK2(%esp), %edx
  161: add %ecx, %edx
  169: cmpb (%edx), %bl
  173: cmpb 1(%edx), %bl
  180: cmpb 2(%edx), %bl
  187: cmpb 3(%edx), %bl
  194: cmpb 4(%edx), %bl
  201: cmpb 5(%edx), %bl
  208: cmpb 6(%edx), %bl
  234: cmp (%edx), %ecx
  [all …]

/bionic/libc/arch-x86/generic/string/
strcmp.S:
  17: movl 0x08(%esp),%edx
  22: incl %edx
  26: cmpb %cl,(%edx) /* chars match??? */
  29: incl %edx
  33: cmpb %cl,(%edx)
  36: incl %edx
  40: cmpb %cl,(%edx)
  43: incl %edx
  47: cmpb %cl,(%edx)
  50: incl %edx
  [all …]

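The generic strcmp is a plain byte loop, unrolled: fetch a character from one string, compare it against the other ("cmpb %cl,(%edx)"), and stop on a mismatch or a NUL. Its C shape, minus the unrolling, is roughly:

    /* Byte-at-a-time strcmp sketch; the assembly unrolls this loop. */
    static int strcmp_sketch(const char *s1, const char *s2) {
        unsigned char c1, c2;
        do {
            c1 = (unsigned char)*s1++;
            c2 = (unsigned char)*s2++;
            if (c1 != c2)
                return (int)c1 - (int)c2;
        } while (c1 != 0);
        return 0;
    }
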
strcat.S:
  25: movl 12(%esp),%edx /* src address */
  37: L1: movb (%edx),%al /* unroll loop, but not too much */
  41: movb 1(%edx),%al
  45: movb 2(%edx),%al
  49: movb 3(%edx),%al
  53: movb 4(%edx),%al
  57: movb 5(%edx),%al
  61: movb 6(%edx),%al
  65: movb 7(%edx),%al
  67: addl $8,%edx

strncmp.S:
  19: movl 16(%esp),%edx
  20: testl %edx,%edx
  26: decl %edx
  36: decl %edx
  46: decl %edx
  56: decl %edx
  66: decl %edx
  76: decl %edx
  86: decl %edx
  96: decl %edx

/bionic/libm/x86/
e_pow.S:
  120: movl $30704, %edx
  121: pinsrw $3, %edx, %xmm1
  124: movl $8192, %edx
  125: movd %edx, %xmm4
  132: movl %eax, %edx
  133: andl $32752, %edx
  134: subl $16368, %edx
  135: movl %edx, %ecx
  136: sarl $31, %edx
  137: addl %edx, %ecx
  [all …]

s_atan.S:
  91: pextrw $3, %xmm0, %edx
  95: movl %edx, %eax
  96: andl $32767, %edx
  97: subl $16288, %edx
  98: cmpl $159, %edx
  104: addl $1, %edx
  110: addl %edx, %edx
  111: movsd (%ebx,%edx,8), %xmm6
  112: movsd 8(%ebx,%edx,8), %xmm5
  137: addl $944, %edx
  [all …]

/bionic/libc/arch-x86/bionic/
setjmp.S:
  61: xorl \reg,%edx
  127: movl 0(%esp),%edx
  129: movl %edx,(_JB_EDX * 4)(%ecx)
  145: movl 4(%esp),%edx
  148: m_calculate_checksum %eax, %edx
  149: xorl (_JB_CHECKSUM * 4)(%edx), %eax
  153: movl (_JB_SIGFLAG * 4)(%edx), %eax
  159: pushl (_JB_SIGMASK * 4)(%edx)
  166: movl 4(%esp),%edx
  169: movl (_JB_SIGFLAG * 4)(%edx),%ecx
  [all …]

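The m_calculate_checksum hits (the repeated "xorl \reg,%edx" in the macro and the later xor against the _JB_CHECKSUM slot) point at an XOR fold over the saved registers that is recomputed and compared when the buffer is restored. A sketch of that idea (the names and the abort-on-mismatch policy here are illustrative, not bionic's exact code):

    #include <stdint.h>
    #include <stdlib.h>

    /* XOR all saved words together ("xorl \reg, %edx" per register). */
    static uint32_t jb_checksum(const uint32_t *words, int count) {
        uint32_t sum = 0;
        for (int i = 0; i < count; ++i)
            sum ^= words[i];
        return sum;
    }

    /* On restore, a non-zero xor against the stored checksum means the
       saved buffer was corrupted, so refuse to jump through it. */
    static void jb_verify(const uint32_t *saved, int count, uint32_t stored) {
        if (jb_checksum(saved, count) ^ stored)
            abort();
    }
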
/bionic/libm/x86_64/
e_pow.S:
  105: movl $32752, %edx
  106: andl %eax, %edx
  107: subl $16368, %edx
  108: movl %edx, %ecx
  109: sarl $31, %edx
  110: addl %edx, %ecx
  111: xorl %edx, %ecx
  130: movl $-1, %edx
  132: shll %cl, %edx
  141: movd %xmm0, %edx
  [all …]

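The run at lines 105-111 (andl/subl, then movl/sarl/addl/xorl) masks the exponent field out of the double's high word and takes its distance from the bias as a branch-free absolute value: 32752 is 0x7ff0, the exponent mask in that 16-bit word, and 16368 is 0x3ff0, the bias in the same position. The same arithmetic in C (sketch; assumes arithmetic right shift of negative ints, as on the compilers this code targets):

    #include <stdint.h>

    /* Exponent mask and bias as they appear in the high 16 bits of an
       IEEE-754 double. */
    enum { EXP_MASK_HI = 0x7ff0 /* 32752 */, EXP_BIAS_HI = 0x3ff0 /* 16368 */ };

    /* Branch-free |x| for 32-bit two's complement (INT32_MIN excluded),
       mirroring "movl %edx,%ecx; sarl $31,%edx; addl %edx,%ecx; xorl %edx,%ecx". */
    static int32_t abs_branchfree(int32_t x) {
        int32_t s = x >> 31;     /* 0 if x >= 0, -1 if x < 0 */
        return (x + s) ^ s;
    }
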
s_atan.S:
  74: pextrw $3, %xmm0, %edx
  78: movl %edx, %eax
  79: andl $32767, %edx
  80: subl $16288, %edx
  81: cmpl $159, %edx
  87: addl $1, %edx
  93: addl %edx, %edx
  119: addl $944, %edx
  120: cmpl $1103, %edx
  140: addl $15344, %edx
  [all …]