/bionic/libc/arch-x86_64/bionic/ |
D | _setjmp.S |
    54   movq %rbx,(_JB_RBX * 8)(%rdi)
    55   movq %rbp,(_JB_RBP * 8)(%rdi)
    56   movq %r12,(_JB_R12 * 8)(%rdi)
    57   movq %r13,(_JB_R13 * 8)(%rdi)
    58   movq %r14,(_JB_R14 * 8)(%rdi)
    59   movq %r15,(_JB_R15 * 8)(%rdi)
    60   movq %rsp,(_JB_RSP * 8)(%rdi)
    61   movq %r11,(_JB_PC * 8)(%rdi)
    68   movq (_JB_RBX * 8)(%rdi),%rbx
    69   movq (_JB_RBP * 8)(%rdi),%rbp
    [all …]
|
D | sigsetjmp.S |
    54   movl %esi,(_JB_SIGFLAG * 8)(%rdi)
    58   pushq %rdi
    59   xorq %rdi,%rdi
    61   popq %rdi
    62   movq %rax,(_JB_SIGMASK * 8)(%rdi)
    65   movq %rbx,(_JB_RBX * 8)(%rdi)
    66   movq %rbp,(_JB_RBP * 8)(%rdi)
    67   movq %r12,(_JB_R12 * 8)(%rdi)
    68   movq %r13,(_JB_R13 * 8)(%rdi)
    69   movq %r14,(_JB_R14 * 8)(%rdi)
    [all …]
|
D | setjmp.S |
    53   pushq %rdi
    54   xorq %rdi,%rdi
    56   popq %rdi
    57   movq %rax,(_JB_SIGMASK * 8)(%rdi)
    60   movq %rbx,(_JB_RBX * 8)(%rdi)
    61   movq %rbp,(_JB_RBP * 8)(%rdi)
    62   movq %r12,(_JB_R12 * 8)(%rdi)
    63   movq %r13,(_JB_R13 * 8)(%rdi)
    64   movq %r14,(_JB_R14 * 8)(%rdi)
    65   movq %r15,(_JB_R15 * 8)(%rdi)
    [all …]
|
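Under the SysV AMD64 calling convention, %rdi carries the first integer argument, so in all three setjmp variants above it holds the jmp_buf pointer: the callee-saved registers, %rsp and the saved return address go into fixed slots (_JB_RBX, _JB_RBP, …, _JB_PC), and setjmp.S/sigsetjmp.S additionally stash the signal mask in _JB_SIGMASK. A minimal C sketch of how such a buffer is used from the caller's side (plain standard setjmp/longjmp usage, not bionic internals):

    #include <setjmp.h>
    #include <stdio.h>

    static jmp_buf env;   /* backing store for the registers saved in _setjmp.S */

    static void fail(void) {
        /* Restores rbx/rbp/r12-r15/rsp and jumps to the saved PC, making the
         * original setjmp() call return a second time with value 1. */
        longjmp(env, 1);
    }

    int main(void) {
        if (setjmp(env) == 0) {         /* first return: registers just saved */
            puts("saved context");
            fail();                     /* never returns normally */
        } else {
            puts("restored context");   /* reached via longjmp */
        }
        return 0;
    }
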
D | vfork.S |
    34   popq %rdi   // Grab the return address.
    37   pushq %rdi  // Restore the return address.
|
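vfork.S pops the caller's return address into %rdi before the system call and pushes it back afterwards (lines 34 and 37); presumably this keeps the return address in a register across the fork because a vfork child runs on the parent's stack. A short usage sketch of the resulting libc call (the ordinary POSIX vfork/exec/_exit pattern, nothing bionic-specific):

    #include <stdio.h>
    #include <sys/types.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main(void) {
        pid_t pid = vfork();          /* child borrows the parent's stack until exec/_exit */
        if (pid == -1) {
            perror("vfork");
            return 1;
        }
        if (pid == 0) {
            /* Only exec or _exit is safe here; do not return or call exit(). */
            execlp("echo", "echo", "hello from the vfork child", (char *)NULL);
            _exit(127);               /* exec failed */
        }
        int status;
        waitpid(pid, &status, 0);
        return WIFEXITED(status) ? WEXITSTATUS(status) : 1;
    }
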
D | _exit_with_stack_teardown.S | 37 mov $0, %rdi
|
D | syscall.S | 47 mov %rsi, %rdi
|
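In syscall.S the `mov %rsi, %rdi` at line 47 is part of shifting the C arguments down one slot: syscall(3) receives the system-call number as its first C argument in %rdi, but the kernel wants the number in %rax and the call's own arguments starting in %rdi, so each argument moves down one register. Calling it from C is just the ordinary wrapper (SYS_* constants from <sys/syscall.h>; Linux-specific):

    #include <stdio.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    int main(void) {
        /* The wrapper loads the number into %rax and shifts the remaining
         * arguments down one register slot (%rsi -> %rdi, %rdx -> %rsi, ...). */
        long tid = syscall(SYS_gettid);
        long written = syscall(SYS_write, 1, "hello\n", 6);
        printf("tid=%ld wrote=%ld\n", tid, written);
        return 0;
    }
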
/bionic/libc/arch-x86_64/string/ |
D | sse2-strcpy-slm.S |
    86   mov %rdi, %rax   /* save result */
    128  movdqu %xmm1, (%rdi)
    133  sub %rcx, %rdi
    140  movdqu %xmm1, (%rdi, %rcx)
    156  movdqu %xmm2, (%rdi, %rcx)
    172  movdqu %xmm3, (%rdi, %rcx)
    188  movdqu %xmm4, (%rdi, %rcx)
    204  movdqu %xmm1, (%rdi, %rcx)
    220  movdqu %xmm2, (%rdi, %rcx)
    235  movdqu %xmm3, (%rdi, %rcx)
    [all …]
|
D | sse2-memset-slm.S |
    69   movq %rdi, %rax
    88   movb %cl, (%rdi)
    93   movq %rcx, (%rdi)
    94   movq %rcx, -8(%rdi, %rdx)
    98   movl %ecx, (%rdi)
    99   movl %ecx, -4(%rdi, %rdx)
    103  movw %cx, (%rdi)
    104  movw %cx, -2(%rdi, %rdx)
    115  movdqu %xmm0, (%rdi)
    116  movdqu %xmm0, -16(%rdi, %rdx)
    [all …]
|
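The short-length paths of sse2-memset-slm.S store the fill pattern both at the start of the buffer and flush against its end (for example `movq %rcx, (%rdi)` paired with `movq %rcx, -8(%rdi, %rdx)`), so two possibly overlapping stores cover a whole range of lengths without a byte loop. A small C sketch of that overlapping head/tail idea (hypothetical helper, only meaningful for 8 <= n <= 16):

    #include <stdint.h>
    #include <string.h>

    /* Illustration of the overlapping head/tail store trick: one 8-byte store
     * at the start, one ending exactly at the end; they may overlap, which is
     * harmless because both write the same fill pattern. Assumes 8 <= n <= 16. */
    static void fill8to16(unsigned char *dst, unsigned char c, size_t n) {
        uint64_t pattern = 0x0101010101010101ull * c;  /* broadcast c to 8 bytes */
        memcpy(dst, &pattern, 8);                      /* head */
        memcpy(dst + n - 8, &pattern, 8);              /* tail, may overlap head */
    }
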
D | sse4-memcmp-slm.S |
    94   add %rdx, %rdi
    100  movzbl (%rdi), %eax
    109  movdqu (%rdi), %xmm2
    118  sub %rcx, %rdi
    120  test $0xf, %rdi
    128  movdqu (%rdi), %xmm2
    133  movdqu 16(%rdi), %xmm2
    138  movdqu 32(%rdi), %xmm2
    143  movdqu 48(%rdi), %xmm2
    150  movdqu 64(%rdi), %xmm2
    [all …]
|
D | sse2-memcpy-slm.S |
    94   cmp %rsi, %rdi
    106  movdqu %xmm0, (%rdi)
    107  movdqu %xmm1, -16(%rdi, %rdx)
    113  movdqu %xmm0, 16(%rdi)
    114  movdqu %xmm1, -32(%rdi, %rdx)
    122  movdqu %xmm0, 32(%rdi)
    123  movdqu %xmm1, 48(%rdi)
    124  movdqu %xmm2, -48(%rdi, %rdx)
    125  movdqu %xmm3, -64(%rdi, %rdx)
    129  lea 64(%rdi), %r8
    [all …]
|
D | sse2-memmove-slm.S |
    95   xchg %rsi, %rdi
    97   mov %rdi, %rax
    100  cmp %rsi, %rdi
    115  movdqu %xmm0, (%rdi)
    116  movdqu %xmm1, -16(%rdi, %rdx)
    128  movdqu %xmm0, (%rdi)
    129  movdqu %xmm1, 16(%rdi)
    130  movdqu %xmm2, -16(%rdi, %rdx)
    131  movdqu %xmm3, -32(%rdi, %rdx)
    147  movdqu %xmm0, (%rdi)
    [all …]
|
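sse2-memmove-slm.S compares the source and destination pointers (`cmp %rsi, %rdi`, line 100) to pick a forward or backward copy, which is what makes overlapping moves safe; its small-size paths then reuse the same overlapping first/last stores seen in memcpy and memset above. A generic C sketch of that direction decision (plain byte-wise memmove logic, not the SLM-tuned code):

    #include <stddef.h>
    #include <stdint.h>

    /* Generic sketch of the overlap-direction decision a memmove must make:
     * copy forward when the destination lies below the source, backward otherwise. */
    static void *my_memmove(void *dst, const void *src, size_t n) {
        unsigned char *d = dst;
        const unsigned char *s = src;
        if ((uintptr_t)d < (uintptr_t)s) {
            for (size_t i = 0; i < n; i++) d[i] = s[i];          /* forward copy */
        } else {
            for (size_t i = n; i > 0; i--) d[i - 1] = s[i - 1];  /* backward copy */
        }
        return dst;
    }
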
D | ssse3-strcmp-slm.S |
    98   movlpd (%rdi), %xmm1
    100  movhpd 8(%rdi), %xmm1
    114  add $16, %rdi                  /* prepare to search next 16 bytes */
    124  and $0xfffffffffffffff0, %rdi  /* force %rdi is 16 byte aligned */
    134  xchg %rsi, %rdi
    154  pcmpeqb (%rdi), %xmm1          /* compare 16 bytes for equality */
    177  movdqa (%rdi, %rcx), %xmm2
    192  movdqa (%rdi, %rcx), %xmm2
    215  movdqa (%rdi), %xmm2
    226  movdqa (%rdi), %xmm3
    [all …]
|
D | sse2-strcat-slm.S |
    66   mov %rdi, %r9
    78   lea (%r9, %rax), %rdi
|
D | sse2-strlen-slm.S |
    74   movdqu (%rdi), %xmm1
    79   mov %rdi, %rax
    83   mov %rdi, %rax
    250  sub %rdi, %rax
    258  sub %rdi, %rax
    265  sub %rdi, %rax
    272  sub %rdi, %rax
    279  sub %rdi, %rax
    286  sub %rdi, %rax
|
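The repeated `sub %rdi, %rax` lines in sse2-strlen-slm.S compute the result as a pointer difference: %rdi still holds the original string pointer while %rax holds the address where the terminating NUL was found. The same idea in plain C (illustrative only):

    #include <stddef.h>

    /* Sketch of the pointer-difference step behind `sub %rdi, %rax`:
     * the length is simply (position of the NUL) - (start of the string). */
    static size_t my_strlen(const char *s) {
        const char *p = s;
        while (*p) p++;          /* find the terminating NUL */
        return (size_t)(p - s);  /* end - start, i.e. %rax - %rdi */
    }
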
/bionic/linker/arch/x86_64/ |
D | begin.S | 32 mov %rsp, %rdi
|
/bionic/libc/kernel/uapi/asm-x86/asm/ |
D | ptrace.h | 71 unsigned long rdi; member
|
D | sigcontext.h | 145 __u64 rdi; member
|
D | kvm.h | 131 __u64 rsi, rdi, rsp, rbp; member
|
/bionic/libc/include/sys/ |
D | user.h | 131 unsigned long rdi;
|
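The kernel UAPI headers (ptrace.h, sigcontext.h, kvm.h) and bionic's <sys/user.h> above all expose rdi as a plain field of their register-dump structures, which is how debuggers and signal handlers read the register from C. A hedged sketch using the Linux ptrace(PTRACE_GETREGS, …) request with struct user_regs_struct from <sys/user.h> (assuming the usual x86_64 layout with an rdi member, as the user.h match above suggests):

    #include <signal.h>
    #include <stdio.h>
    #include <sys/ptrace.h>
    #include <sys/types.h>
    #include <sys/user.h>
    #include <sys/wait.h>
    #include <unistd.h>

    int main(void) {
        pid_t child = fork();
        if (child == 0) {
            ptrace(PTRACE_TRACEME, 0, NULL, NULL);  /* let the parent trace us */
            raise(SIGSTOP);                         /* stop so the parent can inspect us */
            _exit(0);
        }
        int status;
        waitpid(child, &status, 0);                 /* wait for the SIGSTOP stop */
        struct user_regs_struct regs;
        if (ptrace(PTRACE_GETREGS, child, NULL, &regs) == 0) {
            /* regs.rdi is the stopped child's %rdi, i.e. its first-argument register. */
            printf("child %%rdi = 0x%llx\n", (unsigned long long)regs.rdi);
        }
        ptrace(PTRACE_CONT, child, NULL, NULL);     /* resume without delivering the signal */
        waitpid(child, &status, 0);                 /* reap the child */
        return 0;
    }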