/arch/x86/power/ |
D | hibernate_asm_64.S |
    33   movq %rdx, pt_regs_dx(%rax)
    57   movq $__PAGE_OFFSET, %rdx
    59   subq %rdx, %rax
    63   movq %rax, %rdx
    64   andq $~(X86_CR4_PGE), %rdx
    65   movq %rdx, %cr4;  # turn off PGE
    75   movq restore_pblist(%rip), %rdx
    82   testq %rdx, %rdx
    86   movq pbe_address(%rdx), %rsi
    87   movq pbe_orig_address(%rdx), %rdi
    [all …]
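
The matches at lines 75-87 are the image-restore loop: walk restore_pblist and copy each saved page back over its original frame. A minimal C sketch of that loop follows, assuming only the struct pbe layout from include/linux/suspend.h; the real code does the copy in assembly through %rsi/%rdi.

    /* Hedged C equivalent of the loop around lines 75-87. */
    #include <string.h>

    #define PAGE_SIZE_SKETCH 4096          /* illustrative page size */

    struct pbe {
            void *address;                 /* where the page was saved */
            void *orig_address;            /* where it must be copied back */
            struct pbe *next;
    };

    static void restore_image_sketch(struct pbe *restore_pblist)
    {
            struct pbe *p;

            for (p = restore_pblist; p; p = p->next)
                    memcpy(p->orig_address, p->address, PAGE_SIZE_SKETCH);
    }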
|
/arch/x86/crypto/ |
D | salsa20-x86_64-asm_64.S |
    15   mov %rdx,%rdi
    17   mov %rcx,%rdx
    19   cmp $0,%rdx
    77   cmp $64,%rdx
    86   mov %rdx,%rcx
    101  movq %rdx,152(%rsp)
    105  mov %rdi,%rdx
    165  lea (%r14,%rdx),%rbp
    177  lea (%rdx,%r9),%rbp
    205  xor %rbp,%rdx
    [all …]
|
D | aes-x86_64-asm_64.S | 37 #define R4 %rdx
|
D | twofish-x86_64-asm_64.S | 56 #define R3 %rdx
|
/arch/x86/kernel/ |
D | head_64.S |
    79   leaq _text(%rip), %rdx
    81   cmpq %rax, %rdx
    106  leaq (level2_spare_pgt - __START_KERNEL_map + _KERNPG_TABLE)(%rbp), %rdx
    108  movq %rdx, 0(%rbx, %rax, 8)
    113  leaq __PAGE_KERNEL_IDENT_LARGE_EXEC(%rdi), %rdx
    115  movq %rdx, 0(%rbx, %rax, 8)
    235  movq %rax,%rdx
    236  shrq $32,%rdx
    299  movq 8(%rsp),%rdx  # get cs
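
Lines 106-115 build early page-table entries: leaq adds the flag constant to a physical address and movq stores the result in the 8-byte slot selected by the index register. A hedged C sketch of that pattern follows; the flag value and names are illustrative, not the kernel's _KERNPG_TABLE or __PAGE_KERNEL_IDENT_LARGE_EXEC constants.

    #include <stdint.h>

    #define PMD_SHIFT_SKETCH 21            /* 2 MiB large pages */
    #define FLAGS_SKETCH     0x1e3ULL      /* illustrative present/rw/PSE-style bits */

    /* Fill 'entries' slots of a table with "physical address | flags",
     * the same computation as leaq flags(%rdi),%rdx / movq %rdx,(%rbx,%rax,8). */
    static void fill_ident_table_sketch(uint64_t *table, uint64_t phys, int entries)
    {
            for (int i = 0; i < entries; i++)
                    table[i] = (phys + ((uint64_t)i << PMD_SHIFT_SKETCH)) | FLAGS_SKETCH;
    }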
|
D | entry_64.S |
    157   movq %rdx, 16(%rsp)
    174   movq 16(%rsp), %rdx
    296   CFI_REL_OFFSET rdx, RDX+\offset-ARGOFFSET
    324   movq_cfi rdx, RDX+16-ARGOFFSET
    380   movq_cfi rdx, RDX+8
    579   movq %rdx,%r8   /* 5th arg: 3rd syscall arg */
    581   movq %rdi,%rdx  /* 3rd arg: 1st syscall arg */
    720   PTREGSCALL stub_sigaltstack, sys_sigaltstack, %rdx
    1180  # rdi: flags, rsi: usp, rdx: will be &pt_regs
    1181  movq %rdx,%rdi
    [all …]
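
The moves at lines 579 and 581 exist because the syscall ABI hands arguments over in %rdi, %rsi, %rdx, %r10, %r8, %r9, while the C calling convention uses %rdi, %rsi, %rdx, %rcx, %r8, %r9 and the audit helper takes two leading non-syscall parameters. A hedged prototype (shape is illustrative, not the exact kernel signature) shows why the 1st syscall argument ends up in %rdx and the 3rd in %r8:

    /* Hypothetical helper shape; comments give the register each
     * parameter lands in under the C ABI. */
    void audit_entry_sketch(unsigned long arch,   /* %rdi              */
                            unsigned long nr,     /* %rsi              */
                            unsigned long a0,     /* %rdx <- was %rdi  */
                            unsigned long a1,     /* %rcx <- was %rsi  */
                            unsigned long a2,     /* %r8  <- was %rdx  */
                            unsigned long a3);    /* %r9  <- was %r10  */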
|
D | relocate_kernel_64.S |
    173  pushq %rdx
    253  xorq %rdx, %rdx
|
/arch/x86/include/asm/ |
D | calling.h |
    42   movq %rdx, 6*8(%rsp)
    43   CFI_REL_OFFSET rdx, 6*8
    94   movq 6*8(%rsp), %rdx
    95   CFI_RESTORE rdx
    113  movq \offset+48(%rsp), %rdx
|
D | ftrace.h |
    11   movq %rdx, 16(%rsp)
    23   movq 16(%rsp), %rdx
|
D | sigcontext.h | 245 unsigned long rdx; member
|
D | ptrace.h | 82 unsigned long rdx; member
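
This is the register layout exported to user space, so the rdx slot here is what a debugger reads back. A short sketch using the standard ptrace(2) interface (struct user_regs_struct from <sys/user.h> mirrors this layout on x86-64):

    #include <stdio.h>
    #include <sys/ptrace.h>
    #include <sys/types.h>
    #include <sys/user.h>

    /* Print %rdx of a ptrace-stopped tracee; at a syscall stop it holds
     * the third syscall argument. */
    static void print_tracee_rdx(pid_t tracee)
    {
            struct user_regs_struct regs;

            if (ptrace(PTRACE_GETREGS, tracee, NULL, &regs) == 0)
                    printf("rdx = %#llx\n", (unsigned long long)regs.rdx);
    }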
|
D | kvm.h | 82 __u64 rax, rbx, rcx, rdx; member
|
D | paravirt.h |
    1536  push %rdx; \
    1550  pop %rdx; \
|
/arch/x86/lib/ |
D | copy_page_64.S |
    38   movq 16 (%rsi), %rdx
    49   movq %rdx, 16 (%rdi)
    68   movq 16 (%rsi), %rdx
    77   movq %rdx, 16 (%rdi)
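
The four matches are one temporary of an unrolled copy: eight quadwords are loaded into registers and then stored per 64-byte chunk, and %rdx simply carries the word at offset 16. A plain C stand-in for that structure (not the kernel's tuned routine):

    #include <stdint.h>
    #include <stddef.h>

    static void copy_page_sketch(uint64_t *d, const uint64_t *s, size_t nwords)
    {
            for (size_t i = 0; i + 8 <= nwords; i += 8) {
                    uint64_t t0 = s[i],     t1 = s[i + 1], t2 = s[i + 2], t3 = s[i + 3];
                    uint64_t t4 = s[i + 4], t5 = s[i + 5], t6 = s[i + 6], t7 = s[i + 7];
                    d[i]     = t0; d[i + 1] = t1; d[i + 2] = t2; d[i + 3] = t3;
                    d[i + 4] = t4; d[i + 5] = t5; d[i + 6] = t6; d[i + 7] = t7;
            }
    }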
|
D | copy_user_64.S |
    72   addq %rdx,%rcx
    84   addq %rdx,%rcx
    188  40: lea (%rdx,%rcx,8),%rdx
    257  11: lea (%rdx,%rcx,8),%rcx
|
D | csum-copy_64.S |
    103  movq 24(%rdi),%rdx
    120  adcq %rdx,%rax
    135  movq %rdx,24(%rsi)
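
The adcq at line 120 folds %rdx into the running checksum with the carry from the previous addition, which is how the routine accumulates a one's-complement sum while it copies. The generic C equivalent of that accumulation step (a sketch, not the tuned assembly):

    #include <stdint.h>
    #include <stddef.h>

    /* Add one 64-bit word into the checksum and fold the carry back in,
     * the C counterpart of a chained adcq. */
    static uint64_t csum_add64(uint64_t sum, uint64_t word)
    {
            sum += word;
            return sum + (sum < word);
    }

    static uint64_t csum_words_sketch(const uint64_t *src, size_t nwords)
    {
            uint64_t sum = 0;

            for (size_t i = 0; i < nwords; i++)
                    sum = csum_add64(sum, src[i]);
            return sum;
    }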
|
D | copy_user_nocache_64.S | 107 40: lea (%rdx,%rcx,8),%rdx
|
D | memset_64.S | 39 movq %rdx,%r11
|
/arch/x86/kernel/acpi/ |
D | wakeup_64.S |
    17   movq $0x123456789abcdef0, %rdx
    18   cmpq %rdx, %rax
    53   movq %rdx, pt_regs_dx(%rax)
    100  movq pt_regs_dx(%rax), %rdx
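
Lines 17-18 compare a saved magic value against the constant 0x123456789abcdef0, so a missing or corrupted save area is caught on the resume path before any registers are restored from it. A hedged sketch of that signature check (the surrounding structure is illustrative; only the constant comes from the listing):

    #include <stdint.h>
    #include <stdbool.h>

    #define WAKEUP_SIGNATURE_SKETCH 0x123456789abcdef0ULL

    static bool resume_save_area_valid(uint64_t saved_magic)
    {
            return saved_magic == WAKEUP_SIGNATURE_SKETCH;
    }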
|
/arch/x86/xen/ |
D | xen-asm_64.S |
    133  push %rdx
    147  pop %rdx
    188  pushq %rdx
|
/arch/um/sys-x86_64/ |
D | stub.S | 48 pop %rdx
|
/arch/x86/ia32/ |
D | ia32entry.S |
    173  CFI_REGISTER rip,rdx
    465  PTREGSCALL stub32_sigaltstack, sys32_sigaltstack, %rdx
    468  PTREGSCALL stub32_clone, sys32_clone, %rdx
    480  CFI_REL_OFFSET rdx,RDX-ARGOFFSET
|
/arch/x86/boot/compressed/ |
D | head_64.S | 291 leaq input_data(%rip), %rdx # input_data
|
/arch/x86/kvm/ |
D | svm.c | 1806 [rdx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RDX])), in svm_vcpu_run()
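
Line 1806 uses a common inline-asm idiom: offsetof() is passed through an "i" (immediate) constraint so the asm template can address a structure member as a constant displacement from a base register. A self-contained sketch of the idiom with illustrative types (not the KVM structures):

    #include <stddef.h>

    struct demo_regs { unsigned long ax, bx, cx, dx; };

    /* %c prints the immediate without the '$', giving e.g. "mov 24(%rdi),%rax". */
    static unsigned long load_dx_sketch(struct demo_regs *r)
    {
            unsigned long val;

            asm("mov %c[dx_off](%[base]), %[out]"
                : [out] "=r" (val)
                : [base] "r" (r),
                  [dx_off] "i" (offsetof(struct demo_regs, dx)));
            return val;
    }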
|
D | x86.c |
    3173  regs->rdx = kvm_register_read(vcpu, VCPU_REGS_RDX);    in kvm_arch_vcpu_ioctl_get_regs()
    3210  kvm_register_write(vcpu, VCPU_REGS_RDX, regs->rdx);    in kvm_arch_vcpu_ioctl_set_regs()
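
These two handlers are the kernel side of the KVM_GET_REGS and KVM_SET_REGS vcpu ioctls, marshalling guest registers through the struct kvm_regs shown in kvm.h above. A hedged userspace sketch of the other end (error handling trimmed):

    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <linux/kvm.h>

    /* Read the guest's %rdx back through the vcpu file descriptor. */
    static void print_guest_rdx(int vcpu_fd)
    {
            struct kvm_regs regs;

            if (ioctl(vcpu_fd, KVM_GET_REGS, &regs) == 0)
                    printf("guest rdx = %#llx\n", (unsigned long long)regs.rdx);
    }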
|