Home
last modified time | relevance | path

Searched refs:rip (Results 1 – 25 of 60) sorted by relevance

123

/arch/x86/purgatory/
entry64.S:19 lgdt gdt(%rip)
30 leaq stack_init(%rip), %rsp
32 leaq new_cs_exit(%rip), %rax
38 movq rax(%rip), %rax
39 movq rbx(%rip), %rbx
40 movq rcx(%rip), %rcx
41 movq rdx(%rip), %rdx
42 movq rsi(%rip), %rsi
43 movq rdi(%rip), %rdi
44 movq rsp(%rip), %rsp
[all …]
setup-x86_64.S:19 lgdt gdt(%rip)
30 leaq lstack_end(%rip), %rsp
/arch/x86/power/
hibernate_asm_64.S:33 movq mmu_cr4_features(%rip), %rax
67 movq %rax, in_suspend(%rip)
94 movq %rax, restore_cr3(%rip)
104 movq restore_jump_address(%rip), %r8
105 movq restore_cr3(%rip), %r9
108 movq temp_pgt(%rip), %rax
109 movq mmu_cr4_features(%rip), %rbx
112 movq restore_pblist(%rip), %rdx
115 movq relocated_restore_code(%rip), %rcx
/arch/x86/kernel/
head_64.S:65 leaq (__end_init_task - FRAME_SIZE)(%rip), %rsp
67 leaq _text(%rip), %rdi
74 leaq .Lon_kernel_cs(%rip), %rax
90 leaq _text(%rip), %rdi
146 testl $1, __pgtable_l5_enabled(%rip)
154 addq phys_base(%rip), %rax
185 lgdt early_gdt_descr(%rip)
209 movl initial_gs(%rip),%eax
210 movl initial_gs+4(%rip),%edx
217 movq initial_stack(%rip), %rsp
[all …]
sev_verify_cbit.S:25 movq sme_me_mask(%rip), %rsi
30 movq sev_status(%rip), %rsi
53 movq %rdx, sev_check_data(%rip)
65 cmpq %rdx, sev_check_data(%rip)
ftrace_64.S:149 movq function_trace_op(%rip), %rdx
200 movq function_trace_op(%rip), %rdx
/arch/x86/platform/efi/
efi_thunk_64.S:37 movq efi_mixed_mode_stack_pa(%rip), %rsp
44 subq phys_base(%rip), %rax
46 leaq 1f(%rip), %rbp
47 leaq 2f(%rip), %rbx
/arch/x86/crypto/
aesni-intel_avx-x86_64.S:373 vpshufb SHUF_MASK(%rip), %xmm9, %xmm9
388 vpshufb SHUF_MASK(%rip), %xmm9, %xmm9
392 vpshufb SHUF_MASK(%rip), %xmm9, %xmm9
395 vpshufb SHUF_MASK(%rip), %xmm9, %xmm9
400 vpshufb SHUF_MASK(%rip), %xmm9, %xmm9
423 vpaddd ONE(%rip), %xmm9, %xmm9 # INCR CNT to get Yn
425 vpshufb SHUF_MASK(%rip), %xmm9, %xmm9
438 lea SHIFT_MASK+16(%rip), %r12
455 lea SHIFT_MASK+16(%rip), %r12
472 vpshufb SHUF_MASK(%rip), %xmm2, %xmm2
[all …]
crc32-pclmul_asm.S:118 movdqa .Lconstant_R2R1(%rip), CONSTANT
167 movdqa .Lconstant_R4R3(%rip), CONSTANT
214 movdqa .Lconstant_R5(%rip), CONSTANT
215 movdqa .Lconstant_mask32(%rip), %xmm3
227 movdqa .Lconstant_RUpoly(%rip), CONSTANT
blake2s-core.S:54 movdqa ROT16(%rip),%xmm12
55 movdqa ROR328(%rip),%xmm13
58 leaq SIGMA+0xa0(%rip),%r8
65 movdqa IV(%rip),%xmm2
67 pxor IV+0x10(%rip),%xmm3
68 leaq SIGMA(%rip),%rcx
183 vmovdqa IV(%rip),%xmm14
184 vmovdqa IV+16(%rip),%xmm15
196 leaq SIGMA2(%rip),%rax
aes_ctrby8_avx-x86_64.S:169 vpaddq (ddq_add_1 + 16 * i)(%rip), xtmp, var_xdata
183 vpaddq (ddq_add_1 + 16 * (i - 1))(%rip), xcounter, var_xdata
184 vptest ddq_low_msk(%rip), var_xdata
186 vpaddq ddq_high_add_1(%rip), var_xdata, var_xdata
187 vpaddq ddq_high_add_1(%rip), xcounter, xcounter
200 vpaddq (ddq_add_1 + 16 * (by - 1))(%rip), xcounter, xcounter
201 vptest ddq_low_msk(%rip), xcounter
203 vpaddq ddq_high_add_1(%rip), xcounter, xcounter
461 vmovdqa byteswap_const(%rip), xbyteswap
aesni-intel_asm.S:198 movdqa SHUF_MASK(%rip), \TMP2
214 pcmpeqd TWOONE(%rip), \TMP2
215 pand POLY(%rip), \TMP2
260 movdqa SHUF_MASK(%rip), %xmm2
342 paddd ONE(%rip), %xmm0 # INCR CNT to get Yn
344 movdqa SHUF_MASK(%rip), %xmm10
368 lea SHIFT_MASK+16(%rip), %r12
378 lea ALL_F+16(%rip), %r12
390 movdqa SHUF_MASK(%rip), %xmm10
395 movdqa SHUF_MASK(%rip), %xmm10
[all …]
polyval-clmulni_asm.S:274 vmovdqa .Lgstar(%rip), GSTAR
300 vmovdqa .Lgstar(%rip), GSTAR
sha1_avx2_x86_64_asm.S:214 vpaddd K_XMM + K_XMM_AR(%rip), WY, WY_TMP
257 vpaddd K_XMM + K_XMM_AR(%rip), WY, WY_TMP
293 vpaddd K_XMM + K_XMM_AR(%rip), WY, WY_TMP
662 xmm_mov BSWAP_SHUFB_CTL(%rip), YMM_SHUFB_BSWAP
sha1_ni_asm.S:109 pand UPPER_WORD_MASK(%rip), E0
112 movdqa PSHUFFLE_BYTE_FLIP_MASK(%rip), SHUF_MASK
/arch/x86/boot/compressed/
efi_thunk_64.S:29 leaq 1f(%rip), %rbp
62 leaq efi32_boot_idt(%rip), %rax
64 leaq efi32_boot_gdt(%rip), %rax
67 movzwl efi32_boot_ds(%rip), %edx
68 movzwq efi32_boot_cs(%rip), %rax
70 leaq efi_enter32(%rip), %rax
head_64.S:374 leaq startup_32(%rip) /* - $startup_32 */, %rbp
385 movl image_offset(%rip), %eax
434 leaq gdt64(%rip), %rax
440 leaq .Lon_kernel_cs(%rip), %rax
488 leaq trampoline_return(%rip), %rdi
528 leaq (_bss-8)(%rip), %rsi
575 leaq _bss(%rip), %rdi
576 leaq _ebss(%rip), %rcx
604 leaq boot_heap(%rip), %rsi /* malloc area for uncompression */
605 leaq input_data(%rip), %rdx /* input_data */
[all …]
mem_encrypt.S:201 bts %rax, sme_me_mask(%rip) /* Create the encryption mask */
214 movq %rax, sev_status(%rip)
/arch/x86/kvm/
trace.h:23 __field( unsigned long, rip )
28 __entry->rip = kvm_rip_read(vcpu);
31 TP_printk("vcpu %u, rip 0x%lx", __entry->vcpu_id, __entry->rip)
583 TP_PROTO(__u64 rip, __u64 vmcb, __u64 nested_rip, __u32 int_ctl,
585 TP_ARGS(rip, vmcb, nested_rip, int_ctl, event_inj, npt),
588 __field( __u64, rip )
597 __entry->rip = rip;
607 __entry->rip, __entry->vmcb, __entry->nested_rip,
685 TP_PROTO(__u64 rip),
686 TP_ARGS(rip),
[all …]
/arch/x86/include/uapi/asm/
ptrace.h:71 unsigned long rip; member
sigcontext.h:155 __u64 rip; member
342 __u64 rip; member
/arch/x86/include/asm/
user_64.h:57 __u64 rip; member
/arch/x86/kvm/svm/
nested.c:493 kvm_rip_write(&svm->vcpu, vmcb12->save.rip); in nested_vmcb02_prepare_save()
498 svm->vmcb->save.rip = vmcb12->save.rip; in nested_vmcb02_prepare_save()
587 trace_kvm_nested_vmrun(svm->vmcb->save.rip, vmcb12_gpa, in enter_svm_guest_mode()
588 vmcb12->save.rip, in enter_svm_guest_mode()
680 svm->vmcb01.ptr->save.rip = kvm_rip_read(vcpu); in nested_svm_vmrun()
726 to_save->rip = from_save->rip; in svm_copy_vmrun_state()
787 vmcb12->save.rip = kvm_rip_read(vcpu); in nested_svm_vmexit()
844 kvm_rip_write(vcpu, svm->vmcb->save.rip); in nested_svm_vmexit()
1184 trace_kvm_nested_intr_vmexit(svm->vmcb->save.rip); in svm_check_nested_events()
/arch/x86/entry/vdso/
vsgx.S:59 lea .Lasync_exit_pointer(%rip), %rcx
/arch/x86/xen/
xen-head.S:38 mov initial_stack(%rip), %rsp

123