Searched refs:rflags (Results 1 – 20 of 20) sorted by relevance

/arch/powerpc/mm/
hash64_64k.c
56 unsigned long rflags, pa, hidx; in __hash_page_4K() local
88 rflags = htab_convert_pte_flags(subpg_pte); in __hash_page_4K()
97 rflags = hash_page_do_lazy_icache(rflags, __pte(old_pte), trap); in __hash_page_4K()
136 ret = mmu_hash_ops.hpte_updatepp(slot, rflags, vpn, in __hash_page_4K()
169 slot = mmu_hash_ops.hpte_insert(hpte_group, vpn, pa, rflags, 0, in __hash_page_4K()
177 rflags, HPTE_V_SECONDARY, in __hash_page_4K()
225 unsigned long rflags, pa; in __hash_page_64K() local
259 rflags = htab_convert_pte_flags(new_pte); in __hash_page_64K()
263 rflags = hash_page_do_lazy_icache(rflags, __pte(old_pte), trap); in __hash_page_64K()
276 if (mmu_hash_ops.hpte_updatepp(slot, rflags, vpn, MMU_PAGE_64K, in __hash_page_64K()
[all …]
hash64_4k.c
24 unsigned long rflags, pa; in __hash_page_4K() local
56 rflags = htab_convert_pte_flags(new_pte); in __hash_page_4K()
60 rflags = hash_page_do_lazy_icache(rflags, __pte(old_pte), trap); in __hash_page_4K()
73 if (mmu_hash_ops.hpte_updatepp(slot, rflags, vpn, MMU_PAGE_4K, in __hash_page_4K()
87 slot = mmu_hash_ops.hpte_insert(hpte_group, vpn, pa, rflags, 0, in __hash_page_4K()
95 rflags, in __hash_page_4K()
hugetlbpage-hash64.c
28 unsigned long rflags, pa, sz; in __hash_page_huge() local
64 rflags = htab_convert_pte_flags(new_pte); in __hash_page_huge()
70 rflags = hash_page_do_lazy_icache(rflags, __pte(old_pte), trap); in __hash_page_huge()
83 if (mmu_hash_ops.hpte_updatepp(slot, rflags, vpn, mmu_psize, in __hash_page_huge()
96 slot = hpte_insert_repeating(hash, vpn, pa, rflags, 0, in __hash_page_huge()
hugepage-hash64.c
27 unsigned long rflags, pa, hidx; in __hash_page_thp() local
54 rflags = htab_convert_pte_flags(new_pmd); in __hash_page_thp()
63 rflags = hash_page_do_lazy_icache(rflags, __pte(old_pte), trap); in __hash_page_thp()
106 ret = mmu_hash_ops.hpte_updatepp(slot, rflags, vpn, in __hash_page_thp()
134 slot = mmu_hash_ops.hpte_insert(hpte_group, vpn, pa, rflags, 0, in __hash_page_thp()
143 rflags, in __hash_page_thp()
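
The four __hash_page_* hits above (4K subpages, 64K pages, hugetlb and THP) share one rflags flow: convert the Linux PTE flags to HPTE R-field flags, adjust them for lazy icache flushing, try to update an existing HPTE slot with hpte_updatepp(), and only insert a new entry when that fails. Below is a minimal, self-contained C model of that shape; every type, constant and stub is a placeholder (the simplified mmu_hash_ops, the fake return values, and the omitted secondary-hash retry are all assumptions), not the kernel's code.

#include <stdio.h>

typedef unsigned long pte_t;   /* placeholder for the kernel type */

/* Stand-ins for the helpers named in the hits. */
static unsigned long htab_convert_pte_flags(pte_t pte)
{
        return pte & 0x1ffUL;                    /* fake conversion */
}

static unsigned long hash_page_do_lazy_icache(unsigned long rflags,
                                              pte_t old_pte, int trap)
{
        (void)old_pte; (void)trap;
        return rflags;                           /* fake icache fixup */
}

/* A cut-down mmu_hash_ops with only the two hooks the hits call. */
struct hash_ops_model {
        long (*hpte_updatepp)(unsigned long slot, unsigned long rflags,
                              unsigned long vpn, int psize);
        long (*hpte_insert)(unsigned long group, unsigned long vpn,
                            unsigned long pa, unsigned long rflags,
                            unsigned long vflags, int psize);
};

static long updatepp_stub(unsigned long slot, unsigned long rflags,
                          unsigned long vpn, int psize)
{
        (void)slot; (void)rflags; (void)vpn; (void)psize;
        return -1;                               /* "no valid slot": force insert */
}

static long insert_stub(unsigned long group, unsigned long vpn,
                        unsigned long pa, unsigned long rflags,
                        unsigned long vflags, int psize)
{
        (void)vpn; (void)pa; (void)vflags; (void)psize;
        return (long)(group & 0x7) | (long)(rflags != 0);   /* fake slot number */
}

static struct hash_ops_model mmu_hash_ops = { updatepp_stub, insert_stub };

/* The shared shape of __hash_page_4K/_64K/_huge/_thp, heavily simplified. */
static long hash_page_model(pte_t new_pte, pte_t old_pte, int trap,
                            unsigned long slot, unsigned long hpte_group,
                            unsigned long vpn, unsigned long pa, int psize)
{
        unsigned long rflags = htab_convert_pte_flags(new_pte);

        rflags = hash_page_do_lazy_icache(rflags, old_pte, trap);

        if (mmu_hash_ops.hpte_updatepp(slot, rflags, vpn, psize) != -1)
                return 0;                        /* existing HPTE updated in place */

        /* No usable slot: insert a fresh HPTE (secondary-hash retry omitted). */
        return mmu_hash_ops.hpte_insert(hpte_group, vpn, pa, rflags, 0, psize);
}

int main(void)
{
        long slot = hash_page_model(0x1a3, 0x1a3, 0x300, 0, 8, 0x100, 0x2000, 0);

        printf("new slot = %ld\n", slot);
        return 0;
}
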
hash_utils_64.c
183 unsigned long rflags = 0; in htab_convert_pte_flags() local
187 rflags |= HPTE_R_N; in htab_convert_pte_flags()
201 rflags |= (HPTE_R_PP0 | 0x2); in htab_convert_pte_flags()
203 rflags |= 0x3; in htab_convert_pte_flags()
207 rflags |= 0x2; in htab_convert_pte_flags()
209 rflags |= 0x1; in htab_convert_pte_flags()
215 rflags |= HPTE_R_R; in htab_convert_pte_flags()
218 rflags |= HPTE_R_C; in htab_convert_pte_flags()
224 rflags |= HPTE_R_I; in htab_convert_pte_flags()
226 rflags |= (HPTE_R_I | HPTE_R_G); in htab_convert_pte_flags()
[all …]
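
The hash_utils_64.c hits show htab_convert_pte_flags() assembling the HPTE R field bit by bit: no-execute, one of four protection (PP) encodings, referenced/changed, and cache-inhibited plus guarded for I/O-style mappings. The sketch below models only that assembly; the HPTE_R_* values and the PTE state that selects each PP encoding are placeholders here, not taken from the source (the authoritative definitions live in arch/powerpc/include/asm/book3s/64/mmu-hash.h).

#include <stdio.h>

/* Placeholder bit values, for illustration only. */
#define HPTE_R_N    0x0000000000000004UL   /* no-execute      */
#define HPTE_R_G    0x0000000000000008UL   /* guarded         */
#define HPTE_R_I    0x0000000000000020UL   /* cache-inhibited */
#define HPTE_R_C    0x0000000000000080UL   /* changed (dirty) */
#define HPTE_R_R    0x0000000000000100UL   /* referenced      */
#define HPTE_R_PP0  0x8000000000000000UL   /* high PP bit     */

/* The four PP encodings visible in the hits; the class names are guesses. */
enum pp_class { PP_KERNEL_RO, PP_USER_RW, PP_USER_RO, PP_OTHER };

/* Hypothetical decoded view of a Linux PTE. */
struct pte_model {
        enum pp_class pp;
        int exec, write, ref, dirty, cache_inhibited;
};

static unsigned long convert_pte_flags_model(struct pte_model p)
{
        unsigned long rflags = 0;

        if (!p.exec)
                rflags |= HPTE_R_N;                  /* non-executable mapping */

        switch (p.pp) {                              /* protection encoding    */
        case PP_KERNEL_RO: rflags |= (HPTE_R_PP0 | 0x2); break;
        case PP_USER_RO:   rflags |= 0x3;                break;
        case PP_USER_RW:   rflags |= 0x2;                break;
        default:           rflags |= 0x1;                break;
        }

        if (p.ref)
                rflags |= HPTE_R_R;                  /* pre-set referenced bit */
        if (p.dirty && p.write)
                rflags |= HPTE_R_C;                  /* pre-set changed bit    */

        if (p.cache_inhibited)
                rflags |= (HPTE_R_I | HPTE_R_G);     /* non-cacheable, guarded */

        return rflags;
}

int main(void)
{
        struct pte_model io_page = { .pp = PP_USER_RW, .exec = 0, .write = 1,
                                     .ref = 1, .dirty = 1, .cache_inhibited = 1 };

        printf("rflags = 0x%lx\n", convert_pte_flags_model(io_page));
        return 0;
}
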
hash_native_64.c
244 unsigned long pa, unsigned long rflags, in native_hpte_insert() argument
254 hpte_group, vpn, pa, rflags, vflags, psize); in native_hpte_insert()
273 hpte_r = hpte_encode_r(pa, psize, apsize) | rflags; in native_hpte_insert()
/arch/powerpc/kvm/
book3s_64_mmu_host.c
90 int rflags = 0x192; in kvmppc_mmu_map_page() local
137 rflags |= PP_RXRX; in kvmppc_mmu_map_page()
144 rflags |= HPTE_R_N; in kvmppc_mmu_map_page()
148 rflags = (rflags & ~HPTE_R_WIMG) | orig_pte->wimg; in kvmppc_mmu_map_page()
179 ret = mmu_hash_ops.hpte_insert(hpteg, vpn, hpaddr, rflags, vflags, in kvmppc_mmu_map_page()
192 trace_kvm_book3s_64_mmu_map(rflags, hpteg, in kvmppc_mmu_map_page()
trace_pr.h
32 TP_PROTO(int rflags, ulong hpteg, ulong va, kvm_pfn_t hpaddr,
34 TP_ARGS(rflags, hpteg, va, hpaddr, orig_pte),
47 __entry->flag_w = ((rflags & HPTE_R_PP) == 3) ? '-' : 'w';
48 __entry->flag_x = (rflags & HPTE_R_N) ? '-' : 'x';
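
In the book3s_64_mmu_host.c hit, kvmppc_mmu_map_page() starts from a base rflags value (0x192 in the hit), strips write or execute permission by OR-ing in PP_RXRX or HPTE_R_N, and takes the WIMG cacheability bits from the original PTE; the trace_pr.h hit then decodes the result back into 'w'/'x' characters for the tracepoint. A stand-alone model of that, with placeholder constant values (the real ones are in the powerpc KVM headers):

#include <stdio.h>

/* Placeholder values, for illustration only. */
#define HPTE_R_PP    0x3     /* low protection (PP) bits   */
#define PP_RXRX      0x3     /* read-only PP encoding      */
#define HPTE_R_N     0x4     /* no-execute                 */
#define HPTE_R_WIMG  0x78    /* W/I/M/G cacheability field */

int main(void)
{
        int writable = 0, executable = 0;
        int orig_wimg = 0x10;              /* made-up WIMG from the guest PTE */
        int rflags = 0x192;                /* base value used in the hit      */

        if (!writable)
                rflags |= PP_RXRX;         /* drop write permission           */
        if (!executable)
                rflags |= HPTE_R_N;        /* drop execute permission         */
        rflags = (rflags & ~HPTE_R_WIMG) | orig_wimg;  /* WIMG from orig PTE  */

        /* The trace_pr.h hit decodes the flags back for the tracepoint. */
        char flag_w = ((rflags & HPTE_R_PP) == 3) ? '-' : 'w';
        char flag_x = (rflags & HPTE_R_N) ? '-' : 'x';

        printf("rflags=0x%x perms=%c%c\n", rflags, flag_w, flag_x);
        return 0;
}
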
/arch/powerpc/platforms/ps3/
htab.c
48 unsigned long pa, unsigned long rflags, unsigned long vflags, in ps3_hpte_insert() argument
66 hpte_r = hpte_encode_r(ps3_mm_phys_to_lpar(pa), psize, apsize) | rflags; in ps3_hpte_insert()
/arch/x86/kvm/
mmu.h
145 unsigned long rflags = kvm_x86_ops->get_rflags(vcpu); in permission_fault() local
160 unsigned long smap = (cpl - 3) & (rflags & X86_EFLAGS_AC); in permission_fault()
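
The permission_fault() hit in mmu.h uses RFLAGS.AC in a branch-free SMAP check: the AC override only applies to supervisor-mode accesses (CPL < 3), and (cpl - 3) happens to be all-ones exactly in that case, so it can serve directly as a mask. A small stand-alone demonstration of just that trick (X86_EFLAGS_AC is bit 18; the rest of permission_fault() is not modelled); the point is to avoid a conditional branch in a hot permission-check path.

#include <stdio.h>

#define X86_EFLAGS_AC 0x00040000UL   /* alignment-check / access-control, bit 18 */

static unsigned long smap_override(int cpl, unsigned long rflags)
{
        /*
         * cpl == 3 -> (cpl - 3) == 0, so AC is ignored;
         * cpl  < 3 -> (cpl - 3) is negative, i.e. all-ones once sign-extended,
         *             so the AC bit passes straight through.
         */
        return (unsigned long)(long)(cpl - 3) & (rflags & X86_EFLAGS_AC);
}

int main(void)
{
        printf("cpl=0, AC set   -> 0x%lx\n", smap_override(0, X86_EFLAGS_AC));
        printf("cpl=3, AC set   -> 0x%lx\n", smap_override(3, X86_EFLAGS_AC));
        printf("cpl=0, AC clear -> 0x%lx\n", smap_override(0, 0));
        return 0;
}
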
svm.c
1034 svm->vmcb->save.rflags &= ~X86_EFLAGS_TF; in disable_nmi_singlestep()
1036 svm->vmcb->save.rflags &= ~X86_EFLAGS_RF; in disable_nmi_singlestep()
1893 unsigned long rflags = svm->vmcb->save.rflags; in svm_get_rflags() local
1898 rflags &= ~X86_EFLAGS_TF; in svm_get_rflags()
1900 rflags &= ~X86_EFLAGS_RF; in svm_get_rflags()
1902 return rflags; in svm_get_rflags()
1905 static void svm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags) in svm_set_rflags() argument
1908 rflags |= (X86_EFLAGS_TF | X86_EFLAGS_RF); in svm_set_rflags()
1915 to_svm(vcpu)->vmcb->save.rflags = rflags; in svm_set_rflags()
2904 nested_vmcb->save.rflags = kvm_get_rflags(&svm->vcpu); in nested_svm_vmexit()
[all …]
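
The svm.c hits show KVM hiding its own debug flags from the guest: while single-stepping over an NMI window it sets TF and RF in the VMCB, so svm_get_rflags() strips them out again and svm_set_rflags() re-applies them. A simplified stand-alone model follows; the vcpu/vmcb structures are stand-ins, and the real code additionally remembers whether the guest itself had TF/RF set, which this sketch ignores.

#include <stdbool.h>
#include <stdio.h>

#define X86_EFLAGS_TF 0x00000100UL   /* trap flag, bit 8    */
#define X86_EFLAGS_RF 0x00010000UL   /* resume flag, bit 16 */

struct vcpu_model {
        bool nmi_singlestep;         /* single-stepping over an NMI window */
        unsigned long vmcb_rflags;   /* what the hardware VMCB holds       */
};

static unsigned long get_rflags_model(struct vcpu_model *v)
{
        unsigned long rflags = v->vmcb_rflags;

        if (v->nmi_singlestep) {
                /* hide the flags KVM injected; assumes the guest had them clear */
                rflags &= ~X86_EFLAGS_TF;
                rflags &= ~X86_EFLAGS_RF;
        }
        return rflags;
}

static void set_rflags_model(struct vcpu_model *v, unsigned long rflags)
{
        if (v->nmi_singlestep)
                rflags |= (X86_EFLAGS_TF | X86_EFLAGS_RF);  /* keep single-stepping */
        v->vmcb_rflags = rflags;
}

int main(void)
{
        struct vcpu_model v = { .nmi_singlestep = true,
                                .vmcb_rflags = 0x202 | X86_EFLAGS_TF | X86_EFLAGS_RF };

        unsigned long guest_view = get_rflags_model(&v);
        printf("guest sees 0x%lx\n", guest_view);        /* 0x202: TF/RF hidden */

        set_rflags_model(&v, guest_view);
        printf("vmcb holds 0x%lx\n", v.vmcb_rflags);     /* TF/RF re-applied    */
        return 0;
}
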
x86.c
102 static void __kvm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags);
5848 unsigned long rflags = kvm_x86_ops->get_rflags(vcpu); in kvm_skip_emulated_instruction() local
5861 if (unlikely(rflags & X86_EFLAGS_TF)) in kvm_skip_emulated_instruction()
6030 unsigned long rflags = kvm_x86_ops->get_rflags(vcpu); in x86_emulate_instruction() local
6047 if (unlikely((ctxt->eflags & ~rflags) & X86_EFLAGS_IF)) in x86_emulate_instruction()
7589 regs->rflags = kvm_get_rflags(vcpu); in kvm_arch_vcpu_ioctl_get_regs()
7619 kvm_set_rflags(vcpu, regs->rflags | X86_EFLAGS_FIXED); in kvm_arch_vcpu_ioctl_set_regs()
7854 unsigned long rflags; in kvm_arch_vcpu_ioctl_set_guest_debug() local
7871 rflags = kvm_get_rflags(vcpu); in kvm_arch_vcpu_ioctl_set_guest_debug()
7895 kvm_set_rflags(vcpu, rflags); in kvm_arch_vcpu_ioctl_set_guest_debug()
[all …]
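
The x86.c hit in kvm_skip_emulated_instruction() reads RFLAGS before advancing RIP so that a guest which is single-stepping itself (TF set) still receives its debug trap after the skipped instruction. A trivial illustration of just that check, with the actual RIP handling and exception injection left out:

#include <stdio.h>

#define X86_EFLAGS_TF 0x00000100UL

static int skip_instruction_model(unsigned long rflags)
{
        /* ... advance RIP past the emulated instruction here ... */
        if (rflags & X86_EFLAGS_TF)
                return 1;        /* caller should synthesize a single-step trap */
        return 0;
}

int main(void)
{
        printf("TF clear -> %d\n", skip_instruction_model(0x202));
        printf("TF set   -> %d\n", skip_instruction_model(0x202 | X86_EFLAGS_TF));
        return 0;
}
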
vmx.c
734 ulong rflags; member
2604 unsigned long rflags, save_rflags; in vmx_get_rflags() local
2608 rflags = vmcs_readl(GUEST_RFLAGS); in vmx_get_rflags()
2610 rflags &= RMODE_GUEST_OWNED_EFLAGS_BITS; in vmx_get_rflags()
2612 rflags |= save_rflags & ~RMODE_GUEST_OWNED_EFLAGS_BITS; in vmx_get_rflags()
2614 to_vmx(vcpu)->rflags = rflags; in vmx_get_rflags()
2616 return to_vmx(vcpu)->rflags; in vmx_get_rflags()
2619 static void vmx_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags) in vmx_set_rflags() argument
2624 to_vmx(vcpu)->rflags = rflags; in vmx_set_rflags()
2626 to_vmx(vcpu)->rmode.save_rflags = rflags; in vmx_set_rflags()
[all …]
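
The vmx.c hits show the real-mode special case: when real mode is emulated through vm86, only some RFLAGS bits in the VMCS are genuinely the guest's, so vmx_get_rflags() merges the rest back in from a software copy that vmx_set_rflags() maintains (rmode.save_rflags). The sketch below models only that merge; the guest-owned bit mask, the structures, and the VMCS access are placeholders.

#include <stdbool.h>
#include <stdio.h>

#define GUEST_OWNED_BITS_MODEL 0x000008d5UL   /* placeholder mask */

struct vmx_model {
        bool rmode_active;           /* emulating real mode via vm86 */
        unsigned long vmcs_rflags;   /* value the VMCS reports       */
        unsigned long save_rflags;   /* software copy of the rest    */
        unsigned long rflags;        /* cached merged value          */
};

static unsigned long vmx_get_rflags_model(struct vmx_model *v)
{
        unsigned long rflags = v->vmcs_rflags;

        if (v->rmode_active) {
                rflags &= GUEST_OWNED_BITS_MODEL;                    /* HW-held bits */
                rflags |= v->save_rflags & ~GUEST_OWNED_BITS_MODEL;  /* SW-held bits */
        }
        v->rflags = rflags;
        return v->rflags;
}

static void vmx_set_rflags_model(struct vmx_model *v, unsigned long rflags)
{
        v->rflags = rflags;
        if (v->rmode_active)
                v->save_rflags = rflags;   /* remember the full value for reads  */
        v->vmcs_rflags = rflags;           /* real code also forces VM/IOPL here */
}

int main(void)
{
        struct vmx_model v = { .rmode_active = true };

        vmx_set_rflags_model(&v, 0x00023202);     /* made-up guest value */
        printf("merged read: 0x%lx\n", vmx_get_rflags_model(&v));
        return 0;
}
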
/arch/x86/include/asm/xen/
interface_64.h
84 uint64_t rax, r11, rcx, flags, rip, cs, rflags, rsp, ss; member
/arch/powerpc/platforms/pseries/
lpar.c
137 unsigned long rflags, unsigned long vflags, in pSeries_lpar_hpte_insert() argument
148 hpte_group, vpn, pa, rflags, vflags, psize); in pSeries_lpar_hpte_insert()
151 hpte_r = hpte_encode_r(pa, psize, apsize) | rflags; in pSeries_lpar_hpte_insert()
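
All three HPTE-insert backends in the results (native_hpte_insert, ps3_hpte_insert, and pSeries_lpar_hpte_insert) build the second HPTE doubleword the same way: an encoded base derived from the physical address and page sizes, OR-ed with the caller's rflags. A tiny model of that composition; hpte_encode_r() here is a stand-in, not the kernel routine.

#include <stdio.h>

/* Stand-in: keep only the page-frame bits of the physical address. */
static unsigned long hpte_encode_r(unsigned long pa, int psize, int apsize)
{
        (void)psize; (void)apsize;
        return pa & ~0xfffUL;
}

int main(void)
{
        unsigned long pa = 0x12345000UL;   /* made-up physical address */
        unsigned long rflags = 0x1a2UL;    /* made-up protection flags */

        unsigned long hpte_r = hpte_encode_r(pa, 0, 0) | rflags;

        printf("hpte_r = 0x%lx\n", hpte_r);   /* frame bits plus rflags */
        return 0;
}
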
/arch/x86/include/asm/
svm.h
177 u64 rflags; member
kvm_host.h
980 void (*set_rflags)(struct kvm_vcpu *vcpu, unsigned long rflags);
1229 void kvm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags);
/arch/s390/include/asm/
qdio.h
104 u32 rflags : 8; member
/arch/x86/include/uapi/asm/
kvm.h
122 __u64 rip, rflags; member
/arch/powerpc/include/asm/book3s/64/
mmu-hash.h
150 unsigned long rflags,