Searched refs:spte (Results 1 – 5 of 5) sorted by relevance
/arch/x86/kvm/

D | mmu.c |
    207  #define for_each_shadow_entry_lockless(_vcpu, _addr, _walker, spte) \
    210          ({ spte = mmu_spte_get_lockless(_walker.sptep); 1; }); \
    211          __shadow_walk_next(&(_walker), spte))
    267  static void mmu_spte_set(u64 *sptep, u64 spte);
    269  static bool is_executable_pte(u64 spte);
    288  static inline bool spte_ad_enabled(u64 spte)
    290          MMU_WARN_ON((spte & shadow_mmio_mask) == shadow_mmio_value);
    291          return !(spte & shadow_acc_track_value);
    299  static inline u64 spte_shadow_accessed_mask(u64 spte)
    301          MMU_WARN_ON((spte & shadow_mmio_mask) == shadow_mmio_value);
    [all …]
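The mmu.c hits cover the lockless shadow-walk macro and the helpers that decide whether accessed/dirty (A/D) bits are live in an SPTE: an access-tracked or MMIO-tagged entry must not be treated as A/D-enabled. The following standalone sketch models that style of bit test; the mask values and the spte_has_ad_bits()/spte_accessed_mask() names are hypothetical stand-ins for this sketch, not the kernel's definitions (the real masks are configured at runtime in mmu.c).

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical mask values chosen only for this sketch; the real masks in
 * arch/x86/kvm/mmu.c are configured at runtime from CPU capabilities. */
#define ACC_TRACK_BIT   (1ULL << 62)   /* set when the SPTE is access-tracked */
#define ACCESSED_BIT    (1ULL << 5)    /* hardware "accessed" bit when A/D is on */
#define MMIO_MASK       (3ULL << 52)   /* bits reserved to tag MMIO SPTEs */
#define MMIO_VALUE      (3ULL << 52)   /* value stored in those bits for MMIO */

/* Rough analogue of spte_ad_enabled(): A/D bits are only meaningful when the
 * entry is neither access-tracked nor an MMIO marker. */
static bool spte_has_ad_bits(uint64_t spte)
{
        assert((spte & MMIO_MASK) != MMIO_VALUE);  /* mirrors the MMU_WARN_ON() */
        return !(spte & ACC_TRACK_BIT);
}

/* Rough analogue of spte_shadow_accessed_mask(): the accessed mask to test
 * against, or 0 when the entry tracks accesses by other means. */
static uint64_t spte_accessed_mask(uint64_t spte)
{
        return spte_has_ad_bits(spte) ? ACCESSED_BIT : 0;
}

int main(void)
{
        uint64_t plain = 0x1000 | ACCESSED_BIT;
        uint64_t tracked = 0x2000 | ACC_TRACK_BIT;

        printf("plain:   ad=%d accessed=%d\n", spte_has_ad_bits(plain),
               !!(plain & spte_accessed_mask(plain)));
        printf("tracked: ad=%d accessed=%d\n", spte_has_ad_bits(tracked),
               !!(tracked & spte_accessed_mask(tracked)));
        return 0;
}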
D | mmutrace.h |
    308          TP_PROTO(u64 spte, unsigned int kvm_gen, unsigned int spte_gen),
    309          TP_ARGS(spte, kvm_gen, spte_gen),
    314          __field(u64, spte)
    320          __entry->spte = spte;
    323          TP_printk("spte %llx kvm_gen %x spte-gen %x valid %d", __entry->spte,
    336          __field(u64, spte)
    347          __entry->spte = *sptep;
    350          __entry->r = shadow_present_mask || (__entry->spte & PT_PRESENT_MASK);
    351          __entry->x = is_executable_pte(__entry->spte);
    352          __entry->u = shadow_user_mask ? !!(__entry->spte & shadow_user_mask) : -1;
    [all …]
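The mmutrace.h hits are from tracepoints that record an SPTE and then pretty-print its generation and permission bits (present, executable, user) in TP_printk. A standalone sketch of the same decode-and-print idea, using made-up bit positions rather than the kernel's masks:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical bit positions used only for this sketch; the tracepoints in
 * mmutrace.h read the real masks (PT_PRESENT_MASK, shadow_user_mask, ...)
 * that KVM configures elsewhere. */
#define SPTE_PRESENT  (1ULL << 0)
#define SPTE_USER     (1ULL << 2)
#define SPTE_EXEC     (1ULL << 3)

/* Summarize an SPTE the way the trace output does: one character per
 * property, '-' when the corresponding bit is clear. */
static void format_spte(uint64_t spte, char out[4])
{
        out[0] = (spte & SPTE_PRESENT) ? 'r' : '-';
        out[1] = (spte & SPTE_USER)    ? 'u' : '-';
        out[2] = (spte & SPTE_EXEC)    ? 'x' : '-';
        out[3] = '\0';
}

int main(void)
{
        uint64_t spte = 0xabc000 | SPTE_PRESENT | SPTE_EXEC;
        char buf[4];

        format_spte(spte, buf);
        printf("spte %llx flags %s\n", (unsigned long long)spte, buf);
        return 0;
}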
D | paging_tmpl.h |
    158          struct kvm_mmu_page *sp, u64 *spte,
    174          drop_spte(vcpu->kvm, spte);
    499          u64 *spte, pt_element_t gpte, bool no_dirty_log)
    505          if (FNAME(prefetch_invalid_gpte)(vcpu, sp, spte, gpte))
    508          pgprintk("%s: gpte %llx spte %p\n", __func__, (u64)gpte, spte);
    522          mmu_set_spte(vcpu, spte, pte_access, 0, PT_PAGE_TABLE_LEVEL, gfn, pfn,
    530          u64 *spte, const void *pte)
    534          FNAME(prefetch_gpte)(vcpu, sp, spte, gpte, false);
    565          u64 *spte;
    577          spte = sp->spt + i;
    [all …]
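The paging_tmpl.h hits are from the guest-PTE prefetch path: an unusable guest PTE causes the shadow entry to be dropped via drop_spte(), otherwise mmu_set_spte() installs a shadow entry derived from it. Below is a userspace-only model of that flow; the types and helper names (guest_pte_t, shadow_slot, install_spte, ...) are illustrative stand-ins, not KVM's.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t guest_pte_t;

struct shadow_slot {
        uint64_t spte;      /* 0 means "not present" in this model */
};

#define GPTE_PRESENT  (1ULL << 0)
#define GPTE_RESERVED (1ULL << 51)   /* pretend reserved bit */

static bool gpte_is_invalid(guest_pte_t gpte)
{
        return !(gpte & GPTE_PRESENT) || (gpte & GPTE_RESERVED);
}

static void drop_shadow_entry(struct shadow_slot *slot)
{
        slot->spte = 0;
}

static void install_spte(struct shadow_slot *slot, guest_pte_t gpte)
{
        /* Copy the frame address and mark the shadow entry present. */
        slot->spte = (gpte & ~0xfffULL) | 1;
}

/* Analogue of the prefetch step: returns true if a shadow entry was set. */
static bool prefetch_gpte(struct shadow_slot *slot, guest_pte_t gpte)
{
        if (gpte_is_invalid(gpte)) {
                drop_shadow_entry(slot);
                return false;
        }
        install_spte(slot, gpte);
        return true;
}

int main(void)
{
        struct shadow_slot slot = { 0 };

        prefetch_gpte(&slot, 0x123000 | GPTE_PRESENT);
        printf("spte after valid gpte:   %#llx\n", (unsigned long long)slot.spte);

        prefetch_gpte(&slot, 0x456000);              /* not present */
        printf("spte after invalid gpte: %#llx\n", (unsigned long long)slot.spte);
        return 0;
}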
/arch/s390/mm/

D | pgtable.c |
    602          pte_t spte, tpte;
    608          spte = *sptep;
    609          if (!(pte_val(spte) & _PAGE_INVALID) &&
    610              !((pte_val(spte) & _PAGE_PROTECT) &&
    614          pte_val(tpte) = (pte_val(spte) & PAGE_MASK) |
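The s390 hit, ptep_shadow_pte() in pgtable.c, builds a shadow PTE from a source PTE only when the source is valid and a write-protected source is not asked to back a writable mapping. A minimal standalone model of that check, with made-up flag bits in place of _PAGE_INVALID/_PAGE_PROTECT:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical flag bits for this sketch only; the real _PAGE_INVALID,
 * _PAGE_PROTECT and PAGE_MASK come from the s390 headers. */
#define PG_INVALID  (1ULL << 9)
#define PG_PROTECT  (1ULL << 10)
#define PG_FRAME    (~0xfffULL)

/* A shadow PTE may only be derived from a source PTE that is valid, and a
 * read-only source must not back a writable shadow mapping. */
static bool build_shadow_pte(uint64_t src, bool shadow_wants_write,
                             uint64_t *out)
{
        if (src & PG_INVALID)
                return false;
        if ((src & PG_PROTECT) && shadow_wants_write)
                return false;

        *out = (src & PG_FRAME) | (src & PG_PROTECT);
        return true;
}

int main(void)
{
        uint64_t shadow;

        if (build_shadow_pte(0x7000, /*shadow_wants_write=*/true, &shadow))
                printf("shadow pte: %#llx\n", (unsigned long long)shadow);

        if (!build_shadow_pte(0x8000 | PG_PROTECT, /*shadow_wants_write=*/true,
                              &shadow))
                printf("refused: source is write-protected\n");
        return 0;
}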
/arch/x86/include/asm/

D | kvm_host.h |
    350          u64 *spte, const void *pte);
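The kvm_host.h hit is the tail of a per-MMU callback prototype taking a pointer to the shadow PTE plus an opaque guest PTE. A small sketch of that callback-table pattern, with hypothetical names (mmu_ops, update_spte) rather than the kernel's actual struct layout:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical callback table modelled on the "ops struct with an
 * update-PTE hook" pattern the kvm_host.h hit belongs to; the names here
 * are illustrative, not the kernel's. */
struct mmu_ops {
        void (*update_spte)(uint64_t *spte, const void *guest_pte);
};

static void demo_update_spte(uint64_t *spte, const void *guest_pte)
{
        /* Derive the new shadow entry from the (opaque) guest PTE. */
        const uint64_t *gpte = guest_pte;

        *spte = (*gpte & ~0xfffULL) | 1;   /* frame + present, for the demo */
}

int main(void)
{
        struct mmu_ops ops = { .update_spte = demo_update_spte };
        uint64_t gpte = 0x9000;
        uint64_t spte = 0;

        ops.update_spte(&spte, &gpte);
        printf("updated spte: %#llx\n", (unsigned long long)spte);
        return 0;
}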