Lines Matching refs:hptep (references to the hptep HPTE pointer in the KVM Book3S HV MMU code)
357 __be64 *hptep; in kvmppc_mmu_book3s_64_hv_xlate() local
380 hptep = (__be64 *)(kvm->arch.hpt.virt + (index << 4)); in kvmppc_mmu_book3s_64_hv_xlate()
381 v = orig_v = be64_to_cpu(hptep[0]) & ~HPTE_V_HVLOCK; in kvmppc_mmu_book3s_64_hv_xlate()
383 v = hpte_new_to_old_v(v, be64_to_cpu(hptep[1])); in kvmppc_mmu_book3s_64_hv_xlate()
386 unlock_hpte(hptep, orig_v); in kvmppc_mmu_book3s_64_hv_xlate()
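
The xlate hits above show the address arithmetic that recurs throughout this
listing: each HPTE is two big-endian 64-bit words, so entry 'index' lives at
byte offset index << 4 from kvm->arch.hpt.virt, and the first (valid) word is
read with the software lock bit HPTE_V_HVLOCK masked off. Below is a minimal
userspace sketch of just that arithmetic; the lock-bit value and the helper
name are illustrative stand-ins, not the kernel definitions.

#include <stdint.h>
#include <stdio.h>

#define MODEL_HPTE_V_HVLOCK (1ULL << 0)   /* illustrative lock bit, not the kernel's */

/* Big-endian dword to host order; assumes a little-endian host and uses the
 * GCC/Clang byte-swap builtin. */
static uint64_t model_be64_to_cpu(uint64_t be)
{
        return __builtin_bswap64(be);
}

int main(void)
{
        uint64_t hpt[8 * 2] = { 0 };      /* 8 HPTEs, two dwords each */
        unsigned long index = 3;

        /* hptep = hpt.virt + (index << 4): one HPTE is 16 bytes */
        uint64_t *hptep = (uint64_t *)((char *)hpt + (index << 4));

        /* read the valid word and drop the software lock bit, as on line 381 */
        uint64_t v = model_be64_to_cpu(hptep[0]) & ~MODEL_HPTE_V_HVLOCK;

        printf("hpte[%lu].v = 0x%016llx\n", index, (unsigned long long)v);
        return 0;
}
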
485 __be64 *hptep; in kvmppc_book3s_hv_page_fault() local
525 hptep = (__be64 *)(kvm->arch.hpt.virt + (index << 4)); in kvmppc_book3s_hv_page_fault()
528 while (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) in kvmppc_book3s_hv_page_fault()
530 hpte[0] = be64_to_cpu(hptep[0]) & ~HPTE_V_HVLOCK; in kvmppc_book3s_hv_page_fault()
531 hpte[1] = be64_to_cpu(hptep[1]); in kvmppc_book3s_hv_page_fault()
533 unlock_hpte(hptep, hpte[0]); in kvmppc_book3s_hv_page_fault()
649 while (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) in kvmppc_book3s_hv_page_fault()
651 hnow_v = be64_to_cpu(hptep[0]); in kvmppc_book3s_hv_page_fault()
652 hnow_r = be64_to_cpu(hptep[1]); in kvmppc_book3s_hv_page_fault()
688 if (be64_to_cpu(hptep[0]) & HPTE_V_VALID) { in kvmppc_book3s_hv_page_fault()
691 hptep[0] |= cpu_to_be64(HPTE_V_ABSENT); in kvmppc_book3s_hv_page_fault()
692 kvmppc_invalidate_hpte(kvm, hptep, index); in kvmppc_book3s_hv_page_fault()
694 r |= be64_to_cpu(hptep[1]) & (HPTE_R_R | HPTE_R_C); in kvmppc_book3s_hv_page_fault()
703 hptep[1] = cpu_to_be64(r); in kvmppc_book3s_hv_page_fault()
705 __unlock_hpte(hptep, hpte[0]); in kvmppc_book3s_hv_page_fault()
726 __unlock_hpte(hptep, be64_to_cpu(hptep[0])); in kvmppc_book3s_hv_page_fault()
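
The page-fault hits use the HPTE's own lock bit as a per-entry spinlock: spin
on try_lock_hpte() (lines 528 and 649), copy both words with HPTE_V_HVLOCK
masked, drop the lock while the page is brought in, then re-take it and
re-read hnow_v/hnow_r to detect whether the entry changed in the meantime
before invalidating and rewriting it. The same lock / wait / retry discipline
shows up again in kvm_unmap_rmapp and kvm_age_rmapp below. What follows is a
compact C11 userspace model of that handshake, with stand-in bit values and
helper names rather than the kernel's.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MODEL_HVLOCK (1ULL << 0)            /* illustrative, not the kernel bit */

/* Try to set the lock bit once; fails if another path already holds it. */
static bool model_try_lock_hpte(_Atomic uint64_t *vp)
{
        uint64_t old = atomic_load(vp);
        if (old & MODEL_HVLOCK)
                return false;
        return atomic_compare_exchange_strong(vp, &old, old | MODEL_HVLOCK);
}

/* Release: publish the new valid word with the lock bit cleared. */
static void model_unlock_hpte(_Atomic uint64_t *vp, uint64_t new_v)
{
        atomic_store(vp, new_v & ~MODEL_HVLOCK);
}

int main(void)
{
        _Atomic uint64_t hpte_v = 0x8000000000000001ULL;  /* some valid word */

        while (!model_try_lock_hpte(&hpte_v))
                ;                   /* the kernel typically calls cpu_relax() here */

        uint64_t v = atomic_load(&hpte_v) & ~MODEL_HVLOCK;
        /* ... fault handling would happen here, possibly after dropping the
         * lock and re-taking it to re-check that the entry has not changed ... */
        model_unlock_hpte(&hpte_v, v);

        printf("v = 0x%016llx\n", (unsigned long long)atomic_load(&hpte_v));
        return 0;
}
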
799 __be64 *hptep = (__be64 *) (kvm->arch.hpt.virt + (i << 4)); in kvmppc_unmap_hpte() local
819 psize = hpte_page_size(be64_to_cpu(hptep[0]), ptel); in kvmppc_unmap_hpte()
820 if ((be64_to_cpu(hptep[0]) & HPTE_V_VALID) && in kvmppc_unmap_hpte()
822 hptep[0] |= cpu_to_be64(HPTE_V_ABSENT); in kvmppc_unmap_hpte()
823 kvmppc_invalidate_hpte(kvm, hptep, i); in kvmppc_unmap_hpte()
824 hptep[1] &= ~cpu_to_be64(HPTE_R_KEY_HI | HPTE_R_KEY_LO); in kvmppc_unmap_hpte()
826 rcbits = be64_to_cpu(hptep[1]) & (HPTE_R_R | HPTE_R_C); in kvmppc_unmap_hpte()
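
The kvmppc_unmap_hpte hits follow one fixed order for tearing an entry down:
mark it absent to the guest, invalidate it, strip the protection-key bits from
the second word, then harvest the referenced/changed (R/C) bits so they can be
folded back into the rmap. Below is a sketch of that bit handling on
host-endian words, using illustrative bit positions rather than the real
HPTE_V_*/HPTE_R_* masks from the kernel headers.

#include <stdint.h>
#include <stdio.h>

/* Illustrative bit positions only; the real masks come from the kernel. */
#define M_V_VALID   (1ULL << 0)
#define M_V_ABSENT  (1ULL << 1)
#define M_R_KEY     (0xfULL << 9)  /* stands in for HPTE_R_KEY_HI|HPTE_R_KEY_LO */
#define M_R_R       (1ULL << 8)    /* referenced */
#define M_R_C       (1ULL << 7)    /* changed    */

int main(void)
{
        uint64_t v = M_V_VALID;                /* entry currently valid */
        uint64_t r = M_R_KEY | M_R_R | M_R_C;  /* keys set, R and C set  */
        uint64_t rcbits = 0;

        if (v & M_V_VALID) {
                v |= M_V_ABSENT;               /* guest now sees the entry absent */
                /* the kernel invalidates the entry here (kvmppc_invalidate_hpte) */
                r &= ~M_R_KEY;                 /* strip the protection keys       */
                rcbits = r & (M_R_R | M_R_C);  /* carry R/C over to the rmap      */
        }
        printf("v=%#llx r=%#llx rc=%#llx\n", (unsigned long long)v,
               (unsigned long long)r, (unsigned long long)rcbits);
        return 0;
}
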
841 __be64 *hptep; in kvm_unmap_rmapp() local
858 hptep = (__be64 *) (kvm->arch.hpt.virt + (i << 4)); in kvm_unmap_rmapp()
859 if (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) { in kvm_unmap_rmapp()
862 while (be64_to_cpu(hptep[0]) & HPTE_V_HVLOCK) in kvm_unmap_rmapp()
869 __unlock_hpte(hptep, be64_to_cpu(hptep[0])); in kvm_unmap_rmapp()
923 __be64 *hptep; in kvm_age_rmapp() local
941 hptep = (__be64 *) (kvm->arch.hpt.virt + (i << 4)); in kvm_age_rmapp()
945 if (!(be64_to_cpu(hptep[1]) & HPTE_R_R)) in kvm_age_rmapp()
948 if (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) { in kvm_age_rmapp()
951 while (be64_to_cpu(hptep[0]) & HPTE_V_HVLOCK) in kvm_age_rmapp()
957 if ((be64_to_cpu(hptep[0]) & HPTE_V_VALID) && in kvm_age_rmapp()
958 (be64_to_cpu(hptep[1]) & HPTE_R_R)) { in kvm_age_rmapp()
959 kvmppc_clear_ref_hpte(kvm, hptep, i); in kvm_age_rmapp()
966 __unlock_hpte(hptep, be64_to_cpu(hptep[0])); in kvm_age_rmapp()
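
The kvm_age_rmapp hits pair a cheap unlocked test of the referenced bit (line
945) with a locked re-check before anything is cleared (lines 957-959), so
entries that were never referenced do not pay for the lock. Here is a generic
sketch of that check / lock / re-check shape, again with stand-in names and
bit values rather than the kernel's.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define M_LOCK (1ULL << 0)          /* stand-in for HPTE_V_HVLOCK */
#define M_REF  (1ULL << 8)          /* stand-in for HPTE_R_R      */

static bool try_lock_word(_Atomic uint64_t *w)
{
        uint64_t old = atomic_load(w);
        return !(old & M_LOCK) &&
               atomic_compare_exchange_strong(w, &old, old | M_LOCK);
}

/* Returns true if the entry had been referenced since the last scan. */
static bool age_entry(_Atomic uint64_t *v, _Atomic uint64_t *r)
{
        if (!(atomic_load(r) & M_REF))  /* unlocked fast path: nothing to do */
                return false;

        while (!try_lock_word(v))
                ;                       /* the kernel waits and retries here */

        bool referenced = atomic_load(r) & M_REF;
        if (referenced)
                atomic_fetch_and(r, ~M_REF);   /* clear the reference bit */

        atomic_fetch_and(v, ~M_LOCK);          /* drop the lock */
        return referenced;
}

int main(void)
{
        _Atomic uint64_t v = 0, r = M_REF;
        return age_entry(&v, &r) ? 0 : 1;
}
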
1045 __be64 *hptep; in kvm_test_clear_dirty_npages() local
1066 hptep = (__be64 *) (kvm->arch.hpt.virt + (i << 4)); in kvm_test_clear_dirty_npages()
1083 hptep1 = be64_to_cpu(hptep[1]); in kvm_test_clear_dirty_npages()
1088 if (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) { in kvm_test_clear_dirty_npages()
1091 while (hptep[0] & cpu_to_be64(HPTE_V_HVLOCK)) in kvm_test_clear_dirty_npages()
1097 if (!(hptep[0] & cpu_to_be64(HPTE_V_VALID))) { in kvm_test_clear_dirty_npages()
1098 __unlock_hpte(hptep, be64_to_cpu(hptep[0])); in kvm_test_clear_dirty_npages()
1103 hptep[0] |= cpu_to_be64(HPTE_V_ABSENT); in kvm_test_clear_dirty_npages()
1104 kvmppc_invalidate_hpte(kvm, hptep, i); in kvm_test_clear_dirty_npages()
1105 v = be64_to_cpu(hptep[0]); in kvm_test_clear_dirty_npages()
1106 r = be64_to_cpu(hptep[1]); in kvm_test_clear_dirty_npages()
1108 hptep[1] = cpu_to_be64(r & ~HPTE_R_C); in kvm_test_clear_dirty_npages()
1121 __unlock_hpte(hptep, v); in kvm_test_clear_dirty_npages()
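
The dirty-logging hits harvest the hardware changed bit: when HPTE_R_C is set
the entry is locked, temporarily made absent and invalidated so the bit cannot
be set again mid-update, C is cleared in the second word, and the page is
reported dirty. The toy model below only covers the harvest itself, over an
array of second words and with an illustrative bit position for C.

#include <stdint.h>
#include <stdio.h>

#define M_R_C (1ULL << 7)           /* stand-in for HPTE_R_C (changed) */

/* Clear the changed bit on each entry and count how many were dirty. */
static unsigned harvest_dirty(uint64_t *r_words, unsigned n)
{
        unsigned npages_dirty = 0;

        for (unsigned i = 0; i < n; i++) {
                if (!(r_words[i] & M_R_C))
                        continue;   /* clean: nothing to log */
                /* the kernel locks and invalidates the HPTE before this step */
                r_words[i] &= ~M_R_C;
                npages_dirty++;
        }
        return npages_dirty;
}

int main(void)
{
        uint64_t r[4] = { 0, M_R_C, 0, M_R_C };

        printf("dirty pages: %u\n", harvest_dirty(r, 4));
        return 0;
}
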
1257 __be64 *hptep, *new_hptep; in resize_hpt_rehash_hpte() local
1264 hptep = (__be64 *)(old->virt + (idx << 4)); in resize_hpt_rehash_hpte()
1269 vpte = be64_to_cpu(hptep[0]); in resize_hpt_rehash_hpte()
1273 while (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) in resize_hpt_rehash_hpte()
1276 vpte = be64_to_cpu(hptep[0]); in resize_hpt_rehash_hpte()
1311 vpte = be64_to_cpu(hptep[0]); in resize_hpt_rehash_hpte()
1320 rpte = be64_to_cpu(hptep[1]); in resize_hpt_rehash_hpte()
1380 unlock_hpte(hptep, vpte); in resize_hpt_rehash_hpte()
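
The rehash hits use the same 16-byte stride into the old table
(old->virt + (idx << 4)) and the same discipline as the other paths: peek at
the valid word, lock the entry, then re-read both words under the lock before
migrating it into the resized table. The sketch below only models the copy of
one two-word entry between tables; the locking and the recomputation of the
new hash index are left out, and all names are hypothetical.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct model_hpt {
        uint64_t *virt;             /* backing array: two dwords per entry */
        unsigned long nentries;
};

/* Copy entry 'idx' of the old table into entry 'new_idx' of the new one.
 * In the kernel this is done with the old HPTE locked and its value re-read
 * under the lock; here it is a plain 16-byte copy. */
static void model_rehash_one(const struct model_hpt *old, unsigned long idx,
                             struct model_hpt *new_hpt, unsigned long new_idx)
{
        const uint64_t *hptep = (const uint64_t *)((const char *)old->virt +
                                                   (idx << 4));
        uint64_t *new_hptep = (uint64_t *)((char *)new_hpt->virt +
                                           (new_idx << 4));

        memcpy(new_hptep, hptep, 16);   /* one HPTE: valid word + second word */
}

int main(void)
{
        uint64_t old_mem[4 * 2] = { [2] = 0xdeadbeefULL, [3] = 0x1234ULL };
        uint64_t new_mem[8 * 2] = { 0 };
        struct model_hpt old = { old_mem, 4 };
        struct model_hpt new_hpt = { new_mem, 8 };

        model_rehash_one(&old, 1, &new_hpt, 5);
        printf("new[5].v = %#llx\n", (unsigned long long)new_mem[10]);
        return 0;
}
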