Lines Matching refs:arch

81 if (vcpu->arch.hflags & BOOK3S_HFLAG_SPLIT_HACK) in kvmppc_fixup_split_real()
88 vcpu->arch.hflags |= BOOK3S_HFLAG_SPLIT_HACK; in kvmppc_fixup_split_real()
111 current->thread.kvm_shadow_vcpu = vcpu->arch.shadow_vcpu; in kvmppc_core_vcpu_load_pr()
148 svcpu->gpr[0] = vcpu->arch.gpr[0]; in kvmppc_copy_to_svcpu()
149 svcpu->gpr[1] = vcpu->arch.gpr[1]; in kvmppc_copy_to_svcpu()
150 svcpu->gpr[2] = vcpu->arch.gpr[2]; in kvmppc_copy_to_svcpu()
151 svcpu->gpr[3] = vcpu->arch.gpr[3]; in kvmppc_copy_to_svcpu()
152 svcpu->gpr[4] = vcpu->arch.gpr[4]; in kvmppc_copy_to_svcpu()
153 svcpu->gpr[5] = vcpu->arch.gpr[5]; in kvmppc_copy_to_svcpu()
154 svcpu->gpr[6] = vcpu->arch.gpr[6]; in kvmppc_copy_to_svcpu()
155 svcpu->gpr[7] = vcpu->arch.gpr[7]; in kvmppc_copy_to_svcpu()
156 svcpu->gpr[8] = vcpu->arch.gpr[8]; in kvmppc_copy_to_svcpu()
157 svcpu->gpr[9] = vcpu->arch.gpr[9]; in kvmppc_copy_to_svcpu()
158 svcpu->gpr[10] = vcpu->arch.gpr[10]; in kvmppc_copy_to_svcpu()
159 svcpu->gpr[11] = vcpu->arch.gpr[11]; in kvmppc_copy_to_svcpu()
160 svcpu->gpr[12] = vcpu->arch.gpr[12]; in kvmppc_copy_to_svcpu()
161 svcpu->gpr[13] = vcpu->arch.gpr[13]; in kvmppc_copy_to_svcpu()
162 svcpu->cr = vcpu->arch.cr; in kvmppc_copy_to_svcpu()
163 svcpu->xer = vcpu->arch.xer; in kvmppc_copy_to_svcpu()
164 svcpu->ctr = vcpu->arch.ctr; in kvmppc_copy_to_svcpu()
165 svcpu->lr = vcpu->arch.lr; in kvmppc_copy_to_svcpu()
166 svcpu->pc = vcpu->arch.pc; in kvmppc_copy_to_svcpu()
168 svcpu->shadow_fscr = vcpu->arch.shadow_fscr; in kvmppc_copy_to_svcpu()
174 vcpu->arch.entry_tb = get_tb(); in kvmppc_copy_to_svcpu()
175 vcpu->arch.entry_vtb = get_vtb(); in kvmppc_copy_to_svcpu()
177 vcpu->arch.entry_ic = mfspr(SPRN_IC); in kvmppc_copy_to_svcpu()
198 vcpu->arch.gpr[0] = svcpu->gpr[0]; in kvmppc_copy_from_svcpu()
199 vcpu->arch.gpr[1] = svcpu->gpr[1]; in kvmppc_copy_from_svcpu()
200 vcpu->arch.gpr[2] = svcpu->gpr[2]; in kvmppc_copy_from_svcpu()
201 vcpu->arch.gpr[3] = svcpu->gpr[3]; in kvmppc_copy_from_svcpu()
202 vcpu->arch.gpr[4] = svcpu->gpr[4]; in kvmppc_copy_from_svcpu()
203 vcpu->arch.gpr[5] = svcpu->gpr[5]; in kvmppc_copy_from_svcpu()
204 vcpu->arch.gpr[6] = svcpu->gpr[6]; in kvmppc_copy_from_svcpu()
205 vcpu->arch.gpr[7] = svcpu->gpr[7]; in kvmppc_copy_from_svcpu()
206 vcpu->arch.gpr[8] = svcpu->gpr[8]; in kvmppc_copy_from_svcpu()
207 vcpu->arch.gpr[9] = svcpu->gpr[9]; in kvmppc_copy_from_svcpu()
208 vcpu->arch.gpr[10] = svcpu->gpr[10]; in kvmppc_copy_from_svcpu()
209 vcpu->arch.gpr[11] = svcpu->gpr[11]; in kvmppc_copy_from_svcpu()
210 vcpu->arch.gpr[12] = svcpu->gpr[12]; in kvmppc_copy_from_svcpu()
211 vcpu->arch.gpr[13] = svcpu->gpr[13]; in kvmppc_copy_from_svcpu()
212 vcpu->arch.cr = svcpu->cr; in kvmppc_copy_from_svcpu()
213 vcpu->arch.xer = svcpu->xer; in kvmppc_copy_from_svcpu()
214 vcpu->arch.ctr = svcpu->ctr; in kvmppc_copy_from_svcpu()
215 vcpu->arch.lr = svcpu->lr; in kvmppc_copy_from_svcpu()
216 vcpu->arch.pc = svcpu->pc; in kvmppc_copy_from_svcpu()
217 vcpu->arch.shadow_srr1 = svcpu->shadow_srr1; in kvmppc_copy_from_svcpu()
218 vcpu->arch.fault_dar = svcpu->fault_dar; in kvmppc_copy_from_svcpu()
219 vcpu->arch.fault_dsisr = svcpu->fault_dsisr; in kvmppc_copy_from_svcpu()
220 vcpu->arch.last_inst = svcpu->last_inst; in kvmppc_copy_from_svcpu()
222 vcpu->arch.shadow_fscr = svcpu->shadow_fscr; in kvmppc_copy_from_svcpu()
227 vcpu->arch.purr += get_tb() - vcpu->arch.entry_tb; in kvmppc_copy_from_svcpu()
228 vcpu->arch.spurr += get_tb() - vcpu->arch.entry_tb; in kvmppc_copy_from_svcpu()
229 vcpu->arch.vtb += get_vtb() - vcpu->arch.entry_vtb; in kvmppc_copy_from_svcpu()
231 vcpu->arch.ic += mfspr(SPRN_IC) - vcpu->arch.entry_ic; in kvmppc_copy_from_svcpu()
329 smsr |= (guest_msr & vcpu->arch.guest_owned_ext); in kvmppc_recalc_shadow_msr()
334 vcpu->arch.shadow_msr = smsr; in kvmppc_recalc_shadow_msr()
350 if (!vcpu->arch.pending_exceptions) { in kvmppc_set_msr_pr()
372 if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) { in kvmppc_set_msr_pr()
373 struct kvm_vcpu_arch *a = &vcpu->arch; in kvmppc_set_msr_pr()
390 if (vcpu->arch.magic_page_pa && in kvmppc_set_msr_pr()
393 kvmppc_mmu_pte_flush(vcpu, (uint32_t)vcpu->arch.magic_page_pa, in kvmppc_set_msr_pr()
406 vcpu->arch.hflags &= ~BOOK3S_HFLAG_SLB; in kvmppc_set_pvr_pr()
407 vcpu->arch.pvr = pvr; in kvmppc_set_pvr_pr()
414 vcpu->arch.cpu_type = KVM_CPU_3S_64; in kvmppc_set_pvr_pr()
422 vcpu->arch.cpu_type = KVM_CPU_3S_32; in kvmppc_set_pvr_pr()
429 vcpu->arch.hflags &= ~BOOK3S_HFLAG_DCBZ32; in kvmppc_set_pvr_pr()
430 if (vcpu->arch.mmu.is_dcbz32(vcpu) && (mfmsr() & MSR_HV) && in kvmppc_set_pvr_pr()
432 vcpu->arch.hflags |= BOOK3S_HFLAG_DCBZ32; in kvmppc_set_pvr_pr()
451 vcpu->arch.hflags |= BOOK3S_HFLAG_MULTI_PGSIZE | in kvmppc_set_pvr_pr()
458 vcpu->arch.hflags |= BOOK3S_HFLAG_DCBZ32; in kvmppc_set_pvr_pr()
473 vcpu->arch.hflags |= BOOK3S_HFLAG_NATIVE_PS; in kvmppc_set_pvr_pr()
517 ulong mp_pa = vcpu->arch.magic_page_pa; in kvmppc_visible_gpa()
545 if (data && (vcpu->arch.fault_dsisr & DSISR_ISSTORE)) in kvmppc_handle_pagefault()
550 page_found = vcpu->arch.mmu.xlate(vcpu, eaddr, &pte, data, iswrite); in kvmppc_handle_pagefault()
567 (vcpu->arch.hflags & BOOK3S_HFLAG_SPLIT_HACK) && in kvmppc_handle_pagefault()
572 vcpu->arch.mmu.esid_to_vsid(vcpu, eaddr >> SID_SHIFT, &vsid); in kvmppc_handle_pagefault()
585 if (vcpu->arch.mmu.is_dcbz32(vcpu) && in kvmppc_handle_pagefault()
586 (!(vcpu->arch.hflags & BOOK3S_HFLAG_DCBZ32))) { in kvmppc_handle_pagefault()
597 u64 ssrr1 = vcpu->arch.shadow_srr1; in kvmppc_handle_pagefault()
600 kvmppc_set_dsisr(vcpu, vcpu->arch.fault_dsisr); in kvmppc_handle_pagefault()
605 u32 dsisr = vcpu->arch.fault_dsisr; in kvmppc_handle_pagefault()
606 u64 ssrr1 = vcpu->arch.shadow_srr1; in kvmppc_handle_pagefault()
619 if (data && !(vcpu->arch.fault_dsisr & DSISR_NOHPTE)) { in kvmppc_handle_pagefault()
631 else if (vcpu->arch.mmu.is_dcbz32(vcpu) && in kvmppc_handle_pagefault()
632 (!(vcpu->arch.hflags & BOOK3S_HFLAG_DCBZ32))) in kvmppc_handle_pagefault()
637 vcpu->arch.paddr_accessed = pte.raddr; in kvmppc_handle_pagefault()
638 vcpu->arch.vaddr_accessed = pte.eaddr; in kvmppc_handle_pagefault()
664 msr &= vcpu->arch.guest_owned_ext; in kvmppc_giveup_ext()
691 vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX); in kvmppc_giveup_ext()
699 if (!(vcpu->arch.shadow_fscr & (1ULL << fac))) { in kvmppc_giveup_fac()
706 vcpu->arch.tar = mfspr(SPRN_TAR); in kvmppc_giveup_fac()
708 vcpu->arch.shadow_fscr &= ~FSCR_TAR; in kvmppc_giveup_fac()
721 if (vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE) in kvmppc_handle_ext()
747 msr &= ~vcpu->arch.guest_owned_ext; in kvmppc_handle_ext()
758 load_fp_state(&vcpu->arch.fp); in kvmppc_handle_ext()
759 t->fp_save_area = &vcpu->arch.fp; in kvmppc_handle_ext()
767 load_vr_state(&vcpu->arch.vr); in kvmppc_handle_ext()
768 t->vr_save_area = &vcpu->arch.vr; in kvmppc_handle_ext()
774 vcpu->arch.guest_owned_ext |= msr; in kvmppc_handle_ext()
788 lost_ext = vcpu->arch.guest_owned_ext & ~current->thread.regs->msr; in kvmppc_handle_lost_ext()
795 load_fp_state(&vcpu->arch.fp); in kvmppc_handle_lost_ext()
802 load_vr_state(&vcpu->arch.vr); in kvmppc_handle_lost_ext()
814 vcpu->arch.fscr &= ~(0xffULL << 56); in kvmppc_trigger_fac_interrupt()
815 vcpu->arch.fscr |= (fac << 56); in kvmppc_trigger_fac_interrupt()
845 guest_fac_enabled = (vcpu->arch.fscr & (1ULL << fac)); in kvmppc_handle_fac()
865 mtspr(SPRN_TAR, vcpu->arch.tar); in kvmppc_handle_fac()
866 vcpu->arch.shadow_fscr |= FSCR_TAR; in kvmppc_handle_fac()
878 if ((vcpu->arch.fscr & FSCR_TAR) && !(fscr & FSCR_TAR)) { in kvmppc_set_fscr()
882 vcpu->arch.fscr = fscr; in kvmppc_set_fscr()
905 ulong shadow_srr1 = vcpu->arch.shadow_srr1; in kvmppc_handle_exit_pr()
935 } else if (vcpu->arch.mmu.is_dcbz32(vcpu) && in kvmppc_handle_exit_pr()
936 (!(vcpu->arch.hflags & BOOK3S_HFLAG_DCBZ32))) { in kvmppc_handle_exit_pr()
956 u32 fault_dsisr = vcpu->arch.fault_dsisr; in kvmppc_handle_exit_pr()
1035 flags = vcpu->arch.shadow_srr1 & 0x1f0000ull; in kvmppc_handle_exit_pr()
1089 if (vcpu->arch.papr_enabled) { in kvmppc_handle_exit_pr()
1099 if (vcpu->arch.papr_enabled && in kvmppc_handle_exit_pr()
1119 vcpu->arch.hcall_needed = 1; in kvmppc_handle_exit_pr()
1121 } else if (vcpu->arch.osi_enabled && in kvmppc_handle_exit_pr()
1131 vcpu->arch.osi_needed = 1; in kvmppc_handle_exit_pr()
1154 if (vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE) { in kvmppc_handle_exit_pr()
1206 kvmppc_handle_fac(vcpu, vcpu->arch.shadow_fscr >> 56); in kvmppc_handle_exit_pr()
1217 ulong shadow_srr1 = vcpu->arch.shadow_srr1; in kvmppc_handle_exit_pr()
1260 sregs->pvr = vcpu->arch.pvr; in kvm_arch_vcpu_ioctl_get_sregs_pr()
1263 if (vcpu->arch.hflags & BOOK3S_HFLAG_SLB) { in kvm_arch_vcpu_ioctl_get_sregs_pr()
1265 sregs->u.s.ppc64.slb[i].slbe = vcpu->arch.slb[i].orige | i; in kvm_arch_vcpu_ioctl_get_sregs_pr()
1266 sregs->u.s.ppc64.slb[i].slbv = vcpu->arch.slb[i].origv; in kvm_arch_vcpu_ioctl_get_sregs_pr()
1290 if (vcpu->arch.hflags & BOOK3S_HFLAG_SLB) { in kvm_arch_vcpu_ioctl_set_sregs_pr()
1292 vcpu->arch.mmu.slbmte(vcpu, sregs->u.s.ppc64.slb[i].slbv, in kvm_arch_vcpu_ioctl_set_sregs_pr()
1297 vcpu->arch.mmu.mtsrin(vcpu, i, sregs->u.s.ppc32.sr[i]); in kvm_arch_vcpu_ioctl_set_sregs_pr()
1334 if (vcpu->arch.intr_msr & MSR_LE) in kvmppc_get_one_reg_pr()
1350 vcpu->arch.intr_msr |= MSR_LE; in kvmppc_set_lpcr_pr()
1352 vcpu->arch.intr_msr &= ~MSR_LE; in kvmppc_set_lpcr_pr()
1392 vcpu->arch.book3s = vcpu_book3s; in kvmppc_core_vcpu_create_pr()
1395 vcpu->arch.shadow_vcpu = in kvmppc_core_vcpu_create_pr()
1396 kzalloc(sizeof(*vcpu->arch.shadow_vcpu), GFP_KERNEL); in kvmppc_core_vcpu_create_pr()
1397 if (!vcpu->arch.shadow_vcpu) in kvmppc_core_vcpu_create_pr()
1409 vcpu->arch.shared = (void *)p; in kvmppc_core_vcpu_create_pr()
1413 vcpu->arch.shared_big_endian = true; in kvmppc_core_vcpu_create_pr()
1415 vcpu->arch.shared_big_endian = false; in kvmppc_core_vcpu_create_pr()
1423 vcpu->arch.pvr = 0x3C0301; in kvmppc_core_vcpu_create_pr()
1425 vcpu->arch.pvr = mfspr(SPRN_PVR); in kvmppc_core_vcpu_create_pr()
1426 vcpu->arch.intr_msr = MSR_SF; in kvmppc_core_vcpu_create_pr()
1429 vcpu->arch.pvr = 0x84202; in kvmppc_core_vcpu_create_pr()
1431 kvmppc_set_pvr_pr(vcpu, vcpu->arch.pvr); in kvmppc_core_vcpu_create_pr()
1432 vcpu->arch.slb_nr = 64; in kvmppc_core_vcpu_create_pr()
1434 vcpu->arch.shadow_msr = MSR_USER64 & ~MSR_LE; in kvmppc_core_vcpu_create_pr()
1446 kfree(vcpu->arch.shadow_vcpu); in kvmppc_core_vcpu_create_pr()
1460 free_page((unsigned long)vcpu->arch.shared & PAGE_MASK); in kvmppc_core_vcpu_free_pr()
1463 kfree(vcpu->arch.shadow_vcpu); in kvmppc_core_vcpu_free_pr()
1477 if (!vcpu->arch.sane) { in kvmppc_vcpu_run_pr()
1633 if (vcpu && (vcpu->arch.hflags & BOOK3S_HFLAG_MULTI_PGSIZE)) { in kvm_vm_ioctl_get_smmu_info_pr()
1664 mutex_init(&kvm->arch.hpt_mutex); in kvmppc_core_init_vm_pr()
1683 WARN_ON(!list_empty(&kvm->arch.spapr_tce_tables)); in kvmppc_core_destroy_vm_pr()