Lines Matching refs:arch
46 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_compute_return_epc() local
66 arch->gprs[insn.r_format.rd] = epc + 8; in kvm_compute_return_epc()
69 nextpc = arch->gprs[insn.r_format.rs]; in kvm_compute_return_epc()
85 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
94 if ((long)arch->gprs[insn.i_format.rs] >= 0) in kvm_compute_return_epc()
103 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
104 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
113 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
114 if ((long)arch->gprs[insn.i_format.rs] >= 0) in kvm_compute_return_epc()
142 arch->gprs[31] = instpc + 8; in kvm_compute_return_epc()
155 if (arch->gprs[insn.i_format.rs] == in kvm_compute_return_epc()
156 arch->gprs[insn.i_format.rt]) in kvm_compute_return_epc()
165 if (arch->gprs[insn.i_format.rs] != in kvm_compute_return_epc()
166 arch->gprs[insn.i_format.rt]) in kvm_compute_return_epc()
179 if ((long)arch->gprs[insn.i_format.rs] <= 0) in kvm_compute_return_epc()
192 if ((long)arch->gprs[insn.i_format.rs] > 0) in kvm_compute_return_epc()
249 err = kvm_compute_return_epc(vcpu, vcpu->arch.pc, in update_pc()
250 &vcpu->arch.pc); in update_pc()
254 vcpu->arch.pc += 4; in update_pc()
257 kvm_debug("update_pc(): New PC: %#lx\n", vcpu->arch.pc); in update_pc()
276 *out = vcpu->arch.host_cp0_badinstr; in kvm_get_badinstr()
297 *out = vcpu->arch.host_cp0_badinstrp; in kvm_get_badinstrp()
314 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disabled()
316 return (vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC) || in kvm_mips_count_disabled()
333 delta = now_ns + vcpu->arch.count_dyn_bias; in kvm_mips_ktime_to_count()
335 if (delta >= vcpu->arch.count_period) { in kvm_mips_ktime_to_count()
337 periods = div64_s64(now_ns, vcpu->arch.count_period); in kvm_mips_ktime_to_count()
338 vcpu->arch.count_dyn_bias = -periods * vcpu->arch.count_period; in kvm_mips_ktime_to_count()
340 delta = now_ns + vcpu->arch.count_dyn_bias; in kvm_mips_ktime_to_count()
353 return div_u64(delta * vcpu->arch.count_hz, NSEC_PER_SEC); in kvm_mips_ktime_to_count()
368 if (unlikely(vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC)) in kvm_mips_count_time()
369 return vcpu->arch.count_resume; in kvm_mips_count_time()
386 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_read_count_running()
392 count = vcpu->arch.count_bias + kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_read_count_running()
408 expires = hrtimer_get_expires(&vcpu->arch.comparecount_timer); in kvm_mips_read_count_running()
409 threshold = ktime_add_ns(now, vcpu->arch.count_period / 4); in kvm_mips_read_count_running()
415 running = hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_read_count_running()
426 vcpu->arch.count_period); in kvm_mips_read_count_running()
427 hrtimer_start(&vcpu->arch.comparecount_timer, expires, in kvm_mips_read_count_running()
446 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_read_count()
476 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_freeze_hrtimer()
504 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_resume_hrtimer()
512 delta = div_u64(delta * NSEC_PER_SEC, vcpu->arch.count_hz); in kvm_mips_resume_hrtimer()
516 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_resume_hrtimer()
517 hrtimer_start(&vcpu->arch.comparecount_timer, expire, HRTIMER_MODE_ABS); in kvm_mips_resume_hrtimer()
551 before_count = vcpu->arch.count_bias + in kvm_mips_restore_hrtimer()
564 vcpu->arch.count_bias += drift; in kvm_mips_restore_hrtimer()
571 now_count = vcpu->arch.count_bias + kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_restore_hrtimer()
580 vcpu->arch.count_bias += drift; in kvm_mips_restore_hrtimer()
587 delta = div_u64(delta * NSEC_PER_SEC, vcpu->arch.count_hz); in kvm_mips_restore_hrtimer()
605 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_write_count()
610 vcpu->arch.count_bias = count - kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_write_count()
630 vcpu->arch.count_hz = count_hz; in kvm_mips_init_count()
631 vcpu->arch.count_period = div_u64((u64)NSEC_PER_SEC << 32, count_hz); in kvm_mips_init_count()
632 vcpu->arch.count_dyn_bias = 0; in kvm_mips_init_count()
651 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_set_count_hz()
660 if (vcpu->arch.count_hz == count_hz) in kvm_mips_set_count_hz()
673 vcpu->arch.count_hz = count_hz; in kvm_mips_set_count_hz()
674 vcpu->arch.count_period = div_u64((u64)NSEC_PER_SEC << 32, count_hz); in kvm_mips_set_count_hz()
675 vcpu->arch.count_dyn_bias = 0; in kvm_mips_set_count_hz()
678 vcpu->arch.count_bias = count - kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_set_count_hz()
698 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_write_compare()
783 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disable()
788 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_count_disable()
810 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disable_cause()
813 if (!(vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC)) in kvm_mips_count_disable_cause()
830 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_enable_cause()
856 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_set_count_ctl()
857 s64 changed = count_ctl ^ vcpu->arch.count_ctl; in kvm_mips_set_count_ctl()
867 vcpu->arch.count_ctl = count_ctl; in kvm_mips_set_count_ctl()
875 vcpu->arch.count_resume = ktime_get(); in kvm_mips_set_count_ctl()
878 vcpu->arch.count_resume = kvm_mips_count_disable(vcpu); in kvm_mips_set_count_ctl()
888 vcpu->arch.count_hz); in kvm_mips_set_count_ctl()
889 expire = ktime_add_ns(vcpu->arch.count_resume, delta); in kvm_mips_set_count_ctl()
926 vcpu->arch.count_resume = ns_to_ktime(count_resume); in kvm_mips_set_count_resume()
941 hrtimer_add_expires_ns(&vcpu->arch.comparecount_timer, in kvm_mips_count_timeout()
942 vcpu->arch.count_period); in kvm_mips_count_timeout()
948 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_eret()
953 vcpu->arch.pc = kvm_read_c0_guest_errorepc(cop0); in kvm_mips_emul_eret()
955 kvm_debug("[%#lx] ERET to %#lx\n", vcpu->arch.pc, in kvm_mips_emul_eret()
958 vcpu->arch.pc = kvm_read_c0_guest_epc(cop0); in kvm_mips_emul_eret()
962 vcpu->arch.pc); in kvm_mips_emul_eret()
971 kvm_debug("[%#lx] !!!WAIT!!! (%#lx)\n", vcpu->arch.pc, in kvm_mips_emul_wait()
972 vcpu->arch.pending_exceptions); in kvm_mips_emul_wait()
976 if (!vcpu->arch.pending_exceptions) { in kvm_mips_emul_wait()
978 vcpu->arch.wait = 1; in kvm_mips_emul_wait()
997 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_change_entryhi()
998 struct mm_struct *kern_mm = &vcpu->arch.guest_kernel_mm; in kvm_mips_change_entryhi()
1031 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbr()
1033 unsigned long pc = vcpu->arch.pc; in kvm_mips_emul_tlbr()
1043 tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emul_tlbr()
1063 struct mm_struct *kern_mm = &vcpu->arch.guest_kernel_mm; in kvm_mips_invalidate_guest_tlb()
1064 struct mm_struct *user_mm = &vcpu->arch.guest_user_mm; in kvm_mips_invalidate_guest_tlb()
1104 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbwi()
1107 unsigned long pc = vcpu->arch.pc; in kvm_mips_emul_tlbwi()
1119 tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emul_tlbwi()
1140 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbwr()
1142 unsigned long pc = vcpu->arch.pc; in kvm_mips_emul_tlbwr()
1146 tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emul_tlbwr()
1165 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbp()
1167 unsigned long pc = vcpu->arch.pc; in kvm_mips_emul_tlbp()
1192 if (kvm_mips_guest_can_have_fpu(&vcpu->arch)) in kvm_mips_config1_wrmask()
1211 if (kvm_mips_guest_can_have_msa(&vcpu->arch)) in kvm_mips_config3_wrmask()
1247 if (kvm_mips_guest_has_msa(&vcpu->arch)) in kvm_mips_config5_wrmask()
1254 if (kvm_mips_guest_has_fpu(&vcpu->arch)) { in kvm_mips_config5_wrmask()
1267 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_CP0()
1276 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_CP0()
1320 vcpu->arch.gprs[rt] = in kvm_mips_emulate_CP0()
1323 vcpu->arch.gprs[rt] = 0x0; in kvm_mips_emulate_CP0()
1328 vcpu->arch.gprs[rt] = (s32)cop0->reg[rd][sel]; in kvm_mips_emulate_CP0()
1337 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1341 vcpu->arch.gprs[rt] = cop0->reg[rd][sel]; in kvm_mips_emulate_CP0()
1345 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1354 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1357 && (vcpu->arch.gprs[rt] >= in kvm_mips_emulate_CP0()
1360 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1370 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1373 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1377 kvm_mips_write_count(vcpu, vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1383 vcpu->arch.gprs[rt], in kvm_mips_emulate_CP0()
1389 val = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1400 if (!kvm_mips_guest_has_fpu(&vcpu->arch)) in kvm_mips_emulate_CP0()
1431 vcpu->arch.aux_inuse & KVM_MIPS_AUX_MSA) in kvm_mips_emulate_CP0()
1442 vcpu->arch.aux_inuse & KVM_MIPS_AUX_FPU) in kvm_mips_emulate_CP0()
1454 if (!kvm_mips_guest_has_fpu(&vcpu->arch)) in kvm_mips_emulate_CP0()
1461 val = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1477 vcpu->arch.aux_inuse & KVM_MIPS_AUX_FPU) in kvm_mips_emulate_CP0()
1487 vcpu->arch.aux_inuse & KVM_MIPS_AUX_MSA) in kvm_mips_emulate_CP0()
1498 new_cause = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1518 cop0->reg[rd][sel] = vcpu->arch.gprs[rt] & mask; in kvm_mips_emulate_CP0()
1520 cop0->reg[rd][sel] = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1529 vcpu->arch.pc, rt, rd, sel); in kvm_mips_emulate_CP0()
1532 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1541 vcpu->arch.gprs[rt] = in kvm_mips_emulate_CP0()
1546 vcpu->arch.pc); in kvm_mips_emulate_CP0()
1550 vcpu->arch.pc); in kvm_mips_emulate_CP0()
1570 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1571 vcpu->arch.gprs[rd] = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1576 vcpu->arch.pc, inst.c0r_format.rs); in kvm_mips_emulate_CP0()
1585 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_CP0()
1613 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_store()
1621 vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_store()
1629 *(u64 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1632 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1633 vcpu->arch.gprs[rt], *(u64 *)data); in kvm_mips_emulate_store()
1639 *(u32 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1642 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1643 vcpu->arch.gprs[rt], *(u32 *)data); in kvm_mips_emulate_store()
1648 *(u16 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1651 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1652 vcpu->arch.gprs[rt], *(u16 *)data); in kvm_mips_emulate_store()
1657 *(u8 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1660 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1661 vcpu->arch.gprs[rt], *(u8 *)data); in kvm_mips_emulate_store()
1666 vcpu->arch.host_cp0_badvaddr) & (~0x3); in kvm_mips_emulate_store()
1668 imme = vcpu->arch.host_cp0_badvaddr & 0x3; in kvm_mips_emulate_store()
1672 (vcpu->arch.gprs[rt] >> 24); in kvm_mips_emulate_store()
1676 (vcpu->arch.gprs[rt] >> 16); in kvm_mips_emulate_store()
1680 (vcpu->arch.gprs[rt] >> 8); in kvm_mips_emulate_store()
1683 *(u32 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1690 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1691 vcpu->arch.gprs[rt], *(u32 *)data); in kvm_mips_emulate_store()
1696 vcpu->arch.host_cp0_badvaddr) & (~0x3); in kvm_mips_emulate_store()
1698 imme = vcpu->arch.host_cp0_badvaddr & 0x3; in kvm_mips_emulate_store()
1701 *(u32 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1705 (vcpu->arch.gprs[rt] << 8); in kvm_mips_emulate_store()
1709 (vcpu->arch.gprs[rt] << 16); in kvm_mips_emulate_store()
1713 (vcpu->arch.gprs[rt] << 24); in kvm_mips_emulate_store()
1720 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1721 vcpu->arch.gprs[rt], *(u32 *)data); in kvm_mips_emulate_store()
1727 vcpu->arch.host_cp0_badvaddr) & (~0x7); in kvm_mips_emulate_store()
1730 imme = vcpu->arch.host_cp0_badvaddr & 0x7; in kvm_mips_emulate_store()
1734 ((vcpu->arch.gprs[rt] >> 56) & 0xff); in kvm_mips_emulate_store()
1738 ((vcpu->arch.gprs[rt] >> 48) & 0xffff); in kvm_mips_emulate_store()
1742 ((vcpu->arch.gprs[rt] >> 40) & 0xffffff); in kvm_mips_emulate_store()
1746 ((vcpu->arch.gprs[rt] >> 32) & 0xffffffff); in kvm_mips_emulate_store()
1750 ((vcpu->arch.gprs[rt] >> 24) & 0xffffffffff); in kvm_mips_emulate_store()
1754 ((vcpu->arch.gprs[rt] >> 16) & 0xffffffffffff); in kvm_mips_emulate_store()
1758 ((vcpu->arch.gprs[rt] >> 8) & 0xffffffffffffff); in kvm_mips_emulate_store()
1761 *(u64 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1768 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1769 vcpu->arch.gprs[rt], *(u64 *)data); in kvm_mips_emulate_store()
1774 vcpu->arch.host_cp0_badvaddr) & (~0x7); in kvm_mips_emulate_store()
1777 imme = vcpu->arch.host_cp0_badvaddr & 0x7; in kvm_mips_emulate_store()
1780 *(u64 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1784 (vcpu->arch.gprs[rt] << 8); in kvm_mips_emulate_store()
1788 (vcpu->arch.gprs[rt] << 16); in kvm_mips_emulate_store()
1792 (vcpu->arch.gprs[rt] << 24); in kvm_mips_emulate_store()
1796 (vcpu->arch.gprs[rt] << 32); in kvm_mips_emulate_store()
1800 (vcpu->arch.gprs[rt] << 40); in kvm_mips_emulate_store()
1804 (vcpu->arch.gprs[rt] << 48); in kvm_mips_emulate_store()
1808 (vcpu->arch.gprs[rt] << 56); in kvm_mips_emulate_store()
1815 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1816 vcpu->arch.gprs[rt], *(u64 *)data); in kvm_mips_emulate_store()
1834 *(u8 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1837 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1838 vcpu->arch.gprs[rt], *(u8 *)data); in kvm_mips_emulate_store()
1842 *(u16 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1845 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1846 vcpu->arch.gprs[rt], *(u16 *)data); in kvm_mips_emulate_store()
1850 *(u32 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1853 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1854 vcpu->arch.gprs[rt], *(u32 *)data); in kvm_mips_emulate_store()
1858 *(u64 *)data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1861 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1862 vcpu->arch.gprs[rt], *(u64 *)data); in kvm_mips_emulate_store()
1893 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_store()
1915 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_load()
1919 vcpu->arch.io_pc = vcpu->arch.pc; in kvm_mips_emulate_load()
1920 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_load()
1922 vcpu->arch.io_gpr = rt; in kvm_mips_emulate_load()
1925 vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_load()
1960 vcpu->arch.host_cp0_badvaddr) & (~0x3); in kvm_mips_emulate_load()
1963 imme = vcpu->arch.host_cp0_badvaddr & 0x3; in kvm_mips_emulate_load()
1984 vcpu->arch.host_cp0_badvaddr) & (~0x3); in kvm_mips_emulate_load()
1987 imme = vcpu->arch.host_cp0_badvaddr & 0x3; in kvm_mips_emulate_load()
2009 vcpu->arch.host_cp0_badvaddr) & (~0x7); in kvm_mips_emulate_load()
2012 imme = vcpu->arch.host_cp0_badvaddr & 0x7; in kvm_mips_emulate_load()
2045 vcpu->arch.host_cp0_badvaddr) & (~0x7); in kvm_mips_emulate_load()
2048 imme = vcpu->arch.host_cp0_badvaddr & 0x7; in kvm_mips_emulate_load()
2167 vcpu->arch.host_cp0_badvaddr = addr; in kvm_mips_guest_cache_op()
2168 vcpu->arch.pc = curr_pc; in kvm_mips_guest_cache_op()
2173 vcpu->arch.host_cp0_badvaddr = addr; in kvm_mips_guest_cache_op()
2174 vcpu->arch.pc = curr_pc; in kvm_mips_guest_cache_op()
2190 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_cache() local
2198 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_cache()
2212 va = arch->gprs[base] + offset; in kvm_mips_emulate_cache()
2215 cache, op, base, arch->gprs[base], offset); in kvm_mips_emulate_cache()
2224 vcpu->arch.pc, vcpu->arch.gprs[31], cache, op, base, in kvm_mips_emulate_cache()
2225 arch->gprs[base], offset); in kvm_mips_emulate_cache()
2301 cache, op, base, arch->gprs[base], offset); in kvm_mips_emulate_cache()
2308 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_cache()
2377 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_guest_exception_base()
2389 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_syscall()
2390 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_syscall() local
2395 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_syscall()
2403 kvm_debug("Delivering SYSCALL @ pc %#lx\n", arch->pc); in kvm_mips_emulate_syscall()
2409 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_syscall()
2423 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmiss_ld()
2424 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmiss_ld() local
2425 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmiss_ld()
2430 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmiss_ld()
2439 arch->pc); in kvm_mips_emulate_tlbmiss_ld()
2442 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x0; in kvm_mips_emulate_tlbmiss_ld()
2446 arch->pc); in kvm_mips_emulate_tlbmiss_ld()
2448 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbmiss_ld()
2455 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmiss_ld()
2466 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbinv_ld()
2467 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbinv_ld() local
2469 (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbinv_ld()
2474 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbinv_ld()
2483 arch->pc); in kvm_mips_emulate_tlbinv_ld()
2486 arch->pc); in kvm_mips_emulate_tlbinv_ld()
2490 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbinv_ld()
2496 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbinv_ld()
2507 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmiss_st()
2508 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmiss_st() local
2509 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmiss_st()
2514 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmiss_st()
2523 arch->pc); in kvm_mips_emulate_tlbmiss_st()
2526 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x0; in kvm_mips_emulate_tlbmiss_st()
2529 arch->pc); in kvm_mips_emulate_tlbmiss_st()
2530 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbmiss_st()
2537 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmiss_st()
2548 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbinv_st()
2549 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbinv_st() local
2550 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbinv_st()
2555 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbinv_st()
2564 arch->pc); in kvm_mips_emulate_tlbinv_st()
2567 arch->pc); in kvm_mips_emulate_tlbinv_st()
2571 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbinv_st()
2577 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbinv_st()
2588 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmod()
2589 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmod()
2591 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmod() local
2595 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmod()
2604 arch->pc); in kvm_mips_emulate_tlbmod()
2607 arch->pc); in kvm_mips_emulate_tlbmod()
2610 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_tlbmod()
2616 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmod()
2627 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_fpu_exc()
2628 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_fpu_exc() local
2632 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_fpu_exc()
2642 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_fpu_exc()
2655 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_ri_exc()
2656 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_ri_exc() local
2661 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_ri_exc()
2669 kvm_debug("Delivering RI @ pc %#lx\n", arch->pc); in kvm_mips_emulate_ri_exc()
2675 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_ri_exc()
2689 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_bp_exc()
2690 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_bp_exc() local
2695 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_bp_exc()
2703 kvm_debug("Delivering BP @ pc %#lx\n", arch->pc); in kvm_mips_emulate_bp_exc()
2709 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_bp_exc()
2723 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_trap_exc()
2724 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_trap_exc() local
2729 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_trap_exc()
2737 kvm_debug("Delivering TRAP @ pc %#lx\n", arch->pc); in kvm_mips_emulate_trap_exc()
2743 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_trap_exc()
2757 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_msafpe_exc()
2758 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_msafpe_exc() local
2763 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_msafpe_exc()
2771 kvm_debug("Delivering MSAFPE @ pc %#lx\n", arch->pc); in kvm_mips_emulate_msafpe_exc()
2777 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_msafpe_exc()
2791 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_fpe_exc()
2792 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_fpe_exc() local
2797 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_fpe_exc()
2805 kvm_debug("Delivering FPE @ pc %#lx\n", arch->pc); in kvm_mips_emulate_fpe_exc()
2811 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_fpe_exc()
2825 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_msadis_exc()
2826 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_msadis_exc() local
2831 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_msadis_exc()
2839 kvm_debug("Delivering MSADIS @ pc %#lx\n", arch->pc); in kvm_mips_emulate_msadis_exc()
2845 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_msadis_exc()
2858 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_handle_ri()
2859 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_handle_ri() local
2869 curr_pc = vcpu->arch.pc; in kvm_mips_handle_ri()
2900 arch->gprs[rt] = vcpu->vcpu_id; in kvm_mips_handle_ri()
2903 arch->gprs[rt] = min(current_cpu_data.dcache.linesz, in kvm_mips_handle_ri()
2907 arch->gprs[rt] = (s32)kvm_mips_read_count(vcpu); in kvm_mips_handle_ri()
2913 arch->gprs[rt] = 1; in kvm_mips_handle_ri()
2916 arch->gprs[rt] = 2; in kvm_mips_handle_ri()
2920 arch->gprs[rt] = kvm_read_c0_guest_userlocal(cop0); in kvm_mips_handle_ri()
2929 vcpu->arch.gprs[rt]); in kvm_mips_handle_ri()
2943 vcpu->arch.pc = curr_pc; in kvm_mips_handle_ri()
2950 unsigned long *gpr = &vcpu->arch.gprs[vcpu->arch.io_gpr]; in kvm_mips_complete_mmio_load()
2960 vcpu->arch.pc = vcpu->arch.io_pc; in kvm_mips_complete_mmio_load()
2966 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffffffffffff) | in kvm_mips_complete_mmio_load()
2970 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffffffffff) | in kvm_mips_complete_mmio_load()
2974 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffffffff) | in kvm_mips_complete_mmio_load()
2978 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffffff) | in kvm_mips_complete_mmio_load()
2982 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffff) | in kvm_mips_complete_mmio_load()
2986 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffff) | in kvm_mips_complete_mmio_load()
2990 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xff) | in kvm_mips_complete_mmio_load()
2998 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xff00000000000000) | in kvm_mips_complete_mmio_load()
3002 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffff000000000000) | in kvm_mips_complete_mmio_load()
3006 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffff0000000000) | in kvm_mips_complete_mmio_load()
3010 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffffff00000000) | in kvm_mips_complete_mmio_load()
3014 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffffffff000000) | in kvm_mips_complete_mmio_load()
3018 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffffffffff0000) | in kvm_mips_complete_mmio_load()
3022 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffffffffffff00) | in kvm_mips_complete_mmio_load()
3039 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffff) | in kvm_mips_complete_mmio_load()
3043 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffff) | in kvm_mips_complete_mmio_load()
3047 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xff) | in kvm_mips_complete_mmio_load()
3055 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xff000000) | in kvm_mips_complete_mmio_load()
3059 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffff0000) | in kvm_mips_complete_mmio_load()
3063 *gpr = (vcpu->arch.gprs[vcpu->arch.io_gpr] & 0xffffff00) | in kvm_mips_complete_mmio_load()
3095 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_exc()
3096 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_exc() local
3101 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_exc()
3113 arch->pc = kvm_mips_guest_exception_base(vcpu) + 0x180; in kvm_mips_emulate_exc()
3114 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_exc()
3133 unsigned long badvaddr = vcpu->arch.host_cp0_badvaddr; in kvm_mips_check_privilege()
3229 unsigned long va = vcpu->arch.host_cp0_badvaddr; in kvm_mips_handle_tlbmiss()
3233 vcpu->arch.host_cp0_badvaddr); in kvm_mips_handle_tlbmiss()
3243 (kvm_read_c0_guest_entryhi(vcpu->arch.cop0) & in kvm_mips_handle_tlbmiss()
3256 struct kvm_mips_tlb *tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_handle_tlbmiss()