Lines matching refs: shared. Every hit below is an access to vcpu->arch.shared, the per-vCPU shared register area (struct kvm_vcpu_arch_shared) that KVM's PowerPC BookE code keeps in sync with the guest; each match shows the source line number and the enclosing function.
76 printk("pc: %08lx msr: %08llx\n", vcpu->arch.pc, vcpu->arch.shared->msr); in kvmppc_dump_vcpu()
78 printk("srr0: %08llx srr1: %08llx\n", vcpu->arch.shared->srr0, in kvmppc_dump_vcpu()
79 vcpu->arch.shared->srr1); in kvmppc_dump_vcpu()
113 if (vcpu->arch.shared->msr & MSR_SPE) { in kvmppc_vcpu_sync_spe()
132 vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP; in kvmppc_vcpu_sync_fpu()
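The two sync helpers at 113 and 132 propagate guest MSR bits into the shadow MSR the hardware actually runs the guest with. A minimal sketch of the FP side, assuming the usual shape of this code; the config guard is an assumption, and the SPE helper at 113 follows the same pattern for MSR_SPE:

    static void kvmppc_vcpu_sync_fpu(struct kvm_vcpu *vcpu)
    {
    #if defined(CONFIG_PPC_FPU) && !defined(CONFIG_KVM_BOOKE_HV)
    	/* Mirror only the guest's MSR_FP bit into the shadow MSR;
    	 * the remaining shadow bits are managed elsewhere. */
    	vcpu->arch.shadow_msr &= ~MSR_FP;
    	vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP;
    #endif
    }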
142 u32 old_msr = vcpu->arch.shared->msr; in kvmppc_set_msr()
148 vcpu->arch.shared->msr = new_msr; in kvmppc_set_msr()
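kvmppc_set_msr (142, 148) is the single choke point for guest MSR writes: it records the new value in the shared page and then re-derives state that depends on it. A sketch assembled from the matched lines and the sync helpers above; the MMU notification call and any further follow-up calls are assumptions and vary across kernel versions:

    void kvmppc_set_msr(struct kvm_vcpu *vcpu, u32 new_msr)
    {
    	u32 old_msr = vcpu->arch.shared->msr;

    	vcpu->arch.shared->msr = new_msr;

    	/* Let the MMU react to translation-relevant MSR changes (assumed helper). */
    	kvmppc_mmu_msr_notify(vcpu, old_msr);

    	/* Resynchronize shadow state derived from guest MSR bits. */
    	kvmppc_vcpu_sync_spe(vcpu);
    	kvmppc_vcpu_sync_fpu(vcpu);
    }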
247 vcpu->arch.shared->srr0 = srr0; in set_guest_srr()
248 vcpu->arch.shared->srr1 = srr1; in set_guest_srr()
279 return vcpu->arch.shared->dar; in get_guest_dear()
288 vcpu->arch.shared->dar = dear; in set_guest_dear()
297 return vcpu->arch.shared->esr; in get_guest_esr()
306 vcpu->arch.shared->esr = esr; in set_guest_esr()
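The helpers at 247-306 are deliberately thin: every SRR0/SRR1, DEAR and ESR access in this file goes through them, so the decision to back those registers with the shared page (or with guest SPRs on HV-capable cores) only has to be made in one place. The DEAR pair, reconstructed directly from the matched lines at 279 and 288:

    static unsigned long get_guest_dear(struct kvm_vcpu *vcpu)
    {
    	/* DEAR is backed by the shared page (the field is named 'dar'). */
    	return vcpu->arch.shared->dar;
    }

    static void set_guest_dear(struct kvm_vcpu *vcpu, unsigned long dear)
    {
    	vcpu->arch.shared->dar = dear;
    }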
326 ulong crit_raw = vcpu->arch.shared->critical; in kvmppc_booke_irqprio_deliver()
331 ulong new_msr = vcpu->arch.shared->msr; in kvmppc_booke_irqprio_deliver()
334 if (!(vcpu->arch.shared->msr & MSR_SF)) { in kvmppc_booke_irqprio_deliver()
342 crit = crit && !(vcpu->arch.shared->msr & MSR_PR); in kvmppc_booke_irqprio_deliver()
376 allowed = vcpu->arch.shared->msr & MSR_CE; in kvmppc_booke_irqprio_deliver()
382 allowed = vcpu->arch.shared->msr & MSR_ME; in kvmppc_booke_irqprio_deliver()
392 allowed = vcpu->arch.shared->msr & MSR_EE; in kvmppc_booke_irqprio_deliver()
398 allowed = vcpu->arch.shared->msr & MSR_DE; in kvmppc_booke_irqprio_deliver()
409 vcpu->arch.shared->msr); in kvmppc_booke_irqprio_deliver()
413 vcpu->arch.shared->msr); in kvmppc_booke_irqprio_deliver()
417 vcpu->arch.shared->msr); in kvmppc_booke_irqprio_deliver()
421 vcpu->arch.shared->msr); in kvmppc_booke_irqprio_deliver()
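The block at 326-421 is kvmppc_booke_irqprio_deliver(), which consults the shared page twice: first for the paravirt critical-section check (a guest can publish a critical region in shared->critical to defer delivery), then for the MSR bits that gate each interrupt class (CE, ME, EE, DE). A compressed sketch of that gating, with the priority switch and the actual SRR/MSR update elided; variable names follow the matched lines, the rest is assumed from context:

    ulong crit_raw = vcpu->arch.shared->critical;
    ulong crit_r1 = kvmppc_get_gpr(vcpu, 1);
    bool crit;

    /* Truncate the critical-section indicators in 32-bit mode. */
    if (!(vcpu->arch.shared->msr & MSR_SF)) {
    	crit_raw &= 0xffffffff;
    	crit_r1 &= 0xffffffff;
    }

    /* A critical section is signalled by shared->critical matching r1,
     * and is only honoured while the guest is in supervisor mode. */
    crit = (crit_raw == crit_r1);
    crit = crit && !(vcpu->arch.shared->msr & MSR_PR);

    switch (priority) {
    case BOOKE_IRQPRIO_EXTERNAL:
    	allowed = vcpu->arch.shared->msr & MSR_EE;
    	break;
    /* the critical, machine-check and debug classes are gated by
     * CE, ME and DE the same way (376, 382, 398) */
    }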
600 vcpu->arch.shared->int_pending = !!*pending; in kvmppc_core_check_exceptions()
616 if (vcpu->arch.shared->msr & MSR_WE) { in kvmppc_core_prepare_to_enter()
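At 600 the aggregated pending-exception state is mirrored into shared->int_pending so a paravirt guest can check it without trapping, and at 616 a guest that has set MSR_WE (wait enable) is put to sleep on the host side instead of busy-waiting. A rough sketch of the MSR_WE path; the exact bookkeeping around the block call is version-dependent and assumed here:

    if (vcpu->arch.shared->msr & MSR_WE) {
    	local_irq_enable();
    	kvm_vcpu_block(vcpu);	/* sleep until an interrupt is pending for the guest */
    	local_irq_disable();
    }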
923 if (vcpu->arch.shared->msr & (MSR_PR | MSR_GS)) { in kvmppc_handle_exit()
949 if (vcpu->arch.shared->msr & MSR_SPE) in kvmppc_handle_exit()
1011 if (!(vcpu->arch.shared->msr & MSR_PR)) { in kvmppc_handle_exit()
1025 if (!(vcpu->arch.shared->msr & MSR_PR) && in kvmppc_handle_exit()
1046 if (!(vcpu->arch.shared->msr & MSR_PR) && in kvmppc_handle_exit()
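In kvmppc_handle_exit() the shared MSR decides who owns a fault. The check at 923 reflects program interrupts back into the guest when they came from guest user mode (MSR_PR) or, under HV, from guest state (MSR_GS), while 1011, 1025 and 1046 are the converse checks that only accept certain hypercall/debug paths from guest kernel mode. A sketch of the pattern at 923; the queueing helper and the resume handling are assumed from context:

    case BOOKE_INTERRUPT_PROGRAM:
    	if (vcpu->arch.shared->msr & (MSR_PR | MSR_GS)) {
    		/* The guest's own kernel must handle this one: queue a
    		 * program interrupt for the guest instead of emulating
    		 * the instruction in the host. */
    		kvmppc_core_queue_program(vcpu, vcpu->arch.fault_esr);
    		r = RESUME_GUEST;
    		break;
    	}
    	/* Otherwise fall through to host-side instruction emulation. */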
1197 vcpu->arch.shared->pir = vcpu->vcpu_id; in kvm_arch_vcpu_setup()
1204 vcpu->arch.shared->msr = 0; in kvm_arch_vcpu_setup()
1244 regs->msr = vcpu->arch.shared->msr; in kvm_arch_vcpu_ioctl_get_regs()
1245 regs->srr0 = vcpu->arch.shared->srr0; in kvm_arch_vcpu_ioctl_get_regs()
1246 regs->srr1 = vcpu->arch.shared->srr1; in kvm_arch_vcpu_ioctl_get_regs()
1248 regs->sprg0 = vcpu->arch.shared->sprg0; in kvm_arch_vcpu_ioctl_get_regs()
1249 regs->sprg1 = vcpu->arch.shared->sprg1; in kvm_arch_vcpu_ioctl_get_regs()
1250 regs->sprg2 = vcpu->arch.shared->sprg2; in kvm_arch_vcpu_ioctl_get_regs()
1251 regs->sprg3 = vcpu->arch.shared->sprg3; in kvm_arch_vcpu_ioctl_get_regs()
1252 regs->sprg4 = vcpu->arch.shared->sprg4; in kvm_arch_vcpu_ioctl_get_regs()
1253 regs->sprg5 = vcpu->arch.shared->sprg5; in kvm_arch_vcpu_ioctl_get_regs()
1254 regs->sprg6 = vcpu->arch.shared->sprg6; in kvm_arch_vcpu_ioctl_get_regs()
1255 regs->sprg7 = vcpu->arch.shared->sprg7; in kvm_arch_vcpu_ioctl_get_regs()
1273 vcpu->arch.shared->srr0 = regs->srr0; in kvm_arch_vcpu_ioctl_set_regs()
1274 vcpu->arch.shared->srr1 = regs->srr1; in kvm_arch_vcpu_ioctl_set_regs()
1276 vcpu->arch.shared->sprg0 = regs->sprg0; in kvm_arch_vcpu_ioctl_set_regs()
1277 vcpu->arch.shared->sprg1 = regs->sprg1; in kvm_arch_vcpu_ioctl_set_regs()
1278 vcpu->arch.shared->sprg2 = regs->sprg2; in kvm_arch_vcpu_ioctl_set_regs()
1279 vcpu->arch.shared->sprg3 = regs->sprg3; in kvm_arch_vcpu_ioctl_set_regs()
1280 vcpu->arch.shared->sprg4 = regs->sprg4; in kvm_arch_vcpu_ioctl_set_regs()
1281 vcpu->arch.shared->sprg5 = regs->sprg5; in kvm_arch_vcpu_ioctl_set_regs()
1282 vcpu->arch.shared->sprg6 = regs->sprg6; in kvm_arch_vcpu_ioctl_set_regs()
1283 vcpu->arch.shared->sprg7 = regs->sprg7; in kvm_arch_vcpu_ioctl_set_regs()
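The KVM_GET_REGS/KVM_SET_REGS ioctls (1244-1283) copy the guest-visible register file through the shared page in both directions. Note that the set path writes SRR0/SRR1 and SPRG0-7 directly but has no raw store to shared->msr, even though the get side reads shared->msr at 1244; MSR updates from userspace presumably go through kvmppc_set_msr (142) so the shadow state stays coherent. A sketch of the set direction; everything outside the matched lines (pc, gprs, return value) is assumed:

    int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs)
    {
    	kvmppc_set_msr(vcpu, regs->msr);	/* not a direct shared->msr store */
    	vcpu->arch.shared->srr0 = regs->srr0;
    	vcpu->arch.shared->srr1 = regs->srr1;
    	vcpu->arch.shared->sprg0 = regs->sprg0;
    	vcpu->arch.shared->sprg1 = regs->sprg1;
    	vcpu->arch.shared->sprg2 = regs->sprg2;
    	vcpu->arch.shared->sprg3 = regs->sprg3;
    	/* sprg4-sprg7 continue the same pattern (1280-1283); pc, gprs and
    	 * the other non-shared registers are omitted here. */
    	return 0;
    }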