/arch/x86/xen/ |
D | grant-table.c |
     84  void *shared = *__shared;    in arch_gnttab_map_shared() local
     86  if (shared == NULL) {    in arch_gnttab_map_shared()
     90  shared = area->addr;    in arch_gnttab_map_shared()
     91  *__shared = shared;    in arch_gnttab_map_shared()
     94  rc = apply_to_page_range(&init_mm, (unsigned long)shared,    in arch_gnttab_map_shared()
    105  grant_status_t *shared = *__shared;    in arch_gnttab_map_status() local
    107  if (shared == NULL) {    in arch_gnttab_map_status()
    113  shared = area->addr;    in arch_gnttab_map_status()
    114  *__shared = shared;    in arch_gnttab_map_status()
    117  rc = apply_to_page_range(&init_mm, (unsigned long)shared,    in arch_gnttab_map_status()
    [all …]
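Both x86 map functions follow the same lazy-allocate-and-cache shape: reuse the pointer cached through `__shared`, allocate the virtual area only on the first call, then install the mappings over it. A minimal user-space analogue of that shape (illustrative names; `calloc` stands in for `alloc_vm_area()` and the `memset` for `apply_to_page_range()`):

```c
#include <stdlib.h>
#include <string.h>

#define FRAME_SIZE 4096

static int map_shared_frames(void **__shared, size_t nr_frames)
{
    void *shared = *__shared;

    if (shared == NULL) {
        /* stands in for alloc_vm_area() reserving address space once */
        shared = calloc(nr_frames, FRAME_SIZE);
        if (shared == NULL)
            return -1;
        *__shared = shared;   /* cache so later grow calls skip allocation */
    }

    /* the kernel instead calls apply_to_page_range() here to point the
     * reserved pages at the hypervisor-provided grant frames */
    memset(shared, 0, nr_frames * FRAME_SIZE);
    return 0;
}
```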
|
/arch/powerpc/kvm/ |
D | e500_mmu.c |
     74  esel += gtlb0_set_base(vcpu_e500, vcpu->arch.shared->mas2);    in get_tlb_esel()
    137  tlbsel = (vcpu->arch.shared->mas4 >> 28) & 0x1;    in kvmppc_e500_deliver_tlb_miss()
    139  tsized = (vcpu->arch.shared->mas4 >> 7) & 0x1f;    in kvmppc_e500_deliver_tlb_miss()
    141  vcpu->arch.shared->mas0 = MAS0_TLBSEL(tlbsel) | MAS0_ESEL(victim)    in kvmppc_e500_deliver_tlb_miss()
    143  vcpu->arch.shared->mas1 = MAS1_VALID | (as ? MAS1_TS : 0)    in kvmppc_e500_deliver_tlb_miss()
    146  vcpu->arch.shared->mas2 = (eaddr & MAS2_EPN)    in kvmppc_e500_deliver_tlb_miss()
    147  | (vcpu->arch.shared->mas4 & MAS2_ATTRIB_MASK);    in kvmppc_e500_deliver_tlb_miss()
    148  vcpu->arch.shared->mas7_3 &= MAS3_U0 | MAS3_U1 | MAS3_U2 | MAS3_U3;    in kvmppc_e500_deliver_tlb_miss()
    149  vcpu->arch.shared->mas6 = (vcpu->arch.shared->mas6 & MAS6_SPID1)    in kvmppc_e500_deliver_tlb_miss()
    335  vcpu->arch.shared->mas0 &= ~MAS0_NV(~0);    in kvmppc_e500_emul_tlbre()
    [all …]
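The miss handler rebuilds the guest's MAS registers from the defaults programmed into MAS4. A compilable sketch of that composition, with the field macros written out using the shifts visible in the hits (illustrative values, not copied from the kernel headers):

```c
#include <stdint.h>

#define MAS0_TLBSEL(x)  ((uint32_t)(x) << 28)
#define MAS0_ESEL(x)    ((uint32_t)(x) << 16)
#define MAS1_VALID      (1u << 31)
#define MAS1_TS         (1u << 12)

static void deliver_tlb_miss(uint32_t *mas0, uint32_t *mas1,
                             uint32_t mas4, int victim, int as)
{
    uint32_t tlbsel = (mas4 >> 28) & 0x1;   /* default TLB select from MAS4 */
    uint32_t tsized = (mas4 >> 7)  & 0x1f;  /* default page size from MAS4  */

    *mas0 = MAS0_TLBSEL(tlbsel) | MAS0_ESEL(victim);
    *mas1 = MAS1_VALID | (as ? MAS1_TS : 0) | (tsized << 7);
}
```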
|
D | book3s.c |
     74  vcpu->arch.shared->srr0 = kvmppc_get_pc(vcpu);    in kvmppc_inject_interrupt()
     75  vcpu->arch.shared->srr1 = vcpu->arch.shared->msr | flags;    in kvmppc_inject_interrupt()
    177  deliver = (vcpu->arch.shared->msr & MSR_EE) && !crit;    in kvmppc_book3s_irqprio_deliver()
    182  deliver = (vcpu->arch.shared->msr & MSR_EE) && !crit;    in kvmppc_book3s_irqprio_deliver()
    293  if (!(vcpu->arch.shared->msr & MSR_SF))    in kvmppc_gfn_to_pfn()
    300  ulong shared_page = ((ulong)vcpu->arch.shared) & PAGE_MASK;    in kvmppc_gfn_to_pfn()
    314  int relocated = (vcpu->arch.shared->msr & (data ? MSR_DR : MSR_IR));    in kvmppc_xlate()
    431  regs->msr = vcpu->arch.shared->msr;    in kvm_arch_vcpu_ioctl_get_regs()
    432  regs->srr0 = vcpu->arch.shared->srr0;    in kvm_arch_vcpu_ioctl_get_regs()
    433  regs->srr1 = vcpu->arch.shared->srr1;    in kvm_arch_vcpu_ioctl_get_regs()
    [all …]
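The injection hits show the classic PowerPC exception-entry contract applied to the shared page: the resume address goes to SRR0 and the pre-interrupt MSR (plus exception flags) to SRR1. A simplified rendering of that sequence (the struct is a stand-in for the kernel's `kvm_vcpu_arch_shared`, not its real layout):

```c
#include <stdint.h>

struct shared_regs {
    uint64_t srr0, srr1, msr;
};

static void inject_interrupt(struct shared_regs *s, uint64_t pc, uint64_t flags)
{
    s->srr0 = pc;               /* where the guest will resume (rfi target) */
    s->srr1 = s->msr | flags;   /* pre-interrupt MSR plus exception flags   */
    /* the real code then switches the guest MSR and jumps to the vector */
}
```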
|
D | e500_emulate.c |
     74  int cpir = cvcpu->arch.shared->pir;    in kvmppc_e500_emul_msgsnd()
    170  vcpu->arch.shared->mas0 = spr_val;    in kvmppc_core_emulate_mtspr()
    173  vcpu->arch.shared->mas1 = spr_val;    in kvmppc_core_emulate_mtspr()
    176  vcpu->arch.shared->mas2 = spr_val;    in kvmppc_core_emulate_mtspr()
    179  vcpu->arch.shared->mas7_3 &= ~(u64)0xffffffff;    in kvmppc_core_emulate_mtspr()
    180  vcpu->arch.shared->mas7_3 |= spr_val;    in kvmppc_core_emulate_mtspr()
    183  vcpu->arch.shared->mas4 = spr_val;    in kvmppc_core_emulate_mtspr()
    186  vcpu->arch.shared->mas6 = spr_val;    in kvmppc_core_emulate_mtspr()
    189  vcpu->arch.shared->mas7_3 &= (u64)0xffffffff;    in kvmppc_core_emulate_mtspr()
    190  vcpu->arch.shared->mas7_3 |= (u64)spr_val << 32;    in kvmppc_core_emulate_mtspr()
    [all …]
|
D | e500mc.c |
    129  mtspr(SPRN_GSPRG0, (unsigned long)vcpu->arch.shared->sprg0);    in kvmppc_core_vcpu_load()
    130  mtspr(SPRN_GSPRG1, (unsigned long)vcpu->arch.shared->sprg1);    in kvmppc_core_vcpu_load()
    131  mtspr(SPRN_GSPRG2, (unsigned long)vcpu->arch.shared->sprg2);    in kvmppc_core_vcpu_load()
    132  mtspr(SPRN_GSPRG3, (unsigned long)vcpu->arch.shared->sprg3);    in kvmppc_core_vcpu_load()
    134  mtspr(SPRN_GSRR0, vcpu->arch.shared->srr0);    in kvmppc_core_vcpu_load()
    135  mtspr(SPRN_GSRR1, vcpu->arch.shared->srr1);    in kvmppc_core_vcpu_load()
    138  mtspr(SPRN_GDEAR, vcpu->arch.shared->dar);    in kvmppc_core_vcpu_load()
    139  mtspr(SPRN_GESR, vcpu->arch.shared->esr);    in kvmppc_core_vcpu_load()
    155  vcpu->arch.shared->sprg0 = mfspr(SPRN_GSPRG0);    in kvmppc_core_vcpu_put()
    156  vcpu->arch.shared->sprg1 = mfspr(SPRN_GSPRG1);    in kvmppc_core_vcpu_put()
    [all …]
|
D | booke_emulate.c |
     36  vcpu->arch.pc = vcpu->arch.shared->srr0;    in kvmppc_emul_rfi()
     37  kvmppc_set_msr(vcpu, vcpu->arch.shared->srr1);    in kvmppc_emul_rfi()
     78  kvmppc_set_gpr(vcpu, rt, vcpu->arch.shared->msr);    in kvmppc_booke_emulate_op()
     88  vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE)    in kvmppc_booke_emulate_op()
     94  vcpu->arch.shared->msr = (vcpu->arch.shared->msr & ~MSR_EE)    in kvmppc_booke_emulate_op()
    124  vcpu->arch.shared->dar = spr_val;    in kvmppc_booke_emulate_mtspr()
    127  vcpu->arch.shared->esr = spr_val;    in kvmppc_booke_emulate_mtspr()
    168  vcpu->arch.shared->sprg4 = spr_val;    in kvmppc_booke_emulate_mtspr()
    171  vcpu->arch.shared->sprg5 = spr_val;    in kvmppc_booke_emulate_mtspr()
    174  vcpu->arch.shared->sprg6 = spr_val;    in kvmppc_booke_emulate_mtspr()
    [all …]
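The rfi emulation at the top is the mirror image of interrupt delivery: restore the program counter and MSR that were parked in SRR0/SRR1 at exception entry. A simplified rendering (stand-in types, not the kernel's struct layout):

```c
#include <stdint.h>

struct vcpu_state {
    uint64_t pc, msr;
    struct { uint64_t srr0, srr1; } shared;
};

static void emulate_rfi(struct vcpu_state *v)
{
    v->pc  = v->shared.srr0;   /* return address saved at exception entry */
    v->msr = v->shared.srr1;   /* pre-exception machine state; the real
                                * code routes this through kvmppc_set_msr() */
}
```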
|
D | e500.h |
    215  return !!(vcpu->arch.shared->msr & (MSR_IS | MSR_DS));    in get_cur_as()
    220  return !!(vcpu->arch.shared->msr & MSR_PR);    in get_cur_pr()
    225  return (vcpu->arch.shared->mas6 >> 16) & 0xff;    in get_cur_spid()
    230  return vcpu->arch.shared->mas6 & 0x1;    in get_cur_sas()
    239  return (vcpu->arch.shared->mas0 >> 28) & 0x1;    in get_tlb_tlbsel()
    244  return vcpu->arch.shared->mas0 & 0xfff;    in get_tlb_nv_bit()
    249  return (vcpu->arch.shared->mas0 >> 16) & 0xfff;    in get_tlb_esel_bit()
    263  if (get_tlb_ts(tlbe) != !!(vcpu->arch.shared->msr & MSR_IS))    in tlbe_is_host_safe()
    297  unsigned int tidseld = (vcpu->arch.shared->mas4 >> 16) & 0xf;    in get_tlbmiss_tid()
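These accessors are plain mask-and-shift helpers over the shared MAS registers. A standalone rendering of a few of them, using the exact shifts and masks from the hits, with `vcpu->arch.shared` reduced to raw register values:

```c
#include <stdint.h>
#include <stdio.h>

static unsigned get_tlb_tlbsel(uint32_t mas0)   { return (mas0 >> 28) & 0x1;  }
static unsigned get_tlb_esel_bit(uint32_t mas0) { return (mas0 >> 16) & 0xfff; }
static unsigned get_tlb_nv_bit(uint32_t mas0)   { return mas0 & 0xfff;        }
static unsigned get_cur_spid(uint32_t mas6)     { return (mas6 >> 16) & 0xff; }
static unsigned get_cur_sas(uint32_t mas6)      { return mas6 & 0x1;          }

int main(void)
{
    uint32_t mas0 = (1u << 28) | (5u << 16) | 3u;
    printf("tlbsel=%u esel=%u nv=%u\n",
           get_tlb_tlbsel(mas0), get_tlb_esel_bit(mas0), get_tlb_nv_bit(mas0));
    return 0;   /* prints: tlbsel=1 esel=5 nv=3 */
}
```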
|
D | booke.c |
     76  printk("pc: %08lx msr: %08llx\n", vcpu->arch.pc, vcpu->arch.shared->msr);    in kvmppc_dump_vcpu()
     78  printk("srr0: %08llx srr1: %08llx\n", vcpu->arch.shared->srr0,    in kvmppc_dump_vcpu()
     79  vcpu->arch.shared->srr1);    in kvmppc_dump_vcpu()
    113  if (vcpu->arch.shared->msr & MSR_SPE) {    in kvmppc_vcpu_sync_spe()
    132  vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP;    in kvmppc_vcpu_sync_fpu()
    142  u32 old_msr = vcpu->arch.shared->msr;    in kvmppc_set_msr()
    148  vcpu->arch.shared->msr = new_msr;    in kvmppc_set_msr()
    247  vcpu->arch.shared->srr0 = srr0;    in set_guest_srr()
    248  vcpu->arch.shared->srr1 = srr1;    in set_guest_srr()
    279  return vcpu->arch.shared->dar;    in get_guest_dear()
    [all …]
|
D | emulate.c |
    143  vcpu->arch.shared->srr0 = spr_val;    in kvmppc_emulate_mtspr()
    146  vcpu->arch.shared->srr1 = spr_val;    in kvmppc_emulate_mtspr()
    160  vcpu->arch.shared->sprg0 = spr_val;    in kvmppc_emulate_mtspr()
    163  vcpu->arch.shared->sprg1 = spr_val;    in kvmppc_emulate_mtspr()
    166  vcpu->arch.shared->sprg2 = spr_val;    in kvmppc_emulate_mtspr()
    169  vcpu->arch.shared->sprg3 = spr_val;    in kvmppc_emulate_mtspr()
    193  spr_val = vcpu->arch.shared->srr0;    in kvmppc_emulate_mfspr()
    196  spr_val = vcpu->arch.shared->srr1;    in kvmppc_emulate_mfspr()
    216  spr_val = vcpu->arch.shared->sprg0;    in kvmppc_emulate_mfspr()
    219  spr_val = vcpu->arch.shared->sprg1;    in kvmppc_emulate_mfspr()
    [all …]
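Both emulation paths are a switch on the SPR number that reads or writes the corresponding shared-page field. A sketch of the mtspr side, using the architected PowerPC SPR numbers (SRR0=26, SRR1=27, SPRG0=272); the struct is a stand-in:

```c
#include <stdint.h>

enum { SPRN_SRR0 = 26, SPRN_SRR1 = 27, SPRN_SPRG0 = 272 };

struct shared_page { uint64_t srr0, srr1, sprg0; };

static int emulate_mtspr(struct shared_page *s, int sprn, uint64_t spr_val)
{
    switch (sprn) {
    case SPRN_SRR0:  s->srr0  = spr_val; break;
    case SPRN_SRR1:  s->srr1  = spr_val; break;
    case SPRN_SPRG0: s->sprg0 = spr_val; break;
    default:
        return -1;   /* unhandled SPR: punt back to the caller */
    }
    return 0;
}
```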
|
D | book3s_pr.c |
    147  ulong smsr = vcpu->arch.shared->msr;    in kvmppc_recalc_shadow_msr()
    154  smsr |= (vcpu->arch.shared->msr & vcpu->arch.guest_owned_ext);    in kvmppc_recalc_shadow_msr()
    164  ulong old_msr = vcpu->arch.shared->msr;    in kvmppc_set_msr()
    171  vcpu->arch.shared->msr = msr;    in kvmppc_set_msr()
    182  vcpu->arch.shared->msr = msr;    in kvmppc_set_msr()
    186  if ((vcpu->arch.shared->msr & (MSR_PR|MSR_IR|MSR_DR)) !=    in kvmppc_set_msr()
    218  if (vcpu->arch.shared->msr & MSR_FP)    in kvmppc_set_msr()
    322  if (!(vcpu->arch.shared->msr & MSR_SF))    in kvmppc_visible_gfn()
    342  bool dr = (vcpu->arch.shared->msr & MSR_DR) ? true : false;    in kvmppc_handle_pagefault()
    343  bool ir = (vcpu->arch.shared->msr & MSR_IR) ? true : false;    in kvmppc_handle_pagefault()
    [all …]
|
D | book3s_emulate.c |
     83  if ((vcpu->arch.shared->msr & MSR_PR) && level > PRIV_PROBLEM)    in spr_allowed()
    103  kvmppc_set_pc(vcpu, vcpu->arch.shared->srr0);    in kvmppc_core_emulate_op()
    104  kvmppc_set_msr(vcpu, vcpu->arch.shared->srr1);    in kvmppc_core_emulate_op()
    116  kvmppc_set_gpr(vcpu, rt, vcpu->arch.shared->msr);    in kvmppc_core_emulate_op()
    122  ulong new_msr = vcpu->arch.shared->msr;    in kvmppc_core_emulate_op()
    125  vcpu->arch.shared->msr = new_msr;    in kvmppc_core_emulate_op()
    182  if ((vcpu->arch.shared->msr & MSR_PR) ||    in kvmppc_core_emulate_op()
    264  if (!(vcpu->arch.shared->msr & MSR_SF))    in kvmppc_core_emulate_op()
    274  vcpu->arch.shared->dar = vaddr;    in kvmppc_core_emulate_op()
    283  vcpu->arch.shared->dsisr = dsisr;    in kvmppc_core_emulate_op()
    [all …]
|
D | book3s_32_mmu.c |
     93  return vcpu->arch.shared->sr[(eaddr >> 28) & 0xf];    in find_sr()
    160  if (vcpu->arch.shared->msr & MSR_PR) {    in kvmppc_mmu_book3s_32_xlate_bat()
    241  if ((sr_kp(sre) && (vcpu->arch.shared->msr & MSR_PR)) ||    in kvmppc_mmu_book3s_32_xlate_pte()
    242  (sr_ks(sre) && !(vcpu->arch.shared->msr & MSR_PR)))    in kvmppc_mmu_book3s_32_xlate_pte()
    315  !(vcpu->arch.shared->msr & MSR_PR)) {    in kvmppc_mmu_book3s_32_xlate()
    338  return vcpu->arch.shared->sr[srnum];    in kvmppc_mmu_book3s_32_mfsrin()
    344  vcpu->arch.shared->sr[srnum] = value;    in kvmppc_mmu_book3s_32_mtsrin()
    360  if (vcpu->arch.shared->msr & (MSR_DR|MSR_IR)) {    in kvmppc_mmu_book3s_32_esid_to_vsid()
    369  switch (vcpu->arch.shared->msr & (MSR_DR|MSR_IR)) {    in kvmppc_mmu_book3s_32_esid_to_vsid()
    389  if (vcpu->arch.shared->msr & MSR_PR)    in kvmppc_mmu_book3s_32_esid_to_vsid()
|
D | book3s_64_mmu.c |
    177  !(vcpu->arch.shared->msr & MSR_PR)) {    in kvmppc_mmu_book3s_64_xlate()
    205  if ((vcpu->arch.shared->msr & MSR_PR) && slbe->Kp)    in kvmppc_mmu_book3s_64_xlate()
    207  else if (!(vcpu->arch.shared->msr & MSR_PR) && slbe->Ks)    in kvmppc_mmu_book3s_64_xlate()
    402  if (vcpu->arch.shared->msr & MSR_IR) {    in kvmppc_mmu_book3s_64_slbia()
    468  if (vcpu->arch.shared->msr & (MSR_DR|MSR_IR)) {    in kvmppc_mmu_book3s_64_esid_to_vsid()
    474  switch (vcpu->arch.shared->msr & (MSR_DR|MSR_IR)) {    in kvmppc_mmu_book3s_64_esid_to_vsid()
    495  if (vcpu->arch.shared->msr & MSR_PR)    in kvmppc_mmu_book3s_64_esid_to_vsid()
    504  !(vcpu->arch.shared->msr & MSR_PR)) {    in kvmppc_mmu_book3s_64_esid_to_vsid()
|
D | 44x.c |
    156  vcpu->arch.shared = (void*)__get_free_page(GFP_KERNEL|__GFP_ZERO);    in kvmppc_core_vcpu_create()
    157  if (!vcpu->arch.shared)    in kvmppc_core_vcpu_create()
    174  free_page((unsigned long)vcpu->arch.shared);    in kvmppc_core_vcpu_free()
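The 44x hits show the whole lifetime of the shared page: one zeroed kernel page allocated at vcpu creation and released at teardown. A kernel-style sketch of that pairing (error unwinding in the real code is more involved):

```c
#include <linux/gfp.h>

static void *alloc_shared_page(void)
{
    /* __GFP_ZERO: guest-visible state must not leak stale kernel memory */
    return (void *)__get_free_page(GFP_KERNEL | __GFP_ZERO);
}

static void free_shared_page(void *shared)
{
    free_page((unsigned long)shared);
}
```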
|
D | 44x_tlb.c |
    225  unsigned int as = !!(vcpu->arch.shared->msr & MSR_IS);    in kvmppc_mmu_itlb_index()
    232  unsigned int as = !!(vcpu->arch.shared->msr & MSR_DS);    in kvmppc_mmu_dtlb_index()
    357  vcpu->arch.shared->msr & MSR_PR);    in kvmppc_mmu_map()
    391  int usermode = vcpu->arch.shared->msr & MSR_PR;    in kvmppc_mmu_msr_notify()
    428  if (get_tlb_ts(tlbe) != !!(vcpu->arch.shared->msr & MSR_IS))    in tlbe_is_host_safe()
|
/arch/arm/mach-msm/ |
D | smd_debug.c |
    112  struct smem_shared *shared = (void *) MSM_SHARED_RAM_BASE;    in debug_read_mem() local
    113  struct smem_heap_entry *toc = shared->heap_toc;    in debug_read_mem()
    118  shared->heap_info.initialized,    in debug_read_mem()
    119  shared->heap_info.free_offset,    in debug_read_mem()
    120  shared->heap_info.heap_remaining);    in debug_read_mem()
    152  struct smem_shared *shared = (void *) MSM_SHARED_RAM_BASE;    in debug_read_version() local
    153  unsigned version = shared->version[VERSION_MODEM];    in debug_read_version()
    175  struct smd_alloc_elm *shared;    in debug_read_alloc_tbl() local
    178  shared = smem_find(ID_CH_ALLOC_TBL, sizeof(*shared) * 64);    in debug_read_alloc_tbl()
    181  if (shared[n].ref_count == 0)    in debug_read_alloc_tbl()
    [all …]
|
D | smd.c |
    299  struct smem_shared *shared = (void *) MSM_SHARED_RAM_BASE;    in do_smd_probe() local
    300  if (shared->heap_info.free_offset != last_heap_free) {    in do_smd_probe()
    301  last_heap_free = shared->heap_info.free_offset;    in do_smd_probe()
    613  struct smd_alloc_elm *shared;    in smd_channel_probe_worker() local
    618  shared = smem_find(ID_CH_ALLOC_TBL, sizeof(*shared) * 64);    in smd_channel_probe_worker()
    619  if (!shared) {    in smd_channel_probe_worker()
    626  if (!shared[n].ref_count)    in smd_channel_probe_worker()
    628  if (!shared[n].name[0])    in smd_channel_probe_worker()
    630  ctype = shared[n].ctype;    in smd_channel_probe_worker()
    636  if (!memcmp(shared[n].name, "DAL", 3))    in smd_channel_probe_worker()
    [all …]
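The do_smd_probe() hits poll the shared heap's `free_offset` and treat any movement of that watermark as "someone allocated, rescan the channel table". A small sketch of that detection step (stand-in types, not the MSM headers):

```c
#include <stdbool.h>
#include <stdint.h>

struct heap_info { uint32_t initialized, free_offset, heap_remaining; };

static bool heap_changed(const struct heap_info *h, uint32_t *last_free)
{
    if (h->free_offset != *last_free) {
        *last_free = h->free_offset;   /* remember the new watermark */
        return true;                   /* trigger a channel-table rescan */
    }
    return false;
}
```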
|
/arch/arm/xen/ |
D | grant-table.c |
     43  void arch_gnttab_unmap(void *shared, unsigned long nr_gframes)    in arch_gnttab_unmap() argument
|
/arch/um/ |
D | Makefile |
     21  MODE_INCLUDE += -I$(srctree)/$(ARCH_DIR)/include/shared/skas
     41  SHARED_HEADERS := $(ARCH_DIR)/include/shared
     43  ARCH_INCLUDE += -I$(srctree)/$(HOST_DIR)/um/shared
|
/arch/ia64/mm/ |
D | contig.c |
     55  int shared = 0, cached = 0, reserved = 0;    in show_mem() local
     81  shared += page_count(page)-1;    in show_mem()
     87  total_shared += shared;    in show_mem()
     90  present, reserved, shared, cached);    in show_mem()
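show_mem() counts a page's references beyond the first as "shared" uses of that page. The same arithmetic, simulated with plain ints standing in for `page_count()`:

```c
#include <stdio.h>

int main(void)
{
    int page_count[] = { 1, 3, 1, 2 };   /* pretend per-page refcounts */
    int shared = 0;

    for (int i = 0; i < 4; i++)
        shared += page_count[i] - 1;     /* mirrors: shared += page_count(page)-1 */

    printf("shared references: %d\n", shared);   /* prints 3 */
    return 0;
}
```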
|
/arch/x86/kernel/cpu/ |
D | perf_event_p4.c |
     26  unsigned int shared; /* event is shared across threads */    member
     83  .shared = 1,
    148  .shared = 1,
    210  .shared = 1,
    256  .shared = 1,
    264  .shared = 1,
    272  .shared = 1,
    280  .shared = 1,
    288  .shared = 1,
    296  .shared = 1,
    [all …]
|
/arch/arm/include/asm/ |
D | kvm_vgic.h |
     63  } shared;    member
     68  u32 shared[VGIC_NR_SHARED_IRQS / 4];    member
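The second hit sizes the array as `VGIC_NR_SHARED_IRQS / 4` u32 words, i.e. one byte of per-interrupt state packed four to a word. An illustrative accessor for such a byte-map (the constant's value is assumed here, not taken from the header):

```c
#include <stdint.h>

#define VGIC_NR_SHARED_IRQS 224   /* assumed value for the sketch */

static uint32_t shared[VGIC_NR_SHARED_IRQS / 4];

static uint8_t irq_byte(int irq)
{
    /* each u32 word holds four one-byte entries, lowest byte first */
    return (shared[irq / 4] >> (8 * (irq % 4))) & 0xff;
}
```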
|
/arch/powerpc/platforms/pseries/ |
D | Kconfig |
     28  bool "Support for shared-processor logical partitions"
     32  on logically-partitioned pSeries systems which use shared
     97  bool "Support for shared-memory logical partitions"
    102  Select this option to enable shared memory partition support.
|
/arch/ia64/xen/ |
D | grant-table.c |
     47  void arch_gnttab_unmap_shared(struct grant_entry *shared,    in arch_gnttab_unmap_shared() argument
|
/arch/powerpc/include/asm/ |
D | kvm_book3s.h |
    210  vcpu->arch.shared->int_pending = 1;    in kvmppc_update_int_pending()
    212  vcpu->arch.shared->int_pending = 0;    in kvmppc_update_int_pending()
    346  ulong crit_raw = vcpu->arch.shared->critical;    in kvmppc_critical_section()
    351  if (!(vcpu->arch.shared->msr & MSR_SF)) {    in kvmppc_critical_section()
    359  crit = crit && !(vcpu->arch.shared->msr & MSR_PR);    in kvmppc_critical_section()
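The shape of kvmppc_critical_section() suggested by these hits: the guest flags a critical section by storing its stack pointer (r1) in the shared page's `critical` field; a 32-bit guest (!MSR_SF) compares only the low word, and user mode (MSR_PR) is never critical. MSR bit positions are written out here for illustration:

```c
#include <stdbool.h>
#include <stdint.h>

#define MSR_SF (1ull << 63)   /* 64-bit mode */
#define MSR_PR (1ull << 14)   /* problem (user) state */

static bool in_critical_section(uint64_t critical, uint64_t r1, uint64_t msr)
{
    if (!(msr & MSR_SF)) {            /* 32-bit guest: truncate both sides */
        critical &= 0xffffffffull;
        r1       &= 0xffffffffull;
    }
    return critical == r1 && !(msr & MSR_PR);
}
```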
|