/arch/parisc/kernel/

    smp.c
        123  int this_cpu = smp_processor_id();    in ipi_interrupt() (local)
        124  struct cpuinfo_parisc *p = &per_cpu(cpu_data, this_cpu);    in ipi_interrupt()
        129  spinlock_t *lock = &per_cpu(ipi_lock, this_cpu);    in ipi_interrupt()
        147  smp_debug(100, KERN_DEBUG "CPU%d IPI_NOP\n", this_cpu);    in ipi_interrupt()
        151  smp_debug(100, KERN_DEBUG "CPU%d IPI_RESCHEDULE\n", this_cpu);    in ipi_interrupt()
        157  smp_debug(100, KERN_DEBUG "CPU%d IPI_CALL_FUNC\n", this_cpu);    in ipi_interrupt()
        163  smp_debug(100, KERN_DEBUG "CPU%d IPI_CPU_START\n", this_cpu);    in ipi_interrupt()
        167  smp_debug(100, KERN_DEBUG "CPU%d IPI_CPU_STOP\n", this_cpu);    in ipi_interrupt()
        172  smp_debug(100, KERN_DEBUG "CPU%d is alive!\n", this_cpu);    in ipi_interrupt()
        176  smp_debug(100, KERN_DEBUG "CPU%d ENTER_KGDB\n", this_cpu);    in ipi_interrupt()
        [all …]
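The ipi_interrupt() hits above show the canonical per-CPU IPI dispatch idiom: read smp_processor_id() once into a local, then use it to index per-CPU state. A minimal sketch of that shape, using hypothetical demo_* names rather than the actual parisc code:

        #include <linux/smp.h>
        #include <linux/percpu.h>
        #include <linux/printk.h>
        #include <linux/spinlock.h>

        static DEFINE_PER_CPU(spinlock_t, demo_ipi_lock);      /* spin_lock_init() at boot */
        static DEFINE_PER_CPU(unsigned long, demo_ipi_pending);

        /* Interrupt context: the CPU cannot change under us, so caching
         * smp_processor_id() in a local is safe for the whole handler. */
        static void demo_ipi_interrupt(void)
        {
                int this_cpu = smp_processor_id();
                spinlock_t *lock = &per_cpu(demo_ipi_lock, this_cpu);
                unsigned long ops;

                spin_lock(lock);
                ops = per_cpu(demo_ipi_pending, this_cpu);
                per_cpu(demo_ipi_pending, this_cpu) = 0;
                spin_unlock(lock);

                if (ops)
                        pr_debug("CPU%d handling IPI ops %#lx\n", this_cpu, ops);
        }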
/arch/sparc/kernel/

    process_64.c
        201  int this_cpu)    in __global_reg_self() (argument)
        207  rp = &global_cpu_snapshot[this_cpu].reg;    in __global_reg_self()
        253  int this_cpu, cpu;    in arch_trigger_cpumask_backtrace() (local)
        260  this_cpu = raw_smp_processor_id();    in arch_trigger_cpumask_backtrace()
        264  if (cpumask_test_cpu(this_cpu, mask) && !exclude_self)    in arch_trigger_cpumask_backtrace()
        265  __global_reg_self(tp, regs, this_cpu);    in arch_trigger_cpumask_backtrace()
        272  if (exclude_self && cpu == this_cpu)    in arch_trigger_cpumask_backtrace()
        281  (cpu == this_cpu ? '*' : ' '), cpu,    in arch_trigger_cpumask_backtrace()
        318  static void __global_pmu_self(int this_cpu)    in __global_pmu_self() (argument)
        326  pp = &global_cpu_snapshot[this_cpu].pmu;    in __global_pmu_self()
        [all …]
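arch_trigger_cpumask_backtrace() treats the requesting CPU specially: it snapshots its own registers directly (no IPI needed), honours exclude_self by skipping itself, and marks itself with '*' in the dump. A rough sketch of that shape; the demo_capture_* helpers are hypothetical stand-ins for the register-snapshot machinery:

        #include <linux/smp.h>
        #include <linux/cpumask.h>
        #include <linux/printk.h>
        #include <linux/types.h>

        /* Hypothetical stand-ins for the register-snapshot machinery. */
        static void demo_capture_self(int cpu)   { }
        static void demo_capture_remote(int cpu) { }

        static void demo_trigger_backtrace(const struct cpumask *mask, bool exclude_self)
        {
                int cpu, this_cpu = raw_smp_processor_id();

                /* The requesting CPU can snapshot itself without an IPI. */
                if (cpumask_test_cpu(this_cpu, mask) && !exclude_self)
                        demo_capture_self(this_cpu);

                for_each_cpu(cpu, mask) {
                        if (exclude_self && cpu == this_cpu)
                                continue;
                        if (cpu != this_cpu)
                                demo_capture_remote(cpu);
                        /* '*' marks the requesting CPU in the dump. */
                        pr_info("%c CPU %d\n", cpu == this_cpu ? '*' : ' ', cpu);
                }
        }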
    smp_64.c
        647  int this_cpu, tot_cpus, prev_sent, i, rem;    in hypervisor_xcall_deliver() (local)
        657  this_cpu = smp_processor_id();    in hypervisor_xcall_deliver()
        764  this_cpu, ecpuerror_id - 1);    in hypervisor_xcall_deliver()
        767  this_cpu, enocpu_id - 1);    in hypervisor_xcall_deliver()
        774  this_cpu, tot_cpus, tb->cpu_list_pa, tb->cpu_mondo_block_pa);    in hypervisor_xcall_deliver()
        780  this_cpu, first_cpu, (tot_retries + retries), tot_cpus);    in hypervisor_xcall_deliver()
        789  int this_cpu, i, cnt;    in xcall_deliver() (local)
        806  this_cpu = smp_processor_id();    in xcall_deliver()
        807  tb = &trap_block[this_cpu];    in xcall_deliver()
        820  if (i == this_cpu || !cpu_online(i))    in xcall_deliver()
        [all …]
    nmi.c
        70  int this_cpu = smp_processor_id();    in die_nmi() (local)
        77  panic("Watchdog detected hard LOCKUP on cpu %d", this_cpu);    in die_nmi()
        79  WARN(1, "Watchdog detected hard LOCKUP on cpu %d", this_cpu);    in die_nmi()
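die_nmi() attributes the hard lockup to the CPU that took the watchdog NMI, then either panics or merely warns depending on policy. A sketch of that shape; demo_hardlockup_panic is an assumed stand-in for the policy flag, which the snippet above does not show:

        #include <linux/smp.h>
        #include <linux/kernel.h>
        #include <linux/bug.h>

        static int demo_hardlockup_panic;       /* assumed policy flag, not shown above */

        static void demo_die_nmi(void)
        {
                int this_cpu = smp_processor_id();      /* NMI context: CPU is fixed */

                if (demo_hardlockup_panic)
                        panic("Watchdog detected hard LOCKUP on cpu %d", this_cpu);
                else
                        WARN(1, "Watchdog detected hard LOCKUP on cpu %d", this_cpu);
        }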
    chmc.c
        592  unsigned long ret, this_cpu;    in chmc_read_mcreg() (local)
        596  this_cpu = real_hard_smp_processor_id();    in chmc_read_mcreg()
        598  if (p->portid == this_cpu) {    in chmc_read_mcreg()
/arch/alpha/kernel/

    smp.c
        523  int this_cpu = smp_processor_id();    in handle_ipi() (local)
        524  unsigned long *pending_ipis = &ipi_data[this_cpu].bits;    in handle_ipi()
        529  this_cpu, *pending_ipis, regs->pc));    in handle_ipi()
        556  this_cpu, which);    in handle_ipi()
        564  cpu_data[this_cpu].ipi_count++;    in handle_ipi()
        652  int cpu, this_cpu = smp_processor_id();    in flush_tlb_mm() (local)
        654  if (!cpu_online(cpu) || cpu == this_cpu)    in flush_tlb_mm()
        699  int cpu, this_cpu = smp_processor_id();    in flush_tlb_page() (local)
        701  if (!cpu_online(cpu) || cpu == this_cpu)    in flush_tlb_page()
        753  int cpu, this_cpu = smp_processor_id();    in flush_icache_user_page() (local)
        [all …]
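The flush_tlb_mm()/flush_tlb_page() hits share one idiom: walk the CPUs, skip any that are offline as well as the caller itself, and cross-call the rest. A generic sketch under those assumptions; demo_signal_cpu() is hypothetical:

        #include <linux/smp.h>
        #include <linux/cpumask.h>

        /* Hypothetical arch-specific cross-call. */
        static void demo_signal_cpu(int cpu) { }

        /* Called with preemption disabled, as the TLB-flush paths are,
         * so this_cpu stays valid for the whole walk. */
        static void demo_notify_others(void)
        {
                int cpu, this_cpu = smp_processor_id();

                for_each_possible_cpu(cpu) {
                        /* Never signal offline CPUs or ourselves. */
                        if (!cpu_online(cpu) || cpu == this_cpu)
                                continue;
                        demo_signal_cpu(cpu);
                }
        }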
/arch/x86/lib/

    msr-smp.c
        12  int this_cpu = raw_smp_processor_id();    in __rdmsr_on_cpu() (local)
        15  reg = per_cpu_ptr(rv->msrs, this_cpu);    in __rdmsr_on_cpu()
        26  int this_cpu = raw_smp_processor_id();    in __wrmsr_on_cpu() (local)
        29  reg = per_cpu_ptr(rv->msrs, this_cpu);    in __wrmsr_on_cpu()
        104  int this_cpu;    in __rwmsr_on_cpus() (local)
        111  this_cpu = get_cpu();    in __rwmsr_on_cpus()
        113  if (cpumask_test_cpu(this_cpu, mask))    in __rwmsr_on_cpus()
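__rwmsr_on_cpus() illustrates the get_cpu()/put_cpu() pattern: pin the task to the current CPU, service that CPU directly if it is in the mask, and leave the remote CPUs to cross-calls. A sketch of that shape, assuming smp_call_function_many() for the remote side; demo_msr_func() is hypothetical:

        #include <linux/smp.h>
        #include <linux/cpumask.h>

        /* Runs on each target CPU; would read/write an MSR there. */
        static void demo_msr_func(void *info) { }

        static void demo_run_on_cpus(const struct cpumask *mask, void *info)
        {
                int this_cpu = get_cpu();       /* disables preemption */

                /* Handle the local CPU directly if it is in the mask ... */
                if (cpumask_test_cpu(this_cpu, mask))
                        demo_msr_func(info);

                /* ... and cross-call everyone else; smp_call_function_many()
                 * skips the calling CPU by design. */
                smp_call_function_many(mask, demo_msr_func, info, 1);

                put_cpu();
        }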
/arch/x86/kernel/apic/

    ipi.c
        206  unsigned int this_cpu = smp_processor_id();    in default_send_IPI_mask_allbutself_phys() (local)
        214  if (query_cpu == this_cpu)    in default_send_IPI_mask_allbutself_phys()
        272  unsigned int this_cpu = smp_processor_id();    in default_send_IPI_mask_allbutself_logical() (local)
        278  if (query_cpu == this_cpu)    in default_send_IPI_mask_allbutself_logical()
    x2apic_phys.c
        55  unsigned long this_cpu;    in __x2apic_send_IPI_mask() (local)
        63  this_cpu = smp_processor_id();    in __x2apic_send_IPI_mask()
        65  if (apic_dest == APIC_DEST_ALLBUT && this_cpu == query_cpu)    in __x2apic_send_IPI_mask()
    apic_numachip.c
        130  unsigned int this_cpu = smp_processor_id();    in numachip_send_IPI_mask_allbutself() (local)
        134  if (cpu != this_cpu)    in numachip_send_IPI_mask_allbutself()
        141  unsigned int this_cpu = smp_processor_id();    in numachip_send_IPI_allbutself() (local)
        145  if (cpu != this_cpu)    in numachip_send_IPI_allbutself()
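ipi.c, x2apic_phys.c and apic_numachip.c all implement "send to everyone in the mask except myself" with the same loop shape. A minimal sketch; demo_send_one() stands in for the per-CPU IPI primitive:

        #include <linux/smp.h>
        #include <linux/cpumask.h>

        /* Hypothetical single-CPU IPI primitive. */
        static void demo_send_one(unsigned int cpu, int vector) { }

        static void demo_send_IPI_mask_allbutself(const struct cpumask *mask, int vector)
        {
                unsigned int cpu, this_cpu = smp_processor_id();

                /* Callers hold preemption off, so this_cpu cannot change. */
                for_each_cpu(cpu, mask) {
                        if (cpu == this_cpu)
                                continue;       /* exclude the sending CPU */
                        demo_send_one(cpu, vector);
                }
        }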
/arch/arm/common/

    bL_switcher.c
        149  unsigned int mpidr, this_cpu, that_cpu;    in bL_switch_to() (local)
        155  this_cpu = smp_processor_id();    in bL_switch_to()
        159  BUG_ON(cpu_logical_map(this_cpu) != ob_mpidr);    in bL_switch_to()
        164  that_cpu = bL_switcher_cpu_pairing[this_cpu];    in bL_switch_to()
        170  this_cpu, ob_mpidr, ib_mpidr);    in bL_switch_to()
        172  this_cpu = smp_processor_id();    in bL_switch_to()
        180  ipi_nr = register_ipi_completion(&inbound_alive, this_cpu);    in bL_switch_to()
        227  cpu_logical_map(this_cpu) = ib_mpidr;    in bL_switch_to()
        237  pr_debug("after switch: CPU %d MPIDR %#x\n", this_cpu, mpidr);    in bL_switch_to()
/arch/arm/include/asm/

    mmu_context.h
        39  void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,
        42  static inline void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,    in a15_erratum_get_cpumask() (argument)
/arch/s390/kernel/

    machine_kexec.c
        77  int this_cpu, cpu;    in __machine_kdump() (local)
        81  this_cpu = smp_find_processor_id(stap());    in __machine_kdump()
        83  if (cpu == this_cpu)    in __machine_kdump()
    processor.c
        70  int cpu, this_cpu;    in stop_machine_yield() (local)
        72  this_cpu = smp_processor_id();    in stop_machine_yield()
        75  cpu = cpumask_next_wrap(this_cpu, cpumask, this_cpu, false);    in stop_machine_yield()
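stop_machine_yield() uses the four-argument cpumask_next_wrap() seen above to pick the CPU after this_cpu, wrapping around the mask, so each spinning CPU directs its yield hint at a different neighbour. A sketch under that signature:

        #include <linux/smp.h>
        #include <linux/cpumask.h>

        static void demo_yield_to_next(const struct cpumask *cpumask)
        {
                int cpu, this_cpu = smp_processor_id();

                /* The scan starts just after this_cpu, wraps past the end
                 * of the mask, and gives up once it would come back around
                 * to this_cpu itself. */
                cpu = cpumask_next_wrap(this_cpu, cpumask, this_cpu, false);
                if (cpu >= nr_cpu_ids)
                        return;         /* no other candidate in the mask */

                /* direct a yield hint at 'cpu' here, e.g. a directed-yield
                 * hypercall on s390 */
        }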
/arch/arm/kernel/

    smp_tlb.c
        166  int this_cpu;    in broadcast_tlb_mm_a15_erratum() (local)
        172  this_cpu = get_cpu();    in broadcast_tlb_mm_a15_erratum()
        173  a15_erratum_get_cpumask(this_cpu, mm, &mask);    in broadcast_tlb_mm_a15_erratum()
    machine_kexec.c
        103  int cpu, this_cpu = raw_smp_processor_id();    in crash_smp_send_stop() (local)
        110  if (cpu == this_cpu)    in crash_smp_send_stop()
/arch/powerpc/include/asm/

    dbell.h
        146  int this_cpu = get_cpu();    in doorbell_try_core_ipi() (local)
        149  if (cpumask_test_cpu(cpu, cpu_sibling_mask(this_cpu))) {    in doorbell_try_core_ipi()
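doorbell_try_core_ipi() only rings a doorbell when the target shares a core with the sender, and uses get_cpu() to keep this_cpu stable across the sibling-mask test. A sketch; demo_ring_doorbell() is hypothetical, and cpu_sibling_mask() is the powerpc helper shown above:

        #include <linux/smp.h>
        #include <linux/cpumask.h>
        #include <asm/smp.h>    /* cpu_sibling_mask() on powerpc */

        /* Hypothetical doorbell primitive (msgsnd to a sibling thread). */
        static void demo_ring_doorbell(int cpu) { }

        static int demo_try_core_ipi(int cpu)
        {
                int ret = 0;
                int this_cpu = get_cpu();       /* pin: the sibling test is
                                                 * only valid while we stay put */

                /* Doorbells only reach hardware threads on the same core. */
                if (cpumask_test_cpu(cpu, cpu_sibling_mask(this_cpu))) {
                        demo_ring_doorbell(cpu);
                        ret = 1;
                }

                put_cpu();
                return ret;
        }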
/arch/x86/kernel/cpu/

    common.c
        124  static const struct cpu_dev *this_cpu = &default_cpu;    (variable)
        582  if (!this_cpu)    in table_lookup_model()
        585  info = this_cpu->legacy_models;    in table_lookup_model()
        720  if (this_cpu->legacy_cache_size)    in cpu_detect_cache_sizes()
        721  l2size = this_cpu->legacy_cache_size(c, l2size);    in cpu_detect_cache_sizes()
        744  if (this_cpu->c_detect_tlb)    in cpu_detect_tlb()
        745  this_cpu->c_detect_tlb(c);    in cpu_detect_tlb()
        814  this_cpu = cpu_devs[i];    in get_cpu_vendor()
        815  c->x86_vendor = this_cpu->c_x86_vendor;    in get_cpu_vendor()
        824  this_cpu = &default_cpu;    in get_cpu_vendor()
        [all …]
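Note that in common.c this_cpu is not a CPU number at all: it is a file-scope pointer to the detected vendor's struct cpu_dev, defaulting to default_cpu and reassigned in get_cpu_vendor(). A cut-down sketch of that dispatch pattern, with a hypothetical demo_cpu_dev type rather than the real struct:

        /* A simplified analogue of the struct cpu_dev dispatch in common.c. */
        struct demo_cpu_dev {
                unsigned int (*legacy_cache_size)(void *cpuinfo, unsigned int size);
                void (*c_detect_tlb)(void *cpuinfo);
        };

        static const struct demo_cpu_dev demo_default_cpu = { 0 };

        /* Points at the matched vendor's ops; reset to the default on failure. */
        static const struct demo_cpu_dev *demo_this_cpu = &demo_default_cpu;

        static void demo_detect_tlb(void *cpuinfo)
        {
                /* Optional hooks are checked before use. */
                if (demo_this_cpu->c_detect_tlb)
                        demo_this_cpu->c_detect_tlb(cpuinfo);
        }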
/arch/x86/hyperv/

    hv_apic.c
        158  int cur_cpu, vcpu, this_cpu = smp_processor_id();    in __send_ipi_mask() (local)
        173  (exclude_self && weight == 1 && cpumask_test_cpu(this_cpu, mask)))    in __send_ipi_mask()
        199  if (exclude_self && cur_cpu == this_cpu)    in __send_ipi_mask()
/arch/ia64/kernel/

    smp.c
        98  int this_cpu = get_cpu();    in handle_IPI() (local)
        128  this_cpu, which);    in handle_IPI()
    process.c
        205  unsigned int this_cpu = smp_processor_id();    in play_dead() (local)
        213  ia64_jump_to_sal(&sal_boot_rendez_state[this_cpu]);    in play_dead()
/arch/arm/mm/

    context.c
        51  void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,    in a15_erratum_get_cpumask() (argument)
        61  if (cpu == this_cpu)    in a15_erratum_get_cpumask()
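The context.c definition builds the set of other CPUs that need the erratum workaround, explicitly excluding the caller (which handles itself locally). A generic sketch of that shape; demo_needs_flush() is a hypothetical predicate:

        #include <linux/smp.h>
        #include <linux/cpumask.h>
        #include <linux/types.h>

        /* Hypothetical per-CPU predicate. */
        static bool demo_needs_flush(int cpu) { return true; }

        static void demo_get_cpumask(int this_cpu, struct cpumask *mask)
        {
                int cpu;

                cpumask_clear(mask);
                for_each_possible_cpu(cpu) {
                        if (cpu == this_cpu)
                                continue;       /* the caller flushes locally */
                        if (demo_needs_flush(cpu))
                                cpumask_set_cpu(cpu, mask);
                }
        }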
/arch/arm64/kernel/

    smp.c
        755  unsigned int this_cpu;    in smp_prepare_cpus() (local)
        759  this_cpu = smp_processor_id();    in smp_prepare_cpus()
        760  store_cpu_topology(this_cpu);    in smp_prepare_cpus()
        761  numa_store_cpu_info(this_cpu);    in smp_prepare_cpus()
        762  numa_add_cpu(this_cpu);    in smp_prepare_cpus()
/arch/x86/xen/

    smp.c
        250  unsigned int this_cpu = smp_processor_id();    in xen_send_IPI_mask_allbutself() (local)
        257  if (this_cpu == cpu)    in xen_send_IPI_mask_allbutself()
/arch/x86/kernel/

    process.c
        449  unsigned int this_cpu = smp_processor_id();    in speculative_store_bypass_ht_init() (local)
        467  for_each_cpu(cpu, topology_sibling_cpumask(this_cpu)) {    in speculative_store_bypass_ht_init()
        468  if (cpu == this_cpu)    in speculative_store_bypass_ht_init()
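speculative_store_bypass_ht_init() iterates the current CPU's SMT siblings and skips itself, since topology_sibling_cpumask() includes the CPU it is asked about. The loop shape, as a sketch:

        #include <linux/smp.h>
        #include <linux/cpumask.h>
        #include <linux/topology.h>

        /* Assumes the caller runs with preemption disabled, as during
         * CPU bringup, so this_cpu cannot change mid-loop. */
        static void demo_visit_siblings(void)
        {
                unsigned int cpu, this_cpu = smp_processor_id();

                /* topology_sibling_cpumask() covers all hardware threads
                 * on this core, including this_cpu itself, so skip it. */
                for_each_cpu(cpu, topology_sibling_cpumask(this_cpu)) {
                        if (cpu == this_cpu)
                                continue;
                        /* coordinate with sibling 'cpu' here */
                }
        }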