/arch/parisc/kernel/ |
D | smp.c |
    126  int this_cpu = smp_processor_id();  in ipi_interrupt() local
    127  struct cpuinfo_parisc *p = &per_cpu(cpu_data, this_cpu);  in ipi_interrupt()
    137  spinlock_t *lock = &per_cpu(ipi_lock, this_cpu);  in ipi_interrupt()
    155  smp_debug(100, KERN_DEBUG "CPU%d IPI_NOP\n", this_cpu);  in ipi_interrupt()
    159  smp_debug(100, KERN_DEBUG "CPU%d IPI_RESCHEDULE\n", this_cpu);  in ipi_interrupt()
    167  smp_debug(100, KERN_DEBUG "CPU%d IPI_CALL_FUNC\n", this_cpu);  in ipi_interrupt()
    172  smp_debug(100, KERN_DEBUG "CPU%d IPI_CALL_FUNC_SINGLE\n", this_cpu);  in ipi_interrupt()
    177  smp_debug(100, KERN_DEBUG "CPU%d IPI_CPU_START\n", this_cpu);  in ipi_interrupt()
    181  smp_debug(100, KERN_DEBUG "CPU%d IPI_CPU_STOP\n", this_cpu);  in ipi_interrupt()
    186  smp_debug(100, KERN_DEBUG "CPU%d is alive!\n", this_cpu);  in ipi_interrupt()
    [all …]
|
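The parisc hits above all share one shape: the IPI handler reads its own CPU number once with smp_processor_id() and then uses it to reach this CPU's per-CPU data and to tag its debug output. Below is a minimal kernel-style sketch of that shape; the function name, the per-CPU variables and the pending-bits handling are hypothetical stand-ins, not the parisc originals, and the lock is assumed to be initialised elsewhere with spin_lock_init().

#include <linux/kernel.h>
#include <linux/percpu.h>
#include <linux/smp.h>
#include <linux/spinlock.h>

/* Hypothetical per-CPU state, standing in for cpu_data / ipi_lock. */
static DEFINE_PER_CPU(unsigned long, demo_ipi_pending);
static DEFINE_PER_CPU(spinlock_t, demo_ipi_lock);

static void demo_ipi_interrupt(void)
{
        /* Safe here: an IPI handler runs with preemption disabled. */
        int this_cpu = smp_processor_id();
        spinlock_t *lock = &per_cpu(demo_ipi_lock, this_cpu);
        unsigned long ops;

        spin_lock(lock);
        ops = per_cpu(demo_ipi_pending, this_cpu);
        per_cpu(demo_ipi_pending, this_cpu) = 0;
        spin_unlock(lock);

        if (ops)
                printk(KERN_DEBUG "CPU%d handling IPI ops 0x%lx\n",
                       this_cpu, ops);
}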
/arch/arm/mach-realview/ |
D | hotplug.c |
    109  unsigned int this_cpu = hard_smp_processor_id();  in platform_cpu_die() local
    111  if (cpu != this_cpu) {  in platform_cpu_die()
    113  this_cpu, cpu);  in platform_cpu_die()
|
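The realview hit is a sanity check in the CPU hot-unplug path: the logical CPU it was asked to take down must match the CPU actually executing the code. A hedged sketch of that check follows; the function name is hypothetical, and plain smp_processor_id() is used where the realview code reads the hardware ID via hard_smp_processor_id().

#include <linux/kernel.h>
#include <linux/smp.h>

static void demo_platform_cpu_die(unsigned int cpu)
{
        unsigned int this_cpu = smp_processor_id();

        if (cpu != this_cpu) {
                printk(KERN_CRIT "cpu_die() running on CPU %u instead of CPU %u\n",
                       this_cpu, cpu);
                return;
        }
        /* ... park this CPU in a low-power loop until it is woken ... */
}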
/arch/x86/kernel/ |
D | genx2apic_cluster.c |
    76  unsigned long this_cpu = smp_processor_id();  in x2apic_send_IPI_mask_allbutself() local
    80  if (query_cpu != this_cpu)  in x2apic_send_IPI_mask_allbutself()
    91  unsigned long this_cpu = smp_processor_id();  in x2apic_send_IPI_allbutself() local
    95  if (query_cpu != this_cpu)  in x2apic_send_IPI_allbutself()
|
D | genx2apic_phys.c |
    74  unsigned long this_cpu = smp_processor_id();  in x2apic_send_IPI_mask_allbutself() local
    78  if (query_cpu != this_cpu)  in x2apic_send_IPI_mask_allbutself()
    90  unsigned long this_cpu = smp_processor_id();  in x2apic_send_IPI_allbutself() local
    94  if (query_cpu != this_cpu)  in x2apic_send_IPI_allbutself()
|
D | ipi.c |
    151  unsigned int this_cpu = smp_processor_id();  in send_IPI_mask_allbutself() local
    157  if (query_cpu != this_cpu)  in send_IPI_mask_allbutself()
|
D | genx2apic_uv_x.c |
    140  unsigned int this_cpu = smp_processor_id();  in uv_send_IPI_mask_allbutself() local
    143  if (cpu != this_cpu)  in uv_send_IPI_mask_allbutself()
    150  unsigned int this_cpu = smp_processor_id();  in uv_send_IPI_allbutself() local
    153  if (cpu != this_cpu)  in uv_send_IPI_allbutself()
|
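All of the x86 hits above implement one idiom: send an IPI to every CPU in a mask except the sender. A minimal sketch of that loop is shown below; demo_send_ipi_one() is a hypothetical stand-in for the APIC-specific per-destination send routine used by each driver.

#include <linux/cpumask.h>
#include <linux/smp.h>

/* Hypothetical per-destination send, standing in for the APIC- or
 * UV-specific helpers in the files above. */
static void demo_send_ipi_one(unsigned int cpu, int vector)
{
        /* ... program the interrupt controller to deliver 'vector' to 'cpu' ... */
}

static void demo_send_IPI_mask_allbutself(const struct cpumask *mask, int vector)
{
        unsigned int this_cpu = smp_processor_id();
        unsigned int query_cpu;

        /* Walk the requested mask and skip the CPU doing the sending. */
        for_each_cpu(query_cpu, mask)
                if (query_cpu != this_cpu)
                        demo_send_ipi_one(query_cpu, vector);
}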
/arch/alpha/kernel/ |
D | smp.c |
    567  int this_cpu = smp_processor_id();  in handle_ipi() local
    568  unsigned long *pending_ipis = &ipi_data[this_cpu].bits;  in handle_ipi()
    573  this_cpu, *pending_ipis, regs->pc));  in handle_ipi()
    605  this_cpu, which);  in handle_ipi()
    613  cpu_data[this_cpu].ipi_count++;  in handle_ipi()
    703  int cpu, this_cpu = smp_processor_id();  in flush_tlb_mm() local
    705  if (!cpu_online(cpu) || cpu == this_cpu)  in flush_tlb_mm()
    752  int cpu, this_cpu = smp_processor_id();  in flush_tlb_page() local
    754  if (!cpu_online(cpu) || cpu == this_cpu)  in flush_tlb_page()
    808  int cpu, this_cpu = smp_processor_id();  in flush_icache_user_range() local
    [all …]
|
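The alpha handler indexes a plain per-CPU array by its own CPU number and drains a bit-mask of pending IPI operations; the TLB-flush helpers below it use the same id to skip the local CPU when walking the online set. Here is a sketch of the handler half, with hypothetical names standing in for ipi_data[] and its bits field.

#include <linux/atomic.h>
#include <linux/smp.h>
#include <linux/threads.h>

struct demo_ipi_data {
        unsigned long bits;     /* pending IPI operations, one bit each */
        unsigned long count;    /* how many IPIs this CPU has handled */
};
static struct demo_ipi_data demo_ipi_data[NR_CPUS];

static void demo_handle_ipi(void)
{
        int this_cpu = smp_processor_id();
        unsigned long *pending = &demo_ipi_data[this_cpu].bits;
        unsigned long ops;

        /* Atomically take all pending bits and dispatch them. */
        while ((ops = xchg(pending, 0)) != 0) {
                /* ... handle each set bit (resched, call-func, stop, ...) ... */
        }

        demo_ipi_data[this_cpu].count++;
}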
/arch/sparc/kernel/ |
D | process_64.c |
    211  int this_cpu)  in __global_reg_self() argument
    215  global_reg_snapshot[this_cpu].tstate = regs->tstate;  in __global_reg_self()
    216  global_reg_snapshot[this_cpu].tpc = regs->tpc;  in __global_reg_self()
    217  global_reg_snapshot[this_cpu].tnpc = regs->tnpc;  in __global_reg_self()
    218  global_reg_snapshot[this_cpu].o7 = regs->u_regs[UREG_I7];  in __global_reg_self()
    226  global_reg_snapshot[this_cpu].i7 = rw->ins[7];  in __global_reg_self()
    230  global_reg_snapshot[this_cpu].rpc = rw->ins[7];  in __global_reg_self()
    233  global_reg_snapshot[this_cpu].i7 = 0;  in __global_reg_self()
    234  global_reg_snapshot[this_cpu].rpc = 0;  in __global_reg_self()
    236  global_reg_snapshot[this_cpu].thread = tp;  in __global_reg_self()
    [all …]
|
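In process_64.c each CPU fills in its own slot of a global snapshot array, indexed by its own CPU number, so a later dump can print every CPU's registers. The sketch below shows that per-CPU-slot pattern with hypothetical field names; the real global_reg_snapshot[] records tstate/tpc/tnpc and more.

#include <linux/sched.h>
#include <linux/smp.h>
#include <linux/threads.h>

struct demo_reg_snapshot {
        unsigned long pc;
        unsigned long o7;
        struct task_struct *thread;
};
static struct demo_reg_snapshot demo_reg_snapshot[NR_CPUS];

static void demo_global_reg_self(int this_cpu, unsigned long pc, unsigned long o7)
{
        /* Each CPU writes only its own slot, so no locking is needed here. */
        demo_reg_snapshot[this_cpu].pc = pc;
        demo_reg_snapshot[this_cpu].o7 = o7;
        demo_reg_snapshot[this_cpu].thread = current;
}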
D | smp_64.c |
    627  int retries, this_cpu, prev_sent, i, saw_cpu_error;  in hypervisor_xcall_deliver() local
    631  this_cpu = smp_processor_id();  in hypervisor_xcall_deliver()
    714  this_cpu, saw_cpu_error - 1);  in hypervisor_xcall_deliver()
    720  this_cpu, retries);  in hypervisor_xcall_deliver()
    725  this_cpu, status);  in hypervisor_xcall_deliver()
    728  this_cpu, cnt, tb->cpu_list_pa, tb->cpu_mondo_block_pa);  in hypervisor_xcall_deliver()
    731  printk(KERN_CRIT "CPU[%d]: CPU list [ ", this_cpu);  in hypervisor_xcall_deliver()
    742  int this_cpu, i, cnt;  in xcall_deliver() local
    759  this_cpu = smp_processor_id();  in xcall_deliver()
    760  tb = &trap_block[this_cpu];  in xcall_deliver()
    [all …]
|
D | chmc.c |
    592  unsigned long ret, this_cpu;  in chmc_read_mcreg() local
    596  this_cpu = real_hard_smp_processor_id();  in chmc_read_mcreg()
    598  if (p->portid == this_cpu) {  in chmc_read_mcreg()
|
D | entry.h | 226 extern void __cpuinit sun4v_register_mondo_queues(int this_cpu);
|
D | irq_64.c |
    917  void __cpuinit notrace sun4v_register_mondo_queues(int this_cpu)  in sun4v_register_mondo_queues() argument
    919  struct trap_per_cpu *tb = &trap_block[this_cpu];  in sun4v_register_mondo_queues()
|
/arch/x86/kernel/cpu/ |
D | common.c |
    63   static struct cpu_dev *this_cpu __cpuinitdata;
    230  if (!this_cpu)  in table_lookup_model()
    233  info = this_cpu->c_models;  in table_lookup_model()
    338  if (this_cpu->c_size_cache)  in display_cacheinfo()
    339  l2size = this_cpu->c_size_cache(c, l2size);  in display_cacheinfo()
    430  this_cpu = cpu_devs[i];  in get_cpu_vendor()
    431  c->x86_vendor = this_cpu->c_x86_vendor;  in get_cpu_vendor()
    443  this_cpu = &default_cpu;  in get_cpu_vendor()
    570  if (this_cpu->c_early_init)  in early_identify_cpu()
    571  this_cpu->c_early_init(c);  in early_identify_cpu()
    [all …]
|
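Note that in common.c the identifier does not hold a CPU number at all: this_cpu is a file-scope pointer to the detected vendor's cpu_dev ops table, set once in get_cpu_vendor() and dereferenced later for optional hooks. The sketch below shows that struct-of-ops pattern with hypothetical names; the real struct cpu_dev has more fields and its hooks take struct cpuinfo_x86 *.

struct demo_cpu_dev {
        const char *c_vendor;
        void (*c_early_init)(void *c);
        unsigned int (*c_size_cache)(void *c, unsigned int size);
};

/* Set once at vendor detection, used everywhere else in the file. */
static const struct demo_cpu_dev *this_cpu;

static unsigned int demo_display_cacheinfo(void *c, unsigned int l2size)
{
        /* Every use guards against a hook the vendor did not provide. */
        if (this_cpu && this_cpu->c_size_cache)
                l2size = this_cpu->c_size_cache(c, l2size);
        return l2size;
}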
/arch/x86/include/asm/ |
D | ipi.h |
    144  unsigned int this_cpu = smp_processor_id();  in send_IPI_mask_allbutself() local
    150  if (query_cpu != this_cpu)  in send_IPI_mask_allbutself()
|
/arch/ia64/kernel/ |
D | smp.c |
    99   int this_cpu = get_cpu();  in handle_IPI() local
    129  this_cpu, which);  in handle_IPI()
|
D | perfmon.c |
    5304  int this_cpu = smp_processor_id();  in pfm_overflow_handler() local
    5339  pfm_stats[this_cpu].pfm_smpl_handler_calls++;  in pfm_overflow_handler()
    5362  pfm_stats[this_cpu].pfm_smpl_handler_cycles += end_cycles - start_cycles;  in pfm_overflow_handler()
    5504  int this_cpu = smp_processor_id();  in pfm_do_interrupt_handler() local
    5507  pfm_stats[this_cpu].pfm_ovfl_intr_count++;  in pfm_do_interrupt_handler()
    5539  pfm_stats[this_cpu].pfm_spurious_ovfl_intr_count++;  in pfm_do_interrupt_handler()
    5551  this_cpu, task_pid_nr(task));  in pfm_do_interrupt_handler()
    5556  this_cpu,  in pfm_do_interrupt_handler()
    5567  int this_cpu;  in pfm_interrupt_handler() local
    5571  this_cpu = get_cpu();  in pfm_interrupt_handler()
    [all …]
|
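The ia64 handlers use get_cpu() rather than a bare smp_processor_id(): it disables preemption, so the returned id stays valid while the per-CPU statistics are updated, and put_cpu() releases it again. A sketch of that pattern, with a hypothetical stats array standing in for pfm_stats[]:

#include <linux/smp.h>
#include <linux/threads.h>

struct demo_pmu_stats {
        unsigned long ovfl_intr_count;
        unsigned long spurious_ovfl_intr_count;
};
static struct demo_pmu_stats demo_pmu_stats[NR_CPUS];

static void demo_pmu_interrupt(int spurious)
{
        int this_cpu = get_cpu();       /* pins us to this CPU */

        demo_pmu_stats[this_cpu].ovfl_intr_count++;
        if (spurious)
                demo_pmu_stats[this_cpu].spurious_ovfl_intr_count++;

        put_cpu();                      /* re-enables preemption */
}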
D | process.c |
    245  unsigned int this_cpu = smp_processor_id();  in play_dead() local
    253  ia64_jump_to_sal(&sal_boot_rendez_state[this_cpu]);  in play_dead()
|
/arch/sparc/mm/ |
D | init_64.c |
    204  static inline void set_dcache_dirty(struct page *page, int this_cpu)  in set_dcache_dirty() argument
    206  unsigned long mask = this_cpu;  in set_dcache_dirty()
    274  int this_cpu = get_cpu();  in flush_dcache() local
    279  if (cpu == this_cpu)  in flush_dcache()
    336  int this_cpu;  in flush_dcache_page() local
    348  this_cpu = get_cpu();  in flush_dcache_page()
    356  if (dirty_cpu == this_cpu)  in flush_dcache_page()
    360  set_dcache_dirty(page, this_cpu);  in flush_dcache_page()
|
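flush_dcache_page() in init_64.c remembers which CPU dirtied a page's D-cache and only cross-calls when the flush runs somewhere else. The simplified sketch below models that bookkeeping; demo_page and its dirty field are hypothetical, whereas the real code packs the CPU number into page->flags via set_dcache_dirty().

#include <linux/smp.h>

struct demo_page {
        int dcache_dirty;       /* -1 = clean, otherwise the dirtying CPU */
};

static void demo_flush_dcache_page(struct demo_page *page)
{
        int this_cpu = get_cpu();
        int dirty_cpu = page->dcache_dirty;

        if (dirty_cpu >= 0 && dirty_cpu != this_cpu) {
                /* ... cross-call dirty_cpu so it flushes its own D-cache ... */
        }
        /* The page is now considered dirty in this CPU's D-cache. */
        page->dcache_dirty = this_cpu;

        put_cpu();
}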