/arch/sparc/kernel/

  prom_64.c
    437  int cpuid = of_getintprop_default(dp, mid_prop, -1);  in of_iterate_over_cpus()  local
    441  if (cpuid < 0) {  in of_iterate_over_cpus()
    443  cpuid = of_getintprop_default(dp, this_mid_prop, -1);  in of_iterate_over_cpus()
    445  if (cpuid < 0) {  in of_iterate_over_cpus()
    451  if (cpuid >= NR_CPUS) {  in of_iterate_over_cpus()
    454  cpuid, NR_CPUS);  in of_iterate_over_cpus()
    458  ret = func(dp, cpuid, arg);  in of_iterate_over_cpus()
    465  static void *check_cpu_node(struct device_node *dp, int cpuid, int id)  in check_cpu_node()  argument
    467  if (id == cpuid)  in check_cpu_node()
    472  struct device_node *of_find_node_by_cpuid(int cpuid)  in of_find_node_by_cpuid()  argument
    [all …]

  sun4d_smp.c
    45   static inline void show_leds(int cpuid)  in show_leds()  argument
    47   cpuid &= 0x1e;  in show_leds()
    49   "r" ((cpu_leds[cpuid] << 4) | cpu_leds[cpuid+1]),  in show_leds()
    50   "r" (ECSR_BASE(cpuid) | BB_LEDS),  in show_leds()
    56   int cpuid = hard_smp_processor_id();  in sun4d_cpu_pre_starting()  local
    59   cpu_leds[cpuid] = 0x6;  in sun4d_cpu_pre_starting()
    60   show_leds(cpuid);  in sun4d_cpu_pre_starting()
    69   int cpuid;  in sun4d_cpu_pre_online()  local
    71   cpuid = hard_smp_processor_id();  in sun4d_cpu_pre_online()
    78   sun4d_swap((unsigned long *)&cpu_callin_map[cpuid], 1);  in sun4d_cpu_pre_online()
    [all …]

  smp_32.c
    186  int i, cpuid, extra;  in smp_prepare_cpus()  local
    191  for (i = 0; !cpu_find_by_instance(i, NULL, &cpuid); i++) {  in smp_prepare_cpus()
    192  if (cpuid >= NR_CPUS)  in smp_prepare_cpus()
    246  int cpuid = hard_smp_processor_id();  in smp_prepare_boot_cpu()  local
    248  if (cpuid >= NR_CPUS) {  in smp_prepare_boot_cpu()
    252  if (cpuid != 0)  in smp_prepare_boot_cpu()
    255  current_thread_info()->cpu = cpuid;  in smp_prepare_boot_cpu()
    256  set_cpu_online(cpuid, true);  in smp_prepare_boot_cpu()
    257  set_cpu_possible(cpuid, true);  in smp_prepare_boot_cpu()
    318  unsigned int cpuid = hard_smp_processor_id();  in arch_cpu_pre_online()  local
    [all …]

  sun4d_irq.c
    33   unsigned int cpuid; /* target cpu */  member
    195  int cpuid = handler_data->cpuid;  in sun4d_mask_irq()  local
    201  cc_set_imsk_other(cpuid, cc_get_imsk_other(cpuid) | (1 << real_irq));  in sun4d_mask_irq()
    213  int cpuid = handler_data->cpuid;  in sun4d_unmask_irq()  local
    220  cc_set_imsk_other(cpuid, cc_get_imsk_other(cpuid) & ~(1 << real_irq));  in sun4d_unmask_irq()
    254  int cpuid = cpu_logical_map(1);  in sun4d_distribute_irqs()  local
    256  if (cpuid == -1)  in sun4d_distribute_irqs()
    257  cpuid = cpu_logical_map(0);  in sun4d_distribute_irqs()
    261  board_to_cpu[board] = cpuid;  in sun4d_distribute_irqs()
    262  set_sbi_tid(devid, cpuid << 3);  in sun4d_distribute_irqs()
    [all …]

  irq_64.c
    314  static unsigned int sun4u_compute_tid(unsigned long imap, unsigned long cpuid)  in sun4u_compute_tid()  argument
    319  tid = starfire_translate(imap, cpuid);  in sun4u_compute_tid()
    329  tid = cpuid << IMAP_TID_SHIFT;  in sun4u_compute_tid()
    332  unsigned int a = cpuid & 0x1f;  in sun4u_compute_tid()
    333  unsigned int n = (cpuid >> 5) & 0x1f;  in sun4u_compute_tid()
    341  tid = cpuid << IMAP_TID_SHIFT;  in sun4u_compute_tid()
    353  int cpuid;  in irq_choose_cpu()  local
    357  cpuid = map_to_cpu(irq);  in irq_choose_cpu()
    362  cpuid = cpumask_empty(&tmp) ? map_to_cpu(irq) : cpumask_first(&tmp);  in irq_choose_cpu()
    365  return cpuid;  in irq_choose_cpu()
    [all …]

/arch/arm/kernel/

  topology.c
    196  static inline void update_cpu_capacity(unsigned int cpuid) {}  in update_cpu_capacity()  argument
    219  static void update_siblings_masks(unsigned int cpuid)  in update_siblings_masks()  argument
    221  struct cputopo_arm *cpu_topo, *cpuid_topo = &cpu_topology[cpuid];  in update_siblings_masks()
    231  cpumask_set_cpu(cpuid, &cpu_topo->core_sibling);  in update_siblings_masks()
    232  if (cpu != cpuid)  in update_siblings_masks()
    238  cpumask_set_cpu(cpuid, &cpu_topo->thread_sibling);  in update_siblings_masks()
    239  if (cpu != cpuid)  in update_siblings_masks()
    250  void store_cpu_topology(unsigned int cpuid)  in store_cpu_topology()  argument
    252  struct cputopo_arm *cpuid_topo = &cpu_topology[cpuid];  in store_cpu_topology()
    290  update_siblings_masks(cpuid);  in store_cpu_topology()
    [all …]

/arch/alpha/kernel/

  smp.c
    82   smp_store_cpu_info(int cpuid)  in smp_store_cpu_info()  argument
    84   cpu_data[cpuid].loops_per_jiffy = loops_per_jiffy;  in smp_store_cpu_info()
    85   cpu_data[cpuid].last_asn = ASN_FIRST_VERSION;  in smp_store_cpu_info()
    86   cpu_data[cpuid].need_new_asn = 0;  in smp_store_cpu_info()
    87   cpu_data[cpuid].asn_lock = 0;  in smp_store_cpu_info()
    94   smp_setup_percpu_timer(int cpuid)  in smp_setup_percpu_timer()  argument
    96   cpu_data[cpuid].prof_counter = 1;  in smp_setup_percpu_timer()
    97   cpu_data[cpuid].prof_multiplier = 1;  in smp_setup_percpu_timer()
    101  wait_boot_cpu_to_stop(int cpuid)  in wait_boot_cpu_to_stop()  argument
    111  printk("wait_boot_cpu_to_stop: FAILED on CPU %d, hanging now\n", cpuid);  in wait_boot_cpu_to_stop()
    [all …]

  sys_marvel.c
    406  int cpuid = hard_smp_processor_id();  in marvel_smp_callin()  local
    407  struct io7 *io7 = marvel_find_io7(cpuid);  in marvel_smp_callin()
    416  printk("Redirecting IO7 interrupts to local CPU at PE %u\n", cpuid);  in marvel_smp_callin()
    419  io7_redirect_irq(io7, &io7->csrs->HLT_CTL.csr, cpuid);  in marvel_smp_callin()
    420  io7_redirect_irq(io7, &io7->csrs->HPI_CTL.csr, cpuid);  in marvel_smp_callin()
    421  io7_redirect_irq(io7, &io7->csrs->CRD_CTL.csr, cpuid);  in marvel_smp_callin()
    422  io7_redirect_irq(io7, &io7->csrs->STV_CTL.csr, cpuid);  in marvel_smp_callin()
    423  io7_redirect_irq(io7, &io7->csrs->HEI_CTL.csr, cpuid);  in marvel_smp_callin()
    427  io7_redirect_one_lsi(io7, i, cpuid);  in marvel_smp_callin()
    429  io7_redirect_one_lsi(io7, 0x74, cpuid);  in marvel_smp_callin()
    [all …]

/arch/parisc/kernel/

  processor.c
    96   unsigned long cpuid;  in processor_probe()  local
    115  cpuid = boot_cpu_data.cpu_count;  in processor_probe()
    117  cpu_info.cpu_num = cpu_info.cpu_loc = cpuid;  in processor_probe()
    148  cpuid, cpu_info.cpu_num, cpu_info.cpu_loc,  in processor_probe()
    167  cpuid = cpu_info.cpu_num;  in processor_probe()
    173  p = &per_cpu(cpu_data, cpuid);  in processor_probe()
    177  if (cpuid)  in processor_probe()
    183  p->cpuid = cpuid; /* save CPU id */  in processor_probe()
    192  init_percpu_prof(cpuid);  in processor_probe()
    203  if (cpuid) {  in processor_probe()
    [all …]

  smp.c
    90   ipi_init(int cpuid)  in ipi_init()  argument
    94   if(cpu_online(cpuid) )  in ipi_init()
    316  int smp_boot_one_cpu(int cpuid, struct task_struct *idle)  in smp_boot_one_cpu()  argument
    318  const struct cpuinfo_parisc *p = &per_cpu(cpu_data, cpuid);  in smp_boot_one_cpu()
    321  task_thread_info(idle)->cpu = cpuid;  in smp_boot_one_cpu()
    326  cpu_now_booting = cpuid;  in smp_boot_one_cpu()
    335  printk(KERN_INFO "Releasing cpu %d now, hpa=%lx\n", cpuid, p->hpa);  in smp_boot_one_cpu()
    355  if(cpu_online(cpuid)) {  in smp_boot_one_cpu()
    364  printk(KERN_CRIT "SMP: CPU:%d is stuck.\n", cpuid);  in smp_boot_one_cpu()
    370  cpuid, timeout * 100);  in smp_boot_one_cpu()
    [all …]

/arch/ia64/include/asm/sn/

  sn_cpuid.h
    104  #define cpuid_to_nasid(cpuid) (sn_nodepda->phys_cpuid[cpuid].nasid)  argument
    105  #define cpuid_to_subnode(cpuid) (sn_nodepda->phys_cpuid[cpuid].subnode)  argument
    106  #define cpuid_to_slice(cpuid) (sn_nodepda->phys_cpuid[cpuid].slice)  argument

/arch/x86/kernel/

  verify_cpu.S
    42   pushfl # standard way to check for cpuid
    51   jz .Lverify_cpu_no_longmode # cpu has no cpuid
    54   movl $0x0,%eax # See if cpuid 1 is implemented
    55   cpuid
    57   jb .Lverify_cpu_no_longmode # no cpuid 1
    80   cpuid
    103  cpuid
    108  movl $0x80000000,%eax # See if extended cpuid is implemented
    109  cpuid
    111  jb .Lverify_cpu_no_longmode # no extended cpuid
    [all …]

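The verify_cpu.S hits above rely on the classic EFLAGS trick flagged in the comment at line 42: if bit 21 of EFLAGS (the ID bit) can be toggled, the CPUID instruction is available. Below is an illustrative C rendering of that probe; it is a sketch, not code from the file, and the helper name is invented here. The check is only meaningful on 32-bit parts, since 64-bit CPUs always implement CPUID.

/*
 * Sketch of the EFLAGS.ID probe that verify_cpu.S performs in assembly:
 * try to flip bit 21 of EFLAGS and see whether the change sticks.
 */
static int cpu_has_cpuid(void)
{
        unsigned long f0, f1;

        asm volatile("pushf\n\t"
                     "pushf\n\t"
                     "pop %0\n\t"       /* f0 = current EFLAGS */
                     "mov %0, %1\n\t"
                     "xor %2, %1\n\t"   /* flip the ID bit */
                     "push %1\n\t"
                     "popf\n\t"         /* try to write it back */
                     "pushf\n\t"
                     "pop %1\n\t"       /* f1 = EFLAGS after the write */
                     "popf"             /* restore the original EFLAGS */
                     : "=&r" (f0), "=&r" (f1)
                     : "ri" (1UL << 21));       /* EFLAGS.ID */

        return !!((f0 ^ f1) & (1UL << 21));
}
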
/arch/x86/kernel/cpu/

  transmeta.c
    37   cpuid(0x80860001, &dummy, &cpu_rev, &cpu_freq, &cpu_flags);  in init_transmeta()
    48   cpuid(0x80860002, &new_cpu_rev, &cms_rev1, &cms_rev2, &dummy);  in init_transmeta()
    61   cpuid(0x80860003,  in init_transmeta()
    66   cpuid(0x80860004,  in init_transmeta()
    71   cpuid(0x80860005,  in init_transmeta()
    76   cpuid(0x80860006,  in init_transmeta()

/arch/x86/kvm/

  cpuid.h
    13   int kvm_dev_ioctl_get_cpuid(struct kvm_cpuid2 *cpuid,
    17   struct kvm_cpuid *cpuid,
    20   struct kvm_cpuid2 *cpuid,
    23   struct kvm_cpuid2 *cpuid,
    71   const struct cpuid_reg cpuid = x86_feature_cpuid(x86_feature);  in guest_cpuid_get_register()  local
    73   entry = kvm_find_cpuid_entry(vcpu, cpuid.function, cpuid.index);  in guest_cpuid_get_register()
    77   switch (cpuid.reg) {  in guest_cpuid_get_register()

  cpuid.c
    190  struct kvm_cpuid *cpuid,  in kvm_vcpu_ioctl_set_cpuid()  argument
    197  if (cpuid->nent > KVM_MAX_CPUID_ENTRIES)  in kvm_vcpu_ioctl_set_cpuid()
    200  if (cpuid->nent) {  in kvm_vcpu_ioctl_set_cpuid()
    202  cpuid->nent);  in kvm_vcpu_ioctl_set_cpuid()
    207  cpuid->nent * sizeof(struct kvm_cpuid_entry)))  in kvm_vcpu_ioctl_set_cpuid()
    210  for (i = 0; i < cpuid->nent; i++) {  in kvm_vcpu_ioctl_set_cpuid()
    222  vcpu->arch.cpuid_nent = cpuid->nent;  in kvm_vcpu_ioctl_set_cpuid()
    234  struct kvm_cpuid2 *cpuid,  in kvm_vcpu_ioctl_set_cpuid2()  argument
    240  if (cpuid->nent > KVM_MAX_CPUID_ENTRIES)  in kvm_vcpu_ioctl_set_cpuid2()
    244  cpuid->nent * sizeof(struct kvm_cpuid_entry2)))  in kvm_vcpu_ioctl_set_cpuid2()
    [all …]

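kvm_vcpu_ioctl_set_cpuid() and kvm_vcpu_ioctl_set_cpuid2() above are the kernel ends of the KVM_SET_CPUID/KVM_SET_CPUID2 ioctls, and kvm_dev_ioctl_get_cpuid() backs KVM_GET_SUPPORTED_CPUID. A hedged userspace sketch of the usual sequence follows; it assumes the /dev/kvm and vCPU file descriptors already exist, picks an arbitrary entry count, and collapses error handling to exit().

#include <stdlib.h>
#include <sys/ioctl.h>
#include <linux/kvm.h>

#define NENT 128        /* assumed to be enough room for the supported leaves */

static void install_cpuid(int kvm_fd, int vcpu_fd)
{
        struct kvm_cpuid2 *cpuid;

        cpuid = calloc(1, sizeof(*cpuid) + NENT * sizeof(cpuid->entries[0]));
        if (!cpuid)
                exit(1);
        cpuid->nent = NENT;

        /* Ask KVM which CPUID leaves it can expose (system ioctl). */
        if (ioctl(kvm_fd, KVM_GET_SUPPORTED_CPUID, cpuid) < 0)
                exit(1);

        /* Install them on the vCPU; serviced by kvm_vcpu_ioctl_set_cpuid2(). */
        if (ioctl(vcpu_fd, KVM_SET_CPUID2, cpuid) < 0)
                exit(1);

        free(cpuid);
}

After KVM_GET_SUPPORTED_CPUID returns, cpuid->nent holds the number of entries actually filled in; real VMMs usually edit individual leaves before installing them.
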
/arch/mips/paravirt/

  paravirt-irq.c
    223  unsigned int cpuid = cpunum_for_cpu(cpu);  in irq_mbox_all()  local
    224  mask = 1 << (cpuid * MBOX_BITS_PER_CPU + mbox);  in irq_mbox_all()
    225  __raw_writel(mask, base + (cpuid * mips_irq_cpu_stride));  in irq_mbox_all()
    252  unsigned int cpuid = cpunum_for_cpu(cpu);  in irq_mbox_ipi()  local
    257  mask = actions << (cpuid * MBOX_BITS_PER_CPU);  in irq_mbox_ipi()
    264  unsigned int cpuid = get_ebase_cpunum();  in irq_mbox_cpu_onoffline()  local
    269  mask = 1 << (cpuid * MBOX_BITS_PER_CPU + mbox);  in irq_mbox_cpu_onoffline()
    270  __raw_writel(mask, base + (cpuid * mips_irq_cpu_stride));  in irq_mbox_cpu_onoffline()
    327  unsigned int cpuid = get_ebase_cpunum();  in irq_pci_dispatch()  local
    331  (cpuid * mips_irq_cpu_stride));  in irq_pci_dispatch()
    [all …]

/arch/arm64/kernel/

  topology.c
    223  static void update_siblings_masks(unsigned int cpuid)  in update_siblings_masks()  argument
    225  struct cpu_topology *cpu_topo, *cpuid_topo = &cpu_topology[cpuid];  in update_siblings_masks()
    235  cpumask_set_cpu(cpuid, &cpu_topo->core_sibling);  in update_siblings_masks()
    236  if (cpu != cpuid)  in update_siblings_masks()
    242  cpumask_set_cpu(cpuid, &cpu_topo->thread_sibling);  in update_siblings_masks()
    243  if (cpu != cpuid)  in update_siblings_masks()
    248  void store_cpu_topology(unsigned int cpuid)  in store_cpu_topology()  argument
    250  struct cpu_topology *cpuid_topo = &cpu_topology[cpuid];  in store_cpu_topology()
    279  cpuid, cpuid_topo->cluster_id, cpuid_topo->core_id,  in store_cpu_topology()
    283  update_siblings_masks(cpuid);  in store_cpu_topology()

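The truncated arm64 update_siblings_masks() hits follow a simple pattern: every CPU that shares cluster_id with cpuid becomes a core sibling, and every CPU that additionally shares core_id becomes a thread sibling. The sketch below reconstructs that pattern around the fragments listed above, using the field and helper names they show; the lines elided from the listing may differ slightly in the real file.

/* Simplified reconstruction of the sibling-mask update; not a verbatim copy. */
static void update_siblings_masks(unsigned int cpuid)
{
        struct cpu_topology *cpu_topo, *cpuid_topo = &cpu_topology[cpuid];
        int cpu;

        for_each_possible_cpu(cpu) {
                cpu_topo = &cpu_topology[cpu];

                if (cpuid_topo->cluster_id != cpu_topo->cluster_id)
                        continue;

                /* Same cluster: mark the two CPUs as core siblings. */
                cpumask_set_cpu(cpuid, &cpu_topo->core_sibling);
                if (cpu != cpuid)
                        cpumask_set_cpu(cpu, &cpuid_topo->core_sibling);

                if (cpuid_topo->core_id != cpu_topo->core_id)
                        continue;

                /* Same core as well: they are hardware threads of each other. */
                cpumask_set_cpu(cpuid, &cpu_topo->thread_sibling);
                if (cpu != cpuid)
                        cpumask_set_cpu(cpu, &cpuid_topo->thread_sibling);
        }
}
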
/arch/ia64/kernel/

  smpboot.c
    173  int cpuid;  in fix_b0_for_bsp()  local
    176  cpuid = smp_processor_id();  in fix_b0_for_bsp()
    181  if (!(fix_bsp_b0 && cpuid))  in fix_b0_for_bsp()
    184  sal_boot_rendez_state[0].br[0] = sal_boot_rendez_state[cpuid].br[0];  in fix_b0_for_bsp()
    185  printk ("Fixed BSP b0 value from CPU %d\n", cpuid);  in fix_b0_for_bsp()
    356  int cpuid, phys_id, itc_master;  in smp_callin()  local
    365  cpuid = smp_processor_id();  in smp_callin()
    369  if (cpu_online(cpuid)) {  in smp_callin()
    371  phys_id, cpuid);  in smp_callin()
    380  set_numa_node(cpu_to_node_map[cpuid]);  in smp_callin()
    [all …]

/arch/x86/boot/

  cpuflags.c
    85   #define cpuid(id, a, b, c, d) cpuid_count(id, 0, a, b, c, d)  macro
    101  cpuid(0x0, &max_intel_level, &cpu_vendor[0], &cpu_vendor[2],  in get_cpuflags()
    106  cpuid(0x1, &tfms, &ignored, &cpu.flags[4],  in get_cpuflags()
    120  cpuid(0x80000000, &max_amd_level, &ignored, &ignored,  in get_cpuflags()
    125  cpuid(0x80000001, &ignored, &ignored, &cpu.flags[6],  in get_cpuflags()

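The cpuid() macro at line 85 is simply cpuid_count() with sub-leaf 0. A free-standing sketch of what such a helper looks like is shown below; the in-tree boot version additionally works around %ebx being unusable in 32-bit PIC builds, which this sketch skips.

#include <stdint.h>

/* Leaf goes in EAX, sub-leaf in ECX; results come back in EAX, EBX, ECX, EDX. */
static void cpuid_count(uint32_t leaf, uint32_t subleaf,
                        uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d)
{
        asm volatile("cpuid"
                     : "=a" (*a), "=b" (*b), "=c" (*c), "=d" (*d)
                     : "a" (leaf), "c" (subleaf));
}

#define cpuid(id, a, b, c, d) cpuid_count(id, 0, a, b, c, d)

Leaf 0 returns the highest standard leaf in EAX and the vendor string split across EBX, EDX and ECX, which is why get_cpuflags() above stores EBX into cpu_vendor[0] and ECX into cpu_vendor[2].
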
/arch/mips/sgi-ip27/

  ip27-smp.c
    67   cpuid_t cpuid;  in do_cpumask()  local
    74   cpuid = acpu->cpu_info.virtid;  in do_cpumask()
    77   cpuid_to_compact_node[cpuid] = cnode;  in do_cpumask()
    78   if (cpuid > highest)  in do_cpumask()
    79   highest = cpuid;  in do_cpumask()
    83   set_cpu_possible(cpuid, true);  in do_cpumask()
    84   alloc_cpupda(cpuid, tot_cpus_found);  in do_cpumask()

/arch/tile/kernel/

  smpboot.c
    145  int cpuid;  in start_secondary()  local
    149  cpuid = smp_processor_id();  in start_secondary()
    152  set_my_cpu_offset(__per_cpu_offset[cpuid]);  in start_secondary()
    176  if (cpumask_test_and_set_cpu(cpuid, &cpu_started)) {  in start_secondary()
    177  pr_warn("CPU#%d already started!\n", cpuid);  in start_secondary()

/arch/mips/include/asm/sn/

  agent.h
    37   SET_HUB_NIC(cpuid(), (_v))
    43   GET_HUB_NIC(cpuid())

/arch/sparc/include/asm/

  obio.h
    197  static inline unsigned int cc_get_imsk_other(int cpuid)  in cc_get_imsk_other()  argument
    203  "r" (ECSR_BASE(cpuid) | CC_IMSK),  in cc_get_imsk_other()
    208  static inline void cc_set_imsk_other(int cpuid, unsigned int mask)  in cc_set_imsk_other()  argument
    212  "r" (ECSR_BASE(cpuid) | CC_IMSK),  in cc_set_imsk_other()

/arch/x86/kernel/apic/

  ipi.c
    231  int apicid, cpuid;  in safe_smp_processor_id()  local
    240  cpuid = convert_apicid_to_cpu(apicid);  in safe_smp_processor_id()
    242  return cpuid >= 0 ? cpuid : 0;  in safe_smp_processor_id()

/arch/arm/include/asm/

  topology.h
    25   void store_cpu_topology(unsigned int cpuid);
    45   static inline void store_cpu_topology(unsigned int cpuid) { }  in store_cpu_topology()  argument

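Lines 25 and 45 of topology.h are the two halves of the usual config-gated stub pattern: a real prototype when CPU topology support is built in, and an empty static inline otherwise, so callers never need #ifdefs. A sketch of the surrounding structure follows; the guard in the real header is CONFIG_ARM_CPU_TOPOLOGY.

#ifdef CONFIG_ARM_CPU_TOPOLOGY
/* Implemented in arch/arm/kernel/topology.c (see the hits above). */
void store_cpu_topology(unsigned int cpuid);
#else
/* No topology support: calls compile away to nothing. */
static inline void store_cpu_topology(unsigned int cpuid) { }
#endif
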