Searched refs:cpu_data (Results 1 – 25 of 128) sorted by relevance

/arch/mips/include/asm/
cpu-features.h
21 #define cpu_has_tlb (cpu_data[0].options & MIPS_CPU_TLB)
24 #define cpu_has_ftlb (cpu_data[0].options & MIPS_CPU_FTLB)
27 #define cpu_has_tlbinv (cpu_data[0].options & MIPS_CPU_TLBINV)
30 #define cpu_has_segments (cpu_data[0].options & MIPS_CPU_SEGMENTS)
33 #define cpu_has_eva (cpu_data[0].options & MIPS_CPU_EVA)
36 #define cpu_has_htw (cpu_data[0].options & MIPS_CPU_HTW)
39 #define cpu_has_ldpte (cpu_data[0].options & MIPS_CPU_LDPTE)
42 #define cpu_has_rixiex (cpu_data[0].options & MIPS_CPU_RIXIEX)
45 #define cpu_has_maar (cpu_data[0].options & MIPS_CPU_MAAR)
48 #define cpu_has_rw_llb (cpu_data[0].options & MIPS_CPU_RW_LLB)
[all …]
cpu-info.h
110 extern struct cpuinfo_mips cpu_data[];
111 #define current_cpu_data cpu_data[smp_processor_id()]
112 #define raw_current_cpu_data cpu_data[raw_smp_processor_id()]
113 #define boot_cpu_data cpu_data[0]
174 struct cpuinfo_mips *infoa = &cpu_data[cpua]; in cpus_are_siblings()
175 struct cpuinfo_mips *infob = &cpu_data[cpub]; in cpus_are_siblings()
topology.h
15 #define topology_physical_package_id(cpu) (cpu_data[cpu].package)
16 #define topology_core_id(cpu) (cpu_core(&cpu_data[cpu]))
mmu_context.h
80 unsigned long asid_mask = cpu_asid_mask(&cpu_data[cpu]); in asid_version_mask()
91 #define asid_cache(cpu) (cpu_data[cpu].asid_cache)
93 (cpu_context((cpu), (mm)) & cpu_asid_mask(&cpu_data[cpu]))
106 if (!((asid += cpu_asid_inc()) & cpu_asid_mask(&cpu_data[cpu]))) { in get_new_mmu_context()
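The MIPS headers above show the array-indexed style: cpu_data[] is an array of struct cpuinfo_mips, cpu_data[0] describes the boot CPU, and current_cpu_data resolves to cpu_data[smp_processor_id()]. A minimal sketch of a feature test built on that layout, using only symbols that appear in these results (the helper functions themselves are illustrative, not kernel code):

    #include <linux/smp.h>
    #include <asm/cpu.h>            /* MIPS_CPU_* option bits */
    #include <asm/cpu-info.h>       /* struct cpuinfo_mips, cpu_data[] */

    /* Same shape as the cpu_has_* macros above, but for an arbitrary CPU n. */
    static inline int cpu_n_has_fpu(int n)
    {
            return !!(cpu_data[n].options & MIPS_CPU_FPU);
    }

    /* For the running CPU, current_cpu_data is the idiomatic spelling. */
    static inline int this_cpu_has_fpu(void)
    {
            return !!(current_cpu_data.options & MIPS_CPU_FPU);
    }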
/arch/sh/mm/
tlb-sh5.c
23 cpu_data->dtlb.entries = 64; in sh64_tlb_init()
24 cpu_data->dtlb.step = 0x10; in sh64_tlb_init()
26 cpu_data->dtlb.first = DTLB_FIXED | cpu_data->dtlb.step; in sh64_tlb_init()
27 cpu_data->dtlb.next = cpu_data->dtlb.first; in sh64_tlb_init()
29 cpu_data->dtlb.last = DTLB_FIXED | in sh64_tlb_init()
30 ((cpu_data->dtlb.entries - 1) * in sh64_tlb_init()
31 cpu_data->dtlb.step); in sh64_tlb_init()
34 cpu_data->itlb.entries = 64; in sh64_tlb_init()
35 cpu_data->itlb.step = 0x10; in sh64_tlb_init()
37 cpu_data->itlb.first = ITLB_FIXED | cpu_data->itlb.step; in sh64_tlb_init()
[all …]
cache-sh5.c
249 cpu_data->dcache.entry_mask) >> in sh64_dcache_purge_sets()
250 cpu_data->dcache.entry_shift; in sh64_dcache_purge_sets()
254 set_offset &= (cpu_data->dcache.sets - 1); in sh64_dcache_purge_sets()
256 (set_offset << cpu_data->dcache.entry_shift); in sh64_dcache_purge_sets()
265 eaddr1 = eaddr0 + cpu_data->dcache.way_size * in sh64_dcache_purge_sets()
266 cpu_data->dcache.ways; in sh64_dcache_purge_sets()
269 eaddr += cpu_data->dcache.way_size) { in sh64_dcache_purge_sets()
274 eaddr1 = eaddr0 + cpu_data->dcache.way_size * in sh64_dcache_purge_sets()
275 cpu_data->dcache.ways; in sh64_dcache_purge_sets()
278 eaddr += cpu_data->dcache.way_size) { in sh64_dcache_purge_sets()
[all …]
/arch/mips/kernel/
proc.c
40 unsigned int version = cpu_data[n].processor_id; in show_cpuinfo()
41 unsigned int fp_vers = cpu_data[n].fpu_id; in show_cpuinfo()
62 cpu_data[n].options & MIPS_CPU_FPU ? " FPU V%d.%d" : ""); in show_cpuinfo()
67 cpu_data[n].udelay_val / (500000/HZ), in show_cpuinfo()
68 (cpu_data[n].udelay_val / (5000/HZ)) % 100); in show_cpuinfo()
72 seq_printf(m, "tlb_entries\t\t: %d\n", cpu_data[n].tlbsize); in show_cpuinfo()
79 cpu_data[n].watch_reg_count); in show_cpuinfo()
80 for (i = 0; i < cpu_data[n].watch_reg_count; i++) in show_cpuinfo()
82 cpu_data[n].watch_reg_masks[i]); in show_cpuinfo()
134 cpu_data[n].srsets); in show_cpuinfo()
[all …]
smp-cps.c
78 cpu_set_cluster(&cpu_data[nvpes + v], cl); in cps_smp_setup()
79 cpu_set_core(&cpu_data[nvpes + v], c); in cps_smp_setup()
80 cpu_set_vpe_id(&cpu_data[nvpes + v], v); in cps_smp_setup()
92 set_cpu_possible(v, cpu_cluster(&cpu_data[v]) == 0); in cps_smp_setup()
93 set_cpu_present(v, cpu_cluster(&cpu_data[v]) == 0); in cps_smp_setup()
300 unsigned core = cpu_core(&cpu_data[cpu]); in cps_boot_secondary()
301 unsigned vpe_id = cpu_vpe_id(&cpu_data[cpu]); in cps_boot_secondary()
309 if (cpu_cluster(&cpu_data[cpu]) != cpu_cluster(&raw_current_cpu_data)) in cps_boot_secondary()
316 atomic_or(1 << cpu_vpe_id(&cpu_data[cpu]), &core_cfg->vpe_mask); in cps_boot_secondary()
436 core = cpu_core(&cpu_data[cpu]); in play_dead()
[all …]
/arch/sh/include/asm/
tlb_64.h
29 for (tlb = cpu_data->dtlb.first; \
30 tlb <= cpu_data->dtlb.last; \
31 tlb += cpu_data->dtlb.step)
39 for (tlb = cpu_data->itlb.first; \
40 tlb <= cpu_data->itlb.last; \
41 tlb += cpu_data->itlb.step)
processor.h
94 extern struct sh_cpuinfo cpu_data[];
95 #define boot_cpu_data cpu_data[0]
96 #define current_cpu_data cpu_data[smp_processor_id()]
97 #define raw_current_cpu_data cpu_data[raw_smp_processor_id()]
/arch/sparc/kernel/
prom_64.c
412 int proc_id = cpu_data(cpu).proc_id; in arch_find_n_match_cpu_physical_id()
526 cpu_data(cpuid).clock_tick = in fill_in_one_cpu()
530 cpu_data(cpuid).dcache_size = in fill_in_one_cpu()
533 cpu_data(cpuid).dcache_line_size = in fill_in_one_cpu()
536 cpu_data(cpuid).icache_size = in fill_in_one_cpu()
539 cpu_data(cpuid).icache_line_size = in fill_in_one_cpu()
542 cpu_data(cpuid).ecache_size = in fill_in_one_cpu()
544 cpu_data(cpuid).ecache_line_size = in fill_in_one_cpu()
546 if (!cpu_data(cpuid).ecache_size || in fill_in_one_cpu()
547 !cpu_data(cpuid).ecache_line_size) { in fill_in_one_cpu()
[all …]
smp_32.c
63 cpu_data(id).udelay_val = loops_per_jiffy; in smp_store_cpu_info()
66 cpu_data(id).clock_tick = prom_getintdefault(cpu_node, in smp_store_cpu_info()
68 cpu_data(id).prom_node = cpu_node; in smp_store_cpu_info()
75 cpu_data(id).mid = mid; in smp_store_cpu_info()
85 bogosum += cpu_data(cpu).udelay_val; in smp_cpus_done()
385 cpu_data(i).udelay_val/(500000/HZ), in smp_bogo()
386 (cpu_data(i).udelay_val/(5000/HZ))%100); in smp_bogo()
/arch/alpha/include/asm/
mmu_context.h
93 #define cpu_last_asn(cpuid) (cpu_data[cpuid].last_asn)
145 cpu_data[cpu].asn_lock = 1; in ev5_switch_mm()
156 cpu_data[cpu].need_new_asn = 1; in ev5_switch_mm()
191 cpu_data[cpu].asn_lock = 0; \
193 if (cpu_data[cpu].need_new_asn) { \
195 cpu_data[cpu].need_new_asn = 0; \
/arch/ia64/mm/
contig.c
85 static void *cpu_data; variable
109 memcpy(cpu_data, src, __per_cpu_end - __per_cpu_start); in per_cpu_init()
110 __per_cpu_offset[cpu] = (char *)cpu_data - __per_cpu_start; in per_cpu_init()
123 ia64_set_kr(IA64_KR_PER_CPU_DATA, __pa(cpu_data) - in per_cpu_init()
126 cpu_data += PERCPU_PAGE_SIZE; in per_cpu_init()
135 cpu_data = __alloc_bootmem(PERCPU_PAGE_SIZE * num_possible_cpus(), in alloc_per_cpu_data()
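Note that in contig.c above, cpu_data is not the ia64 cpu_data(cpu) accessor used in smpboot.c and topology.c below; it is a file-static bootmem cursor that walks the per-CPU areas during early boot. A compressed, illustrative restatement of the pattern those hits show (the function name is made up, and src is assumed to be the start of the per-CPU template section):

    #include <linux/string.h>
    #include <linux/percpu.h>
    #include <asm/sections.h>       /* __per_cpu_start, __per_cpu_end */

    static void *cpu_data;                          /* bootmem cursor */

    static void sketch_setup_one_cpu(int cpu, void *src)
    {
            /* Copy the per-CPU template, record this CPU's offset,
             * then advance the cursor by one per-CPU page. */
            memcpy(cpu_data, src, __per_cpu_end - __per_cpu_start);
            __per_cpu_offset[cpu] = (char *)cpu_data - __per_cpu_start;
            cpu_data += PERCPU_PAGE_SIZE;
    }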
/arch/ia64/kernel/
smpboot.c
421 last_cpuinfo = cpu_data(cpuid - 1); in smp_callin()
592 if (cpu_data(cpu)->threads_per_core == 1 && in remove_siblinginfo()
593 cpu_data(cpu)->cores_per_socket == 1) { in remove_siblinginfo()
707 bogosum += cpu_data(cpu)->loops_per_jiffy; in smp_cpus_done()
719 if ((cpu_data(cpu)->socket_id == cpu_data(i)->socket_id)) { in set_cpu_sibling_map()
722 if (cpu_data(cpu)->core_id == cpu_data(i)->core_id) { in set_cpu_sibling_map()
755 if (cpu_data(cpu)->threads_per_core == 1 && in __cpu_up()
756 cpu_data(cpu)->cores_per_socket == 1) { in __cpu_up()
851 if ((cpu_data(j)->socket_id == cpu_data(i)->socket_id)) { in is_multithreading_enabled()
852 if (cpu_data(j)->core_id == cpu_data(i)->core_id) in is_multithreading_enabled()
topology.c
35 if (cpu_data(num)->socket_id == -1) in arch_fix_phys_package_id()
36 cpu_data(num)->socket_id = slot; in arch_fix_phys_package_id()
149 if (cpu_data(cpu)->threads_per_core <= 1 && in cache_shared_cpu_map_setup()
150 cpu_data(cpu)->cores_per_socket <= 1) { in cache_shared_cpu_map_setup()
164 if (cpu_data(cpu)->socket_id == cpu_data(j)->socket_id in cache_shared_cpu_map_setup()
165 && cpu_data(j)->core_id == csi.log1_cid in cache_shared_cpu_map_setup()
166 && cpu_data(j)->thread_id == csi.log1_tid) in cache_shared_cpu_map_setup()
/arch/m32r/include/asm/
processor.h
47 extern struct cpuinfo_m32r cpu_data[];
48 #define current_cpu_data cpu_data[smp_processor_id()]
50 #define cpu_data (&boot_cpu_data) macro
/arch/mips/loongson64/loongson-3/
smp.c
322 cpu_set_core(&cpu_data[cpu], in loongson3_init_secondary()
324 cpu_data[cpu].package = in loongson3_init_secondary()
337 if (cpu_data[cpu].package) in loongson3_init_secondary()
389 cpu_set_core(&cpu_data[0], in loongson3_smp_setup()
391 cpu_data[0].package = cpu_logical_map(0) / loongson_sysconf.cores_per_package; in loongson3_smp_setup()
490 [sets] "r" (cpu_data[smp_processor_id()].dcache.sets)); in loongson3a_r1_play_dead()
572 [sets] "r" (cpu_data[smp_processor_id()].dcache.sets), in loongson3a_r2r3_play_dead()
573 [vsets] "r" (cpu_data[smp_processor_id()].vcache.sets)); in loongson3a_r2r3_play_dead()
635 [sets] "r" (cpu_data[smp_processor_id()].dcache.sets)); in loongson3b_play_dead()
702 uint64_t core_id = cpu_core(&cpu_data[cpu]); in loongson3_disable_clock()
[all …]
/arch/arm64/kernel/
cpuinfo.c
42 DEFINE_PER_CPU(struct cpuinfo_arm64, cpu_data);
130 struct cpuinfo_arm64 *cpuinfo = &per_cpu(cpu_data, i); in c_show()
252 struct cpuinfo_arm64 *info = &per_cpu(cpu_data, cpu); in cpuid_cpu_online()
272 struct cpuinfo_arm64 *info = &per_cpu(cpu_data, cpu); in cpuid_cpu_offline()
290 struct cpuinfo_arm64 *info = &per_cpu(cpu_data, cpu); in cpuinfo_regs_init()
368 struct cpuinfo_arm64 *info = this_cpu_ptr(&cpu_data); in cpuinfo_store_cpu()
375 struct cpuinfo_arm64 *info = &per_cpu(cpu_data, 0); in cpuinfo_store_boot_cpu()
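The arm64 (and parisc/metag) hits show the per-CPU-variable style instead of an array: cpu_data is declared with DEFINE_PER_CPU and read through per_cpu() or this_cpu_ptr(). A small sketch of that access pattern, mirroring the declaration from cpuinfo.c line 42 above (the reg_midr field and the helper names are assumptions, not shown in these results):

    #include <linux/types.h>
    #include <linux/percpu.h>
    #include <linux/smp.h>
    #include <asm/cpu.h>            /* struct cpuinfo_arm64 */

    DEFINE_PER_CPU(struct cpuinfo_arm64, cpu_data);

    /* Read another CPU's recorded MIDR (field name assumed). */
    static u32 midr_of(int cpu)
    {
            struct cpuinfo_arm64 *info = &per_cpu(cpu_data, cpu);

            return info->reg_midr;
    }

    /* For the local CPU, this_cpu_ptr() avoids the explicit CPU id. */
    static u32 midr_here(void)
    {
            return this_cpu_ptr(&cpu_data)->reg_midr;
    }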
/arch/x86/include/asm/
topology.h
107 #define topology_logical_package_id(cpu) (cpu_data(cpu).logical_proc_id)
108 #define topology_physical_package_id(cpu) (cpu_data(cpu).phys_proc_id)
109 #define topology_core_id(cpu) (cpu_data(cpu).cpu_core_id)
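On x86, sparc and ia64 the same name is a function-like accessor instead: cpu_data(cpu) yields the per-CPU cpuinfo (by value on x86/sparc, by pointer on ia64, as the -> uses above show), and the topology_* macros are built on it. A short sketch that uses only the macros shown in these results (the printing function itself is illustrative):

    #include <linux/smp.h>
    #include <linux/cpumask.h>
    #include <linux/printk.h>
    #include <linux/topology.h>

    /* Print the physical package and core id of every online CPU. */
    static void sketch_print_core_ids(void)
    {
            int cpu;

            for_each_online_cpu(cpu)
                    pr_info("cpu%d: package %d core %d\n", cpu,
                            topology_physical_package_id(cpu),
                            topology_core_id(cpu));
    }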
/arch/alpha/kernel/
smp.c
56 struct cpuinfo_alpha cpu_data[NR_CPUS]; variable
57 EXPORT_SYMBOL(cpu_data);
84 cpu_data[cpuid].loops_per_jiffy = loops_per_jiffy; in smp_store_cpu_info()
85 cpu_data[cpuid].last_asn = ASN_FIRST_VERSION; in smp_store_cpu_info()
86 cpu_data[cpuid].need_new_asn = 0; in smp_store_cpu_info()
87 cpu_data[cpuid].asn_lock = 0; in smp_store_cpu_info()
96 cpu_data[cpuid].prof_counter = 1; in smp_setup_percpu_timer()
97 cpu_data[cpuid].prof_multiplier = 1; in smp_setup_percpu_timer()
494 bogosum += cpu_data[cpu].loops_per_jiffy; in smp_cpus_done()
567 cpu_data[this_cpu].ipi_count++; in handle_ipi()
[all …]
/arch/parisc/kernel/
time.c
68 struct cpuinfo_parisc *cpuinfo = &per_cpu(cpu_data, cpu); in timer_interrupt()
163 per_cpu(cpu_data, cpu).it_value = next_tick; in start_cpu_itimer()
255 cpu0_loc = per_cpu(cpu_data, 0).cpu_loc; in init_cr16_clocksource()
261 (cpu0_loc == per_cpu(cpu_data, cpu).cpu_loc)) in init_cr16_clocksource()
/arch/ia64/include/asm/
topology.h
53 #define topology_physical_package_id(cpu) (cpu_data(cpu)->socket_id)
54 #define topology_core_id(cpu) (cpu_data(cpu)->core_id)
/arch/mn10300/include/asm/
processor.h
60 extern struct mn10300_cpuinfo cpu_data[];
61 #define current_cpu_data cpu_data[smp_processor_id()]
63 #define cpu_data &boot_cpu_data macro
/arch/metag/kernel/
topology.c
19 DEFINE_PER_CPU(struct cpuinfo_metag, cpu_data);
54 struct cpuinfo_metag *cpuinfo = &per_cpu(cpu_data, i); in topology_init()
