Searched refs:boot_cpu_data (Results 1 – 25 of 137) sorted by relevance


/arch/sh/kernel/cpu/sh4/
probe.c:35 boot_cpu_data.icache.way_incr = (1 << 13); in cpu_probe()
36 boot_cpu_data.icache.entry_shift = 5; in cpu_probe()
37 boot_cpu_data.icache.sets = 256; in cpu_probe()
38 boot_cpu_data.icache.ways = 1; in cpu_probe()
39 boot_cpu_data.icache.linesz = L1_CACHE_BYTES; in cpu_probe()
44 boot_cpu_data.dcache.way_incr = (1 << 14); in cpu_probe()
45 boot_cpu_data.dcache.entry_shift = 5; in cpu_probe()
46 boot_cpu_data.dcache.sets = 512; in cpu_probe()
47 boot_cpu_data.dcache.ways = 1; in cpu_probe()
48 boot_cpu_data.dcache.linesz = L1_CACHE_BYTES; in cpu_probe()
[all …]
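
Note: the SH-4 hits above are cpu_probe() filling in the boot CPU's cache geometry. A condensed sketch of the pattern, using only fields and values visible in the hits (the detection branches that choose these values are omitted):

    /* One direct-mapped way: way_incr = sets << entry_shift bytes. */
    boot_cpu_data.icache.way_incr    = 1 << 13;          /* 8 KiB I-cache way */
    boot_cpu_data.icache.entry_shift = 5;                /* 32-byte lines */
    boot_cpu_data.icache.sets        = 256;
    boot_cpu_data.icache.ways        = 1;
    boot_cpu_data.icache.linesz      = L1_CACHE_BYTES;

    boot_cpu_data.dcache.way_incr    = 1 << 14;          /* 16 KiB D-cache way */
    boot_cpu_data.dcache.entry_shift = 5;
    boot_cpu_data.dcache.sets        = 512;
    boot_cpu_data.dcache.ways        = 1;
    boot_cpu_data.dcache.linesz      = L1_CACHE_BYTES;

The generic SH cache code later derives dcache.n_aliases from these figures, which is what the /arch/sh/mm/cache.c hits further down test.
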
/arch/sh/kernel/cpu/sh2a/
probe.c:15 boot_cpu_data.family = CPU_FAMILY_SH2A; in cpu_probe()
18 boot_cpu_data.flags |= CPU_HAS_OP32; in cpu_probe()
21 boot_cpu_data.type = CPU_SH7201; in cpu_probe()
22 boot_cpu_data.flags |= CPU_HAS_FPU; in cpu_probe()
24 boot_cpu_data.type = CPU_SH7203; in cpu_probe()
25 boot_cpu_data.flags |= CPU_HAS_FPU; in cpu_probe()
27 boot_cpu_data.type = CPU_SH7263; in cpu_probe()
28 boot_cpu_data.flags |= CPU_HAS_FPU; in cpu_probe()
30 boot_cpu_data.type = CPU_SH7264; in cpu_probe()
31 boot_cpu_data.flags |= CPU_HAS_FPU; in cpu_probe()
[all …]
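
Note: in the SH-2A probe the same structure carries identity rather than geometry: the family is set once, then the matched part number and its feature flags. A reduced sketch of that pattern (the probe condition is a placeholder, not taken from the hits):

    boot_cpu_data.family = CPU_FAMILY_SH2A;
    boot_cpu_data.flags |= CPU_HAS_OP32;            /* 32-bit opcode support */

    if (part_is_sh7201) {                           /* placeholder for the real part-detection test */
            boot_cpu_data.type   = CPU_SH7201;
            boot_cpu_data.flags |= CPU_HAS_FPU;
    }
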
/arch/sh/kernel/cpu/sh3/
probe.c:50 boot_cpu_data.dcache.ways = 4; in cpu_probe()
51 boot_cpu_data.dcache.entry_shift = 4; in cpu_probe()
52 boot_cpu_data.dcache.linesz = L1_CACHE_BYTES; in cpu_probe()
53 boot_cpu_data.dcache.flags = 0; in cpu_probe()
60 boot_cpu_data.dcache.way_incr = (1 << 11); in cpu_probe()
61 boot_cpu_data.dcache.entry_mask = 0x7f0; in cpu_probe()
62 boot_cpu_data.dcache.sets = 128; in cpu_probe()
63 boot_cpu_data.type = CPU_SH7708; in cpu_probe()
65 boot_cpu_data.flags |= CPU_HAS_MMU_PAGE_ASSOC; in cpu_probe()
67 boot_cpu_data.dcache.way_incr = (1 << 12); in cpu_probe()
[all …]
/arch/sh/kernel/cpu/sh2/
probe.c:33 boot_cpu_data.type = CPU_SH7619; in cpu_probe()
34 boot_cpu_data.dcache.ways = 4; in cpu_probe()
35 boot_cpu_data.dcache.way_incr = (1<<12); in cpu_probe()
36 boot_cpu_data.dcache.sets = 256; in cpu_probe()
37 boot_cpu_data.dcache.entry_shift = 4; in cpu_probe()
38 boot_cpu_data.dcache.linesz = L1_CACHE_BYTES; in cpu_probe()
39 boot_cpu_data.dcache.flags = 0; in cpu_probe()
51 boot_cpu_data.type = CPU_J2; in cpu_probe()
56 boot_cpu_data.dcache.ways = 1; in cpu_probe()
57 boot_cpu_data.dcache.sets = 256; in cpu_probe()
[all …]
/arch/parisc/kernel/
processor.c:32 struct system_cpuinfo_parisc boot_cpu_data __ro_after_init;
33 EXPORT_SYMBOL(boot_cpu_data);
92 if (boot_cpu_data.cpu_count > 0) { in processor_probe()
101 cpuid = boot_cpu_data.cpu_count; in processor_probe()
150 boot_cpu_data.cpu_count--; in processor_probe()
160 boot_cpu_data.cpu_count++; in processor_probe()
232 memset(&boot_cpu_data, 0, sizeof(boot_cpu_data)); in collect_boot_cpu_data()
237 boot_cpu_data.cpu_hz = 100 * PAGE0->mem_10msec; /* Hz of this PARISC */ in collect_boot_cpu_data()
240 #define p ((unsigned long *)&boot_cpu_data.pdc.model) in collect_boot_cpu_data()
241 if (pdc_model_info(&boot_cpu_data.pdc.model) == PDC_OK) { in collect_boot_cpu_data()
[all …]
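
Note: unlike x86, parisc keeps a single system-wide struct system_cpuinfo_parisc in boot_cpu_data, zeroed and refilled from firmware (PDC) data at boot. A trimmed sketch of collect_boot_cpu_data(), following the hits (error handling and the remaining PDC queries are omitted):

    struct system_cpuinfo_parisc boot_cpu_data __ro_after_init;
    EXPORT_SYMBOL(boot_cpu_data);

    void __init collect_boot_cpu_data(void)
    {
            memset(&boot_cpu_data, 0, sizeof(boot_cpu_data));

            /* PAGE0->mem_10msec is the clock-tick count over 10 ms, so x100 gives Hz */
            boot_cpu_data.cpu_hz = 100 * PAGE0->mem_10msec;

            if (pdc_model_info(&boot_cpu_data.pdc.model) == PDC_OK) {
                    /* cache the firmware model words for later decoding */
            }
    }
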
perf.c:494 if (boot_cpu_data.cpu_type == pcxu || in perf_init()
495 boot_cpu_data.cpu_type == pcxu_) { in perf_init()
497 } else if (boot_cpu_data.cpu_type == pcxw || in perf_init()
498 boot_cpu_data.cpu_type == pcxw_ || in perf_init()
499 boot_cpu_data.cpu_type == pcxw2 || in perf_init()
500 boot_cpu_data.cpu_type == mako || in perf_init()
501 boot_cpu_data.cpu_type == mako2) { in perf_init()
503 if (boot_cpu_data.cpu_type == pcxw2 || in perf_init()
504 boot_cpu_data.cpu_type == mako || in perf_init()
505 boot_cpu_data.cpu_type == mako2) in perf_init()
alternative.c:41 (((boot_cpu_data.cpu_type > pcxw_) && in apply_alternatives()
42 ((boot_cpu_data.pdc.capabilities & PDC_MODEL_IOPDIR_FDC) == 0)) in apply_alternatives()
70 if (boot_cpu_data.cpu_type >= pcxu) /* >= pa2.0 ? */ in apply_alternatives()
setup.c:78 switch (boot_cpu_data.cpu_type) { in dma_ops_init()
201 switch (boot_cpu_data.cpu_type) { in parisc_proc_mkdir()
324 boot_cpu_data.cpu_name, in parisc_init()
325 boot_cpu_data.cpu_hz / 1000000, in parisc_init()
326 boot_cpu_data.cpu_hz % 1000000 ); in parisc_init()
/arch/sh/mm/
cache.c:64 if (boot_cpu_data.dcache.n_aliases && page_mapcount(page) && in copy_to_user_page()
71 if (boot_cpu_data.dcache.n_aliases) in copy_to_user_page()
83 if (boot_cpu_data.dcache.n_aliases && page_mapcount(page) && in copy_from_user_page()
90 if (boot_cpu_data.dcache.n_aliases) in copy_from_user_page()
102 if (boot_cpu_data.dcache.n_aliases && page_mapcount(from) && in copy_user_highpage()
142 if (!boot_cpu_data.dcache.n_aliases) in __update_cache()
158 if (boot_cpu_data.dcache.n_aliases && page_mapcount(page) && in __flush_anon_page()
179 if (boot_cpu_data.dcache.n_aliases == 0) in flush_cache_mm()
187 if (boot_cpu_data.dcache.n_aliases == 0) in flush_cache_dup_mm()
260 boot_cpu_data.icache.ways, in emit_cache_params()
[all …]
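
Note: a recurring pattern in arch/sh/mm/cache.c is that boot_cpu_data.dcache.n_aliases == 0 means the virtually indexed D-cache cannot create aliases, so most flush paths can return immediately. A minimal sketch modelled on the flush_cache_mm() hit (the cross-CPU flush helper is assumed from the surrounding file, not from the hits):

    void flush_cache_mm(struct mm_struct *mm)
    {
            if (boot_cpu_data.dcache.n_aliases == 0)
                    return;                 /* no aliasing possible, nothing to flush */

            cacheop_on_each_cpu(local_flush_cache_mm, mm, 1);
    }
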
cache-shx3.c:27 if (boot_cpu_data.dcache.n_aliases || boot_cpu_data.icache.n_aliases) { in shx3_cache_init()
30 boot_cpu_data.icache.n_aliases = 0; in shx3_cache_init()
31 boot_cpu_data.dcache.n_aliases = 0; in shx3_cache_init()
/arch/x86/include/asm/
geode.h:16 return ((boot_cpu_data.x86_vendor == X86_VENDOR_NSC) && in is_geode_gx()
17 (boot_cpu_data.x86 == 5) && in is_geode_gx()
18 (boot_cpu_data.x86_model == 5)); in is_geode_gx()
23 return ((boot_cpu_data.x86_vendor == X86_VENDOR_AMD) && in is_geode_lx()
24 (boot_cpu_data.x86 == 5) && in is_geode_lx()
25 (boot_cpu_data.x86_model == 10)); in is_geode_lx()
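Note: geode.h shows the classic boot-CPU fingerprint on x86: compare vendor, family, and model of boot_cpu_data to identify a specific part. Assembled from the is_geode_lx() hits above, the whole helper is just:

    static inline int is_geode_lx(void)
    {
            /* AMD Geode LX: vendor AMD, family 5, model 10 */
            return ((boot_cpu_data.x86_vendor == X86_VENDOR_AMD) &&
                    (boot_cpu_data.x86 == 5) &&
                    (boot_cpu_data.x86_model == 10));
    }
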
cpufeature.h:144 #define boot_cpu_has(bit) cpu_has(&boot_cpu_data, bit)
152 set_cpu_cap(&boot_cpu_data, bit); \
192 [cap_byte] "m" (((const char *)boot_cpu_data.x86_capability)[bit >> 3]) in _static_cpu_has()
213 #define boot_cpu_has_bug(bit) cpu_has_bug(&boot_cpu_data, (bit))
214 #define boot_cpu_set_bug(bit) set_cpu_cap(&boot_cpu_data, (bit))
220 #define CPU_FEATURE_TYPEVAL boot_cpu_data.x86_vendor, boot_cpu_data.x86, \
221 boot_cpu_data.x86_model
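Note: boot_cpu_has(bit) is simply cpu_has(&boot_cpu_data, bit), so testing a feature on the boot CPU reads like the following (the init function is an illustrative stand-in, not from the hits):

    #include <linux/init.h>
    #include <linux/errno.h>
    #include <asm/cpufeature.h>

    static int __init example_init(void)
    {
            /* Refuse to load unless the boot CPU advertises AES-NI. */
            if (!boot_cpu_has(X86_FEATURE_AES))
                    return -ENODEV;

            return 0;
    }
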
amd_nb.h:106 if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD) in amd_gart_present()
110 if (boot_cpu_data.x86 == 0xf || boot_cpu_data.x86 == 0x10 || in amd_gart_present()
111 (boot_cpu_data.x86 == 0x15 && boot_cpu_data.x86_model < 0x10)) in amd_gart_present()
acpi.h:77 if (boot_cpu_data.x86 == 0x0F && in acpi_processor_cstate_check()
78 boot_cpu_data.x86_vendor == X86_VENDOR_AMD && in acpi_processor_cstate_check()
79 boot_cpu_data.x86_model <= 0x05 && in acpi_processor_cstate_check()
80 boot_cpu_data.x86_stepping < 0x0A) in acpi_processor_cstate_check()
virtext.h:97 if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD && in cpu_has_svm()
98 boot_cpu_data.x86_vendor != X86_VENDOR_HYGON) { in cpu_has_svm()
/arch/um/include/asm/
cpufeature.h:55 #define boot_cpu_has(bit) cpu_has(&boot_cpu_data, bit)
62 set_cpu_cap(&boot_cpu_data, bit); \
126 [cap_byte] "m" (((const char *)boot_cpu_data.x86_capability)[bit >> 3]) in _static_cpu_has()
146 #define boot_cpu_has_bug(bit) cpu_has_bug(&boot_cpu_data, (bit))
147 #define boot_cpu_set_bug(bit) set_cpu_cap(&boot_cpu_data, (bit))
153 #define CPU_FEATURE_TYPEVAL boot_cpu_data.x86_vendor, boot_cpu_data.x86, \
154 boot_cpu_data.x86_model
processor-generic.h:102 extern struct cpuinfo_um boot_cpu_data;
104 #define cpu_data (&boot_cpu_data)
105 #define current_cpu_data boot_cpu_data
106 #define cache_line_size() (boot_cpu_data.cache_alignment)
/arch/x86/kernel/cpu/
perfctr-watchdog.c:48 switch (boot_cpu_data.x86_vendor) { in nmi_perfctr_msr_to_bit()
55 if (cpu_has(&boot_cpu_data, X86_FEATURE_ARCH_PERFMON)) in nmi_perfctr_msr_to_bit()
58 switch (boot_cpu_data.x86) { in nmi_perfctr_msr_to_bit()
81 switch (boot_cpu_data.x86_vendor) { in nmi_evntsel_msr_to_bit()
88 if (cpu_has(&boot_cpu_data, X86_FEATURE_ARCH_PERFMON)) in nmi_evntsel_msr_to_bit()
91 switch (boot_cpu_data.x86) { in nmi_evntsel_msr_to_bit()
match.c:40 struct cpuinfo_x86 *c = &boot_cpu_data; in x86_match_cpu()
65 struct cpuinfo_x86 *c = &boot_cpu_data; in x86_match_cpu_with_stepping()
86 if (!res || res->x86_microcode_rev > boot_cpu_data.microcode) in x86_cpu_has_min_microcode_rev()
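Note: x86_match_cpu() walks a zero-terminated table of struct x86_cpu_id entries and returns the first one matching boot_cpu_data's vendor, family, model, and feature bits. A hypothetical caller, assuming the X86_MATCH_VENDOR_FAM() helper from <asm/cpu_device_id.h> in current kernels:

    #include <linux/types.h>
    #include <asm/cpu_device_id.h>

    static const struct x86_cpu_id example_ids[] = {
            X86_MATCH_VENDOR_FAM(INTEL, 6, NULL),   /* any Intel family-6 CPU */
            {}                                      /* zero entry terminates the table */
    };

    static bool example_cpu_supported(void)
    {
            return x86_match_cpu(example_ids) != NULL;
    }
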
/arch/x86/crypto/
twofish_glue_3way.c:107 if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL) in is_blacklisted_cpu()
110 if (boot_cpu_data.x86 == 0x06 && in is_blacklisted_cpu()
111 (boot_cpu_data.x86_model == 0x1c || in is_blacklisted_cpu()
112 boot_cpu_data.x86_model == 0x26 || in is_blacklisted_cpu()
113 boot_cpu_data.x86_model == 0x36)) { in is_blacklisted_cpu()
126 if (boot_cpu_data.x86 == 0x0f) { in is_blacklisted_cpu()
/arch/x86/kernel/
amd_nb.c:248 if (boot_cpu_data.x86_vendor == X86_VENDOR_HYGON) { in amd_cache_northbridges()
320 if (boot_cpu_data.x86 == 0x10 && in amd_cache_northbridges()
321 boot_cpu_data.x86_model >= 0x8 && in amd_cache_northbridges()
322 (boot_cpu_data.x86_model > 0x9 || in amd_cache_northbridges()
323 boot_cpu_data.x86_stepping >= 0x1)) in amd_cache_northbridges()
326 if (boot_cpu_data.x86 == 0x15) in amd_cache_northbridges()
330 if (boot_cpu_data.x86 == 0x15) in amd_cache_northbridges()
347 if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD && in early_is_amd_nb()
348 boot_cpu_data.x86_vendor != X86_VENDOR_HYGON) in early_is_amd_nb()
351 if (boot_cpu_data.x86_vendor == X86_VENDOR_HYGON) in early_is_amd_nb()
[all …]
/arch/x86/mm/
maccess.c:23 if (!boot_cpu_data.x86_virt_bits) in copy_from_kernel_nofault_allowed()
26 return __is_canonical_address(vaddr, boot_cpu_data.x86_virt_bits); in copy_from_kernel_nofault_allowed()
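Note: copy_from_kernel_nofault_allowed() only permits probing addresses that are canonical for the boot CPU's virtual address width. A condensed sketch following the hits (the userspace-range check is omitted, and the early-boot behaviour is summarized from memory rather than from the hits):

    bool copy_from_kernel_nofault_allowed(const void *unsafe_src, size_t size)
    {
            unsigned long vaddr = (unsigned long)unsafe_src;

            /* Before x86_virt_bits is known (very early boot), allow everything. */
            if (!boot_cpu_data.x86_virt_bits)
                    return true;

            /* Otherwise only canonical addresses may be probed. */
            return __is_canonical_address(vaddr, boot_cpu_data.x86_virt_bits);
    }
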
/arch/um/kernel/
um_arch.c:56 struct cpuinfo_um boot_cpu_data = { variable
63 EXPORT_SYMBOL(boot_cpu_data);
81 seq_printf(m, "fpu\t\t: %s\n", cpu_has(&boot_cpu_data, X86_FEATURE_FPU) ? "yes" : "no"); in show_cpuinfo()
84 if (cpu_has(&boot_cpu_data, i) && (x86_cap_flags[i] != NULL)) in show_cpuinfo()
87 seq_printf(m, "cache_alignment\t: %d\n", boot_cpu_data.cache_alignment); in show_cpuinfo()
286 set_cpu_cap(&boot_cpu_data, i); in parse_host_cpu_flags()
299 boot_cpu_data.cache_alignment = res; in parse_cache_line()
301 boot_cpu_data.cache_alignment = L1_CACHE_BYTES; in parse_cache_line()
/arch/x86/kernel/cpu/mtrr/
mtrr.c:330 (boot_cpu_data.x86_phys_bits - PAGE_SHIFT)) { in mtrr_add_page()
709 if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL && in mtrr_bp_init()
710 boot_cpu_data.x86 == 0xF && in mtrr_bp_init()
711 boot_cpu_data.x86_model == 0x3 && in mtrr_bp_init()
712 (boot_cpu_data.x86_stepping == 0x3 || in mtrr_bp_init()
713 boot_cpu_data.x86_stepping == 0x4)) in mtrr_bp_init()
718 } else if (boot_cpu_data.x86_vendor == X86_VENDOR_CENTAUR && in mtrr_bp_init()
719 boot_cpu_data.x86 == 6) { in mtrr_bp_init()
729 switch (boot_cpu_data.x86_vendor) { in mtrr_bp_init()
/arch/x86/kvm/mmu/
spte.c:224 if (likely(boot_cpu_data.extended_cpuid_level >= 0x80000008)) in kvm_get_shadow_phys_bits()
232 return boot_cpu_data.x86_phys_bits; in kvm_get_shadow_phys_bits()
339 low_phys_bits = boot_cpu_data.x86_phys_bits; in kvm_mmu_reset_all_pte_masks()
341 !WARN_ON_ONCE(boot_cpu_data.x86_cache_bits >= in kvm_mmu_reset_all_pte_masks()
343 low_phys_bits = boot_cpu_data.x86_cache_bits in kvm_mmu_reset_all_pte_masks()
346 rsvd_bits(low_phys_bits, boot_cpu_data.x86_cache_bits - 1); in kvm_mmu_reset_all_pte_masks()
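
Note: in KVM, boot_cpu_data.x86_phys_bits may already have been reduced to make room for MKTME/SME key bits, so kvm_get_shadow_phys_bits() prefers to re-read CPUID leaf 0x80000008 (MAXPHYADDR in EAX[7:0]) and only falls back to the cached value. A condensed sketch of the logic in the hits:

    static u8 kvm_get_shadow_phys_bits(void)
    {
            /* CPUID 0x80000008 EAX[7:0] is the physical address width. */
            if (likely(boot_cpu_data.extended_cpuid_level >= 0x80000008))
                    return cpuid_eax(0x80000008) & 0xff;

            /* No MAXPHYADDR leaf: fall back to the cached (possibly reduced) value. */
            return boot_cpu_data.x86_phys_bits;
    }
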
