/arch/x86/kernel/acpi/ |
cstate.c |
    37  else if (c->x86_vendor == X86_VENDOR_INTEL) {    in acpi_processor_power_init_bm_check()
    52  if (c->x86_vendor == X86_VENDOR_INTEL &&    in acpi_processor_power_init_bm_check()
    62  if (c->x86_vendor == X86_VENDOR_CENTAUR) {    in acpi_processor_power_init_bm_check()
    68  if (c->x86_vendor == X86_VENDOR_ZHAOXIN) {    in acpi_processor_power_init_bm_check()
   177  if ((c->x86_vendor == X86_VENDOR_INTEL) && !(reg->access_size & 0x2))    in acpi_processor_ffh_cstate_probe()
   199  if (c->x86_vendor != X86_VENDOR_INTEL &&    in ffh_cstate_init()
   200  c->x86_vendor != X86_VENDOR_AMD)    in ffh_cstate_init()
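
The ffh_cstate_init() hits at lines 199-200 are a vendor allow-list applied before FFH (MWAIT-based) C-state entry is wired up. A minimal sketch of that gate, assuming only the two vendors shown in the listing; this is not the real init path, just its shape:

```c
#include <linux/errno.h>
#include <linux/init.h>
#include <asm/processor.h>

/* Sketch of the vendor gate seen at cstate.c:199-200. */
static int __init ffh_vendor_gate(void)
{
	struct cpuinfo_x86 *c = &boot_cpu_data;

	/* Only vendors known to implement MONITOR/MWAIT hints as expected. */
	if (c->x86_vendor != X86_VENDOR_INTEL &&
	    c->x86_vendor != X86_VENDOR_AMD)
		return -ENODEV;	/* leave FFH C-state entry disabled */

	return 0;
}
```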
|
/arch/x86/include/asm/ |
geode.h |
    16  return ((boot_cpu_data.x86_vendor == X86_VENDOR_NSC) &&    in is_geode_gx()
    23  return ((boot_cpu_data.x86_vendor == X86_VENDOR_AMD) &&    in is_geode_lx()
|
acpi.h |
    79  boot_cpu_data.x86_vendor == X86_VENDOR_AMD &&    in acpi_processor_cstate_check()
    92  return (c->x86_vendor == X86_VENDOR_INTEL ||    in arch_has_acpi_pdc()
    93  c->x86_vendor == X86_VENDOR_CENTAUR);    in arch_has_acpi_pdc()
|
virtext.h |
    91  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&    in cpu_has_svm()
    92  boot_cpu_data.x86_vendor != X86_VENDOR_HYGON) {    in cpu_has_svm()
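
cpu_has_svm() filters by vendor (AMD or Hygon) before it looks at CPUID at all. A condensed, hedged sketch of that shape, using boot_cpu_has() where the real helper does additional CPUID-level checks:

```c
#include <linux/types.h>
#include <asm/cpufeature.h>
#include <asm/processor.h>

/* Simplified vendor + feature check in the spirit of cpu_has_svm(). */
static bool svm_vendor_check(const char **msg)
{
	if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&
	    boot_cpu_data.x86_vendor != X86_VENDOR_HYGON) {
		if (msg)
			*msg = "not an AMD or Hygon CPU";
		return false;
	}

	/* The real helper also validates the extended CPUID level first. */
	return boot_cpu_has(X86_FEATURE_SVM);
}
```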
|
cpu_device_id.h |
   178  u8 x86_vendor;    member
   186  .x86_vendor = X86_VENDOR_INTEL, \
|
amd_nb.h |
   106  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD)    in amd_gart_present()
|
cpufeature.h |
   245  #define CPU_FEATURE_TYPEVAL boot_cpu_data.x86_vendor, boot_cpu_data.x86, \
|
/arch/x86/kernel/cpu/ |
match.c |
    45  if (m->vendor != X86_VENDOR_ANY && c->x86_vendor != m->vendor)    in x86_match_cpu()
    69  if (c->x86_vendor != m->x86_vendor)    in x86_match_cpu_with_stepping()
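
The struct x86_cpu_id/x86_match_cpu() pair referenced here (defined via cpu_device_id.h above) is the table-driven alternative to open-coded x86_vendor comparisons: the matcher walks a zero-terminated table and compares vendor, family, model and feature against boot_cpu_data. A hedged sketch of how a caller typically uses it; the wildcard values (X86_MODEL_ANY, X86_FEATURE_ANY) come from mod_devicetable.h, and newer kernels also offer X86_MATCH_* convenience macros not used here to stay version-neutral:

```c
#include <linux/types.h>
#include <linux/mod_devicetable.h>
#include <asm/cpu_device_id.h>
#include <asm/processor.h>

/* Match any family-6 Intel part; model and feature are left unconstrained. */
static const struct x86_cpu_id demo_cpu_ids[] = {
	{ .vendor = X86_VENDOR_INTEL, .family = 6,
	  .model = X86_MODEL_ANY, .feature = X86_FEATURE_ANY },
	{}	/* zero-terminated */
};

static bool demo_cpu_supported(void)
{
	/* Returns the matched entry, or NULL if the boot CPU matches nothing. */
	return x86_match_cpu(demo_cpu_ids) != NULL;
}
```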
|
perfctr-watchdog.c |
    48  switch (boot_cpu_data.x86_vendor) {    in nmi_perfctr_msr_to_bit()
    77  switch (boot_cpu_data.x86_vendor) {    in nmi_evntsel_msr_to_bit()
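
Both helpers are vendor switches that translate a performance-counter or event-select MSR address into a bit index for the in-use bitmap. A rough sketch of that dispatch pattern using MSR bases from msr-index.h; the real helpers also handle extended AMD counters and the older per-family cases, which this toy version skips:

```c
#include <asm/msr-index.h>
#include <asm/processor.h>

/* Toy version of the "switch (boot_cpu_data.x86_vendor)" MSR-to-bit mapping. */
static int perfctr_msr_to_bit(unsigned int msr)
{
	switch (boot_cpu_data.x86_vendor) {
	case X86_VENDOR_AMD:
	case X86_VENDOR_HYGON:
		return msr - MSR_K7_PERFCTR0;
	case X86_VENDOR_INTEL:
		return msr - MSR_ARCH_PERFMON_PERFCTR0;
	default:
		return -1;	/* unknown vendor: no mapping */
	}
}
```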
|
cacheinfo.c |
   600  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD) {    in cpuid4_cache_lookup_regs()
   607  } else if (boot_cpu_data.x86_vendor == X86_VENDOR_HYGON) {    in cpuid4_cache_lookup_regs()
   634  if (c->x86_vendor == X86_VENDOR_AMD ||    in find_num_cache_leaves()
   635  c->x86_vendor == X86_VENDOR_HYGON)    in find_num_cache_leaves()
   943  if (c->x86_vendor == X86_VENDOR_AMD ||    in __cache_cpumap_setup()
   944  c->x86_vendor == X86_VENDOR_HYGON) {    in __cache_cpumap_setup()
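
These cacheinfo hits reflect a CPUID leaf split: AMD and Hygon parts describe their caches through leaf 0x8000001d (when topology extensions are available), while Intel uses the deterministic cache-parameters leaf 4. A hedged sketch of just that selection, assuming cpuid_count() from asm/processor.h and ignoring the legacy fallback AMD CPUs without TOPOEXT take:

```c
#include <asm/processor.h>

/* Pick the cache-parameters leaf by vendor, then query cache index 0. */
static void cache_leaf0_regs(unsigned int *eax, unsigned int *ebx,
			     unsigned int *ecx, unsigned int *edx)
{
	unsigned int leaf;

	if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD ||
	    boot_cpu_data.x86_vendor == X86_VENDOR_HYGON)
		leaf = 0x8000001d;	/* extended cache topology leaf */
	else
		leaf = 4;		/* Intel deterministic cache parameters */

	cpuid_count(leaf, 0, eax, ebx, ecx, edx);
}
```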
|
/arch/x86/kernel/ |
amd_nb.c |
   242  if (boot_cpu_data.x86_vendor == X86_VENDOR_HYGON) {    in amd_cache_northbridges()
   341  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&    in early_is_amd_nb()
   342  boot_cpu_data.x86_vendor != X86_VENDOR_HYGON)    in early_is_amd_nb()
   345  if (boot_cpu_data.x86_vendor == X86_VENDOR_HYGON)    in early_is_amd_nb()
   361  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&    in amd_get_mmconfig_range()
   362  boot_cpu_data.x86_vendor != X86_VENDOR_HYGON)    in amd_get_mmconfig_range()
|
topology.c |
   116  if (c->x86_vendor != X86_VENDOR_INTEL ||    in arch_register_cpu()
|
asm-offsets_32.c |
    19  OFFSET(CPUINFO_x86_vendor, cpuinfo_x86, x86_vendor);    in foo()
|
smpboot.c |
   751  if (((boot_cpu_data.x86_vendor == X86_VENDOR_INTEL) && (boot_cpu_data.x86 == 6)) ||    in smp_quirk_init_udelay()
   752  ((boot_cpu_data.x86_vendor == X86_VENDOR_HYGON) && (boot_cpu_data.x86 >= 0x18)) ||    in smp_quirk_init_udelay()
   753  ((boot_cpu_data.x86_vendor == X86_VENDOR_AMD) && (boot_cpu_data.x86 >= 0xF))) {    in smp_quirk_init_udelay()
  1696  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD ||    in mwait_play_dead()
  1697  boot_cpu_data.x86_vendor == X86_VENDOR_HYGON)    in mwait_play_dead()
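
smp_quirk_init_udelay() uses the family checks at lines 751-753 to skip the legacy 10 ms delay after sending INIT on CPUs new enough not to need it. A condensed sketch of that decision; UDELAY_10MS_LEGACY is a placeholder constant standing in for the kernel's own default, not its real name:

```c
#include <asm/processor.h>

#define UDELAY_10MS_LEGACY	10000	/* microseconds, per the old MP spec */

/* Modern Intel family 6, AMD >= 0xF and Hygon >= 0x18 skip the INIT delay. */
static unsigned int pick_init_udelay(void)
{
	struct cpuinfo_x86 *c = &boot_cpu_data;

	if ((c->x86_vendor == X86_VENDOR_INTEL && c->x86 == 6) ||
	    (c->x86_vendor == X86_VENDOR_HYGON && c->x86 >= 0x18) ||
	    (c->x86_vendor == X86_VENDOR_AMD && c->x86 >= 0xF))
		return 0;

	return UDELAY_10MS_LEGACY;
}
```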
|
/arch/x86/xen/ |
pmu.c |
    72  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD) {    in xen_pmu_arch_init()
    94  } else if (boot_cpu_data.x86_vendor == X86_VENDOR_HYGON) {    in xen_pmu_arch_init()
   295  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL) {    in pmu_msr_read()
   318  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL) {    in pmu_msr_write()
   389  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)    in xen_read_pmc()
|
/arch/x86/kernel/cpu/mce/ |
core.c |
   140  m->cpuvendor = boot_cpu_data.x86_vendor;    in mce_setup()
   565  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)    in mce_usable_address()
  1721  if (c->x86_vendor == X86_VENDOR_UNKNOWN) {    in __mcheck_cpu_apply_quirks()
  1727  if (c->x86_vendor == X86_VENDOR_AMD) {    in __mcheck_cpu_apply_quirks()
  1759  if (c->x86_vendor == X86_VENDOR_INTEL) {    in __mcheck_cpu_apply_quirks()
  1803  switch (c->x86_vendor) {    in __mcheck_cpu_ancient_init()
  1824  if (c->x86_vendor == X86_VENDOR_AMD || c->x86_vendor == X86_VENDOR_HYGON) {    in __mcheck_cpu_init_early()
  1855  switch (c->x86_vendor) {    in __mcheck_cpu_init_vendor()
  1881  switch (c->x86_vendor) {    in __mcheck_cpu_clear_vendor()
  1918  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)    in filter_mce()
        [all …]
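
Most of the MCE core hits are per-vendor dispatch: quirks keyed on c->x86_vendor and switches that hand off to the vendor's machine-check init. A stripped-down sketch of that dispatch shape; the vendor_mce_init_* helpers are placeholders, not the kernel's actual function names:

```c
#include <asm/processor.h>

/* Placeholder per-vendor hooks standing in for the real feature-init calls. */
static void vendor_mce_init_intel(struct cpuinfo_x86 *c) { }
static void vendor_mce_init_amd(struct cpuinfo_x86 *c) { }

static void mce_init_vendor_sketch(struct cpuinfo_x86 *c)
{
	switch (c->x86_vendor) {
	case X86_VENDOR_INTEL:
		vendor_mce_init_intel(c);
		break;
	case X86_VENDOR_AMD:
	case X86_VENDOR_HYGON:
		/* Hygon largely reuses the AMD MCE paths. */
		vendor_mce_init_amd(c);
		break;
	default:
		break;
	}
}
```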
|
severity.c |
   364  if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD ||    in mcheck_vendor_init_severity()
   365  boot_cpu_data.x86_vendor == X86_VENDOR_HYGON)    in mcheck_vendor_init_severity()
|
/arch/x86/events/amd/ |
uncore.c |
   516  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&    in amd_uncore_init()
   517  boot_cpu_data.x86_vendor != X86_VENDOR_HYGON)    in amd_uncore_init()
   561  boot_cpu_data.x86_vendor == X86_VENDOR_HYGON ?    in amd_uncore_init()
   577  boot_cpu_data.x86_vendor == X86_VENDOR_HYGON ?    in amd_uncore_init()
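
The uncore hits show the same two-step: bail out unless the vendor is AMD or Hygon, then pick a Hygon or AMD PMU name. A small sketch of that selection; the name strings here are illustrative placeholders, not the driver's real PMU names:

```c
#include <asm/processor.h>

/* Vendor gate plus Hygon/AMD naming, mirroring the amd_uncore_init() hits. */
static const char *uncore_pmu_name(void)
{
	if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&
	    boot_cpu_data.x86_vendor != X86_VENDOR_HYGON)
		return NULL;	/* not our hardware */

	return boot_cpu_data.x86_vendor == X86_VENDOR_HYGON ?
	       "hygon_uncore" : "amd_uncore";
}
```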
|
/arch/x86/kernel/cpu/mtrr/ |
mtrr.c |
   711  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL &&    in mtrr_bp_init()
   720  } else if (boot_cpu_data.x86_vendor == X86_VENDOR_CENTAUR &&    in mtrr_bp_init()
   731  switch (boot_cpu_data.x86_vendor) {    in mtrr_bp_init()
|
/arch/x86/kernel/cpu/resctrl/ |
core.c |
   833  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)    in get_mem_config()
   835  else if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)    in get_mem_config()
   903  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)    in check_quirks()
   957  if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)    in rdt_init_res_defs()
   959  else if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)    in rdt_init_res_defs()
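
resctrl shares one code base but discovers Intel and AMD memory-bandwidth allocation differently, which is what these branches select. A hedged sketch of that vendor split; the get_mba_config_* helpers are illustrative placeholders for the real discovery routines that parse each vendor's bandwidth-allocation CPUID leaves:

```c
#include <linux/types.h>
#include <asm/processor.h>

/* Placeholder discovery hooks for the vendor-specific MBA enumeration. */
static bool get_mba_config_intel(void) { return true; }
static bool get_mba_config_amd(void)   { return true; }

static bool get_mem_config_sketch(void)
{
	if (boot_cpu_data.x86_vendor == X86_VENDOR_INTEL)
		return get_mba_config_intel();
	else if (boot_cpu_data.x86_vendor == X86_VENDOR_AMD)
		return get_mba_config_amd();

	return false;	/* no memory-bandwidth support detected */
}
```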
|
/arch/x86/pci/ |
amd_bus.c |
   394  if (boot_cpu_data.x86_vendor != X86_VENDOR_AMD &&    in amd_postcore_init()
   395  boot_cpu_data.x86_vendor != X86_VENDOR_HYGON)    in amd_postcore_init()
|
/arch/x86/kernel/apic/ |
probe_32.c |
   144  switch (boot_cpu_data.x86_vendor) {    in default_setup_apic_routing()
|
/arch/x86/kernel/cpu/microcode/ |
core.c |
   239  switch (c->x86_vendor) {    in save_microcode_in_initrd()
   838  if (c->x86_vendor == X86_VENDOR_INTEL)    in microcode_init()
   840  else if (c->x86_vendor == X86_VENDOR_AMD)    in microcode_init()
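
Both save_microcode_in_initrd() and microcode_init() dispatch on the boot CPU's vendor to pick the Intel or AMD loader backend. A hedged sketch of that selection with a placeholder ops structure; the real code wires up a vendor-specific struct of loader operations rather than this toy type:

```c
#include <asm/processor.h>

/* Placeholder for the per-vendor microcode loader backends. */
struct ucode_loader_ops {
	const char *name;
};

static struct ucode_loader_ops intel_loader = { .name = "intel" };
static struct ucode_loader_ops amd_loader   = { .name = "amd" };

static struct ucode_loader_ops *pick_ucode_loader(struct cpuinfo_x86 *c)
{
	if (c->x86_vendor == X86_VENDOR_INTEL)
		return &intel_loader;
	else if (c->x86_vendor == X86_VENDOR_AMD)
		return &amd_loader;

	return NULL;	/* no loader for this vendor */
}
```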
|
/arch/x86/mm/ |
cpu_entry_area.c |
    61  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)    in percpu_setup_debug_store()
|
/arch/x86/crypto/ |
twofish_glue_3way.c |
   217  if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)    in is_blacklisted_cpu()
|