/arch/mips/kernel/ |
D | cpu-probe.c | 86 static int set_ftlb_enable(struct cpuinfo_mips *c, enum ftlb_flags flags); 143 static inline void cpu_set_mt_per_tc_perf(struct cpuinfo_mips *c) in cpu_set_mt_per_tc_perf() argument 146 c->options |= MIPS_CPU_MT_PER_TC_PERF_COUNTERS; in cpu_set_mt_per_tc_perf() 151 struct cpuinfo_mips *c = &current_cpu_data; in check_errata() local 160 if ((c->processor_id & PRID_REV_MASK) <= PRID_REV_34K_V1_0_2) in check_errata() 208 static inline void cpu_probe_vmbits(struct cpuinfo_mips *c) in cpu_probe_vmbits() argument 213 c->vmbits = fls64(read_c0_entryhi() & 0x3fffffffffffe000ULL); in cpu_probe_vmbits() 217 static void set_isa(struct cpuinfo_mips *c, unsigned int isa) in set_isa() argument 221 c->isa_level |= MIPS_CPU_ISA_M32R5 | MIPS_CPU_ISA_M64R5; in set_isa() 225 c->isa_level |= MIPS_CPU_ISA_M32R2 | MIPS_CPU_ISA_M64R2; in set_isa() [all …]
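The set_isa() lines quoted above OR ISA bits into c->isa_level so that a newer MIPS revision is recorded as implying all of its predecessors. A minimal stand-alone sketch of that cumulative, fall-through accumulation; the ISA_* values below are invented for the sketch and are not the kernel's MIPS_CPU_ISA_* definitions, and the real set_isa() covers more revisions:

    #include <stdio.h>

    /* Hypothetical stand-ins for the kernel's MIPS_CPU_ISA_* bits. */
    #define ISA_M32R1 0x01
    #define ISA_M32R2 0x02
    #define ISA_M32R5 0x04
    #define ISA_M32R6 0x08

    static unsigned int set_isa(unsigned int isa)
    {
            unsigned int level = 0;

            /* Each case falls through: a newer ISA implies the older ones. */
            switch (isa) {
            case ISA_M32R6:
                    level |= ISA_M32R6;
                    /* fall through */
            case ISA_M32R5:
                    level |= ISA_M32R5;
                    /* fall through */
            case ISA_M32R2:
                    level |= ISA_M32R2;
                    /* fall through */
            case ISA_M32R1:
                    level |= ISA_M32R1;
            }
            return level;
    }

    int main(void)
    {
            printf("M32R5 implies level mask 0x%x\n", set_isa(ISA_M32R5));
            return 0;
    }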
|
D | cpu-r3k-probe.c | 70 struct cpuinfo_mips *c = &current_cpu_data; in cpu_probe() local 79 c->processor_id = PRID_IMP_UNKNOWN; in cpu_probe() 80 c->fpu_id = FPIR_IMP_NONE; in cpu_probe() 81 c->cputype = CPU_UNKNOWN; in cpu_probe() 82 c->writecombine = _CACHE_UNCACHED; in cpu_probe() 84 c->fpu_csr31 = FPU_CSR_RN; in cpu_probe() 85 c->fpu_msk31 = FPU_CSR_RSVD | FPU_CSR_ABS2008 | FPU_CSR_NAN2008 | in cpu_probe() 88 c->srsets = 1; in cpu_probe() 90 c->processor_id = read_c0_prid(); in cpu_probe() 91 switch (c->processor_id & (PRID_COMP_MASK | PRID_IMP_MASK)) { in cpu_probe() [all …]
|
D | fpu-probe.c | 49 static inline void cpu_set_fpu_fcsr_mask(struct cpuinfo_mips *c) in cpu_set_fpu_fcsr_mask() argument 53 fcsr = c->fpu_csr31; in cpu_set_fpu_fcsr_mask() 71 c->fpu_msk31 = ~(fcsr0 ^ fcsr1) & ~mask; in cpu_set_fpu_fcsr_mask() 78 static void cpu_set_fpu_2008(struct cpuinfo_mips *c) in cpu_set_fpu_2008() argument 80 if (c->isa_level & (MIPS_CPU_ISA_M32R1 | MIPS_CPU_ISA_M64R1 | in cpu_set_fpu_2008() 109 if (c->isa_level & (MIPS_CPU_ISA_M32R2 | in cpu_set_fpu_2008() 116 c->options |= MIPS_CPU_MAC_2008_ONLY; in cpu_set_fpu_2008() 120 c->options |= MIPS_CPU_NAN_LEGACY; in cpu_set_fpu_2008() 122 c->options |= MIPS_CPU_NAN_2008; in cpu_set_fpu_2008() 125 c->fpu_msk31 &= ~FPU_CSR_ABS2008; in cpu_set_fpu_2008() [all …]
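cpu_set_fpu_fcsr_mask() above derives c->fpu_msk31 by writing complementary bit patterns to the FPU control register and comparing what reads back, so only bits that actually toggle are treated as writable. A self-contained sketch of the same write-and-read-back probing, with an ordinary variable standing in for the hardware register (WRITABLE_BITS and the helper names are made up for the example):

    #include <stdio.h>
    #include <stdint.h>

    /* Stand-in for a hardware register where only some bits are writable. */
    static uint32_t fake_reg;
    #define WRITABLE_BITS 0x0000f0ffu

    static void reg_write(uint32_t v) { fake_reg = v & WRITABLE_BITS; }
    static uint32_t reg_read(void)    { return fake_reg; }

    int main(void)
    {
            uint32_t lo, hi, writable;

            reg_write(0x00000000u);
            lo = reg_read();                /* bits that stick at 0 */
            reg_write(0xffffffffu);
            hi = reg_read();                /* bits that can be set to 1 */

            writable = lo ^ hi;             /* bits that actually toggled */
            printf("writable mask: 0x%08x\n", writable);
            return 0;
    }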
|
/arch/x86/kernel/cpu/ |
D | amd.c | 175 static void init_amd_k5(struct cpuinfo_x86 *c) in init_amd_k5() argument 187 if (c->x86_model == 9 || c->x86_model == 10) { in init_amd_k5() 194 static void init_amd_k6(struct cpuinfo_x86 *c) in init_amd_k6() argument 200 if (c->x86_model < 6) { in init_amd_k6() 202 if (c->x86_model == 0) { in init_amd_k6() 203 clear_cpu_cap(c, X86_FEATURE_APIC); in init_amd_k6() 204 set_cpu_cap(c, X86_FEATURE_PGE); in init_amd_k6() 209 if (c->x86_model == 6 && c->x86_stepping == 1) { in init_amd_k6() 238 if (c->x86_model < 8 || in init_amd_k6() 239 (c->x86_model == 8 && c->x86_stepping < 8)) { in init_amd_k6() [all …]
|
D | hygon.c | 54 static void hygon_get_topology_early(struct cpuinfo_x86 *c) in hygon_get_topology_early() argument 56 if (cpu_has(c, X86_FEATURE_TOPOEXT)) in hygon_get_topology_early() 66 static void hygon_get_topology(struct cpuinfo_x86 *c) in hygon_get_topology() argument 77 c->cpu_die_id = ecx & 0xff; in hygon_get_topology() 79 c->cpu_core_id = ebx & 0xff; in hygon_get_topology() 82 c->x86_max_cores /= smp_num_siblings; in hygon_get_topology() 88 err = detect_extended_topology(c); in hygon_get_topology() 90 c->x86_coreid_bits = get_count_order(c->x86_max_cores); in hygon_get_topology() 96 if (!boot_cpu_has(X86_FEATURE_HYPERVISOR) && c->x86_model <= 0x3) in hygon_get_topology() 97 c->phys_proc_id = c->apicid >> APICID_SOCKET_ID_BIT; in hygon_get_topology() [all …]
|
D | centaur.c | 22 static void init_c3(struct cpuinfo_x86 *c) in init_c3() argument 49 c->x86_capability[CPUID_C000_0001_EDX] = cpuid_edx(0xC0000001); in init_c3() 53 if (c->x86_model >= 6 && c->x86_model <= 13) { in init_c3() 57 set_cpu_cap(c, X86_FEATURE_CX8); in init_c3() 61 if (c->x86_model >= 6 && c->x86_model < 9) in init_c3() 62 set_cpu_cap(c, X86_FEATURE_3DNOW); in init_c3() 64 if (c->x86 == 0x6 && c->x86_model >= 0xf) { in init_c3() 65 c->x86_cache_alignment = c->x86_clflush_size * 2; in init_c3() 66 set_cpu_cap(c, X86_FEATURE_REP_GOOD); in init_c3() 69 if (c->x86 >= 7) in init_c3() [all …]
|
D | intel.c | 67 static void check_memory_type_self_snoop_errata(struct cpuinfo_x86 *c) in check_memory_type_self_snoop_errata() argument 69 switch (c->x86_model) { in check_memory_type_self_snoop_errata() 95 static void probe_xeon_phi_r3mwait(struct cpuinfo_x86 *c) in probe_xeon_phi_r3mwait() argument 101 if (c->x86 != 6) in probe_xeon_phi_r3mwait() 103 switch (c->x86_model) { in probe_xeon_phi_r3mwait() 114 set_cpu_cap(c, X86_FEATURE_RING3MWAIT); in probe_xeon_phi_r3mwait() 118 if (c == &boot_cpu_data) in probe_xeon_phi_r3mwait() 159 static bool bad_spectre_microcode(struct cpuinfo_x86 *c) in bad_spectre_microcode() argument 167 if (cpu_has(c, X86_FEATURE_HYPERVISOR)) in bad_spectre_microcode() 170 if (c->x86 != 6) in bad_spectre_microcode() [all …]
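The Intel and AMD fragments above gate errata and feature quirks on c->x86 (family), c->x86_model and c->x86_stepping. Those fields come from CPUID leaf 1; a user-space sketch of the customary decode, using the GCC/Clang <cpuid.h> helper (x86 only, and the extended-field handling follows the usual convention rather than quoting the kernel's exact code):

    #include <stdio.h>
    #include <cpuid.h>      /* GCC/Clang helper, x86 only */

    int main(void)
    {
            unsigned int eax, ebx, ecx, edx;
            unsigned int family, model, stepping;

            if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
                    return 1;

            stepping = eax & 0xf;
            model    = (eax >> 4) & 0xf;
            family   = (eax >> 8) & 0xf;

            /* Extended fields extend family 0xf and models on family >= 6. */
            if (family == 0xf)
                    family += (eax >> 20) & 0xff;
            if (family >= 0x6)
                    model += ((eax >> 16) & 0xf) << 4;

            printf("family 0x%x, model 0x%x, stepping 0x%x\n",
                   family, model, stepping);
            return 0;
    }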
|
D | proc.c | 17 static void show_cpuinfo_core(struct seq_file *m, struct cpuinfo_x86 *c, in show_cpuinfo_core() argument 21 seq_printf(m, "physical id\t: %d\n", c->phys_proc_id); in show_cpuinfo_core() 24 seq_printf(m, "core id\t\t: %d\n", c->cpu_core_id); in show_cpuinfo_core() 25 seq_printf(m, "cpu cores\t: %d\n", c->booted_cores); in show_cpuinfo_core() 26 seq_printf(m, "apicid\t\t: %d\n", c->apicid); in show_cpuinfo_core() 27 seq_printf(m, "initial apicid\t: %d\n", c->initial_apicid); in show_cpuinfo_core() 32 static void show_cpuinfo_misc(struct seq_file *m, struct cpuinfo_x86 *c) in show_cpuinfo_misc() argument 47 c->cpuid_level); in show_cpuinfo_misc() 50 static void show_cpuinfo_misc(struct seq_file *m, struct cpuinfo_x86 *c) in show_cpuinfo_misc() argument 57 c->cpuid_level); in show_cpuinfo_misc() [all …]
|
D | common.c | 95 static void default_init(struct cpuinfo_x86 *c) in default_init() argument 98 cpu_detect_cache_sizes(c); in default_init() 102 if (c->cpuid_level == -1) { in default_init() 104 if (c->x86 == 4) in default_init() 105 strcpy(c->x86_model_id, "486"); in default_init() 106 else if (c->x86 == 3) in default_init() 107 strcpy(c->x86_model_id, "386"); in default_init() 261 static void squash_the_stupid_serial_number(struct cpuinfo_x86 *c) in squash_the_stupid_serial_number() argument 265 if (!cpu_has(c, X86_FEATURE_PN) || !disable_x86_serial_nr) in squash_the_stupid_serial_number() 275 clear_cpu_cap(c, X86_FEATURE_PN); in squash_the_stupid_serial_number() [all …]
|
D | feat_ctl.c | 23 static void init_vmx_capabilities(struct cpuinfo_x86 *c) in init_vmx_capabilities() argument 40 c->vmx_capability[PRIMARY_CTLS] = supported; in init_vmx_capabilities() 43 c->vmx_capability[SECONDARY_CTLS] = supported; in init_vmx_capabilities() 57 c->vmx_capability[MISC_FEATURES] = (supported & 0xffff) | in init_vmx_capabilities() 63 c->vmx_capability[MISC_FEATURES] |= VMX_F(EPT_EXECUTE_ONLY); in init_vmx_capabilities() 65 c->vmx_capability[MISC_FEATURES] |= VMX_F(EPT_AD); in init_vmx_capabilities() 67 c->vmx_capability[MISC_FEATURES] |= VMX_F(EPT_1GB); in init_vmx_capabilities() 70 if ((c->vmx_capability[PRIMARY_CTLS] & VMX_F(VIRTUAL_TPR)) && in init_vmx_capabilities() 71 (c->vmx_capability[SECONDARY_CTLS] & VMX_F(VIRT_APIC_ACCESSES))) in init_vmx_capabilities() 72 c->vmx_capability[MISC_FEATURES] |= VMX_F(FLEXPRIORITY); in init_vmx_capabilities() [all …]
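init_vmx_capabilities() above fills c->vmx_capability[] from the VMX control MSRs. As I understand the layout of the IA32_VMX_*_CTLS MSRs, the low 32 bits describe controls that must be set (allowed-0 settings) and the high 32 bits describe controls that may be set (allowed-1 settings), which is why only the high word is kept as "supported". A small illustrative split of a made-up MSR value; real code would obtain the value with rdmsr:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
            /* Invented stand-in for an IA32_VMX_*_CTLS MSR value. */
            uint64_t msr = 0xfff9fffe0401e172ULL;

            uint32_t must_be_one = (uint32_t)msr;           /* low word  */
            uint32_t may_be_one  = (uint32_t)(msr >> 32);   /* high word */

            printf("controls that can be enabled: 0x%08x\n", may_be_one);
            printf("controls forced on:           0x%08x\n", must_be_one);
            return 0;
    }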
|
D | zhaoxin.c | 20 static void init_zhaoxin_cap(struct cpuinfo_x86 *c) in init_zhaoxin_cap() argument 50 c->x86_capability[CPUID_C000_0001_EDX] = cpuid_edx(0xC0000001); in init_zhaoxin_cap() 53 if (c->x86 >= 0x6) in init_zhaoxin_cap() 54 set_cpu_cap(c, X86_FEATURE_REP_GOOD); in init_zhaoxin_cap() 57 static void early_init_zhaoxin(struct cpuinfo_x86 *c) in early_init_zhaoxin() argument 59 if (c->x86 >= 0x6) in early_init_zhaoxin() 60 set_cpu_cap(c, X86_FEATURE_CONSTANT_TSC); in early_init_zhaoxin() 62 set_cpu_cap(c, X86_FEATURE_SYSENTER32); in early_init_zhaoxin() 64 if (c->x86_power & (1 << 8)) { in early_init_zhaoxin() 65 set_cpu_cap(c, X86_FEATURE_CONSTANT_TSC); in early_init_zhaoxin() [all …]
|
/arch/mips/loongson64/ |
D | cpucfg-emul.c | 12 static bool is_loongson(struct cpuinfo_mips *c) in is_loongson() argument 14 switch (c->processor_id & PRID_COMP_MASK) { in is_loongson() 16 return ((c->processor_id & PRID_IMP_MASK) == in is_loongson() 27 static u32 get_loongson_fprev(struct cpuinfo_mips *c) in get_loongson_fprev() argument 29 return c->fpu_id & LOONGSON_FPREV_MASK; in get_loongson_fprev() 50 static void probe_uca(struct cpuinfo_mips *c) in probe_uca() argument 53 c->loongson3_cpucfg_data[0] |= LOONGSON_CFG1_LSUCA; in probe_uca() 56 static void decode_loongson_config6(struct cpuinfo_mips *c) in decode_loongson_config6() argument 61 c->loongson3_cpucfg_data[0] |= LOONGSON_CFG1_SFBP; in decode_loongson_config6() 63 c->loongson3_cpucfg_data[0] |= LOONGSON_CFG1_LLEXC; in decode_loongson_config6() [all …]
|
/arch/mips/mm/ |
D | c-octeon.c | 172 struct cpuinfo_mips *c = &current_cpu_data; in probe_octeon() local 179 c->icache.linesz = 2 << ((config1 >> 19) & 7); in probe_octeon() 180 c->icache.sets = 64 << ((config1 >> 22) & 7); in probe_octeon() 181 c->icache.ways = 1 + ((config1 >> 16) & 7); in probe_octeon() 182 c->icache.flags |= MIPS_CACHE_VTAG; in probe_octeon() 184 c->icache.sets * c->icache.ways * c->icache.linesz; in probe_octeon() 185 c->icache.waybit = ffs(icache_size / c->icache.ways) - 1; in probe_octeon() 186 c->dcache.linesz = 128; in probe_octeon() 188 c->dcache.sets = 2; /* CN5XXX has two Dcache sets */ in probe_octeon() 190 c->dcache.sets = 1; /* CN3XXX has one Dcache set */ in probe_octeon() [all …]
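probe_octeon() reads the instruction-cache geometry straight out of Config1 bit fields: line size, set count and associativity each come from a three-bit field, and the total size is their product. A worked stand-alone example of the same decode, using an invented Config1 value instead of a real register read (the IL/IS/IA field names follow the MIPS Config1 layout):

    #include <stdio.h>

    int main(void)
    {
            /* Invented Config1 value purely for illustration. */
            unsigned int config1 = 0x1e190c8bu;
            unsigned int linesz, sets, ways, size;

            linesz = 2 << ((config1 >> 19) & 7);    /* IL: line size     */
            sets   = 64 << ((config1 >> 22) & 7);   /* IS: sets per way  */
            ways   = 1 + ((config1 >> 16) & 7);     /* IA: associativity */
            size   = sets * ways * linesz;

            printf("icache: %u sets x %u ways x %u bytes = %u bytes\n",
                   sets, ways, linesz, size);
            return 0;
    }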
|
D | c-r4k.c | 1052 static inline int alias_74k_erratum(struct cpuinfo_mips *c) in alias_74k_erratum() argument 1054 unsigned int imp = c->processor_id & PRID_IMP_MASK; in alias_74k_erratum() 1055 unsigned int rev = c->processor_id & PRID_REV_MASK; in alias_74k_erratum() 1107 struct cpuinfo_mips *c = &current_cpu_data; in probe_pcache() local 1120 c->icache.linesz = 16 << ((config & CONF_IB) >> 5); in probe_pcache() 1121 c->icache.ways = 2; in probe_pcache() 1122 c->icache.waybit = __ffs(icache_size/2); in probe_pcache() 1125 c->dcache.linesz = 16 << ((config & CONF_DB) >> 4); in probe_pcache() 1126 c->dcache.ways = 2; in probe_pcache() 1127 c->dcache.waybit= __ffs(dcache_size/2); in probe_pcache() [all …]
|
D | sc-mips.c | 120 static inline int mips_sc_is_activated(struct cpuinfo_mips *c) in mips_sc_is_activated() argument 143 c->scache.linesz = 2 << tmp; in mips_sc_is_activated() 151 struct cpuinfo_mips *c = &current_cpu_data; in mips_sc_probe_cm3() local 161 c->scache.sets = 64 << sets; in mips_sc_probe_cm3() 166 c->scache.linesz = 2 << line_sz; in mips_sc_probe_cm3() 170 c->scache.ways = assoc + 1; in mips_sc_probe_cm3() 171 c->scache.waysize = c->scache.sets * c->scache.linesz; in mips_sc_probe_cm3() 172 c->scache.waybit = __ffs(c->scache.waysize); in mips_sc_probe_cm3() 174 if (c->scache.linesz) { in mips_sc_probe_cm3() 175 c->scache.flags &= ~MIPS_CACHE_NOT_PRESENT; in mips_sc_probe_cm3() [all …]
|
/arch/riscv/include/asm/ |
D | mmio.h | 87 #define readb_cpu(c) ({ u8 __r = __raw_readb(c); __r; }) argument 88 #define readw_cpu(c) ({ u16 __r = le16_to_cpu((__force __le16)__raw_readw(c)); __r; }) argument 89 #define readl_cpu(c) ({ u32 __r = le32_to_cpu((__force __le32)__raw_readl(c)); __r; }) argument 91 #define writeb_cpu(v, c) ((void)__raw_writeb((v), (c))) argument 92 #define writew_cpu(v, c) ((void)__raw_writew((__force u16)cpu_to_le16(v), (c))) argument 93 #define writel_cpu(v, c) ((void)__raw_writel((__force u32)cpu_to_le32(v), (c))) argument 96 #define readq_cpu(c) ({ u64 __r = le64_to_cpu((__force __le64)__raw_readq(c)); __r; }) argument 97 #define writeq_cpu(v, c) ((void)__raw_writeq((__force u64)cpu_to_le64(v), (c))) argument 114 #define readb_relaxed(c) ({ u8 __v; __io_rbr(); __v = readb_cpu(c); __io_rar(); __v; }) argument 115 #define readw_relaxed(c) ({ u16 __v; __io_rbr(); __v = readw_cpu(c); __io_rar(); __v; }) argument [all …]
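The riscv mmio.h macros are built in two layers: readl_cpu()/writel_cpu() add little-endian byte-order conversion on top of the raw accessors, and the _relaxed variants wrap that in I/O fences. A user-space sketch of the same layering, with a plain volatile load standing in for __raw_readl(), a compiler barrier standing in for the RISC-V __io_rbr()/__io_rar() fences, and the glibc-style <endian.h> helpers doing the byte swapping:

    #include <stdint.h>
    #include <stdio.h>
    #include <endian.h>     /* le32toh()/htole32(), glibc-style */

    /* Compiler barrier standing in for the real fence instructions. */
    #define barrier()       __asm__ __volatile__("" ::: "memory")

    static inline uint32_t raw_read32(const volatile void *addr)
    {
            return *(const volatile uint32_t *)addr;
    }

    /* Device registers are little-endian; convert to CPU byte order. */
    static inline uint32_t read32_cpu(const volatile void *addr)
    {
            return le32toh(raw_read32(addr));
    }

    /* "Relaxed" read: ordering fences before and after the access. */
    static inline uint32_t read32_relaxed(const volatile void *addr)
    {
            uint32_t v;

            barrier();              /* stands in for __io_rbr() */
            v = read32_cpu(addr);
            barrier();              /* stands in for __io_rar() */
            return v;
    }

    int main(void)
    {
            uint32_t fake_reg = htole32(0x12345678u);

            printf("0x%08x\n", read32_relaxed(&fake_reg));
            return 0;
    }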
|
/arch/microblaze/lib/ |
D | memmove.c | 35 void *memmove(void *v_dst, const void *v_src, __kernel_size_t c) in memmove() argument 40 if (!c) in memmove() 45 return memcpy(v_dst, v_src, c); in memmove() 48 src += c; in memmove() 49 dst += c; in memmove() 52 while (c--) in memmove() 58 void *memmove(void *v_dst, const void *v_src, __kernel_size_t c) in memmove() argument 65 if (!c) in memmove() 70 return memcpy(v_dst, v_src, c); in memmove() 80 dst += c; in memmove() [all …]
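The microblaze memmove() chooses its copy direction from the relative position of destination and source, so overlapping moves remain correct: copy forwards when the destination is below the source, backwards otherwise. A byte-wise user-space sketch of that decision (the kernel version also hands non-overlapping cases to memcpy() and has a word-at-a-time fast path):

    #include <stdio.h>
    #include <stddef.h>

    static void *my_memmove(void *v_dst, const void *v_src, size_t c)
    {
            char *dst = v_dst;
            const char *src = v_src;

            if (!c)
                    return v_dst;

            if (dst < src) {                /* copy forwards */
                    while (c--)
                            *dst++ = *src++;
            } else {                        /* copy backwards from the end */
                    dst += c;
                    src += c;
                    while (c--)
                            *--dst = *--src;
            }
            return v_dst;
    }

    int main(void)
    {
            char buf[] = "abcdef";

            my_memmove(buf + 2, buf, 4);    /* overlapping move */
            printf("%s\n", buf);            /* prints "ababcd" */
            return 0;
    }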
|
D | memcpy.c | 36 void *memcpy(void *v_dst, const void *v_src, __kernel_size_t c) in memcpy() argument 42 while (c--) in memcpy() 48 void *memcpy(void *v_dst, const void *v_src, __kernel_size_t c) in memcpy() argument 62 if (likely(c >= 4)) { in memcpy() 70 --c; in memcpy() 73 --c; in memcpy() 76 --c; in memcpy() 87 for (; c >= 4; c -= 4) in memcpy() 99 for (; c >= 4; c -= 4) { in memcpy() 108 for (; c >= 4; c -= 4) { in memcpy() [all …]
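The microblaze memcpy() copies leading bytes until the buffers are word aligned and then moves 32-bit words in the main loop, falling back to byte copies when source and destination are not co-aligned. A simplified, portable sketch of that structure; memcpy() of four bytes is used inside the word loop purely as a portable word access, where the kernel casts to word pointers directly:

    #include <stdio.h>
    #include <stdint.h>
    #include <string.h>

    static void *my_memcpy(void *v_dst, const void *v_src, size_t c)
    {
            char *dst = v_dst;
            const char *src = v_src;

            if (((uintptr_t)dst & 3) == ((uintptr_t)src & 3)) {
                    /* Reach a 4-byte boundary with leading byte copies. */
                    while (c && ((uintptr_t)dst & 3)) {
                            *dst++ = *src++;
                            --c;
                    }
                    /* Bulk of the copy, four bytes per iteration. */
                    for (; c >= 4; c -= 4) {
                            uint32_t w;

                            memcpy(&w, src, 4);
                            memcpy(dst, &w, 4);
                            src += 4;
                            dst += 4;
                    }
            }
            while (c--)             /* trailing (or misaligned) bytes */
                    *dst++ = *src++;
            return v_dst;
    }

    int main(void)
    {
            char out[16] = { 0 };

            my_memcpy(out, "hello, world", 13);
            printf("%s\n", out);
            return 0;
    }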
|
/arch/mips/alchemy/common/ |
D | clock.c | 245 struct clk *c; in alchemy_clk_setup_aux() local 262 c = clk_register(NULL, &a->hw); in alchemy_clk_setup_aux() 263 if (!IS_ERR(c)) in alchemy_clk_setup_aux() 264 clk_register_clkdev(c, name, NULL); in alchemy_clk_setup_aux() 268 return c; in alchemy_clk_setup_aux() 276 struct clk *c; in alchemy_clk_setup_sysbus() local 278 c = clk_register_fixed_factor(NULL, ALCHEMY_SYSBUS_CLK, in alchemy_clk_setup_sysbus() 280 if (!IS_ERR(c)) in alchemy_clk_setup_sysbus() 281 clk_register_clkdev(c, ALCHEMY_SYSBUS_CLK, NULL); in alchemy_clk_setup_sysbus() 282 return c; in alchemy_clk_setup_sysbus() [all …]
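alchemy_clk_setup_aux() and the other setup helpers above follow the common clk registration pattern: clk_register() returns either a valid struct clk pointer or an error encoded in the pointer itself, which the caller tests with IS_ERR() before registering a clkdev lookup. A user-space re-creation of that error-pointer idiom, with fake_register() standing in for clk_register() (ERR_PTR/IS_ERR/PTR_ERR are re-implemented here in the spirit of the kernel's helpers):

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdint.h>
    #include <errno.h>

    /* Small negative errno values folded into the pointer value itself. */
    #define MAX_ERRNO       4095

    static inline void *ERR_PTR(long error)     { return (void *)error; }
    static inline long PTR_ERR(const void *ptr) { return (long)ptr; }
    static inline int IS_ERR(const void *ptr)
    {
            return (uintptr_t)ptr >= (uintptr_t)-MAX_ERRNO;
    }

    /* Hypothetical registration helper standing in for clk_register(). */
    static void *fake_register(int fail)
    {
            if (fail)
                    return ERR_PTR(-ENOMEM);
            return malloc(16);
    }

    int main(void)
    {
            void *c = fake_register(1);

            if (IS_ERR(c)) {
                    printf("registration failed: %ld\n", PTR_ERR(c));
            } else {
                    printf("registered at %p\n", c);
                    free(c);
            }
            return 0;
    }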
|
/arch/arm64/include/asm/ |
D | arch_gicv3.h | 122 #define gic_read_typer(c) readq_relaxed(c) argument 123 #define gic_write_irouter(v, c) writeq_relaxed(v, c) argument 124 #define gic_read_lpir(c) readq_relaxed(c) argument 125 #define gic_write_lpir(v, c) writeq_relaxed(v, c) argument 129 #define gits_read_baser(c) readq_relaxed(c) argument 130 #define gits_write_baser(v, c) writeq_relaxed(v, c) argument 132 #define gits_read_cbaser(c) readq_relaxed(c) argument 133 #define gits_write_cbaser(v, c) writeq_relaxed(v, c) argument 135 #define gits_write_cwriter(v, c) writeq_relaxed(v, c) argument 137 #define gicr_read_propbaser(c) readq_relaxed(c) argument [all …]
|
/arch/csky/include/asm/ |
D | io.h | 22 #define readb(c) ({ u8 __v = readb_relaxed(c); rmb(); __v; }) argument 23 #define readw(c) ({ u16 __v = readw_relaxed(c); rmb(); __v; }) argument 24 #define readl(c) ({ u32 __v = readl_relaxed(c); rmb(); __v; }) argument 27 #define writeb(v,c) ({ wmb(); writeb_relaxed((v),(c)); }) argument 28 #define writew(v,c) ({ wmb(); writew_relaxed((v),(c)); }) argument 29 #define writel(v,c) ({ wmb(); writel_relaxed((v),(c)); }) argument 31 #define writeb(v,c) ({ wmb(); writeb_relaxed((v),(c)); mb(); }) argument 32 #define writew(v,c) ({ wmb(); writew_relaxed((v),(c)); mb(); }) argument 33 #define writel(v,c) ({ wmb(); writel_relaxed((v),(c)); mb(); }) argument
|
/arch/nds32/include/asm/ |
D | io.h | 63 #define readb_relaxed(c) ({ u8 __v = __raw_readb(c); __v; }) argument 64 #define readw_relaxed(c) ({ u16 __v = le16_to_cpu((__force __le16)__raw_readw(c)); __v; }) argument 65 #define readl_relaxed(c) ({ u32 __v = le32_to_cpu((__force __le32)__raw_readl(c)); __v; }) argument 66 #define writeb_relaxed(v,c) ((void)__raw_writeb((v),(c))) argument 67 #define writew_relaxed(v,c) ((void)__raw_writew((__force u16)cpu_to_le16(v),(c))) argument 68 #define writel_relaxed(v,c) ((void)__raw_writel((__force u32)cpu_to_le32(v),(c))) argument 74 #define readb(c) ({ u8 __v = readb_relaxed(c); __iormb(); __v; }) argument 75 #define readw(c) ({ u16 __v = readw_relaxed(c); __iormb(); __v; }) argument 76 #define readl(c) ({ u32 __v = readl_relaxed(c); __iormb(); __v; }) argument 78 #define writeb(v,c) ({ __iowmb(); writeb_relaxed((v),(c)); }) argument [all …]
|
/arch/x86/boot/ |
D | cmdline.c | 15 static inline int myisspace(u8 c) in myisspace() argument 17 return c <= ' '; /* Close enough approximation */ in myisspace() 31 char c; in __cmdline_find_option() local 48 while (cptr < 0x10000 && (c = rdfs8(cptr++))) { in __cmdline_find_option() 51 if (myisspace(c)) in __cmdline_find_option() 60 if (c == '=' && !*opptr) { in __cmdline_find_option() 64 } else if (myisspace(c)) { in __cmdline_find_option() 66 } else if (c != *opptr++) { in __cmdline_find_option() 72 if (myisspace(c)) in __cmdline_find_option() 77 if (myisspace(c)) { in __cmdline_find_option() [all …]
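__cmdline_find_option() scans the boot command line byte by byte with a small state machine, treating anything at or below a space as a separator (the "close enough" myisspace()) and copying out the text after "option=". A simplified user-space cousin that does the same job with ordinary string handling rather than the boot-time rdfs8() accessor and state machine:

    #include <stdio.h>
    #include <string.h>

    /* Control characters and space all count as separators, as in the
     * kernel's "close enough" myisspace() above. */
    static int myisspace(unsigned char c)
    {
            return c <= ' ';
    }

    /*
     * Find "opt=value" in a space-separated command line and copy the value
     * out. Returns the value length, or -1 if the option is not present.
     */
    static int find_option(const char *cmdline, const char *opt,
                           char *buf, int bufsize)
    {
            const char *p = cmdline;
            size_t optlen = strlen(opt);

            while (*p) {
                    while (myisspace((unsigned char)*p))    /* skip separators */
                            p++;
                    if (!*p)
                            break;
                    if (!strncmp(p, opt, optlen) && p[optlen] == '=') {
                            int len = 0;

                            p += optlen + 1;
                            while (*p && !myisspace((unsigned char)*p) &&
                                   len < bufsize - 1)
                                    buf[len++] = *p++;
                            buf[len] = '\0';
                            return len;
                    }
                    while (*p && !myisspace((unsigned char)*p)) /* skip token */
                            p++;
            }
            return -1;
    }

    int main(void)
    {
            char val[32];

            if (find_option("ro root=/dev/sda1 quiet", "root",
                            val, sizeof(val)) >= 0)
                    printf("root=%s\n", val);
            return 0;
    }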
|
/arch/x86/lib/ |
D | cmdline.c | 11 static inline int myisspace(u8 c) in myisspace() argument 13 return c <= ' '; /* Close enough approximation */ in myisspace() 31 char c; in __cmdline_find_option_bool() local 48 c = *(char *)cmdline++; in __cmdline_find_option_bool() 53 if (!c) in __cmdline_find_option_bool() 55 else if (myisspace(c)) in __cmdline_find_option_bool() 71 if (!c || myisspace(c)) in __cmdline_find_option_bool() 78 } else if (!c) { in __cmdline_find_option_bool() 84 } else if (c == *opptr++) { in __cmdline_find_option_bool() 95 if (!c) in __cmdline_find_option_bool() [all …]
|
/arch/sparc/crypto/ |
D | opcodes.h | 18 #define CRC32C(a,b,c) \ argument 19 .word (F3F(2,0x36,0x147)|RS1(a)|RS2(b)|RD(c)); 30 #define AES_EROUND01(a,b,c,d) \ argument 31 .word (F3F(2, 0x19, 0)|RS1(a)|RS2(b)|RS3(c)|RD(d)); 32 #define AES_EROUND23(a,b,c,d) \ argument 33 .word (F3F(2, 0x19, 1)|RS1(a)|RS2(b)|RS3(c)|RD(d)); 34 #define AES_DROUND01(a,b,c,d) \ argument 35 .word (F3F(2, 0x19, 2)|RS1(a)|RS2(b)|RS3(c)|RD(d)); 36 #define AES_DROUND23(a,b,c,d) \ argument 37 .word (F3F(2, 0x19, 3)|RS1(a)|RS2(b)|RS3(c)|RD(d)); [all …]
|