Searched refs:NR_CPUS (Results 1 – 25 of 256) sorted by relevance
/arch/powerpc/include/asm/
  irq.h
    56  extern struct thread_info *critirq_ctx[NR_CPUS];
    57  extern struct thread_info *dbgirq_ctx[NR_CPUS];
    58  extern struct thread_info *mcheckirq_ctx[NR_CPUS];
    67  extern struct thread_info *hardirq_ctx[NR_CPUS];
    68  extern struct thread_info *softirq_ctx[NR_CPUS];
  cputhreads.h
    46  for (i = 0; i < NR_CPUS; i += threads_per_core) {    in cpu_thread_mask_to_cores()
    56  return NR_CPUS >> threads_shift;    in cpu_nr_cores()

/arch/mips/kernel/
  smtc-proc.c
    30  unsigned long selfipis[NR_CPUS];
    32  struct smtc_cpu_proc smtc_cpu_stats[NR_CPUS];
    45  for (i=0; i < NR_CPUS; i++)    in smtc_proc_show()
    48  for(i = 0; i < NR_CPUS; i++)    in smtc_proc_show()
    71  for (i=0; i<NR_CPUS; i++) {    in init_smtc_stats()
  smtc.c
    87  struct smtc_ipi_q IPIQ[NR_CPUS];
    192  int tcnoprog[NR_CPUS];
    194  static int clock_hang_reported[NR_CPUS];
    316  for (i=start_cpu_slot; i<NR_CPUS && i<ntcs; i++) {    in smtc_build_cpu_map()
    401  int i, vpe, tc, ntc, nvpe, tcpervpe[NR_CPUS], slop, cpu;    in smtc_prepare_cpus()
    419  for (i=0; i<NR_CPUS; i++) {    in smtc_prepare_cpus()
    459  if (ntc > NR_CPUS)    in smtc_prepare_cpus()
    460  ntc = NR_CPUS;    in smtc_prepare_cpus()
    617  nipi = NR_CPUS * IPIBUF_PER_CPU;    in smtc_prepare_cpus()
    650  extern u32 kernelsp[NR_CPUS];    in smtc_boot_secondary()
    [all …]

/arch/mips/include/asm/
  mmu.h
    5  unsigned long asid[NR_CPUS];
    6  unsigned long vdso_asid[NR_CPUS];
    7  struct page *vdso_page[NR_CPUS];
  smtc.h
    26  #if NR_CPUS <= 8
    29  #if NR_CPUS <= 16
  fixmap.h
    65  FIX_CMAP_END = FIX_CMAP_BEGIN + (FIX_N_COLOURS * NR_CPUS * 2),
    72  FIX_KMAP_END = FIX_KMAP_BEGIN+(8*NR_CPUS*FIX_N_COLOURS)-1,
  smp.h
    32  extern int __cpu_number_map[NR_CPUS];
    36  extern int __cpu_logical_map[NR_CPUS];

/arch/blackfin/include/asm/
  smp.h
    26  extern unsigned long blackfin_iflush_l1_entry[NR_CPUS];
    35  extern unsigned long icache_invld_count[NR_CPUS];
    38  extern unsigned long dcache_invld_count[NR_CPUS];
  cplbinit.h
    35  extern struct cplb_entry dcplb_tbl[NR_CPUS][MAX_CPLBS];
    36  extern struct cplb_entry icplb_tbl[NR_CPUS][MAX_CPLBS];
    50  extern unsigned long *current_rwx_mask[NR_CPUS];

/arch/blackfin/kernel/cplb-nompu/
  cplbmgr.c
    29  int nr_dcplb_miss[NR_CPUS], nr_icplb_miss[NR_CPUS];
    30  int nr_dcplb_supv_miss[NR_CPUS], nr_icplb_supv_miss[NR_CPUS];
    31  int nr_cplb_flush[NR_CPUS], nr_dcplb_prot[NR_CPUS];
    68  static int icplb_rr_index[NR_CPUS] PDT_ATTR;
    69  static int dcplb_rr_index[NR_CPUS] PDT_ATTR;

/arch/arm/mach-highbank/
  platsmp.c
    43  if (ncores > NR_CPUS) {    in highbank_smp_init_cpus()
    47  ncores, NR_CPUS);    in highbank_smp_init_cpus()
    48  ncores = NR_CPUS;    in highbank_smp_init_cpus()

/arch/ia64/include/asm/native/
  irq.h
    27  #if (NR_VECTORS + 32 * NR_CPUS) < 1024
    28  #define IA64_NATIVE_NR_IRQS (NR_VECTORS + 32 * NR_CPUS)

/arch/ia64/kernel/
  err_inject.c
    43  static u64 call_start[NR_CPUS];
    44  static u64 phys_addr[NR_CPUS];
    45  static u64 err_type_info[NR_CPUS];
    46  static u64 err_struct_info[NR_CPUS];
    51  } __attribute__((__aligned__(16))) err_data_buffer[NR_CPUS];
    52  static s64 status[NR_CPUS];
    53  static u64 capabilities[NR_CPUS];
    54  static u64 resources[NR_CPUS];

/arch/ia64/include/asm/
  smp.h
    55  int cpu_phys_id[NR_CPUS];
    60  extern cpumask_t cpu_core_map[NR_CPUS];
    80  for (i = 0; i < NR_CPUS; ++i)    in cpu_logical_id()
  numa.h
    25  extern u16 cpu_to_node_map[NR_CPUS] __cacheline_aligned;
    51  extern struct node_cpuid_s node_cpuid[NR_CPUS];

/arch/blackfin/kernel/cplb-mpu/
  cplbmgr.c
    28  unsigned long *current_rwx_mask[NR_CPUS];
    30  int nr_dcplb_miss[NR_CPUS], nr_icplb_miss[NR_CPUS];
    31  int nr_icplb_supv_miss[NR_CPUS], nr_dcplb_prot[NR_CPUS];
    32  int nr_cplb_flush[NR_CPUS];
    63  static int icplb_rr_index[NR_CPUS], dcplb_rr_index[NR_CPUS];

/arch/sh/include/asm/
  smp.h
    18  extern int __cpu_number_map[NR_CPUS];
    22  extern int __cpu_logical_map[NR_CPUS];

/arch/mn10300/mm/
  mmu-context.c
    20  unsigned long mmu_context_cache[NR_CPUS] = {
    21  [0 ... NR_CPUS - 1] =

/arch/m32r/kernel/
  smpboot.c
    83  struct cpuinfo_m32r cpu_data[NR_CPUS] __cacheline_aligned;
    94  static volatile int physid_2_cpu[NR_CPUS];
    98  volatile int cpu_2_physid[NR_CPUS];
    169  if (nr_cpu > NR_CPUS) {    in smp_prepare_cpus()
    171  nr_cpu, NR_CPUS);    in smp_prepare_cpus()
    202  for (phys_id = 0 ; phys_id < NR_CPUS ; phys_id++) {    in smp_prepare_cpus()
    602  for (i = 0 ; i < NR_CPUS ; i++) {    in init_cpu_to_physid()

/arch/powerpc/lib/
  locks.c
    34  BUG_ON(holder_cpu >= NR_CPUS);    in __spin_yield()
    59  BUG_ON(holder_cpu >= NR_CPUS);    in __rw_yield()

/arch/m32r/mm/
  fault-nommu.c
    39  unsigned int tlb_entry_i_dat[NR_CPUS];
    40  unsigned int tlb_entry_d_dat[NR_CPUS];

/arch/arm/mach-omap2/
  cpuidle44xx.c
    51  static struct powerdomain *mpu_pd, *cpu_pd[NR_CPUS];
    52  static struct clockdomain *cpu_clkdm[NR_CPUS];
    55  static bool cpu_done[NR_CPUS];

/arch/sparc/include/asm/
  irq_64.h
    95  extern void *hardirq_stack[NR_CPUS];
    96  extern void *softirq_stack[NR_CPUS];

/arch/tile/include/asm/
  fixmap.h
    59  FIX_KMAP_END = FIX_KMAP_BEGIN+(KM_TYPE_NR*NR_CPUS)-1,
    63  FIX_HOMECACHE_END = FIX_HOMECACHE_BEGIN+(NR_CPUS)-1,