/arch/x86/oprofile/
  op_model_ppro.c
      26    static int num_counters = 2;   [variable]
      37    for (i = 0; i < num_counters; ++i) {   [in ppro_shutdown()]
      49    for (i = 0; i < num_counters; i++) {   [in ppro_fill_in_addresses()]
      96    for (i = 0; i < num_counters; ++i) {   [in ppro_setup_ctrs()]
      112   for (i = 0; i < num_counters; ++i) {   [in ppro_setup_ctrs()]
      133   for (i = 0; i < num_counters; ++i) {   [in ppro_check_ctrs()]
      163   for (i = 0; i < num_counters; ++i) {   [in ppro_start()]
      178   for (i = 0; i < num_counters; ++i) {   [in ppro_stop()]
      188   .num_counters = 2,
      218   eax.split.num_counters = 2;   [in arch_perfmon_setup_counters()]
      [all …]
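Every match above is the same idiom: the model driver records how many counters the CPU model exposes (fixed at 2 for this P6-family driver) and each setup/start/stop/check path loops over that count instead of hard-coding a width. A minimal user-space sketch of the idiom, with illustrative names rather than the kernel's:

#include <stdio.h>

/* Per-model descriptor: generic code only knows num_counters. */
struct pmu_model {
        const char   *name;
        unsigned int  num_counters;     /* programmable counters */
        void        (*setup_ctr)(unsigned int idx);
};

static void demo_setup(unsigned int idx)
{
        printf("  program counter %u\n", idx);
}

static void model_start(const struct pmu_model *model)
{
        unsigned int i;

        printf("%s: %u counters\n", model->name, model->num_counters);
        for (i = 0; i < model->num_counters; ++i)
                model->setup_ctr(i);    /* same loop shape as above */
}

int main(void)
{
        const struct pmu_model ppro_like = {
                .name         = "ppro-like",
                .num_counters = 2,      /* cf. lines 26 and 188 above */
                .setup_ctr    = demo_setup,
        };

        model_start(&ppro_like);
        return 0;
}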
  op_model_amd.c
      42    static int num_counters;   [variable]
      275   for (i = 0; i < num_counters; ++i) {   [in op_mux_switch_ctrl()]
      294   for (i = 0; i < num_counters; ++i) {   [in op_amd_shutdown()]
      306   for (i = 0; i < num_counters; i++) {   [in op_amd_fill_in_addresses()]
      314   if (num_counters == AMD64_NUM_COUNTERS_CORE) {   [in op_amd_fill_in_addresses()]
      349   for (i = 0; i < num_counters; ++i) {   [in op_amd_setup_ctrs()]
      365   for (i = 0; i < num_counters; ++i) {   [in op_amd_setup_ctrs()]
      387   for (i = 0; i < num_counters; ++i) {   [in op_amd_check_ctrs()]
      410   for (i = 0; i < num_counters; ++i) {   [in op_amd_start()]
      430   for (i = 0; i < num_counters; ++i) {   [in op_amd_stop()]
      [all …]
  op_model_p4.c
      37    static unsigned int num_counters = NUM_COUNTERS_NON_HT;   [variable]
      47    num_counters = NUM_COUNTERS_HT2;   [in setup_num_counters()]
      384   #define VIRT_CTR(stagger, i) ((i) + ((num_counters) * (stagger)))
      392   for (i = 0; i < num_counters; ++i) {   [in p4_shutdown()]
      401   for (i = num_counters; i < num_controls; ++i) {   [in p4_shutdown()]
      416   for (i = 0; i < num_counters; ++i) {   [in p4_fill_in_addresses()]
      468   if (num_counters == NUM_COUNTERS_NON_HT) {   [in p4_fill_in_addresses()]
      490   for (i = 0; i < num_counters; ++i) {   [in p4_fill_in_addresses()]
      585   for (i = 0; i < num_counters; i++) {   [in p4_setup_ctrs()]
      595   for (i = num_counters; i < num_controls; i++) {   [in p4_setup_ctrs()]
      [all …]
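The match at line 384 is the distinctive one: VIRT_CTR() staggers counter indices by num_counters so that, with hyper-threading, the two sibling threads use disjoint slices of the P4 counter file. A self-contained illustration of that arithmetic (the per-sibling count here is a demo value, not taken from the kernel):

#include <stdio.h>

#define PER_SIBLING 4   /* demo stand-in for num_counters */
#define VIRT_CTR(stagger, i) ((i) + (PER_SIBLING * (stagger)))

int main(void)
{
        int stagger, i;

        for (stagger = 0; stagger < 2; ++stagger)       /* siblings 0 and 1 */
                for (i = 0; i < PER_SIBLING; ++i)
                        printf("sibling %d: logical %d -> physical %d\n",
                               stagger, i, VIRT_CTR(stagger, i));
        return 0;
}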
  nmi_int.c
      81    for (i = 0; i < model->num_counters; ++i) {   [in nmi_cpu_save_registers()]
      145   return virt % model->num_counters;   [in op_x86_virt_to_phys()]
      206   for (i = 0; i < model->num_counters; ++i) {   [in nmi_cpu_save_mpx_registers()]
      219   for (i = 0; i < model->num_counters; ++i) {   [in nmi_cpu_restore_mpx_registers()]
      236   si += model->num_counters;   [in nmi_cpu_switch()]
      256   return counter_config[model->num_counters].count ? 0 : -EINVAL;   [in nmi_multiplex_on()]
      318   size_t counters_size = sizeof(struct op_msr) * model->num_counters;   [in allocate_msrs()]
      367   for (i = 0; i < model->num_counters; ++i) {   [in nmi_cpu_restore_registers()]
      468   sizeof(struct op_msr) * model->num_counters);   [in nmi_setup()]
      767   model->num_virt_counters = model->num_counters;   [in op_nmi_init()]
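This file layers counter multiplexing over the per-model count: a virtual counter maps to a physical one by modulo (line 145), and each switch advances the save index by num_counters (line 236). A toy sketch of that mapping, assuming 2 physical and 6 virtual counters (demo sizes, not kernel values):

#include <stdio.h>

#define NUM_PHYS 2
#define NUM_VIRT 6

/* Mirrors the modulo at line 145. */
static int virt_to_phys(int virt)
{
        return virt % NUM_PHYS;
}

int main(void)
{
        int si, i;

        /* The outer step mirrors the save-index advance at line 236. */
        for (si = 0; si < NUM_VIRT; si += NUM_PHYS) {
                printf("switch: virtual counters %d..%d are live\n",
                       si, si + NUM_PHYS - 1);
                for (i = si; i < si + NUM_PHYS; ++i)
                        printf("  virt %d -> phys %d\n", i, virt_to_phys(i));
        }
        return 0;
}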
  op_x86_model.h
      37    unsigned int num_counters;   [member]
/arch/mips/oprofile/
  op_model_mipsxx.c
      137   unsigned int counters = op_model_mipsxx_ops.num_counters;   [in mipsxx_reg_setup()]
      166   unsigned int counters = op_model_mipsxx_ops.num_counters;   [in mipsxx_cpu_setup()]
      193   unsigned int counters = op_model_mipsxx_ops.num_counters;   [in mipsxx_cpu_start()]
      216   unsigned int counters = op_model_mipsxx_ops.num_counters;   [in mipsxx_cpu_stop()]
      238   unsigned int counters = op_model_mipsxx_ops.num_counters;   [in mipsxx_perfcount_handler()]
      346   op_model_mipsxx_ops.num_counters = counters;   [in mipsxx_init()]
      459   int counters = op_model_mipsxx_ops.num_counters;   [in mipsxx_exit()]
  op_impl.h
      36    unsigned char num_counters;   [member]
/arch/powerpc/oprofile/
  op_model_fsl_emb.c
      24    static int num_counters;   [variable]
      262   for (i = 0;i < num_counters;i++) {   [in fsl_emb_cpu_setup()]
      279   num_counters = num_ctrs;   [in fsl_emb_reg_setup()]
      286   for (i = 0; i < num_counters; ++i)   [in fsl_emb_reg_setup()]
      298   for (i = 0; i < num_counters; ++i) {   [in fsl_emb_start()]
      351   for (i = 0; i < num_counters; ++i) {   [in fsl_emb_handle_interrupt()]
  common.c
      56    op_per_cpu_rc = model->reg_setup(ctr, &sys, model->num_counters);   [in op_powerpc_setup()]
      154   for (i = 0; i < model->num_counters; ++i) {   [in op_powerpc_create_files()]
      225   model->num_counters = cur_cpu_spec->num_pmcs;   [in oprofile_arch_init()]
  op_model_cell.c
      179   static int num_counters;   [variable]
      479   for (i = 0; i < num_counters; i++)   [in cell_virtual_cntr()]
      498   for (i = 0; i < num_counters; i++) {   [in cell_virtual_cntr()]
      527   for (i = 0; i < num_counters; i++) {   [in cell_virtual_cntr()]
      735   num_counters = 1; /* Only support one SPU event at a time */   [in cell_reg_setup_spu_events()]
      756   num_counters = num_ctrs;   [in cell_reg_setup_ppu()]
      809   for (i = 0; i < num_counters; ++i) {   [in cell_reg_setup_ppu()]
      825   for (i = 0; i < num_counters; ++i) {   [in cell_reg_setup_ppu()]
      921   for (i = 0; i < num_counters; ++i) {   [in cell_cpu_setup()]
      1409  for (i = 0; i < num_counters; ++i) {   [in cell_global_start_ppu()]
      [all …]
/arch/x86/events/intel/
  uncore_snbep.c
      799   .num_counters = 2,
      956   .num_counters = 4,
      1066  .num_counters = 4,
      1159  .num_counters = 4,
      1167  .num_counters = 4,
      1179  .num_counters = 4,
      1195  .num_counters = 4,
      1204  .num_counters = 3,
      1527  .num_counters = 2,
      1658  .num_counters = 4,
      [all …]
  uncore_snb.c
      196   .num_counters = 2,
      214   .num_counters = 2,
      274   .num_counters = 4,
      312   .num_counters = 2,
      339   .num_counters = 1,
      353   .num_counters = 1,
      618   .num_counters = 2,
      1017  .num_counters = 8,
  uncore.h
      48    int num_counters;   [member]
      155   unsigned int num_counters;   [member]
      402   return box->pmu->type->freerunning[type].num_counters;   [in uncore_num_freerunning()]
      423   return box->pmu->type->num_counters;   [in uncore_num_counters()]
/arch/powerpc/include/asm/
  oprofile_impl.h
      46    int num_counters);
      56    int num_counters;   [member]
/arch/x86/kvm/vmx/
  pmu_intel.c
      136   unsigned int num_counters;   [in intel_msr_idx_to_pmc(), local]
      141   num_counters = pmu->nr_arch_fixed_counters;   [in intel_msr_idx_to_pmc()]
      144   num_counters = pmu->nr_arch_gp_counters;   [in intel_msr_idx_to_pmc()]
      146   if (idx >= num_counters)   [in intel_msr_idx_to_pmc()]
      149   return &counters[array_index_nospec(idx, num_counters)];   [in intel_msr_idx_to_pmc()]
      298   pmu->nr_arch_gp_counters = min_t(int, eax.split.num_counters,   [in intel_pmu_refresh()]
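Lines 146 and 149 pair an architectural bounds check with array_index_nospec() so that a mispredicted idx >= num_counters branch cannot steer a speculative out-of-bounds read of the counter array. A user-space analogue of the clamp (the real kernel helper uses an architecture-specific branchless mask; this only sketches the idea):

#include <stddef.h>
#include <stdio.h>

/* Force idx into [0, size): all-ones mask when in range, zero otherwise,
 * so out-of-range indices collapse to 0 without a branch. */
static size_t index_nospec(size_t idx, size_t size)
{
        size_t mask = (size_t)0 - (size_t)(idx < size);

        return idx & mask;
}

int main(void)
{
        unsigned long counters[4] = { 10, 11, 12, 13 };
        size_t num_counters = 4;
        size_t idx = 2;

        if (idx >= num_counters)        /* architectural check first */
                return 1;
        printf("%lu\n", counters[index_nospec(idx, num_counters)]);
        return 0;
}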
/arch/x86/events/amd/
  core.c
      389   for (i = 0; i < x86_pmu.num_counters; i++) {   [in __amd_put_nb_event_constraints()]
      456   for_each_set_bit(idx, c->idxmsk, x86_pmu.num_counters) {   [in __amd_get_nb_event_constraints()]
      499   for (i = 0; i < x86_pmu.num_counters; i++) {   [in amd_alloc_nb()]
      622   for (idx = 0; idx < x86_pmu.num_counters; idx++) {   [in amd_pmu_disable_all()]
      915   .num_counters = AMD64_NUM_COUNTERS,
      952   x86_pmu.num_counters = AMD64_NUM_COUNTERS_CORE;   [in amd_core_pmu_init()]
      971   for (i = 0; i < x86_pmu.num_counters - 1; i += 2)   [in amd_core_pmu_init()]
      976   x86_pmu.num_counters / 2, 0,   [in amd_core_pmu_init()]
  uncore.c
      44    int num_counters;   [member]
      136   for (i = 0; i < uncore->num_counters; i++) {   [in amd_uncore_add()]
      145   for (i = 0; i < uncore->num_counters; i++) {   [in amd_uncore_add()]
      175   for (i = 0; i < uncore->num_counters; i++) {   [in amd_uncore_del()]
      335   uncore_nb->num_counters = num_counters_nb;   [in amd_uncore_cpu_up_prepare()]
      349   uncore_llc->num_counters = num_counters_llc;   [in amd_uncore_cpu_up_prepare()]
/arch/alpha/oprofile/
  op_impl.h
      51    unsigned char num_counters;   [member]
  common.c
      59    for (i = e = 0; i < model->num_counters; ++i)   [in op_axp_setup()]
      113   for (i = 0; i < model->num_counters; ++i) {   [in op_axp_create_files()]
  op_model_ev5.c
      197   .num_counters = 3,
      207   .num_counters = 3,
  op_model_ev4.c
      112   .num_counters = 2,
  op_model_ev6.c
      99    .num_counters = 2,
/arch/riscv/include/asm/
  perf_event.h
      74    int num_counters;   [member]
/arch/x86/events/
  core.c
      151   for (i = 0; i < x86_pmu.num_counters; i++) {   [in reserve_pmc_hardware()]
      156   for (i = 0; i < x86_pmu.num_counters; i++) {   [in reserve_pmc_hardware()]
      167   i = x86_pmu.num_counters;   [in reserve_pmc_hardware()]
      180   for (i = 0; i < x86_pmu.num_counters; i++) {   [in release_pmc_hardware()]
      204   for (i = 0; i < x86_pmu.num_counters; i++) {   [in check_hw_exists()]
      619   for (idx = 0; idx < x86_pmu.num_counters; idx++) {   [in x86_pmu_disable_all()]
      667   for (idx = 0; idx < x86_pmu.num_counters; idx++) {   [in x86_pmu_enable_all()]
      944   int gpmax = x86_pmu.num_counters;   [in x86_schedule_events()]
      1009  max_count = x86_pmu.num_counters + x86_pmu.num_counters_fixed;   [in collect_events()]
      1360  if (!x86_pmu.num_counters)   [in perf_event_print_debug()]
      [all …]
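The reserve_pmc_hardware()/release_pmc_hardware() matches (lines 151-180) show the claim-then-roll-back pattern: counters are reserved one by one, and a failure releases exactly the counters already claimed before reporting the error. A compact sketch of that pattern; reserve_counter()/release_counter() are hypothetical stand-ins for the kernel's MSR reservations:

#include <stdbool.h>
#include <stdio.h>

#define NUM_COUNTERS 4

static bool claimed[NUM_COUNTERS];

static bool reserve_counter(int i)
{
        if (i == 2)             /* simulate one counter already in use */
                return false;
        claimed[i] = true;
        return true;
}

static void release_counter(int i)
{
        claimed[i] = false;
}

static bool reserve_all(void)
{
        int i;

        for (i = 0; i < NUM_COUNTERS; i++)
                if (!reserve_counter(i))
                        goto fail;
        return true;

fail:
        while (--i >= 0)        /* undo only what we claimed */
                release_counter(i);
        return false;
}

int main(void)
{
        printf("reserve_all: %s\n", reserve_all() ? "ok" : "failed");
        return 0;
}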
/arch/mips/kernel/
  perf_event_mipsxx.c
      90    unsigned int num_counters;   [member]
      296   for (i = mipspmu.num_counters - 1; i >= 0; i--) {   [in mipsxx_pmu_alloc_counter()]
      321   WARN_ON(idx < 0 || idx >= mipspmu.num_counters);   [in mipsxx_pmu_enable_event()]
      361   WARN_ON(idx < 0 || idx >= mipspmu.num_counters);   [in mipsxx_pmu_disable_event()]
      498   WARN_ON(idx < 0 || idx >= mipspmu.num_counters);   [in mipspmu_del()]
      603   (void *)(long)mipspmu.num_counters, 1);   [in hw_perf_event_destroy()]
      1357  int ctr = mipspmu.num_counters;   [in pause_local_counters()]
      1373  int ctr = mipspmu.num_counters;   [in resume_local_counters()]
      1385  unsigned int counters = mipspmu.num_counters;   [in mipsxx_pmu_handle_shared_irq()]
      1802  mipspmu.num_counters = counters;   [in init_hw_perf_events()]
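mipsxx_pmu_alloc_counter() (line 296) scans from num_counters - 1 down to 0 and takes the first free counter; a plausible motivation is keeping low-numbered counters, to which some events are restricted, available as long as possible. A minimal bitmap-based sketch of that allocator (names are illustrative, not the kernel's):

#include <stdio.h>

#define NUM_COUNTERS 4

static unsigned long used_mask;         /* bit i set => counter i busy */

/* Return the highest free counter index, or -1 if all are busy. */
static int alloc_counter(void)
{
        int i;

        for (i = NUM_COUNTERS - 1; i >= 0; i--) {
                if (!(used_mask & (1UL << i))) {
                        used_mask |= 1UL << i;
                        return i;
                }
        }
        return -1;
}

int main(void)
{
        printf("%d %d %d\n", alloc_counter(), alloc_counter(),
               alloc_counter());        /* prints: 3 2 1 */
        return 0;
}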