/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/ |
D | base.c
     32  struct nvkm_pmu *pmu = device->pmu;  in nvkm_pmu_fan_controlled() local
     37  if (pmu && pmu->func->code.size)  in nvkm_pmu_fan_controlled()
     48  nvkm_pmu_pgob(struct nvkm_pmu *pmu, bool enable)  in nvkm_pmu_pgob() argument
     50  if (pmu && pmu->func->pgob)  in nvkm_pmu_pgob()
     51  pmu->func->pgob(pmu, enable);  in nvkm_pmu_pgob()
     57  struct nvkm_pmu *pmu = container_of(work, typeof(*pmu), recv.work);  in nvkm_pmu_recv() local
     58  return pmu->func->recv(pmu);  in nvkm_pmu_recv()
     62  nvkm_pmu_send(struct nvkm_pmu *pmu, u32 reply[2],  in nvkm_pmu_send() argument
     65  if (!pmu || !pmu->func->send)  in nvkm_pmu_send()
     67  return pmu->func->send(pmu, reply, process, message, data0, data1);  in nvkm_pmu_send()
    [all …]
|
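Note: the base.c hits show two idioms that recur throughout this listing — recovering the containing object from an embedded member with container_of(), and dispatching through a per-chip function table guarded by NULL checks. A minimal sketch of that shape (the struct names here are illustrative stand-ins, not the real nvkm types):

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/types.h>
    #include <linux/workqueue.h>

    struct my_pmu;

    struct my_pmu_func {
        void (*recv)(struct my_pmu *);          /* per-chip hooks, may be NULL */
        int  (*send)(struct my_pmu *, u32 reply[2], u32 process,
                     u32 message, u32 data0, u32 data1);
    };

    struct my_pmu {
        const struct my_pmu_func *func;
        struct { struct work_struct work; } recv;
    };

    static void my_pmu_recv(struct work_struct *work)
    {
        /* recover the pmu from its embedded work_struct, as nvkm_pmu_recv() does */
        struct my_pmu *pmu = container_of(work, typeof(*pmu), recv.work);

        if (pmu->func->recv)
            pmu->func->recv(pmu);
    }

    static int my_pmu_send(struct my_pmu *pmu, u32 reply[2], u32 process,
                           u32 message, u32 data0, u32 data1)
    {
        /* guard both the object and the optional hook before dispatching */
        if (!pmu || !pmu->func->send)
            return -ENODEV;
        return pmu->func->send(pmu, reply, process, message, data0, data1);
    }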
D | gt215.c
     30  gt215_pmu_send(struct nvkm_pmu *pmu, u32 reply[2],  in gt215_pmu_send() argument
     33  struct nvkm_subdev *subdev = &pmu->subdev;  in gt215_pmu_send()
     37  mutex_lock(&pmu->send.mutex);  in gt215_pmu_send()
     45  mutex_unlock(&pmu->send.mutex);  in gt215_pmu_send()
     54  pmu->recv.message = message;  in gt215_pmu_send()
     55  pmu->recv.process = process;  in gt215_pmu_send()
     65  pmu->send.base));  in gt215_pmu_send()
     77  wait_event(pmu->recv.wait, (pmu->recv.process == 0));  in gt215_pmu_send()
     78  reply[0] = pmu->recv.data[0];  in gt215_pmu_send()
     79  reply[1] = pmu->recv.data[1];  in gt215_pmu_send()
    [all …]
|
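Note: gt215_pmu_send() is a synchronous mailbox: a mutex serializes senders, the sender records which (process, message) reply it expects, and wait_event() sleeps until the interrupt path fills recv.data[] and clears recv.process. A hedged sketch of just the handshake (the real function also writes the message into PMU DMEM; the field names mirror the excerpt, the struct itself is illustrative):

    #include <linux/mutex.h>
    #include <linux/types.h>
    #include <linux/wait.h>

    struct my_mbox {
        struct { struct mutex mutex; } send;    /* mutex_init() at setup */
        struct {
            wait_queue_head_t wait;             /* init_waitqueue_head() at setup */
            u32 process, message;
            u32 data[2];
        } recv;
    };

    static int my_mbox_send_sync(struct my_mbox *mbox, u32 reply[2],
                                 u32 process, u32 message)
    {
        mutex_lock(&mbox->send.mutex);

        /* tell the IRQ handler which reply we are waiting for */
        mbox->recv.message = message;
        mbox->recv.process = process;

        /* ... queue the command to the hardware here ... */

        if (reply) {
            /* IRQ handler fills recv.data[] and clears recv.process */
            wait_event(mbox->recv.wait, mbox->recv.process == 0);
            reply[0] = mbox->recv.data[0];
            reply[1] = mbox->recv.data[1];
        }

        mutex_unlock(&mbox->send.mutex);
        return 0;
    }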
D | gk20a.c
     51  gk20a_pmu_dvfs_target(struct gk20a_pmu *pmu, int *state)  in gk20a_pmu_dvfs_target() argument
     53  struct nvkm_clk *clk = pmu->base.subdev.device->clk;  in gk20a_pmu_dvfs_target()
     59  gk20a_pmu_dvfs_get_cur_state(struct gk20a_pmu *pmu, int *state)  in gk20a_pmu_dvfs_get_cur_state() argument
     61  struct nvkm_clk *clk = pmu->base.subdev.device->clk;  in gk20a_pmu_dvfs_get_cur_state()
     67  gk20a_pmu_dvfs_get_target_state(struct gk20a_pmu *pmu,  in gk20a_pmu_dvfs_get_target_state() argument
     70  struct gk20a_pmu_dvfs_data *data = pmu->data;  in gk20a_pmu_dvfs_get_target_state()
     71  struct nvkm_clk *clk = pmu->base.subdev.device->clk;  in gk20a_pmu_dvfs_get_target_state()
     86  nvkm_trace(&pmu->base.subdev, "cur level = %d, new level = %d\n",  in gk20a_pmu_dvfs_get_target_state()
     95  gk20a_pmu_dvfs_get_dev_status(struct gk20a_pmu *pmu,  in gk20a_pmu_dvfs_get_dev_status() argument
     98  struct nvkm_falcon *falcon = &pmu->base.falcon;  in gk20a_pmu_dvfs_get_dev_status()
    [all …]
|
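Note: gk20a_pmu_dvfs_get_target_state() is a small load-based governor: it derives a utilization figure from the falcon's busy/total counters (gk20a_pmu_dvfs_get_dev_status) and moves the clock state when the load leaves a target window. One plausible, heavily simplified shape of that decision — the thresholds and names below are hypothetical, not the driver's real tuning data:

    #include <linux/types.h>

    /* Illustrative governor step: 80/50 stand in for the driver's
     * p_load_max / p_load_target style parameters. */
    static int dvfs_pick_state(int cur_state, int max_state,
                               u32 busy_cycles, u32 total_cycles)
    {
        u32 load = total_cycles ? busy_cycles * 100 / total_cycles : 0;

        if (load > 80 && cur_state < max_state)
            return cur_state + 1;   /* overloaded: raise the level */
        if (load < 50 && cur_state > 0)
            return cur_state - 1;   /* underused: drop the level */
        return cur_state;
    }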
D | Kbuild
      2  nvkm-y += nvkm/subdev/pmu/base.o
      3  nvkm-y += nvkm/subdev/pmu/memx.o
      4  nvkm-y += nvkm/subdev/pmu/gt215.o
      5  nvkm-y += nvkm/subdev/pmu/gf100.o
      6  nvkm-y += nvkm/subdev/pmu/gf119.o
      7  nvkm-y += nvkm/subdev/pmu/gk104.o
      8  nvkm-y += nvkm/subdev/pmu/gk110.o
      9  nvkm-y += nvkm/subdev/pmu/gk208.o
     10  nvkm-y += nvkm/subdev/pmu/gk20a.o
     11  nvkm-y += nvkm/subdev/pmu/gm107.o
    [all …]
|
D | gm20b.c
     42  struct nvkm_pmu *pmu = container_of(falcon, typeof(*pmu), falcon);  in gm20b_pmu_acr_bootstrap_falcon() local
     52  ret = nvkm_falcon_cmdq_send(pmu->hpq, &cmd.cmd.hdr,  in gm20b_pmu_acr_bootstrap_falcon()
     54  &pmu->subdev, msecs_to_jiffies(1000));  in gm20b_pmu_acr_bootstrap_falcon()
    140  struct nvkm_pmu *pmu = priv;  in gm20b_pmu_acr_init_wpr_callback() local
    141  struct nvkm_subdev *subdev = &pmu->subdev;  in gm20b_pmu_acr_init_wpr_callback()
    150  complete_all(&pmu->wpr_ready);  in gm20b_pmu_acr_init_wpr_callback()
    155  gm20b_pmu_acr_init_wpr(struct nvkm_pmu *pmu)  in gm20b_pmu_acr_init_wpr() argument
    165  return nvkm_falcon_cmdq_send(pmu->hpq, &cmd.cmd.hdr,  in gm20b_pmu_acr_init_wpr()
    166  gm20b_pmu_acr_init_wpr_callback, pmu, 0);  in gm20b_pmu_acr_init_wpr()
    170  gm20b_pmu_initmsg(struct nvkm_pmu *pmu)  in gm20b_pmu_initmsg() argument
    [all …]
|
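Note: gm20b.c shows the asynchronous flavor of command submission: nvkm_falcon_cmdq_send() takes a completion callback plus an opaque priv pointer, and the init-WPR path pairs that with a struct completion so other code can block until the firmware acknowledges. A minimal sketch of the callback/completion pairing, with hypothetical names (the real callback signature also carries the firmware message header):

    #include <linux/completion.h>
    #include <linux/errno.h>
    #include <linux/jiffies.h>

    struct my_fw {
        struct completion wpr_ready;    /* init_completion() before sending */
    };

    /* runs from the message-queue handler once the firmware replies */
    static void my_fw_on_wpr_reply(void *priv)
    {
        struct my_fw *fw = priv;

        /* one-shot event: wake every current and future waiter */
        complete_all(&fw->wpr_ready);
    }

    static int my_fw_wait_wpr(struct my_fw *fw)
    {
        if (!wait_for_completion_timeout(&fw->wpr_ready,
                                         msecs_to_jiffies(1000)))
            return -ETIMEDOUT;
        return 0;
    }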
D | memx.c
      7  struct nvkm_pmu *pmu;  member
     20  struct nvkm_device *device = memx->pmu->subdev.device;  in memx_out()
     44  nvkm_memx_init(struct nvkm_pmu *pmu, struct nvkm_memx **pmemx)  in nvkm_memx_init() argument
     46  struct nvkm_device *device = pmu->subdev.device;  in nvkm_memx_init()
     51  ret = nvkm_pmu_send(pmu, reply, PROC_MEMX, MEMX_MSG_INFO,  in nvkm_memx_init()
     59  memx->pmu = pmu;  in nvkm_memx_init()
     75  struct nvkm_pmu *pmu = memx->pmu;  in nvkm_memx_fini() local
     76  struct nvkm_subdev *subdev = &pmu->subdev;  in nvkm_memx_fini()
     89  nvkm_pmu_send(pmu, reply, PROC_MEMX, MEMX_MSG_EXEC,  in nvkm_memx_fini()
    102  nvkm_debug(&memx->pmu->subdev, "R[%06x] = %08x\n", addr, data);  in nvkm_memx_wr32()
    [all …]
|
D | gm200.c
     29  struct nvkm_pmu *pmu = container_of(falcon, typeof(*pmu), falcon);  in gm200_pmu_flcn_reset() local
     32  pmu->func->reset(pmu);  in gm200_pmu_flcn_reset()
     64  gm200_pmu_nofw(struct nvkm_pmu *pmu, int ver, const struct nvkm_pmu_fwif *fwif)  in gm200_pmu_nofw() argument
     66  nvkm_warn(&pmu->subdev, "firmware unavailable\n");  in gm200_pmu_nofw()
|
/drivers/gpu/drm/i915/ |
D | i915_pmu.c
    105  static bool pmu_needs_timer(struct i915_pmu *pmu, bool gpu_active)  in pmu_needs_timer() argument
    107  struct drm_i915_private *i915 = container_of(pmu, typeof(*i915), pmu);  in pmu_needs_timer()
    115  enable = pmu->enable;  in pmu_needs_timer()
    171  struct i915_pmu *pmu = &i915->pmu;  in get_rc6() local
    182  spin_lock_irqsave(&pmu->lock, flags);  in get_rc6()
    185  pmu->sample[__I915_SAMPLE_RC6].cur = val;  in get_rc6()
    194  val = ktime_since_raw(pmu->sleep_last);  in get_rc6()
    195  val += pmu->sample[__I915_SAMPLE_RC6].cur;  in get_rc6()
    198  if (val < pmu->sample[__I915_SAMPLE_RC6_LAST_REPORTED].cur)  in get_rc6()
    199  val = pmu->sample[__I915_SAMPLE_RC6_LAST_REPORTED].cur;  in get_rc6()
    [all …]
|
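Note: the get_rc6() hits sample a hardware residency counter under a spinlock with IRQs disabled, estimate time spent asleep since the last hardware read, and clamp so the value reported to perf never decreases even when the estimate overshoots. A hedged sketch of just the clamp, assuming caller-provided lock and state:

    #include <linux/spinlock.h>
    #include <linux/types.h>

    /* Return a never-decreasing view of @raw; @last_reported persists
     * between calls and is protected by @lock. */
    static u64 sample_monotonic(spinlock_t *lock, u64 *last_reported, u64 raw)
    {
        unsigned long flags;
        u64 val;

        spin_lock_irqsave(lock, flags);
        if (raw > *last_reported)
            *last_reported = raw;
        val = *last_reported;
        spin_unlock_irqrestore(lock, flags);

        return val;
    }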
/drivers/perf/ |
D | fsl_imx8_ddr_perf.c
     44  #define to_ddr_pmu(p) container_of(p, struct ddr_pmu, pmu)
     98  struct pmu pmu;  member
    116  struct ddr_pmu *pmu = dev_get_drvdata(dev);  in ddr_perf_identifier_show() local
    118  return sysfs_emit(page, "%s\n", pmu->devtype_data->identifier);  in ddr_perf_identifier_show()
    126  struct ddr_pmu *pmu = dev_get_drvdata(dev);  in ddr_perf_identifier_attr_visible() local
    128  if (!pmu->devtype_data->identifier)  in ddr_perf_identifier_attr_visible()
    152  static u32 ddr_perf_filter_cap_get(struct ddr_pmu *pmu, int cap)  in ddr_perf_filter_cap_get() argument
    154  u32 quirks = pmu->devtype_data->quirks;  in ddr_perf_filter_cap_get()
    173  struct ddr_pmu *pmu = dev_get_drvdata(dev);  in ddr_perf_filter_cap_show() local
    178  return sysfs_emit(buf, "%u\n", ddr_perf_filter_cap_get(pmu, cap));  in ddr_perf_filter_cap_show()
    [all …]
|
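Note: fsl_imx8_ddr_perf.c shows the standard sysfs plumbing for PMU drivers — probe stores the driver object with dev_set_drvdata(), and each show()/is_visible() callback recovers it via dev_get_drvdata() and prints with sysfs_emit(), which bounds the write to the one-page sysfs buffer. Minimal sketch with a hypothetical driver struct:

    #include <linux/device.h>
    #include <linux/sysfs.h>

    struct my_ddr_pmu {
        const char *identifier;     /* hypothetical field */
    };

    static ssize_t identifier_show(struct device *dev,
                                   struct device_attribute *attr, char *page)
    {
        struct my_ddr_pmu *pmu = dev_get_drvdata(dev);

        return sysfs_emit(page, "%s\n", pmu->identifier);
    }
    static DEVICE_ATTR_RO(identifier);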
D | arm_pmu_platform.c
     26  static int probe_current_pmu(struct arm_pmu *pmu,  in probe_current_pmu() argument
     38  ret = info->init(pmu);  in probe_current_pmu()
     46  static int pmu_parse_percpu_irq(struct arm_pmu *pmu, int irq)  in pmu_parse_percpu_irq() argument
     49  struct pmu_hw_events __percpu *hw_events = pmu->hw_events;  in pmu_parse_percpu_irq()
     51  ret = irq_get_percpu_devid_partition(irq, &pmu->supported_cpus);  in pmu_parse_percpu_irq()
     55  for_each_cpu(cpu, &pmu->supported_cpus)  in pmu_parse_percpu_irq()
     96  static int pmu_parse_irqs(struct arm_pmu *pmu)  in pmu_parse_irqs() argument
     99  struct platform_device *pdev = pmu->plat_device;  in pmu_parse_irqs()
    100  struct pmu_hw_events __percpu *hw_events = pmu->hw_events;  in pmu_parse_irqs()
    113  pmu->pmu.capabilities |= PERF_PMU_CAP_NO_INTERRUPT;  in pmu_parse_irqs()
    [all …]
|
D | arm_pmu_acpi.c
    191  struct arm_pmu *pmu;  in arm_pmu_acpi_find_alloc_pmu() local
    195  pmu = per_cpu(probed_pmus, cpu);  in arm_pmu_acpi_find_alloc_pmu()
    196  if (!pmu || pmu->acpi_cpuid != cpuid)  in arm_pmu_acpi_find_alloc_pmu()
    199  return pmu;  in arm_pmu_acpi_find_alloc_pmu()
    202  pmu = armpmu_alloc_atomic();  in arm_pmu_acpi_find_alloc_pmu()
    203  if (!pmu) {  in arm_pmu_acpi_find_alloc_pmu()
    209  pmu->acpi_cpuid = cpuid;  in arm_pmu_acpi_find_alloc_pmu()
    211  return pmu;  in arm_pmu_acpi_find_alloc_pmu()
    218  static bool pmu_irq_matches(struct arm_pmu *pmu, int irq)  in pmu_irq_matches() argument
    220  struct pmu_hw_events __percpu *hw_events = pmu->hw_events;  in pmu_irq_matches()
    [all …]
|
D | arm_pmu.c
    181  if (type == event->pmu->type)  in armpmu_map_event()
    198  struct arm_pmu *armpmu = to_arm_pmu(event->pmu);  in armpmu_event_set_period()
    240  struct arm_pmu *armpmu = to_arm_pmu(event->pmu);  in armpmu_event_update()
    270  struct arm_pmu *armpmu = to_arm_pmu(event->pmu);  in armpmu_stop()
    286  struct arm_pmu *armpmu = to_arm_pmu(event->pmu);  in armpmu_start()
    311  struct arm_pmu *armpmu = to_arm_pmu(event->pmu);  in armpmu_del()
    327  struct arm_pmu *armpmu = to_arm_pmu(event->pmu);  in armpmu_add()
    360  validate_event(struct pmu *pmu, struct pmu_hw_events *hw_events,  in validate_event() argument
    373  if (event->pmu != pmu)  in validate_event()
    382  armpmu = to_arm_pmu(event->pmu);  in validate_event()
    [all …]
|
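Note: armpmu_event_set_period()/armpmu_event_update() adapt a free-running hardware counter to perf's sampling model: the remaining period lives in the local64_t hwc->period_left, and the counter is programmed so it overflows after that many events. A simplified sketch of the set-period half (@max_period is the counter's width mask; the real code also handles a deeply negative period_left):

    #include <linux/perf_event.h>

    static u64 my_event_set_period(struct perf_event *event, u64 max_period)
    {
        struct hw_perf_event *hwc = &event->hw;
        s64 left = local64_read(&hwc->period_left);
        s64 period = hwc->sample_period;

        if (unlikely(left <= 0)) {      /* period elapsed: start a new one */
            left += period;
            local64_set(&hwc->period_left, left);
            hwc->last_period = period;
        }

        if (left > (s64)max_period)
            left = max_period;

        /* remember the start value so the update path can compute deltas */
        local64_set(&hwc->prev_count, (u64)-left);

        /* caller writes this value to the hardware counter */
        return (u64)(-left) & max_period;
    }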
D | arm_dsu_pmu.c
    114  struct pmu pmu;  member
    128  static inline struct dsu_pmu *to_dsu_pmu(struct pmu *pmu)  in to_dsu_pmu() argument
    130  return container_of(pmu, struct dsu_pmu, pmu);  in to_dsu_pmu()
    155  struct pmu *pmu = dev_get_drvdata(dev);  in dsu_pmu_cpumask_show() local
    156  struct dsu_pmu *dsu_pmu = to_dsu_pmu(pmu);  in dsu_pmu_cpumask_show()
    201  struct pmu *pmu = dev_get_drvdata(kobj_to_dev(kobj));  in dsu_pmu_event_attr_is_visible() local
    202  struct dsu_pmu *dsu_pmu = to_dsu_pmu(pmu);  in dsu_pmu_event_attr_is_visible()
    252  struct dsu_pmu *dsu_pmu = to_dsu_pmu(event->pmu);  in dsu_pmu_read_counter()
    260  dev_err(event->pmu->dev,  in dsu_pmu_read_counter()
    278  struct dsu_pmu *dsu_pmu = to_dsu_pmu(event->pmu);  in dsu_pmu_write_counter()
    [all …]
|
D | arm_smmuv3_pmu.c
    110  struct pmu pmu;  member
    121  #define to_smmu_pmu(p) (container_of(p, struct smmu_pmu, pmu))
    135  static inline void smmu_pmu_enable(struct pmu *pmu)  in smmu_pmu_enable() argument
    137  struct smmu_pmu *smmu_pmu = to_smmu_pmu(pmu);  in smmu_pmu_enable()
    147  static inline void smmu_pmu_enable_quirk_hip08_09(struct pmu *pmu)  in smmu_pmu_enable_quirk_hip08_09() argument
    149  struct smmu_pmu *smmu_pmu = to_smmu_pmu(pmu);  in smmu_pmu_enable_quirk_hip08_09()
    155  smmu_pmu_enable(pmu);  in smmu_pmu_enable_quirk_hip08_09()
    158  static inline void smmu_pmu_disable(struct pmu *pmu)  in smmu_pmu_disable() argument
    160  struct smmu_pmu *smmu_pmu = to_smmu_pmu(pmu);  in smmu_pmu_disable()
    166  static inline void smmu_pmu_disable_quirk_hip08_09(struct pmu *pmu)  in smmu_pmu_disable_quirk_hip08_09() argument
    [all …]
|
D | qcom_l3_pmu.c
    155  struct pmu pmu;  member
    163  #define to_l3cache_pmu(p) (container_of(p, struct l3cache_pmu, pmu))
    195  struct l3cache_pmu *l3pmu = to_l3cache_pmu(event->pmu);  in qcom_l3_cache__64bit_counter_start()
    227  struct l3cache_pmu *l3pmu = to_l3cache_pmu(event->pmu);  in qcom_l3_cache__64bit_counter_stop()
    241  struct l3cache_pmu *l3pmu = to_l3cache_pmu(event->pmu);  in qcom_l3_cache__64bit_counter_update()
    276  struct l3cache_pmu *l3pmu = to_l3cache_pmu(event->pmu);  in qcom_l3_cache__32bit_counter_start()
    302  struct l3cache_pmu *l3pmu = to_l3cache_pmu(event->pmu);  in qcom_l3_cache__32bit_counter_stop()
    318  struct l3cache_pmu *l3pmu = to_l3cache_pmu(event->pmu);  in qcom_l3_cache__32bit_counter_update()
    427  static void qcom_l3_cache__pmu_enable(struct pmu *pmu)  in qcom_l3_cache__pmu_enable() argument
    429  struct l3cache_pmu *l3pmu = to_l3cache_pmu(pmu);  in qcom_l3_cache__pmu_enable()
    [all …]
|
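Note: the 64-bit counter helpers in qcom_l3_pmu.c handle a counter exposed as two 32-bit MMIO words; the update path re-reads the high word until it is stable so a carry between the two reads cannot tear the value. The idiom, assuming lo/hi register pointers:

    #include <linux/io.h>
    #include <linux/types.h>

    static u64 read_counter64(void __iomem *lo_reg, void __iomem *hi_reg)
    {
        u32 hi, lo;

        do {
            hi = readl_relaxed(hi_reg);
            lo = readl_relaxed(lo_reg);
        } while (readl_relaxed(hi_reg) != hi);  /* retry if hi changed under us */

        return ((u64)hi << 32) | lo;
    }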
D | thunderx2_pmu.c
     93  struct pmu pmu;  member
    119  static inline struct tx2_uncore_pmu *pmu_to_tx2_pmu(struct pmu *pmu)  in pmu_to_tx2_pmu() argument
    121  return container_of(pmu, struct tx2_uncore_pmu, pmu);  in pmu_to_tx2_pmu()
    328  tx2_pmu = pmu_to_tx2_pmu(event->pmu);  in init_cntr_base_l3c()
    344  tx2_pmu = pmu_to_tx2_pmu(event->pmu);  in init_cntr_base_dmc()
    373  tx2_pmu = pmu_to_tx2_pmu(event->pmu);  in uncore_start_event_l3c()
    395  tx2_pmu = pmu_to_tx2_pmu(event->pmu);  in uncore_start_event_dmc()
    420  tx2_pmu = pmu_to_tx2_pmu(event->pmu);  in uncore_stop_event_dmc()
    436  tx2_pmu = pmu_to_tx2_pmu(event->pmu);  in uncore_start_event_ccpi2()
    472  tx2_pmu = pmu_to_tx2_pmu(event->pmu);  in tx2_uncore_event_update()
    [all …]
|
/drivers/soc/dove/ |
D | pmu.c
     50  struct pmu_data *pmu = rcdev_to_pmu(rc);  in pmu_reset_reset() local
     54  spin_lock_irqsave(&pmu->lock, flags);  in pmu_reset_reset()
     55  val = readl_relaxed(pmu->pmc_base + PMC_SW_RST);  in pmu_reset_reset()
     56  writel_relaxed(val & ~BIT(id), pmu->pmc_base + PMC_SW_RST);  in pmu_reset_reset()
     57  writel_relaxed(val | BIT(id), pmu->pmc_base + PMC_SW_RST);  in pmu_reset_reset()
     58  spin_unlock_irqrestore(&pmu->lock, flags);  in pmu_reset_reset()
     65  struct pmu_data *pmu = rcdev_to_pmu(rc);  in pmu_reset_assert() local
     69  spin_lock_irqsave(&pmu->lock, flags);  in pmu_reset_assert()
     70  val &= readl_relaxed(pmu->pmc_base + PMC_SW_RST);  in pmu_reset_assert()
     71  writel_relaxed(val, pmu->pmc_base + PMC_SW_RST);  in pmu_reset_assert()
    [all …]
|
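Note: the Dove reset controller does read-modify-write on one shared register under a spinlock; pmu_reset_reset() pulses the (active-low) reset bit, and the lock keeps concurrent assert/deassert callers from losing each other's updates. A sketch of the pulse:

    #include <linux/bits.h>
    #include <linux/io.h>
    #include <linux/spinlock.h>

    static void reset_pulse(void __iomem *reg, spinlock_t *lock, unsigned int id)
    {
        unsigned long flags;
        u32 val;

        spin_lock_irqsave(lock, flags);
        val = readl_relaxed(reg);
        writel_relaxed(val & ~BIT(id), reg);    /* assert: drive line low */
        writel_relaxed(val | BIT(id), reg);     /* deassert: restore */
        spin_unlock_irqrestore(lock, flags);
    }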
/drivers/soc/rockchip/ |
D | pm_domains.c
     71  struct rockchip_pmu *pmu;  member
    144  struct rockchip_pmu *pmu = pd->pmu;  in rockchip_pmu_domain_is_idle() local
    148  regmap_read(pmu->regmap, pmu->info->idle_offset, &val);  in rockchip_pmu_domain_is_idle()
    152  static unsigned int rockchip_pmu_read_ack(struct rockchip_pmu *pmu)  in rockchip_pmu_read_ack() argument
    156  regmap_read(pmu->regmap, pmu->info->ack_offset, &val);  in rockchip_pmu_read_ack()
    165  struct rockchip_pmu *pmu = pd->pmu;  in rockchip_pmu_set_idle_request() local
    174  regmap_write(pmu->regmap, pmu->info->req_offset,  in rockchip_pmu_set_idle_request()
    178  regmap_update_bits(pmu->regmap, pmu->info->req_offset,  in rockchip_pmu_set_idle_request()
    185  ret = readx_poll_timeout_atomic(rockchip_pmu_read_ack, pmu, val,  in rockchip_pmu_set_idle_request()
    189  dev_err(pmu->dev,  in rockchip_pmu_set_idle_request()
    [all …]
|
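Note: rockchip_pmu_set_idle_request() is a request/ack handshake over regmap: write the request bit, then poll the ack register with readx_poll_timeout_atomic(), which keeps calling the supplied getter until the condition holds or the timeout expires (without sleeping, so it is usable in atomic context). Sketch with a hypothetical register layout:

    #include <linux/iopoll.h>
    #include <linux/regmap.h>

    struct my_pmu_regs {
        struct regmap *regmap;
        u32 ack_offset;             /* hypothetical register layout */
    };

    static unsigned int my_read_ack(struct my_pmu_regs *pmu)
    {
        unsigned int val;

        regmap_read(pmu->regmap, pmu->ack_offset, &val);
        return val;
    }

    /* poll until (ack & mask) == target, for at most 10 ms, no sleeping */
    static int my_wait_ack(struct my_pmu_regs *pmu, u32 mask, u32 target)
    {
        unsigned int val;

        return readx_poll_timeout_atomic(my_read_ack, pmu, val,
                                         (val & mask) == target, 0, 10000);
    }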
/drivers/regulator/ |
D | bcm590xx-regulator.c
    278  struct bcm590xx_reg *pmu;  in bcm590xx_probe() local
    284  pmu = devm_kzalloc(&pdev->dev, sizeof(*pmu), GFP_KERNEL);  in bcm590xx_probe()
    285  if (!pmu)  in bcm590xx_probe()
    288  pmu->mfd = bcm590xx;  in bcm590xx_probe()
    290  platform_set_drvdata(pdev, pmu);  in bcm590xx_probe()
    292  pmu->desc = devm_kcalloc(&pdev->dev,  in bcm590xx_probe()
    296  if (!pmu->desc)  in bcm590xx_probe()
    303  pmu->desc[i].name = info->name;  in bcm590xx_probe()
    304  pmu->desc[i].of_match = of_match_ptr(info->name);  in bcm590xx_probe()
    305  pmu->desc[i].regulators_node = of_match_ptr("regulators");  in bcm590xx_probe()
    [all …]
|
/drivers/gpu/drm/nouveau/nvkm/subdev/devinit/ |
D | gm200.c
     32  pmu_code(struct nv50_devinit *init, u32 pmu, u32 img, u32 len, bool sec)  in pmu_code() argument
     38  nvkm_wr32(device, 0x10a180, 0x01000000 | (sec ? 0x10000000 : 0) | pmu);  in pmu_code()
     41  nvkm_wr32(device, 0x10a188, (pmu + i) >> 8);  in pmu_code()
     52  pmu_data(struct nv50_devinit *init, u32 pmu, u32 img, u32 len)  in pmu_data() argument
     58  nvkm_wr32(device, 0x10a1c0, 0x01000000 | pmu);  in pmu_data()
     87  struct nvbios_pmuR pmu;  in pmu_load() local
     89  if (!nvbios_pmuRm(bios, type, &pmu))  in pmu_load()
     95  pmu_code(init, pmu.boot_addr_pmu, pmu.boot_addr, pmu.boot_size, false);  in pmu_load()
     96  pmu_code(init, pmu.code_addr_pmu, pmu.code_addr, pmu.code_size, true);  in pmu_load()
     97  pmu_data(init, pmu.data_addr_pmu, pmu.data_addr, pmu.data_size);  in pmu_load()
    [all …]
|
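Note: pmu_code()/pmu_data() stream a VBIOS image into the PMU falcon's IMEM/DMEM through an auto-incrementing port: a control register selects the destination offset (the excerpt ORs in 0x01000000, with 0x10000000 added for secure code), then the payload is written word by word to a data port. A rough sketch of the shape only — the register names and bit meanings below are illustrative, inferred from the excerpt:

    #include <linux/io.h>
    #include <linux/types.h>

    static void upload_words(void __iomem *ctrl_reg, void __iomem *data_reg,
                             u32 dst, const u32 *img, u32 len, bool sec)
    {
        u32 i;

        /* select destination; 0x01000000 ~ auto-increment, 0x10000000 ~ secure */
        writel(0x01000000 | (sec ? 0x10000000 : 0) | dst, ctrl_reg);
        for (i = 0; i < len; i += 4)
            writel(img[i / 4], data_reg);   /* port advances dst by itself */
    }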
/drivers/dma/idxd/ |
D | perfmon.h
     22  struct pmu *pmu;  in event_to_pmu() local
     24  pmu = event->pmu;  in event_to_pmu()
     25  idxd_pmu = container_of(pmu, struct idxd_pmu, pmu);  in event_to_pmu()
     33  struct pmu *pmu;  in event_to_idxd() local
     35  pmu = event->pmu;  in event_to_idxd()
     36  idxd_pmu = container_of(pmu, struct idxd_pmu, pmu);  in event_to_idxd()
     41  static inline struct idxd_device *pmu_to_idxd(struct pmu *pmu)  in pmu_to_idxd() argument
     45  idxd_pmu = container_of(pmu, struct idxd_pmu, pmu);  in pmu_to_idxd()
|
D | perfmon.c
     82  return &idxd_pmu->pmu == event->pmu;  in is_idxd_event()
    154  static int perfmon_validate_group(struct idxd_pmu *pmu,  in perfmon_validate_group() argument
    165  fake_pmu->pmu.name = pmu->pmu.name;  in perfmon_validate_group()
    166  fake_pmu->n_counters = pmu->n_counters;  in perfmon_validate_group()
    206  if (event->attr.type != event->pmu->type)  in perfmon_pmu_event_init()
    216  if (event->pmu != &idxd->idxd_pmu->pmu)  in perfmon_pmu_event_init()
    448  static void perfmon_pmu_enable(struct pmu *pmu)  in perfmon_pmu_enable() argument
    450  struct idxd_device *idxd = pmu_to_idxd(pmu);  in perfmon_pmu_enable()
    455  static void perfmon_pmu_disable(struct pmu *pmu)  in perfmon_pmu_disable() argument
    457  struct idxd_device *idxd = pmu_to_idxd(pmu);  in perfmon_pmu_disable()
    [all …]
|
/drivers/perf/hisilicon/ |
D | hisi_uncore_pmu.c
     69  struct hisi_pmu *hisi_pmu = to_hisi_pmu(event->pmu);  in hisi_validate_event_group()
     78  if (leader->pmu != event->pmu)  in hisi_validate_event_group()
     89  if (sibling->pmu != event->pmu)  in hisi_validate_event_group()
    101  struct hisi_pmu *hisi_pmu = to_hisi_pmu(event->pmu);  in hisi_uncore_pmu_get_event_idx()
    190  if (event->attr.type != event->pmu->type)  in hisi_uncore_pmu_event_init()
    215  hisi_pmu = to_hisi_pmu(event->pmu);  in hisi_uncore_pmu_event_init()
    242  struct hisi_pmu *hisi_pmu = to_hisi_pmu(event->pmu);  in hisi_uncore_pmu_enable_event()
    260  struct hisi_pmu *hisi_pmu = to_hisi_pmu(event->pmu);  in hisi_uncore_pmu_disable_event()
    272  struct hisi_pmu *hisi_pmu = to_hisi_pmu(event->pmu);  in hisi_uncore_pmu_set_event_period()
    292  struct hisi_pmu *hisi_pmu = to_hisi_pmu(event->pmu);  in hisi_uncore_pmu_event_update()
    [all …]
|
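Note: hisi_validate_event_group() enforces at event_init time that the whole perf group could be scheduled at once: count the leader, its hardware siblings, and the new event, rejecting events that belong to a different PMU. The standard loop looks like this (@num_counters is the per-PMU counter budget):

    #include <linux/errno.h>
    #include <linux/perf_event.h>

    static int my_validate_group(struct perf_event *event, int num_counters)
    {
        struct perf_event *leader = event->group_leader;
        struct perf_event *sibling;
        int counters = 1;               /* the new event itself */

        if (!is_software_event(leader)) {
            if (leader->pmu != event->pmu)
                return -EINVAL;         /* cross-PMU group: unschedulable */
            if (leader != event)
                counters++;
        }

        for_each_sibling_event(sibling, leader) {
            if (is_software_event(sibling))
                continue;               /* software events always fit */
            if (sibling->pmu != event->pmu)
                return -EINVAL;
            counters++;
        }

        return counters > num_counters ? -EINVAL : 0;
    }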
/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_pmu.c
     50  struct pmu pmu;  member
    214  if (event->attr.type != event->pmu->type)  in amdgpu_perf_event_init()
    228  struct amdgpu_pmu_entry *pe = container_of(event->pmu,  in amdgpu_perf_start()
    230  pmu);  in amdgpu_perf_start()
    266  struct amdgpu_pmu_entry *pe = container_of(event->pmu,  in amdgpu_perf_read()
    268  pmu);  in amdgpu_perf_read()
    293  struct amdgpu_pmu_entry *pe = container_of(event->pmu,  in amdgpu_perf_stop()
    295  pmu);  in amdgpu_perf_stop()
    325  struct amdgpu_pmu_entry *pe = container_of(event->pmu,  in amdgpu_perf_add()
    327  pmu);  in amdgpu_perf_add()
    [all …]
|
/drivers/fpga/ |
D | dfl-fme-perf.c
    150  struct pmu pmu;  member
    176  #define to_fme_perf_priv(_pmu) container_of(_pmu, struct fme_perf_priv, pmu)
    181  struct pmu *pmu = dev_get_drvdata(dev);  in cpumask_show() local
    184  priv = to_fme_perf_priv(pmu);  in cpumask_show()
    609  struct pmu *pmu = dev_get_drvdata(kobj_to_dev(kobj));  in fme_perf_events_visible() local
    610  struct fme_perf_priv *priv = to_fme_perf_priv(pmu);  in fme_perf_events_visible()
    675  struct pmu *pmu = dev_get_drvdata(kobj_to_dev(kobj));  in fme_perf_fabric_events_visible() local
    676  struct fme_perf_priv *priv = to_fme_perf_priv(pmu);  in fme_perf_fabric_events_visible()
    789  struct fme_perf_priv *priv = to_fme_perf_priv(event->pmu);  in fme_perf_event_destroy()
    797  struct fme_perf_priv *priv = to_fme_perf_priv(event->pmu);  in fme_perf_event_init()
    [all …]
|
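Note: the fme_perf *_visible hits show how one attribute_group can serve several hardware revisions — an is_visible() callback recovers the driver object from the attribute's kobject and returns 0 to hide an attribute the hardware lacks, or the attribute's own mode to keep it. Sketch with a hypothetical capability flag:

    #include <linux/device.h>
    #include <linux/sysfs.h>

    struct my_priv {
        bool has_fabric_events;     /* hypothetical capability bit */
    };

    static umode_t my_events_visible(struct kobject *kobj,
                                     struct attribute *attr, int n)
    {
        struct device *dev = kobj_to_dev(kobj);
        struct my_priv *priv = dev_get_drvdata(dev);

        /* 0 hides the attribute; attr->mode keeps its default */
        return priv->has_fabric_events ? attr->mode : 0;
    }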