Lines Matching refs:i915

84 struct drm_i915_private *i915 = container_of(pmu, typeof(*i915), pmu); in pmu_needs_timer() local
112 else if (i915->caps.scheduler & I915_SCHEDULER_CAP_ENGINE_BUSY_STATS) in pmu_needs_timer()
123 struct drm_i915_private *i915 = gt->i915; in __get_rc6() local
127 IS_VALLEYVIEW(i915) ? in __get_rc6()
131 if (HAS_RC6p(i915)) in __get_rc6()
134 if (HAS_RC6pp(i915)) in __get_rc6()
149 struct drm_i915_private *i915 = gt->i915; in get_rc6() local
150 struct i915_pmu *pmu = &i915->pmu; in get_rc6()
189 struct drm_i915_private *i915 = container_of(pmu, typeof(*i915), pmu); in init_rc6() local
192 with_intel_runtime_pm(i915->gt.uncore->rpm, wakeref) { in init_rc6()
193 pmu->sample[__I915_SAMPLE_RC6].cur = __get_rc6(&i915->gt); in init_rc6()
200 static void park_rc6(struct drm_i915_private *i915) in park_rc6() argument
202 struct i915_pmu *pmu = &i915->pmu; in park_rc6()
204 pmu->sample[__I915_SAMPLE_RC6].cur = __get_rc6(&i915->gt); in park_rc6()
216 static void park_rc6(struct drm_i915_private *i915) {} in park_rc6() argument
231 void i915_pmu_gt_parked(struct drm_i915_private *i915) in i915_pmu_gt_parked() argument
233 struct i915_pmu *pmu = &i915->pmu; in i915_pmu_gt_parked()
240 park_rc6(i915); in i915_pmu_gt_parked()
251 void i915_pmu_gt_unparked(struct drm_i915_private *i915) in i915_pmu_gt_unparked() argument
253 struct i915_pmu *pmu = &i915->pmu; in i915_pmu_gt_unparked()
274 static bool exclusive_mmio_access(const struct drm_i915_private *i915) in exclusive_mmio_access() argument
281 return IS_GEN(i915, 7); in exclusive_mmio_access()
322 struct drm_i915_private *i915 = gt->i915; in engines_sample() local
327 if ((i915->pmu.enable & ENGINE_SAMPLE_MASK) == 0) in engines_sample()
337 if (exclusive_mmio_access(i915)) { in engines_sample()
365 struct drm_i915_private *i915 = gt->i915; in frequency_sample() local
367 struct i915_pmu *pmu = &i915->pmu; in frequency_sample()
410 struct drm_i915_private *i915 = in i915_sample() local
412 struct i915_pmu *pmu = &i915->pmu; in i915_sample()
413 struct intel_gt *gt = &i915->gt; in i915_sample()
438 static u64 count_interrupts(struct drm_i915_private *i915) in count_interrupts() argument
441 struct irq_desc *desc = irq_to_desc(i915->drm.pdev->irq); in count_interrupts()
456 struct drm_i915_private *i915 = in i915_pmu_event_destroy() local
457 container_of(event->pmu, typeof(*i915), pmu.base); in i915_pmu_event_destroy()
459 drm_WARN_ON(&i915->drm, event->parent); in i915_pmu_event_destroy()
471 if (INTEL_GEN(engine->i915) < 6) in engine_event_status()
482 config_status(struct drm_i915_private *i915, u64 config) in config_status() argument
486 if (IS_VALLEYVIEW(i915) || IS_CHERRYVIEW(i915)) in config_status()
491 if (INTEL_GEN(i915) < 6) in config_status()
497 if (!HAS_RC6(i915)) in config_status()
509 struct drm_i915_private *i915 = in engine_event_init() local
510 container_of(event->pmu, typeof(*i915), pmu.base); in engine_event_init()
513 engine = intel_engine_lookup_user(i915, engine_event_class(event), in engine_event_init()
523 struct drm_i915_private *i915 = in i915_pmu_event_init() local
524 container_of(event->pmu, typeof(*i915), pmu.base); in i915_pmu_event_init()
547 ret = config_status(i915, event->attr.config); in i915_pmu_event_init()
559 struct drm_i915_private *i915 = in __i915_pmu_event_read() local
560 container_of(event->pmu, typeof(*i915), pmu.base); in __i915_pmu_event_read()
561 struct i915_pmu *pmu = &i915->pmu; in __i915_pmu_event_read()
568 engine = intel_engine_lookup_user(i915, in __i915_pmu_event_read()
572 if (drm_WARN_ON_ONCE(&i915->drm, !engine)) { in __i915_pmu_event_read()
596 val = count_interrupts(i915); in __i915_pmu_event_read()
599 val = get_rc6(&i915->gt); in __i915_pmu_event_read()
624 struct drm_i915_private *i915 = in i915_pmu_enable() local
625 container_of(event->pmu, typeof(*i915), pmu.base); in i915_pmu_enable()
627 struct i915_pmu *pmu = &i915->pmu; in i915_pmu_enable()
656 engine = intel_engine_lookup_user(i915, in i915_pmu_enable()
684 struct drm_i915_private *i915 = in i915_pmu_disable() local
685 container_of(event->pmu, typeof(*i915), pmu.base); in i915_pmu_disable()
687 struct i915_pmu *pmu = &i915->pmu; in i915_pmu_disable()
696 engine = intel_engine_lookup_user(i915, in i915_pmu_disable()
862 struct drm_i915_private *i915 = container_of(pmu, typeof(*i915), pmu); in create_event_attributes() local
890 if (!config_status(i915, events[i].config)) in create_event_attributes()
894 for_each_uabi_engine(engine, i915) { in create_event_attributes()
924 if (config_status(i915, events[i].config)) in create_event_attributes()
945 for_each_uabi_engine(engine, i915) { in create_event_attributes()
1065 struct drm_i915_private *i915 = container_of(pmu, typeof(*i915), pmu); in i915_pmu_unregister_cpuhp_state() local
1067 drm_WARN_ON(&i915->drm, pmu->cpuhp.slot == CPUHP_INVALID); in i915_pmu_unregister_cpuhp_state()
1068 drm_WARN_ON(&i915->drm, cpuhp_state_remove_instance(pmu->cpuhp.slot, &pmu->cpuhp.node)); in i915_pmu_unregister_cpuhp_state()
1073 static bool is_igp(struct drm_i915_private *i915) in is_igp() argument
1075 struct pci_dev *pdev = i915->drm.pdev; in is_igp()
1084 void i915_pmu_register(struct drm_i915_private *i915) in i915_pmu_register() argument
1086 struct i915_pmu *pmu = &i915->pmu; in i915_pmu_register()
1096 if (INTEL_GEN(i915) <= 2) { in i915_pmu_register()
1097 drm_info(&i915->drm, "PMU not supported for this GPU."); in i915_pmu_register()
1107 if (!is_igp(i915)) { in i915_pmu_register()
1110 dev_name(i915->drm.dev)); in i915_pmu_register()
1159 if (!is_igp(i915)) in i915_pmu_register()
1162 drm_notice(&i915->drm, "Failed to register PMU!\n"); in i915_pmu_register()
1165 void i915_pmu_unregister(struct drm_i915_private *i915) in i915_pmu_unregister() argument
1167 struct i915_pmu *pmu = &i915->pmu; in i915_pmu_unregister()
1172 drm_WARN_ON(&i915->drm, pmu->enable); in i915_pmu_unregister()
1181 if (!is_igp(i915)) in i915_pmu_unregister()
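
Most of the matches above follow the same pattern: a struct i915_pmu pointer (or an embedded member such as pmu.base inside an event->pmu) is converted back to its owning struct drm_i915_private with container_of(), and everything else is reached through i915->pmu, i915->gt, or i915->drm. The sketch below is a minimal, user-space illustration of that container_of() idiom only; the struct names fake_pmu and fake_i915_private are illustrative stand-ins, not the driver's real definitions, and the kernel code additionally uses GCC's typeof(*i915) so the enclosing type does not have to be spelled out at the call site.

    /*
     * Minimal sketch of the container_of() idiom seen in the matches above
     * (e.g. line 84): recover the enclosing structure from a pointer to one
     * of its embedded members. Struct names here are hypothetical stand-ins.
     */
    #include <stddef.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct fake_pmu {
        unsigned int enable;
    };

    struct fake_i915_private {
        int id;
        struct fake_pmu pmu;    /* embedded member, like i915->pmu */
    };

    static void needs_timer(struct fake_pmu *pmu)
    {
        /* Walk back from the embedded member to the parent device struct. */
        struct fake_i915_private *i915 =
            container_of(pmu, struct fake_i915_private, pmu);

        printf("device id %d, pmu enable %u\n", i915->id, pmu->enable);
    }

    int main(void)
    {
        struct fake_i915_private dev = { .id = 42, .pmu = { .enable = 1 } };

        needs_timer(&dev.pmu);
        return 0;
    }

Embedding the PMU state directly in the device structure and recovering the parent with container_of() is what lets the callbacks above (pmu_needs_timer, init_rc6, i915_pmu_event_init, and so on) get at the full device from nothing more than the struct pmu pointer the perf core hands them, without storing a separate back-pointer.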