Searched refs:dpu_kms (Results 1 – 16 of 16) sorted by relevance

/drivers/gpu/drm/msm/disp/dpu1/
dpu_core_irq.c
22 struct dpu_kms *dpu_kms = arg; in dpu_core_irq_callback_handler() local
23 struct dpu_irq *irq_obj = &dpu_kms->irq_obj; in dpu_core_irq_callback_handler()
41 u32 dpu_core_irq_read(struct dpu_kms *dpu_kms, int irq_idx, bool clear) in dpu_core_irq_read() argument
43 if (!dpu_kms->hw_intr || in dpu_core_irq_read()
44 !dpu_kms->hw_intr->ops.get_interrupt_status) in dpu_core_irq_read()
53 return dpu_kms->hw_intr->ops.get_interrupt_status(dpu_kms->hw_intr, in dpu_core_irq_read()
57 int dpu_core_irq_register_callback(struct dpu_kms *dpu_kms, int irq_idx, in dpu_core_irq_register_callback() argument
62 if (!dpu_kms->irq_obj.irq_cb_tbl) { in dpu_core_irq_register_callback()
75 if (irq_idx < 0 || irq_idx >= dpu_kms->hw_intr->total_irqs) { in dpu_core_irq_register_callback()
82 irq_flags = dpu_kms->hw_intr->ops.lock(dpu_kms->hw_intr); in dpu_core_irq_register_callback()
[all …]
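
The dpu_core_irq.c hits above show dpu_core_irq_read() checking both the interrupt block pointer and its ops callback before dispatching. Below is a minimal compilable sketch of that guard pattern; the hw_intr/intr_ops types are stand-ins, not the driver's real dpu_hw_intr definitions.

    #include <stdio.h>

    struct intr_ops {
            unsigned int (*get_interrupt_status)(void *hw, int irq_idx, int clear);
    };

    struct hw_intr {
            struct intr_ops ops;
    };

    /* Mirrors the guard in dpu_core_irq_read(): return 0 when the interrupt
     * block or its status callback is missing instead of dereferencing NULL. */
    static unsigned int read_irq_status(struct hw_intr *hw_intr, int irq_idx)
    {
            if (!hw_intr || !hw_intr->ops.get_interrupt_status)
                    return 0;

            return hw_intr->ops.get_interrupt_status(hw_intr, irq_idx, 1);
    }

    int main(void)
    {
            printf("%u\n", read_irq_status(NULL, 5)); /* prints 0, no crash */
            return 0;
    }
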
dpu_kms.c
51 static void _dpu_kms_mmu_destroy(struct dpu_kms *dpu_kms);
57 struct dpu_kms *kms = (struct dpu_kms *)s->private; in _dpu_danger_signal_status()
104 static void dpu_debugfs_danger_init(struct dpu_kms *dpu_kms, in dpu_debugfs_danger_init() argument
110 dpu_kms, &dpu_debugfs_danger_stats_fops); in dpu_debugfs_danger_init()
112 dpu_kms, &dpu_debugfs_safe_stats_fops); in dpu_debugfs_danger_init()
118 struct dpu_kms *dpu_kms = regset->dpu_kms; in _dpu_debugfs_show_regset32() local
122 if (!dpu_kms->mmio) in _dpu_debugfs_show_regset32()
125 base = dpu_kms->mmio + regset->offset; in _dpu_debugfs_show_regset32()
134 pm_runtime_get_sync(&dpu_kms->pdev->dev); in _dpu_debugfs_show_regset32()
144 pm_runtime_put_sync(&dpu_kms->pdev->dev); in _dpu_debugfs_show_regset32()
[all …]
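
The dpu_kms.c hits show _dpu_debugfs_show_regset32() bailing out when dpu_kms->mmio is NULL and wrapping the register dump in pm_runtime_get_sync()/pm_runtime_put_sync(). A hedged kernel-style sketch of that bracket follows; the regset layout and the dump loop are illustrative assumptions, only the NULL check and the runtime-PM pairing come from the hits.

    #include <linux/device.h>
    #include <linux/io.h>
    #include <linux/pm_runtime.h>
    #include <linux/seq_file.h>

    /* Illustrative register-window descriptor, loosely modeled on the
     * debugfs regset in dpu_kms.c; the field names are assumptions. */
    struct demo_regset32 {
            void __iomem *mmio;
            struct device *dev;
            u32 offset;
            u32 length;
    };

    static int demo_show_regset32(struct seq_file *s, struct demo_regset32 *r)
    {
            void __iomem *base;
            u32 i;

            if (!r->mmio)
                    return 0;

            base = r->mmio + r->offset;

            /* Keep the device powered while its registers are read. */
            pm_runtime_get_sync(r->dev);
            for (i = 0; i < r->length; i += 4)
                    seq_printf(s, "0x%08x 0x%08x\n", r->offset + i,
                               readl(base + i));
            pm_runtime_put_sync(r->dev);

            return 0;
    }
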
dpu_core_irq.h
16 void dpu_core_irq_preinstall(struct dpu_kms *dpu_kms);
23 void dpu_core_irq_uninstall(struct dpu_kms *dpu_kms);
30 irqreturn_t dpu_core_irq(struct dpu_kms *dpu_kms);
40 struct dpu_kms *dpu_kms,
58 struct dpu_kms *dpu_kms,
76 struct dpu_kms *dpu_kms,
85 void dpu_debugfs_core_irq_init(struct dpu_kms *dpu_kms,
dpu_vbif.h
50 void dpu_vbif_set_ot_limit(struct dpu_kms *dpu_kms,
58 void dpu_vbif_set_qos_remap(struct dpu_kms *dpu_kms,
65 void dpu_vbif_clear_errors(struct dpu_kms *dpu_kms);
71 void dpu_vbif_init_memtypes(struct dpu_kms *dpu_kms);
73 void dpu_debugfs_vbif_init(struct dpu_kms *dpu_kms, struct dentry *debugfs_root);
dpu_vbif.c
14 static struct dpu_hw_vbif *dpu_get_vbif(struct dpu_kms *dpu_kms, enum dpu_vbif vbif_idx) in dpu_get_vbif() argument
16 if (vbif_idx < ARRAY_SIZE(dpu_kms->hw_vbif)) in dpu_get_vbif()
17 return dpu_kms->hw_vbif[vbif_idx]; in dpu_get_vbif()
156 void dpu_vbif_set_ot_limit(struct dpu_kms *dpu_kms, in dpu_vbif_set_ot_limit() argument
165 mdp = dpu_kms->hw_mdp; in dpu_vbif_set_ot_limit()
167 vbif = dpu_get_vbif(dpu_kms, params->vbif_idx); in dpu_vbif_set_ot_limit()
207 void dpu_vbif_set_qos_remap(struct dpu_kms *dpu_kms, in dpu_vbif_set_qos_remap() argument
216 if (!params || !dpu_kms->hw_mdp) { in dpu_vbif_set_qos_remap()
220 mdp = dpu_kms->hw_mdp; in dpu_vbif_set_qos_remap()
222 vbif = dpu_get_vbif(dpu_kms, params->vbif_idx); in dpu_vbif_set_qos_remap()
[all …]
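
dpu_get_vbif() in the dpu_vbif.c hits bounds-checks the requested index against ARRAY_SIZE() of the hw_vbif table and returns NULL otherwise, so callers can handle an absent VBIF block. A small compilable sketch of the same lookup, with placeholder types and data:

    #include <stddef.h>
    #include <stdio.h>

    #define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

    struct hw_vbif { int id; };

    static struct hw_vbif vbif_rt = { .id = 0 };
    static struct hw_vbif *hw_vbif[2] = { &vbif_rt, NULL };

    /* Out-of-range or unpopulated slots yield NULL, as in dpu_get_vbif(). */
    static struct hw_vbif *get_vbif(unsigned int vbif_idx)
    {
            if (vbif_idx < ARRAY_SIZE(hw_vbif))
                    return hw_vbif[vbif_idx];

            return NULL;
    }

    int main(void)
    {
            printf("%p %p\n", (void *)get_vbif(0), (void *)get_vbif(7));
            return 0;
    }
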
dpu_kms.h
94 struct dpu_kms { struct
148 #define to_dpu_kms(x) container_of(x, struct dpu_kms, base) argument
166 *dpu_kms_get_existing_global_state(struct dpu_kms *dpu_kms);
189 struct dpu_kms *dpu_kms; member
202 uint32_t offset, uint32_t length, struct dpu_kms *dpu_kms);
237 void *dpu_debugfs_get_root(struct dpu_kms *dpu_kms);
261 u64 dpu_kms_get_clk_rate(struct dpu_kms *dpu_kms, char *clock_name);
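
The dpu_kms.h hits show struct dpu_kms embedding a base object and to_dpu_kms() recovering the wrapper from a pointer to that base via container_of(). Below is a standalone sketch of the idiom; demo_base stands in for the embedded base type, which the hits do not show.

    #include <stddef.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct demo_base { int id; };   /* stand-in for the embedded base object */

    struct demo_kms {
            struct demo_base base;
            unsigned long core_clk_rate;
    };

    #define to_demo_kms(x) container_of(x, struct demo_kms, base)

    int main(void)
    {
            struct demo_kms kms = { .base = { .id = 1 }, .core_clk_rate = 300 };
            struct demo_base *b = &kms.base;

            /* Recover the wrapping demo_kms from the embedded base pointer. */
            printf("%lu\n", to_demo_kms(b)->core_clk_rate);
            return 0;
    }
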
dpu_core_perf.c
40 static u64 _dpu_core_perf_calc_bw(struct dpu_kms *kms, in _dpu_core_perf_calc_bw()
72 static u64 _dpu_core_perf_calc_clk(struct dpu_kms *kms, in _dpu_core_perf_calc_clk()
102 static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc) in _dpu_crtc_get_kms()
109 static void _dpu_core_perf_calc_crtc(struct dpu_kms *kms, in _dpu_core_perf_calc_crtc()
149 struct dpu_kms *kms; in dpu_core_perf_crtc_check()
209 static int _dpu_core_perf_crtc_update_bus(struct dpu_kms *kms, in _dpu_core_perf_crtc_update_bus()
260 struct dpu_kms *kms; in dpu_core_perf_crtc_release_bw()
287 static int _dpu_core_perf_set_core_clk_rate(struct dpu_kms *kms, u64 rate) in _dpu_core_perf_set_core_clk_rate()
298 static u64 _dpu_core_perf_get_core_clk_rate(struct dpu_kms *kms) in _dpu_core_perf_get_core_clk_rate()
330 struct dpu_kms *kms; in dpu_core_perf_crtc_update()
[all …]
dpu_encoder.c
307 irq_status = dpu_core_irq_read(phys_enc->dpu_kms, in dpu_encoder_helper_wait_for_irq()
358 ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, irq->irq_idx, in dpu_encoder_helper_register_irq()
390 ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, irq->irq_idx, in dpu_encoder_helper_unregister_irq()
540 struct dpu_kms *dpu_kms, in dpu_encoder_get_topology() argument
564 else if (!dpu_kms->catalog->caps->has_3d_merge) in dpu_encoder_get_topology()
570 if (dpu_kms->catalog->dspp && in dpu_encoder_get_topology()
571 (dpu_kms->catalog->dspp_count >= topology.num_lm)) in dpu_encoder_get_topology()
587 struct dpu_kms *dpu_kms; in dpu_encoder_virt_atomic_check() local
605 dpu_kms = to_dpu_kms(priv->kms); in dpu_encoder_virt_atomic_check()
632 topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode); in dpu_encoder_virt_atomic_check()
[all …]
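
The dpu_encoder.c hits show dpu_encoder_get_topology() consulting the hardware catalog: the layer-mixer count depends on whether the hardware supports 3D merge, and DSPP blocks are only assigned when the catalog has at least as many as the chosen mixer count. The sketch below is a heavily simplified, hedged echo of those two checks with invented field names; it is not the driver's actual topology logic.

    #include <stdbool.h>
    #include <stdio.h>

    struct demo_caps { bool has_3d_merge; };

    struct demo_catalog {
            const struct demo_caps *caps;
            int dspp_count;                 /* color-processing blocks available */
    };

    struct demo_topology { int num_lm; int num_dspp; };

    /* Simplified echo of the checks visible in dpu_encoder_get_topology(). */
    static struct demo_topology get_topology(const struct demo_catalog *cat,
                                             bool wide_mode)
    {
            struct demo_topology t = { .num_lm = 2, .num_dspp = 0 };

            /* Without 3D merge (or for narrow modes) fall back to one mixer. */
            if (!wide_mode || !cat->caps->has_3d_merge)
                    t.num_lm = 1;

            /* Only claim DSPPs when enough exist for every mixer in use. */
            if (cat->dspp_count >= t.num_lm)
                    t.num_dspp = t.num_lm;

            return t;
    }

    int main(void)
    {
            struct demo_caps caps = { .has_3d_merge = false };
            struct demo_catalog cat = { .caps = &caps, .dspp_count = 4 };
            struct demo_topology t = get_topology(&cat, true);

            printf("lm=%d dspp=%d\n", t.num_lm, t.num_dspp);
            return 0;
    }
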
dpu_core_perf.h
112 struct dpu_kms;
119 int dpu_core_perf_debugfs_init(struct dpu_kms *dpu_kms, struct dentry *parent);
dpu_encoder_phys.h
221 struct dpu_kms *dpu_kms; member
274 struct dpu_kms *dpu_kms; member
dpu_plane.c
137 static struct dpu_kms *_dpu_plane_get_kms(struct drm_plane *plane) in _dpu_plane_get_kms()
159 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_calc_bw() local
180 hw_latency_lines = dpu_kms->catalog->perf.min_prefill_lines; in _dpu_plane_calc_bw()
474 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_set_ot_limit() local
487 dpu_vbif_set_ot_limit(dpu_kms, &ot_params); in _dpu_plane_set_ot_limit()
498 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in _dpu_plane_set_qos_remap() local
513 dpu_vbif_set_qos_remap(dpu_kms, &qos_params); in _dpu_plane_set_qos_remap()
522 struct dpu_kms *kms = _dpu_plane_get_kms(&pdpu->base); in _dpu_plane_set_scanout()
877 struct dpu_kms *kms = _dpu_plane_get_kms(&pdpu->base); in dpu_plane_prepare_fb()
1347 struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane); in dpu_plane_danger_signal_ctrl() local
[all …]
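
The dpu_plane.c hits show each plane resolving its dpu_kms through _dpu_plane_get_kms() and then handing VBIF OT-limit and QoS-remap parameters to the dpu_vbif helpers. The sketch below only illustrates the accessor shape, walking from a plane through its device's driver-private data to the shared kms object; the types are placeholders, not the real drm/msm structures.

    #include <stdio.h>

    /* Placeholder object graph: a plane points at a device, the device
     * carries driver-private state that holds the kms pointer. This only
     * mirrors the shape of _dpu_plane_get_kms(). */
    struct demo_kms { int id; };
    struct demo_private { struct demo_kms *kms; };
    struct demo_device { struct demo_private *dev_private; };
    struct demo_plane { struct demo_device *dev; };

    static struct demo_kms *plane_get_kms(struct demo_plane *plane)
    {
            return plane->dev->dev_private->kms;
    }

    int main(void)
    {
            struct demo_kms kms = { .id = 3 };
            struct demo_private priv = { .kms = &kms };
            struct demo_device dev = { .dev_private = &priv };
            struct demo_plane plane = { .dev = &dev };

            printf("%d\n", plane_get_kms(&plane)->id);
            return 0;
    }
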
dpu_encoder_phys_cmd.c
331 struct dpu_kms *dpu_kms; in dpu_encoder_phys_cmd_tearcheck_config() local
347 dpu_kms = phys_enc->dpu_kms; in dpu_encoder_phys_cmd_tearcheck_config()
358 vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync"); in dpu_encoder_phys_cmd_tearcheck_config()
771 phys_enc->hw_mdptop = p->dpu_kms->hw_mdp; in dpu_encoder_phys_cmd_init()
777 phys_enc->dpu_kms = p->dpu_kms; in dpu_encoder_phys_cmd_init()
dpu_crtc.c
43 static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc) in _dpu_crtc_get_kms()
728 struct dpu_kms *dpu_kms = _dpu_crtc_get_kms(crtc); in dpu_crtc_commit_kickoff() local
757 dpu_vbif_clear_errors(dpu_kms); in dpu_crtc_commit_kickoff()
1384 struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms); in dpu_crtc_init() local
1416 if (dpu_kms->catalog->dspp_count) in dpu_crtc_init()
dpu_crtc.h
269 int dpu_crtc_register_custom_event(struct dpu_kms *kms,
dpu_encoder_phys_vid.c
714 phys_enc->hw_mdptop = p->dpu_kms->hw_mdp; in dpu_encoder_phys_vid_init()
722 phys_enc->dpu_kms = p->dpu_kms; in dpu_encoder_phys_vid_init()
/drivers/gpu/drm/msm/
Makefile
74 disp/dpu1/dpu_kms.o \