/kernel/linux/linux-5.10/drivers/gpu/drm/msm/disp/dpu1/ |
D | dpu_core_irq.c |
      22  struct dpu_kms *dpu_kms = arg;  in dpu_core_irq_callback_handler() local
      23  struct dpu_irq *irq_obj = &dpu_kms->irq_obj;  in dpu_core_irq_callback_handler()
      31  atomic_read(&dpu_kms->irq_obj.enable_counts[irq_idx]));  in dpu_core_irq_callback_handler()
      39  spin_lock_irqsave(&dpu_kms->irq_obj.cb_lock, irq_flags);  in dpu_core_irq_callback_handler()
      43  spin_unlock_irqrestore(&dpu_kms->irq_obj.cb_lock, irq_flags);  in dpu_core_irq_callback_handler()
      50  dpu_kms->hw_intr->ops.clear_intr_status_nolock(  in dpu_core_irq_callback_handler()
      51  dpu_kms->hw_intr,  in dpu_core_irq_callback_handler()
      55  int dpu_core_irq_idx_lookup(struct dpu_kms *dpu_kms,  in dpu_core_irq_idx_lookup() argument
      58  if (!dpu_kms->hw_intr || !dpu_kms->hw_intr->ops.irq_idx_lookup)  in dpu_core_irq_idx_lookup()
      61  return dpu_kms->hw_intr->ops.irq_idx_lookup(intr_type,  in dpu_core_irq_idx_lookup()
      [all …]
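Note: taken together these hits outline the core interrupt dispatch path — the handler walks the callbacks registered for the interrupt index under cb_lock, then acks the interrupt through the hw_intr ops table, and dpu_core_irq_idx_lookup() is a thin guard-plus-forward around that same ops table. A condensed sketch of the flow follows; it is reconstructed from the fragments above, not quoted from the tree, and names not visible in the hits (struct dpu_irq_callback, the irq_cb_tbl list array, the instance_idx parameter) are assumptions.

static void dpu_core_irq_callback_handler(void *arg, int irq_idx)
{
	struct dpu_kms *dpu_kms = arg;				/* hit at line 22 */
	struct dpu_irq *irq_obj = &dpu_kms->irq_obj;		/* hit at line 23 */
	struct dpu_irq_callback *cb;				/* assumed callback type */
	unsigned long irq_flags;

	/* Fan out to every callback registered for this interrupt index. */
	spin_lock_irqsave(&irq_obj->cb_lock, irq_flags);	/* hit at line 39 */
	list_for_each_entry(cb, &irq_obj->irq_cb_tbl[irq_idx], list)
		if (cb->func)
			cb->func(cb->arg, irq_idx);
	spin_unlock_irqrestore(&irq_obj->cb_lock, irq_flags);	/* hit at line 43 */

	/* Ack the pending status in hardware via the hw_intr ops table. */
	dpu_kms->hw_intr->ops.clear_intr_status_nolock(dpu_kms->hw_intr,
						       irq_idx);	/* hits at lines 50-51 */
}

int dpu_core_irq_idx_lookup(struct dpu_kms *dpu_kms,
			    enum dpu_intr_type intr_type, u32 instance_idx)
{
	/* Guard against a missing interrupt block or ops hook (hit at line 58). */
	if (!dpu_kms->hw_intr || !dpu_kms->hw_intr->ops.irq_idx_lookup)
		return -EINVAL;

	return dpu_kms->hw_intr->ops.irq_idx_lookup(intr_type, instance_idx);
}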
|
D | dpu_kms.c |
      49  static void _dpu_kms_mmu_destroy(struct dpu_kms *dpu_kms);
      55  struct dpu_kms *kms = (struct dpu_kms *)s->private;  in _dpu_danger_signal_status()
     102  static void dpu_debugfs_danger_init(struct dpu_kms *dpu_kms,  in dpu_debugfs_danger_init() argument
     108  dpu_kms, &dpu_debugfs_danger_stats_fops);  in dpu_debugfs_danger_init()
     110  dpu_kms, &dpu_debugfs_safe_stats_fops);  in dpu_debugfs_danger_init()
     116  struct dpu_kms *dpu_kms = regset->dpu_kms;  in _dpu_debugfs_show_regset32() local
     120  if (!dpu_kms->mmio)  in _dpu_debugfs_show_regset32()
     123  base = dpu_kms->mmio + regset->offset;  in _dpu_debugfs_show_regset32()
     132  pm_runtime_get_sync(&dpu_kms->pdev->dev);  in _dpu_debugfs_show_regset32()
     142  pm_runtime_put_sync(&dpu_kms->pdev->dev);  in _dpu_debugfs_show_regset32()
      [all …]
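Note: the _dpu_debugfs_show_regset32() hits show the debugfs register-dump pattern — bail out if the MMIO region is not mapped, compute the window base from the regset offset, and bracket the register reads with runtime-PM get/put so the block stays powered while it is read. A sketch of that shape, reconstructed from the hits (the blk_len field name and the print loop are assumptions):

static int _dpu_debugfs_show_regset32(struct seq_file *s, void *data)
{
	struct dpu_debugfs_regset32 *regset = s->private;
	struct dpu_kms *dpu_kms = regset->dpu_kms;	/* hit at line 116 */
	void __iomem *base;
	uint32_t i;

	if (!dpu_kms->mmio)				/* hit at line 120: not mapped yet */
		return 0;

	base = dpu_kms->mmio + regset->offset;		/* hit at line 123 */

	/* Keep the device powered while the registers are read. */
	pm_runtime_get_sync(&dpu_kms->pdev->dev);	/* hit at line 132 */
	for (i = 0; i < regset->blk_len; i += 4)	/* blk_len: assumed field name */
		seq_printf(s, "0x%08x\n", readl_relaxed(base + i));
	pm_runtime_put_sync(&dpu_kms->pdev->dev);	/* hit at line 142 */

	return 0;
}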
|
D | dpu_core_irq.h |
      16  void dpu_core_irq_preinstall(struct dpu_kms *dpu_kms);
      23  void dpu_core_irq_uninstall(struct dpu_kms *dpu_kms);
      30  irqreturn_t dpu_core_irq(struct dpu_kms *dpu_kms);
      41  struct dpu_kms *dpu_kms,
      56  struct dpu_kms *dpu_kms,
      71  struct dpu_kms *dpu_kms,
      83  struct dpu_kms *dpu_kms,
     101  struct dpu_kms *dpu_kms,
     119  struct dpu_kms *dpu_kms,
     128  void dpu_debugfs_core_irq_init(struct dpu_kms *dpu_kms,
|
D | dpu_vbif.h |
      50  void dpu_vbif_set_ot_limit(struct dpu_kms *dpu_kms,
      58  void dpu_vbif_set_qos_remap(struct dpu_kms *dpu_kms,
      65  void dpu_vbif_clear_errors(struct dpu_kms *dpu_kms);
      71  void dpu_vbif_init_memtypes(struct dpu_kms *dpu_kms);
      73  void dpu_debugfs_vbif_init(struct dpu_kms *dpu_kms, struct dentry *debugfs_root);
|
D | dpu_vbif.c |
      14  static struct dpu_hw_vbif *dpu_get_vbif(struct dpu_kms *dpu_kms, enum dpu_vbif vbif_idx)  in dpu_get_vbif() argument
      16  if (vbif_idx < ARRAY_SIZE(dpu_kms->hw_vbif))  in dpu_get_vbif()
      17  return dpu_kms->hw_vbif[vbif_idx];  in dpu_get_vbif()
     156  void dpu_vbif_set_ot_limit(struct dpu_kms *dpu_kms,  in dpu_vbif_set_ot_limit() argument
     165  mdp = dpu_kms->hw_mdp;  in dpu_vbif_set_ot_limit()
     167  vbif = dpu_get_vbif(dpu_kms, params->vbif_idx);  in dpu_vbif_set_ot_limit()
     207  void dpu_vbif_set_qos_remap(struct dpu_kms *dpu_kms,  in dpu_vbif_set_qos_remap() argument
     216  if (!params || !dpu_kms->hw_mdp) {  in dpu_vbif_set_qos_remap()
     220  mdp = dpu_kms->hw_mdp;  in dpu_vbif_set_qos_remap()
     222  vbif = dpu_get_vbif(dpu_kms, params->vbif_idx);  in dpu_vbif_set_qos_remap()
      [all …]
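Note: lines 14-17 show that dpu_get_vbif() is a bounds-checked table lookup, so an out-of-range vbif_idx yields NULL rather than an out-of-bounds read; the OT-limit and QoS-remap paths (lines 156-222) resolve hw_mdp and the VBIF instance through it before programming anything. A minimal sketch of the lookup, assembled from the hits above:

static struct dpu_hw_vbif *dpu_get_vbif(struct dpu_kms *dpu_kms,
					enum dpu_vbif vbif_idx)
{
	/* Only indices inside hw_vbif[] are valid; anything else maps to NULL. */
	if (vbif_idx < ARRAY_SIZE(dpu_kms->hw_vbif))
		return dpu_kms->hw_vbif[vbif_idx];

	return NULL;
}

Callers such as dpu_vbif_set_ot_limit() and dpu_vbif_set_qos_remap() are then expected to check the returned pointer before touching the VBIF registers.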
|
D | dpu_kms.h |
      97  struct dpu_kms {  struct
     154  #define to_dpu_kms(x) container_of(x, struct dpu_kms, base)  argument
     172  *dpu_kms_get_existing_global_state(struct dpu_kms *dpu_kms);
     195  struct dpu_kms *dpu_kms;  member
     208  uint32_t offset, uint32_t length, struct dpu_kms *dpu_kms);
     243  void *dpu_debugfs_get_root(struct dpu_kms *dpu_kms);
     267  u64 dpu_kms_get_clk_rate(struct dpu_kms *dpu_kms, char *clock_name);
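Note: line 154 shows that struct dpu_kms embeds the generic struct msm_kms as its base member, and to_dpu_kms() recovers the wrapper with container_of(). The usual lookup path from a DRM object back to the dpu_kms looks like the sketch below; the helper body matches the _dpu_crtc_get_kms()/_dpu_plane_get_kms() hits elsewhere in this listing but is a reconstruction, not a quote.

#define to_dpu_kms(x) container_of(x, struct dpu_kms, base)	/* hit at line 154 */

/* DRM device -> msm_drm_private -> msm_kms -> enclosing dpu_kms. */
static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc)
{
	struct msm_drm_private *priv = crtc->dev->dev_private;

	return to_dpu_kms(priv->kms);
}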
|
D | dpu_encoder.c |
     299  irq_status = dpu_core_irq_read(phys_enc->dpu_kms,  in dpu_encoder_helper_wait_for_irq()
     353  irq->irq_idx = dpu_core_irq_idx_lookup(phys_enc->dpu_kms,  in dpu_encoder_helper_register_irq()
     362  ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, irq->irq_idx,  in dpu_encoder_helper_register_irq()
     372  ret = dpu_core_irq_enable(phys_enc->dpu_kms, &irq->irq_idx, 1);  in dpu_encoder_helper_register_irq()
     377  dpu_core_irq_unregister_callback(phys_enc->dpu_kms,  in dpu_encoder_helper_register_irq()
     405  ret = dpu_core_irq_disable(phys_enc->dpu_kms, &irq->irq_idx, 1);  in dpu_encoder_helper_unregister_irq()
     412  ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, irq->irq_idx,  in dpu_encoder_helper_unregister_irq()
     534  struct dpu_kms *dpu_kms,  in dpu_encoder_get_topology() argument
     558  else if (!dpu_kms->catalog->caps->has_3d_merge)  in dpu_encoder_get_topology()
     564  if (dpu_kms->catalog->dspp &&  in dpu_encoder_get_topology()
      [all …]
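Note: lines 353-377 sketch the encoder helper's registration order — look up the flat hardware irq index, register the callback for it, then enable the interrupt, unwinding the callback if the enable fails; lines 405-412 mirror the sequence on teardown. A condensed reconstruction, not the verbatim source (the dpu_encoder_irq fields intr_type, hw_idx and cb, and the phys_enc->irq[] layout, are assumptions taken from context; error logging is omitted):

static int dpu_encoder_helper_register_irq(struct dpu_encoder_phys *phys_enc,
					   enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq = &phys_enc->irq[intr_idx];	/* assumed layout */
	int ret;

	/* 1. Translate (interrupt type, hw block instance) into a flat irq index. */
	irq->irq_idx = dpu_core_irq_idx_lookup(phys_enc->dpu_kms,
					       irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0)
		return -EINVAL;

	/* 2. Hook the callback for that index. */
	ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, irq->irq_idx,
					     &irq->cb);
	if (ret)
		return ret;

	/* 3. Unmask the interrupt; roll the callback back if enabling fails. */
	ret = dpu_core_irq_enable(phys_enc->dpu_kms, &irq->irq_idx, 1);
	if (ret) {
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
						 irq->irq_idx, &irq->cb);
		return ret;
	}

	return 0;
}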
|
D | dpu_core_perf.c |
      39  static u64 _dpu_core_perf_calc_bw(struct dpu_kms *kms,  in _dpu_core_perf_calc_bw()
      71  static u64 _dpu_core_perf_calc_clk(struct dpu_kms *kms,  in _dpu_core_perf_calc_clk()
     101  static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc)  in _dpu_crtc_get_kms()
     108  static void _dpu_core_perf_calc_crtc(struct dpu_kms *kms,  in _dpu_core_perf_calc_crtc()
     151  struct dpu_kms *kms;  in dpu_core_perf_crtc_check()
     211  static int _dpu_core_perf_crtc_update_bus(struct dpu_kms *kms,  in _dpu_core_perf_crtc_update_bus()
     262  struct dpu_kms *kms;  in dpu_core_perf_crtc_release_bw()
     289  static int _dpu_core_perf_set_core_clk_rate(struct dpu_kms *kms, u64 rate)  in _dpu_core_perf_set_core_clk_rate()
     300  static u64 _dpu_core_perf_get_core_clk_rate(struct dpu_kms *kms)  in _dpu_core_perf_get_core_clk_rate()
     332  struct dpu_kms *kms;  in dpu_core_perf_crtc_update()
      [all …]
|
D | dpu_core_perf.h |
     125  struct dpu_kms;
     132  int dpu_core_perf_debugfs_init(struct dpu_kms *dpu_kms, struct dentry *parent);
|
D | dpu_encoder_phys.h |
     224  struct dpu_kms *dpu_kms;  member
     277  struct dpu_kms *dpu_kms;  member
|
D | dpu_plane.c |
     127  static struct dpu_kms *_dpu_plane_get_kms(struct drm_plane *plane)  in _dpu_plane_get_kms()
     148  struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane);  in _dpu_plane_calc_bw() local
     167  hw_latency_lines = dpu_kms->catalog->perf.min_prefill_lines;  in _dpu_plane_calc_bw()
     459  struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane);  in _dpu_plane_set_ot_limit() local
     472  dpu_vbif_set_ot_limit(dpu_kms, &ot_params);  in _dpu_plane_set_ot_limit()
     483  struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane);  in _dpu_plane_set_qos_remap() local
     498  dpu_vbif_set_qos_remap(dpu_kms, &qos_params);  in _dpu_plane_set_qos_remap()
     507  struct dpu_kms *kms = _dpu_plane_get_kms(&pdpu->base);  in _dpu_plane_set_scanout()
     862  struct dpu_kms *kms = _dpu_plane_get_kms(&pdpu->base);  in dpu_plane_prepare_fb()
    1335  struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane);  in dpu_plane_danger_signal_ctrl() local
      [all …]
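Note: the plane code resolves its dpu_kms through _dpu_plane_get_kms() and then hands fully populated parameter blocks to the VBIF helpers (lines 472 and 498) rather than touching VBIF registers itself. A rough sketch of the QoS-remap hand-off follows, with the caveat that every dpu_vbif_set_qos_params field name below is an assumption rather than a quote from the tree:

static void _dpu_plane_set_qos_remap(struct drm_plane *plane)
{
	struct dpu_plane *pdpu = to_dpu_plane(plane);
	struct dpu_vbif_set_qos_params qos_params;
	struct dpu_kms *dpu_kms = _dpu_plane_get_kms(plane);	/* hit at line 483 */

	memset(&qos_params, 0, sizeof(qos_params));
	qos_params.vbif_idx = VBIF_RT;			/* real-time VBIF instance (assumed) */
	qos_params.xin_id = pdpu->pipe_hw->cap->xin_id;	/* assumed field path */
	qos_params.num = pdpu->pipe_hw->idx - SSPP_VIG0;	/* assumed */
	qos_params.is_rt = pdpu->is_rt_pipe;		/* assumed */

	dpu_vbif_set_qos_remap(dpu_kms, &qos_params);	/* hit at line 498 */
}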
|
D | dpu_encoder_phys_cmd.c |
     334  struct dpu_kms *dpu_kms;  in dpu_encoder_phys_cmd_tearcheck_config() local
     350  dpu_kms = phys_enc->dpu_kms;  in dpu_encoder_phys_cmd_tearcheck_config()
     361  vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");  in dpu_encoder_phys_cmd_tearcheck_config()
     724  phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;  in dpu_encoder_phys_cmd_init()
     730  phys_enc->dpu_kms = p->dpu_kms;  in dpu_encoder_phys_cmd_init()
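Note: line 361 is the interesting hit here — tearcheck programming asks the KMS layer for the "vsync" clock rate via dpu_kms_get_clk_rate() and derives the tear-check counter from it. A hypothetical helper illustrating the idea (this function does not exist in the tree; the ticks-per-frame formula is an assumption, not the driver's exact calculation):

#include <linux/math64.h>

/* Hypothetical helper: vsync-clock ticks per displayed frame. */
static u32 dpu_cmd_tearcheck_vsync_count(struct dpu_kms *dpu_kms,
					 const struct drm_display_mode *mode)
{
	u64 vsync_hz = dpu_kms_get_clk_rate(dpu_kms, "vsync");	/* hit at line 361 */
	int fps = drm_mode_vrefresh(mode);

	if (!vsync_hz || !mode->vtotal || !fps)
		return 0;

	/* ticks per frame = clock rate / (lines per frame * frames per second) */
	return div_u64(vsync_hz, mode->vtotal * fps);
}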
|
D | dpu_crtc.h |
     269  int dpu_crtc_register_custom_event(struct dpu_kms *kms,
|
D | dpu_encoder_phys_vid.c |
     698  phys_enc->hw_mdptop = p->dpu_kms->hw_mdp;  in dpu_encoder_phys_vid_init()
     706  phys_enc->dpu_kms = p->dpu_kms;  in dpu_encoder_phys_vid_init()
|
D | dpu_crtc.c |
      48  static struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc)  in _dpu_crtc_get_kms()
     640  struct dpu_kms *dpu_kms = _dpu_crtc_get_kms(crtc);  in dpu_crtc_commit_kickoff() local
     669  dpu_vbif_clear_errors(dpu_kms);  in dpu_crtc_commit_kickoff()
|
/kernel/linux/linux-5.10/drivers/gpu/drm/msm/ |
D | Makefile |
      74  disp/dpu1/dpu_kms.o \
|