Searched refs:mdp5_kms (Results 1 – 15 of 15) sorted by relevance

/drivers/gpu/drm/msm/disp/mdp5/
mdp5_kms.c
24 struct mdp5_kms *mdp5_kms = to_mdp5_kms(to_mdp_kms(kms)); in mdp5_hw_init() local
25 struct device *dev = &mdp5_kms->pdev->dev; in mdp5_hw_init()
54 spin_lock_irqsave(&mdp5_kms->resource_lock, flags); in mdp5_hw_init()
55 mdp5_write(mdp5_kms, REG_MDP5_DISP_INTF_SEL, 0); in mdp5_hw_init()
56 spin_unlock_irqrestore(&mdp5_kms->resource_lock, flags); in mdp5_hw_init()
58 mdp5_ctlm_hw_reset(mdp5_kms->ctlm); in mdp5_hw_init()
73 mdp5_get_existing_global_state(struct mdp5_kms *mdp5_kms) in mdp5_get_existing_global_state() argument
75 return to_mdp5_global_state(mdp5_kms->glob_state.state); in mdp5_get_existing_global_state()
85 struct mdp5_kms *mdp5_kms = to_mdp5_kms(to_mdp_kms(priv->kms)); in mdp5_get_global_state() local
89 ret = drm_modeset_lock(&mdp5_kms->glob_state_lock, s->acquire_ctx); in mdp5_get_global_state()
[all …]
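The mdp5_kms.c hits above split into two patterns: register setup in mdp5_hw_init() done under the resource_lock spinlock, and access to driver-global state, where writers go through mdp5_get_global_state() (which first takes glob_state_lock via drm_modeset_lock(), line 89) while readers that know the state is stable use mdp5_get_existing_global_state(). A minimal sketch of the locking side, with a made-up helper name; this is not the driver's own mdp5_get_global_state():

/* Sketch only (hypothetical helper): serialize on glob_state_lock before
 * touching driver-global state, as the hits at lines 73-89 do. */
static struct mdp5_global_state *
sketch_lock_global_state(struct drm_atomic_state *s, struct mdp5_kms *mdp5_kms)
{
	int ret;

	ret = drm_modeset_lock(&mdp5_kms->glob_state_lock, s->acquire_ctx);
	if (ret)
		return ERR_PTR(ret);	/* may be -EDEADLK: caller backs off and retries */

	return mdp5_get_existing_global_state(mdp5_kms);
}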
mdp5_irq.c
25 struct mdp5_kms *mdp5_kms = container_of(irq, struct mdp5_kms, error_handler); in mdp5_irq_error_handler() local
32 struct drm_printer p = drm_info_printer(mdp5_kms->dev->dev); in mdp5_irq_error_handler()
33 drm_state_dump(mdp5_kms->dev, &p); in mdp5_irq_error_handler()
34 if (mdp5_kms->smp) in mdp5_irq_error_handler()
35 mdp5_smp_dump(mdp5_kms->smp, &p); in mdp5_irq_error_handler()
41 struct mdp5_kms *mdp5_kms = to_mdp5_kms(to_mdp_kms(kms)); in mdp5_irq_preinstall() local
42 struct device *dev = &mdp5_kms->pdev->dev; in mdp5_irq_preinstall()
45 mdp5_write(mdp5_kms, REG_MDP5_INTR_CLEAR, 0xffffffff); in mdp5_irq_preinstall()
46 mdp5_write(mdp5_kms, REG_MDP5_INTR_EN, 0x00000000); in mdp5_irq_preinstall()
53 struct mdp5_kms *mdp5_kms = to_mdp5_kms(mdp_kms); in mdp5_irq_postinstall() local
[all …]
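The irq hits rely on the error handler being an mdp_irq embedded inside struct mdp5_kms, so the enclosing object is recovered with container_of() (line 25); preinstall then clears and masks the interrupt block through the usual mdp5_write() accessor. A sketch of the container_of step, with a hypothetical helper name:

/* Illustrative only: walk back from the embedded 'error_handler' member
 * to the enclosing struct mdp5_kms, as mdp5_irq_error_handler() does. */
static struct mdp5_kms *sketch_irq_to_mdp5_kms(struct mdp_irq *irq)
{
	return container_of(irq, struct mdp5_kms, error_handler);
}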
mdp5_cmd_encoder.c
13 static struct mdp5_kms *get_kms(struct drm_encoder *encoder) in get_kms()
23 struct mdp5_kms *mdp5_kms = get_kms(encoder); in pingpong_tearcheck_setup() local
30 if (IS_ERR_OR_NULL(mdp5_kms->vsync_clk)) { in pingpong_tearcheck_setup()
42 vsync_clk_speed = clk_round_rate(mdp5_kms->vsync_clk, VSYNC_CLK_RATE); in pingpong_tearcheck_setup()
61 mdp5_write(mdp5_kms, REG_MDP5_PP_SYNC_CONFIG_VSYNC(pp_id), cfg); in pingpong_tearcheck_setup()
62 mdp5_write(mdp5_kms, in pingpong_tearcheck_setup()
65 mdp5_write(mdp5_kms, in pingpong_tearcheck_setup()
67 mdp5_write(mdp5_kms, REG_MDP5_PP_RD_PTR_IRQ(pp_id), mode->vdisplay + 1); in pingpong_tearcheck_setup()
68 mdp5_write(mdp5_kms, REG_MDP5_PP_START_POS(pp_id), mode->vdisplay); in pingpong_tearcheck_setup()
69 mdp5_write(mdp5_kms, REG_MDP5_PP_SYNC_THRESH(pp_id), in pingpong_tearcheck_setup()
[all …]
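The command-mode path gates tearcheck setup on the dedicated vsync clock and asks the clock framework to round the requested rate (lines 30-42) before programming the PP_* registers. A small sketch of that gate; the helper name and error value are illustrative, not the driver's:

/* Sketch: tearcheck needs mdp5_kms->vsync_clk; bail out when the
 * platform did not provide one, otherwise round the requested rate. */
static long sketch_tearcheck_vsync_rate(struct mdp5_kms *mdp5_kms,
					unsigned long want_hz)
{
	if (IS_ERR_OR_NULL(mdp5_kms->vsync_clk))
		return -ENODEV;

	/* let the clock framework pick the closest achievable rate */
	return clk_round_rate(mdp5_kms->vsync_clk, want_hz);
}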
mdp5_encoder.c
13 static struct mdp5_kms *get_kms(struct drm_encoder *encoder) in get_kms()
35 struct mdp5_kms *mdp5_kms = get_kms(encoder); in mdp5_vid_encoder_mode_set() local
104 mdp5_write(mdp5_kms, REG_MDP5_INTF_HSYNC_CTL(intf), in mdp5_vid_encoder_mode_set()
107 mdp5_write(mdp5_kms, REG_MDP5_INTF_VSYNC_PERIOD_F0(intf), vsync_period); in mdp5_vid_encoder_mode_set()
108 mdp5_write(mdp5_kms, REG_MDP5_INTF_VSYNC_LEN_F0(intf), vsync_len); in mdp5_vid_encoder_mode_set()
109 mdp5_write(mdp5_kms, REG_MDP5_INTF_DISPLAY_HCTL(intf), in mdp5_vid_encoder_mode_set()
112 mdp5_write(mdp5_kms, REG_MDP5_INTF_DISPLAY_VSTART_F0(intf), display_v_start); in mdp5_vid_encoder_mode_set()
113 mdp5_write(mdp5_kms, REG_MDP5_INTF_DISPLAY_VEND_F0(intf), display_v_end); in mdp5_vid_encoder_mode_set()
114 mdp5_write(mdp5_kms, REG_MDP5_INTF_BORDER_COLOR(intf), 0); in mdp5_vid_encoder_mode_set()
115 mdp5_write(mdp5_kms, REG_MDP5_INTF_UNDERFLOW_COLOR(intf), 0xff); in mdp5_vid_encoder_mode_set()
[all …]
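The video-encoder hits program per-interface timing registers from the drm_display_mode. As a rough sketch of how the vertical values written at lines 107-113 can be derived, assuming they are expressed in hsync-clock ticks (multiples of mode->htotal); the exact formulas in the driver may differ:

/* Sketch only: vertical timing in hsync-tick units (assumption for
 * illustration, not a copy of mdp5_vid_encoder_mode_set()). */
static void sketch_program_vtiming(struct mdp5_kms *mdp5_kms, int intf,
				   const struct drm_display_mode *mode)
{
	u32 vsync_period = mode->vtotal * mode->htotal;
	u32 vsync_len = (mode->vsync_end - mode->vsync_start) * mode->htotal;

	mdp5_write(mdp5_kms, REG_MDP5_INTF_VSYNC_PERIOD_F0(intf), vsync_period);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_VSYNC_LEN_F0(intf), vsync_len);
}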
mdp5_plane.c
30 static struct mdp5_kms *get_kms(struct drm_plane *plane) in get_kms()
80 struct mdp5_kms *mdp5_kms = get_kms(state->plane); in mdp5_plane_atomic_print_state() local
84 if (mdp5_kms->caps & MDP_CAP_SRC_SPLIT) in mdp5_plane_atomic_print_state()
166 struct mdp5_kms *mdp5_kms = get_kms(plane); in mdp5_plane_cleanup_fb() local
167 struct msm_kms *kms = &mdp5_kms->base.base; in mdp5_plane_cleanup_fb()
235 struct mdp5_kms *mdp5_kms = get_kms(plane); in mdp5_plane_atomic_check_with_state() local
273 if (mdp5_kms->smp) { in mdp5_plane_atomic_check_with_state()
277 blkcfg = mdp5_smp_calculate(mdp5_kms->smp, format, in mdp5_plane_atomic_check_with_state()
479 static void set_scanout_locked(struct mdp5_kms *mdp5_kms, in set_scanout_locked() argument
483 struct msm_kms *kms = &mdp5_kms->base.base; in set_scanout_locked()
[all …]
mdp5_smp.c
32 struct mdp5_kms *get_kms(struct mdp5_smp *smp) in get_kms()
121 struct mdp5_kms *mdp5_kms = get_kms(smp); in mdp5_smp_calculate() local
122 int rev = mdp5_cfg_get_hw_rev(mdp5_kms->cfg); in mdp5_smp_calculate()
167 struct mdp5_kms *mdp5_kms = get_kms(smp); in mdp5_smp_assign() local
168 struct drm_device *dev = mdp5_kms->dev; in mdp5_smp_assign()
257 struct mdp5_kms *mdp5_kms = get_kms(smp); in write_smp_alloc_regs() local
263 mdp5_write(mdp5_kms, REG_MDP5_SMP_ALLOC_W_REG(i), in write_smp_alloc_regs()
265 mdp5_write(mdp5_kms, REG_MDP5_SMP_ALLOC_R_REG(i), in write_smp_alloc_regs()
272 struct mdp5_kms *mdp5_kms = get_kms(smp); in write_smp_fifo_regs() local
275 for (i = 0; i < mdp5_kms->num_hwpipes; i++) { in write_smp_fifo_regs()
[all …]
mdp5_kms.h
20 struct mdp5_kms { struct
72 #define to_mdp5_kms(x) container_of(x, struct mdp5_kms, base) argument
82 struct mdp5_kms *mdp5_kms; member
89 struct mdp5_global_state * mdp5_get_existing_global_state(struct mdp5_kms *mdp5_kms);
172 static inline void mdp5_write(struct mdp5_kms *mdp5_kms, u32 reg, u32 data) in mdp5_write() argument
174 WARN_ON(mdp5_kms->enable_count <= 0); in mdp5_write()
175 msm_writel(data, mdp5_kms->mmio + reg); in mdp5_write()
178 static inline u32 mdp5_read(struct mdp5_kms *mdp5_kms, u32 reg) in mdp5_read() argument
180 WARN_ON(mdp5_kms->enable_count <= 0); in mdp5_read()
181 return msm_readl(mdp5_kms->mmio + reg); in mdp5_read()
[all …]
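These header hits define the access pattern every other file in this result set uses: to_mdp5_kms() downcasts from the generic kms base with container_of, and mdp5_write()/mdp5_read() (lines 172-181) are thin MMIO accessors that WARN when enable_count says the block is not powered. A hypothetical read-modify-write helper, built only from those pieces, to show them in combination:

/* Hypothetical helper (not in the driver): downcast the generic kms
 * pointer, then read-modify-write a register through the accessors,
 * both of which WARN if the hardware is not power-enabled. */
static u32 sketch_rmw(struct msm_kms *kms, u32 reg, u32 clear, u32 set)
{
	struct mdp5_kms *mdp5_kms = to_mdp5_kms(to_mdp_kms(kms));
	u32 val = mdp5_read(mdp5_kms, reg);

	val = (val & ~clear) | set;
	mdp5_write(mdp5_kms, reg, val);

	return val;
}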
mdp5_crtc.c
70 static struct mdp5_kms *get_kms(struct drm_crtc *crtc) in get_kms()
168 struct mdp5_kms *mdp5_kms = get_kms(&mdp5_crtc->base); in unref_cursor_worker() local
169 struct msm_kms *kms = &mdp5_kms->base.base; in unref_cursor_worker()
218 struct mdp5_kms *mdp5_kms = get_kms(crtc); in blend_setup() local
333 mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_OP_MODE(lm, in blend_setup()
335 mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_FG_ALPHA(lm, in blend_setup()
337 mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_BG_ALPHA(lm, in blend_setup()
340 mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_OP_MODE(r_lm, in blend_setup()
342 mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_FG_ALPHA(r_lm, in blend_setup()
344 mdp5_write(mdp5_kms, REG_MDP5_LM_BLEND_BG_ALPHA(r_lm, in blend_setup()
[all …]
mdp5_pipe.c
15 struct mdp5_kms *mdp5_kms = to_mdp5_kms(to_mdp_kms(priv->kms)); in mdp5_pipe_assign() local
25 old_global_state = mdp5_get_existing_global_state(mdp5_kms); in mdp5_pipe_assign()
30 for (i = 0; i < mdp5_kms->num_hwpipes; i++) { in mdp5_pipe_assign()
31 struct mdp5_hw_pipe *cur = mdp5_kms->hwpipes[i]; in mdp5_pipe_assign()
64 for (j = i + 1; j < mdp5_kms->num_hwpipes; in mdp5_pipe_assign()
67 mdp5_kms->hwpipes[j]; in mdp5_pipe_assign()
94 if (mdp5_kms->smp) { in mdp5_pipe_assign()
101 ret = mdp5_smp_assign(mdp5_kms->smp, &new_global_state->smp, in mdp5_pipe_assign()
125 struct mdp5_kms *mdp5_kms = to_mdp5_kms(to_mdp_kms(priv->kms)); in mdp5_pipe_release() local
144 if (mdp5_kms->smp) { in mdp5_pipe_release()
[all …]
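Pipe assignment scans the hwpipes[] array bounded by num_hwpipes (lines 30 and 64), matching candidates against the global hwpipe state and, when SMP is present, reserving blocks via mdp5_smp_assign(). A sketch of the scan alone; it assumes the hw_pipe struct exposes its pipe id as ->pipe, which is not shown in the hits above:

/* Sketch only: linear scan over the hardware pipes owned by mdp5_kms.
 * The ->pipe member is an assumption made for this illustration. */
static struct mdp5_hw_pipe *sketch_find_hwpipe(struct mdp5_kms *mdp5_kms,
					       enum mdp5_pipe pipe)
{
	int i;

	for (i = 0; i < mdp5_kms->num_hwpipes; i++) {
		struct mdp5_hw_pipe *cur = mdp5_kms->hwpipes[i];

		if (cur->pipe == pipe)
			return cur;
	}

	return NULL;
}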
mdp5_mixer.c
19 static int get_right_pair_idx(struct mdp5_kms *mdp5_kms, int lm) in get_right_pair_idx() argument
28 for (i = 0; i < mdp5_kms->num_hwmixers; i++) { in get_right_pair_idx()
29 struct mdp5_hw_mixer *mixer = mdp5_kms->hwmixers[i]; in get_right_pair_idx()
43 struct mdp5_kms *mdp5_kms = to_mdp5_kms(to_mdp_kms(priv->kms)); in mdp5_mixer_assign() local
53 for (i = 0; i < mdp5_kms->num_hwmixers; i++) { in mdp5_mixer_assign()
54 struct mdp5_hw_mixer *cur = mdp5_kms->hwmixers[i]; in mdp5_mixer_assign()
74 pair_idx = get_right_pair_idx(mdp5_kms, cur->lm); in mdp5_mixer_assign()
81 *r_mixer = mdp5_kms->hwmixers[pair_idx]; in mdp5_mixer_assign()
mdp5_ctl.c
75 struct mdp5_kms *get_kms(struct mdp5_ctl_manager *ctl_mgr) in get_kms()
85 struct mdp5_kms *mdp5_kms = get_kms(ctl->ctlm); in ctl_write() local
88 mdp5_write(mdp5_kms, reg, data); in ctl_write()
94 struct mdp5_kms *mdp5_kms = get_kms(ctl->ctlm); in ctl_read() local
97 return mdp5_read(mdp5_kms, reg); in ctl_read()
100 static void set_display_intf(struct mdp5_kms *mdp5_kms, in set_display_intf() argument
106 spin_lock_irqsave(&mdp5_kms->resource_lock, flags); in set_display_intf()
107 intf_sel = mdp5_read(mdp5_kms, REG_MDP5_DISP_INTF_SEL); in set_display_intf()
131 mdp5_write(mdp5_kms, REG_MDP5_DISP_INTF_SEL, intf_sel); in set_display_intf()
132 spin_unlock_irqrestore(&mdp5_kms->resource_lock, flags); in set_display_intf()
[all …]
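The CTL hits show ctl_write()/ctl_read() as thin wrappers over mdp5_write()/mdp5_read(), and set_display_intf() (line 100) performing a read-modify-write of DISP_INTF_SEL under resource_lock, the same spinlock mdp5_hw_init() holds when it clears that register. An illustrative sketch of that discipline; the helper name and mask/val parameters are made up:

/* Sketch: DISP_INTF_SEL is shared routing state, so update it with a
 * read-modify-write while holding mdp5_kms->resource_lock. */
static void sketch_update_intf_sel(struct mdp5_kms *mdp5_kms, u32 mask, u32 val)
{
	unsigned long flags;
	u32 intf_sel;

	spin_lock_irqsave(&mdp5_kms->resource_lock, flags);
	intf_sel = mdp5_read(mdp5_kms, REG_MDP5_DISP_INTF_SEL);
	intf_sel = (intf_sel & ~mask) | (val & mask);
	mdp5_write(mdp5_kms, REG_MDP5_DISP_INTF_SEL, intf_sel);
	spin_unlock_irqrestore(&mdp5_kms->resource_lock, flags);
}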
mdp5_smp.h
60 struct mdp5_kms;
69 struct mdp5_smp *mdp5_smp_init(struct mdp5_kms *mdp5_kms,
mdp5_cfg.h
111 struct mdp5_kms;
122 struct mdp5_cfg_handler *mdp5_cfg_init(struct mdp5_kms *mdp5_kms,
mdp5_cfg.c
1358 struct mdp5_cfg_handler *mdp5_cfg_init(struct mdp5_kms *mdp5_kms, in mdp5_cfg_init() argument
1361 struct drm_device *dev = mdp5_kms->dev; in mdp5_cfg_init()
/drivers/gpu/drm/msm/
Makefile
51 disp/mdp5/mdp5_kms.o \