/*
 * Copyright (c) 2014-2018, The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/kthread.h>
#include <linux/debugfs.h>
#include <linux/seq_file.h>

#include "msm_drv.h"
#include "dpu_kms.h"
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_hw_ctl.h"
#include "dpu_formats.h"
#include "dpu_encoder_phys.h"
#include "dpu_crtc.h"
#include "dpu_trace.h"
#include "dpu_core_irq.h"

#define DPU_DEBUG_ENC(e, fmt, ...) DPU_DEBUG("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_ERROR_ENC(e, fmt, ...) DPU_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_DEBUG_PHYS(p, fmt, ...) DPU_DEBUG("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

#define DPU_ERROR_PHYS(p, fmt, ...) DPU_ERROR("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)
/*
 * Two to anticipate panels that can do cmd/vid dynamic switching.
 * The plan is to create all possible physical encoder types, and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

#define MISR_BUFF_SIZE 256

#define IDLE_SHORT_TIMEOUT 1

#define MAX_VDISPLAY_SPLIT 1080

/**
 * enum dpu_enc_rc_events - events for resource control state machine
 * @DPU_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks. Regardless of the previous
 *	state, the resource should be in ON state at the end of this event.
 * @DPU_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @DPU_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, leaves the RC state
 *	in the PRE_OFF state. It should be followed by the STOP event as
 *	part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @DPU_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @DPU_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and change the resource state
 *	to IDLE.
 */
enum dpu_enc_rc_events {
	DPU_ENC_RC_EVENT_KICKOFF = 1,
	DPU_ENC_RC_EVENT_FRAME_DONE,
	DPU_ENC_RC_EVENT_PRE_STOP,
	DPU_ENC_RC_EVENT_STOP,
	DPU_ENC_RC_EVENT_ENTER_IDLE
};

/*
 * enum dpu_enc_rc_states - states that the resource control maintains
 * @DPU_ENC_RC_STATE_OFF: Resource is in OFF state
 * @DPU_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @DPU_ENC_RC_STATE_ON: Resource is in ON state
 * @DPU_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum dpu_enc_rc_states {
	DPU_ENC_RC_STATE_OFF,
	DPU_ENC_RC_STATE_PRE_OFF,
	DPU_ENC_RC_STATE_ON,
	DPU_ENC_RC_STATE_IDLE
};

/**
 * struct dpu_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @display_num_of_h_tiles: Number of horizontal tiles of the display
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. Optimization
 *			Only valid after enable. Cleared at disable.
 * @hw_pp:		Handle to the pingpong blocks used for the display. The
 *			number of pingpong blocks can differ from num_phys_encs.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc_vblank_cb:	Callback into the upper layer / CRTC for
 *			notification of the VBLANK
 * @crtc_vblank_cb_data:	Data from upper layer for VBLANK notification
 * @crtc_kickoff_cb:	Callback into CRTC that will flush & start
 *			all CTL paths
 * @crtc_kickoff_cb_data:	Opaque user data given to crtc_kickoff_cb
 * @debugfs_root:	Debug file system root file node
 * @enc_lock:		Lock around physical encoder create/destroy and
 *			access.
 * @frame_busy_mask:	Bitmask tracking which phys_enc we are still
 *			busy processing current command.
 *			Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:	callback handler for frame event
 * @crtc_frame_event_cb_data:	callback handler private data
 * @frame_done_timeout:	frame done timeout in Hz
 * @frame_done_timer:	watchdog timer for frame done event
 * @vsync_event_timer:	vsync timer
 * @disp_info:		local copy of msm_display_info struct
 * @misr_enable:	misr enable/disable status
 * @misr_frame_count:	misr frame count before start capturing the data
 * @idle_pc_supported:	indicate if idle power collapse is supported
 * @rc_lock:		resource control mutex lock to protect
 *			virt encoder over various state changes
 * @rc_state:		resource controller state
 * @delayed_off_work:	delayed worker to schedule disabling of
 *			clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:	worker to handle vsync event for autorefresh
 * @topology:		topology of the display
 * @mode_set_complete:	flag to indicate modeset completion
 * @idle_timeout:	idle timeout duration in milliseconds
 */
struct dpu_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	uint32_t display_num_of_h_tiles;

	unsigned int num_phys_encs;
	struct dpu_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct dpu_encoder_phys *cur_master;
	struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	void (*crtc_vblank_cb)(void *);
	void *crtc_vblank_cb_data;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout;
	struct timer_list frame_done_timer;
	struct timer_list vsync_event_timer;

	struct msm_display_info disp_info;
	bool misr_enable;
	u32 misr_frame_count;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum dpu_enc_rc_states rc_state;
	struct kthread_delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct msm_display_topology topology;
	bool mode_set_complete;

	u32 idle_timeout;
};

#define to_dpu_encoder_virt(x) container_of(x, struct dpu_encoder_virt, base)

static inline int _dpu_encoder_power_enable(struct dpu_encoder_virt *dpu_enc,
						bool enable)
{
	struct drm_encoder *drm_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;

	if (!dpu_enc) {
		DPU_ERROR("invalid dpu enc\n");
		return -EINVAL;
	}

	drm_enc = &dpu_enc->base;
	if (!drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("drm device invalid\n");
		return -EINVAL;
	}

	priv = drm_enc->dev->dev_private;
	if (!priv->kms) {
		DPU_ERROR("invalid kms\n");
		return -EINVAL;
	}

	dpu_kms = to_dpu_kms(priv->kms);

	if (enable)
		pm_runtime_get_sync(&dpu_kms->pdev->dev);
	else
		pm_runtime_put_sync(&dpu_kms->pdev->dev);

	return 0;
}

void dpu_encoder_helper_report_irq_timeout(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	DRM_ERROR("irq timeout id=%u, intf=%d, pp=%d, intr=%d\n",
		  DRMID(phys_enc->parent), phys_enc->intf_idx - INTF_0,
		  phys_enc->hw_pp->idx - PINGPONG_0, intr_idx);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc,
				DPU_ENCODER_FRAME_EVENT_ERROR);
}

static int dpu_encoder_helper_wait_event_timeout(int32_t drm_id,
		int32_t hw_id, struct dpu_encoder_wait_info *info);

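/*
 * dpu_encoder_helper_wait_for_irq - wait for an interrupt to fire by watching
 *	the caller-supplied atomic pending counter. If the wait times out but
 *	the raw interrupt status shows the IRQ actually fired, the registered
 *	handler is invoked manually (with local interrupts disabled) to recover
 *	from the missed interrupt instead of reporting a failure.
 */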
int dpu_encoder_helper_wait_for_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx,
		struct dpu_encoder_wait_info *wait_info)
{
	struct dpu_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!phys_enc || !wait_info || intr_idx >= INTR_IDX_MAX) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DRM_ERROR("encoder is disabled id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		return -EWOULDBLOCK;
	}

	if (irq->irq_idx < 0) {
		DRM_DEBUG_KMS("skip irq wait id=%u, intr=%d, hw=%d, irq=%s",
			      DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			      irq->name);
		return 0;
	}

	DRM_DEBUG_KMS("id=%u, intr=%d, hw=%d, irq=%d, pp=%d, pending_cnt=%d",
		      DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
		      irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(wait_info->atomic_cnt));

	ret = dpu_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->hw_idx,
			wait_info);

	if (ret <= 0) {
		irq_status = dpu_core_irq_read(phys_enc->dpu_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			DRM_DEBUG_KMS("irq not triggered id=%u, intr=%d, hw=%d, irq=%d, pp=%d, atomic_cnt=%d",
				      DRMID(phys_enc->parent), intr_idx,
				      irq->hw_idx, irq->irq_idx,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			DRM_DEBUG_KMS("irq timeout id=%u, intr=%d, hw=%d, irq=%d, pp=%d, atomic_cnt=%d",
				      DRMID(phys_enc->parent), intr_idx,
				      irq->hw_idx, irq->irq_idx,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
		}
	} else {
		ret = 0;
		trace_dpu_enc_irq_wait_success(DRMID(phys_enc->parent),
			intr_idx, irq->hw_idx, irq->irq_idx,
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	return ret;
}

int dpu_encoder_helper_register_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq;
	int ret = 0;

	if (!phys_enc || intr_idx >= INTR_IDX_MAX) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	if (irq->irq_idx >= 0) {
		DPU_DEBUG_PHYS(phys_enc,
				"skipping already registered irq %s type %d\n",
				irq->name, irq->intr_type);
		return 0;
	}

	irq->irq_idx = dpu_core_irq_idx_lookup(phys_enc->dpu_kms,
			irq->intr_type, irq->hw_idx);
	if (irq->irq_idx < 0) {
		DPU_ERROR_PHYS(phys_enc,
			"failed to lookup IRQ index for %s type:%d\n",
			irq->name, irq->intr_type);
		return -EINVAL;
	}

	ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		DPU_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	ret = dpu_core_irq_enable(phys_enc->dpu_kms, &irq->irq_idx, 1);
	if (ret) {
		DRM_ERROR("enable failed id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		dpu_core_irq_unregister_callback(phys_enc->dpu_kms,
				irq->irq_idx, &irq->cb);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	trace_dpu_enc_irq_register_success(DRMID(phys_enc->parent), intr_idx,
				irq->hw_idx, irq->irq_idx);

	return ret;
}

int dpu_encoder_helper_unregister_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq;
	int ret;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* skip irqs that weren't registered, but flag the duplicate request */
	if (irq->irq_idx < 0) {
		DRM_ERROR("duplicate unregister id=%u, intr=%d, hw=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx);
		return 0;
	}

	ret = dpu_core_irq_disable(phys_enc->dpu_kms, &irq->irq_idx, 1);
	if (ret) {
		DRM_ERROR("disable failed id=%u, intr=%d, hw=%d, irq=%d ret=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx, ret);
	}

	ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		DRM_ERROR("unreg cb fail id=%u, intr=%d, hw=%d, irq=%d ret=%d",
			  DRMID(phys_enc->parent), intr_idx, irq->hw_idx,
			  irq->irq_idx, ret);
	}

	trace_dpu_enc_irq_unregister_success(DRMID(phys_enc->parent), intr_idx,
					     irq->hw_idx, irq->irq_idx);

	irq->irq_idx = -EINVAL;

	return 0;
}

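/*
 * dpu_encoder_get_hw_resources - query the HW resources used by this virtual
 *	encoder by collecting, without overlap, the resources reported by each
 *	contained physical encoder.
 */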
void dpu_encoder_get_hw_resources(struct drm_encoder *drm_enc,
		struct dpu_encoder_hw_resources *hw_res,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	if (!hw_res || !drm_enc || !conn_state) {
		DPU_ERROR("invalid argument(s), drm_enc %d, res %d, state %d\n",
				drm_enc != 0, hw_res != 0, conn_state != 0);
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));
	hw_res->display_num_of_h_tiles = dpu_enc->display_num_of_h_tiles;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res, conn_state);
	}
}

static void dpu_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	mutex_lock(&dpu_enc->enc_lock);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.destroy) {
			phys->ops.destroy(phys);
			--dpu_enc->num_phys_encs;
			dpu_enc->phys_encs[i] = NULL;
		}
	}

	if (dpu_enc->num_phys_encs)
		DPU_ERROR_ENC(dpu_enc, "expected 0 num_phys_encs not %d\n",
				dpu_enc->num_phys_encs);
	dpu_enc->num_phys_encs = 0;
	mutex_unlock(&dpu_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&dpu_enc->enc_lock);
}

void dpu_encoder_helper_split_config(
		struct dpu_encoder_phys *phys_enc,
		enum dpu_intf interface)
{
	struct dpu_encoder_virt *dpu_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct dpu_hw_mdp *hw_mdptop;
	struct msm_display_info *disp_info;

	if (!phys_enc || !phys_enc->hw_mdptop || !phys_enc->parent) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != 0);
		return;
	}

	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &dpu_enc->disp_info;

	if (disp_info->intf_type != DRM_MODE_CONNECTOR_DSI)
		return;

	/*
	 * disable split modes since encoder will be operating as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		DPU_DEBUG_ENC(dpu_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	}
}

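/*
 * _dpu_encoder_adjust_mode - copy the private fields (and OR in the private
 *	flags) from the connector's matching probed mode into the adjusted
 *	mode, since DRM does not carry these driver-private fields through the
 *	mode_set path. A mode matches on hdisplay/vdisplay/vrefresh.
 */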
static void _dpu_encoder_adjust_mode(struct drm_connector *connector,
		struct drm_display_mode *adj_mode)
{
	struct drm_display_mode *cur_mode;

	if (!connector || !adj_mode)
		return;

	list_for_each_entry(cur_mode, &connector->modes, head) {
		if (cur_mode->vdisplay == adj_mode->vdisplay &&
			cur_mode->hdisplay == adj_mode->hdisplay &&
			cur_mode->vrefresh == adj_mode->vrefresh) {
			adj_mode->private = cur_mode->private;
			adj_mode->private_flags |= cur_mode->private_flags;
		}
	}
}

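/*
 * dpu_encoder_get_topology - derive the display topology for the given mode:
 *	one interface per populated physical encoder, and two layer mixers
 *	when vdisplay exceeds MAX_VDISPLAY_SPLIT (split pipe), one otherwise.
 */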
static struct msm_display_topology dpu_encoder_get_topology(
			struct dpu_encoder_virt *dpu_enc,
			struct dpu_kms *dpu_kms,
			struct drm_display_mode *mode)
{
	struct msm_display_topology topology;
	int i, intf_count = 0;

	for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
		if (dpu_enc->phys_encs[i])
			intf_count++;

	/* Use split topology for vdisplay > 1080 */
	topology.num_lm = (mode->vdisplay > MAX_VDISPLAY_SPLIT) ? 2 : 1;
	topology.num_enc = 0;
	topology.num_intf = intf_count;

	return topology;
}

static int dpu_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct msm_display_topology topology;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		DPU_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != 0, crtc_state != 0, conn_state != 0);
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	trace_dpu_enc_atomic_check(DRMID(drm_enc));

	/*
	 * display drivers may populate private fields of the drm display mode
	 * structure while registering possible modes of a connector with DRM.
	 * These private fields are not populated back while DRM invokes
	 * the mode_set callbacks. This module retrieves and populates the
	 * private fields of the given mode.
	 */
	_dpu_encoder_adjust_mode(conn_state->connector, adj_mode);

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys && phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			DPU_ERROR_ENC(dpu_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

	/* Reserve dynamic resources now, indicating AtomicTest phase */
	if (!ret) {
		/*
		 * Avoid reserving resources when mode set is pending. Topology
		 * info may not be available to complete reservation.
		 */
		if (drm_atomic_crtc_needs_modeset(crtc_state)
				&& dpu_enc->mode_set_complete) {
			ret = dpu_rm_reserve(&dpu_kms->rm, drm_enc, crtc_state,
				conn_state, topology, true);
			dpu_enc->mode_set_complete = false;
		}
	}

	if (!ret)
		drm_mode_set_crtcinfo(adj_mode, 0);

	trace_dpu_enc_atomic_check_flags(DRMID(drm_enc), adj_mode->flags,
			adj_mode->private_flags);

	return ret;
}

static void _dpu_encoder_update_vsync_source(struct dpu_encoder_virt *dpu_enc,
			struct msm_display_info *disp_info)
{
	struct dpu_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	int i;

	if (!dpu_enc || !disp_info) {
		DPU_ERROR("invalid param dpu_enc:%d or disp_info:%d\n",
					dpu_enc != NULL, disp_info != NULL);
		return;
	} else if (dpu_enc->num_phys_encs > ARRAY_SIZE(dpu_enc->hw_pp)) {
		DPU_ERROR("invalid num phys enc %d/%d\n",
				dpu_enc->num_phys_encs,
				(int) ARRAY_SIZE(dpu_enc->hw_pp));
		return;
	}

	drm_enc = &dpu_enc->base;
	/* these pointers are checked in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	dpu_kms = to_dpu_kms(priv->kms);
	if (!dpu_kms) {
		DPU_ERROR("invalid dpu_kms\n");
		return;
	}

	hw_mdptop = dpu_kms->hw_mdp;
	if (!hw_mdptop) {
		DPU_ERROR("invalid mdptop\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		for (i = 0; i < dpu_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = dpu_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = dpu_enc->num_phys_encs;
		if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = DPU_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = DPU_VSYNC0_SOURCE_GPIO;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}

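/* Propagate an IRQ enable/disable request to every physical encoder */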
static void _dpu_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	DPU_DEBUG_ENC(dpu_enc, "enable:%d\n", enable);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.irq_control)
			phys->ops.irq_control(phys, enable);
	}

}

static void _dpu_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_encoder_virt *dpu_enc;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_rc_helper(DRMID(drm_enc), enable);

	if (!dpu_enc->cur_master) {
		DPU_ERROR("encoder master not set\n");
		return;
	}

	if (enable) {
		/* enable DPU core clks */
		pm_runtime_get_sync(&dpu_kms->pdev->dev);

		/* enable all the irq */
		_dpu_encoder_irq_control(drm_enc, true);

	} else {
		/* disable all the irq */
		_dpu_encoder_irq_control(drm_enc, false);

		/* disable DPU core clks */
		pm_runtime_put_sync(&dpu_kms->pdev->dev);
	}

}

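/*
 * dpu_encoder_resource_control - event dispatcher for the resource control
 *	state machine described above dpu_enc_rc_events. KICKOFF powers the
 *	encoder back up (ON), FRAME_DONE schedules the delayed idle work,
 *	PRE_STOP/STOP walk the encoder down to OFF, and ENTER_IDLE performs
 *	idle power collapse. rc_lock serializes all transitions except
 *	FRAME_DONE, which runs in interrupt context.
 */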
static int dpu_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct msm_drm_thread *disp_thread;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private ||
			!drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = dpu_enc->disp_info.capabilities &
						MSM_DISPLAY_CAP_VID_MODE;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->disp_thread)) {
		DPU_ERROR("invalid crtc index\n");
		return -EINVAL;
	}
	disp_thread = &priv->disp_thread[drm_enc->crtc->index];

	/*
	 * when idle_pc is not supported, process only KICKOFF, STOP and
	 * PRE_STOP events and return early for other events (i.e. WB display).
	 */
	if (!dpu_enc->idle_pc_supported &&
			(sw_event != DPU_ENC_RC_EVENT_KICKOFF &&
			sw_event != DPU_ENC_RC_EVENT_STOP &&
			sw_event != DPU_ENC_RC_EVENT_PRE_STOP))
		return 0;

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event, dpu_enc->idle_pc_supported,
			 dpu_enc->rc_state, "begin");

	switch (sw_event) {
	case DPU_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in ON state\n",
				      DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state != DPU_ENC_RC_STATE_OFF &&
				dpu_enc->rc_state != DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in state %d\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		if (is_vid_mode && dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE)
			_dpu_encoder_irq_control(drm_enc, true);
		else
			_dpu_encoder_resource_control_helper(drm_enc, true);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_ON;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "kickoff");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. And locking is not required as the other events
		 * like KICKOFF and STOP do a wait-for-idle before executing
		 * the resource_control
		 */
		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id:%d, sw_event:%d,rc:%d-unexpected\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (dpu_crtc_frame_pending(drm_enc->crtc) > 1) {
			DRM_DEBUG_KMS("id:%d skip schedule work\n",
				      DRMID(drm_enc));
			return 0;
		}

		kthread_queue_delayed_work(
			&disp_thread->worker,
			&dpu_enc->delayed_off_work,
			msecs_to_jiffies(dpu_enc->idle_timeout));

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "frame done");
		break;

	case DPU_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (kthread_cancel_delayed_work_sync(
				&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		if (is_vid_mode &&
			  dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			_dpu_encoder_irq_control(drm_enc, true);
		}
		/* skip if is already OFF or IDLE, resources are off already */
		else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF ||
				dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in %d state\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		dpu_enc->rc_state = DPU_ENC_RC_STATE_PRE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "pre stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_STOP:
		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in OFF state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF) {
			DRM_DEBUG_KMS("id: %u, sw_event:%d, rc in OFF state\n",
				      DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc in state %d\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		/*
		 * expect to arrive here only if in either idle state or
		 * pre-off; in IDLE state the resources are already disabled
		 */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_PRE_OFF)
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&dpu_enc->rc_lock);

		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc:%d !ON state\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (dpu_enc->frame_busy_mask[0]) {
			DRM_ERROR("id:%u, sw_event:%d, rc:%d frame pending\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		if (is_vid_mode)
			_dpu_encoder_irq_control(drm_enc, false);
		else
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_IDLE;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "idle");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	default:
		DRM_ERROR("id:%u, unexpected sw_event: %d\n", DRMID(drm_enc),
			  sw_event);
		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "error");
		break;
	}

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
			 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
			 "end");
	return 0;
}

static void dpu_encoder_virt_mode_set(struct drm_encoder *drm_enc,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct dpu_rm_hw_iter pp_iter;
	struct msm_display_topology topology;
	enum dpu_rm_topology_name topology_name;
	int i = 0, ret;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	connector_list = &dpu_kms->dev->mode_config.connector_list;

	trace_dpu_enc_mode_set(DRMID(drm_enc));

	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		DPU_ERROR_ENC(dpu_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		DPU_ERROR_ENC(dpu_enc, "invalid connector state\n");
		return;
	}

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

	/* Reserve dynamic resources now, indicating non-AtomicTest phase */
	ret = dpu_rm_reserve(&dpu_kms->rm, drm_enc, drm_enc->crtc->state,
			conn->state, topology, false);
	if (ret) {
		DPU_ERROR_ENC(dpu_enc,
				"failed to reserve hw resources, %d\n", ret);
		return;
	}

	dpu_rm_init_hw_iter(&pp_iter, drm_enc->base.id, DPU_HW_BLK_PINGPONG);
	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
		dpu_enc->hw_pp[i] = NULL;
		if (!dpu_rm_get_hw(&dpu_kms->rm, &pp_iter))
			break;
		dpu_enc->hw_pp[i] = (struct dpu_hw_pingpong *) pp_iter.hw;
	}

	topology_name = dpu_rm_get_topology_name(topology);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys) {
			if (!dpu_enc->hw_pp[i]) {
				DPU_ERROR_ENC(dpu_enc,
				    "invalid pingpong block for the encoder\n");
				return;
			}
			phys->hw_pp = dpu_enc->hw_pp[i];
			phys->connector = conn->state->connector;
			phys->topology_name = topology_name;
			if (phys->ops.mode_set)
				phys->ops.mode_set(phys, mode, adj_mode);
		}
	}

	dpu_enc->mode_set_complete = true;
}

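/*
 * Common (re)enable path: route audio to the DP interface where applicable,
 * reset the UBWC configuration in MDP top, and program the vsync source for
 * command-mode panels.
 */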
static void _dpu_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	if (!dpu_kms) {
		DPU_ERROR("invalid dpu_kms\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	if (!dpu_enc || !dpu_enc->cur_master) {
		DPU_ERROR("invalid dpu encoder/master\n");
		return;
	}

	if (dpu_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
	    dpu_enc->cur_master->hw_mdptop &&
	    dpu_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		dpu_enc->cur_master->hw_mdptop->ops.intf_audio_select(
			dpu_enc->cur_master->hw_mdptop);

	if (dpu_enc->cur_master->hw_mdptop &&
			dpu_enc->cur_master->hw_mdptop->ops.reset_ubwc)
		dpu_enc->cur_master->hw_mdptop->ops.reset_ubwc(
				dpu_enc->cur_master->hw_mdptop,
				dpu_kms->catalog);

	_dpu_encoder_update_vsync_source(dpu_enc, &dpu_enc->disp_info);
}

void dpu_encoder_virt_restore(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && (phys != dpu_enc->cur_master) && phys->ops.restore)
			phys->ops.restore(phys);
	}

	if (dpu_enc->cur_master && dpu_enc->cur_master->ops.restore)
		dpu_enc->cur_master->ops.restore(dpu_enc->cur_master);

	_dpu_encoder_virt_enable_helper(drm_enc);
}

static void dpu_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i, ret = 0;
	struct drm_display_mode *cur_mode = NULL;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	cur_mode = &dpu_enc->base.crtc->state->adjusted_mode;

	trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay,
			     cur_mode->vdisplay);

	dpu_enc->cur_master = NULL;
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.is_master && phys->ops.is_master(phys)) {
			DPU_DEBUG_ENC(dpu_enc, "master is now idx %d\n", i);
			dpu_enc->cur_master = phys;
			break;
		}
	}

	if (!dpu_enc->cur_master) {
		DPU_ERROR("virt encoder has no master! num_phys %d\n", i);
		return;
	}

	ret = dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		DPU_ERROR_ENC(dpu_enc, "dpu resource control failed: %d\n",
				ret);
		return;
	}

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys)
			continue;

		if (phys != dpu_enc->cur_master) {
			if (phys->ops.enable)
				phys->ops.enable(phys);
		}

		if (dpu_enc->misr_enable && (dpu_enc->disp_info.capabilities &
		     MSM_DISPLAY_CAP_VID_MODE) && phys->ops.setup_misr)
			phys->ops.setup_misr(phys, true,
					dpu_enc->misr_frame_count);
	}

	if (dpu_enc->cur_master->ops.enable)
		dpu_enc->cur_master->ops.enable(dpu_enc->cur_master);

	_dpu_encoder_virt_enable_helper(drm_enc);
}

static void dpu_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct drm_display_mode *mode;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		DPU_ERROR("invalid dev\n");
		return;
	} else if (!drm_enc->dev->dev_private) {
		DPU_ERROR("invalid dev_private\n");
		return;
	}

	mode = &drm_enc->crtc->state->adjusted_mode;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_disable(DRMID(drm_enc));

	/* wait for idle */
	dpu_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.disable)
			phys->ops.disable(phys);
	}

	/* after phys waits for frame-done, should be no more frames pending */
	if (atomic_xchg(&dpu_enc->frame_done_timeout, 0)) {
		DPU_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&dpu_enc->frame_done_timer);
	}

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		if (dpu_enc->phys_encs[i])
			dpu_enc->phys_encs[i]->connector = NULL;
	}

	dpu_enc->cur_master = NULL;

	DPU_DEBUG_ENC(dpu_enc, "encoder disabled\n");

	dpu_rm_release(&dpu_kms->rm, drm_enc);
}

static enum dpu_intf dpu_encoder_get_intf(struct dpu_mdss_cfg *catalog,
		enum dpu_intf_type type, u32 controller_id)
{
	int i = 0;

	for (i = 0; i < catalog->intf_count; i++) {
		if (catalog->intf[i].type == type
		    && catalog->intf[i].controller_id == controller_id) {
			return catalog->intf[i].id;
		}
	}

	return INTF_MAX;
}

static void dpu_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_vblank_callback");
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	if (dpu_enc->crtc_vblank_cb)
		dpu_enc->crtc_vblank_cb(dpu_enc->crtc_vblank_cb_data);
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
	DPU_ATRACE_END("encoder_vblank_callback");
}

static void dpu_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);
	trace_dpu_enc_underrun_cb(DRMID(drm_enc),
				  atomic_read(&phy_enc->underrun_cnt));
	DPU_ATRACE_END("encoder_underrun_callback");
}

void dpu_encoder_register_vblank_callback(struct drm_encoder *drm_enc,
		void (*vbl_cb)(void *), void *vbl_data)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;
	int i;

	enable = vbl_cb ? true : false;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	trace_dpu_enc_vblank_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	dpu_enc->crtc_vblank_cb = vbl_cb;
	dpu_enc->crtc_vblank_cb_data = vbl_data;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys && phys->ops.control_vblank_irq)
			phys->ops.control_vblank_irq(phys, enable);
	}
}

void dpu_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
		void (*frame_event_cb)(void *, u32 event),
		void *frame_event_cb_data)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;
	bool enable;

	enable = frame_event_cb ? true : false;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	trace_dpu_enc_frame_event_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	dpu_enc->crtc_frame_event_cb = frame_event_cb;
	dpu_enc->crtc_frame_event_cb_data = frame_event_cb_data;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

static void dpu_encoder_frame_done_callback(
		struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *ready_phys, u32 event)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned int i;

	if (event & (DPU_ENCODER_FRAME_EVENT_DONE
			| DPU_ENCODER_FRAME_EVENT_ERROR
			| DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {

		if (!dpu_enc->frame_busy_mask[0]) {
			/*
			 * suppress frame_done without waiter,
			 * likely autorefresh
			 */
			trace_dpu_enc_frame_done_cb_not_busy(DRMID(drm_enc),
					event, ready_phys->intf_idx);
			return;
		}

		/* One of the physical encoders has become idle */
		for (i = 0; i < dpu_enc->num_phys_encs; i++) {
			if (dpu_enc->phys_encs[i] == ready_phys) {
				clear_bit(i, dpu_enc->frame_busy_mask);
				trace_dpu_enc_frame_done_cb(DRMID(drm_enc), i,
						dpu_enc->frame_busy_mask[0]);
			}
		}

		if (!dpu_enc->frame_busy_mask[0]) {
			atomic_set(&dpu_enc->frame_done_timeout, 0);
			del_timer(&dpu_enc->frame_done_timer);

			dpu_encoder_resource_control(drm_enc,
					DPU_ENC_RC_EVENT_FRAME_DONE);

			if (dpu_enc->crtc_frame_event_cb)
				dpu_enc->crtc_frame_event_cb(
					dpu_enc->crtc_frame_event_cb_data,
					event);
		}
	} else {
		if (dpu_enc->crtc_frame_event_cb)
			dpu_enc->crtc_frame_event_cb(
				dpu_enc->crtc_frame_event_cb_data, event);
	}
}

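/*
 * Delayed-off work handler: no frame update arrived within idle_timeout, so
 * ask the RC state machine to enter IDLE and notify the CRTC with an IDLE
 * frame event.
 */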
static void dpu_encoder_off_work(struct kthread_work *work)
{
	struct dpu_encoder_virt *dpu_enc = container_of(work,
			struct dpu_encoder_virt, delayed_off_work.work);

	if (!dpu_enc) {
		DPU_ERROR("invalid dpu encoder\n");
		return;
	}

	dpu_encoder_resource_control(&dpu_enc->base,
			DPU_ENC_RC_EVENT_ENTER_IDLE);

	dpu_encoder_frame_done_callback(&dpu_enc->base, NULL,
				DPU_ENCODER_FRAME_EVENT_IDLE);
}

/**
 * _dpu_encoder_trigger_flush - trigger flush for a physical encoder
 * @drm_enc: Pointer to drm encoder structure
 * @phys: Pointer to physical encoder structure
 * @extra_flush_bits: Additional bit mask to include in flush trigger
 */
static inline void _dpu_encoder_trigger_flush(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phys, uint32_t extra_flush_bits)
{
	struct dpu_hw_ctl *ctl;
	int pending_kickoff_cnt;
	u32 ret = UINT_MAX;

	if (!drm_enc || !phys) {
		DPU_ERROR("invalid argument(s), drm_enc %d, phys_enc %d\n",
				drm_enc != 0, phys != 0);
		return;
	}

	if (!phys->hw_pp) {
		DPU_ERROR("invalid pingpong hw\n");
		return;
	}

	ctl = phys->hw_ctl;
	if (!ctl || !ctl->ops.trigger_flush) {
		DPU_ERROR("missing trigger cb\n");
		return;
	}

	pending_kickoff_cnt = dpu_encoder_phys_inc_pending(phys);

	if (extra_flush_bits && ctl->ops.update_pending_flush)
		ctl->ops.update_pending_flush(ctl, extra_flush_bits);

	ctl->ops.trigger_flush(ctl);

	if (ctl->ops.get_pending_flush)
		ret = ctl->ops.get_pending_flush(ctl);

	trace_dpu_enc_trigger_flush(DRMID(drm_enc), phys->intf_idx,
				    pending_kickoff_cnt, ctl->idx, ret);
}

/**
 * _dpu_encoder_trigger_start - trigger start for a physical encoder
 * @phys: Pointer to physical encoder structure
 */
static inline void _dpu_encoder_trigger_start(struct dpu_encoder_phys *phys)
{
	if (!phys) {
		DPU_ERROR("invalid argument(s)\n");
		return;
	}

	if (!phys->hw_pp) {
		DPU_ERROR("invalid pingpong hw\n");
		return;
	}

	if (phys->ops.trigger_start && phys->enable_state != DPU_ENC_DISABLED)
		phys->ops.trigger_start(phys);
}

void dpu_encoder_helper_trigger_start(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_hw_ctl *ctl;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	ctl = phys_enc->hw_ctl;
	if (ctl && ctl->ops.trigger_start) {
		ctl->ops.trigger_start(ctl);
		trace_dpu_enc_trigger_start(DRMID(phys_enc->parent), ctl->idx);
	}
}

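/*
 * Wait for the pending counter to drop to zero. If the jiffies-based wait
 * expires while the counter is still non-zero but the ktime deadline has not
 * actually passed (a premature timeout), arm the wait again.
 */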
static int dpu_encoder_helper_wait_event_timeout(
		int32_t drm_id,
		int32_t hw_id,
		struct dpu_encoder_wait_info *info)
{
	int rc = 0;
	s64 expected_time = ktime_to_ms(ktime_get()) + info->timeout_ms;
	s64 jiffies = msecs_to_jiffies(info->timeout_ms);
	s64 time;

	do {
		rc = wait_event_timeout(*(info->wq),
				atomic_read(info->atomic_cnt) == 0, jiffies);
		time = ktime_to_ms(ktime_get());

		trace_dpu_enc_wait_event_timeout(drm_id, hw_id, rc, time,
						 expected_time,
						 atomic_read(info->atomic_cnt));
	/* If we timed out, counter is valid and time is less, wait again */
	} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
			(time < expected_time));

	return rc;
}

void dpu_encoder_helper_hw_reset(struct dpu_encoder_phys *phys_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_hw_ctl *ctl;
	int rc;

	if (!phys_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	ctl = phys_enc->hw_ctl;

	if (!ctl || !ctl->ops.reset)
		return;

	DRM_DEBUG_KMS("id:%u ctl %d reset\n", DRMID(phys_enc->parent),
		      ctl->idx);

	rc = ctl->ops.reset(ctl);
	if (rc) {
		DPU_ERROR_ENC(dpu_enc, "ctl %d reset failure\n", ctl->idx);
		dpu_dbg_dump(false, __func__, true, true);
	}

	phys_enc->enable_state = DPU_ENC_ENABLED;
}

/**
 * _dpu_encoder_kickoff_phys - handle physical encoder kickoff
 *	Iterate through the physical encoders and perform consolidated flush
 *	and/or control start triggering as needed. This is done in the virtual
 *	encoder rather than the individual physical ones in order to handle
 *	use cases that require visibility into multiple physical encoders at
 *	a time.
 * @dpu_enc: Pointer to virtual encoder structure
 */
static void _dpu_encoder_kickoff_phys(struct dpu_encoder_virt *dpu_enc)
{
	struct dpu_hw_ctl *ctl;
	uint32_t i, pending_flush;
	unsigned long lock_flags;

	if (!dpu_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	pending_flush = 0x0;

	/* update pending counts and trigger kickoff ctl flush atomically */
	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);

	/* don't perform flush/start operations for slave encoders */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys || phys->enable_state == DPU_ENC_DISABLED)
			continue;

		ctl = phys->hw_ctl;
		if (!ctl)
			continue;

		if (phys->split_role != ENC_ROLE_SLAVE)
			set_bit(i, dpu_enc->frame_busy_mask);
		if (!phys->ops.needs_single_flush ||
				!phys->ops.needs_single_flush(phys))
			_dpu_encoder_trigger_flush(&dpu_enc->base, phys, 0x0);
		else if (ctl->ops.get_pending_flush)
			pending_flush |= ctl->ops.get_pending_flush(ctl);
	}

	/* for split flush, combine pending flush masks and send to master */
	if (pending_flush && dpu_enc->cur_master) {
		_dpu_encoder_trigger_flush(
				&dpu_enc->base,
				dpu_enc->cur_master,
				pending_flush);
	}

	_dpu_encoder_trigger_start(dpu_enc->cur_master);

	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

void dpu_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	unsigned int i;
	struct dpu_hw_ctl *ctl;
	struct msm_display_info *disp_info;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	disp_info = &dpu_enc->disp_info;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];

		if (phys && phys->hw_ctl) {
			ctl = phys->hw_ctl;
			if (ctl->ops.clear_pending_flush)
				ctl->ops.clear_pending_flush(ctl);

			/* update only for command mode primary ctl */
			if ((phys == dpu_enc->cur_master) &&
			   (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
			    && ctl->ops.trigger_pending)
				ctl->ops.trigger_pending(ctl);
		}
	}
}

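/*
 * _dpu_encoder_calculate_linetime - derive the scanline period, in ns, from
 *	the mode's pixel clock and horizontal total. Only meaningful on the
 *	master encoder, which owns the line counter.
 */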
static u32 _dpu_encoder_calculate_linetime(struct dpu_encoder_virt *dpu_enc,
		struct drm_display_mode *mode)
{
	u64 pclk_rate;
	u32 pclk_period;
	u32 line_time;

	/*
	 * For linetime calculation, only operate on master encoder.
	 */
	if (!dpu_enc->cur_master)
		return 0;

	if (!dpu_enc->cur_master->ops.get_line_count) {
		DPU_ERROR("get_line_count function not defined\n");
		return 0;
	}

	pclk_rate = mode->clock; /* pixel clock in kHz */
	if (pclk_rate == 0) {
		DPU_ERROR("pclk is 0, cannot calculate line time\n");
		return 0;
	}

	pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
	if (pclk_period == 0) {
		DPU_ERROR("pclk period is 0\n");
		return 0;
	}

	/*
	 * Line time calculation based on Pixel clock and HTOTAL.
	 * Final unit is in ns.
	 */
	line_time = (pclk_period * mode->htotal) / 1000;
	if (line_time == 0) {
		DPU_ERROR("line time calculation is 0\n");
		return 0;
	}

	DPU_DEBUG_ENC(dpu_enc,
			"clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
			pclk_rate, pclk_period, line_time);

	return line_time;
}

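/*
 * _dpu_encoder_wakeup_time - compute the ktime of the next vsync by reading
 *	the current scanline from the master encoder and multiplying the
 *	remaining lines (up to vtotal) by the line time.
 */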
static int _dpu_encoder_wakeup_time(struct drm_encoder *drm_enc,
		ktime_t *wakeup_time)
{
	struct drm_display_mode *mode;
	struct dpu_encoder_virt *dpu_enc;
	u32 cur_line;
	u32 line_time;
	u32 vtotal, time_to_vsync;
	ktime_t cur_time;

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	if (!drm_enc->crtc || !drm_enc->crtc->state) {
		DPU_ERROR("crtc/crtc state object is NULL\n");
		return -EINVAL;
	}
	mode = &drm_enc->crtc->state->adjusted_mode;

	line_time = _dpu_encoder_calculate_linetime(dpu_enc, mode);
	if (!line_time)
		return -EINVAL;

	cur_line = dpu_enc->cur_master->ops.get_line_count(dpu_enc->cur_master);

	vtotal = mode->vtotal;
	if (cur_line >= vtotal)
		time_to_vsync = line_time * vtotal;
	else
		time_to_vsync = line_time * (vtotal - cur_line);

	if (time_to_vsync == 0) {
		DPU_ERROR("time to vsync should not be zero, vtotal=%u\n",
				vtotal);
		return -EINVAL;
	}

	cur_time = ktime_get();
	*wakeup_time = ktime_add_ns(cur_time, time_to_vsync);

	DPU_DEBUG_ENC(dpu_enc,
			"cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
			cur_line, vtotal, time_to_vsync,
			ktime_to_ms(cur_time),
			ktime_to_ms(*wakeup_time));
	return 0;
}

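/*
 * Timer callback for the DSI vsync event timer: validate the encoder's
 * device and crtc pointers, look up the per-crtc event thread, and
 * disarm the timer. Re-arming happens from dpu_encoder_kickoff() and
 * from the vsync event work handler below.
 */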
static void dpu_encoder_vsync_event_handler(struct timer_list *t)
{
	struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
			vsync_event_timer);
	struct drm_encoder *drm_enc = &dpu_enc->base;
	struct msm_drm_private *priv;
	struct msm_drm_thread *event_thread;

	if (!drm_enc->dev || !drm_enc->dev->dev_private ||
			!drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	priv = drm_enc->dev->dev_private;

	if (drm_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
		DPU_ERROR("invalid crtc index\n");
		return;
	}
	event_thread = &priv->event_thread[drm_enc->crtc->index];
	if (!event_thread) {
		DPU_ERROR("event_thread not found for crtc:%d\n",
				drm_enc->crtc->index);
		return;
	}

	del_timer(&dpu_enc->vsync_event_timer);
}

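/*
 * Deferred work that recomputes the wakeup time and re-arms the vsync
 * event timer so it fires at (approximately) the next vsync.
 */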
static void dpu_encoder_vsync_event_work_handler(struct kthread_work *work)
{
	struct dpu_encoder_virt *dpu_enc = container_of(work,
			struct dpu_encoder_virt, vsync_event_work);
	ktime_t wakeup_time;

	if (!dpu_enc) {
		DPU_ERROR("invalid dpu encoder\n");
		return;
	}

	if (_dpu_encoder_wakeup_time(&dpu_enc->base, &wakeup_time))
		return;

	trace_dpu_enc_vsync_event_work(DRMID(&dpu_enc->base), wakeup_time);
	mod_timer(&dpu_enc->vsync_event_timer,
			nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
}

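/*
 * dpu_encoder_prepare_for_kickoff - prepare all physical encoders for
 * the upcoming flush (this may include waiting on the previous kickoff
 * to complete), move resource control into the ON state, and, if any
 * physical encoder reported DPU_ENC_ERR_NEEDS_HW_RESET, reset every
 * physical encoder in order so the hardware comes back in a consistent
 * state.
 */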
void dpu_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc,
		struct dpu_encoder_kickoff_params *params)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	bool needs_hw_reset = false;
	unsigned int i;

	if (!drm_enc || !params) {
		DPU_ERROR("invalid args\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	trace_dpu_enc_prepare_kickoff(DRMID(drm_enc));

	/* prepare for next kickoff, may include waiting on previous kickoff */
	DPU_ATRACE_BEGIN("enc_prepare_for_kickoff");
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];
		if (phys) {
			if (phys->ops.prepare_for_kickoff)
				phys->ops.prepare_for_kickoff(phys, params);
			if (phys->enable_state == DPU_ENC_ERR_NEEDS_HW_RESET)
				needs_hw_reset = true;
		}
	}
	DPU_ATRACE_END("enc_prepare_for_kickoff");

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);

	/* if any phys needs reset, reset all phys, in-order */
	if (needs_hw_reset) {
		trace_dpu_enc_prepare_kickoff_reset(DRMID(drm_enc));
		for (i = 0; i < dpu_enc->num_phys_encs; i++) {
			phys = dpu_enc->phys_encs[i];
			if (phys && phys->ops.hw_reset)
				phys->ops.hw_reset(phys);
		}
	}
}

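/*
 * dpu_encoder_kickoff - trigger the transfer of the prepared frame.
 * Arms the frame-done watchdog (DPU_FRAME_DONE_TIMEOUT frame periods,
 * converted to milliseconds using the current refresh rate), kicks off
 * all physical encoders, lets them run their post-kickoff hooks, and
 * for DSI re-arms the vsync event timer at the estimated next vsync.
 */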
void dpu_encoder_kickoff(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	ktime_t wakeup_time;
	unsigned int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	DPU_ATRACE_BEGIN("encoder_kickoff");
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	trace_dpu_enc_kickoff(DRMID(drm_enc));

	atomic_set(&dpu_enc->frame_done_timeout,
			DPU_FRAME_DONE_TIMEOUT * 1000 /
			drm_enc->crtc->state->adjusted_mode.vrefresh);
	mod_timer(&dpu_enc->frame_done_timer, jiffies +
			((atomic_read(&dpu_enc->frame_done_timeout) * HZ) / 1000));

	/* All phys encs are ready to go, trigger the kickoff */
	_dpu_encoder_kickoff_phys(dpu_enc);

	/* allow phys encs to handle any post-kickoff business */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];
		if (phys && phys->ops.handle_post_kickoff)
			phys->ops.handle_post_kickoff(phys);
	}

	if (dpu_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DSI &&
			!_dpu_encoder_wakeup_time(drm_enc, &wakeup_time)) {
		trace_dpu_enc_early_kickoff(DRMID(drm_enc),
				ktime_to_ms(wakeup_time));
		mod_timer(&dpu_enc->vsync_event_timer,
				nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
	}

	DPU_ATRACE_END("encoder_kickoff");
}

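/*
 * dpu_encoder_prepare_commit - let each physical encoder perform any
 * setup it needs before the atomic commit is applied to the hardware.
 */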
void dpu_encoder_prepare_commit(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		phys = dpu_enc->phys_encs[i];
		if (phys && phys->ops.prepare_commit)
			phys->ops.prepare_commit(phys);
	}
}

#ifdef CONFIG_DEBUG_FS
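/*
 * Show handler for the "status" debugfs file: one line per physical
 * encoder with its interface index, vsync/underrun counters and mode,
 * e.g. "intf:0 vsync:     123 underrun:       0 mode: video".
 */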
static int _dpu_encoder_status_show(struct seq_file *s, void *data)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	if (!s || !s->private)
		return -EINVAL;

	dpu_enc = s->private;

	mutex_lock(&dpu_enc->enc_lock);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys)
			continue;

		seq_printf(s, "intf:%d vsync:%8d underrun:%8d ",
				phys->intf_idx - INTF_0,
				atomic_read(&phys->vsync_cnt),
				atomic_read(&phys->underrun_cnt));

		switch (phys->intf_mode) {
		case INTF_MODE_VIDEO:
			seq_puts(s, "mode: video\n");
			break;
		case INTF_MODE_CMD:
			seq_puts(s, "mode: command\n");
			break;
		default:
			seq_puts(s, "mode: ???\n");
			break;
		}
	}
	mutex_unlock(&dpu_enc->enc_lock);

	return 0;
}

static int _dpu_encoder_debugfs_status_open(struct inode *inode,
		struct file *file)
{
	return single_open(file, _dpu_encoder_status_show, inode->i_private);
}

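/*
 * Write handler for the "misr_data" debugfs file. Expects two integers,
 * "<enable> <frame_count>", and programs MISR collection on every
 * physical encoder that supports it. Example usage (the exact path
 * varies with the DRM minor and encoder object id):
 *
 *   echo "1 5" > /sys/kernel/debug/dri/0/encoder31/misr_data
 */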
static ssize_t _dpu_encoder_misr_setup(struct file *file,
		const char __user *user_buf, size_t count, loff_t *ppos)
{
	struct dpu_encoder_virt *dpu_enc;
	int i = 0, rc;
	char buf[MISR_BUFF_SIZE + 1];
	size_t buff_copy;
	u32 frame_count, enable;

	if (!file || !file->private_data)
		return -EINVAL;

	dpu_enc = file->private_data;

	buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
	if (copy_from_user(buf, user_buf, buff_copy))
		return -EINVAL;

	buf[buff_copy] = 0; /* end of string */

	if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
		return -EINVAL;

	rc = _dpu_encoder_power_enable(dpu_enc, true);
	if (rc)
		return rc;

	mutex_lock(&dpu_enc->enc_lock);
	dpu_enc->misr_enable = enable;
	dpu_enc->misr_frame_count = frame_count;
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys || !phys->ops.setup_misr)
			continue;

		phys->ops.setup_misr(phys, enable, frame_count);
	}
	mutex_unlock(&dpu_enc->enc_lock);
	_dpu_encoder_power_enable(dpu_enc, false);

	return count;
}

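/*
 * Read handler for the "misr_data" debugfs file: reports "disabled" when
 * MISR collection is off, "unsupported" when the display advertises any
 * capability other than video mode, and otherwise one MISR signature per
 * interface.
 */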
static ssize_t _dpu_encoder_misr_read(struct file *file,
		char __user *user_buff, size_t count, loff_t *ppos)
{
	struct dpu_encoder_virt *dpu_enc;
	int i = 0, len = 0;
	char buf[MISR_BUFF_SIZE + 1] = {'\0'};
	int rc;

	if (*ppos)
		return 0;

	if (!file || !file->private_data)
		return -EINVAL;

	dpu_enc = file->private_data;

	rc = _dpu_encoder_power_enable(dpu_enc, true);
	if (rc)
		return rc;

	mutex_lock(&dpu_enc->enc_lock);
	if (!dpu_enc->misr_enable) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"disabled\n");
		goto buff_check;
	} else if (dpu_enc->disp_info.capabilities &
			~MSM_DISPLAY_CAP_VID_MODE) {
		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"unsupported\n");
		goto buff_check;
	}

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys || !phys->ops.collect_misr)
			continue;

		len += snprintf(buf + len, MISR_BUFF_SIZE - len,
			"Intf idx:%d\n", phys->intf_idx - INTF_0);
		len += snprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
			phys->ops.collect_misr(phys));
	}

buff_check:
	if (count <= len) {
		len = 0;
		goto end;
	}

	if (copy_to_user(user_buff, buf, len)) {
		len = -EFAULT;
		goto end;
	}

	*ppos += len; /* increase offset */

end:
	mutex_unlock(&dpu_enc->enc_lock);
	_dpu_encoder_power_enable(dpu_enc, false);
	return len;
}

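/*
 * Create the per-encoder debugfs directory ("encoder<id>") containing
 * the "status" and "misr_data" files, then let each physical encoder
 * register its own entries via its late_register hook.
 */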
static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	int i;

	static const struct file_operations debugfs_status_fops = {
		.open = _dpu_encoder_debugfs_status_open,
		.read = seq_read,
		.llseek = seq_lseek,
		.release = single_release,
	};

	static const struct file_operations debugfs_misr_fops = {
		.open = simple_open,
		.read = _dpu_encoder_misr_read,
		.write = _dpu_encoder_misr_setup,
	};

	char name[DPU_NAME_SIZE];

	if (!drm_enc || !drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("invalid encoder or kms\n");
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	snprintf(name, DPU_NAME_SIZE, "encoder%u", drm_enc->base.id);

	/* create overall sub-directory for the encoder */
	dpu_enc->debugfs_root = debugfs_create_dir(name,
			drm_enc->dev->primary->debugfs_root);
	if (!dpu_enc->debugfs_root)
		return -ENOMEM;

	/* don't error check these */
	debugfs_create_file("status", 0600,
			dpu_enc->debugfs_root, dpu_enc, &debugfs_status_fops);

	debugfs_create_file("misr_data", 0600,
			dpu_enc->debugfs_root, dpu_enc, &debugfs_misr_fops);

	for (i = 0; i < dpu_enc->num_phys_encs; i++)
		if (dpu_enc->phys_encs[i] &&
				dpu_enc->phys_encs[i]->ops.late_register)
			dpu_enc->phys_encs[i]->ops.late_register(
					dpu_enc->phys_encs[i],
					dpu_enc->debugfs_root);

	return 0;
}

static void _dpu_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;

	if (!drm_enc)
		return;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	debugfs_remove_recursive(dpu_enc->debugfs_root);
}
#else
static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
{
	return 0;
}

static void _dpu_encoder_destroy_debugfs(struct drm_encoder *drm_enc)
{
}
#endif

static int dpu_encoder_late_register(struct drm_encoder *encoder)
{
	return _dpu_encoder_init_debugfs(encoder);
}

static void dpu_encoder_early_unregister(struct drm_encoder *encoder)
{
	_dpu_encoder_destroy_debugfs(encoder);
}

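/*
 * Instantiate the physical encoder backends for this virtual encoder:
 * a video-mode and/or a command-mode encoder, depending on the display's
 * capability flags. On success the new encoders are appended to
 * dpu_enc->phys_encs[] and num_phys_encs is updated accordingly.
 */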
static int dpu_encoder_virt_add_phys_encs(
		u32 display_caps,
		struct dpu_encoder_virt *dpu_enc,
		struct dpu_enc_phys_init_params *params)
{
	struct dpu_encoder_phys *enc = NULL;

	DPU_DEBUG_ENC(dpu_enc, "\n");

	/*
	 * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
	 * in this function, check up-front.
	 */
	if (dpu_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
			ARRAY_SIZE(dpu_enc->phys_encs)) {
		DPU_ERROR_ENC(dpu_enc, "too many physical encoders %d\n",
				dpu_enc->num_phys_encs);
		return -EINVAL;
	}

	if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
		enc = dpu_encoder_phys_vid_init(params);

		if (IS_ERR_OR_NULL(enc)) {
			DPU_ERROR_ENC(dpu_enc, "failed to init vid enc: %ld\n",
				PTR_ERR(enc));
			return enc == NULL ? -EINVAL : PTR_ERR(enc);
		}

		dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
		++dpu_enc->num_phys_encs;
	}

	if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
		enc = dpu_encoder_phys_cmd_init(params);

		if (IS_ERR_OR_NULL(enc)) {
			DPU_ERROR_ENC(dpu_enc, "failed to init cmd enc: %ld\n",
				PTR_ERR(enc));
			return enc == NULL ? -EINVAL : PTR_ERR(enc);
		}

		dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
		++dpu_enc->num_phys_encs;
	}

	return 0;
}

static const struct dpu_encoder_virt_ops dpu_encoder_parent_ops = {
	.handle_vblank_virt = dpu_encoder_vblank_callback,
	.handle_underrun_virt = dpu_encoder_underrun_callback,
	.handle_frame_done = dpu_encoder_frame_done_callback,
};

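/*
 * dpu_encoder_setup_display - translate the connector-level display info
 * into DPU objects: pick the DRM encoder mode and DPU interface type,
 * then create the physical encoders for each horizontal tile, making
 * the left-most tile the master in a split (e.g. dual-DSI) setup.
 */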
static int dpu_encoder_setup_display(struct dpu_encoder_virt *dpu_enc,
				 struct dpu_kms *dpu_kms,
				 struct msm_display_info *disp_info,
				 int *drm_enc_mode)
{
	int ret = 0;
	int i = 0;
	enum dpu_intf_type intf_type;
	struct dpu_enc_phys_init_params phys_params;

	if (!dpu_enc || !dpu_kms) {
		DPU_ERROR("invalid arg(s), enc %d kms %d\n",
				dpu_enc != NULL, dpu_kms != NULL);
		return -EINVAL;
	}

	memset(&phys_params, 0, sizeof(phys_params));
	phys_params.dpu_kms = dpu_kms;
	phys_params.parent = &dpu_enc->base;
	phys_params.parent_ops = &dpu_encoder_parent_ops;
	phys_params.enc_spinlock = &dpu_enc->enc_spinlock;

	DPU_DEBUG("\n");

	if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI) {
		*drm_enc_mode = DRM_MODE_ENCODER_DSI;
		intf_type = INTF_DSI;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_HDMIA) {
		*drm_enc_mode = DRM_MODE_ENCODER_TMDS;
		intf_type = INTF_HDMI;
	} else if (disp_info->intf_type == DRM_MODE_CONNECTOR_DisplayPort) {
		*drm_enc_mode = DRM_MODE_ENCODER_TMDS;
		intf_type = INTF_DP;
	} else {
		DPU_ERROR_ENC(dpu_enc, "unsupported display interface type\n");
		return -EINVAL;
	}

	WARN_ON(disp_info->num_of_h_tiles < 1);

	dpu_enc->display_num_of_h_tiles = disp_info->num_of_h_tiles;

	DPU_DEBUG("disp_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);

	if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
	    (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
		dpu_enc->idle_pc_supported =
				dpu_kms->catalog->caps->has_idle_pc;

	mutex_lock(&dpu_enc->enc_lock);
	for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
		/*
		 * Left-most tile is at index 0, content is controller id
		 * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
		 * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
		 */
		u32 controller_id = disp_info->h_tile_instance[i];

		if (disp_info->num_of_h_tiles > 1) {
			if (i == 0)
				phys_params.split_role = ENC_ROLE_MASTER;
			else
				phys_params.split_role = ENC_ROLE_SLAVE;
		} else {
			phys_params.split_role = ENC_ROLE_SOLO;
		}

		DPU_DEBUG("h_tile_instance %d = %d, split_role %d\n",
				i, controller_id, phys_params.split_role);

		phys_params.intf_idx = dpu_encoder_get_intf(dpu_kms->catalog,
				intf_type, controller_id);
		if (phys_params.intf_idx == INTF_MAX) {
			DPU_ERROR_ENC(dpu_enc, "could not get intf: type %d, id %d\n",
					intf_type, controller_id);
			ret = -EINVAL;
		}

		if (!ret) {
			ret = dpu_encoder_virt_add_phys_encs(
					disp_info->capabilities,
					dpu_enc, &phys_params);
			if (ret)
				DPU_ERROR_ENC(dpu_enc, "failed to add phys encs\n");
		}
	}

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys) {
			atomic_set(&phys->vsync_cnt, 0);
			atomic_set(&phys->underrun_cnt, 0);
		}
	}
	mutex_unlock(&dpu_enc->enc_lock);

	return ret;
}

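/*
 * Watchdog armed in dpu_encoder_kickoff(): if it fires while a frame is
 * still marked busy and the pending timeout has not been consumed,
 * report a frame-event error to the crtc so it can recover.
 */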
static void dpu_encoder_frame_done_timeout(struct timer_list *t)
{
	struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
			frame_done_timer);
	struct drm_encoder *drm_enc = &dpu_enc->base;
	struct msm_drm_private *priv;
	u32 event;

	if (!drm_enc->dev || !drm_enc->dev->dev_private) {
		DPU_ERROR("invalid parameters\n");
		return;
	}
	priv = drm_enc->dev->dev_private;

	if (!dpu_enc->frame_busy_mask[0] || !dpu_enc->crtc_frame_event_cb) {
		DRM_DEBUG_KMS("id:%u invalid timeout frame_busy_mask=%lu\n",
			      DRMID(drm_enc), dpu_enc->frame_busy_mask[0]);
		return;
	} else if (!atomic_xchg(&dpu_enc->frame_done_timeout, 0)) {
		DRM_DEBUG_KMS("id:%u invalid timeout\n", DRMID(drm_enc));
		return;
	}

	DPU_ERROR_ENC(dpu_enc, "frame done timeout\n");

	event = DPU_ENCODER_FRAME_EVENT_ERROR;
	trace_dpu_enc_frame_done_timeout(DRMID(drm_enc), event);
	dpu_enc->crtc_frame_event_cb(dpu_enc->crtc_frame_event_cb_data, event);
}

static const struct drm_encoder_helper_funcs dpu_encoder_helper_funcs = {
	.mode_set = dpu_encoder_virt_mode_set,
	.disable = dpu_encoder_virt_disable,
	.enable = dpu_kms_encoder_enable,
	.atomic_check = dpu_encoder_virt_atomic_check,

	/* This is called by dpu_kms_encoder_enable */
	.commit = dpu_encoder_virt_enable,
};

static const struct drm_encoder_funcs dpu_encoder_funcs = {
	.destroy = dpu_encoder_destroy,
	.late_register = dpu_encoder_late_register,
	.early_unregister = dpu_encoder_early_unregister,
};

int dpu_encoder_setup(struct drm_device *dev, struct drm_encoder *enc,
		struct msm_display_info *disp_info)
{
	struct msm_drm_private *priv = dev->dev_private;
	struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms);
	struct drm_encoder *drm_enc = NULL;
	struct dpu_encoder_virt *dpu_enc = NULL;
	int drm_enc_mode = DRM_MODE_ENCODER_NONE;
	int ret = 0;

	dpu_enc = to_dpu_encoder_virt(enc);

	mutex_init(&dpu_enc->enc_lock);
	ret = dpu_encoder_setup_display(dpu_enc, dpu_kms, disp_info,
			&drm_enc_mode);
	if (ret)
		goto fail;

	dpu_enc->cur_master = NULL;
	spin_lock_init(&dpu_enc->enc_spinlock);

	atomic_set(&dpu_enc->frame_done_timeout, 0);
	timer_setup(&dpu_enc->frame_done_timer,
			dpu_encoder_frame_done_timeout, 0);

	if (disp_info->intf_type == DRM_MODE_CONNECTOR_DSI)
		timer_setup(&dpu_enc->vsync_event_timer,
				dpu_encoder_vsync_event_handler, 0);

	mutex_init(&dpu_enc->rc_lock);
	kthread_init_delayed_work(&dpu_enc->delayed_off_work,
			dpu_encoder_off_work);
	dpu_enc->idle_timeout = IDLE_TIMEOUT;

	kthread_init_work(&dpu_enc->vsync_event_work,
			dpu_encoder_vsync_event_work_handler);

	memcpy(&dpu_enc->disp_info, disp_info, sizeof(*disp_info));

	DPU_DEBUG_ENC(dpu_enc, "created\n");

	return ret;

fail:
	DPU_ERROR("failed to create encoder\n");
	if (drm_enc)
		dpu_encoder_destroy(drm_enc);

	return ret;
}

struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
		int drm_enc_mode)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int rc = 0;

	dpu_enc = devm_kzalloc(dev->dev, sizeof(*dpu_enc), GFP_KERNEL);
	if (!dpu_enc)
		return ERR_PTR(-ENOMEM);

	rc = drm_encoder_init(dev, &dpu_enc->base, &dpu_encoder_funcs,
			drm_enc_mode, NULL);
	if (rc) {
		devm_kfree(dev->dev, dpu_enc);
		return ERR_PTR(rc);
	}

	drm_encoder_helper_add(&dpu_enc->base, &dpu_encoder_helper_funcs);

	return &dpu_enc->base;
}

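/*
 * dpu_encoder_wait_for_event - block until every physical encoder
 * signals the requested event (commit done, tx complete or vblank),
 * returning the first non-zero error code encountered.
 */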
int dpu_encoder_wait_for_event(struct drm_encoder *drm_enc,
	enum msm_event_wait event)
{
	int (*fn_wait)(struct dpu_encoder_phys *phys_enc) = NULL;
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i, ret = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return -EINVAL;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!phys)
			continue;

		switch (event) {
		case MSM_ENC_COMMIT_DONE:
			fn_wait = phys->ops.wait_for_commit_done;
			break;
		case MSM_ENC_TX_COMPLETE:
			fn_wait = phys->ops.wait_for_tx_complete;
			break;
		case MSM_ENC_VBLANK:
			fn_wait = phys->ops.wait_for_vblank;
			break;
		default:
			DPU_ERROR_ENC(dpu_enc, "unknown wait event %d\n",
					event);
			return -EINVAL;
		}

		if (fn_wait) {
			DPU_ATRACE_BEGIN("wait_for_completion_event");
			ret = fn_wait(phys);
			DPU_ATRACE_END("wait_for_completion_event");
			if (ret)
				return ret;
		}
	}

	return ret;
}

enum dpu_intf_mode dpu_encoder_get_intf_mode(struct drm_encoder *encoder)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i;

	if (!encoder) {
		DPU_ERROR("invalid encoder\n");
		return INTF_MODE_NONE;
	}
	dpu_enc = to_dpu_encoder_virt(encoder);

	if (dpu_enc->cur_master)
		return dpu_enc->cur_master->intf_mode;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys)
			return phys->intf_mode;
	}

	return INTF_MODE_NONE;
}