1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4 */
5
6 #include <linux/module.h>
7 #include <linux/slab.h>
8 #include <linux/uaccess.h>
9 #include <linux/debugfs.h>
10 #include <linux/component.h>
11 #include <linux/of_irq.h>
12 #include <linux/phy/phy.h>
13 #include <linux/delay.h>
14 #include <drm/display/drm_dp_aux_bus.h>
15 #include <drm/drm_edid.h>
16
17 #include "msm_drv.h"
18 #include "msm_kms.h"
19 #include "dp_ctrl.h"
20 #include "dp_catalog.h"
21 #include "dp_aux.h"
22 #include "dp_reg.h"
23 #include "dp_link.h"
24 #include "dp_panel.h"
25 #include "dp_display.h"
26 #include "dp_drm.h"
27 #include "dp_audio.h"
28 #include "dp_debug.h"
29
30 static bool psr_enabled = false;
31 module_param(psr_enabled, bool, 0);
32 MODULE_PARM_DESC(psr_enabled, "enable PSR for eDP and DP displays");
33
34 #define HPD_STRING_SIZE 30
35
36 enum {
37 ISR_DISCONNECTED,
38 ISR_CONNECT_PENDING,
39 ISR_CONNECTED,
40 ISR_HPD_REPLUG_COUNT,
41 ISR_IRQ_HPD_PULSE_COUNT,
42 ISR_HPD_LO_GLITH_COUNT,
43 };
44
/*
 * Event-thread connection state machine: a plug goes
 * ST_DISCONNECTED -> ST_MAINLINK_READY (link trained) -> ST_CONNECTED
 * (bridge enabled); an unplug while enabled goes through
 * ST_DISCONNECT_PENDING back to ST_DISCONNECTED, and disabling the
 * bridge without a pending disconnect leaves the state in ST_DISPLAY_OFF.
 */
46 enum {
47 ST_DISCONNECTED,
48 ST_MAINLINK_READY,
49 ST_CONNECTED,
50 ST_DISCONNECT_PENDING,
51 ST_DISPLAY_OFF,
52 };
53
54 enum {
55 EV_NO_EVENT,
56 /* hpd events */
57 EV_HPD_PLUG_INT,
58 EV_IRQ_HPD_INT,
59 EV_HPD_UNPLUG_INT,
60 EV_USER_NOTIFICATION,
61 };
62
63 #define EVENT_TIMEOUT (HZ/10) /* 100ms */
64 #define DP_EVENT_Q_MAX 8
65
66 #define DP_TIMEOUT_NONE 0
67
68 #define WAIT_FOR_RESUME_TIMEOUT_JIFFIES (HZ / 2)
69
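/*
 * A queued HPD event.  @delay is a tick counter: the event thread
 * re-queues a delayed event and decrements the counter roughly once per
 * EVENT_TIMEOUT (~100ms) until it reaches zero, at which point the
 * event is actually handled.
 */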
70 struct dp_event {
71 u32 event_id;
72 u32 data;
73 u32 delay;
74 };
75
76 struct dp_display_private {
77 int irq;
78
79 unsigned int id;
80
81 /* state variables */
82 bool core_initialized;
83 bool phy_initialized;
84 bool audio_supported;
85
86 struct drm_device *drm_dev;
87
88 struct dp_catalog *catalog;
89 struct drm_dp_aux *aux;
90 struct dp_link *link;
91 struct dp_panel *panel;
92 struct dp_ctrl *ctrl;
93
94 struct dp_display_mode dp_mode;
95 struct msm_dp dp_display;
96
97 /* wait for audio signaling */
98 struct completion audio_comp;
99
/* event-related state, accessed only by the event thread */
101 struct mutex event_mutex;
102 wait_queue_head_t event_q;
103 u32 hpd_state;
104 u32 event_pndx;
105 u32 event_gndx;
106 struct task_struct *ev_tsk;
107 struct dp_event event_list[DP_EVENT_Q_MAX];
108 spinlock_t event_lock;
109
110 bool wide_bus_supported;
111
112 struct dp_audio *audio;
113 };
114
115 struct msm_dp_desc {
116 phys_addr_t io_start;
117 unsigned int id;
118 bool wide_bus_supported;
119 };
120
121 static const struct msm_dp_desc sc7180_dp_descs[] = {
122 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
123 {}
124 };
125
126 static const struct msm_dp_desc sc7280_dp_descs[] = {
127 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
128 { .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
129 {}
130 };
131
132 static const struct msm_dp_desc sc8180x_dp_descs[] = {
133 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
134 { .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
135 { .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
136 {}
137 };
138
139 static const struct msm_dp_desc sc8280xp_dp_descs[] = {
140 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
141 { .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
142 { .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
143 { .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
144 { .io_start = 0x22090000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
145 { .io_start = 0x22098000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
146 { .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
147 { .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
148 {}
149 };
150
151 static const struct msm_dp_desc sm8650_dp_descs[] = {
152 { .io_start = 0x0af54000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
153 {}
154 };
155
156 static const struct msm_dp_desc x1e80100_dp_descs[] = {
157 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
158 { .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
159 { .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
160 { .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
161 {}
162 };
163
164 static const struct of_device_id dp_dt_match[] = {
165 { .compatible = "qcom,sc7180-dp", .data = &sc7180_dp_descs },
166 { .compatible = "qcom,sc7280-dp", .data = &sc7280_dp_descs },
167 { .compatible = "qcom,sc7280-edp", .data = &sc7280_dp_descs },
168 { .compatible = "qcom,sc8180x-dp", .data = &sc8180x_dp_descs },
169 { .compatible = "qcom,sc8180x-edp", .data = &sc8180x_dp_descs },
170 { .compatible = "qcom,sc8280xp-dp", .data = &sc8280xp_dp_descs },
171 { .compatible = "qcom,sc8280xp-edp", .data = &sc8280xp_dp_descs },
172 { .compatible = "qcom,sdm845-dp", .data = &sc7180_dp_descs },
173 { .compatible = "qcom,sm8350-dp", .data = &sc7180_dp_descs },
174 { .compatible = "qcom,sm8650-dp", .data = &sm8650_dp_descs },
175 { .compatible = "qcom,x1e80100-dp", .data = &x1e80100_dp_descs },
176 {}
177 };
178
static struct dp_display_private *dev_get_dp_display_private(struct device *dev)
180 {
181 struct msm_dp *dp = dev_get_drvdata(dev);
182
183 return container_of(dp, struct dp_display_private, dp_display);
184 }
185
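/*
 * Queue an event for the HPD event thread.  This runs from the HPD ISR
 * as well as from bridge callbacks, so the ring buffer is protected by
 * an irq-safe spinlock; -EPERM is returned when the queue is full.
 */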
static int dp_add_event(struct dp_display_private *dp_priv, u32 event,
187 u32 data, u32 delay)
188 {
189 unsigned long flag;
190 struct dp_event *todo;
191 int pndx;
192
193 spin_lock_irqsave(&dp_priv->event_lock, flag);
194 pndx = dp_priv->event_pndx + 1;
195 pndx %= DP_EVENT_Q_MAX;
196 if (pndx == dp_priv->event_gndx) {
197 pr_err("event_q is full: pndx=%d gndx=%d\n",
198 dp_priv->event_pndx, dp_priv->event_gndx);
199 spin_unlock_irqrestore(&dp_priv->event_lock, flag);
200 return -EPERM;
201 }
202 todo = &dp_priv->event_list[dp_priv->event_pndx++];
203 dp_priv->event_pndx %= DP_EVENT_Q_MAX;
204 todo->event_id = event;
205 todo->data = data;
206 todo->delay = delay;
207 wake_up(&dp_priv->event_q);
208 spin_unlock_irqrestore(&dp_priv->event_lock, flag);
209
210 return 0;
211 }
212
static int dp_del_event(struct dp_display_private *dp_priv, u32 event)
214 {
215 unsigned long flag;
216 struct dp_event *todo;
217 u32 gndx;
218
219 spin_lock_irqsave(&dp_priv->event_lock, flag);
220 if (dp_priv->event_pndx == dp_priv->event_gndx) {
221 spin_unlock_irqrestore(&dp_priv->event_lock, flag);
222 return -ENOENT;
223 }
224
225 gndx = dp_priv->event_gndx;
226 while (dp_priv->event_pndx != gndx) {
227 todo = &dp_priv->event_list[gndx];
228 if (todo->event_id == event) {
229 todo->event_id = EV_NO_EVENT; /* deleted */
230 todo->delay = 0;
231 }
232 gndx++;
233 gndx %= DP_EVENT_Q_MAX;
234 }
235 spin_unlock_irqrestore(&dp_priv->event_lock, flag);
236
237 return 0;
238 }
239
void dp_display_signal_audio_start(struct msm_dp *dp_display)
241 {
242 struct dp_display_private *dp;
243
244 dp = container_of(dp_display, struct dp_display_private, dp_display);
245
246 reinit_completion(&dp->audio_comp);
247 }
248
void dp_display_signal_audio_complete(struct msm_dp *dp_display)
250 {
251 struct dp_display_private *dp;
252
253 dp = container_of(dp_display, struct dp_display_private, dp_display);
254
255 complete_all(&dp->audio_comp);
256 }
257
258 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv);
259
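/*
 * Component bind/unbind: hook this controller into the master msm_drm
 * device, register the DP AUX channel and the audio driver, and start
 * the HPD event thread.
 */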
static int dp_display_bind(struct device *dev, struct device *master,
261 void *data)
262 {
263 int rc = 0;
264 struct dp_display_private *dp = dev_get_dp_display_private(dev);
265 struct msm_drm_private *priv = dev_get_drvdata(master);
266 struct drm_device *drm = priv->dev;
267
268 dp->dp_display.drm_dev = drm;
269 priv->dp[dp->id] = &dp->dp_display;
270
271
272
273 dp->drm_dev = drm;
274 dp->aux->drm_dev = drm;
275 rc = dp_aux_register(dp->aux);
276 if (rc) {
277 DRM_ERROR("DRM DP AUX register failed\n");
278 goto end;
279 }
280
281
282 rc = dp_register_audio_driver(dev, dp->audio);
283 if (rc) {
284 DRM_ERROR("Audio registration Dp failed\n");
285 goto end;
286 }
287
288 rc = dp_hpd_event_thread_start(dp);
289 if (rc) {
290 DRM_ERROR("Event thread create failed\n");
291 goto end;
292 }
293
294 return 0;
295 end:
296 return rc;
297 }
298
static void dp_display_unbind(struct device *dev, struct device *master,
300 void *data)
301 {
302 struct dp_display_private *dp = dev_get_dp_display_private(dev);
303 struct msm_drm_private *priv = dev_get_drvdata(master);
304
305 kthread_stop(dp->ev_tsk);
306
307 of_dp_aux_depopulate_bus(dp->aux);
308
309 dp_unregister_audio_driver(dev, dp->audio);
310 dp_aux_unregister(dp->aux);
311 dp->drm_dev = NULL;
312 dp->aux->drm_dev = NULL;
313 priv->dp[dp->id] = NULL;
314 }
315
316 static const struct component_ops dp_display_comp_ops = {
317 .bind = dp_display_bind,
318 .unbind = dp_display_unbind,
319 };
320
static void dp_display_send_hpd_event(struct msm_dp *dp_display)
322 {
323 struct dp_display_private *dp;
324 struct drm_connector *connector;
325
326 dp = container_of(dp_display, struct dp_display_private, dp_display);
327
328 connector = dp->dp_display.connector;
329 drm_helper_hpd_irq_event(connector->dev);
330 }
331
static int dp_display_send_hpd_notification(struct dp_display_private *dp,
333 bool hpd)
334 {
335 if ((hpd && dp->dp_display.link_ready) ||
336 (!hpd && !dp->dp_display.link_ready)) {
337 drm_dbg_dp(dp->drm_dev, "HPD already %s\n",
338 (hpd ? "on" : "off"));
339 return 0;
340 }
341
342 /* reset video pattern flag on disconnect */
343 if (!hpd) {
344 dp->panel->video_test = false;
345 if (!dp->dp_display.is_edp)
346 drm_dp_set_subconnector_property(dp->dp_display.connector,
347 connector_status_disconnected,
348 dp->panel->dpcd,
349 dp->panel->downstream_ports);
350 }
351
352 dp->dp_display.link_ready = hpd;
353
354 drm_dbg_dp(dp->drm_dev, "type=%d hpd=%d\n",
355 dp->dp_display.connector_type, hpd);
356 dp_display_send_hpd_event(&dp->dp_display);
357
358 return 0;
359 }
360
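/*
 * Cable plugged (or a downstream sink appeared): read the sink's
 * capabilities, wake it to D0, train the link and, on success, queue a
 * user notification so the connector state is re-read via a hotplug
 * uevent.
 */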
static int dp_display_process_hpd_high(struct dp_display_private *dp)
362 {
363 struct drm_connector *connector = dp->dp_display.connector;
364 const struct drm_display_info *info = &connector->display_info;
365 int rc = 0;
366
367 rc = dp_panel_read_sink_caps(dp->panel, connector);
368 if (rc)
369 goto end;
370
371 dp_link_process_request(dp->link);
372
373 if (!dp->dp_display.is_edp)
374 drm_dp_set_subconnector_property(connector,
375 connector_status_connected,
376 dp->panel->dpcd,
377 dp->panel->downstream_ports);
378
379 dp->dp_display.psr_supported = dp->panel->psr_cap.version && psr_enabled;
380
381 dp->audio_supported = info->has_audio;
382 dp_panel_handle_sink_request(dp->panel);
383
384 /*
385 * set sink to normal operation mode -- D0
386 * before dpcd read
387 */
388 dp_link_psm_config(dp->link, &dp->panel->link_info, false);
389
390 dp_link_reset_phy_params_vx_px(dp->link);
391 rc = dp_ctrl_on_link(dp->ctrl);
392 if (rc) {
393 DRM_ERROR("failed to complete DP link training\n");
394 goto end;
395 }
396
397 dp_add_event(dp, EV_USER_NOTIFICATION, true, 0);
398
399 end:
400 return rc;
401 }
402
static void dp_display_host_phy_init(struct dp_display_private *dp)
404 {
405 drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
406 dp->dp_display.connector_type, dp->core_initialized,
407 dp->phy_initialized);
408
409 if (!dp->phy_initialized) {
410 dp_ctrl_phy_init(dp->ctrl);
411 dp->phy_initialized = true;
412 }
413 }
414
static void dp_display_host_phy_exit(struct dp_display_private *dp)
416 {
417 drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
418 dp->dp_display.connector_type, dp->core_initialized,
419 dp->phy_initialized);
420
421 if (dp->phy_initialized) {
422 dp_ctrl_phy_exit(dp->ctrl);
423 dp->phy_initialized = false;
424 }
425 }
426
static void dp_display_host_init(struct dp_display_private *dp)
428 {
429 drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
430 dp->dp_display.connector_type, dp->core_initialized,
431 dp->phy_initialized);
432
433 dp_ctrl_core_clk_enable(dp->ctrl);
434 dp_ctrl_reset_irq_ctrl(dp->ctrl, true);
435 dp_aux_init(dp->aux);
436 dp->core_initialized = true;
437 }
438
static void dp_display_host_deinit(struct dp_display_private *dp)
440 {
441 drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
442 dp->dp_display.connector_type, dp->core_initialized,
443 dp->phy_initialized);
444
445 dp_ctrl_reset_irq_ctrl(dp->ctrl, false);
446 dp_aux_deinit(dp->aux);
447 dp_ctrl_core_clk_disable(dp->ctrl);
448 dp->core_initialized = false;
449 }
450
static int dp_display_usbpd_configure_cb(struct device *dev)
452 {
453 struct dp_display_private *dp = dev_get_dp_display_private(dev);
454
455 dp_display_host_phy_init(dp);
456
457 return dp_display_process_hpd_high(dp);
458 }
459
static int dp_display_notify_disconnect(struct device *dev)
461 {
462 struct dp_display_private *dp = dev_get_dp_display_private(dev);
463
464 dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
465
466 return 0;
467 }
468
static void dp_display_handle_video_request(struct dp_display_private *dp)
470 {
471 if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) {
472 dp->panel->video_test = true;
473 dp_link_send_test_response(dp->link);
474 }
475 }
476
static int dp_display_handle_port_status_changed(struct dp_display_private *dp)
478 {
479 int rc = 0;
480
481 if (drm_dp_is_branch(dp->panel->dpcd) && dp->link->sink_count == 0) {
482 drm_dbg_dp(dp->drm_dev, "sink count is zero, nothing to do\n");
483 if (dp->hpd_state != ST_DISCONNECTED) {
484 dp->hpd_state = ST_DISCONNECT_PENDING;
485 dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
486 }
487 } else {
488 if (dp->hpd_state == ST_DISCONNECTED) {
489 dp->hpd_state = ST_MAINLINK_READY;
490 rc = dp_display_process_hpd_high(dp);
491 if (rc)
492 dp->hpd_state = ST_DISCONNECTED;
493 }
494 }
495
496 return rc;
497 }
498
static int dp_display_handle_irq_hpd(struct dp_display_private *dp)
500 {
501 u32 sink_request = dp->link->sink_request;
502
503 drm_dbg_dp(dp->drm_dev, "%d\n", sink_request);
504 if (dp->hpd_state == ST_DISCONNECTED) {
505 if (sink_request & DP_LINK_STATUS_UPDATED) {
506 drm_dbg_dp(dp->drm_dev, "Disconnected sink_request: %d\n",
507 sink_request);
508 DRM_ERROR("Disconnected, no DP_LINK_STATUS_UPDATED\n");
509 return -EINVAL;
510 }
511 }
512
513 dp_ctrl_handle_sink_request(dp->ctrl);
514
515 if (sink_request & DP_TEST_LINK_VIDEO_PATTERN)
516 dp_display_handle_video_request(dp);
517
518 return 0;
519 }
520
static int dp_display_usbpd_attention_cb(struct device *dev)
522 {
523 int rc = 0;
524 u32 sink_request;
525 struct dp_display_private *dp = dev_get_dp_display_private(dev);
526
527 /* check for any test request issued by sink */
528 rc = dp_link_process_request(dp->link);
529 if (!rc) {
530 sink_request = dp->link->sink_request;
531 drm_dbg_dp(dp->drm_dev, "hpd_state=%d sink_request=%d\n",
532 dp->hpd_state, sink_request);
533 if (sink_request & DS_PORT_STATUS_CHANGED)
534 rc = dp_display_handle_port_status_changed(dp);
535 else
536 rc = dp_display_handle_irq_hpd(dp);
537 }
538
539 return rc;
540 }
541
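/*
 * EV_HPD_PLUG_INT handler, run on the event thread.  Takes a runtime PM
 * reference, trains the link and moves the state machine to
 * ST_MAINLINK_READY; the resulting hotplug uevent then lets userspace
 * complete the modeset (ST_CONNECTED is set in dp_bridge_atomic_enable()).
 */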
static int dp_hpd_plug_handle(struct dp_display_private *dp, u32 data)
543 {
544 u32 state;
545 int ret;
546 struct platform_device *pdev = dp->dp_display.pdev;
547
548 dp_aux_enable_xfers(dp->aux, true);
549
550 mutex_lock(&dp->event_mutex);
551
552 state = dp->hpd_state;
553 drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
554 dp->dp_display.connector_type, state);
555
556 if (state == ST_DISPLAY_OFF) {
557 mutex_unlock(&dp->event_mutex);
558 return 0;
559 }
560
561 if (state == ST_MAINLINK_READY || state == ST_CONNECTED) {
562 mutex_unlock(&dp->event_mutex);
563 return 0;
564 }
565
566 if (state == ST_DISCONNECT_PENDING) {
567 /* wait until ST_DISCONNECTED */
568 dp_add_event(dp, EV_HPD_PLUG_INT, 0, 1); /* delay = 1 */
569 mutex_unlock(&dp->event_mutex);
570 return 0;
571 }
572
573 ret = pm_runtime_resume_and_get(&pdev->dev);
574 if (ret) {
575 DRM_ERROR("failed to pm_runtime_resume\n");
576 mutex_unlock(&dp->event_mutex);
577 return ret;
578 }
579
580 ret = dp_display_usbpd_configure_cb(&pdev->dev);
581 if (ret) { /* link train failed */
582 dp->hpd_state = ST_DISCONNECTED;
583 pm_runtime_put_sync(&pdev->dev);
584 } else {
585 dp->hpd_state = ST_MAINLINK_READY;
586 }
587
588 drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
589 dp->dp_display.connector_type, state);
590 mutex_unlock(&dp->event_mutex);
591
592 /* uevent will complete connection part */
593 return 0;
}
595
static void dp_display_handle_plugged_change(struct msm_dp *dp_display,
597 bool plugged)
598 {
599 struct dp_display_private *dp;
600
601 dp = container_of(dp_display,
602 struct dp_display_private, dp_display);
603
604 /* notify audio subsystem only if sink supports audio */
605 if (dp_display->plugged_cb && dp_display->codec_dev &&
606 dp->audio_supported)
607 dp_display->plugged_cb(dp_display->codec_dev, plugged);
608 }
609
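/*
 * EV_HPD_UNPLUG_INT handler.  Depending on the current state this tears
 * down the link and PHY, notifies userspace of the disconnect and
 * releases the runtime PM reference taken on plug.
 */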
static int dp_hpd_unplug_handle(struct dp_display_private *dp, u32 data)
611 {
612 u32 state;
613 struct platform_device *pdev = dp->dp_display.pdev;
614
615 dp_aux_enable_xfers(dp->aux, false);
616
617 mutex_lock(&dp->event_mutex);
618
619 state = dp->hpd_state;
620
621 drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
622 dp->dp_display.connector_type, state);
623
624 /* unplugged, no more irq_hpd handle */
625 dp_del_event(dp, EV_IRQ_HPD_INT);
626
627 if (state == ST_DISCONNECTED) {
/* triggered by irq_hpd with sink_count = 0 */
629 if (dp->link->sink_count == 0) {
630 dp_display_host_phy_exit(dp);
631 }
632 dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
633 mutex_unlock(&dp->event_mutex);
634 return 0;
635 } else if (state == ST_DISCONNECT_PENDING) {
636 mutex_unlock(&dp->event_mutex);
637 return 0;
638 } else if (state == ST_MAINLINK_READY) {
639 dp_ctrl_off_link(dp->ctrl);
640 dp_display_host_phy_exit(dp);
641 dp->hpd_state = ST_DISCONNECTED;
642 dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
643 pm_runtime_put_sync(&pdev->dev);
644 mutex_unlock(&dp->event_mutex);
645 return 0;
646 }
647
648 /*
649 * We don't need separate work for disconnect as
650 * connect/attention interrupts are disabled
651 */
652 dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
653
654 if (state == ST_DISPLAY_OFF) {
655 dp->hpd_state = ST_DISCONNECTED;
656 } else {
657 dp->hpd_state = ST_DISCONNECT_PENDING;
658 }
659
660 /* signal the disconnect event early to ensure proper teardown */
661 dp_display_handle_plugged_change(&dp->dp_display, false);
662
663 drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
664 dp->dp_display.connector_type, state);
665
666 /* uevent will complete disconnection part */
667 pm_runtime_put_sync(&pdev->dev);
668 mutex_unlock(&dp->event_mutex);
669 return 0;
670 }
671
static int dp_irq_hpd_handle(struct dp_display_private *dp, u32 data)
673 {
674 u32 state;
675
676 mutex_lock(&dp->event_mutex);
677
678 /* irq_hpd can happen at either connected or disconnected state */
679 state = dp->hpd_state;
680 drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
681 dp->dp_display.connector_type, state);
682
683 if (state == ST_DISPLAY_OFF) {
684 mutex_unlock(&dp->event_mutex);
685 return 0;
686 }
687
688 if (state == ST_MAINLINK_READY || state == ST_DISCONNECT_PENDING) {
689 /* wait until ST_CONNECTED */
690 dp_add_event(dp, EV_IRQ_HPD_INT, 0, 1); /* delay = 1 */
691 mutex_unlock(&dp->event_mutex);
692 return 0;
693 }
694
695 dp_display_usbpd_attention_cb(&dp->dp_display.pdev->dev);
696
697 drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
698 dp->dp_display.connector_type, state);
699
700 mutex_unlock(&dp->event_mutex);
701
702 return 0;
703 }
704
static void dp_display_deinit_sub_modules(struct dp_display_private *dp)
706 {
707 dp_audio_put(dp->audio);
708 dp_panel_put(dp->panel);
709 dp_aux_put(dp->aux);
710 }
711
static int dp_init_sub_modules(struct dp_display_private *dp)
713 {
714 int rc = 0;
715 struct device *dev = &dp->dp_display.pdev->dev;
716 struct dp_panel_in panel_in = {
717 .dev = dev,
718 };
719 struct phy *phy;
720
721 phy = devm_phy_get(dev, "dp");
722 if (IS_ERR(phy))
723 return PTR_ERR(phy);
724
725 rc = phy_set_mode_ext(phy, PHY_MODE_DP,
726 dp->dp_display.is_edp ? PHY_SUBMODE_EDP : PHY_SUBMODE_DP);
727 if (rc) {
728 DRM_ERROR("failed to set phy submode, rc = %d\n", rc);
729 dp->catalog = NULL;
730 goto error;
731 }
732
733 dp->catalog = dp_catalog_get(dev);
734 if (IS_ERR(dp->catalog)) {
735 rc = PTR_ERR(dp->catalog);
736 DRM_ERROR("failed to initialize catalog, rc = %d\n", rc);
737 dp->catalog = NULL;
738 goto error;
739 }
740
741 dp->aux = dp_aux_get(dev, dp->catalog,
742 phy,
743 dp->dp_display.is_edp);
744 if (IS_ERR(dp->aux)) {
745 rc = PTR_ERR(dp->aux);
746 DRM_ERROR("failed to initialize aux, rc = %d\n", rc);
747 dp->aux = NULL;
748 goto error;
749 }
750
751 dp->link = dp_link_get(dev, dp->aux);
752 if (IS_ERR(dp->link)) {
753 rc = PTR_ERR(dp->link);
754 DRM_ERROR("failed to initialize link, rc = %d\n", rc);
755 dp->link = NULL;
756 goto error_link;
757 }
758
759 panel_in.aux = dp->aux;
760 panel_in.catalog = dp->catalog;
761 panel_in.link = dp->link;
762
763 dp->panel = dp_panel_get(&panel_in);
764 if (IS_ERR(dp->panel)) {
765 rc = PTR_ERR(dp->panel);
766 DRM_ERROR("failed to initialize panel, rc = %d\n", rc);
767 dp->panel = NULL;
768 goto error_link;
769 }
770
771 dp->ctrl = dp_ctrl_get(dev, dp->link, dp->panel, dp->aux,
772 dp->catalog,
773 phy);
774 if (IS_ERR(dp->ctrl)) {
775 rc = PTR_ERR(dp->ctrl);
776 DRM_ERROR("failed to initialize ctrl, rc = %d\n", rc);
777 dp->ctrl = NULL;
778 goto error_ctrl;
779 }
780
781 dp->audio = dp_audio_get(dp->dp_display.pdev, dp->panel, dp->catalog);
782 if (IS_ERR(dp->audio)) {
783 rc = PTR_ERR(dp->audio);
784 pr_err("failed to initialize audio, rc = %d\n", rc);
785 dp->audio = NULL;
786 goto error_ctrl;
787 }
788
789 return rc;
790
791 error_ctrl:
792 dp_panel_put(dp->panel);
793 error_link:
794 dp_aux_put(dp->aux);
795 error:
796 return rc;
797 }
798
static int dp_display_set_mode(struct msm_dp *dp_display,
800 struct dp_display_mode *mode)
801 {
802 struct dp_display_private *dp;
803
804 dp = container_of(dp_display, struct dp_display_private, dp_display);
805
806 drm_mode_copy(&dp->panel->dp_mode.drm_mode, &mode->drm_mode);
807 dp->panel->dp_mode.bpp = mode->bpp;
808 dp->panel->dp_mode.out_fmt_is_yuv_420 = mode->out_fmt_is_yuv_420;
809 dp_panel_init_panel_info(dp->panel);
810 return 0;
811 }
812
static int dp_display_enable(struct dp_display_private *dp, bool force_link_train)
814 {
815 int rc = 0;
816 struct msm_dp *dp_display = &dp->dp_display;
817
818 drm_dbg_dp(dp->drm_dev, "sink_count=%d\n", dp->link->sink_count);
819 if (dp_display->power_on) {
820 drm_dbg_dp(dp->drm_dev, "Link already setup, return\n");
821 return 0;
822 }
823
824 rc = dp_ctrl_on_stream(dp->ctrl, force_link_train);
825 if (!rc)
826 dp_display->power_on = true;
827
828 return rc;
829 }
830
static int dp_display_post_enable(struct msm_dp *dp_display)
832 {
833 struct dp_display_private *dp;
834 u32 rate;
835
836 dp = container_of(dp_display, struct dp_display_private, dp_display);
837
838 rate = dp->link->link_params.rate;
839
840 if (dp->audio_supported) {
841 dp->audio->bw_code = drm_dp_link_rate_to_bw_code(rate);
842 dp->audio->lane_count = dp->link->link_params.num_lanes;
843 }
844
845 /* signal the connect event late to synchronize video and display */
846 dp_display_handle_plugged_change(dp_display, true);
847
848 if (dp_display->psr_supported)
849 dp_ctrl_config_psr(dp->ctrl);
850
851 return 0;
852 }
853
static int dp_display_disable(struct dp_display_private *dp)
855 {
856 struct msm_dp *dp_display = &dp->dp_display;
857
858 if (!dp_display->power_on)
859 return 0;
860
861 /* wait only if audio was enabled */
862 if (dp_display->audio_enabled) {
863 /* signal the disconnect event */
864 dp_display_handle_plugged_change(dp_display, false);
865 if (!wait_for_completion_timeout(&dp->audio_comp,
866 HZ * 5))
867 DRM_ERROR("audio comp timeout\n");
868 }
869
870 dp_display->audio_enabled = false;
871
872 if (dp->link->sink_count == 0) {
873 /*
874 * irq_hpd with sink_count = 0
875 * hdmi unplugged out of dongle
876 */
877 dp_ctrl_off_link_stream(dp->ctrl);
878 } else {
879 /*
880 * unplugged interrupt
881 * dongle unplugged out of DUT
882 */
883 dp_ctrl_off(dp->ctrl);
884 dp_display_host_phy_exit(dp);
885 }
886
887 dp_display->power_on = false;
888
889 drm_dbg_dp(dp->drm_dev, "sink count: %d\n", dp->link->sink_count);
890 return 0;
891 }
892
int dp_display_set_plugged_cb(struct msm_dp *dp_display,
894 hdmi_codec_plugged_cb fn, struct device *codec_dev)
895 {
896 bool plugged;
897
898 dp_display->plugged_cb = fn;
899 dp_display->codec_dev = codec_dev;
900 plugged = dp_display->link_ready;
901 dp_display_handle_plugged_change(dp_display, plugged);
902
903 return 0;
904 }
905
906 /**
907 * dp_bridge_mode_valid - callback to determine if specified mode is valid
908 * @bridge: Pointer to drm bridge structure
909 * @info: display info
910 * @mode: Pointer to drm mode structure
911 * Returns: Validity status for specified mode
912 */
enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge,
914 const struct drm_display_info *info,
915 const struct drm_display_mode *mode)
916 {
917 const u32 num_components = 3, default_bpp = 24;
918 struct dp_display_private *dp_display;
919 struct dp_link_info *link_info;
920 u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0;
921 struct msm_dp *dp;
922 int mode_pclk_khz = mode->clock;
923
924 dp = to_dp_bridge(bridge)->dp_display;
925
926 if (!dp || !mode_pclk_khz || !dp->connector) {
927 DRM_ERROR("invalid params\n");
928 return -EINVAL;
929 }
930
931 dp_display = container_of(dp, struct dp_display_private, dp_display);
932 link_info = &dp_display->panel->link_info;
933
934 if ((drm_mode_is_420_only(&dp->connector->display_info, mode) &&
935 dp_display->panel->vsc_sdp_supported) ||
936 msm_dp_wide_bus_available(dp))
937 mode_pclk_khz /= 2;
938
939 if (mode_pclk_khz > DP_MAX_PIXEL_CLK_KHZ)
940 return MODE_CLOCK_HIGH;
941
942 mode_bpp = dp->connector->display_info.bpc * num_components;
943 if (!mode_bpp)
944 mode_bpp = default_bpp;
945
946 mode_bpp = dp_panel_get_mode_bpp(dp_display->panel,
947 mode_bpp, mode_pclk_khz);
948
949 mode_rate_khz = mode_pclk_khz * mode_bpp;
950 supported_rate_khz = link_info->num_lanes * link_info->rate * 8;
951
952 if (mode_rate_khz > supported_rate_khz)
953 return MODE_BAD;
954
955 return MODE_OK;
956 }
957
int dp_display_get_modes(struct msm_dp *dp)
959 {
960 struct dp_display_private *dp_display;
961
962 if (!dp) {
963 DRM_ERROR("invalid params\n");
964 return 0;
965 }
966
967 dp_display = container_of(dp, struct dp_display_private, dp_display);
968
969 return dp_panel_get_modes(dp_display->panel,
970 dp->connector);
971 }
972
bool dp_display_check_video_test(struct msm_dp *dp)
974 {
975 struct dp_display_private *dp_display;
976
977 dp_display = container_of(dp, struct dp_display_private, dp_display);
978
979 return dp_display->panel->video_test;
980 }
981
int dp_display_get_test_bpp(struct msm_dp *dp)
983 {
984 struct dp_display_private *dp_display;
985
986 if (!dp) {
987 DRM_ERROR("invalid params\n");
988 return 0;
989 }
990
991 dp_display = container_of(dp, struct dp_display_private, dp_display);
992
993 return dp_link_bit_depth_to_bpp(
994 dp_display->link->test_video.test_bit_depth);
995 }
996
void msm_dp_snapshot(struct msm_disp_state *disp_state, struct msm_dp *dp)
998 {
999 struct dp_display_private *dp_display;
1000
1001 dp_display = container_of(dp, struct dp_display_private, dp_display);
1002
1003 /*
* Reading the DP registers requires the link clocks to be running,
* but until a cable is connected they are not, since we do not yet
* know the resolution to power up with.  Check power_on before
* dumping the registers to avoid an unclocked register access.
1009 */
1010 mutex_lock(&dp_display->event_mutex);
1011
1012 if (!dp->power_on) {
1013 mutex_unlock(&dp_display->event_mutex);
1014 return;
1015 }
1016
1017 dp_catalog_snapshot(dp_display->catalog, disp_state);
1018
1019 mutex_unlock(&dp_display->event_mutex);
1020 }
1021
void dp_display_set_psr(struct msm_dp *dp_display, bool enter)
1023 {
1024 struct dp_display_private *dp;
1025
1026 if (!dp_display) {
1027 DRM_ERROR("invalid params\n");
1028 return;
1029 }
1030
1031 dp = container_of(dp_display, struct dp_display_private, dp_display);
1032 dp_ctrl_set_psr(dp->ctrl, enter);
1033 }
1034
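/*
 * Consumer side of the event queue.  The thread sleeps on event_q,
 * dispatches plug/unplug/irq_hpd events to their handlers and turns
 * EV_USER_NOTIFICATION into a hotplug uevent.  Events with a non-zero
 * delay are re-queued, and the thread polls every EVENT_TIMEOUT until
 * their delay counter runs out.
 */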
static int hpd_event_thread(void *data)
1036 {
1037 struct dp_display_private *dp_priv;
1038 unsigned long flag;
1039 struct dp_event *todo;
1040 int timeout_mode = 0;
1041
1042 dp_priv = (struct dp_display_private *)data;
1043
1044 while (1) {
1045 if (timeout_mode) {
1046 wait_event_timeout(dp_priv->event_q,
1047 (dp_priv->event_pndx == dp_priv->event_gndx) ||
1048 kthread_should_stop(), EVENT_TIMEOUT);
1049 } else {
1050 wait_event_interruptible(dp_priv->event_q,
1051 (dp_priv->event_pndx != dp_priv->event_gndx) ||
1052 kthread_should_stop());
1053 }
1054
1055 if (kthread_should_stop())
1056 break;
1057
1058 spin_lock_irqsave(&dp_priv->event_lock, flag);
1059 todo = &dp_priv->event_list[dp_priv->event_gndx];
1060 if (todo->delay) {
1061 struct dp_event *todo_next;
1062
1063 dp_priv->event_gndx++;
1064 dp_priv->event_gndx %= DP_EVENT_Q_MAX;
1065
1066 /* re enter delay event into q */
1067 todo_next = &dp_priv->event_list[dp_priv->event_pndx++];
1068 dp_priv->event_pndx %= DP_EVENT_Q_MAX;
1069 todo_next->event_id = todo->event_id;
1070 todo_next->data = todo->data;
1071 todo_next->delay = todo->delay - 1;
1072
1073 /* clean up older event */
1074 todo->event_id = EV_NO_EVENT;
1075 todo->delay = 0;
1076
1077 /* switch to timeout mode */
1078 timeout_mode = 1;
1079 spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1080 continue;
1081 }
1082
1083 /* timeout with no events in q */
1084 if (dp_priv->event_pndx == dp_priv->event_gndx) {
1085 spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1086 continue;
1087 }
1088
1089 dp_priv->event_gndx++;
1090 dp_priv->event_gndx %= DP_EVENT_Q_MAX;
1091 timeout_mode = 0;
1092 spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1093
1094 switch (todo->event_id) {
1095 case EV_HPD_PLUG_INT:
1096 dp_hpd_plug_handle(dp_priv, todo->data);
1097 break;
1098 case EV_HPD_UNPLUG_INT:
1099 dp_hpd_unplug_handle(dp_priv, todo->data);
1100 break;
1101 case EV_IRQ_HPD_INT:
1102 dp_irq_hpd_handle(dp_priv, todo->data);
1103 break;
1104 case EV_USER_NOTIFICATION:
1105 dp_display_send_hpd_notification(dp_priv,
1106 todo->data);
1107 break;
1108 default:
1109 break;
1110 }
1111 }
1112
1113 return 0;
1114 }
1115
static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv)
1117 {
1118 /* set event q to empty */
1119 dp_priv->event_gndx = 0;
1120 dp_priv->event_pndx = 0;
1121
1122 dp_priv->ev_tsk = kthread_run(hpd_event_thread, dp_priv, "dp_hpd_handler");
1123 if (IS_ERR(dp_priv->ev_tsk))
1124 return PTR_ERR(dp_priv->ev_tsk);
1125
1126 return 0;
1127 }
1128
static irqreturn_t dp_display_irq_handler(int irq, void *dev_id)
1130 {
1131 struct dp_display_private *dp = dev_id;
1132 irqreturn_t ret = IRQ_NONE;
1133 u32 hpd_isr_status;
1134
1135 if (!dp) {
1136 DRM_ERROR("invalid data\n");
1137 return IRQ_NONE;
1138 }
1139
1140 hpd_isr_status = dp_catalog_hpd_get_intr_status(dp->catalog);
1141
1142 if (hpd_isr_status & 0x0F) {
1143 drm_dbg_dp(dp->drm_dev, "type=%d isr=0x%x\n",
1144 dp->dp_display.connector_type, hpd_isr_status);
1145 /* hpd related interrupts */
1146 if (hpd_isr_status & DP_DP_HPD_PLUG_INT_MASK)
1147 dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
1148
1149 if (hpd_isr_status & DP_DP_IRQ_HPD_INT_MASK) {
1150 dp_add_event(dp, EV_IRQ_HPD_INT, 0, 0);
1151 }
1152
1153 if (hpd_isr_status & DP_DP_HPD_REPLUG_INT_MASK) {
1154 dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1155 dp_add_event(dp, EV_HPD_PLUG_INT, 0, 3);
1156 }
1157
1158 if (hpd_isr_status & DP_DP_HPD_UNPLUG_INT_MASK)
1159 dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1160
1161 ret = IRQ_HANDLED;
1162 }
1163
1164 /* DP controller isr */
1165 ret |= dp_ctrl_isr(dp->ctrl);
1166
1167 /* DP aux isr */
1168 ret |= dp_aux_isr(dp->aux);
1169
1170 return ret;
1171 }
1172
static int dp_display_request_irq(struct dp_display_private *dp)
1174 {
1175 int rc = 0;
1176 struct platform_device *pdev = dp->dp_display.pdev;
1177
1178 dp->irq = platform_get_irq(pdev, 0);
1179 if (dp->irq < 0) {
1180 DRM_ERROR("failed to get irq\n");
1181 return dp->irq;
1182 }
1183
1184 rc = devm_request_irq(&pdev->dev, dp->irq, dp_display_irq_handler,
1185 IRQF_TRIGGER_HIGH|IRQF_NO_AUTOEN,
1186 "dp_display_isr", dp);
1187
1188 if (rc < 0) {
1189 DRM_ERROR("failed to request IRQ%u: %d\n",
1190 dp->irq, rc);
1191 return rc;
1192 }
1193
1194 return 0;
1195 }
1196
static const struct msm_dp_desc *dp_display_get_desc(struct platform_device *pdev)
1198 {
1199 const struct msm_dp_desc *descs = of_device_get_match_data(&pdev->dev);
1200 struct resource *res;
1201 int i;
1202
1203 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1204 if (!res)
1205 return NULL;
1206
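/*
 * The descs tables above are terminated by a zeroed sentinel entry, so
 * the loop below stops at the first entry whose io_start is 0.
 */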
1207 for (i = 0; i < descs[i].io_start; i++) {
1208 if (descs[i].io_start == res->start)
1209 return &descs[i];
1210 }
1211
1212 dev_err(&pdev->dev, "unknown displayport instance\n");
1213 return NULL;
1214 }
1215
static int dp_display_probe_tail(struct device *dev)
1217 {
1218 struct msm_dp *dp = dev_get_drvdata(dev);
1219 int ret;
1220
1221 /*
1222 * External bridges are mandatory for eDP interfaces: one has to
1223 * provide at least an eDP panel (which gets wrapped into panel-bridge).
1224 *
1225 * For DisplayPort interfaces external bridges are optional, so
1226 * silently ignore an error if one is not present (-ENODEV).
1227 */
1228 dp->next_bridge = devm_drm_of_get_bridge(&dp->pdev->dev, dp->pdev->dev.of_node, 1, 0);
1229 if (IS_ERR(dp->next_bridge)) {
1230 ret = PTR_ERR(dp->next_bridge);
1231 dp->next_bridge = NULL;
1232 if (dp->is_edp || ret != -ENODEV)
1233 return ret;
1234 }
1235
1236 ret = component_add(dev, &dp_display_comp_ops);
1237 if (ret)
1238 DRM_ERROR("component add failed, rc=%d\n", ret);
1239
1240 return ret;
1241 }
1242
static int dp_auxbus_done_probe(struct drm_dp_aux *aux)
1244 {
1245 return dp_display_probe_tail(aux->dev);
1246 }
1247
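/*
 * eDP vs DP is inferred from the devicetree: an "aux-bus" child node
 * with a "panel" node underneath means a built-in eDP panel, anything
 * else is treated as an external DisplayPort connector.
 */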
static int dp_display_get_connector_type(struct platform_device *pdev,
1249 const struct msm_dp_desc *desc)
1250 {
1251 struct device_node *node = pdev->dev.of_node;
1252 struct device_node *aux_bus = of_get_child_by_name(node, "aux-bus");
1253 struct device_node *panel = of_get_child_by_name(aux_bus, "panel");
1254 int connector_type;
1255
1256 if (panel)
1257 connector_type = DRM_MODE_CONNECTOR_eDP;
1258 else
connector_type = DRM_MODE_CONNECTOR_DisplayPort;
1260
1261 of_node_put(panel);
1262 of_node_put(aux_bus);
1263
1264 return connector_type;
1265 }
1266
static int dp_display_probe(struct platform_device *pdev)
1268 {
1269 int rc = 0;
1270 struct dp_display_private *dp;
1271 const struct msm_dp_desc *desc;
1272
1273 if (!pdev || !pdev->dev.of_node) {
1274 DRM_ERROR("pdev not found\n");
1275 return -ENODEV;
1276 }
1277
1278 dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL);
1279 if (!dp)
1280 return -ENOMEM;
1281
1282 desc = dp_display_get_desc(pdev);
1283 if (!desc)
1284 return -EINVAL;
1285
1286 dp->dp_display.pdev = pdev;
1287 dp->id = desc->id;
1288 dp->dp_display.connector_type = dp_display_get_connector_type(pdev, desc);
1289 dp->wide_bus_supported = desc->wide_bus_supported;
1290 dp->dp_display.is_edp =
1291 (dp->dp_display.connector_type == DRM_MODE_CONNECTOR_eDP);
1292
1293 rc = dp_init_sub_modules(dp);
1294 if (rc) {
1295 DRM_ERROR("init sub module failed\n");
1296 return -EPROBE_DEFER;
1297 }
1298
1299 /* setup event q */
1300 mutex_init(&dp->event_mutex);
1301 init_waitqueue_head(&dp->event_q);
1302 spin_lock_init(&dp->event_lock);
1303
1304 /* Store DP audio handle inside DP display */
1305 dp->dp_display.dp_audio = dp->audio;
1306
1307 init_completion(&dp->audio_comp);
1308
1309 platform_set_drvdata(pdev, &dp->dp_display);
1310
1311 rc = devm_pm_runtime_enable(&pdev->dev);
1312 if (rc)
1313 goto err;
1314
1315 rc = dp_display_request_irq(dp);
1316 if (rc)
1317 goto err;
1318
1319 if (dp->dp_display.is_edp) {
1320 rc = devm_of_dp_aux_populate_bus(dp->aux, dp_auxbus_done_probe);
1321 if (rc) {
1322 DRM_ERROR("eDP auxbus population failed, rc=%d\n", rc);
1323 goto err;
1324 }
1325 } else {
1326 rc = dp_display_probe_tail(&pdev->dev);
1327 if (rc)
1328 goto err;
1329 }
1330
1331 return rc;
1332
1333 err:
1334 dp_display_deinit_sub_modules(dp);
1335 return rc;
1336 }
1337
static void dp_display_remove(struct platform_device *pdev)
1339 {
1340 struct dp_display_private *dp = dev_get_dp_display_private(&pdev->dev);
1341
1342 component_del(&pdev->dev, &dp_display_comp_ops);
1343 dp_display_deinit_sub_modules(dp);
1344 platform_set_drvdata(pdev, NULL);
1345 }
1346
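/*
 * Runtime PM: suspend masks the controller irq and de-initializes the
 * host (and, for always-connected eDP, the PHY and HPD block as well);
 * dp_pm_runtime_resume() below undoes this in the opposite order.
 */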
static int dp_pm_runtime_suspend(struct device *dev)
1348 {
1349 struct dp_display_private *dp = dev_get_dp_display_private(dev);
1350
1351 disable_irq(dp->irq);
1352
1353 if (dp->dp_display.is_edp) {
1354 dp_display_host_phy_exit(dp);
1355 dp_catalog_ctrl_hpd_disable(dp->catalog);
1356 }
1357 dp_display_host_deinit(dp);
1358
1359 return 0;
1360 }
1361
static int dp_pm_runtime_resume(struct device *dev)
1363 {
1364 struct dp_display_private *dp = dev_get_dp_display_private(dev);
1365
1366 /*
* for eDP, host controller, HPD block and PHY are enabled here
1368 * but with HPD irq disabled
1369 *
1370 * for DP, only host controller is enabled here.
1371 * HPD block is enabled at dp_bridge_hpd_enable()
1372 * PHY will be enabled at plugin handler later
1373 */
1374 dp_display_host_init(dp);
1375 if (dp->dp_display.is_edp) {
1376 dp_catalog_ctrl_hpd_enable(dp->catalog);
1377 dp_display_host_phy_init(dp);
1378 }
1379
1380 enable_irq(dp->irq);
1381 return 0;
1382 }
1383
1384 static const struct dev_pm_ops dp_pm_ops = {
1385 SET_RUNTIME_PM_OPS(dp_pm_runtime_suspend, dp_pm_runtime_resume, NULL)
1386 SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
1387 pm_runtime_force_resume)
1388 };
1389
1390 static struct platform_driver dp_display_driver = {
1391 .probe = dp_display_probe,
1392 .remove_new = dp_display_remove,
1393 .driver = {
1394 .name = "msm-dp-display",
1395 .of_match_table = dp_dt_match,
1396 .suppress_bind_attrs = true,
1397 .pm = &dp_pm_ops,
1398 },
1399 };
1400
int __init msm_dp_register(void)
1402 {
1403 int ret;
1404
1405 ret = platform_driver_register(&dp_display_driver);
1406 if (ret)
1407 DRM_ERROR("Dp display driver register failed");
1408
1409 return ret;
1410 }
1411
void __exit msm_dp_unregister(void)
1413 {
1414 platform_driver_unregister(&dp_display_driver);
1415 }
1416
bool msm_dp_is_yuv_420_enabled(const struct msm_dp *dp_display,
1418 const struct drm_display_mode *mode)
1419 {
1420 struct dp_display_private *dp;
1421 const struct drm_display_info *info;
1422
1423 dp = container_of(dp_display, struct dp_display_private, dp_display);
1424 info = &dp_display->connector->display_info;
1425
1426 return dp->panel->vsc_sdp_supported && drm_mode_is_420_only(info, mode);
1427 }
1428
bool msm_dp_needs_periph_flush(const struct msm_dp *dp_display,
1430 const struct drm_display_mode *mode)
1431 {
1432 return msm_dp_is_yuv_420_enabled(dp_display, mode);
1433 }
1434
bool msm_dp_wide_bus_available(const struct msm_dp *dp_display)
1436 {
1437 struct dp_display_private *dp;
1438
1439 dp = container_of(dp_display, struct dp_display_private, dp_display);
1440
1441 if (dp->dp_mode.out_fmt_is_yuv_420)
1442 return false;
1443
1444 return dp->wide_bus_supported;
1445 }
1446
void dp_display_debugfs_init(struct msm_dp *dp_display, struct dentry *root, bool is_edp)
1448 {
1449 struct dp_display_private *dp;
1450 struct device *dev;
1451 int rc;
1452
1453 dp = container_of(dp_display, struct dp_display_private, dp_display);
1454 dev = &dp->dp_display.pdev->dev;
1455
1456 rc = dp_debug_init(dev, dp->panel, dp->link, dp->dp_display.connector, root, is_edp);
1457 if (rc)
1458 DRM_ERROR("failed to initialize debug, rc = %d\n", rc);
1459 }
1460
int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev,
1462 struct drm_encoder *encoder, bool yuv_supported)
1463 {
1464 struct dp_display_private *dp_priv;
1465 int ret;
1466
1467 dp_display->drm_dev = dev;
1468
1469 dp_priv = container_of(dp_display, struct dp_display_private, dp_display);
1470
1471 ret = dp_bridge_init(dp_display, dev, encoder);
1472 if (ret) {
1473 DRM_DEV_ERROR(dev->dev,
1474 "failed to create dp bridge: %d\n", ret);
1475 return ret;
1476 }
1477
1478 dp_display->connector = dp_drm_connector_init(dp_display, encoder, yuv_supported);
1479 if (IS_ERR(dp_display->connector)) {
1480 ret = PTR_ERR(dp_display->connector);
1481 DRM_DEV_ERROR(dev->dev,
1482 "failed to create dp connector: %d\n", ret);
1483 dp_display->connector = NULL;
1484 return ret;
1485 }
1486
1487 dp_priv->panel->connector = dp_display->connector;
1488
1489 return 0;
1490 }
1491
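/*
 * Bridge enable: for eDP a plug event is synthesized first since there
 * is no HPD interrupt.  The link is expected to be trained already
 * (ST_MAINLINK_READY); when coming back from ST_DISPLAY_OFF the PHY is
 * re-initialized and a full link retrain is forced before the stream is
 * started.
 */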
void dp_bridge_atomic_enable(struct drm_bridge *drm_bridge,
1493 struct drm_bridge_state *old_bridge_state)
1494 {
1495 struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1496 struct msm_dp *dp = dp_bridge->dp_display;
1497 int rc = 0;
1498 struct dp_display_private *dp_display;
1499 u32 state;
1500 bool force_link_train = false;
1501
1502 dp_display = container_of(dp, struct dp_display_private, dp_display);
1503 if (!dp_display->dp_mode.drm_mode.clock) {
1504 DRM_ERROR("invalid params\n");
1505 return;
1506 }
1507
1508 if (dp->is_edp)
1509 dp_hpd_plug_handle(dp_display, 0);
1510
1511 mutex_lock(&dp_display->event_mutex);
1512 if (pm_runtime_resume_and_get(&dp->pdev->dev)) {
1513 DRM_ERROR("failed to pm_runtime_resume\n");
1514 mutex_unlock(&dp_display->event_mutex);
1515 return;
1516 }
1517
1518 state = dp_display->hpd_state;
1519 if (state != ST_DISPLAY_OFF && state != ST_MAINLINK_READY) {
1520 mutex_unlock(&dp_display->event_mutex);
1521 return;
1522 }
1523
1524 rc = dp_display_set_mode(dp, &dp_display->dp_mode);
1525 if (rc) {
1526 DRM_ERROR("Failed to perform a mode set, rc=%d\n", rc);
1527 mutex_unlock(&dp_display->event_mutex);
1528 return;
1529 }
1530
1531 state = dp_display->hpd_state;
1532
1533 if (state == ST_DISPLAY_OFF) {
1534 dp_display_host_phy_init(dp_display);
1535 force_link_train = true;
1536 }
1537
1538 dp_display_enable(dp_display, force_link_train);
1539
1540 rc = dp_display_post_enable(dp);
1541 if (rc) {
1542 DRM_ERROR("DP display post enable failed, rc=%d\n", rc);
1543 dp_display_disable(dp_display);
1544 }
1545
1546 /* completed connection */
1547 dp_display->hpd_state = ST_CONNECTED;
1548
1549 drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
1550 mutex_unlock(&dp_display->event_mutex);
1551 }
1552
void dp_bridge_atomic_disable(struct drm_bridge *drm_bridge,
1554 struct drm_bridge_state *old_bridge_state)
1555 {
1556 struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1557 struct msm_dp *dp = dp_bridge->dp_display;
1558 struct dp_display_private *dp_display;
1559
1560 dp_display = container_of(dp, struct dp_display_private, dp_display);
1561
1562 dp_ctrl_push_idle(dp_display->ctrl);
1563 }
1564
void dp_bridge_atomic_post_disable(struct drm_bridge *drm_bridge,
1566 struct drm_bridge_state *old_bridge_state)
1567 {
1568 struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1569 struct msm_dp *dp = dp_bridge->dp_display;
1570 u32 state;
1571 struct dp_display_private *dp_display;
1572
1573 dp_display = container_of(dp, struct dp_display_private, dp_display);
1574
1575 if (dp->is_edp)
1576 dp_hpd_unplug_handle(dp_display, 0);
1577
1578 mutex_lock(&dp_display->event_mutex);
1579
1580 state = dp_display->hpd_state;
1581 if (state != ST_DISCONNECT_PENDING && state != ST_CONNECTED)
1582 drm_dbg_dp(dp->drm_dev, "type=%d wrong hpd_state=%d\n",
1583 dp->connector_type, state);
1584
1585 dp_display_disable(dp_display);
1586
1587 state = dp_display->hpd_state;
1588 if (state == ST_DISCONNECT_PENDING) {
1589 /* completed disconnection */
1590 dp_display->hpd_state = ST_DISCONNECTED;
1591 } else {
1592 dp_display->hpd_state = ST_DISPLAY_OFF;
1593 }
1594
1595 drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
1596
1597 pm_runtime_put_sync(&dp->pdev->dev);
1598 mutex_unlock(&dp_display->event_mutex);
1599 }
1600
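/*
 * Cache the adjusted mode and everything derived from it (bpp, sync
 * polarities, YUV420 output) in dp_mode, and propagate the resulting
 * wide-bus decision to the ctrl and catalog layers before enable.
 */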
void dp_bridge_mode_set(struct drm_bridge *drm_bridge,
1602 const struct drm_display_mode *mode,
1603 const struct drm_display_mode *adjusted_mode)
1604 {
1605 struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1606 struct msm_dp *dp = dp_bridge->dp_display;
1607 struct dp_display_private *dp_display;
1608 struct dp_panel *dp_panel;
1609
1610 dp_display = container_of(dp, struct dp_display_private, dp_display);
1611 dp_panel = dp_display->panel;
1612
1613 memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode));
1614
1615 if (dp_display_check_video_test(dp))
1616 dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp);
1617 else /* Default num_components per pixel = 3 */
1618 dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3;
1619
1620 if (!dp_display->dp_mode.bpp)
1621 dp_display->dp_mode.bpp = 24; /* Default bpp */
1622
1623 drm_mode_copy(&dp_display->dp_mode.drm_mode, adjusted_mode);
1624
1625 dp_display->dp_mode.v_active_low =
1626 !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC);
1627
1628 dp_display->dp_mode.h_active_low =
1629 !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC);
1630
1631 dp_display->dp_mode.out_fmt_is_yuv_420 =
1632 drm_mode_is_420_only(&dp->connector->display_info, adjusted_mode) &&
1633 dp_panel->vsc_sdp_supported;
1634
1635 /* populate wide_bus_support to different layers */
1636 dp_display->ctrl->wide_bus_en =
1637 dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
1638 dp_display->catalog->wide_bus_en =
1639 dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
1640 }
1641
void dp_bridge_hpd_enable(struct drm_bridge *bridge)
1643 {
1644 struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1645 struct msm_dp *dp_display = dp_bridge->dp_display;
1646 struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1647
1648 /*
* This is the external DP case with the HPD irq enabled:
* step-1: dp_pm_runtime_resume() enables the DP host only
* step-2: the HPD block is enabled and its irq unmasked here
* step-3: wait for the plug-in irq while the PHY is still uninitialized
* step-4: the DP PHY is initialized in the plug-in handler before link training
1654 *
1655 */
1656 mutex_lock(&dp->event_mutex);
1657 if (pm_runtime_resume_and_get(&dp_display->pdev->dev)) {
1658 DRM_ERROR("failed to resume power\n");
1659 mutex_unlock(&dp->event_mutex);
1660 return;
1661 }
1662
1663 dp_catalog_ctrl_hpd_enable(dp->catalog);
1664
/* enable HPD interrupts */
1666 dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, true);
1667
1668 dp_display->internal_hpd = true;
1669 mutex_unlock(&dp->event_mutex);
1670 }
1671
void dp_bridge_hpd_disable(struct drm_bridge *bridge)
1673 {
1674 struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1675 struct msm_dp *dp_display = dp_bridge->dp_display;
1676 struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1677
1678 mutex_lock(&dp->event_mutex);
/* disable HPD interrupts */
1680 dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, false);
1681 dp_catalog_ctrl_hpd_disable(dp->catalog);
1682
1683 dp_display->internal_hpd = false;
1684
1685 pm_runtime_put_sync(&dp_display->pdev->dev);
1686 mutex_unlock(&dp->event_mutex);
1687 }
1688
void dp_bridge_hpd_notify(struct drm_bridge *bridge,
1690 enum drm_connector_status status)
1691 {
1692 struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1693 struct msm_dp *dp_display = dp_bridge->dp_display;
1694 struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1695
1696 /* Without next_bridge interrupts are handled by the DP core directly */
1697 if (dp_display->internal_hpd)
1698 return;
1699
1700 if (!dp_display->link_ready && status == connector_status_connected)
1701 dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
1702 else if (dp_display->link_ready && status == connector_status_disconnected)
1703 dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1704 }
1705