// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2014-2015, The Linux Foundation. All rights reserved.
 */

#include <linux/clk.h>
#include <linux/gpio/consumer.h>
#include <linux/regulator/consumer.h>
#include <drm/drm_crtc.h>
#include <drm/drm_dp_helper.h>
#include <drm/drm_edid.h>

#include "edp.h"
#include "edp.xml.h"

#define VDDA_UA_ON_LOAD		100000	/* uA units */
#define VDDA_UA_OFF_LOAD	100	/* uA units */

#define DPCD_LINK_VOLTAGE_MAX		4
#define DPCD_LINK_PRE_EMPHASIS_MAX	4

#define EDP_LINK_BW_MAX		DP_LINK_BW_2_7

/* Link training return value */
#define EDP_TRAIN_FAIL		-1
#define EDP_TRAIN_SUCCESS	0
#define EDP_TRAIN_RECONFIG	1

#define EDP_CLK_MASK_AHB		BIT(0)
#define EDP_CLK_MASK_AUX		BIT(1)
#define EDP_CLK_MASK_LINK		BIT(2)
#define EDP_CLK_MASK_PIXEL		BIT(3)
#define EDP_CLK_MASK_MDP_CORE		BIT(4)
#define EDP_CLK_MASK_LINK_CHAN	(EDP_CLK_MASK_LINK | EDP_CLK_MASK_PIXEL)
#define EDP_CLK_MASK_AUX_CHAN	\
	(EDP_CLK_MASK_AHB | EDP_CLK_MASK_AUX | EDP_CLK_MASK_MDP_CORE)
#define EDP_CLK_MASK_ALL	(EDP_CLK_MASK_AUX_CHAN | EDP_CLK_MASK_LINK_CHAN)

#define EDP_BACKLIGHT_MAX	255

#define EDP_INTR_STATUS1	\
	(EDP_INTERRUPT_REG_1_HPD | EDP_INTERRUPT_REG_1_AUX_I2C_DONE | \
	EDP_INTERRUPT_REG_1_WRONG_ADDR | EDP_INTERRUPT_REG_1_TIMEOUT | \
	EDP_INTERRUPT_REG_1_NACK_DEFER | EDP_INTERRUPT_REG_1_WRONG_DATA_CNT | \
	EDP_INTERRUPT_REG_1_I2C_NACK | EDP_INTERRUPT_REG_1_I2C_DEFER | \
	EDP_INTERRUPT_REG_1_PLL_UNLOCK | EDP_INTERRUPT_REG_1_AUX_ERROR)
#define EDP_INTR_MASK1		(EDP_INTR_STATUS1 << 2)
#define EDP_INTR_STATUS2	\
	(EDP_INTERRUPT_REG_2_READY_FOR_VIDEO | \
	EDP_INTERRUPT_REG_2_IDLE_PATTERNs_SENT | \
	EDP_INTERRUPT_REG_2_FRAME_END | EDP_INTERRUPT_REG_2_CRC_UPDATED)
#define EDP_INTR_MASK2		(EDP_INTR_STATUS2 << 2)

struct edp_ctrl {
	struct platform_device *pdev;

	void __iomem *base;

	/* regulators */
	struct regulator *vdda_vreg;	/* 1.8 V */
	struct regulator *lvl_vreg;

	/* clocks */
	struct clk *aux_clk;
	struct clk *pixel_clk;
	struct clk *ahb_clk;
	struct clk *link_clk;
	struct clk *mdp_core_clk;

	/* gpios */
	struct gpio_desc *panel_en_gpio;
	struct gpio_desc *panel_hpd_gpio;

	/* completion and mutex */
	struct completion idle_comp;
	struct mutex dev_mutex; /* To protect device power status */

	/* work queue */
	struct work_struct on_work;
	struct work_struct off_work;
	struct workqueue_struct *workqueue;

	/* Interrupt register lock */
	spinlock_t irq_lock;

	bool edp_connected;
	bool power_on;

	/* edid raw data */
	struct edid *edid;

	struct drm_dp_aux *drm_aux;

	/* dpcd raw data */
	u8 dpcd[DP_RECEIVER_CAP_SIZE];

	/* Link status */
	u8 link_rate;
	u8 lane_cnt;
	u8 v_level;
	u8 p_level;

	/* Timing status */
	u8 interlaced;
	u32 pixel_rate; /* in kHz */
	u32 color_depth;

	struct edp_aux *aux;
	struct edp_phy *phy;
};

struct edp_pixel_clk_div {
	u32 rate; /* in kHz */
	u32 m;
	u32 n;
};

#define EDP_PIXEL_CLK_NUM 8
static const struct edp_pixel_clk_div clk_divs[2][EDP_PIXEL_CLK_NUM] = {
	{ /* Link clock = 162MHz, source clock = 810MHz */
		{119000, 31, 211}, /* WSXGA+ 1680x1050@60Hz CVT */
		{130250, 32, 199}, /* UXGA 1600x1200@60Hz CVT */
		{148500, 11, 60},  /* FHD 1920x1080@60Hz */
		{154000, 50, 263}, /* WUXGA 1920x1200@60Hz CVT */
		{209250, 31, 120}, /* QXGA 2048x1536@60Hz CVT */
		{268500, 119, 359}, /* WQXGA 2560x1600@60Hz CVT */
		{138530, 33, 193}, /* AUO B116HAN03.0 Panel */
		{141400, 48, 275}, /* AUO B133HTN01.2 Panel */
	},
	{ /* Link clock = 270MHz, source clock = 675MHz */
		{119000, 52, 295}, /* WSXGA+ 1680x1050@60Hz CVT */
		{130250, 11, 57},  /* UXGA 1600x1200@60Hz CVT */
		{148500, 11, 50},  /* FHD 1920x1080@60Hz */
		{154000, 47, 206}, /* WUXGA 1920x1200@60Hz CVT */
		{209250, 31, 100}, /* QXGA 2048x1536@60Hz CVT */
		{268500, 107, 269}, /* WQXGA 2560x1600@60Hz CVT */
		{138530, 63, 307}, /* AUO B116HAN03.0 Panel */
		{141400, 53, 253}, /* AUO B133HTN01.2 Panel */
	},
};

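/* Look up all clocks used by the eDP controller; clear the pointer and
 * return the error if any of them is missing.
 */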
static int edp_clk_init(struct edp_ctrl *ctrl)
{
	struct platform_device *pdev = ctrl->pdev;
	int ret;

	ctrl->aux_clk = msm_clk_get(pdev, "core");
	if (IS_ERR(ctrl->aux_clk)) {
		ret = PTR_ERR(ctrl->aux_clk);
		pr_err("%s: Can't find core clock, %d\n", __func__, ret);
		ctrl->aux_clk = NULL;
		return ret;
	}

	ctrl->pixel_clk = msm_clk_get(pdev, "pixel");
	if (IS_ERR(ctrl->pixel_clk)) {
		ret = PTR_ERR(ctrl->pixel_clk);
		pr_err("%s: Can't find pixel clock, %d\n", __func__, ret);
		ctrl->pixel_clk = NULL;
		return ret;
	}

	ctrl->ahb_clk = msm_clk_get(pdev, "iface");
	if (IS_ERR(ctrl->ahb_clk)) {
		ret = PTR_ERR(ctrl->ahb_clk);
		pr_err("%s: Can't find iface clock, %d\n", __func__, ret);
		ctrl->ahb_clk = NULL;
		return ret;
	}

	ctrl->link_clk = msm_clk_get(pdev, "link");
	if (IS_ERR(ctrl->link_clk)) {
		ret = PTR_ERR(ctrl->link_clk);
		pr_err("%s: Can't find link clock, %d\n", __func__, ret);
		ctrl->link_clk = NULL;
		return ret;
	}

	/* need mdp core clock to receive irq */
	ctrl->mdp_core_clk = msm_clk_get(pdev, "mdp_core");
	if (IS_ERR(ctrl->mdp_core_clk)) {
		ret = PTR_ERR(ctrl->mdp_core_clk);
		pr_err("%s: Can't find mdp_core clock, %d\n", __func__, ret);
		ctrl->mdp_core_clk = NULL;
		return ret;
	}

	return 0;
}

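/*
 * Enable the clocks selected in clk_mask, setting the AUX, link and
 * pixel clock rates first. On failure, the clocks already enabled here
 * are disabled again before the error is returned.
 */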
static int edp_clk_enable(struct edp_ctrl *ctrl, u32 clk_mask)
{
	int ret;

	DBG("mask=%x", clk_mask);
	/* ahb_clk should be enabled first */
	if (clk_mask & EDP_CLK_MASK_AHB) {
		ret = clk_prepare_enable(ctrl->ahb_clk);
		if (ret) {
			pr_err("%s: Failed to enable ahb clk\n", __func__);
			goto f0;
		}
	}
	if (clk_mask & EDP_CLK_MASK_AUX) {
		ret = clk_set_rate(ctrl->aux_clk, 19200000);
		if (ret) {
			pr_err("%s: Failed to set rate aux clk\n", __func__);
			goto f1;
		}
		ret = clk_prepare_enable(ctrl->aux_clk);
		if (ret) {
			pr_err("%s: Failed to enable aux clk\n", __func__);
			goto f1;
		}
	}
	/* Need to set rate and enable link_clk prior to pixel_clk */
	if (clk_mask & EDP_CLK_MASK_LINK) {
		DBG("edp->link_clk, set_rate %ld",
				(unsigned long)ctrl->link_rate * 27000000);
		ret = clk_set_rate(ctrl->link_clk,
				(unsigned long)ctrl->link_rate * 27000000);
		if (ret) {
			pr_err("%s: Failed to set rate to link clk\n",
				__func__);
			goto f2;
		}

		ret = clk_prepare_enable(ctrl->link_clk);
		if (ret) {
			pr_err("%s: Failed to enable link clk\n", __func__);
			goto f2;
		}
	}
	if (clk_mask & EDP_CLK_MASK_PIXEL) {
		DBG("edp->pixel_clk, set_rate %ld",
				(unsigned long)ctrl->pixel_rate * 1000);
		ret = clk_set_rate(ctrl->pixel_clk,
				(unsigned long)ctrl->pixel_rate * 1000);
		if (ret) {
			pr_err("%s: Failed to set rate to pixel clk\n",
				__func__);
			goto f3;
		}

		ret = clk_prepare_enable(ctrl->pixel_clk);
		if (ret) {
			pr_err("%s: Failed to enable pixel clk\n", __func__);
			goto f3;
		}
	}
	if (clk_mask & EDP_CLK_MASK_MDP_CORE) {
		ret = clk_prepare_enable(ctrl->mdp_core_clk);
		if (ret) {
			pr_err("%s: Failed to enable mdp core clk\n", __func__);
			goto f4;
		}
	}

	return 0;

f4:
	if (clk_mask & EDP_CLK_MASK_PIXEL)
		clk_disable_unprepare(ctrl->pixel_clk);
f3:
	if (clk_mask & EDP_CLK_MASK_LINK)
		clk_disable_unprepare(ctrl->link_clk);
f2:
	if (clk_mask & EDP_CLK_MASK_AUX)
		clk_disable_unprepare(ctrl->aux_clk);
f1:
	if (clk_mask & EDP_CLK_MASK_AHB)
		clk_disable_unprepare(ctrl->ahb_clk);
f0:
	return ret;
}

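/* Disable the clocks selected in clk_mask, in reverse enable order. */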
static void edp_clk_disable(struct edp_ctrl *ctrl, u32 clk_mask)
{
	if (clk_mask & EDP_CLK_MASK_MDP_CORE)
		clk_disable_unprepare(ctrl->mdp_core_clk);
	if (clk_mask & EDP_CLK_MASK_PIXEL)
		clk_disable_unprepare(ctrl->pixel_clk);
	if (clk_mask & EDP_CLK_MASK_LINK)
		clk_disable_unprepare(ctrl->link_clk);
	if (clk_mask & EDP_CLK_MASK_AUX)
		clk_disable_unprepare(ctrl->aux_clk);
	if (clk_mask & EDP_CLK_MASK_AHB)
		clk_disable_unprepare(ctrl->ahb_clk);
}

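/* Get the vdda and lvl-vdd supplies; they are enabled later at power on. */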
static int edp_regulator_init(struct edp_ctrl *ctrl)
{
	struct device *dev = &ctrl->pdev->dev;
	int ret;

	DBG("");
	ctrl->vdda_vreg = devm_regulator_get(dev, "vdda");
	ret = PTR_ERR_OR_ZERO(ctrl->vdda_vreg);
	if (ret) {
		pr_err("%s: Could not get vdda reg, ret = %d\n", __func__,
				ret);
		ctrl->vdda_vreg = NULL;
		return ret;
	}
	ctrl->lvl_vreg = devm_regulator_get(dev, "lvl-vdd");
	ret = PTR_ERR_OR_ZERO(ctrl->lvl_vreg);
	if (ret) {
		pr_err("%s: Could not get lvl-vdd reg, ret = %d\n", __func__,
				ret);
		ctrl->lvl_vreg = NULL;
		return ret;
	}

	return 0;
}

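/* Set the active vdda load and enable both supplies, unwinding on failure. */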
static int edp_regulator_enable(struct edp_ctrl *ctrl)
{
	int ret;

	ret = regulator_set_load(ctrl->vdda_vreg, VDDA_UA_ON_LOAD);
	if (ret < 0) {
		pr_err("%s: vdda_vreg set regulator mode failed.\n", __func__);
		goto vdda_set_fail;
	}

	ret = regulator_enable(ctrl->vdda_vreg);
	if (ret) {
		pr_err("%s: Failed to enable vdda_vreg regulator.\n", __func__);
		goto vdda_enable_fail;
	}

	ret = regulator_enable(ctrl->lvl_vreg);
	if (ret) {
		pr_err("Failed to enable lvl-vdd reg regulator, %d", ret);
		goto lvl_enable_fail;
	}

	DBG("exit");
	return 0;

lvl_enable_fail:
	regulator_disable(ctrl->vdda_vreg);
vdda_enable_fail:
	regulator_set_load(ctrl->vdda_vreg, VDDA_UA_OFF_LOAD);
vdda_set_fail:
	return ret;
}

static void edp_regulator_disable(struct edp_ctrl *ctrl)
{
	regulator_disable(ctrl->lvl_vreg);
	regulator_disable(ctrl->vdda_vreg);
	regulator_set_load(ctrl->vdda_vreg, VDDA_UA_OFF_LOAD);
}

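/* Request the panel HPD (input) and panel enable (output, initially low) GPIOs. */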
static int edp_gpio_config(struct edp_ctrl *ctrl)
{
	struct device *dev = &ctrl->pdev->dev;
	int ret;

	ctrl->panel_hpd_gpio = devm_gpiod_get(dev, "panel-hpd", GPIOD_IN);
	if (IS_ERR(ctrl->panel_hpd_gpio)) {
		ret = PTR_ERR(ctrl->panel_hpd_gpio);
		ctrl->panel_hpd_gpio = NULL;
		pr_err("%s: cannot get panel-hpd-gpios, %d\n", __func__, ret);
		return ret;
	}

	ctrl->panel_en_gpio = devm_gpiod_get(dev, "panel-en", GPIOD_OUT_LOW);
	if (IS_ERR(ctrl->panel_en_gpio)) {
		ret = PTR_ERR(ctrl->panel_en_gpio);
		ctrl->panel_en_gpio = NULL;
		pr_err("%s: cannot get panel-en-gpios, %d\n", __func__, ret);
		return ret;
	}

	DBG("gpio on");

	return 0;
}

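/* Unmask or mask all controller interrupts under the interrupt register lock. */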
static void edp_ctrl_irq_enable(struct edp_ctrl *ctrl, int enable)
{
	unsigned long flags;

	DBG("%d", enable);
	spin_lock_irqsave(&ctrl->irq_lock, flags);
	if (enable) {
		edp_write(ctrl->base + REG_EDP_INTERRUPT_REG_1, EDP_INTR_MASK1);
		edp_write(ctrl->base + REG_EDP_INTERRUPT_REG_2, EDP_INTR_MASK2);
	} else {
		edp_write(ctrl->base + REG_EDP_INTERRUPT_REG_1, 0x0);
		edp_write(ctrl->base + REG_EDP_INTERRUPT_REG_2, 0x0);
	}
	spin_unlock_irqrestore(&ctrl->irq_lock, flags);
	DBG("exit");
}

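/*
 * Pick an initial link configuration: start at the sink's maximum link
 * rate and the smallest lane count whose bandwidth covers the stream.
 */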
static void edp_fill_link_cfg(struct edp_ctrl *ctrl)
{
	u32 prate;
	u32 lrate;
	u32 bpp;
	u8 max_lane = drm_dp_max_lane_count(ctrl->dpcd);
	u8 lane;

	prate = ctrl->pixel_rate;
	bpp = ctrl->color_depth * 3;

	/*
	 * By default, use the maximum link rate and minimum lane count,
	 * so that we can do rate down shift during link training.
	 */
	ctrl->link_rate = ctrl->dpcd[DP_MAX_LINK_RATE];

	prate *= bpp;
	prate /= 8; /* in kByte */

	lrate = 270000; /* in kHz */
	lrate *= ctrl->link_rate;
	lrate /= 10; /* in kByte, 10 bits --> 8 bits */

	for (lane = 1; lane <= max_lane; lane <<= 1) {
		if (lrate >= prate)
			break;
		lrate <<= 1;
	}

	ctrl->lane_cnt = lane;
	DBG("rate=%d lane=%d", ctrl->link_rate, ctrl->lane_cnt);
}

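/*
 * Program lane count, enhanced framing, color depth and scan mode into
 * the controller's configuration register.
 */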
static void edp_config_ctrl(struct edp_ctrl *ctrl)
{
	u32 data;
	enum edp_color_depth depth;

	data = EDP_CONFIGURATION_CTRL_LANES(ctrl->lane_cnt - 1);

	if (drm_dp_enhanced_frame_cap(ctrl->dpcd))
		data |= EDP_CONFIGURATION_CTRL_ENHANCED_FRAMING;

	depth = EDP_6BIT;
	if (ctrl->color_depth == 8)
		depth = EDP_8BIT;

	data |= EDP_CONFIGURATION_CTRL_COLOR(depth);

	if (!ctrl->interlaced)	/* progressive */
		data |= EDP_CONFIGURATION_CTRL_PROGRESSIVE;

	data |= (EDP_CONFIGURATION_CTRL_SYNC_CLK |
		EDP_CONFIGURATION_CTRL_STATIC_MVID);

	edp_write(ctrl->base + REG_EDP_CONFIGURATION_CTRL, data);
}

static void edp_state_ctrl(struct edp_ctrl *ctrl, u32 state)
{
	edp_write(ctrl->base + REG_EDP_STATE_CTRL, state);
	/* Make sure H/W status is set */
	wmb();
}

static int edp_lane_set_write(struct edp_ctrl *ctrl,
	u8 voltage_level, u8 pre_emphasis_level)
{
	int i;
	u8 buf[4];

	if (voltage_level >= DPCD_LINK_VOLTAGE_MAX)
		voltage_level |= 0x04;

	if (pre_emphasis_level >= DPCD_LINK_PRE_EMPHASIS_MAX)
		pre_emphasis_level |= 0x04;

	pre_emphasis_level <<= 3;

	for (i = 0; i < 4; i++)
		buf[i] = voltage_level | pre_emphasis_level;

	DBG("%s: p|v=0x%x", __func__, voltage_level | pre_emphasis_level);
	if (drm_dp_dpcd_write(ctrl->drm_aux, 0x103, buf, 4) < 4) {
		pr_err("%s: Set sw/pe to panel failed\n", __func__);
		return -ENOLINK;
	}

	return 0;
}

static int edp_train_pattern_set_write(struct edp_ctrl *ctrl, u8 pattern)
{
	u8 p = pattern;

	DBG("pattern=%x", p);
	if (drm_dp_dpcd_write(ctrl->drm_aux,
				DP_TRAINING_PATTERN_SET, &p, 1) < 1) {
		pr_err("%s: Set training pattern to panel failed\n", __func__);
		return -ENOLINK;
	}

	return 0;
}

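/*
 * Take the highest voltage swing and pre-emphasis levels requested by
 * the sink across all active lanes as the next levels to drive.
 */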
static void edp_sink_train_set_adjust(struct edp_ctrl *ctrl,
	const u8 *link_status)
{
	int i;
	u8 max = 0;
	u8 data;

	/* use the max level across lanes */
	for (i = 0; i < ctrl->lane_cnt; i++) {
		data = drm_dp_get_adjust_request_voltage(link_status, i);
		DBG("lane=%d req_voltage_swing=0x%x", i, data);
		if (max < data)
			max = data;
	}

	ctrl->v_level = max >> DP_TRAIN_VOLTAGE_SWING_SHIFT;

	/* use the max level across lanes */
	max = 0;
	for (i = 0; i < ctrl->lane_cnt; i++) {
		data = drm_dp_get_adjust_request_pre_emphasis(link_status, i);
		DBG("lane=%d req_pre_emphasis=0x%x", i, data);
		if (max < data)
			max = data;
	}

	ctrl->p_level = max >> DP_TRAIN_PRE_EMPHASIS_SHIFT;
	DBG("v_level=%d, p_level=%d", ctrl->v_level, ctrl->p_level);
}

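/*
 * Tell the controller to output the given training pattern and poll
 * MAINLINK_READY until the pattern is reported as being sent.
 */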
static void edp_host_train_set(struct edp_ctrl *ctrl, u32 train)
{
	int cnt = 10;
	u32 data;
	u32 shift = train - 1;

	DBG("train=%d", train);

	edp_state_ctrl(ctrl, EDP_STATE_CTRL_TRAIN_PATTERN_1 << shift);
	while (--cnt) {
		data = edp_read(ctrl->base + REG_EDP_MAINLINK_READY);
		if (data & (EDP_MAINLINK_READY_TRAIN_PATTERN_1_READY << shift))
			break;
	}

	if (cnt == 0)
		pr_err("%s: set link_train=%d failed\n", __func__, train);
}

static const u8 vm_pre_emphasis[4][4] = {
	{0x03, 0x06, 0x09, 0x0C},	/* pe0, 0 db */
	{0x03, 0x06, 0x09, 0xFF},	/* pe1, 3.5 db */
	{0x03, 0x06, 0xFF, 0xFF},	/* pe2, 6.0 db */
	{0x03, 0xFF, 0xFF, 0xFF}	/* pe3, 9.5 db */
};

/* voltage swing, 0.2 v and 1.0 v are not supported */
static const u8 vm_voltage_swing[4][4] = {
	{0x14, 0x18, 0x1A, 0x1E},	/* sw0, 0.4 v */
	{0x18, 0x1A, 0x1E, 0xFF},	/* sw1, 0.6 v */
	{0x1A, 0x1E, 0xFF, 0xFF},	/* sw2, 0.8 v */
	{0x1E, 0xFF, 0xFF, 0xFF}	/* sw3, 1.2 v, optional */
};

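/*
 * Apply the current swing/pre-emphasis levels to the PHY and write them
 * to the sink, but only if the combination is allowed by the tables above.
 */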
static int edp_voltage_pre_emphasise_set(struct edp_ctrl *ctrl)
{
	u32 value0;
	u32 value1;

	DBG("v=%d p=%d", ctrl->v_level, ctrl->p_level);

	value0 = vm_pre_emphasis[(int)(ctrl->v_level)][(int)(ctrl->p_level)];
	value1 = vm_voltage_swing[(int)(ctrl->v_level)][(int)(ctrl->p_level)];

	/* Configure host and panel only if both values are allowed */
	if (value0 != 0xFF && value1 != 0xFF) {
		msm_edp_phy_vm_pe_cfg(ctrl->phy, value0, value1);
		return edp_lane_set_write(ctrl, ctrl->v_level, ctrl->p_level);
	}

	return -EINVAL;
}

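/*
 * Link training phase 1: clock recovery. Adjust drive levels as the
 * sink requests until clock recovery succeeds or the retries run out.
 */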
static int edp_start_link_train_1(struct edp_ctrl *ctrl)
{
	u8 link_status[DP_LINK_STATUS_SIZE];
	u8 old_v_level;
	int tries;
	int ret;
	int rlen;

	DBG("");

	edp_host_train_set(ctrl, DP_TRAINING_PATTERN_1);
	ret = edp_voltage_pre_emphasise_set(ctrl);
	if (ret)
		return ret;
	ret = edp_train_pattern_set_write(ctrl,
			DP_TRAINING_PATTERN_1 | DP_RECOVERED_CLOCK_OUT_EN);
	if (ret)
		return ret;

	tries = 0;
	old_v_level = ctrl->v_level;
	while (1) {
		drm_dp_link_train_clock_recovery_delay(ctrl->dpcd);

		rlen = drm_dp_dpcd_read_link_status(ctrl->drm_aux, link_status);
		if (rlen < DP_LINK_STATUS_SIZE) {
			pr_err("%s: read link status failed\n", __func__);
			return -ENOLINK;
		}
		if (drm_dp_clock_recovery_ok(link_status, ctrl->lane_cnt)) {
			ret = 0;
			break;
		}

		if (ctrl->v_level == DPCD_LINK_VOLTAGE_MAX) {
			ret = -1;
			break;
		}

		if (old_v_level == ctrl->v_level) {
			tries++;
			if (tries >= 5) {
				ret = -1;
				break;
			}
		} else {
			tries = 0;
			old_v_level = ctrl->v_level;
		}

		edp_sink_train_set_adjust(ctrl, link_status);
		ret = edp_voltage_pre_emphasise_set(ctrl);
		if (ret)
			return ret;
	}

	return ret;
}

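/*
 * Link training phase 2: channel equalization, retrying a limited
 * number of times with adjusted drive levels.
 */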
static int edp_start_link_train_2(struct edp_ctrl *ctrl)
{
	u8 link_status[DP_LINK_STATUS_SIZE];
	int tries = 0;
	int ret;
	int rlen;

	DBG("");

	edp_host_train_set(ctrl, DP_TRAINING_PATTERN_2);
	ret = edp_voltage_pre_emphasise_set(ctrl);
	if (ret)
		return ret;

	ret = edp_train_pattern_set_write(ctrl,
			DP_TRAINING_PATTERN_2 | DP_RECOVERED_CLOCK_OUT_EN);
	if (ret)
		return ret;

	while (1) {
		drm_dp_link_train_channel_eq_delay(ctrl->dpcd);

		rlen = drm_dp_dpcd_read_link_status(ctrl->drm_aux, link_status);
		if (rlen < DP_LINK_STATUS_SIZE) {
			pr_err("%s: read link status failed\n", __func__);
			return -ENOLINK;
		}
		if (drm_dp_channel_eq_ok(link_status, ctrl->lane_cnt)) {
			ret = 0;
			break;
		}

		tries++;
		if (tries > 10) {
			ret = -1;
			break;
		}

		edp_sink_train_set_adjust(ctrl, link_status);
		ret = edp_voltage_pre_emphasise_set(ctrl);
		if (ret)
			return ret;
	}

	return ret;
}

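/*
 * Fall back to a lower link rate after a training failure, increasing
 * the lane count where possible; returns 0 if the new configuration
 * still has enough bandwidth for the stream.
 */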
static int edp_link_rate_down_shift(struct edp_ctrl *ctrl)
{
	u32 prate, lrate, bpp;
	u8 rate, lane, max_lane;
	int changed = 0;

	rate = ctrl->link_rate;
	lane = ctrl->lane_cnt;
	max_lane = drm_dp_max_lane_count(ctrl->dpcd);

	bpp = ctrl->color_depth * 3;
	prate = ctrl->pixel_rate;
	prate *= bpp;
	prate /= 8; /* in kByte */

	if (rate > DP_LINK_BW_1_62 && rate <= EDP_LINK_BW_MAX) {
		rate -= 4;	/* reduce rate */
		changed++;
	}

	if (changed) {
		if (lane >= 1 && lane < max_lane)
			lane <<= 1;	/* increase lane */

		lrate = 270000; /* in kHz */
		lrate *= rate;
		lrate /= 10; /* kByte, 10 bits --> 8 bits */
		lrate *= lane;

		DBG("new lrate=%u prate=%u(kHz) rate=%d lane=%d p=%u b=%d",
			lrate, prate, rate, lane,
			ctrl->pixel_rate,
			bpp);

		if (lrate > prate) {
			ctrl->link_rate = rate;
			ctrl->lane_cnt = lane;
			DBG("new rate=%d %d", rate, lane);
			return 0;
		}
	}

	return -EINVAL;
}

static int edp_clear_training_pattern(struct edp_ctrl *ctrl)
{
	int ret;

	ret = edp_train_pattern_set_write(ctrl, 0);

	drm_dp_link_train_channel_eq_delay(ctrl->dpcd);

	return ret;
}

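/*
 * Full link training sequence: program rate and lane count into the
 * sink, then run clock recovery and channel equalization. Returns
 * EDP_TRAIN_SUCCESS, EDP_TRAIN_RECONFIG (caller should retry with the
 * down-shifted link) or EDP_TRAIN_FAIL.
 */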
static int edp_do_link_train(struct edp_ctrl *ctrl)
{
	u8 values[2];
	int ret;

	DBG("");
	/*
	 * Set the current link rate and lane count on the panel; they may
	 * have been adjusted and so differ from the values in the DPCD
	 * capability fields.
	 */
	values[0] = ctrl->lane_cnt;
	values[1] = ctrl->link_rate;

	if (drm_dp_enhanced_frame_cap(ctrl->dpcd))
		values[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;

	if (drm_dp_dpcd_write(ctrl->drm_aux, DP_LINK_BW_SET, values,
			      sizeof(values)) < 0)
		return EDP_TRAIN_FAIL;

	ctrl->v_level = 0; /* start from default level */
	ctrl->p_level = 0;

	edp_state_ctrl(ctrl, 0);
	if (edp_clear_training_pattern(ctrl))
		return EDP_TRAIN_FAIL;

	ret = edp_start_link_train_1(ctrl);
	if (ret < 0) {
		if (edp_link_rate_down_shift(ctrl) == 0) {
			DBG("link reconfig");
			ret = EDP_TRAIN_RECONFIG;
			goto clear;
		} else {
			pr_err("%s: Training 1 failed", __func__);
			ret = EDP_TRAIN_FAIL;
			goto clear;
		}
	}
	DBG("Training 1 completed successfully");

	edp_state_ctrl(ctrl, 0);
	if (edp_clear_training_pattern(ctrl))
		return EDP_TRAIN_FAIL;

	ret = edp_start_link_train_2(ctrl);
	if (ret < 0) {
		if (edp_link_rate_down_shift(ctrl) == 0) {
			DBG("link reconfig");
			ret = EDP_TRAIN_RECONFIG;
			goto clear;
		} else {
			pr_err("%s: Training 2 failed", __func__);
			ret = EDP_TRAIN_FAIL;
			goto clear;
		}
	}
	DBG("Training 2 completed successfully");

	edp_state_ctrl(ctrl, EDP_STATE_CTRL_SEND_VIDEO);
clear:
	edp_clear_training_pattern(ctrl);

	return ret;
}

static void edp_clock_synchronous(struct edp_ctrl *ctrl, int sync)
{
	u32 data;
	enum edp_color_depth depth;

	data = edp_read(ctrl->base + REG_EDP_MISC1_MISC0);

	if (sync)
		data |= EDP_MISC1_MISC0_SYNC;
	else
		data &= ~EDP_MISC1_MISC0_SYNC;

	/* only legacy rgb mode supported */
	depth = EDP_6BIT; /* Default */
	if (ctrl->color_depth == 8)
		depth = EDP_8BIT;
	else if (ctrl->color_depth == 10)
		depth = EDP_10BIT;
	else if (ctrl->color_depth == 12)
		depth = EDP_12BIT;
	else if (ctrl->color_depth == 16)
		depth = EDP_16BIT;

	data |= EDP_MISC1_MISC0_COLOR(depth);

	edp_write(ctrl->base + REG_EDP_MISC1_MISC0, data);
}

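/* Program the software MVID/NVID values derived from the current link rate. */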
static int edp_sw_mvid_nvid(struct edp_ctrl *ctrl, u32 m, u32 n)
{
	u32 n_multi, m_multi = 5;

	if (ctrl->link_rate == DP_LINK_BW_1_62) {
		n_multi = 1;
	} else if (ctrl->link_rate == DP_LINK_BW_2_7) {
		n_multi = 2;
	} else {
		pr_err("%s: Invalid link rate, %d\n", __func__,
			ctrl->link_rate);
		return -EINVAL;
	}

	edp_write(ctrl->base + REG_EDP_SOFTWARE_MVID, m * m_multi);
	edp_write(ctrl->base + REG_EDP_SOFTWARE_NVID, n * n_multi);

	return 0;
}

static void edp_mainlink_ctrl(struct edp_ctrl *ctrl, int enable)
{
	u32 data = 0;

	edp_write(ctrl->base + REG_EDP_MAINLINK_CTRL, EDP_MAINLINK_CTRL_RESET);
	/* Make sure fully reset */
	wmb();
	usleep_range(500, 1000);

	if (enable)
		data |= EDP_MAINLINK_CTRL_ENABLE;

	edp_write(ctrl->base + REG_EDP_MAINLINK_CTRL, data);
}

static void edp_ctrl_phy_aux_enable(struct edp_ctrl *ctrl, int enable)
{
	if (enable) {
		edp_regulator_enable(ctrl);
		edp_clk_enable(ctrl, EDP_CLK_MASK_AUX_CHAN);
		msm_edp_phy_ctrl(ctrl->phy, 1);
		msm_edp_aux_ctrl(ctrl->aux, 1);
		gpiod_set_value(ctrl->panel_en_gpio, 1);
	} else {
		gpiod_set_value(ctrl->panel_en_gpio, 0);
		msm_edp_aux_ctrl(ctrl->aux, 0);
		msm_edp_phy_ctrl(ctrl->phy, 0);
		edp_clk_disable(ctrl, EDP_CLK_MASK_AUX_CHAN);
		edp_regulator_disable(ctrl);
	}
}

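/*
 * Bring the main link up or down: link/pixel clocks, PHY lanes, M/N
 * values and the mainlink enable bit.
 */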
static void edp_ctrl_link_enable(struct edp_ctrl *ctrl, int enable)
{
	u32 m, n;

	if (enable) {
		/* Enable link channel clocks */
		edp_clk_enable(ctrl, EDP_CLK_MASK_LINK_CHAN);

		msm_edp_phy_lane_power_ctrl(ctrl->phy, true, ctrl->lane_cnt);

		msm_edp_phy_vm_pe_init(ctrl->phy);

		/* Make sure phy is programmed */
		wmb();
		msm_edp_phy_ready(ctrl->phy);

		edp_config_ctrl(ctrl);
		msm_edp_ctrl_pixel_clock_valid(ctrl, ctrl->pixel_rate, &m, &n);
		edp_sw_mvid_nvid(ctrl, m, n);
		edp_mainlink_ctrl(ctrl, 1);
	} else {
		edp_mainlink_ctrl(ctrl, 0);

		msm_edp_phy_lane_power_ctrl(ctrl->phy, false, 0);
		edp_clk_disable(ctrl, EDP_CLK_MASK_LINK_CHAN);
	}
}

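/*
 * Run link training, re-configuring and retrying with a down-shifted
 * link whenever training asks for it.
 */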
static int edp_ctrl_training(struct edp_ctrl *ctrl)
{
	int ret;

	/* Do link training only when power is on */
	if (!ctrl->power_on)
		return -EINVAL;

train_start:
	ret = edp_do_link_train(ctrl);
	if (ret == EDP_TRAIN_RECONFIG) {
		/* Re-configure main link */
		edp_ctrl_irq_enable(ctrl, 0);
		edp_ctrl_link_enable(ctrl, 0);
		msm_edp_phy_ctrl(ctrl->phy, 0);

		/* Make sure link is fully disabled */
		wmb();
		usleep_range(500, 1000);

		msm_edp_phy_ctrl(ctrl->phy, 1);
		edp_ctrl_link_enable(ctrl, 1);
		edp_ctrl_irq_enable(ctrl, 1);
		goto train_start;
	}

	return ret;
}

static void edp_ctrl_on_worker(struct work_struct *work)
{
	struct edp_ctrl *ctrl = container_of(
				work, struct edp_ctrl, on_work);
	u8 value;
	int ret;

	mutex_lock(&ctrl->dev_mutex);

	if (ctrl->power_on) {
		DBG("already on");
		goto unlock_ret;
	}

	edp_ctrl_phy_aux_enable(ctrl, 1);
	edp_ctrl_link_enable(ctrl, 1);

	edp_ctrl_irq_enable(ctrl, 1);

	/* DP_SET_POWER register is only available on DPCD v1.1 and later */
	if (ctrl->dpcd[DP_DPCD_REV] >= 0x11) {
		ret = drm_dp_dpcd_readb(ctrl->drm_aux, DP_SET_POWER, &value);
		if (ret < 0)
			goto fail;

		value &= ~DP_SET_POWER_MASK;
		value |= DP_SET_POWER_D0;

		ret = drm_dp_dpcd_writeb(ctrl->drm_aux, DP_SET_POWER, value);
		if (ret < 0)
			goto fail;

		/*
		 * According to the DP 1.1 specification, a "Sink Device must
		 * exit the power saving state within 1 ms" (Section 2.5.3.1,
		 * Table 5-52, "Sink Control Field" (register 0x600)).
		 */
		usleep_range(1000, 2000);
	}

	ctrl->power_on = true;

	/* Start link training */
	ret = edp_ctrl_training(ctrl);
	if (ret != EDP_TRAIN_SUCCESS)
		goto fail;

	DBG("DONE");
	goto unlock_ret;

fail:
	edp_ctrl_irq_enable(ctrl, 0);
	edp_ctrl_link_enable(ctrl, 0);
	edp_ctrl_phy_aux_enable(ctrl, 0);
	ctrl->power_on = false;
unlock_ret:
	mutex_unlock(&ctrl->dev_mutex);
}

static void edp_ctrl_off_worker(struct work_struct *work)
{
	struct edp_ctrl *ctrl = container_of(
				work, struct edp_ctrl, off_work);
	unsigned long time_left;

	mutex_lock(&ctrl->dev_mutex);

	if (!ctrl->power_on) {
		DBG("already off");
		goto unlock_ret;
	}

	reinit_completion(&ctrl->idle_comp);
	edp_state_ctrl(ctrl, EDP_STATE_CTRL_PUSH_IDLE);

	time_left = wait_for_completion_timeout(&ctrl->idle_comp,
						msecs_to_jiffies(500));
	if (!time_left)
		DBG("%s: idle pattern timed out\n", __func__);

	edp_state_ctrl(ctrl, 0);

	/* DP_SET_POWER register is only available on DPCD v1.1 and later */
	if (ctrl->dpcd[DP_DPCD_REV] >= 0x11) {
		u8 value;
		int ret;

		ret = drm_dp_dpcd_readb(ctrl->drm_aux, DP_SET_POWER, &value);
		if (ret > 0) {
			value &= ~DP_SET_POWER_MASK;
			value |= DP_SET_POWER_D3;

			drm_dp_dpcd_writeb(ctrl->drm_aux, DP_SET_POWER, value);
		}
	}

	edp_ctrl_irq_enable(ctrl, 0);

	edp_ctrl_link_enable(ctrl, 0);

	edp_ctrl_phy_aux_enable(ctrl, 0);

	ctrl->power_on = false;

unlock_ret:
	mutex_unlock(&ctrl->dev_mutex);
}

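/* Controller interrupt handler: ack the pending sources and forward AUX events. */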
irqreturn_t msm_edp_ctrl_irq(struct edp_ctrl *ctrl)
{
	u32 isr1, isr2, mask1, mask2;
	u32 ack;

	DBG("");
	spin_lock(&ctrl->irq_lock);
	isr1 = edp_read(ctrl->base + REG_EDP_INTERRUPT_REG_1);
	isr2 = edp_read(ctrl->base + REG_EDP_INTERRUPT_REG_2);

	mask1 = isr1 & EDP_INTR_MASK1;
	mask2 = isr2 & EDP_INTR_MASK2;

	isr1 &= ~mask1;	/* remove masked bits */
	isr2 &= ~mask2;

	DBG("isr=%x mask=%x isr2=%x mask2=%x",
		isr1, mask1, isr2, mask2);

	ack = isr1 & EDP_INTR_STATUS1;
	ack <<= 1;	/* ack bits */
	ack |= mask1;
	edp_write(ctrl->base + REG_EDP_INTERRUPT_REG_1, ack);

	ack = isr2 & EDP_INTR_STATUS2;
	ack <<= 1;	/* ack bits */
	ack |= mask2;
	edp_write(ctrl->base + REG_EDP_INTERRUPT_REG_2, ack);
	spin_unlock(&ctrl->irq_lock);

	if (isr1 & EDP_INTERRUPT_REG_1_HPD)
		DBG("edp_hpd");

	if (isr2 & EDP_INTERRUPT_REG_2_READY_FOR_VIDEO)
		DBG("edp_video_ready");

	if (isr2 & EDP_INTERRUPT_REG_2_IDLE_PATTERNs_SENT) {
		DBG("idle_patterns_sent");
		complete(&ctrl->idle_comp);
	}

	msm_edp_aux_irq(ctrl->aux, isr1);

	return IRQ_HANDLED;
}

void msm_edp_ctrl_power(struct edp_ctrl *ctrl, bool on)
{
	if (on)
		queue_work(ctrl->workqueue, &ctrl->on_work);
	else
		queue_work(ctrl->workqueue, &ctrl->off_work);
}

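/*
 * Allocate and initialize the eDP controller: resources (regulators,
 * clocks, GPIOs), AUX channel, PHY and the power on/off workqueue.
 */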
int msm_edp_ctrl_init(struct msm_edp *edp)
{
	struct edp_ctrl *ctrl = NULL;
	struct device *dev;
	int ret;

	if (!edp) {
		pr_err("%s: edp is NULL!\n", __func__);
		return -EINVAL;
	}

	dev = &edp->pdev->dev;
	ctrl = devm_kzalloc(dev, sizeof(*ctrl), GFP_KERNEL);
	if (!ctrl)
		return -ENOMEM;

	edp->ctrl = ctrl;
	ctrl->pdev = edp->pdev;

	ctrl->base = msm_ioremap(ctrl->pdev, "edp", "eDP");
	if (IS_ERR(ctrl->base))
		return PTR_ERR(ctrl->base);

	/* Get regulator, clock, gpio, pwm */
	ret = edp_regulator_init(ctrl);
	if (ret) {
		pr_err("%s:regulator init fail\n", __func__);
		return ret;
	}
	ret = edp_clk_init(ctrl);
	if (ret) {
		pr_err("%s:clk init fail\n", __func__);
		return ret;
	}
	ret = edp_gpio_config(ctrl);
	if (ret) {
		pr_err("%s:failed to configure GPIOs: %d", __func__, ret);
		return ret;
	}

	/* Init aux and phy */
	ctrl->aux = msm_edp_aux_init(dev, ctrl->base, &ctrl->drm_aux);
	if (!ctrl->aux || !ctrl->drm_aux) {
		pr_err("%s:failed to init aux\n", __func__);
		return -ENOMEM;
	}

	ctrl->phy = msm_edp_phy_init(dev, ctrl->base);
	if (!ctrl->phy) {
		pr_err("%s:failed to init phy\n", __func__);
		ret = -ENOMEM;
		goto err_destroy_aux;
	}

	spin_lock_init(&ctrl->irq_lock);
	mutex_init(&ctrl->dev_mutex);
	init_completion(&ctrl->idle_comp);

	/* setup workqueue */
	ctrl->workqueue = alloc_ordered_workqueue("edp_drm_work", 0);
	INIT_WORK(&ctrl->on_work, edp_ctrl_on_worker);
	INIT_WORK(&ctrl->off_work, edp_ctrl_off_worker);

	return 0;

err_destroy_aux:
	msm_edp_aux_destroy(dev, ctrl->aux);
	ctrl->aux = NULL;
	return ret;
}

void msm_edp_ctrl_destroy(struct edp_ctrl *ctrl)
{
	if (!ctrl)
		return;

	if (ctrl->workqueue) {
		flush_workqueue(ctrl->workqueue);
		destroy_workqueue(ctrl->workqueue);
		ctrl->workqueue = NULL;
	}

	if (ctrl->aux) {
		msm_edp_aux_destroy(&ctrl->pdev->dev, ctrl->aux);
		ctrl->aux = NULL;
	}

	kfree(ctrl->edid);
	ctrl->edid = NULL;

	mutex_destroy(&ctrl->dev_mutex);
}

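/*
 * Detect the panel by reading its DPCD receiver capabilities over AUX,
 * temporarily powering the PHY/AUX block if the link is not already on.
 */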
bool msm_edp_ctrl_panel_connected(struct edp_ctrl *ctrl)
{
	mutex_lock(&ctrl->dev_mutex);
	DBG("connect status = %d", ctrl->edp_connected);
	if (ctrl->edp_connected) {
		mutex_unlock(&ctrl->dev_mutex);
		return true;
	}

	if (!ctrl->power_on) {
		edp_ctrl_phy_aux_enable(ctrl, 1);
		edp_ctrl_irq_enable(ctrl, 1);
	}

	if (drm_dp_dpcd_read(ctrl->drm_aux, DP_DPCD_REV, ctrl->dpcd,
				DP_RECEIVER_CAP_SIZE) < DP_RECEIVER_CAP_SIZE) {
		pr_err("%s: AUX channel is NOT ready\n", __func__);
		memset(ctrl->dpcd, 0, DP_RECEIVER_CAP_SIZE);
	} else {
		ctrl->edp_connected = true;
	}

	if (!ctrl->power_on) {
		edp_ctrl_irq_enable(ctrl, 0);
		edp_ctrl_phy_aux_enable(ctrl, 0);
	}

	DBG("exit: connect status=%d", ctrl->edp_connected);

	mutex_unlock(&ctrl->dev_mutex);

	return ctrl->edp_connected;
}

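/* Read (and cache) the panel EDID over the AUX DDC channel. */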
int msm_edp_ctrl_get_panel_info(struct edp_ctrl *ctrl,
		struct drm_connector *connector, struct edid **edid)
{
	int ret = 0;

	mutex_lock(&ctrl->dev_mutex);

	if (ctrl->edid) {
		if (edid) {
			DBG("Just return edid buffer");
			*edid = ctrl->edid;
		}
		goto unlock_ret;
	}

	if (!ctrl->power_on) {
		edp_ctrl_phy_aux_enable(ctrl, 1);
		edp_ctrl_irq_enable(ctrl, 1);
	}

	/* Initialize link rate as panel max link rate */
	ctrl->link_rate = ctrl->dpcd[DP_MAX_LINK_RATE];

	ctrl->edid = drm_get_edid(connector, &ctrl->drm_aux->ddc);
	if (!ctrl->edid) {
		pr_err("%s: edid read fail\n", __func__);
		goto disable_ret;
	}

	if (edid)
		*edid = ctrl->edid;

disable_ret:
	if (!ctrl->power_on) {
		edp_ctrl_irq_enable(ctrl, 0);
		edp_ctrl_phy_aux_enable(ctrl, 0);
	}
unlock_ret:
	mutex_unlock(&ctrl->dev_mutex);
	return ret;
}

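/*
 * Save the mode's color depth, pixel clock and scan type, derive the
 * initial link configuration and program the video timing registers.
 */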
int msm_edp_ctrl_timing_cfg(struct edp_ctrl *ctrl,
		const struct drm_display_mode *mode,
		const struct drm_display_info *info)
{
	u32 hstart_from_sync, vstart_from_sync;
	u32 data;
	int ret = 0;

	mutex_lock(&ctrl->dev_mutex);
	/*
	 * Need to keep color depth, pixel rate and
	 * interlaced information in ctrl context
	 */
	ctrl->color_depth = info->bpc;
	ctrl->pixel_rate = mode->clock;
	ctrl->interlaced = !!(mode->flags & DRM_MODE_FLAG_INTERLACE);

	/* Fill initial link config based on passed in timing */
	edp_fill_link_cfg(ctrl);

	if (edp_clk_enable(ctrl, EDP_CLK_MASK_AHB)) {
		pr_err("%s, fail to prepare enable ahb clk\n", __func__);
		ret = -EINVAL;
		goto unlock_ret;
	}
	edp_clock_synchronous(ctrl, 1);

	/* Configure eDP timing to HW */
	edp_write(ctrl->base + REG_EDP_TOTAL_HOR_VER,
		EDP_TOTAL_HOR_VER_HORIZ(mode->htotal) |
		EDP_TOTAL_HOR_VER_VERT(mode->vtotal));

	vstart_from_sync = mode->vtotal - mode->vsync_start;
	hstart_from_sync = mode->htotal - mode->hsync_start;
	edp_write(ctrl->base + REG_EDP_START_HOR_VER_FROM_SYNC,
		EDP_START_HOR_VER_FROM_SYNC_HORIZ(hstart_from_sync) |
		EDP_START_HOR_VER_FROM_SYNC_VERT(vstart_from_sync));

	data = EDP_HSYNC_VSYNC_WIDTH_POLARITY_VERT(
			mode->vsync_end - mode->vsync_start);
	data |= EDP_HSYNC_VSYNC_WIDTH_POLARITY_HORIZ(
			mode->hsync_end - mode->hsync_start);
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		data |= EDP_HSYNC_VSYNC_WIDTH_POLARITY_NVSYNC;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		data |= EDP_HSYNC_VSYNC_WIDTH_POLARITY_NHSYNC;
	edp_write(ctrl->base + REG_EDP_HSYNC_VSYNC_WIDTH_POLARITY, data);

	edp_write(ctrl->base + REG_EDP_ACTIVE_HOR_VER,
		EDP_ACTIVE_HOR_VER_HORIZ(mode->hdisplay) |
		EDP_ACTIVE_HOR_VER_VERT(mode->vdisplay));

	edp_clk_disable(ctrl, EDP_CLK_MASK_AHB);

unlock_ret:
	mutex_unlock(&ctrl->dev_mutex);
	return ret;
}

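/*
 * Check whether the pixel clock can be generated for the current link
 * rate (within 1% of a supported divider entry) and return its M/N values.
 */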
bool msm_edp_ctrl_pixel_clock_valid(struct edp_ctrl *ctrl,
	u32 pixel_rate, u32 *pm, u32 *pn)
{
	const struct edp_pixel_clk_div *divs;
	u32 err = 1; /* 1% error tolerance */
	u32 clk_err;
	int i;

	if (ctrl->link_rate == DP_LINK_BW_1_62) {
		divs = clk_divs[0];
	} else if (ctrl->link_rate == DP_LINK_BW_2_7) {
		divs = clk_divs[1];
	} else {
		pr_err("%s: Invalid link rate,%d\n", __func__, ctrl->link_rate);
		return false;
	}

	for (i = 0; i < EDP_PIXEL_CLK_NUM; i++) {
		clk_err = abs(divs[i].rate - pixel_rate);
		if ((divs[i].rate * err / 100) >= clk_err) {
			if (pm)
				*pm = divs[i].m;
			if (pn)
				*pn = divs[i].n;
			return true;
		}
	}

	DBG("pixel clock %d(kHz) not supported", pixel_rate);

	return false;
}