• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1  // SPDX-License-Identifier: GPL-2.0-only
2  /*
3   * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4   */
5  
6  #define pr_fmt(fmt)	"[drm-dp] %s: " fmt, __func__
7  
8  #include <linux/rational.h>
9  #include <linux/delay.h>
10  #include <linux/iopoll.h>
11  #include <linux/phy/phy.h>
12  #include <linux/phy/phy-dp.h>
13  #include <linux/rational.h>
14  #include <drm/drm_dp_helper.h>
15  #include <drm/drm_print.h>
16  
17  #include "dp_catalog.h"
18  #include "dp_reg.h"
19  
20  #define POLLING_SLEEP_US			1000
21  #define POLLING_TIMEOUT_US			10000
22  
23  #define SCRAMBLER_RESET_COUNT_VALUE		0xFC
24  
25  #define DP_INTERRUPT_STATUS_ACK_SHIFT	1
26  #define DP_INTERRUPT_STATUS_MASK_SHIFT	2
27  
28  #define MSM_DP_CONTROLLER_AHB_OFFSET	0x0000
29  #define MSM_DP_CONTROLLER_AHB_SIZE	0x0200
30  #define MSM_DP_CONTROLLER_AUX_OFFSET	0x0200
31  #define MSM_DP_CONTROLLER_AUX_SIZE	0x0200
32  #define MSM_DP_CONTROLLER_LINK_OFFSET	0x0400
33  #define MSM_DP_CONTROLLER_LINK_SIZE	0x0C00
34  #define MSM_DP_CONTROLLER_P0_OFFSET	0x1000
35  #define MSM_DP_CONTROLLER_P0_SIZE	0x0400
36  
37  #define DP_INTERRUPT_STATUS1 \
38  	(DP_INTR_AUX_I2C_DONE| \
39  	DP_INTR_WRONG_ADDR | DP_INTR_TIMEOUT | \
40  	DP_INTR_NACK_DEFER | DP_INTR_WRONG_DATA_CNT | \
41  	DP_INTR_I2C_NACK | DP_INTR_I2C_DEFER | \
42  	DP_INTR_PLL_UNLOCKED | DP_INTR_AUX_ERROR)
43  
44  #define DP_INTERRUPT_STATUS1_ACK \
45  	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_ACK_SHIFT)
46  #define DP_INTERRUPT_STATUS1_MASK \
47  	(DP_INTERRUPT_STATUS1 << DP_INTERRUPT_STATUS_MASK_SHIFT)
48  
49  #define DP_INTERRUPT_STATUS2 \
50  	(DP_INTR_READY_FOR_VIDEO | DP_INTR_IDLE_PATTERN_SENT | \
51  	DP_INTR_FRAME_END | DP_INTR_CRC_UPDATED)
52  
53  #define DP_INTERRUPT_STATUS2_ACK \
54  	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_ACK_SHIFT)
55  #define DP_INTERRUPT_STATUS2_MASK \
56  	(DP_INTERRUPT_STATUS2 << DP_INTERRUPT_STATUS_MASK_SHIFT)
57  
/*
 * Private catalog state wrapping the public struct dp_catalog handle.
 * Obtained back from the public handle via container_of() throughout
 * this file.
 */
struct dp_catalog_private {
	struct device *dev;
	struct dp_io *io;	/* register base + PHY handles */
	/* table of audio SDP header register offsets, indexed as
	 * audio_map[sdp_type][sdp_header] (see the audio helpers below) */
	u32 (*audio_map)[DP_AUDIO_SDP_HEADER_MAX];
	struct dp_catalog dp_catalog;	/* public handle returned to callers */
	/* NOTE(review): not referenced in this file — presumably cached AUX
	 * PHY LUT indices; confirm against users elsewhere */
	u8 aux_lut_cfg_index[PHY_AUX_CFG_MAX];
};
65  
/* Read a 32-bit register in the AUX block (relaxed ordering). */
static inline u32 dp_read_aux(struct dp_catalog_private *catalog, u32 offset)
{
	void __iomem *addr = catalog->io->dp_controller.base +
			MSM_DP_CONTROLLER_AUX_OFFSET + offset;

	return readl_relaxed(addr);
}
71  
/* Write a 32-bit register in the AUX block. */
static inline void dp_write_aux(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	void __iomem *addr = catalog->io->dp_controller.base +
			MSM_DP_CONTROLLER_AUX_OFFSET + offset;

	/*
	 * writel() (not writel_relaxed()) so the AUX register write is
	 * ordered before any subsequent operation.
	 */
	writel(data, addr);
}
82  
/* Read a 32-bit register in the AHB block (relaxed ordering). */
static inline u32 dp_read_ahb(struct dp_catalog_private *catalog, u32 offset)
{
	void __iomem *addr = catalog->io->dp_controller.base +
			MSM_DP_CONTROLLER_AHB_OFFSET + offset;

	return readl_relaxed(addr);
}
88  
/* Write a 32-bit register in the AHB block. */
static inline void dp_write_ahb(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	void __iomem *addr = catalog->io->dp_controller.base +
			MSM_DP_CONTROLLER_AHB_OFFSET + offset;

	/*
	 * writel() (not writel_relaxed()) so the AHB register write is
	 * ordered before any subsequent operation.
	 */
	writel(data, addr);
}
99  
/* Write a 32-bit register in the P0 (stream interface) block. */
static inline void dp_write_p0(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	void __iomem *addr = catalog->io->dp_controller.base +
			MSM_DP_CONTROLLER_P0_OFFSET + offset;

	/*
	 * writel() (not writel_relaxed()) so the interface register write
	 * is ordered before any subsequent operation.
	 */
	writel(data, addr);
}
110  
/* Read a 32-bit register in the P0 (stream interface) block. */
static inline u32 dp_read_p0(struct dp_catalog_private *catalog,
			       u32 offset)
{
	offset += MSM_DP_CONTROLLER_P0_OFFSET;
	/*
	 * NOTE(review): this is a read path — the previous comment about
	 * using writel() was copied from the write helper and did not
	 * apply.  readl_relaxed() is sufficient here as no ordering
	 * guarantee is required for a plain status read.
	 */
	return readl_relaxed(catalog->io->dp_controller.base + offset);
}
121  
/* Read a 32-bit register in the link block (relaxed ordering). */
static inline u32 dp_read_link(struct dp_catalog_private *catalog, u32 offset)
{
	void __iomem *addr = catalog->io->dp_controller.base +
			MSM_DP_CONTROLLER_LINK_OFFSET + offset;

	return readl_relaxed(addr);
}
127  
/* Write a 32-bit register in the link block. */
static inline void dp_write_link(struct dp_catalog_private *catalog,
			       u32 offset, u32 data)
{
	void __iomem *addr = catalog->io->dp_controller.base +
			MSM_DP_CONTROLLER_LINK_OFFSET + offset;

	/*
	 * writel() (not writel_relaxed()) so the link register write is
	 * ordered before any subsequent operation.
	 */
	writel(data, addr);
}
138  
139  /* aux related catalog functions */
dp_catalog_aux_read_data(struct dp_catalog * dp_catalog)140  u32 dp_catalog_aux_read_data(struct dp_catalog *dp_catalog)
141  {
142  	struct dp_catalog_private *catalog = container_of(dp_catalog,
143  				struct dp_catalog_private, dp_catalog);
144  
145  	return dp_read_aux(catalog, REG_DP_AUX_DATA);
146  }
147  
dp_catalog_aux_write_data(struct dp_catalog * dp_catalog)148  int dp_catalog_aux_write_data(struct dp_catalog *dp_catalog)
149  {
150  	struct dp_catalog_private *catalog = container_of(dp_catalog,
151  				struct dp_catalog_private, dp_catalog);
152  
153  	dp_write_aux(catalog, REG_DP_AUX_DATA, dp_catalog->aux_data);
154  	return 0;
155  }
156  
dp_catalog_aux_write_trans(struct dp_catalog * dp_catalog)157  int dp_catalog_aux_write_trans(struct dp_catalog *dp_catalog)
158  {
159  	struct dp_catalog_private *catalog = container_of(dp_catalog,
160  				struct dp_catalog_private, dp_catalog);
161  
162  	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, dp_catalog->aux_data);
163  	return 0;
164  }
165  
/*
 * Clear the AUX transaction control register.  For a read transaction
 * only the GO bit is dropped (preserving the other control bits); for
 * a write the whole register is zeroed.  Always returns 0.
 */
int dp_catalog_aux_clear_trans(struct dp_catalog *dp_catalog, bool read)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 val = 0;

	if (read) {
		val = dp_read_aux(catalog, REG_DP_AUX_TRANS_CTRL);
		val &= ~DP_AUX_TRANS_CTRL_GO;
	}

	dp_write_aux(catalog, REG_DP_AUX_TRANS_CTRL, val);

	return 0;
}
181  
/*
 * Clear latched AUX PHY interrupts: read the status (read-to-observe),
 * then run the clear sequence 0x1f -> 0x9f -> 0.  Always returns 0.
 * NOTE(review): the exact clear values look like a h/w recommended
 * sequence — confirm against the hardware programming guide.
 */
int dp_catalog_aux_clear_hw_interrupts(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_read_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_STATUS);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x1f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0x9f);
	dp_write_aux(catalog, REG_DP_PHY_AUX_INTERRUPT_CLEAR, 0);
	return 0;
}
193  
dp_catalog_aux_reset(struct dp_catalog * dp_catalog)194  void dp_catalog_aux_reset(struct dp_catalog *dp_catalog)
195  {
196  	u32 aux_ctrl;
197  	struct dp_catalog_private *catalog = container_of(dp_catalog,
198  				struct dp_catalog_private, dp_catalog);
199  
200  	aux_ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);
201  
202  	aux_ctrl |= DP_AUX_CTRL_RESET;
203  	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
204  	usleep_range(1000, 1100); /* h/w recommended delay */
205  
206  	aux_ctrl &= ~DP_AUX_CTRL_RESET;
207  	dp_write_aux(catalog, REG_DP_AUX_CTRL, aux_ctrl);
208  }
209  
/*
 * Enable or disable the AUX controller.  When enabling, program
 * generous timeout and limit values before setting the enable bit.
 */
void dp_catalog_aux_enable(struct dp_catalog *dp_catalog, bool enable)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 ctrl;

	ctrl = dp_read_aux(catalog, REG_DP_AUX_CTRL);

	if (enable) {
		dp_write_aux(catalog, REG_DP_TIMEOUT_COUNT, 0xffff);
		dp_write_aux(catalog, REG_DP_AUX_LIMITS, 0xffff);
		ctrl |= DP_AUX_CTRL_ENABLE;
	} else {
		ctrl &= ~DP_AUX_CTRL_ENABLE;
	}

	dp_write_aux(catalog, REG_DP_AUX_CTRL, ctrl);
}
228  
dp_catalog_aux_update_cfg(struct dp_catalog * dp_catalog)229  void dp_catalog_aux_update_cfg(struct dp_catalog *dp_catalog)
230  {
231  	struct dp_catalog_private *catalog = container_of(dp_catalog,
232  				struct dp_catalog_private, dp_catalog);
233  	struct dp_io *dp_io = catalog->io;
234  	struct phy *phy = dp_io->phy;
235  
236  	phy_calibrate(phy);
237  }
238  
/*
 * Print a hex dump of @len bytes of registers at @base, one line per
 * 16 bytes (four 32-bit words), offsets relative to @base.
 */
static void dump_regs(void __iomem *base, int len)
{
	int i;
	u32 x0, x4, x8, xc;
	u32 addr_off = 0;

	len = DIV_ROUND_UP(len, 16);
	for (i = 0; i < len; i++) {
		x0 = readl_relaxed(base + addr_off);
		x4 = readl_relaxed(base + addr_off + 0x04);
		x8 = readl_relaxed(base + addr_off + 0x08);
		xc = readl_relaxed(base + addr_off + 0x0c);

		/* '\n' was missing, merging consecutive dump lines */
		pr_info("%08x: %08x %08x %08x %08x\n", addr_off, x0, x4, x8, xc);
		addr_off += 16;
	}
}
256  
dp_catalog_dump_regs(struct dp_catalog * dp_catalog)257  void dp_catalog_dump_regs(struct dp_catalog *dp_catalog)
258  {
259  	u32 offset, len;
260  	struct dp_catalog_private *catalog = container_of(dp_catalog,
261  		struct dp_catalog_private, dp_catalog);
262  
263  	pr_info("AHB regs\n");
264  	offset = MSM_DP_CONTROLLER_AHB_OFFSET;
265  	len = MSM_DP_CONTROLLER_AHB_SIZE;
266  	dump_regs(catalog->io->dp_controller.base + offset, len);
267  
268  	pr_info("AUXCLK regs\n");
269  	offset = MSM_DP_CONTROLLER_AUX_OFFSET;
270  	len = MSM_DP_CONTROLLER_AUX_SIZE;
271  	dump_regs(catalog->io->dp_controller.base + offset, len);
272  
273  	pr_info("LCLK regs\n");
274  	offset = MSM_DP_CONTROLLER_LINK_OFFSET;
275  	len = MSM_DP_CONTROLLER_LINK_SIZE;
276  	dump_regs(catalog->io->dp_controller.base + offset, len);
277  
278  	pr_info("P0CLK regs\n");
279  	offset = MSM_DP_CONTROLLER_P0_OFFSET;
280  	len = MSM_DP_CONTROLLER_P0_SIZE;
281  	dump_regs(catalog->io->dp_controller.base + offset, len);
282  }
283  
dp_catalog_aux_get_irq(struct dp_catalog * dp_catalog)284  int dp_catalog_aux_get_irq(struct dp_catalog *dp_catalog)
285  {
286  	struct dp_catalog_private *catalog = container_of(dp_catalog,
287  				struct dp_catalog_private, dp_catalog);
288  	u32 intr, intr_ack;
289  
290  	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS);
291  	intr &= ~DP_INTERRUPT_STATUS1_MASK;
292  	intr_ack = (intr & DP_INTERRUPT_STATUS1)
293  			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
294  	dp_write_ahb(catalog, REG_DP_INTR_STATUS, intr_ack |
295  			DP_INTERRUPT_STATUS1_MASK);
296  
297  	return intr;
298  
299  }
300  
301  /* controller related catalog functions */
/* controller related catalog functions */

/* Program the transfer-unit size and both valid-boundary words. */
void dp_catalog_ctrl_update_transfer_unit(struct dp_catalog *dp_catalog,
				u32 dp_tu, u32 valid_boundary,
				u32 valid_boundary2)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_VALID_BOUNDARY, valid_boundary);
	dp_write_link(catalog, REG_DP_TU, dp_tu);
	dp_write_link(catalog, REG_DP_VALID_BOUNDARY_2, valid_boundary2);
}
313  
/* Write @state into the mainlink state control register. */
void dp_catalog_ctrl_state_ctrl(struct dp_catalog *dp_catalog, u32 state)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_link(catalog, REG_DP_STATE_CTRL, state);
}
321  
/* Write the precomputed configuration word to DP_CONFIGURATION_CTRL. */
void dp_catalog_ctrl_config_ctrl(struct dp_catalog *dp_catalog, u32 cfg)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	DRM_DEBUG_DP("DP_CONFIGURATION_CTRL=0x%x\n", cfg);

	dp_write_link(catalog, REG_DP_CONFIGURATION_CTRL, cfg);
}
331  
dp_catalog_ctrl_lane_mapping(struct dp_catalog * dp_catalog)332  void dp_catalog_ctrl_lane_mapping(struct dp_catalog *dp_catalog)
333  {
334  	struct dp_catalog_private *catalog = container_of(dp_catalog,
335  				struct dp_catalog_private, dp_catalog);
336  	u32 ln_0 = 0, ln_1 = 1, ln_2 = 2, ln_3 = 3; /* One-to-One mapping */
337  	u32 ln_mapping;
338  
339  	ln_mapping = ln_0 << LANE0_MAPPING_SHIFT;
340  	ln_mapping |= ln_1 << LANE1_MAPPING_SHIFT;
341  	ln_mapping |= ln_2 << LANE2_MAPPING_SHIFT;
342  	ln_mapping |= ln_3 << LANE3_MAPPING_SHIFT;
343  
344  	dp_write_link(catalog, REG_DP_LOGICAL2PHYSICAL_LANE_MAPPING,
345  			ln_mapping);
346  }
347  
/*
 * Enable path: pulse the mainlink reset (clear both bits, assert
 * RESET, release RESET) and then set ENABLE together with the framing
 * boundary select.  Disable path: simply drop the ENABLE bit.
 */
void dp_catalog_ctrl_mainlink_ctrl(struct dp_catalog *dp_catalog,
						bool enable)
{
	u32 mainlink_ctrl;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (enable) {
		/*
		 * To make sure link reg writes happens before other operation,
		 * dp_write_link() function uses writel()
		 */
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);

		/* step 1: drop both RESET and ENABLE */
		mainlink_ctrl &= ~(DP_MAINLINK_CTRL_RESET |
						DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 2: assert RESET */
		mainlink_ctrl |= DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 3: release RESET */
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_RESET;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);

		/* step 4: enable the link with the FB boundary select */
		mainlink_ctrl |= (DP_MAINLINK_CTRL_ENABLE |
					DP_MAINLINK_FB_BOUNDARY_SEL);
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	} else {
		mainlink_ctrl = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		mainlink_ctrl &= ~DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, mainlink_ctrl);
	}
}
381  
/*
 * Program MISC0/MISC1: test-bits-depth (bpp) and colorimetry fields,
 * plus synchronous-clock mode.
 * NOTE(review): only the bpp field is cleared before OR-ing; stale
 * colorimetry bits from a previous mode would persist — confirm the
 * register is always reset between mode sets.
 */
void dp_catalog_ctrl_config_misc(struct dp_catalog *dp_catalog,
					u32 colorimetry_cfg,
					u32 test_bits_depth)
{
	u32 misc_val;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	misc_val = dp_read_link(catalog, REG_DP_MISC1_MISC0);

	/* clear bpp bits */
	misc_val &= ~(0x07 << DP_MISC0_TEST_BITS_DEPTH_SHIFT);
	misc_val |= colorimetry_cfg << DP_MISC0_COLORIMETRY_CFG_SHIFT;
	misc_val |= test_bits_depth << DP_MISC0_TEST_BITS_DEPTH_SHIFT;
	/* Configure clock to synchronous mode */
	misc_val |= DP_MISC0_SYNCHRONOUS_CLK;

	DRM_DEBUG_DP("misc settings = 0x%x\n", misc_val);
	dp_write_link(catalog, REG_DP_MISC1_MISC0, misc_val);
}
402  
/*
 * Program the software MVID/NVID pair for the given link @rate (kHz)
 * and pixel @stream_rate_khz.  M/N come from
 * rational_best_approximation() on the dispcc pixel-clock input rate,
 * then N is scaled up to at least DP_LINK_CONSTANT_N_VALUE and
 * adjusted per link rate.  @fixed_nvid is currently unused here.
 */
void dp_catalog_ctrl_config_msa(struct dp_catalog *dp_catalog,
					u32 rate, u32 stream_rate_khz,
					bool fixed_nvid)
{
	u32 pixel_m, pixel_n;
	u32 mvid, nvid, pixel_div = 0, dispcc_input_rate;
	u32 const nvid_fixed = DP_LINK_CONSTANT_N_VALUE;
	u32 const link_rate_hbr2 = 540000;
	u32 const link_rate_hbr3 = 810000;
	unsigned long den, num;

	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	if (rate == link_rate_hbr3)
		pixel_div = 6;
	else if (rate == 162000 || rate == 270000)
		pixel_div = 2;
	else if (rate == link_rate_hbr2)
		pixel_div = 4;
	else {
		/* bail out: pixel_div == 0 would divide by zero below */
		DRM_ERROR("Invalid pixel mux divider\n");
		return;
	}

	dispcc_input_rate = (rate * 10) / pixel_div;

	rational_best_approximation(dispcc_input_rate, stream_rate_khz,
			(unsigned long)(1 << 16) - 1,
			(unsigned long)(1 << 16) - 1, &den, &num);

	den = ~(den - num);
	den = den & 0xFFFF;
	pixel_m = num;
	pixel_n = den;

	mvid = (pixel_m & 0xFFFF) * 5;
	nvid = (0xFFFF & (~pixel_n)) + (pixel_m & 0xFFFF);

	/* scale M and N together until N reaches the DP constant-N value */
	if (nvid < nvid_fixed) {
		u32 temp;

		temp = (nvid_fixed / nvid) * nvid;
		mvid = (nvid_fixed / nvid) * mvid;
		nvid = temp;
	}

	if (link_rate_hbr2 == rate)
		nvid *= 2;

	if (link_rate_hbr3 == rate)
		nvid *= 3;

	DRM_DEBUG_DP("mvid=0x%x, nvid=0x%x\n", mvid, nvid);
	dp_write_link(catalog, REG_DP_SOFTWARE_MVID, mvid);
	dp_write_link(catalog, REG_DP_SOFTWARE_NVID, nvid);
	dp_write_p0(catalog, MMSS_DP_DSC_DTO, 0x0);
}
459  
/*
 * Select link-training pattern @pattern (1-based) via the state control
 * register, then poll REG_DP_MAINLINK_READY until the matching ready
 * bit asserts.  Returns 0 on success, negative errno on poll timeout.
 */
int dp_catalog_ctrl_set_pattern(struct dp_catalog *dp_catalog,
					u32 pattern)
{
	int bit, ret;
	u32 data;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	bit = BIT(pattern - 1);
	DRM_DEBUG_DP("hw: bit=%d train=%d\n", bit, pattern);
	dp_catalog_ctrl_state_ctrl(dp_catalog, bit);

	/* the per-pattern ready bits sit above the training shift */
	bit = BIT(pattern - 1) << DP_MAINLINK_READY_LINK_TRAINING_SHIFT;

	/* Poll for mainlink ready status */
	ret = readx_poll_timeout(readl, catalog->io->dp_controller.base +
					MSM_DP_CONTROLLER_LINK_OFFSET +
					REG_DP_MAINLINK_READY,
					data, data & bit,
					POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("set pattern for link_train=%d failed\n", pattern);
		return ret;
	}
	return 0;
}
486  
dp_catalog_ctrl_reset(struct dp_catalog * dp_catalog)487  void dp_catalog_ctrl_reset(struct dp_catalog *dp_catalog)
488  {
489  	u32 sw_reset;
490  	struct dp_catalog_private *catalog = container_of(dp_catalog,
491  				struct dp_catalog_private, dp_catalog);
492  
493  	sw_reset = dp_read_ahb(catalog, REG_DP_SW_RESET);
494  
495  	sw_reset |= DP_SW_RESET;
496  	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
497  	usleep_range(1000, 1100); /* h/w recommended delay */
498  
499  	sw_reset &= ~DP_SW_RESET;
500  	dp_write_ahb(catalog, REG_DP_SW_RESET, sw_reset);
501  }
502  
/*
 * Poll REG_DP_MAINLINK_READY for DP_MAINLINK_READY_FOR_VIDEO.
 * Returns true once the mainlink reports ready, false on poll timeout.
 */
bool dp_catalog_ctrl_mainlink_ready(struct dp_catalog *dp_catalog)
{
	u32 data;
	int ret;
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	/* Poll for mainlink ready status */
	ret = readl_poll_timeout(catalog->io->dp_controller.base +
				MSM_DP_CONTROLLER_LINK_OFFSET +
				REG_DP_MAINLINK_READY,
				data, data & DP_MAINLINK_READY_FOR_VIDEO,
				POLLING_SLEEP_US, POLLING_TIMEOUT_US);
	if (ret < 0) {
		DRM_ERROR("mainlink not ready\n");
		return false;
	}

	return true;
}
523  
/*
 * Enable or disable both controller interrupt groups by writing the
 * STATUS1/STATUS2 mask bits (or zero) to their status registers.
 */
void dp_catalog_ctrl_enable_irq(struct dp_catalog *dp_catalog,
						bool enable)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 mask1 = enable ? DP_INTERRUPT_STATUS1_MASK : 0x00;
	u32 mask2 = enable ? DP_INTERRUPT_STATUS2_MASK : 0x00;

	dp_write_ahb(catalog, REG_DP_INTR_STATUS, mask1);
	dp_write_ahb(catalog, REG_DP_INTR_STATUS2, mask2);
}
540  
/*
 * Set (@en true) or clear (@en false) @intr_mask bits in the HPD
 * interrupt mask register; only bits within DP_DP_HPD_INT_MASK are
 * ever written back.
 */
void dp_catalog_hpd_config_intr(struct dp_catalog *dp_catalog,
			u32 intr_mask, bool en)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 config;

	config = dp_read_aux(catalog, REG_DP_DP_HPD_INT_MASK);
	if (en)
		config |= intr_mask;
	else
		config &= ~intr_mask;

	dp_write_aux(catalog, REG_DP_DP_HPD_INT_MASK,
				config & DP_DP_HPD_INT_MASK);
}
554  
/*
 * One-time HPD block setup: unmask plug/unplug/replug/irq_hpd
 * interrupts, enable the HPD reference timer, then turn HPD on.
 */
void dp_catalog_ctrl_hpd_config(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	u32 reftimer = dp_read_aux(catalog, REG_DP_DP_HPD_REFTIMER);

	/* enable HPD interrupts */
	dp_catalog_hpd_config_intr(dp_catalog,
		DP_DP_HPD_PLUG_INT_MASK | DP_DP_IRQ_HPD_INT_MASK
		| DP_DP_HPD_UNPLUG_INT_MASK | DP_DP_HPD_REPLUG_INT_MASK, true);

	/* Configure REFTIMER and enable it */
	reftimer |= DP_DP_HPD_REFTIMER_ENABLE;
	dp_write_aux(catalog, REG_DP_DP_HPD_REFTIMER, reftimer);

	/* Enable HPD */
	dp_write_aux(catalog, REG_DP_DP_HPD_CTRL, DP_DP_HPD_CTRL_HPD_EN);
}
574  
dp_catalog_link_is_connected(struct dp_catalog * dp_catalog)575  u32 dp_catalog_link_is_connected(struct dp_catalog *dp_catalog)
576  {
577  	struct dp_catalog_private *catalog = container_of(dp_catalog,
578  				struct dp_catalog_private, dp_catalog);
579  	u32 status;
580  
581  	status = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
582  	status >>= DP_DP_HPD_STATE_STATUS_BITS_SHIFT;
583  	status &= DP_DP_HPD_STATE_STATUS_BITS_MASK;
584  
585  	return status;
586  }
587  
dp_catalog_hpd_get_intr_status(struct dp_catalog * dp_catalog)588  u32 dp_catalog_hpd_get_intr_status(struct dp_catalog *dp_catalog)
589  {
590  	struct dp_catalog_private *catalog = container_of(dp_catalog,
591  				struct dp_catalog_private, dp_catalog);
592  	int isr = 0;
593  
594  	isr = dp_read_aux(catalog, REG_DP_DP_HPD_INT_STATUS);
595  	dp_write_aux(catalog, REG_DP_DP_HPD_INT_ACK,
596  				 (isr & DP_DP_HPD_INT_MASK));
597  
598  	return isr;
599  }
600  
dp_catalog_ctrl_get_interrupt(struct dp_catalog * dp_catalog)601  int dp_catalog_ctrl_get_interrupt(struct dp_catalog *dp_catalog)
602  {
603  	struct dp_catalog_private *catalog = container_of(dp_catalog,
604  				struct dp_catalog_private, dp_catalog);
605  	u32 intr, intr_ack;
606  
607  	intr = dp_read_ahb(catalog, REG_DP_INTR_STATUS2);
608  	intr &= ~DP_INTERRUPT_STATUS2_MASK;
609  	intr_ack = (intr & DP_INTERRUPT_STATUS2)
610  			<< DP_INTERRUPT_STATUS_ACK_SHIFT;
611  	dp_write_ahb(catalog, REG_DP_INTR_STATUS2,
612  			intr_ack | DP_INTERRUPT_STATUS2_MASK);
613  
614  	return intr;
615  }
616  
/* Pulse the PHY and PLL software resets with the h/w recommended delay. */
void dp_catalog_ctrl_phy_reset(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_ahb(catalog, REG_DP_PHY_CTRL,
			DP_PHY_CTRL_SW_RESET | DP_PHY_CTRL_SW_RESET_PLL);
	usleep_range(1000, 1100); /* h/w recommended delay */
	dp_write_ahb(catalog, REG_DP_PHY_CTRL, 0x0);
}
627  
/*
 * Hand the requested voltage-swing / pre-emphasis levels to the PHY
 * driver.  set_voltages is pulsed around phy_configure() so the PHY
 * applies the levels only for this call.  Always returns 0.
 */
int dp_catalog_ctrl_update_vx_px(struct dp_catalog *dp_catalog,
		u8 v_level, u8 p_level)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	struct dp_io *dp_io = catalog->io;
	struct phy *phy = dp_io->phy;
	struct phy_configure_opts_dp *opts_dp = &dp_io->phy_opts.dp;

	/* TODO: Update for all lanes instead of just first one */
	opts_dp->voltage[0] = v_level;
	opts_dp->pre[0] = p_level;
	opts_dp->set_voltages = 1;
	phy_configure(phy, &dp_io->phy_opts);
	opts_dp->set_voltages = 0;

	return 0;
}
646  
/*
 * Start transmitting the requested PHY compliance test pattern.
 * Each case programs the exact register sequence the pattern needs;
 * the state control register is always cleared first so patterns do
 * not stack.
 */
void dp_catalog_ctrl_send_phy_pattern(struct dp_catalog *dp_catalog,
			u32 pattern)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 value = 0x0;

	/* Make sure to clear the current pattern before starting a new one */
	dp_write_link(catalog, REG_DP_STATE_CTRL, 0x0);

	switch (pattern) {
	case DP_PHY_TEST_PATTERN_D10_2:
		/* D10.2 is transmitted as training pattern 1 */
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN1);
		break;
	case DP_PHY_TEST_PATTERN_ERROR_COUNT:
		/* reset the scrambler, program its reset count, then start
		 * the symbol error measurement pattern */
		value &= ~(1 << 16);
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		break;
	case DP_PHY_TEST_PATTERN_PRBS7:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_PRBS7);
		break;
	case DP_PHY_TEST_PATTERN_80BIT_CUSTOM:
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TEST_CUSTOM_PATTERN);
		/* 00111110000011111000001111100000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG0,
				0x3E0F83E0);
		/* 00001111100000111110000011111000 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG1,
				0x0F83E0F8);
		/* 1111100000111110 */
		dp_write_link(catalog, REG_DP_TEST_80BIT_CUSTOM_PATTERN_REG2,
				0x0000F83E);
		break;
	case DP_PHY_TEST_PATTERN_CP2520:
		/* CP2520 pattern 1: scrambler bypass off, ERM pattern with
		 * scrambler reset count, then re-enable the mainlink */
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value &= ~DP_MAINLINK_CTRL_SW_BYPASS_SCRAMBLER;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);

		value = DP_HBR2_ERM_PATTERN;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
				value);
		value |= SCRAMBLER_RESET_COUNT_VALUE;
		dp_write_link(catalog, REG_DP_HBR2_COMPLIANCE_SCRAMBLER_RESET,
					value);
		dp_write_link(catalog, REG_DP_MAINLINK_LEVELS,
					DP_MAINLINK_SAFE_TO_EXIT_LEVEL_2);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
					DP_STATE_CTRL_LINK_SYMBOL_ERR_MEASURE);
		value = dp_read_link(catalog, REG_DP_MAINLINK_CTRL);
		value |= DP_MAINLINK_CTRL_ENABLE;
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL, value);
		break;
	case DP_PHY_TEST_PATTERN_SEL_MASK:
		dp_write_link(catalog, REG_DP_MAINLINK_CTRL,
				DP_MAINLINK_CTRL_ENABLE);
		dp_write_link(catalog, REG_DP_STATE_CTRL,
				DP_STATE_CTRL_LINK_TRAINING_PATTERN4);
		break;
	default:
		DRM_DEBUG_DP("No valid test pattern requested:0x%x\n", pattern);
		break;
	}
}
721  
dp_catalog_ctrl_read_phy_pattern(struct dp_catalog * dp_catalog)722  u32 dp_catalog_ctrl_read_phy_pattern(struct dp_catalog *dp_catalog)
723  {
724  	struct dp_catalog_private *catalog = container_of(dp_catalog,
725  				struct dp_catalog_private, dp_catalog);
726  
727  	return dp_read_link(catalog, REG_DP_MAINLINK_READY);
728  }
729  
730  /* panel related catalog functions */
dp_catalog_panel_timing_cfg(struct dp_catalog * dp_catalog)731  int dp_catalog_panel_timing_cfg(struct dp_catalog *dp_catalog)
732  {
733  	struct dp_catalog_private *catalog = container_of(dp_catalog,
734  				struct dp_catalog_private, dp_catalog);
735  
736  	dp_write_link(catalog, REG_DP_TOTAL_HOR_VER,
737  				dp_catalog->total);
738  	dp_write_link(catalog, REG_DP_START_HOR_VER_FROM_SYNC,
739  				dp_catalog->sync_start);
740  	dp_write_link(catalog, REG_DP_HSYNC_VSYNC_WIDTH_POLARITY,
741  				dp_catalog->width_blanking);
742  	dp_write_link(catalog, REG_DP_ACTIVE_HOR_VER, dp_catalog->dp_active);
743  	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0);
744  	return 0;
745  }
746  
/*
 * Enable the test pattern generator for @drm_mode: derive the timing
 * engine parameters from the mode, program the full INTF timing set,
 * select the checkered-rect pattern (RGB, 8 bpc), then enable BIST and
 * the timing engine.
 */
void dp_catalog_panel_tpg_enable(struct dp_catalog *dp_catalog,
				struct drm_display_mode *drm_mode)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);
	u32 hsync_period, vsync_period;
	u32 display_v_start, display_v_end;
	u32 hsync_start_x, hsync_end_x;
	u32 v_sync_width;
	u32 hsync_ctl;
	u32 display_hctl;

	/* TPG config parameters*/
	hsync_period = drm_mode->htotal;
	vsync_period = drm_mode->vtotal;

	/* vertical active window, expressed in pixel-clock ticks
	 * (lines * hsync_period), then offset by the horizontal porches */
	display_v_start = ((drm_mode->vtotal - drm_mode->vsync_start) *
					hsync_period);
	display_v_end = ((vsync_period - (drm_mode->vsync_start -
					drm_mode->vdisplay))
					* hsync_period) - 1;

	display_v_start += drm_mode->htotal - drm_mode->hsync_start;
	display_v_end -= (drm_mode->hsync_start - drm_mode->hdisplay);

	hsync_start_x = drm_mode->htotal - drm_mode->hsync_start;
	hsync_end_x = hsync_period - (drm_mode->hsync_start -
					drm_mode->hdisplay) - 1;

	v_sync_width = drm_mode->vsync_end - drm_mode->vsync_start;

	/* packed fields: period in the high half, pulse width / start in
	 * the low half */
	hsync_ctl = (hsync_period << 16) |
			(drm_mode->hsync_end - drm_mode->hsync_start);
	display_hctl = (hsync_end_x << 16) | hsync_start_x;


	/* program the timing engine; F1 (second field) is unused */
	dp_write_p0(catalog, MMSS_DP_INTF_CONFIG, 0x0);
	dp_write_p0(catalog, MMSS_DP_INTF_HSYNC_CTL, hsync_ctl);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F0, vsync_period *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F0, v_sync_width *
			hsync_period);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PERIOD_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_VSYNC_PULSE_WIDTH_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_HCTL, display_hctl);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_HCTL, 0);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F0, display_v_start);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F0, display_v_end);
	dp_write_p0(catalog, MMSS_INTF_DISPLAY_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_DISPLAY_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F0, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_START_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_ACTIVE_V_END_F1, 0);
	dp_write_p0(catalog, MMSS_DP_INTF_POLARITY_CTL, 0);

	/* select the pattern, then turn on BIST and the timing engine */
	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL,
				DP_TPG_CHECKERED_RECT_PATTERN);
	dp_write_p0(catalog, MMSS_DP_TPG_VIDEO_CONFIG,
				DP_TPG_VIDEO_CONFIG_BPP_8BIT |
				DP_TPG_VIDEO_CONFIG_RGB);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE,
				DP_BIST_ENABLE_DPBIST_EN);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN,
				DP_TIMING_ENGINE_EN_EN);
	DRM_DEBUG_DP("%s: enabled tpg\n", __func__);
}
814  
/* Tear down the TPG: clear pattern control, BIST and timing engine. */
void dp_catalog_panel_tpg_disable(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog = container_of(dp_catalog,
				struct dp_catalog_private, dp_catalog);

	dp_write_p0(catalog, MMSS_DP_TPG_MAIN_CONTROL, 0x0);
	dp_write_p0(catalog, MMSS_DP_BIST_ENABLE, 0x0);
	dp_write_p0(catalog, MMSS_DP_TIMING_ENGINE_EN, 0x0);
}
824  
dp_catalog_get(struct device * dev,struct dp_io * io)825  struct dp_catalog *dp_catalog_get(struct device *dev, struct dp_io *io)
826  {
827  	struct dp_catalog_private *catalog;
828  
829  	if (!io) {
830  		DRM_ERROR("invalid input\n");
831  		return ERR_PTR(-EINVAL);
832  	}
833  
834  	catalog  = devm_kzalloc(dev, sizeof(*catalog), GFP_KERNEL);
835  	if (!catalog)
836  		return ERR_PTR(-ENOMEM);
837  
838  	catalog->dev = dev;
839  	catalog->io = io;
840  
841  	return &catalog->dp_catalog;
842  }
843  
dp_catalog_audio_get_header(struct dp_catalog * dp_catalog)844  void dp_catalog_audio_get_header(struct dp_catalog *dp_catalog)
845  {
846  	struct dp_catalog_private *catalog;
847  	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
848  	enum dp_catalog_audio_sdp_type sdp;
849  	enum dp_catalog_audio_header_type header;
850  
851  	if (!dp_catalog)
852  		return;
853  
854  	catalog = container_of(dp_catalog,
855  		struct dp_catalog_private, dp_catalog);
856  
857  	sdp_map = catalog->audio_map;
858  	sdp     = dp_catalog->sdp_type;
859  	header  = dp_catalog->sdp_header;
860  
861  	dp_catalog->audio_data = dp_read_link(catalog,
862  			sdp_map[sdp][header]);
863  }
864  
dp_catalog_audio_set_header(struct dp_catalog * dp_catalog)865  void dp_catalog_audio_set_header(struct dp_catalog *dp_catalog)
866  {
867  	struct dp_catalog_private *catalog;
868  	u32 (*sdp_map)[DP_AUDIO_SDP_HEADER_MAX];
869  	enum dp_catalog_audio_sdp_type sdp;
870  	enum dp_catalog_audio_header_type header;
871  	u32 data;
872  
873  	if (!dp_catalog)
874  		return;
875  
876  	catalog = container_of(dp_catalog,
877  		struct dp_catalog_private, dp_catalog);
878  
879  	sdp_map = catalog->audio_map;
880  	sdp     = dp_catalog->sdp_type;
881  	header  = dp_catalog->sdp_header;
882  	data    = dp_catalog->audio_data;
883  
884  	dp_write_link(catalog, sdp_map[sdp][header], data);
885  }
886  
dp_catalog_audio_config_acr(struct dp_catalog * dp_catalog)887  void dp_catalog_audio_config_acr(struct dp_catalog *dp_catalog)
888  {
889  	struct dp_catalog_private *catalog;
890  	u32 acr_ctrl, select;
891  
892  	if (!dp_catalog)
893  		return;
894  
895  	catalog = container_of(dp_catalog,
896  		struct dp_catalog_private, dp_catalog);
897  
898  	select = dp_catalog->audio_data;
899  	acr_ctrl = select << 4 | BIT(31) | BIT(8) | BIT(14);
900  
901  	DRM_DEBUG_DP("select = 0x%x, acr_ctrl = 0x%x\n", select, acr_ctrl);
902  
903  	dp_write_link(catalog, MMSS_DP_AUDIO_ACR_CTRL, acr_ctrl);
904  }
905  
dp_catalog_audio_enable(struct dp_catalog * dp_catalog)906  void dp_catalog_audio_enable(struct dp_catalog *dp_catalog)
907  {
908  	struct dp_catalog_private *catalog;
909  	bool enable;
910  	u32 audio_ctrl;
911  
912  	if (!dp_catalog)
913  		return;
914  
915  	catalog = container_of(dp_catalog,
916  		struct dp_catalog_private, dp_catalog);
917  
918  	enable = !!dp_catalog->audio_data;
919  	audio_ctrl = dp_read_link(catalog, MMSS_DP_AUDIO_CFG);
920  
921  	if (enable)
922  		audio_ctrl |= BIT(0);
923  	else
924  		audio_ctrl &= ~BIT(0);
925  
926  	DRM_DEBUG_DP("dp_audio_cfg = 0x%x\n", audio_ctrl);
927  
928  	dp_write_link(catalog, MMSS_DP_AUDIO_CFG, audio_ctrl);
929  	/* make sure audio engine is disabled */
930  	wmb();
931  }
932  
dp_catalog_audio_config_sdp(struct dp_catalog * dp_catalog)933  void dp_catalog_audio_config_sdp(struct dp_catalog *dp_catalog)
934  {
935  	struct dp_catalog_private *catalog;
936  	u32 sdp_cfg = 0;
937  	u32 sdp_cfg2 = 0;
938  
939  	if (!dp_catalog)
940  		return;
941  
942  	catalog = container_of(dp_catalog,
943  		struct dp_catalog_private, dp_catalog);
944  
945  	sdp_cfg = dp_read_link(catalog, MMSS_DP_SDP_CFG);
946  	/* AUDIO_TIMESTAMP_SDP_EN */
947  	sdp_cfg |= BIT(1);
948  	/* AUDIO_STREAM_SDP_EN */
949  	sdp_cfg |= BIT(2);
950  	/* AUDIO_COPY_MANAGEMENT_SDP_EN */
951  	sdp_cfg |= BIT(5);
952  	/* AUDIO_ISRC_SDP_EN  */
953  	sdp_cfg |= BIT(6);
954  	/* AUDIO_INFOFRAME_SDP_EN  */
955  	sdp_cfg |= BIT(20);
956  
957  	DRM_DEBUG_DP("sdp_cfg = 0x%x\n", sdp_cfg);
958  
959  	dp_write_link(catalog, MMSS_DP_SDP_CFG, sdp_cfg);
960  
961  	sdp_cfg2 = dp_read_link(catalog, MMSS_DP_SDP_CFG2);
962  	/* IFRM_REGSRC -> Do not use reg values */
963  	sdp_cfg2 &= ~BIT(0);
964  	/* AUDIO_STREAM_HB3_REGSRC-> Do not use reg values */
965  	sdp_cfg2 &= ~BIT(1);
966  
967  	DRM_DEBUG_DP("sdp_cfg2 = 0x%x\n", sdp_cfg2);
968  
969  	dp_write_link(catalog, MMSS_DP_SDP_CFG2, sdp_cfg2);
970  }
971  
/**
 * dp_catalog_audio_init() - install the audio SDP register lookup table
 * @dp_catalog: DP catalog handle (NULL makes this a no-op)
 *
 * Points catalog->audio_map at a static table that the audio get/set
 * header helpers use to translate a (sdp_type, sdp_header) pair into a
 * link-register offset.
 */
void dp_catalog_audio_init(struct dp_catalog *dp_catalog)
{
	struct dp_catalog_private *catalog;

	/*
	 * Rows are indexed by enum dp_catalog_audio_sdp_type, columns by
	 * enum dp_catalog_audio_header_type (DP_AUDIO_SDP_HEADER_MAX wide).
	 * NOTE(review): the second and third columns of every row name the
	 * same *_1 register — presumably two header words share one
	 * register on this hardware; confirm against the register layout
	 * in dp_reg.h.
	 */
	static u32 sdp_map[][DP_AUDIO_SDP_HEADER_MAX] = {
		{
			MMSS_DP_AUDIO_STREAM_0,
			MMSS_DP_AUDIO_STREAM_1,
			MMSS_DP_AUDIO_STREAM_1,
		},
		{
			MMSS_DP_AUDIO_TIMESTAMP_0,
			MMSS_DP_AUDIO_TIMESTAMP_1,
			MMSS_DP_AUDIO_TIMESTAMP_1,
		},
		{
			MMSS_DP_AUDIO_INFOFRAME_0,
			MMSS_DP_AUDIO_INFOFRAME_1,
			MMSS_DP_AUDIO_INFOFRAME_1,
		},
		{
			MMSS_DP_AUDIO_COPYMANAGEMENT_0,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
			MMSS_DP_AUDIO_COPYMANAGEMENT_1,
		},
		{
			MMSS_DP_AUDIO_ISRC_0,
			MMSS_DP_AUDIO_ISRC_1,
			MMSS_DP_AUDIO_ISRC_1,
		},
	};

	if (!dp_catalog)
		return;

	catalog = container_of(dp_catalog,
		struct dp_catalog_private, dp_catalog);

	catalog->audio_map = sdp_map;
}
1012  
dp_catalog_audio_sfe_level(struct dp_catalog * dp_catalog)1013  void dp_catalog_audio_sfe_level(struct dp_catalog *dp_catalog)
1014  {
1015  	struct dp_catalog_private *catalog;
1016  	u32 mainlink_levels, safe_to_exit_level;
1017  
1018  	if (!dp_catalog)
1019  		return;
1020  
1021  	catalog = container_of(dp_catalog,
1022  		struct dp_catalog_private, dp_catalog);
1023  
1024  	safe_to_exit_level = dp_catalog->audio_data;
1025  	mainlink_levels = dp_read_link(catalog, REG_DP_MAINLINK_LEVELS);
1026  	mainlink_levels &= 0xFE0;
1027  	mainlink_levels |= safe_to_exit_level;
1028  
1029  	DRM_DEBUG_DP("mainlink_level = 0x%x, safe_to_exit_level = 0x%x\n",
1030  			 mainlink_levels, safe_to_exit_level);
1031  
1032  	dp_write_link(catalog, REG_DP_MAINLINK_LEVELS, mainlink_levels);
1033  }
1034