// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2023 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2015-2018, The Linux Foundation. All rights reserved.
 */

#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_kms.h"
#include "dpu_trace.h"

#include <linux/iopoll.h>

#define INTF_TIMING_ENGINE_EN 0x000
#define INTF_CONFIG 0x004
#define INTF_HSYNC_CTL 0x008
#define INTF_VSYNC_PERIOD_F0 0x00C
#define INTF_VSYNC_PERIOD_F1 0x010
#define INTF_VSYNC_PULSE_WIDTH_F0 0x014
#define INTF_VSYNC_PULSE_WIDTH_F1 0x018
#define INTF_DISPLAY_V_START_F0 0x01C
#define INTF_DISPLAY_V_START_F1 0x020
#define INTF_DISPLAY_V_END_F0 0x024
#define INTF_DISPLAY_V_END_F1 0x028
#define INTF_ACTIVE_V_START_F0 0x02C
#define INTF_ACTIVE_V_START_F1 0x030
#define INTF_ACTIVE_V_END_F0 0x034
#define INTF_ACTIVE_V_END_F1 0x038
#define INTF_DISPLAY_HCTL 0x03C
#define INTF_ACTIVE_HCTL 0x040
#define INTF_BORDER_COLOR 0x044
#define INTF_UNDERFLOW_COLOR 0x048
#define INTF_HSYNC_SKEW 0x04C
#define INTF_POLARITY_CTL 0x050
#define INTF_TEST_CTL 0x054
#define INTF_TP_COLOR0 0x058
#define INTF_TP_COLOR1 0x05C
#define INTF_CONFIG2 0x060
#define INTF_DISPLAY_DATA_HCTL 0x064
#define INTF_ACTIVE_DATA_HCTL 0x068

#define INTF_DSI_CMD_MODE_TRIGGER_EN 0x084
#define INTF_PANEL_FORMAT 0x090

#define INTF_FRAME_LINE_COUNT_EN 0x0A8
#define INTF_FRAME_COUNT 0x0AC
#define INTF_LINE_COUNT 0x0B0

#define INTF_DEFLICKER_CONFIG 0x0F0
#define INTF_DEFLICKER_STRNG_COEFF 0x0F4
#define INTF_DEFLICKER_WEAK_COEFF 0x0F8

#define INTF_TPG_ENABLE 0x100
#define INTF_TPG_MAIN_CONTROL 0x104
#define INTF_TPG_VIDEO_CONFIG 0x108
#define INTF_TPG_COMPONENT_LIMITS 0x10C
#define INTF_TPG_RECTANGLE 0x110
#define INTF_TPG_INITIAL_VALUE 0x114
#define INTF_TPG_BLK_WHITE_PATTERN_FRAMES 0x118
#define INTF_TPG_RGB_MAPPING 0x11C
#define INTF_PROG_FETCH_START 0x170
#define INTF_PROG_ROT_START 0x174

#define INTF_MISR_CTRL 0x180
#define INTF_MISR_SIGNATURE 0x184

#define INTF_MUX 0x25C
#define INTF_STATUS 0x26C
#define INTF_AVR_CONTROL 0x270
#define INTF_AVR_MODE 0x274
#define INTF_AVR_TRIGGER 0x278
#define INTF_AVR_VTOTAL 0x27C
#define INTF_TEAR_MDP_VSYNC_SEL 0x280
#define INTF_TEAR_TEAR_CHECK_EN 0x284
#define INTF_TEAR_SYNC_CONFIG_VSYNC 0x288
#define INTF_TEAR_SYNC_CONFIG_HEIGHT 0x28C
#define INTF_TEAR_SYNC_WRCOUNT 0x290
#define INTF_TEAR_VSYNC_INIT_VAL 0x294
#define INTF_TEAR_INT_COUNT_VAL 0x298
#define INTF_TEAR_SYNC_THRESH 0x29C
#define INTF_TEAR_START_POS 0x2A0
#define INTF_TEAR_RD_PTR_IRQ 0x2A4
#define INTF_TEAR_WR_PTR_IRQ 0x2A8
#define INTF_TEAR_OUT_LINE_COUNT 0x2AC
#define INTF_TEAR_LINE_COUNT 0x2B0
#define INTF_TEAR_AUTOREFRESH_CONFIG 0x2B4

#define INTF_CFG_ACTIVE_H_EN BIT(29)
#define INTF_CFG_ACTIVE_V_EN BIT(30)

#define INTF_CFG2_DATABUS_WIDEN BIT(0)
#define INTF_CFG2_DATA_HCTL_EN BIT(4)
#define INTF_CFG2_DCE_DATA_COMPRESS BIT(12)

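/*
 * dpu_hw_intf_setup_timing_engine - program the interface timing engine
 * @ctx: DPU intf structure
 * @p: timing parameters (sync widths, porches, resolution, polarities)
 * @fmt: pixel format driven on this interface
 *
 * Derives the sync, display and active windows from @p and writes them to
 * the INTF timing registers; DP interfaces get an additional active-window
 * adjustment.
 */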
static void dpu_hw_intf_setup_timing_engine(struct dpu_hw_intf *ctx,
                const struct dpu_hw_intf_timing_params *p,
                const struct dpu_format *fmt)
{
        struct dpu_hw_blk_reg_map *c = &ctx->hw;
        u32 hsync_period, vsync_period;
        u32 display_v_start, display_v_end;
        u32 hsync_start_x, hsync_end_x;
        u32 hsync_data_start_x, hsync_data_end_x;
        u32 active_h_start, active_h_end;
        u32 active_v_start, active_v_end;
        u32 active_hctl, display_hctl, hsync_ctl;
        u32 polarity_ctl, den_polarity;
        u32 panel_format;
        u32 intf_cfg, intf_cfg2 = 0;
        u32 display_data_hctl = 0, active_data_hctl = 0;
        u32 data_width;
        bool dp_intf = false;

        /* read interface_cfg */
        intf_cfg = DPU_REG_READ(c, INTF_CONFIG);

        if (ctx->cap->type == INTF_DP)
                dp_intf = true;

        hsync_period = p->hsync_pulse_width + p->h_back_porch + p->width +
                        p->h_front_porch;
        vsync_period = p->vsync_pulse_width + p->v_back_porch + p->height +
                        p->v_front_porch;

        display_v_start = ((p->vsync_pulse_width + p->v_back_porch) *
                        hsync_period) + p->hsync_skew;
        display_v_end = ((vsync_period - p->v_front_porch) * hsync_period) +
                        p->hsync_skew - 1;

        hsync_start_x = p->h_back_porch + p->hsync_pulse_width;
        hsync_end_x = hsync_period - p->h_front_porch - 1;

        if (p->width != p->xres) { /* border fill added */
                active_h_start = hsync_start_x;
                active_h_end = active_h_start + p->xres - 1;
        } else {
                active_h_start = 0;
                active_h_end = 0;
        }

        if (p->height != p->yres) { /* border fill added */
                active_v_start = display_v_start;
                active_v_end = active_v_start + (p->yres * hsync_period) - 1;
        } else {
                active_v_start = 0;
                active_v_end = 0;
        }

        if (active_h_end) {
                active_hctl = (active_h_end << 16) | active_h_start;
                intf_cfg |= INTF_CFG_ACTIVE_H_EN;
        } else {
                active_hctl = 0;
        }

        if (active_v_end)
                intf_cfg |= INTF_CFG_ACTIVE_V_EN;

        hsync_ctl = (hsync_period << 16) | p->hsync_pulse_width;
        display_hctl = (hsync_end_x << 16) | hsync_start_x;

        if (p->wide_bus_en)
                intf_cfg2 |= INTF_CFG2_DATABUS_WIDEN;

        data_width = p->width;

        hsync_data_start_x = hsync_start_x;
        hsync_data_end_x = hsync_start_x + data_width - 1;

        display_data_hctl = (hsync_data_end_x << 16) | hsync_data_start_x;

        if (dp_intf) {
                /* DP timing adjustment */
                display_v_start += p->hsync_pulse_width + p->h_back_porch;
                display_v_end -= p->h_front_porch;

                active_h_start = hsync_start_x;
                active_h_end = active_h_start + p->xres - 1;
                active_v_start = display_v_start;
                active_v_end = active_v_start + (p->yres * hsync_period) - 1;

                active_hctl = (active_h_end << 16) | active_h_start;
                display_hctl = active_hctl;

                intf_cfg |= INTF_CFG_ACTIVE_H_EN | INTF_CFG_ACTIVE_V_EN;
        }

        den_polarity = 0;
        polarity_ctl = (den_polarity << 2) | /* DEN Polarity */
                (p->vsync_polarity << 1) | /* VSYNC Polarity */
                (p->hsync_polarity << 0); /* HSYNC Polarity */

        if (!DPU_FORMAT_IS_YUV(fmt))
                panel_format = (fmt->bits[C0_G_Y] |
                                (fmt->bits[C1_B_Cb] << 2) |
                                (fmt->bits[C2_R_Cr] << 4) |
                                (0x21 << 8));
        else
                /* The interface treats all pixel data as RGB888 */
                panel_format = (COLOR_8BIT |
                                (COLOR_8BIT << 2) |
                                (COLOR_8BIT << 4) |
                                (0x21 << 8));

        DPU_REG_WRITE(c, INTF_HSYNC_CTL, hsync_ctl);
        DPU_REG_WRITE(c, INTF_VSYNC_PERIOD_F0, vsync_period * hsync_period);
        DPU_REG_WRITE(c, INTF_VSYNC_PULSE_WIDTH_F0,
                        p->vsync_pulse_width * hsync_period);
        DPU_REG_WRITE(c, INTF_DISPLAY_HCTL, display_hctl);
        DPU_REG_WRITE(c, INTF_DISPLAY_V_START_F0, display_v_start);
        DPU_REG_WRITE(c, INTF_DISPLAY_V_END_F0, display_v_end);
        DPU_REG_WRITE(c, INTF_ACTIVE_HCTL, active_hctl);
        DPU_REG_WRITE(c, INTF_ACTIVE_V_START_F0, active_v_start);
        DPU_REG_WRITE(c, INTF_ACTIVE_V_END_F0, active_v_end);
        DPU_REG_WRITE(c, INTF_BORDER_COLOR, p->border_clr);
        DPU_REG_WRITE(c, INTF_UNDERFLOW_COLOR, p->underflow_clr);
        DPU_REG_WRITE(c, INTF_HSYNC_SKEW, p->hsync_skew);
        DPU_REG_WRITE(c, INTF_POLARITY_CTL, polarity_ctl);
        DPU_REG_WRITE(c, INTF_FRAME_LINE_COUNT_EN, 0x3);
        DPU_REG_WRITE(c, INTF_CONFIG, intf_cfg);
        DPU_REG_WRITE(c, INTF_PANEL_FORMAT, panel_format);
        if (ctx->cap->features & BIT(DPU_DATA_HCTL_EN)) {
                /*
                 * DATA_HCTL_EN controls the data timing, which can differ
                 * from the video timing. It is recommended to enable it in
                 * all cases, except when compression is enabled in
                 * 1-pixel-per-clock mode.
                 */
                if (!(p->compression_en && !p->wide_bus_en))
                        intf_cfg2 |= INTF_CFG2_DATA_HCTL_EN;

                DPU_REG_WRITE(c, INTF_CONFIG2, intf_cfg2);
                DPU_REG_WRITE(c, INTF_DISPLAY_DATA_HCTL, display_data_hctl);
                DPU_REG_WRITE(c, INTF_ACTIVE_DATA_HCTL, active_data_hctl);
        }
}

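/*
 * dpu_hw_intf_enable_timing_engine - start or stop the interface timing engine
 * @intf: DPU intf structure
 * @enable: non-zero to enable the timing engine, zero to disable it
 */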
static void dpu_hw_intf_enable_timing_engine(
                struct dpu_hw_intf *intf,
                u8 enable)
{
        struct dpu_hw_blk_reg_map *c = &intf->hw;
        /* Note: Display interface select is handled in top block hw layer */
        DPU_REG_WRITE(c, INTF_TIMING_ENGINE_EN, enable != 0);
}

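/*
 * dpu_hw_intf_setup_prg_fetch - configure programmable fetch start
 * @intf: DPU intf structure
 * @fetch: programmable fetch configuration (enable flag and start point)
 */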
static void dpu_hw_intf_setup_prg_fetch(
                struct dpu_hw_intf *intf,
                const struct dpu_hw_intf_prog_fetch *fetch)
{
        struct dpu_hw_blk_reg_map *c = &intf->hw;
        int fetch_enable;

        /*
         * Fetch should always be outside the active lines. If the fetch is
         * programmed within the active region, hardware behavior is undefined.
         */

        fetch_enable = DPU_REG_READ(c, INTF_CONFIG);
        if (fetch->enable) {
                fetch_enable |= BIT(31);
                DPU_REG_WRITE(c, INTF_PROG_FETCH_START,
                                fetch->fetch_start);
        } else {
                fetch_enable &= ~BIT(31);
        }

        DPU_REG_WRITE(c, INTF_CONFIG, fetch_enable);
}

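/*
 * dpu_hw_intf_bind_pingpong_blk - select the PINGPONG block feeding this intf
 * @intf: DPU intf structure
 * @pp: PINGPONG block to bind, or 0 to select none (mux set to 0xf)
 */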
static void dpu_hw_intf_bind_pingpong_blk(
                struct dpu_hw_intf *intf,
                const enum dpu_pingpong pp)
{
        struct dpu_hw_blk_reg_map *c = &intf->hw;
        u32 mux_cfg;

        mux_cfg = DPU_REG_READ(c, INTF_MUX);
        mux_cfg &= ~0xf;

        if (pp)
                mux_cfg |= (pp - PINGPONG_0) & 0x7;
        else
                mux_cfg |= 0xf;

        DPU_REG_WRITE(c, INTF_MUX, mux_cfg);
}

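/*
 * dpu_hw_intf_get_status - read back the current interface status
 * @intf: DPU intf structure
 * @s: Used to return the enable state, programmable fetch state and
 *     frame/line counters
 */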
static void dpu_hw_intf_get_status(
                struct dpu_hw_intf *intf,
                struct dpu_hw_intf_status *s)
{
        struct dpu_hw_blk_reg_map *c = &intf->hw;
        unsigned long cap = intf->cap->features;

        if (cap & BIT(DPU_INTF_STATUS_SUPPORTED))
                s->is_en = DPU_REG_READ(c, INTF_STATUS) & BIT(0);
        else
                s->is_en = DPU_REG_READ(c, INTF_TIMING_ENGINE_EN);

        s->is_prog_fetch_en = !!(DPU_REG_READ(c, INTF_CONFIG) & BIT(31));
        if (s->is_en) {
                s->frame_count = DPU_REG_READ(c, INTF_FRAME_COUNT);
                s->line_count = DPU_REG_READ(c, INTF_LINE_COUNT);
        } else {
                s->line_count = 0;
                s->frame_count = 0;
        }
}

static u32 dpu_hw_intf_get_line_count(struct dpu_hw_intf *intf)
{
        struct dpu_hw_blk_reg_map *c;

        if (!intf)
                return 0;

        c = &intf->hw;

        return DPU_REG_READ(c, INTF_LINE_COUNT);
}

static void dpu_hw_intf_setup_misr(struct dpu_hw_intf *intf)
{
        dpu_hw_setup_misr(&intf->hw, INTF_MISR_CTRL, 0x1);
}

static int dpu_hw_intf_collect_misr(struct dpu_hw_intf *intf, u32 *misr_value)
{
        return dpu_hw_collect_misr(&intf->hw, INTF_MISR_CTRL, INTF_MISR_SIGNATURE, misr_value);
}

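/*
 * dpu_hw_intf_enable_te - configure and enable tear check on the interface
 * @intf: DPU intf structure
 * @te: tear check configuration
 *
 * Returns: 0 on success, -EINVAL if @intf is NULL.
 */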
static int dpu_hw_intf_enable_te(struct dpu_hw_intf *intf,
                struct dpu_hw_tear_check *te)
{
        struct dpu_hw_blk_reg_map *c;
        int cfg;

        if (!intf)
                return -EINVAL;

        c = &intf->hw;

        cfg = BIT(19); /* VSYNC_COUNTER_EN */
        if (te->hw_vsync_mode)
                cfg |= BIT(20);

        cfg |= te->vsync_count;

        DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_VSYNC, cfg);
        DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_HEIGHT, te->sync_cfg_height);
        DPU_REG_WRITE(c, INTF_TEAR_VSYNC_INIT_VAL, te->vsync_init_val);
        DPU_REG_WRITE(c, INTF_TEAR_RD_PTR_IRQ, te->rd_ptr_irq);
        DPU_REG_WRITE(c, INTF_TEAR_START_POS, te->start_pos);
        DPU_REG_WRITE(c, INTF_TEAR_SYNC_THRESH,
                        ((te->sync_threshold_continue << 16) |
                         te->sync_threshold_start));
        DPU_REG_WRITE(c, INTF_TEAR_SYNC_WRCOUNT,
                        (te->start_pos + te->sync_threshold_start + 1));

        DPU_REG_WRITE(c, INTF_TEAR_TEAR_CHECK_EN, 1);

        return 0;
}

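/*
 * dpu_hw_intf_setup_autorefresh_config - enable or disable autorefresh
 * @intf: DPU intf structure
 * @frame_count: autorefresh frame interval, applied when enabling
 * @enable: true to enable autorefresh, false to disable it
 */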
static void dpu_hw_intf_setup_autorefresh_config(struct dpu_hw_intf *intf,
                u32 frame_count, bool enable)
{
        struct dpu_hw_blk_reg_map *c;
        u32 refresh_cfg;

        c = &intf->hw;
        refresh_cfg = DPU_REG_READ(c, INTF_TEAR_AUTOREFRESH_CONFIG);
        if (enable)
                refresh_cfg = BIT(31) | frame_count;
        else
                refresh_cfg &= ~BIT(31);

        DPU_REG_WRITE(c, INTF_TEAR_AUTOREFRESH_CONFIG, refresh_cfg);
}

/*
 * dpu_hw_intf_get_autorefresh_config - Get autorefresh config from HW
 * @intf: DPU intf structure
 * @frame_count: Used to return the current frame count from hw
 *
 * Returns: True if autorefresh enabled, false if disabled.
 */
static bool dpu_hw_intf_get_autorefresh_config(struct dpu_hw_intf *intf,
                u32 *frame_count)
{
        u32 val = DPU_REG_READ(&intf->hw, INTF_TEAR_AUTOREFRESH_CONFIG);

        if (frame_count != NULL)
                *frame_count = val & 0xffff;
        return !!((val & BIT(31)) >> 31);
}

static int dpu_hw_intf_disable_te(struct dpu_hw_intf *intf)
{
        struct dpu_hw_blk_reg_map *c;

        if (!intf)
                return -EINVAL;

        c = &intf->hw;
        DPU_REG_WRITE(c, INTF_TEAR_TEAR_CHECK_EN, 0);
        return 0;
}

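/*
 * dpu_hw_intf_connect_external_te - switch tear check to/from the external TE
 * @intf: DPU intf structure
 * @enable_external_te: true to use the external (hw vsync) TE source
 *
 * Returns: the previous external TE enable state, or -EINVAL if @intf is NULL.
 */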
static int dpu_hw_intf_connect_external_te(struct dpu_hw_intf *intf,
                bool enable_external_te)
{
        struct dpu_hw_blk_reg_map *c = &intf->hw;
        u32 cfg;
        int orig;

        if (!intf)
                return -EINVAL;

        c = &intf->hw;
        cfg = DPU_REG_READ(c, INTF_TEAR_SYNC_CONFIG_VSYNC);
        orig = (bool)(cfg & BIT(20));
        if (enable_external_te)
                cfg |= BIT(20);
        else
                cfg &= ~BIT(20);
        DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_VSYNC, cfg);
        trace_dpu_intf_connect_ext_te(intf->idx - INTF_0, cfg);

        return orig;
}

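/*
 * dpu_hw_intf_get_vsync_info - read the tear check read/write pointer state
 * @intf: DPU intf structure
 * @info: Used to return the read/write pointer counters and frame count
 *
 * Returns: 0 on success, -EINVAL if @intf or @info is NULL.
 */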
static int dpu_hw_intf_get_vsync_info(struct dpu_hw_intf *intf,
                struct dpu_hw_pp_vsync_info *info)
{
        struct dpu_hw_blk_reg_map *c = &intf->hw;
        u32 val;

        if (!intf || !info)
                return -EINVAL;

        c = &intf->hw;

        val = DPU_REG_READ(c, INTF_TEAR_VSYNC_INIT_VAL);
        info->rd_ptr_init_val = val & 0xffff;

        val = DPU_REG_READ(c, INTF_TEAR_INT_COUNT_VAL);
        info->rd_ptr_frame_count = (val & 0xffff0000) >> 16;
        info->rd_ptr_line_count = val & 0xffff;

        val = DPU_REG_READ(c, INTF_TEAR_LINE_COUNT);
        info->wr_ptr_line_count = val & 0xffff;

        val = DPU_REG_READ(c, INTF_FRAME_COUNT);
        info->intf_frame_count = val;

        return 0;
}

static void dpu_hw_intf_vsync_sel(struct dpu_hw_intf *intf,
                u32 vsync_source)
{
        struct dpu_hw_blk_reg_map *c;

        if (!intf)
                return;

        c = &intf->hw;

        DPU_REG_WRITE(c, INTF_TEAR_MDP_VSYNC_SEL, (vsync_source & 0xf));
}

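/*
 * dpu_hw_intf_disable_autorefresh - disable autorefresh if it is enabled
 * @intf: DPU intf structure
 * @encoder_id: encoder id, used only for logging
 * @vdisplay: vertical display size, used to bound the write pointer poll
 */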
static void dpu_hw_intf_disable_autorefresh(struct dpu_hw_intf *intf,
                uint32_t encoder_id, u16 vdisplay)
{
        struct dpu_hw_pp_vsync_info info;
        int trial = 0;

        /* If autorefresh is already disabled, we have nothing to do */
        if (!dpu_hw_intf_get_autorefresh_config(intf, NULL))
                return;

        /*
         * If autorefresh is enabled, disable it and make sure it is safe to
         * proceed with the current frame commit/push. The sequence followed is:
         * 1. Disable TE
         * 2. Disable the autorefresh config
         * 3. Poll until the frame transfer is no longer ongoing
         * 4. Re-enable TE
         */

        dpu_hw_intf_connect_external_te(intf, false);
        dpu_hw_intf_setup_autorefresh_config(intf, 0, false);

        do {
                udelay(DPU_ENC_MAX_POLL_TIMEOUT_US);
                if ((trial * DPU_ENC_MAX_POLL_TIMEOUT_US)
                                > (KICKOFF_TIMEOUT_MS * USEC_PER_MSEC)) {
                        DPU_ERROR("enc%d intf%d disable autorefresh failed\n",
                                        encoder_id, intf->idx - INTF_0);
                        break;
                }

                trial++;

                dpu_hw_intf_get_vsync_info(intf, &info);
        } while (info.wr_ptr_line_count > 0 &&
                        info.wr_ptr_line_count < vdisplay);

        dpu_hw_intf_connect_external_te(intf, true);

        DPU_DEBUG("enc%d intf%d disabled autorefresh\n",
                        encoder_id, intf->idx - INTF_0);
}

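/*
 * dpu_hw_intf_program_intf_cmd_cfg - program command mode configuration
 * @ctx: DPU intf structure
 * @cmd_mode_cfg: command mode configuration (e.g. DCE data compression)
 */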
static void dpu_hw_intf_program_intf_cmd_cfg(struct dpu_hw_intf *ctx,
                struct dpu_hw_intf_cmd_mode_cfg *cmd_mode_cfg)
{
        u32 intf_cfg2 = DPU_REG_READ(&ctx->hw, INTF_CONFIG2);

        if (cmd_mode_cfg->data_compress)
                intf_cfg2 |= INTF_CFG2_DCE_DATA_COMPRESS;

        DPU_REG_WRITE(&ctx->hw, INTF_CONFIG2, intf_cfg2);
}

static void _setup_intf_ops(struct dpu_hw_intf_ops *ops,
                unsigned long cap, const struct dpu_mdss_version *mdss_rev)
{
        ops->setup_timing_gen = dpu_hw_intf_setup_timing_engine;
        ops->setup_prg_fetch = dpu_hw_intf_setup_prg_fetch;
        ops->get_status = dpu_hw_intf_get_status;
        ops->enable_timing = dpu_hw_intf_enable_timing_engine;
        ops->get_line_count = dpu_hw_intf_get_line_count;
        if (cap & BIT(DPU_INTF_INPUT_CTRL))
                ops->bind_pingpong_blk = dpu_hw_intf_bind_pingpong_blk;
        ops->setup_misr = dpu_hw_intf_setup_misr;
        ops->collect_misr = dpu_hw_intf_collect_misr;

        if (cap & BIT(DPU_INTF_TE)) {
                ops->enable_tearcheck = dpu_hw_intf_enable_te;
                ops->disable_tearcheck = dpu_hw_intf_disable_te;
                ops->connect_external_te = dpu_hw_intf_connect_external_te;
                ops->vsync_sel = dpu_hw_intf_vsync_sel;
                ops->disable_autorefresh = dpu_hw_intf_disable_autorefresh;
        }

        if (mdss_rev->core_major_ver >= 7)
                ops->program_intf_cmd_cfg = dpu_hw_intf_program_intf_cmd_cfg;
}

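/*
 * dpu_hw_intf_init - initialize an INTF hardware block
 * @cfg: interface catalog entry for this INTF
 * @addr: mapped register I/O address, to which @cfg->base is added
 * @mdss_rev: MDSS hardware revision, used to gate revision-specific ops
 *
 * Returns: the allocated dpu_hw_intf, NULL for an INTF_NONE entry, or an
 * ERR_PTR on allocation failure. Free with dpu_hw_intf_destroy().
 */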
struct dpu_hw_intf *dpu_hw_intf_init(const struct dpu_intf_cfg *cfg,
                void __iomem *addr, const struct dpu_mdss_version *mdss_rev)
{
        struct dpu_hw_intf *c;

        if (cfg->type == INTF_NONE) {
                DPU_DEBUG("Skip intf %d with type NONE\n", cfg->id - INTF_0);
                return NULL;
        }

        c = kzalloc(sizeof(*c), GFP_KERNEL);
        if (!c)
                return ERR_PTR(-ENOMEM);

        c->hw.blk_addr = addr + cfg->base;
        c->hw.log_mask = DPU_DBG_MASK_INTF;

        /*
         * Assign ops
         */
        c->idx = cfg->id;
        c->cap = cfg;
        _setup_intf_ops(&c->ops, c->cap->features, mdss_rev);

        return c;
}

void dpu_hw_intf_destroy(struct dpu_hw_intf *intf)
{
        kfree(intf);
}
