1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * R-Car Display Unit CRTCs
4  *
5  * Copyright (C) 2013-2015 Renesas Electronics Corporation
6  *
7  * Contact: Laurent Pinchart (laurent.pinchart@ideasonboard.com)
8  */
9 
10 #include <linux/clk.h>
11 #include <linux/mutex.h>
12 #include <linux/platform_device.h>
13 
14 #include <drm/drm_atomic.h>
15 #include <drm/drm_atomic_helper.h>
16 #include <drm/drm_bridge.h>
17 #include <drm/drm_crtc.h>
18 #include <drm/drm_device.h>
19 #include <drm/drm_gem_dma_helper.h>
20 #include <drm/drm_vblank.h>
21 
22 #include "rcar_cmm.h"
23 #include "rcar_du_crtc.h"
24 #include "rcar_du_drv.h"
25 #include "rcar_du_encoder.h"
26 #include "rcar_du_kms.h"
27 #include "rcar_du_plane.h"
28 #include "rcar_du_regs.h"
29 #include "rcar_du_vsp.h"
30 #include "rcar_lvds.h"
31 #include "rcar_mipi_dsi.h"
32 
33 static u32 rcar_du_crtc_read(struct rcar_du_crtc *rcrtc, u32 reg)
34 {
35 	struct rcar_du_device *rcdu = rcrtc->dev;
36 
37 	return rcar_du_read(rcdu, rcrtc->mmio_offset + reg);
38 }
39 
40 static void rcar_du_crtc_write(struct rcar_du_crtc *rcrtc, u32 reg, u32 data)
41 {
42 	struct rcar_du_device *rcdu = rcrtc->dev;
43 
44 	rcar_du_write(rcdu, rcrtc->mmio_offset + reg, data);
45 }
46 
47 static void rcar_du_crtc_clr(struct rcar_du_crtc *rcrtc, u32 reg, u32 clr)
48 {
49 	struct rcar_du_device *rcdu = rcrtc->dev;
50 
51 	rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
52 		      rcar_du_read(rcdu, rcrtc->mmio_offset + reg) & ~clr);
53 }
54 
55 static void rcar_du_crtc_set(struct rcar_du_crtc *rcrtc, u32 reg, u32 set)
56 {
57 	struct rcar_du_device *rcdu = rcrtc->dev;
58 
59 	rcar_du_write(rcdu, rcrtc->mmio_offset + reg,
60 		      rcar_du_read(rcdu, rcrtc->mmio_offset + reg) | set);
61 }
62 
63 void rcar_du_crtc_dsysr_clr_set(struct rcar_du_crtc *rcrtc, u32 clr, u32 set)
64 {
65 	struct rcar_du_device *rcdu = rcrtc->dev;
66 
67 	rcrtc->dsysr = (rcrtc->dsysr & ~clr) | set;
68 	rcar_du_write(rcdu, rcrtc->mmio_offset + DSYSR, rcrtc->dsysr);
69 }
70 
71 /* -----------------------------------------------------------------------------
72  * Hardware Setup
73  */
74 
75 struct dpll_info {
76 	unsigned int output;
77 	unsigned int fdpll;
78 	unsigned int n;
79 	unsigned int m;
80 };
81 
82 static void rcar_du_dpll_divider(struct rcar_du_crtc *rcrtc,
83 				 struct dpll_info *dpll,
84 				 unsigned long input,
85 				 unsigned long target)
86 {
87 	unsigned long best_diff = (unsigned long)-1;
88 	unsigned long diff;
89 	unsigned int fdpll;
90 	unsigned int m;
91 	unsigned int n;
92 
93 	/*
94 	 *   fin                                 fvco        fout       fclkout
95 	 * in --> [1/M] --> |PD| -> [LPF] -> [VCO] -> [1/P] -+-> [1/FDPLL] -> out
96 	 *              +-> |  |                             |
97 	 *              |                                    |
98 	 *              +---------------- [1/N] <------------+
99 	 *
100 	 *	fclkout = fvco / P / FDPLL -- (1)
101 	 *
102 	 * fin/M = fvco/P/N
103 	 *
104 	 *	fvco = fin * P *  N / M -- (2)
105 	 *
106 	 * (1) + (2) indicates
107 	 *
108 	 *	fclkout = fin * N / M / FDPLL
109 	 *
110 	 * NOTES
111 	 *	N	: (n + 1)
112 	 *	M	: (m + 1)
113 	 *	FDPLL	: (fdpll + 1)
114 	 *	P	: 2
115 	 *	2kHz < fvco < 4096MHz
116 	 *
117 	 * To minimize the jitter,
118 	 * N : as large as possible
119 	 * M : as small as possible
120 	 */
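	/*
	 * Illustrative example (made-up numbers, not taken from any
	 * datasheet): with fin = 30 MHz, M = 1 (m = 0), N = 50 (n = 49) and
	 * FDPLL = 10 (fdpll = 9), fout = 30 MHz * 50 / 1 = 1500 MHz and
	 * fclkout = 1500 MHz / 10 = 150 MHz. Any such exact match ends the
	 * search early through the diff == 0 path below.
	 */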
121 	for (m = 0; m < 4; m++) {
122 		for (n = 119; n > 38; n--) {
123 			/*
124 			 * This code only runs on 64-bit architectures, the
125 			 * unsigned long type can thus be used for 64-bit
126 			 * computation. It will still compile without any
127 			 * warning on 32-bit architectures.
128 			 *
129 			 * To optimize calculations, use fout instead of fvco
130 			 * to verify the VCO frequency constraint.
131 			 */
132 			unsigned long fout = input * (n + 1) / (m + 1);
133 
134 			if (fout < 1000 || fout > 2048 * 1000 * 1000U)
135 				continue;
136 
137 			for (fdpll = 1; fdpll < 32; fdpll++) {
138 				unsigned long output;
139 
140 				output = fout / (fdpll + 1);
141 				if (output >= 400 * 1000 * 1000)
142 					continue;
143 
144 				diff = abs((long)output - (long)target);
145 				if (best_diff > diff) {
146 					best_diff = diff;
147 					dpll->n = n;
148 					dpll->m = m;
149 					dpll->fdpll = fdpll;
150 					dpll->output = output;
151 				}
152 
153 				if (diff == 0)
154 					goto done;
155 			}
156 		}
157 	}
158 
159 done:
160 	dev_dbg(rcrtc->dev->dev,
161 		"output:%u, fdpll:%u, n:%u, m:%u, diff:%lu\n",
162 		 dpll->output, dpll->fdpll, dpll->n, dpll->m, best_diff);
163 }
164 
165 struct du_clk_params {
166 	struct clk *clk;
167 	unsigned long rate;
168 	unsigned long diff;
169 	u32 escr;
170 };
171 
172 static void rcar_du_escr_divider(struct clk *clk, unsigned long target,
173 				 u32 escr, struct du_clk_params *params)
174 {
175 	unsigned long rate;
176 	unsigned long diff;
177 	u32 div;
178 
179 	/*
180 	 * If the target rate has already been achieved perfectly we can't do
181 	 * better.
182 	 */
183 	if (params->diff == 0)
184 		return;
185 
186 	/*
187 	 * Compute the input clock rate and internal divisor values to obtain
188 	 * the clock rate closest to the target frequency.
189 	 */
190 	rate = clk_round_rate(clk, target);
191 	div = clamp(DIV_ROUND_CLOSEST(rate, target), 1UL, 64UL) - 1;
192 	diff = abs(rate / (div + 1) - target);
193 
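	/*
	 * Worked example with illustrative numbers: if clk_round_rate()
	 * returns 148500000 Hz for a 74250000 Hz target,
	 * DIV_ROUND_CLOSEST(rate, target) is 2, div becomes 1 and the
	 * divided rate is exactly 74.25 MHz, so diff is 0.
	 */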
194 	/*
195 	 * Store the parameters if the resulting frequency is better than any
196 	 * previously calculated value.
197 	 */
198 	if (diff < params->diff) {
199 		params->clk = clk;
200 		params->rate = rate;
201 		params->diff = diff;
202 		params->escr = escr | div;
203 	}
204 }
205 
206 static void rcar_du_crtc_set_display_timing(struct rcar_du_crtc *rcrtc)
207 {
208 	const struct drm_display_mode *mode = &rcrtc->crtc.state->adjusted_mode;
209 	struct rcar_du_device *rcdu = rcrtc->dev;
210 	unsigned long mode_clock = mode->clock * 1000;
211 	unsigned int hdse_offset;
212 	u32 dsmr;
213 	u32 escr;
214 
215 	if (rcdu->info->dpll_mask & (1 << rcrtc->index)) {
216 		unsigned long target = mode_clock;
217 		struct dpll_info dpll = { 0 };
218 		unsigned long extclk;
219 		u32 dpllcr;
220 		u32 div = 0;
221 
222 		/*
223 		 * DU channels that have a display PLL can't use the internal
224 		 * system clock, and have no internal clock divider.
225 		 */
226 		extclk = clk_get_rate(rcrtc->extclock);
227 		rcar_du_dpll_divider(rcrtc, &dpll, extclk, target);
228 
229 		dpllcr = DPLLCR_CODE | DPLLCR_CLKE
230 		       | DPLLCR_FDPLL(dpll.fdpll)
231 		       | DPLLCR_N(dpll.n) | DPLLCR_M(dpll.m)
232 		       | DPLLCR_STBY;
233 
234 		if (rcrtc->index == 1)
235 			dpllcr |= DPLLCR_PLCS1
236 			       |  DPLLCR_INCS_DOTCLKIN1;
237 		else
238 			dpllcr |= DPLLCR_PLCS0
239 			       |  DPLLCR_INCS_DOTCLKIN0;
240 
241 		rcar_du_group_write(rcrtc->group, DPLLCR, dpllcr);
242 
243 		escr = ESCR_DCLKSEL_DCLKIN | div;
244 	} else if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index) ||
245 		   rcdu->info->dsi_clk_mask & BIT(rcrtc->index)) {
246 		/*
247 		 * Use the external LVDS or DSI PLL output as the dot clock when
248 		 * outputting to the LVDS or DSI encoder on an SoC that supports
249 		 * this clock routing option. We use the clock directly in that
250 		 * case, without any additional divider.
251 		 */
252 		escr = ESCR_DCLKSEL_DCLKIN;
253 	} else {
254 		struct du_clk_params params = { .diff = (unsigned long)-1 };
255 
256 		rcar_du_escr_divider(rcrtc->clock, mode_clock,
257 				     ESCR_DCLKSEL_CLKS, &params);
258 		if (rcrtc->extclock)
259 			rcar_du_escr_divider(rcrtc->extclock, mode_clock,
260 					     ESCR_DCLKSEL_DCLKIN, &params);
261 
262 		dev_dbg(rcrtc->dev->dev, "mode clock %lu %s rate %lu\n",
263 			mode_clock, params.clk == rcrtc->clock ? "cpg" : "ext",
264 			params.rate);
265 
266 		clk_set_rate(params.clk, params.rate);
267 		escr = params.escr;
268 	}
269 
270 	/*
271 	 * The ESCR register only exists in DU channels that can output to an
272  * LVDS or DPAD, and the OTAR register in DU channels that can output
273 	 * to a DPAD.
274 	 */
275 	if ((rcdu->info->routes[RCAR_DU_OUTPUT_DPAD0].possible_crtcs |
276 	     rcdu->info->routes[RCAR_DU_OUTPUT_DPAD1].possible_crtcs |
277 	     rcdu->info->routes[RCAR_DU_OUTPUT_LVDS0].possible_crtcs |
278 	     rcdu->info->routes[RCAR_DU_OUTPUT_LVDS1].possible_crtcs) &
279 	    BIT(rcrtc->index)) {
280 		dev_dbg(rcrtc->dev->dev, "%s: ESCR 0x%08x\n", __func__, escr);
281 
282 		rcar_du_crtc_write(rcrtc, rcrtc->index % 2 ? ESCR13 : ESCR02, escr);
283 	}
284 
285 	if ((rcdu->info->routes[RCAR_DU_OUTPUT_DPAD0].possible_crtcs |
286 	     rcdu->info->routes[RCAR_DU_OUTPUT_DPAD1].possible_crtcs) &
287 	    BIT(rcrtc->index))
288 		rcar_du_crtc_write(rcrtc, rcrtc->index % 2 ? OTAR13 : OTAR02, 0);
289 
290 	/* Signal polarities */
291 	dsmr = ((mode->flags & DRM_MODE_FLAG_PVSYNC) ? DSMR_VSL : 0)
292 	     | ((mode->flags & DRM_MODE_FLAG_PHSYNC) ? DSMR_HSL : 0)
293 	     | ((mode->flags & DRM_MODE_FLAG_INTERLACE) ? DSMR_ODEV : 0)
294 	     | DSMR_DIPM_DISP | DSMR_CSPM;
295 	rcar_du_crtc_write(rcrtc, DSMR, dsmr);
296 
297 	/*
298 	 * When the CMM is enabled, an additional offset of 25 pixels must be
299 	 * subtracted from the HDS (horizontal display start) and HDE
300 	 * (horizontal display end) registers.
301 	 */
302 	hdse_offset = 19;
303 	if (rcrtc->group->cmms_mask & BIT(rcrtc->index % 2))
304 		hdse_offset += 25;
305 
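	/*
	 * Example with a hypothetical 1920x1080 mode (htotal 2200,
	 * hsync_start 2008, hsync_end 2052) and no CMM: HDSR = 2200 - 2008 -
	 * 19 = 173, HDER = 173 + 1920 = 2093, HSWR = 2052 - 2008 - 1 = 43 and
	 * HCR = 2199.
	 */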
306 	/* Display timings */
307 	rcar_du_crtc_write(rcrtc, HDSR, mode->htotal - mode->hsync_start -
308 					hdse_offset);
309 	rcar_du_crtc_write(rcrtc, HDER, mode->htotal - mode->hsync_start +
310 					mode->hdisplay - hdse_offset);
311 	rcar_du_crtc_write(rcrtc, HSWR, mode->hsync_end -
312 					mode->hsync_start - 1);
313 	rcar_du_crtc_write(rcrtc, HCR,  mode->htotal - 1);
314 
315 	rcar_du_crtc_write(rcrtc, VDSR, mode->crtc_vtotal -
316 					mode->crtc_vsync_end - 2);
317 	rcar_du_crtc_write(rcrtc, VDER, mode->crtc_vtotal -
318 					mode->crtc_vsync_end +
319 					mode->crtc_vdisplay - 2);
320 	rcar_du_crtc_write(rcrtc, VSPR, mode->crtc_vtotal -
321 					mode->crtc_vsync_end +
322 					mode->crtc_vsync_start - 1);
323 	rcar_du_crtc_write(rcrtc, VCR,  mode->crtc_vtotal - 1);
324 
325 	rcar_du_crtc_write(rcrtc, DESR,  mode->htotal - mode->hsync_start - 1);
326 	rcar_du_crtc_write(rcrtc, DEWR,  mode->hdisplay);
327 }
328 
329 static unsigned int plane_zpos(struct rcar_du_plane *plane)
330 {
331 	return plane->plane.state->normalized_zpos;
332 }
333 
334 static const struct rcar_du_format_info *
335 plane_format(struct rcar_du_plane *plane)
336 {
337 	return to_rcar_plane_state(plane->plane.state)->format;
338 }
339 
340 static void rcar_du_crtc_update_planes(struct rcar_du_crtc *rcrtc)
341 {
342 	struct rcar_du_plane *planes[RCAR_DU_NUM_HW_PLANES];
343 	struct rcar_du_device *rcdu = rcrtc->dev;
344 	unsigned int num_planes = 0;
345 	unsigned int dptsr_planes;
346 	unsigned int hwplanes = 0;
347 	unsigned int prio = 0;
348 	unsigned int i;
349 	u32 dspr = 0;
350 
351 	for (i = 0; i < rcrtc->group->num_planes; ++i) {
352 		struct rcar_du_plane *plane = &rcrtc->group->planes[i];
353 		unsigned int j;
354 
355 		if (plane->plane.state->crtc != &rcrtc->crtc ||
356 		    !plane->plane.state->visible)
357 			continue;
358 
359 		/* Insert the plane in the sorted planes array. */
360 		for (j = num_planes++; j > 0; --j) {
361 			if (plane_zpos(planes[j-1]) <= plane_zpos(plane))
362 				break;
363 			planes[j] = planes[j-1];
364 		}
365 
366 		planes[j] = plane;
367 		prio += plane_format(plane)->planes * 4;
368 	}
369 
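	/*
	 * Illustrative example (hypothetical setup): with two single-plane
	 * formats assigned to hardware planes 0 and 2, prio starts at 8 and
	 * the loop below yields dspr = ((0 + 1) << 4) | ((2 + 1) << 0) = 0x13
	 * and hwplanes = 0x05: each 4-bit DSPR field holds a one-based
	 * hardware plane index, filled in zpos order.
	 */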
370 	for (i = 0; i < num_planes; ++i) {
371 		struct rcar_du_plane *plane = planes[i];
372 		struct drm_plane_state *state = plane->plane.state;
373 		unsigned int index = to_rcar_plane_state(state)->hwindex;
374 
375 		prio -= 4;
376 		dspr |= (index + 1) << prio;
377 		hwplanes |= 1 << index;
378 
379 		if (plane_format(plane)->planes == 2) {
380 			index = (index + 1) % 8;
381 
382 			prio -= 4;
383 			dspr |= (index + 1) << prio;
384 			hwplanes |= 1 << index;
385 		}
386 	}
387 
388 	/* If VSP+DU integration is enabled the plane assignment is fixed. */
389 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE)) {
390 		if (rcdu->info->gen < 3) {
391 			dspr = (rcrtc->index % 2) + 1;
392 			hwplanes = 1 << (rcrtc->index % 2);
393 		} else {
394 			dspr = (rcrtc->index % 2) ? 3 : 1;
395 			hwplanes = 1 << ((rcrtc->index % 2) ? 2 : 0);
396 		}
397 	}
398 
399 	/*
400 	 * Update the planes to display timing and dot clock generator
401 	 * associations.
402 	 *
403 	 * Updating the DPTSR register requires restarting the CRTC group,
404 	 * resulting in visible flicker. To mitigate the issue only update the
405 	 * association if needed by enabled planes. Planes being disabled will
406 	 * keep their current association.
407 	 */
408 	mutex_lock(&rcrtc->group->lock);
409 
410 	dptsr_planes = rcrtc->index % 2 ? rcrtc->group->dptsr_planes | hwplanes
411 		     : rcrtc->group->dptsr_planes & ~hwplanes;
412 
413 	if (dptsr_planes != rcrtc->group->dptsr_planes) {
414 		rcar_du_group_write(rcrtc->group, DPTSR,
415 				    (dptsr_planes << 16) | dptsr_planes);
416 		rcrtc->group->dptsr_planes = dptsr_planes;
417 
418 		if (rcrtc->group->used_crtcs)
419 			rcar_du_group_restart(rcrtc->group);
420 	}
421 
422 	/* Restart the group if plane sources have changed. */
423 	if (rcrtc->group->need_restart)
424 		rcar_du_group_restart(rcrtc->group);
425 
426 	mutex_unlock(&rcrtc->group->lock);
427 
428 	rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR,
429 			    dspr);
430 }
431 
432 /* -----------------------------------------------------------------------------
433  * Page Flip
434  */
435 
436 void rcar_du_crtc_finish_page_flip(struct rcar_du_crtc *rcrtc)
437 {
438 	struct drm_pending_vblank_event *event;
439 	struct drm_device *dev = rcrtc->crtc.dev;
440 	unsigned long flags;
441 
442 	spin_lock_irqsave(&dev->event_lock, flags);
443 	event = rcrtc->event;
444 	rcrtc->event = NULL;
445 	spin_unlock_irqrestore(&dev->event_lock, flags);
446 
447 	if (event == NULL)
448 		return;
449 
450 	spin_lock_irqsave(&dev->event_lock, flags);
451 	drm_crtc_send_vblank_event(&rcrtc->crtc, event);
452 	wake_up(&rcrtc->flip_wait);
453 	spin_unlock_irqrestore(&dev->event_lock, flags);
454 
455 	drm_crtc_vblank_put(&rcrtc->crtc);
456 }
457 
458 static bool rcar_du_crtc_page_flip_pending(struct rcar_du_crtc *rcrtc)
459 {
460 	struct drm_device *dev = rcrtc->crtc.dev;
461 	unsigned long flags;
462 	bool pending;
463 
464 	spin_lock_irqsave(&dev->event_lock, flags);
465 	pending = rcrtc->event != NULL;
466 	spin_unlock_irqrestore(&dev->event_lock, flags);
467 
468 	return pending;
469 }
470 
471 static void rcar_du_crtc_wait_page_flip(struct rcar_du_crtc *rcrtc)
472 {
473 	struct rcar_du_device *rcdu = rcrtc->dev;
474 
475 	if (wait_event_timeout(rcrtc->flip_wait,
476 			       !rcar_du_crtc_page_flip_pending(rcrtc),
477 			       msecs_to_jiffies(50)))
478 		return;
479 
480 	dev_warn(rcdu->dev, "page flip timeout\n");
481 
482 	rcar_du_crtc_finish_page_flip(rcrtc);
483 }
484 
485 /* -----------------------------------------------------------------------------
486  * Color Management Module (CMM)
487  */
488 
489 static int rcar_du_cmm_check(struct drm_crtc *crtc,
490 			     struct drm_crtc_state *state)
491 {
492 	struct drm_property_blob *drm_lut = state->gamma_lut;
493 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
494 	struct device *dev = rcrtc->dev->dev;
495 
496 	if (!drm_lut)
497 		return 0;
498 
499 	/* We only accept fully populated LUT tables. */
500 	if (drm_color_lut_size(drm_lut) != CM2_LUT_SIZE) {
501 		dev_err(dev, "invalid gamma lut size: %zu bytes\n",
502 			drm_lut->length);
503 		return -EINVAL;
504 	}
505 
506 	return 0;
507 }
508 
509 static void rcar_du_cmm_setup(struct drm_crtc *crtc)
510 {
511 	struct drm_property_blob *drm_lut = crtc->state->gamma_lut;
512 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
513 	struct rcar_cmm_config cmm_config = {};
514 
515 	if (!rcrtc->cmm)
516 		return;
517 
518 	if (drm_lut)
519 		cmm_config.lut.table = (struct drm_color_lut *)drm_lut->data;
520 
521 	rcar_cmm_setup(rcrtc->cmm, &cmm_config);
522 }
523 
524 /* -----------------------------------------------------------------------------
525  * Start/Stop and Suspend/Resume
526  */
527 
528 static void rcar_du_crtc_setup(struct rcar_du_crtc *rcrtc)
529 {
530 	/* Set display off and background to black */
531 	rcar_du_crtc_write(rcrtc, DOOR, DOOR_RGB(0, 0, 0));
532 	rcar_du_crtc_write(rcrtc, BPOR, BPOR_RGB(0, 0, 0));
533 
534 	/* Configure display timings and output routing */
535 	rcar_du_crtc_set_display_timing(rcrtc);
536 	rcar_du_group_set_routing(rcrtc->group);
537 
538 	/* Start with all planes disabled. */
539 	rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR, 0);
540 
541 	/* Enable the VSP compositor. */
542 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
543 		rcar_du_vsp_enable(rcrtc);
544 
545 	/* Turn vertical blanking interrupt reporting on. */
546 	drm_crtc_vblank_on(&rcrtc->crtc);
547 }
548 
549 static int rcar_du_crtc_get(struct rcar_du_crtc *rcrtc)
550 {
551 	int ret;
552 
553 	/*
554 	 * Guard against double-get, as the function is called from both the
555 	 * .atomic_enable() and .atomic_begin() handlers.
556 	 */
557 	if (rcrtc->initialized)
558 		return 0;
559 
560 	ret = clk_prepare_enable(rcrtc->clock);
561 	if (ret < 0)
562 		return ret;
563 
564 	ret = clk_prepare_enable(rcrtc->extclock);
565 	if (ret < 0)
566 		goto error_clock;
567 
568 	ret = rcar_du_group_get(rcrtc->group);
569 	if (ret < 0)
570 		goto error_group;
571 
572 	rcar_du_crtc_setup(rcrtc);
573 	rcrtc->initialized = true;
574 
575 	return 0;
576 
577 error_group:
578 	clk_disable_unprepare(rcrtc->extclock);
579 error_clock:
580 	clk_disable_unprepare(rcrtc->clock);
581 	return ret;
582 }
583 
584 static void rcar_du_crtc_put(struct rcar_du_crtc *rcrtc)
585 {
586 	rcar_du_group_put(rcrtc->group);
587 
588 	clk_disable_unprepare(rcrtc->extclock);
589 	clk_disable_unprepare(rcrtc->clock);
590 
591 	rcrtc->initialized = false;
592 }
593 
594 static void rcar_du_crtc_start(struct rcar_du_crtc *rcrtc)
595 {
596 	bool interlaced;
597 
598 	/*
599 	 * Select master sync mode. This enables display operation in master
600 	 * sync mode (with the HSYNC and VSYNC signals configured as outputs and
601 	 * actively driven).
602 	 */
603 	interlaced = rcrtc->crtc.mode.flags & DRM_MODE_FLAG_INTERLACE;
604 	rcar_du_crtc_dsysr_clr_set(rcrtc, DSYSR_TVM_MASK | DSYSR_SCM_MASK,
605 				   (interlaced ? DSYSR_SCM_INT_VIDEO : 0) |
606 				   DSYSR_TVM_MASTER);
607 
608 	rcar_du_group_start_stop(rcrtc->group, true);
609 }
610 
611 static void rcar_du_crtc_disable_planes(struct rcar_du_crtc *rcrtc)
612 {
613 	struct rcar_du_device *rcdu = rcrtc->dev;
614 	struct drm_crtc *crtc = &rcrtc->crtc;
615 	u32 status;
616 
617 	/* Make sure vblank interrupts are enabled. */
618 	drm_crtc_vblank_get(crtc);
619 
620 	/*
621 	 * Disable planes and calculate how many vertical blanking interrupts we
622 	 * have to wait for. If a vertical blanking interrupt has been triggered
623 	 * but not processed yet, we don't know whether it occurred before or
624 	 * after the planes got disabled. We thus have to wait for two vblank
625 	 * interrupts in that case.
626 	 */
627 	spin_lock_irq(&rcrtc->vblank_lock);
628 	rcar_du_group_write(rcrtc->group, rcrtc->index % 2 ? DS2PR : DS1PR, 0);
629 	status = rcar_du_crtc_read(rcrtc, DSSR);
630 	rcrtc->vblank_count = status & DSSR_VBK ? 2 : 1;
631 	spin_unlock_irq(&rcrtc->vblank_lock);
632 
633 	if (!wait_event_timeout(rcrtc->vblank_wait, rcrtc->vblank_count == 0,
634 				msecs_to_jiffies(100)))
635 		dev_warn(rcdu->dev, "vertical blanking timeout\n");
636 
637 	drm_crtc_vblank_put(crtc);
638 }
639 
640 static void rcar_du_crtc_stop(struct rcar_du_crtc *rcrtc)
641 {
642 	struct drm_crtc *crtc = &rcrtc->crtc;
643 
644 	/*
645 	 * Disable all planes and wait for the change to take effect. This is
646 	 * required as the plane enable registers are updated on vblank, and no
647 	 * vblank will occur once the CRTC is stopped. Disabling planes when
648 	 * starting the CRTC thus wouldn't be enough as it would start scanning
649 	 * out immediately from old frame buffers until the next vblank.
650 	 *
651 	 * This increases the CRTC stop delay, especially when multiple CRTCs
652 	 * are stopped in one operation as we now wait for one vblank per CRTC.
653 	 * Whether this can be improved needs to be researched.
654 	 */
655 	rcar_du_crtc_disable_planes(rcrtc);
656 
657 	/*
658 	 * Disable vertical blanking interrupt reporting. We first need to wait
659 	 * for page flip completion before stopping the CRTC as userspace
660 	 * expects page flips to eventually complete.
661 	 */
662 	rcar_du_crtc_wait_page_flip(rcrtc);
663 	drm_crtc_vblank_off(crtc);
664 
665 	/* Disable the VSP compositor. */
666 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
667 		rcar_du_vsp_disable(rcrtc);
668 
669 	if (rcrtc->cmm)
670 		rcar_cmm_disable(rcrtc->cmm);
671 
672 	/*
673 	 * Select switch sync mode. This stops display operation and configures
674 	 * the HSYNC and VSYNC signals as inputs.
675 	 *
676 	 * TODO: Find another way to stop the display for DUs that don't support
677 	 * TVM sync.
678 	 */
679 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_TVM_SYNC))
680 		rcar_du_crtc_dsysr_clr_set(rcrtc, DSYSR_TVM_MASK,
681 					   DSYSR_TVM_SWITCH);
682 
683 	rcar_du_group_start_stop(rcrtc->group, false);
684 }
685 
686 /* -----------------------------------------------------------------------------
687  * CRTC Functions
688  */
689 
690 static int rcar_du_crtc_atomic_check(struct drm_crtc *crtc,
691 				     struct drm_atomic_state *state)
692 {
693 	struct drm_crtc_state *crtc_state = drm_atomic_get_new_crtc_state(state,
694 									  crtc);
695 	struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(crtc_state);
696 	struct drm_encoder *encoder;
697 	int ret;
698 
699 	ret = rcar_du_cmm_check(crtc, crtc_state);
700 	if (ret)
701 		return ret;
702 
703 	/* Store the routes from the CRTC output to the DU outputs. */
704 	rstate->outputs = 0;
705 
706 	drm_for_each_encoder_mask(encoder, crtc->dev,
707 				  crtc_state->encoder_mask) {
708 		struct rcar_du_encoder *renc;
709 
710 		/* Skip the writeback encoder. */
711 		if (encoder->encoder_type == DRM_MODE_ENCODER_VIRTUAL)
712 			continue;
713 
714 		renc = to_rcar_encoder(encoder);
715 		rstate->outputs |= BIT(renc->output);
716 	}
717 
718 	return 0;
719 }
720 
721 static void rcar_du_crtc_atomic_enable(struct drm_crtc *crtc,
722 				       struct drm_atomic_state *state)
723 {
724 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
725 	struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(crtc->state);
726 	struct rcar_du_device *rcdu = rcrtc->dev;
727 
728 	if (rcrtc->cmm)
729 		rcar_cmm_enable(rcrtc->cmm);
730 	rcar_du_crtc_get(rcrtc);
731 
732 	/*
733 	 * On D3/E3 the dot clock is provided by the LVDS encoder attached to
734 	 * the DU channel. We need to enable its clock output explicitly before
735 	 * starting the CRTC, as the bridge hasn't been enabled by the atomic
736 	 * helpers yet.
737 	 */
738 	if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index)) {
739 		bool dot_clk_only = rstate->outputs == BIT(RCAR_DU_OUTPUT_DPAD0);
740 		struct drm_bridge *bridge = rcdu->lvds[rcrtc->index];
741 		const struct drm_display_mode *mode =
742 			&crtc->state->adjusted_mode;
743 
744 		rcar_lvds_pclk_enable(bridge, mode->clock * 1000, dot_clk_only);
745 	}
746 
747 	/*
748 	 * Similarly to LVDS, on V3U the dot clock is provided by the DSI
749 	 * encoder, and we need to enable the DSI clocks before enabling the CRTC.
750 	 */
751 	if ((rcdu->info->dsi_clk_mask & BIT(rcrtc->index)) &&
752 	    (rstate->outputs &
753 	     (BIT(RCAR_DU_OUTPUT_DSI0) | BIT(RCAR_DU_OUTPUT_DSI1)))) {
754 		struct drm_bridge *bridge = rcdu->dsi[rcrtc->index];
755 
756 		rcar_mipi_dsi_pclk_enable(bridge, state);
757 	}
758 
759 	rcar_du_crtc_start(rcrtc);
760 
761 	/*
762 	 * TODO: The chip manual indicates that CMM tables should be written
763 	 * after the DU channel has been activated. Investigate the impact
764 	 * of this restriction on the first displayed frame.
765 	 */
766 	rcar_du_cmm_setup(crtc);
767 }
768 
769 static void rcar_du_crtc_atomic_disable(struct drm_crtc *crtc,
770 					struct drm_atomic_state *state)
771 {
772 	struct drm_crtc_state *old_state = drm_atomic_get_old_crtc_state(state,
773 									 crtc);
774 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
775 	struct rcar_du_crtc_state *rstate = to_rcar_crtc_state(old_state);
776 	struct rcar_du_device *rcdu = rcrtc->dev;
777 
778 	rcar_du_crtc_stop(rcrtc);
779 	rcar_du_crtc_put(rcrtc);
780 
781 	if (rcdu->info->lvds_clk_mask & BIT(rcrtc->index)) {
782 		bool dot_clk_only = rstate->outputs == BIT(RCAR_DU_OUTPUT_DPAD0);
783 		struct drm_bridge *bridge = rcdu->lvds[rcrtc->index];
784 
785 		/*
786 		 * Disable the LVDS clock output, see
787 		 * rcar_du_crtc_atomic_enable(). When the LVDS output is used,
788 		 * this also disables the LVDS encoder.
789 		 */
790 		rcar_lvds_pclk_disable(bridge, dot_clk_only);
791 	}
792 
793 	if ((rcdu->info->dsi_clk_mask & BIT(rcrtc->index)) &&
794 	    (rstate->outputs &
795 	     (BIT(RCAR_DU_OUTPUT_DSI0) | BIT(RCAR_DU_OUTPUT_DSI1)))) {
796 		struct drm_bridge *bridge = rcdu->dsi[rcrtc->index];
797 
798 		/*
799 		 * Disable the DSI clock output, see
800 		 * rcar_du_crtc_atomic_enable().
801 		 */
802 		rcar_mipi_dsi_pclk_disable(bridge);
803 	}
804 
805 	spin_lock_irq(&crtc->dev->event_lock);
806 	if (crtc->state->event) {
807 		drm_crtc_send_vblank_event(crtc, crtc->state->event);
808 		crtc->state->event = NULL;
809 	}
810 	spin_unlock_irq(&crtc->dev->event_lock);
811 }
812 
813 static void rcar_du_crtc_atomic_begin(struct drm_crtc *crtc,
814 				      struct drm_atomic_state *state)
815 {
816 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
817 
818 	WARN_ON(!crtc->state->enable);
819 
820 	/*
821 	 * If a mode set is in progress we can be called with the CRTC disabled.
822 	 * We thus need to first get and setup the CRTC in order to configure
823 	 * planes. We must *not* put the CRTC in .atomic_flush(), as it must be
824 	 * kept awake until the .atomic_enable() call that will follow. The get
825 	 * operation in .atomic_enable() will in that case be a no-op, and the
826 	 * CRTC will be put later in .atomic_disable().
827 	 *
828 	 * If a mode set is not in progress the CRTC is enabled, and the
829 	 * following get call will be a no-op. There is thus no need to balance
830 	 * it in .atomic_flush() either.
831 	 */
832 	rcar_du_crtc_get(rcrtc);
833 
834 	/* If the active state changed, we let .atomic_enable handle CMM. */
835 	if (crtc->state->color_mgmt_changed && !crtc->state->active_changed)
836 		rcar_du_cmm_setup(crtc);
837 
838 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
839 		rcar_du_vsp_atomic_begin(rcrtc);
840 }
841 
842 static void rcar_du_crtc_atomic_flush(struct drm_crtc *crtc,
843 				      struct drm_atomic_state *state)
844 {
845 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
846 	struct drm_device *dev = rcrtc->crtc.dev;
847 	unsigned long flags;
848 
849 	rcar_du_crtc_update_planes(rcrtc);
850 
851 	if (crtc->state->event) {
852 		WARN_ON(drm_crtc_vblank_get(crtc) != 0);
853 
854 		spin_lock_irqsave(&dev->event_lock, flags);
855 		rcrtc->event = crtc->state->event;
856 		crtc->state->event = NULL;
857 		spin_unlock_irqrestore(&dev->event_lock, flags);
858 	}
859 
860 	if (rcar_du_has(rcrtc->dev, RCAR_DU_FEATURE_VSP1_SOURCE))
861 		rcar_du_vsp_atomic_flush(rcrtc);
862 }
863 
864 static enum drm_mode_status
865 rcar_du_crtc_mode_valid(struct drm_crtc *crtc,
866 			const struct drm_display_mode *mode)
867 {
868 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
869 	struct rcar_du_device *rcdu = rcrtc->dev;
870 	bool interlaced = mode->flags & DRM_MODE_FLAG_INTERLACE;
871 	unsigned int min_sync_porch;
872 	unsigned int vbp;
873 
874 	if (interlaced && !rcar_du_has(rcdu, RCAR_DU_FEATURE_INTERLACED))
875 		return MODE_NO_INTERLACE;
876 
877 	/*
878 	 * The hardware requires a minimum combined horizontal sync and back
879 	 * porch of 20 pixels (when CMM isn't used) or 45 pixels (when CMM is
880 	 * used), and a minimum vertical back porch of 3 lines.
881 	 */
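	/*
	 * For instance, a hypothetical mode with htotal 2200 and hsync_start
	 * 2190 leaves only 10 pixels of combined sync and back porch, and is
	 * rejected below with MODE_HBLANK_NARROW.
	 */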
882 	min_sync_porch = 20;
883 	if (rcrtc->group->cmms_mask & BIT(rcrtc->index % 2))
884 		min_sync_porch += 25;
885 
886 	if (mode->htotal - mode->hsync_start < min_sync_porch)
887 		return MODE_HBLANK_NARROW;
888 
889 	vbp = (mode->vtotal - mode->vsync_end) / (interlaced ? 2 : 1);
890 	if (vbp < 3)
891 		return MODE_VBLANK_NARROW;
892 
893 	return MODE_OK;
894 }
895 
896 static const struct drm_crtc_helper_funcs crtc_helper_funcs = {
897 	.atomic_check = rcar_du_crtc_atomic_check,
898 	.atomic_begin = rcar_du_crtc_atomic_begin,
899 	.atomic_flush = rcar_du_crtc_atomic_flush,
900 	.atomic_enable = rcar_du_crtc_atomic_enable,
901 	.atomic_disable = rcar_du_crtc_atomic_disable,
902 	.mode_valid = rcar_du_crtc_mode_valid,
903 };
904 
905 static void rcar_du_crtc_crc_init(struct rcar_du_crtc *rcrtc)
906 {
907 	struct rcar_du_device *rcdu = rcrtc->dev;
908 	const char **sources;
909 	unsigned int count;
910 	int i = -1;
911 
912 	/* CRC available only on Gen3 HW. */
913 	if (rcdu->info->gen < 3)
914 		return;
915 
916 	/* Reserve 1 for "auto" source. */
917 	count = rcrtc->vsp->num_planes + 1;
918 
919 	sources = kmalloc_array(count, sizeof(*sources), GFP_KERNEL);
920 	if (!sources)
921 		return;
922 
923 	sources[0] = kstrdup("auto", GFP_KERNEL);
924 	if (!sources[0])
925 		goto error;
926 
927 	for (i = 0; i < rcrtc->vsp->num_planes; ++i) {
928 		struct drm_plane *plane = &rcrtc->vsp->planes[i].plane;
929 		char name[16];
930 
931 		sprintf(name, "plane%u", plane->base.id);
932 		sources[i + 1] = kstrdup(name, GFP_KERNEL);
933 		if (!sources[i + 1])
934 			goto error;
935 	}
936 
937 	rcrtc->sources = sources;
938 	rcrtc->sources_count = count;
939 	return;
940 
941 error:
942 	while (i >= 0) {
943 		kfree(sources[i]);
944 		i--;
945 	}
946 	kfree(sources);
947 }
948 
949 static void rcar_du_crtc_crc_cleanup(struct rcar_du_crtc *rcrtc)
950 {
951 	unsigned int i;
952 
953 	if (!rcrtc->sources)
954 		return;
955 
956 	for (i = 0; i < rcrtc->sources_count; i++)
957 		kfree(rcrtc->sources[i]);
958 	kfree(rcrtc->sources);
959 
960 	rcrtc->sources = NULL;
961 	rcrtc->sources_count = 0;
962 }
963 
964 static struct drm_crtc_state *
965 rcar_du_crtc_atomic_duplicate_state(struct drm_crtc *crtc)
966 {
967 	struct rcar_du_crtc_state *state;
968 	struct rcar_du_crtc_state *copy;
969 
970 	if (WARN_ON(!crtc->state))
971 		return NULL;
972 
973 	state = to_rcar_crtc_state(crtc->state);
974 	copy = kmemdup(state, sizeof(*state), GFP_KERNEL);
975 	if (copy == NULL)
976 		return NULL;
977 
978 	__drm_atomic_helper_crtc_duplicate_state(crtc, &copy->state);
979 
980 	return &copy->state;
981 }
982 
983 static void rcar_du_crtc_atomic_destroy_state(struct drm_crtc *crtc,
984 					      struct drm_crtc_state *state)
985 {
986 	__drm_atomic_helper_crtc_destroy_state(state);
987 	kfree(to_rcar_crtc_state(state));
988 }
989 
990 static void rcar_du_crtc_cleanup(struct drm_crtc *crtc)
991 {
992 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
993 
994 	rcar_du_crtc_crc_cleanup(rcrtc);
995 
996 	return drm_crtc_cleanup(crtc);
997 }
998 
999 static void rcar_du_crtc_reset(struct drm_crtc *crtc)
1000 {
1001 	struct rcar_du_crtc_state *state;
1002 
1003 	if (crtc->state) {
1004 		rcar_du_crtc_atomic_destroy_state(crtc, crtc->state);
1005 		crtc->state = NULL;
1006 	}
1007 
1008 	state = kzalloc(sizeof(*state), GFP_KERNEL);
1009 	if (state == NULL)
1010 		return;
1011 
1012 	state->crc.source = VSP1_DU_CRC_NONE;
1013 	state->crc.index = 0;
1014 
1015 	__drm_atomic_helper_crtc_reset(crtc, &state->state);
1016 }
1017 
1018 static int rcar_du_crtc_enable_vblank(struct drm_crtc *crtc)
1019 {
1020 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1021 
1022 	rcar_du_crtc_write(rcrtc, DSRCR, DSRCR_VBCL);
1023 	rcar_du_crtc_set(rcrtc, DIER, DIER_VBE);
1024 	rcrtc->vblank_enable = true;
1025 
1026 	return 0;
1027 }
1028 
1029 static void rcar_du_crtc_disable_vblank(struct drm_crtc *crtc)
1030 {
1031 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1032 
1033 	rcar_du_crtc_clr(rcrtc, DIER, DIER_VBE);
1034 	rcrtc->vblank_enable = false;
1035 }
1036 
1037 static int rcar_du_crtc_parse_crc_source(struct rcar_du_crtc *rcrtc,
1038 					 const char *source_name,
1039 					 enum vsp1_du_crc_source *source)
1040 {
1041 	unsigned int index;
1042 	int ret;
1043 
1044 	/*
1045 	 * Parse the source name. Supported values are "plane%u" to compute the
1046 	 * CRC on an input plane (%u is the plane ID), and "auto" to compute the
1047 	 * CRC on the composer (VSP) output.
1048 	 */
1049 
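	/*
	 * Note: these source names are normally selected from userspace
	 * through the DRM CRC debugfs interface, e.g. by writing "auto" or
	 * "plane<id>" to the CRTC's crc/control file (the exact debugfs path
	 * depends on the system); the driver does not construct them itself.
	 */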
1050 	if (!source_name) {
1051 		*source = VSP1_DU_CRC_NONE;
1052 		return 0;
1053 	} else if (!strcmp(source_name, "auto")) {
1054 		*source = VSP1_DU_CRC_OUTPUT;
1055 		return 0;
1056 	} else if (strstarts(source_name, "plane")) {
1057 		unsigned int i;
1058 
1059 		*source = VSP1_DU_CRC_PLANE;
1060 
1061 		ret = kstrtouint(source_name + strlen("plane"), 10, &index);
1062 		if (ret < 0)
1063 			return ret;
1064 
1065 		for (i = 0; i < rcrtc->vsp->num_planes; ++i) {
1066 			if (index == rcrtc->vsp->planes[i].plane.base.id)
1067 				return i;
1068 		}
1069 	}
1070 
1071 	return -EINVAL;
1072 }
1073 
1074 static int rcar_du_crtc_verify_crc_source(struct drm_crtc *crtc,
1075 					  const char *source_name,
1076 					  size_t *values_cnt)
1077 {
1078 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1079 	enum vsp1_du_crc_source source;
1080 
1081 	if (rcar_du_crtc_parse_crc_source(rcrtc, source_name, &source) < 0) {
1082 		DRM_DEBUG_DRIVER("unknown source %s\n", source_name);
1083 		return -EINVAL;
1084 	}
1085 
1086 	*values_cnt = 1;
1087 	return 0;
1088 }
1089 
1090 static const char *const *
1091 rcar_du_crtc_get_crc_sources(struct drm_crtc *crtc, size_t *count)
1092 {
1093 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1094 
1095 	*count = rcrtc->sources_count;
1096 	return rcrtc->sources;
1097 }
1098 
1099 static int rcar_du_crtc_set_crc_source(struct drm_crtc *crtc,
1100 				       const char *source_name)
1101 {
1102 	struct rcar_du_crtc *rcrtc = to_rcar_crtc(crtc);
1103 	struct drm_modeset_acquire_ctx ctx;
1104 	struct drm_crtc_state *crtc_state;
1105 	struct drm_atomic_state *state;
1106 	enum vsp1_du_crc_source source;
1107 	unsigned int index;
1108 	int ret;
1109 
1110 	ret = rcar_du_crtc_parse_crc_source(rcrtc, source_name, &source);
1111 	if (ret < 0)
1112 		return ret;
1113 
1114 	index = ret;
1115 
1116 	/* Perform an atomic commit to set the CRC source. */
1117 	drm_modeset_acquire_init(&ctx, 0);
1118 
1119 	state = drm_atomic_state_alloc(crtc->dev);
1120 	if (!state) {
1121 		ret = -ENOMEM;
1122 		goto unlock;
1123 	}
1124 
1125 	state->acquire_ctx = &ctx;
1126 
1127 retry:
1128 	crtc_state = drm_atomic_get_crtc_state(state, crtc);
1129 	if (!IS_ERR(crtc_state)) {
1130 		struct rcar_du_crtc_state *rcrtc_state;
1131 
1132 		rcrtc_state = to_rcar_crtc_state(crtc_state);
1133 		rcrtc_state->crc.source = source;
1134 		rcrtc_state->crc.index = index;
1135 
1136 		ret = drm_atomic_commit(state);
1137 	} else {
1138 		ret = PTR_ERR(crtc_state);
1139 	}
1140 
1141 	if (ret == -EDEADLK) {
1142 		drm_atomic_state_clear(state);
1143 		drm_modeset_backoff(&ctx);
1144 		goto retry;
1145 	}
1146 
1147 	drm_atomic_state_put(state);
1148 
1149 unlock:
1150 	drm_modeset_drop_locks(&ctx);
1151 	drm_modeset_acquire_fini(&ctx);
1152 
1153 	return ret;
1154 }
1155 
1156 static const struct drm_crtc_funcs crtc_funcs_gen2 = {
1157 	.reset = rcar_du_crtc_reset,
1158 	.destroy = drm_crtc_cleanup,
1159 	.set_config = drm_atomic_helper_set_config,
1160 	.page_flip = drm_atomic_helper_page_flip,
1161 	.atomic_duplicate_state = rcar_du_crtc_atomic_duplicate_state,
1162 	.atomic_destroy_state = rcar_du_crtc_atomic_destroy_state,
1163 	.enable_vblank = rcar_du_crtc_enable_vblank,
1164 	.disable_vblank = rcar_du_crtc_disable_vblank,
1165 };
1166 
1167 static const struct drm_crtc_funcs crtc_funcs_gen3 = {
1168 	.reset = rcar_du_crtc_reset,
1169 	.destroy = rcar_du_crtc_cleanup,
1170 	.set_config = drm_atomic_helper_set_config,
1171 	.page_flip = drm_atomic_helper_page_flip,
1172 	.atomic_duplicate_state = rcar_du_crtc_atomic_duplicate_state,
1173 	.atomic_destroy_state = rcar_du_crtc_atomic_destroy_state,
1174 	.enable_vblank = rcar_du_crtc_enable_vblank,
1175 	.disable_vblank = rcar_du_crtc_disable_vblank,
1176 	.set_crc_source = rcar_du_crtc_set_crc_source,
1177 	.verify_crc_source = rcar_du_crtc_verify_crc_source,
1178 	.get_crc_sources = rcar_du_crtc_get_crc_sources,
1179 };
1180 
1181 /* -----------------------------------------------------------------------------
1182  * Interrupt Handling
1183  */
1184 
1185 static irqreturn_t rcar_du_crtc_irq(int irq, void *arg)
1186 {
1187 	struct rcar_du_crtc *rcrtc = arg;
1188 	struct rcar_du_device *rcdu = rcrtc->dev;
1189 	irqreturn_t ret = IRQ_NONE;
1190 	u32 status;
1191 
1192 	spin_lock(&rcrtc->vblank_lock);
1193 
1194 	status = rcar_du_crtc_read(rcrtc, DSSR);
1195 	rcar_du_crtc_write(rcrtc, DSRCR, status & DSRCR_MASK);
1196 
1197 	if (status & DSSR_VBK) {
1198 		/*
1199 		 * Wake up the vblank wait if the counter reaches 0. This must
1200 		 * be protected by the vblank_lock to avoid races in
1201 		 * rcar_du_crtc_disable_planes().
1202 		 */
1203 		if (rcrtc->vblank_count) {
1204 			if (--rcrtc->vblank_count == 0)
1205 				wake_up(&rcrtc->vblank_wait);
1206 		}
1207 	}
1208 
1209 	spin_unlock(&rcrtc->vblank_lock);
1210 
1211 	if (status & DSSR_VBK) {
1212 		if (rcdu->info->gen < 3) {
1213 			drm_crtc_handle_vblank(&rcrtc->crtc);
1214 			rcar_du_crtc_finish_page_flip(rcrtc);
1215 		}
1216 
1217 		ret = IRQ_HANDLED;
1218 	}
1219 
1220 	return ret;
1221 }
1222 
1223 /* -----------------------------------------------------------------------------
1224  * Initialization
1225  */
1226 
1227 int rcar_du_crtc_create(struct rcar_du_group *rgrp, unsigned int swindex,
1228 			unsigned int hwindex)
1229 {
1230 	static const unsigned int mmio_offsets[] = {
1231 		DU0_REG_OFFSET, DU1_REG_OFFSET, DU2_REG_OFFSET, DU3_REG_OFFSET
1232 	};
1233 
1234 	struct rcar_du_device *rcdu = rgrp->dev;
1235 	struct platform_device *pdev = to_platform_device(rcdu->dev);
1236 	struct rcar_du_crtc *rcrtc = &rcdu->crtcs[swindex];
1237 	struct drm_crtc *crtc = &rcrtc->crtc;
1238 	struct drm_plane *primary;
1239 	unsigned int irqflags;
1240 	struct clk *clk;
1241 	char clk_name[9];
1242 	char *name;
1243 	int irq;
1244 	int ret;
1245 
1246 	/* Get the CRTC clock and the optional external clock. */
1247 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_CRTC_CLOCK)) {
1248 		sprintf(clk_name, "du.%u", hwindex);
1249 		name = clk_name;
1250 	} else {
1251 		name = NULL;
1252 	}
1253 
1254 	rcrtc->clock = devm_clk_get(rcdu->dev, name);
1255 	if (IS_ERR(rcrtc->clock)) {
1256 		dev_err(rcdu->dev, "no clock for DU channel %u\n", hwindex);
1257 		return PTR_ERR(rcrtc->clock);
1258 	}
1259 
1260 	sprintf(clk_name, "dclkin.%u", hwindex);
1261 	clk = devm_clk_get(rcdu->dev, clk_name);
1262 	if (!IS_ERR(clk)) {
1263 		rcrtc->extclock = clk;
1264 	} else if (PTR_ERR(clk) == -EPROBE_DEFER) {
1265 		return -EPROBE_DEFER;
1266 	} else if (rcdu->info->dpll_mask & BIT(hwindex)) {
1267 		/*
1268 		 * DU channels that have a display PLL can't use the internal
1269 		 * system clock and thus require an external clock.
1270 		 */
1271 		ret = PTR_ERR(clk);
1272 		dev_err(rcdu->dev, "can't get dclkin.%u: %d\n", hwindex, ret);
1273 		return ret;
1274 	}
1275 
1276 	init_waitqueue_head(&rcrtc->flip_wait);
1277 	init_waitqueue_head(&rcrtc->vblank_wait);
1278 	spin_lock_init(&rcrtc->vblank_lock);
1279 
1280 	rcrtc->dev = rcdu;
1281 	rcrtc->group = rgrp;
1282 	rcrtc->mmio_offset = mmio_offsets[hwindex];
1283 	rcrtc->index = hwindex;
1284 	rcrtc->dsysr = rcrtc->index % 2 ? 0 : DSYSR_DRES;
1285 
1286 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_TVM_SYNC))
1287 		rcrtc->dsysr |= DSYSR_TVM_TVSYNC;
1288 
1289 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_VSP1_SOURCE))
1290 		primary = &rcrtc->vsp->planes[rcrtc->vsp_pipe].plane;
1291 	else
1292 		primary = &rgrp->planes[swindex % 2].plane;
1293 
1294 	ret = drm_crtc_init_with_planes(&rcdu->ddev, crtc, primary, NULL,
1295 					rcdu->info->gen <= 2 ?
1296 					&crtc_funcs_gen2 : &crtc_funcs_gen3,
1297 					NULL);
1298 	if (ret < 0)
1299 		return ret;
1300 
1301 	/* CMM might be disabled for this CRTC. */
1302 	if (rcdu->cmms[swindex]) {
1303 		rcrtc->cmm = rcdu->cmms[swindex];
1304 		rgrp->cmms_mask |= BIT(hwindex % 2);
1305 
1306 		drm_mode_crtc_set_gamma_size(crtc, CM2_LUT_SIZE);
1307 		drm_crtc_enable_color_mgmt(crtc, 0, false, CM2_LUT_SIZE);
1308 	}
1309 
1310 	drm_crtc_helper_add(crtc, &crtc_helper_funcs);
1311 
1312 	/* Register the interrupt handler. */
1313 	if (rcar_du_has(rcdu, RCAR_DU_FEATURE_CRTC_IRQ)) {
1314 		/* The IRQs are associated with the CRTC (sw)index. */
1315 		irq = platform_get_irq(pdev, swindex);
1316 		irqflags = 0;
1317 	} else {
1318 		irq = platform_get_irq(pdev, 0);
1319 		irqflags = IRQF_SHARED;
1320 	}
1321 
1322 	if (irq < 0) {
1323 		dev_err(rcdu->dev, "no IRQ for CRTC %u\n", swindex);
1324 		return irq;
1325 	}
1326 
1327 	ret = devm_request_irq(rcdu->dev, irq, rcar_du_crtc_irq, irqflags,
1328 			       dev_name(rcdu->dev), rcrtc);
1329 	if (ret < 0) {
1330 		dev_err(rcdu->dev,
1331 			"failed to register IRQ for CRTC %u\n", swindex);
1332 		return ret;
1333 	}
1334 
1335 	rcar_du_crtc_crc_init(rcrtc);
1336 
1337 	return 0;
1338 }
1339