/*
 * Hisilicon Kirin SoC DPE (Display Processing Engine) crtc & plane driver
 *
 * Copyright (c) 2016 Linaro Limited.
 * Copyright (c) 2014-2016 Hisilicon Limited.
 *
 * Author:
 *	Xinliang Liu <z.liuxinliang@hisilicon.com>
 *	Xinliang Liu <xinliang.liu@linaro.org>
 *	Xinwei Kong <kong.kongxinwei@hisilicon.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */

#include <linux/bitops.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/mfd/syscon.h>
#include <linux/regmap.h>
#include <linux/reset.h>
#include <linux/of_address.h>
#include <linux/of.h>
#include <linux/of_irq.h>
#include <linux/platform_device.h>

#include <video/display_timing.h>

#include <drm/drm_drv.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_vblank.h>
#include <drm/drm_fourcc.h>

#include "kirin_drm_drv.h"
#include "kirin_dpe_reg.h"

#define DPE_WIDTH(width)   ((width) - 1)
#define DPE_HEIGHT(height) ((height) - 1)

#define GET_FLUX_REQ_IN(max_depth)  ((max_depth) * 50 / 100)
#define GET_FLUX_REQ_OUT(max_depth) ((max_depth) * 90 / 100)

#define DEFAULT_DPE_CORE_CLK_07V_RATE (400000000UL)
#define DPE_MAX_PXL0_CLK_144M         (144000000UL)

#define DPE_UNSUPPORT (800)
#define RES_4K_PHONE  (3840 * 2160)

enum dpe_ovl {
	DPE_OVL0 = 0,
	DPE_OVL_NUM
};

enum dpe_channel {
	DPE_CH0 = 0, /* channel 0 is used for the primary plane */
	DPE_CH_NUM
};

struct dpe_hw_ctx {
	void __iomem *base;
	void __iomem *noc_base;

	struct clk *dpe_axi_clk;
	struct clk *dpe_pclk_clk;
	struct clk *dpe_pri_clk;
	struct clk *dpe_pxl0_clk;
	struct clk *dpe_mmbuf_clk;

	bool power_on;
	int irq;

	struct drm_crtc *crtc;

	u32 hdisplay;
	u32 vdisplay;
};

static const struct kirin_format dpe_formats[] = {
	{ DRM_FORMAT_RGB565, DPE_RGB_565 },
	{ DRM_FORMAT_BGR565, DPE_BGR_565 },
	{ DRM_FORMAT_XRGB8888, DPE_RGBX_8888 },
	{ DRM_FORMAT_XBGR8888, DPE_BGRX_8888 },
	{ DRM_FORMAT_RGBA8888, DPE_RGBA_8888 },
	{ DRM_FORMAT_BGRA8888, DPE_BGRA_8888 },
	{ DRM_FORMAT_ARGB8888, DPE_BGRA_8888 },
	{ DRM_FORMAT_ABGR8888, DPE_RGBA_8888 },
};

static const u32 dpe_channel_formats[] = {
	DRM_FORMAT_RGB565,
	DRM_FORMAT_BGR565,
	DRM_FORMAT_XRGB8888,
	DRM_FORMAT_XBGR8888,
	DRM_FORMAT_RGBA8888,
	DRM_FORMAT_BGRA8888,
	DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888,
};

static const u32 dpe_pixel_dma_format_map[] = {
	DMA_PIXEL_FORMAT_RGB_565,
	DMA_PIXEL_FORMAT_XRGB_4444,
	DMA_PIXEL_FORMAT_ARGB_4444,
	DMA_PIXEL_FORMAT_XRGB_5551,
	DMA_PIXEL_FORMAT_ARGB_5551,
	DMA_PIXEL_FORMAT_XRGB_8888,
	DMA_PIXEL_FORMAT_ARGB_8888,
	DMA_PIXEL_FORMAT_RGB_565,
	DMA_PIXEL_FORMAT_XRGB_4444,
	DMA_PIXEL_FORMAT_ARGB_4444,
	DMA_PIXEL_FORMAT_XRGB_5551,
	DMA_PIXEL_FORMAT_ARGB_5551,
	DMA_PIXEL_FORMAT_XRGB_8888,
	DMA_PIXEL_FORMAT_ARGB_8888,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
	DMA_PIXEL_FORMAT_YUV_422_SP_HP,
	DMA_PIXEL_FORMAT_YUV_422_SP_HP,
	DMA_PIXEL_FORMAT_YUV_420_SP_HP,
	DMA_PIXEL_FORMAT_YUV_420_SP_HP,
	DMA_PIXEL_FORMAT_YUV_422_P_HP,
	DMA_PIXEL_FORMAT_YUV_422_P_HP,
	DMA_PIXEL_FORMAT_YUV_420_P_HP,
	DMA_PIXEL_FORMAT_YUV_420_P_HP,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
	DMA_PIXEL_FORMAT_YUYV_422_Pkg,
};

static const u32 dpe_pixel_dfc_format_map[] = {
	DFC_PIXEL_FORMAT_RGB_565,
	DFC_PIXEL_FORMAT_XBGR_4444,
	DFC_PIXEL_FORMAT_ABGR_4444,
	DFC_PIXEL_FORMAT_XBGR_5551,
	DFC_PIXEL_FORMAT_ABGR_5551,
	DFC_PIXEL_FORMAT_XBGR_8888,
	DFC_PIXEL_FORMAT_ABGR_8888,
	DFC_PIXEL_FORMAT_BGR_565,
	DFC_PIXEL_FORMAT_XRGB_4444,
	DFC_PIXEL_FORMAT_ARGB_4444,
	DFC_PIXEL_FORMAT_XRGB_5551,
	DFC_PIXEL_FORMAT_ARGB_5551,
	DFC_PIXEL_FORMAT_XRGB_8888,
	DFC_PIXEL_FORMAT_ARGB_8888,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_YUYV422,
	DFC_PIXEL_FORMAT_UYVY422,
	DFC_PIXEL_FORMAT_YVYU422,
	DFC_PIXEL_FORMAT_VYUY422,
};

static const u32 mid_array[DPE_CH_NUM] = {0xb};
static const u32 aif_offset[DPE_CH_NUM] = {AIF0_CH0_OFFSET};
static const u32 mif_offset[DPE_CH_NUM] = {MIF_CH0_OFFSET};
static const u32 rdma_offset[DPE_CH_NUM] = {DPE_RCH_D0_DMA_OFFSET};
static const u32 rdfc_offset[DPE_CH_NUM] = {DPE_RCH_D0_DFC_OFFSET};
static const u32 dpe_smmu_chn_sid_num[DPE_CH_NUM] = {4};
static const u32 dpe_smmu_smrx_idx[DPE_CH_NUM] = {0};
static const u32 mctl_offset[DPE_OVL_NUM] = {DPE_MCTRL_CTL0_OFFSET};
static const u32 ovl_offset[DPE_OVL_NUM] = {DPE_OVL0_OFFSET};

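/*
 * Translate a DRM fourcc into the DPE hardware format code from
 * dpe_formats[], or DPE_UNSUPPORT if the format is not handled.
 */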
static u32 dpe_get_format(u32 pixel_format)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(dpe_formats); i++)
		if (dpe_formats[i].pixel_format == pixel_format)
			return dpe_formats[i].hw_format;

	DRM_ERROR("unsupported pixel format 0x%x\n", pixel_format);
	return DPE_UNSUPPORT;
}

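/*
 * Read-modify-write a register field: write the low @bw bits of @val
 * into @addr at bit offset @bs, leaving all other bits untouched.
 */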
static void dpe_set_reg(char __iomem *addr, u32 val, u8 bw, u8 bs)
{
	u32 mask = (u32)((1ULL << bw) - 1);
	u32 tmp;

	tmp = readl(addr);
	tmp &= ~(mask << bs);

	writel(tmp | ((val & mask) << bs), addr);
}

/* dpe mctl utils */
static void dpe_mctl_lock(struct dpe_hw_ctx *ctx)
{
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];

	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX, 0x1, 1, 0);
}

static void dpe_mctl_unlock(struct dpe_hw_ctx *ctx)
{
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];

	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX, 0x0, 1, 0);
}

static void dpe_mctl_init(struct dpe_hw_ctx *ctx)
{
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];

	dpe_set_reg(mctl_base + MCTL_CTL_EN, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_ITF, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_DBG, 0xB13A00, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_TOP, 0x2, 32, 0);
}

static void dpe_qos_init(struct dpe_hw_ctx *ctx)
{
	void __iomem *noc_base = ctx->noc_base;

	writel(0x2, noc_base + 0x000c);
	writel(0x2, noc_base + 0x008c);
	writel(0x2, noc_base + 0x010c);
	writel(0x2, noc_base + 0x018c);
}

/* dpe ldi utils */
static void dpe_enable_ldi(struct dpe_hw_ctx *ctx)
{
	void __iomem *ldi_base = ctx->base + DPE_LDI0_OFFSET;

	dpe_set_reg(ldi_base + LDI_CTRL, 0x1, 1, 0);
}

/* interrupts utils */
static void dpe_interrupt_mask(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;
	u32 mask = ~0;

	writel(mask, base + GLB_CPU_PDP_INT_MSK);
	writel(mask, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
	writel(mask, base + DPE_DPP_OFFSET + DPP_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_DPE_GLB_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_MCTL_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_WCH0_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_WCH1_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH0_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH1_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH2_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH3_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH4_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH5_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH6_INT_MSK);
	writel(mask, base + DPE_DBG_OFFSET + DBG_RCH7_INT_MSK);
}

static void dpe_interrupt_unmask(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;
	u32 unmask;

	unmask = ~0;
	unmask &= ~(BIT_DPP_INTS | BIT_ITF0_INTS | BIT_MMU_IRPT_NS);
	writel(unmask, base + GLB_CPU_PDP_INT_MSK);

	unmask = ~0;
	unmask &= ~(BIT_VSYNC | BIT_LDI_UNFLOW);
	writel(unmask, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
}

static void dpe_interrupt_clear(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;
	u32 clear = ~0;

	writel(clear, base + GLB_CPU_PDP_INTS);
	writel(clear, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INTS);
	writel(clear, base + DPE_DPP_OFFSET + DPP_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_MCTL_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_WCH0_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_WCH1_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH0_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH1_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH2_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH3_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH4_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH5_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH6_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_RCH7_INTS);
	writel(clear, base + DPE_DBG_OFFSET + DBG_DPE_GLB_INTS);
}

static void dpe_irq_enable(struct dpe_hw_ctx *ctx)
{
	enable_irq(ctx->irq);
}

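/*
 * Despite the name, this programs the per-module MEM_CTRL registers.
 * The magic values are taken over from the vendor code and presumably
 * select the SRAM low-power/retention settings for each block.
 */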
static void dpe_clk_enable(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;

	writel(0x00000088, base + DPE_IFBC_OFFSET + IFBC_MEM_CTRL);
	writel(0x00000888, base + DPE_DSC_OFFSET + DSC_MEM_CTRL);
	writel(0x00000008, base + DPE_LDI0_OFFSET + LDI_MEM_CTRL);
	writel(0x00000008, base + DPE_DBUF0_OFFSET + DBUF_MEM_CTRL);
	writel(0x00000008, base + DPE_DPP_DITHER_OFFSET + DITHER_MEM_CTRL);
	writel(0x00000008, base + DPE_CMDLIST_OFFSET + CMD_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_VG0_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG0_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG0_ARSR_OFFSET + ARSR2P_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG0_DMA_OFFSET + VPP_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG0_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_VG0_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_VG1_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG1_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG1_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_VG1_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_VG2_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG2_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_VG2_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_G0_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_G0_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_G0_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_G0_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000088, base + DPE_RCH_G1_SCL_OFFSET + SCF_COEF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_G1_SCL_OFFSET + SCF_LB_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_G1_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_G1_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_D0_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00008888, base + DPE_RCH_D0_DMA_OFFSET + AFBCD_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_D1_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_D2_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000008, base + DPE_RCH_D3_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH0_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000888, base + DPE_WCH0_DMA_OFFSET + AFBCE_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH0_DMA_OFFSET + ROT_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH1_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000888, base + DPE_WCH1_DMA_OFFSET + AFBCE_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH1_DMA_OFFSET + ROT_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH2_DMA_OFFSET + DMA_BUF_MEM_CTRL);
	writel(0x00000008, base + DPE_WCH2_DMA_OFFSET + ROT_MEM_CTRL);
}

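/*
 * Bring the display engine up: enable the peripheral clocks, then
 * reset the interrupt state (mask, clear, enable, unmask). Clocks
 * already enabled are released again if a later step fails.
 */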
static int dpe_power_up(struct dpe_hw_ctx *ctx)
{
	int ret;

	if (ctx->power_on)
		return 0;

	/* peripheral clock enable */
	ret = clk_prepare_enable(ctx->dpe_pxl0_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_pxl0_clk (%d)\n", ret);
		return ret;
	}

	ret = clk_prepare_enable(ctx->dpe_pri_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_pri_clk (%d)\n", ret);
		goto err_pxl0_clk;
	}

	ret = clk_prepare_enable(ctx->dpe_pclk_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_pclk_clk (%d)\n", ret);
		goto err_pri_clk;
	}

	ret = clk_prepare_enable(ctx->dpe_axi_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_axi_clk (%d)\n", ret);
		goto err_pclk_clk;
	}

	ret = clk_prepare_enable(ctx->dpe_mmbuf_clk);
	if (ret) {
		DRM_ERROR("failed to enable dpe_mmbuf_clk (%d)\n", ret);
		goto err_axi_clk;
	}

	dpe_clk_enable(ctx);
	dpe_interrupt_mask(ctx);
	dpe_interrupt_clear(ctx);
	dpe_irq_enable(ctx);
	dpe_interrupt_unmask(ctx);

	ctx->power_on = true;
	return 0;

err_axi_clk:
	clk_disable_unprepare(ctx->dpe_axi_clk);
err_pclk_clk:
	clk_disable_unprepare(ctx->dpe_pclk_clk);
err_pri_clk:
	clk_disable_unprepare(ctx->dpe_pri_clk);
err_pxl0_clk:
	clk_disable_unprepare(ctx->dpe_pxl0_clk);
	return ret;
}

static void dpe_dpp_init(struct dpe_hw_ctx *ctx, struct drm_display_mode *mode,
			 struct drm_display_mode *adj_mode)
{
	void __iomem *dpp_base = ctx->base + DPE_DPP_OFFSET;

	writel((DPE_HEIGHT(mode->vdisplay) << 16) | DPE_WIDTH(mode->hdisplay),
	       dpp_base + DPP_IMG_SIZE_BEF_SR);
	writel((DPE_HEIGHT(mode->vdisplay) << 16) | DPE_WIDTH(mode->hdisplay),
	       dpp_base + DPP_IMG_SIZE_AFT_SR);
}

static void dpe_ovl_init(struct dpe_hw_ctx *ctx, u32 xres, u32 yres)
{
	void __iomem *mctl_sys_base = ctx->base + DPE_MCTRL_SYS_OFFSET;
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];
	void __iomem *ovl0_base = ctx->base + ovl_offset[DPE_OVL0];

	dpe_set_reg(ovl0_base + OVL6_REG_DEFAULT, 0x1, 32, 0);
	dpe_set_reg(ovl0_base + OVL6_REG_DEFAULT, 0x0, 32, 0);
	dpe_set_reg(ovl0_base + OVL_SIZE,
		    (xres - 1) | ((yres - 1) << 16), 32, 0);
	dpe_set_reg(ovl0_base + OVL_BG_COLOR, 0xFF000000, 32, 0);
	dpe_set_reg(ovl0_base + OVL_DST_STARTPOS, 0x0, 32, 0);
	dpe_set_reg(ovl0_base + OVL_DST_ENDPOS,
		    (xres - 1) | ((yres - 1) << 16), 32, 0);
	dpe_set_reg(ovl0_base + OVL_GCFG, 0x10001, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_ITF, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_DBUF, 0x1, 2, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_OV, 1 << DPE_OVL0, 4, 0);
	dpe_set_reg(mctl_sys_base + MCTL_RCH_OV0_SEL, 0x8, 4, 0);
	dpe_set_reg(mctl_sys_base + MCTL_OV0_FLUSH_EN, 0xd, 4, 0);
}

static void dpe_vesa_init(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;

	dpe_set_reg(base + DPE_LDI0_OFFSET + LDI_VESA_CLK_SEL, 0, 1, 0);
}

static int dpe_mipi_ifbc_get_rect(struct drm_rect *rect)
{
	u32 xres_div = XRES_DIV_1;
	u32 yres_div = YRES_DIV_1;

	if ((rect->x2 % xres_div) > 0)
		DRM_ERROR("xres(%d) is not division_h(%d) pixel aligned!\n",
			  rect->x2, xres_div);

	if ((rect->y2 % yres_div) > 0)
		DRM_ERROR("yres(%d) is not division_v(%d) pixel aligned!\n",
			  rect->y2, yres_div);

	rect->x2 /= xres_div;
	rect->y2 /= yres_div;

	return 0;
}

static void dpe_init_ldi_pxl_div(struct dpe_hw_ctx *ctx)
{
	void __iomem *ldi_base = ctx->base + DPE_LDI0_OFFSET;

	dpe_set_reg(ldi_base + LDI_PXL0_DIV2_GT_EN, PXL0_DIV2_GT_EN_CLOSE,
		    1, 0);
	dpe_set_reg(ldi_base + LDI_PXL0_DIV4_GT_EN, PXL0_DIV4_GT_EN_CLOSE,
		    1, 0);
	dpe_set_reg(ldi_base + LDI_PXL0_GT_EN, 0x1, 1, 0);
	dpe_set_reg(ldi_base + LDI_PXL0_DSI_GT_EN, PXL0_DSI_GT_EN_1, 2, 0);
	dpe_set_reg(ldi_base + LDI_PXL0_DIVXCFG, PXL0_DIVCFG_0, 3, 0);
}

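/*
 * Configure the display buffer (DBUF) thresholds that drive QoS,
 * clock gating and DFS requests. The values are derived from the mode
 * timings following the vendor formulas; the flux-request window is a
 * fixed 50%/90% slice of the usable SRAM depth (GET_FLUX_REQ_IN/OUT).
 */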
static void dpe_dbuf_init(struct dpe_hw_ctx *ctx,
			  struct drm_display_mode *mode,
			  struct drm_display_mode *adj_mode)
{
	void __iomem *dbuf_base = ctx->base + DPE_DBUF0_OFFSET;

	int sram_valid_num = 0;
	int sram_max_mem_depth = 0;
	int sram_min_support_depth = 0;

	u32 thd_rqos_in = 0;
	u32 thd_rqos_out = 0;
	u32 thd_wqos_in = 0;
	u32 thd_wqos_out = 0;
	u32 thd_cg_in = 0;
	u32 thd_cg_out = 0;
	u32 thd_wr_wait = 0;
	u32 thd_cg_hold = 0;
	u32 thd_flux_req_befdfs_in = 0;
	u32 thd_flux_req_befdfs_out = 0;
	u32 thd_flux_req_aftdfs_in = 0;
	u32 thd_flux_req_aftdfs_out = 0;
	u32 thd_dfs_ok = 0;
	u32 dfs_ok_mask = 0;
	u32 thd_flux_req_sw_en = 1;
	u32 hfp, hbp, hsw, vfp, vbp, vsw;

	int dfs_time_min = 0;
	int depth = 0;

	hfp = mode->hsync_start - mode->hdisplay;
	hbp = mode->htotal - mode->hsync_end;
	hsw = mode->hsync_end - mode->hsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;
	vsw = mode->vsync_end - mode->vsync_start;

	if (mode->hdisplay * mode->vdisplay >= RES_4K_PHONE)
		dfs_time_min = DFS_TIME_MIN_4K;
	else
		dfs_time_min = DFS_TIME_MIN;

	depth = DBUF0_DEPTH;

	thd_cg_out = (DFS_TIME * adj_mode->clock * 1000UL * mode->hdisplay) /
		(((hsw + hbp + hfp) + mode->hdisplay) * 6 * 1000000UL);

	sram_valid_num = thd_cg_out / depth;
	thd_cg_in = (sram_valid_num + 1) * depth - 1;
	sram_max_mem_depth = (sram_valid_num + 1) * depth;

	thd_rqos_in = thd_cg_out * 85 / 100;
	thd_rqos_out = thd_cg_out;
	thd_flux_req_befdfs_in = GET_FLUX_REQ_IN(sram_max_mem_depth);
	thd_flux_req_befdfs_out = GET_FLUX_REQ_OUT(sram_max_mem_depth);

	sram_min_support_depth = dfs_time_min * mode->hdisplay /
		(1000000 / 60 / (mode->vdisplay + vbp + vfp + vsw)
		* (DBUF_WIDTH_BIT / 3 / BITS_PER_BYTE));

	thd_flux_req_aftdfs_in = (sram_max_mem_depth - sram_min_support_depth);
	thd_flux_req_aftdfs_in = thd_flux_req_aftdfs_in / 3;
	thd_flux_req_aftdfs_out = 2 * thd_flux_req_aftdfs_in;
	thd_dfs_ok = thd_flux_req_befdfs_in;

	writel(mode->hdisplay * mode->vdisplay, dbuf_base + DBUF_FRM_SIZE);
	writel(DPE_WIDTH(mode->hdisplay), dbuf_base + DBUF_FRM_HSIZE);
	writel(sram_valid_num, dbuf_base + DBUF_SRAM_VALID_NUM);

	writel((thd_rqos_out << 16) | thd_rqos_in, dbuf_base + DBUF_THD_RQOS);
	writel((thd_wqos_out << 16) | thd_wqos_in, dbuf_base + DBUF_THD_WQOS);
	writel((thd_cg_out << 16) | thd_cg_in, dbuf_base + DBUF_THD_CG);
	writel((thd_cg_hold << 16) | thd_wr_wait, dbuf_base + DBUF_THD_OTHER);
	writel((thd_flux_req_befdfs_out << 16) | thd_flux_req_befdfs_in,
	       dbuf_base + DBUF_THD_FLUX_REQ_BEF);
	writel((thd_flux_req_aftdfs_out << 16) | thd_flux_req_aftdfs_in,
	       dbuf_base + DBUF_THD_FLUX_REQ_AFT);
	writel(thd_dfs_ok, dbuf_base + DBUF_THD_DFS_OK);
	writel((dfs_ok_mask << 1) | thd_flux_req_sw_en,
	       dbuf_base + DBUF_FLUX_REQ_CTRL);

	writel(0x1, dbuf_base + DBUF_DFS_LP_CTRL);
}

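/*
 * Program the LDI (LCD interface) timing registers from the DRM mode:
 * porches, sync widths, polarities and the active area. The interface
 * itself is left disabled (LDI_CTRL bit 0 is cleared at the end).
 */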
static void dpe_ldi_init(struct dpe_hw_ctx *ctx, struct drm_display_mode *mode,
			 struct drm_display_mode *adj_mode)
{
	void __iomem *ldi_base = ctx->base + DPE_LDI0_OFFSET;
	struct drm_rect rect = {0, 0, 0, 0};
	u32 hfp, hbp, hsw, vfp, vbp, vsw;
	u32 vsync_plr = 0;
	u32 hsync_plr = 0;
	u32 pixelclk_plr = 0;
	u32 data_en_plr = 0;

	hfp = mode->hsync_start - mode->hdisplay;
	hbp = mode->htotal - mode->hsync_end;
	hsw = mode->hsync_end - mode->hsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;
	vsw = mode->vsync_end - mode->vsync_start;

	rect.x1 = 0;
	rect.y1 = 0;
	rect.x2 = mode->hdisplay;
	rect.y2 = mode->vdisplay;
	dpe_mipi_ifbc_get_rect(&rect);
	dpe_init_ldi_pxl_div(ctx);

	writel(hfp | ((hbp + DPE_WIDTH(hsw)) << 16),
	       ldi_base + LDI_DPI0_HRZ_CTRL0);
	writel(0, ldi_base + LDI_DPI0_HRZ_CTRL1);
	writel(DPE_WIDTH(rect.x2), ldi_base + LDI_DPI0_HRZ_CTRL2);
	writel(vfp | (vbp << 16), ldi_base + LDI_VRT_CTRL0);
	writel(DPE_HEIGHT(vsw), ldi_base + LDI_VRT_CTRL1);
	writel(DPE_HEIGHT(rect.y2), ldi_base + LDI_VRT_CTRL2);
	writel(vsync_plr | (hsync_plr << 1) | (pixelclk_plr << 2)
	       | (data_en_plr << 3), ldi_base + LDI_PLR_CTRL);

	dpe_set_reg(ldi_base + LDI_CTRL, LCD_RGB888, 2, 3);
	dpe_set_reg(ldi_base + LDI_CTRL, LCD_RGB, 1, 13);

	writel(vfp, ldi_base + LDI_VINACT_MSK_LEN);
	writel(0x1, ldi_base + LDI_CMD_EVENT_SEL);

	dpe_set_reg(ldi_base + LDI_DSI_CMD_MOD_CTRL, 0x1, 1, 1);
	dpe_set_reg(ldi_base + LDI_WORK_MODE, 0x1, 1, 0);
	dpe_set_reg(ldi_base + LDI_CTRL, 0x0, 1, 0);
}

static void dpe_init(struct dpe_hw_ctx *ctx,
		     struct drm_display_mode *mode,
		     struct drm_display_mode *adj_mode)
{
	dpe_dbuf_init(ctx, mode, adj_mode);
	dpe_dpp_init(ctx, mode, adj_mode);
	dpe_vesa_init(ctx);
	dpe_ldi_init(ctx, mode, adj_mode);
	dpe_qos_init(ctx);
	dpe_mctl_init(ctx);

	dpe_mctl_lock(ctx);
	dpe_ovl_init(ctx, mode->hdisplay, mode->vdisplay);
	dpe_mctl_unlock(ctx);

	/* the LDI is enabled later, from dpe_update_channel() */

	ctx->hdisplay = mode->hdisplay;
	ctx->vdisplay = mode->vdisplay;
	mdelay(60);
}

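/*
 * Pick the pixel clock for the mode. A few standard rates the PLL
 * cannot hit exactly are snapped to the nearest achievable rate, and
 * adjusted_mode->clock is updated with what the clock framework
 * actually delivers.
 */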
static void dpe_ldi_set_mode(struct dpe_hw_ctx *ctx,
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adj_mode)
{
	int ret;
	u32 clk_Hz;

	switch (mode->clock) {
	case 148500:
		clk_Hz = 144000 * 1000UL;
		break;
	case 83496:
		clk_Hz = 80000 * 1000UL;
		break;
	case 74440:
	case 74250:
		clk_Hz = 72000 * 1000UL;
		break;
	default:
		clk_Hz = mode->clock * 1000UL;
	}

	ret = clk_set_rate(ctx->dpe_pxl0_clk, clk_Hz);
	if (ret)
		DRM_ERROR("failed to set pixel clk %uHz (%d)\n",
			  clk_Hz, ret);

	adj_mode->clock = clk_get_rate(ctx->dpe_pxl0_clk) / 1000;
}

static int dpe_enable_vblank(struct drm_crtc *crtc)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;

	dpe_power_up(ctx);

	return 0;
}

static void dpe_disable_vblank(struct drm_crtc *crtc)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;

	if (!ctx->power_on)
		DRM_ERROR("power is down! vblank disable fail\n");
}

static void dpe_crtc_atomic_enable(struct drm_crtc *crtc,
				   struct drm_crtc_state *old_state)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;
	int ret;

	if (kcrtc->enable)
		return;

	ret = dpe_power_up(ctx);
	if (ret)
		return;

	kcrtc->enable = true;
	drm_crtc_vblank_on(crtc);
}

static void dpe_crtc_atomic_disable(struct drm_crtc *crtc,
				    struct drm_crtc_state *old_state)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);

	if (!kcrtc->enable)
		return;

	drm_crtc_vblank_off(crtc);
	kcrtc->enable = false;
}

static void dpe_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;
	struct drm_display_mode *mode = &crtc->state->mode;
	struct drm_display_mode *adj_mode = &crtc->state->adjusted_mode;

	dpe_power_up(ctx);
	dpe_ldi_set_mode(ctx, mode, adj_mode);
	dpe_init(ctx, mode, adj_mode);
}

static void dpe_crtc_atomic_begin(struct drm_crtc *crtc,
				  struct drm_crtc_state *old_state)
{
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct dpe_hw_ctx *ctx = kcrtc->hw_ctx;

	dpe_power_up(ctx);
}

static void dpe_crtc_atomic_flush(struct drm_crtc *crtc,
				  struct drm_crtc_state *old_state)
{
	struct drm_pending_vblank_event *event = crtc->state->event;

	if (event) {
		crtc->state->event = NULL;

		spin_lock_irq(&crtc->dev->event_lock);
		if (drm_crtc_vblank_get(crtc) == 0)
			drm_crtc_arm_vblank_event(crtc, event);
		else
			drm_crtc_send_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
}

const struct drm_crtc_helper_funcs dpe_crtc_helper_funcs = {
	.atomic_enable	= dpe_crtc_atomic_enable,
	.atomic_disable	= dpe_crtc_atomic_disable,
	.mode_set_nofb	= dpe_crtc_mode_set_nofb,
	.atomic_begin	= dpe_crtc_atomic_begin,
	.atomic_flush	= dpe_crtc_atomic_flush,
};

const struct drm_crtc_funcs dpe_crtc_funcs = {
	.destroy	= drm_crtc_cleanup,
	.set_config	= drm_atomic_helper_set_config,
	.page_flip	= drm_atomic_helper_page_flip,
	.reset		= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
	.enable_vblank	= dpe_enable_vblank,
	.disable_vblank	= dpe_disable_vblank,
};

static void dpe_unflow_handler(struct dpe_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;
	u32 tmp;

	tmp = readl(base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
	tmp &= ~BIT_LDI_UNFLOW;

	writel(tmp, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
}

static void dpe_mctl_ov_config(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *mctl_base = ctx->base + mctl_offset[DPE_OVL0];

	dpe_set_reg(mctl_base + MCTL_CTL_EN, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_TOP, 0x2, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_DBG, 0xB13A00, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_RCH0 + ch * 4, 0x1, 32, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_ITF, 0x1, 2, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_DBUF, 0x1, 2, 0);
	dpe_set_reg(mctl_base + MCTL_CTL_MUTEX_OV, 1 << DPE_OVL0, 4, 0);
}

static void dpe_mctl_sys_config(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *mctl_sys_base = ctx->base + DPE_MCTRL_SYS_OFFSET;

	dpe_set_reg(mctl_sys_base + MCTL_RCH0_OV_OEN + ch * 4,
		    (1 << 1) | 0x100, 32, 0);
	dpe_set_reg(mctl_sys_base + MCTL_RCH_OV0_SEL, 0x8, 4, 0);
	dpe_set_reg(mctl_sys_base + MCTL_RCH_OV0_SEL,
		    ch, 4, (DPE_OVL0 + 1) * 4);
	dpe_set_reg(mctl_sys_base + MCTL_OV0_FLUSH_EN, 0xd, 4, 0);
	dpe_set_reg(mctl_sys_base + MCTL_RCH0_FLUSH_EN + ch * 4, 0x1, 32, 0);
}

static void dpe_ovl_config(struct dpe_hw_ctx *ctx, const struct drm_rect *rect,
			   u32 xres, u32 yres)
{
	void __iomem *ovl0_base = ctx->base + ovl_offset[DPE_OVL0];

	dpe_set_reg(ovl0_base + OVL6_REG_DEFAULT, 0x1, 32, 0);
	dpe_set_reg(ovl0_base + OVL6_REG_DEFAULT, 0x0, 32, 0);
	dpe_set_reg(ovl0_base + OVL_SIZE,
		    (xres - 1) | ((yres - 1) << 16), 32, 0);
	dpe_set_reg(ovl0_base + OVL_BG_COLOR, 0xFF000000, 32, 0);
	dpe_set_reg(ovl0_base + OVL_DST_STARTPOS, 0x0, 32, 0);
	dpe_set_reg(ovl0_base + OVL_DST_ENDPOS,
		    (xres - 1) | ((yres - 1) << 16), 32, 0);
	dpe_set_reg(ovl0_base + OVL_GCFG, 0x10001, 32, 0);
	dpe_set_reg(ovl0_base + OVL_LAYER0_POS,
		    (rect->x1) | ((rect->y1) << 16), 32, 0);
	dpe_set_reg(ovl0_base + OVL_LAYER0_SIZE,
		    (rect->x2) | ((rect->y2) << 16), 32, 0);
	dpe_set_reg(ovl0_base + OVL_LAYER0_ALPHA, 0x00ff40ff, 32, 0);
	dpe_set_reg(ovl0_base + OVL_LAYER0_CFG, 0x1, 1, 0);
}

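/*
 * Program the read-DMA channel for a scanout buffer. X offsets and
 * the stride are expressed in DMA_ALIGN_BYTES-sized units, and the
 * hardware format code is looked up by the hal_format index.
 */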
static void dpe_rdma_config(struct dpe_hw_ctx *ctx,
			    const struct drm_rect *rect,
			    u32 display_addr, u32 hal_format,
			    u32 bpp, int ch)
{
	void __iomem *rdma_base = ctx->base + rdma_offset[ch];

	u32 aligned_pixel;
	u32 rdma_oft_x0, rdma_oft_y0, rdma_oft_x1, rdma_oft_y1;
	u32 rdma_stride, rdma_format;
	u32 stretch_size_vrt;
	u32 h_display;

	aligned_pixel = DMA_ALIGN_BYTES / bpp;
	rdma_oft_x0 = rect->x1 / aligned_pixel;
	rdma_oft_y0 = rect->y1;
	rdma_oft_x1 = rect->x2 / aligned_pixel;
	rdma_oft_y1 = rect->y2;

	rdma_format = dpe_pixel_dma_format_map[hal_format];
	stretch_size_vrt = rdma_oft_y1 - rdma_oft_y0;

	h_display = (rect->x2 - rect->x1) + 1;
	rdma_stride = (h_display * bpp) / DMA_ALIGN_BYTES;

	dpe_set_reg(rdma_base + DMA_CH_REG_DEFAULT, 0x1, 32, 0);
	dpe_set_reg(rdma_base + DMA_CH_REG_DEFAULT, 0x0, 32, 0);

	dpe_set_reg(rdma_base + DMA_OFT_X0, rdma_oft_x0, 12, 0);
	dpe_set_reg(rdma_base + DMA_OFT_Y0, rdma_oft_y0, 16, 0);
	dpe_set_reg(rdma_base + DMA_OFT_X1, rdma_oft_x1, 12, 0);
	dpe_set_reg(rdma_base + DMA_OFT_Y1, rdma_oft_y1, 16, 0);
	dpe_set_reg(rdma_base + DMA_CTRL, rdma_format, 5, 3);
	dpe_set_reg(rdma_base + DMA_CTRL, 0x0, 1, 8);
	dpe_set_reg(rdma_base + DMA_STRETCH_SIZE_VRT, stretch_size_vrt, 32, 0);
	dpe_set_reg(rdma_base + DMA_DATA_ADDR0, display_addr, 32, 0);
	dpe_set_reg(rdma_base + DMA_STRIDE0, rdma_stride, 13, 0);
	dpe_set_reg(rdma_base + DMA_CH_CTL, 0x1, 1, 0);
}

static void dpe_rdfc_config(struct dpe_hw_ctx *ctx,
			    const struct drm_rect *rect,
			    u32 hal_format, u32 bpp, int ch)
{
	void __iomem *rdfc_base = ctx->base + rdfc_offset[ch];

	u32 dfc_pix_in_num;
	u32 size_hrz;
	u32 size_vrt;
	u32 dfc_fmt;

	dfc_pix_in_num = (bpp <= 2) ? 0x1 : 0x0;
	size_hrz = rect->x2 - rect->x1;
	size_vrt = rect->y2 - rect->y1;

	dfc_fmt = dpe_pixel_dfc_format_map[hal_format];

	dpe_set_reg(rdfc_base + DFC_DISP_SIZE,
		    (size_vrt | (size_hrz << 16)), 29, 0);
	dpe_set_reg(rdfc_base + DFC_PIX_IN_NUM, dfc_pix_in_num, 1, 0);
	dpe_set_reg(rdfc_base + DFC_DISP_FMT, dfc_fmt, 5, 1);
	dpe_set_reg(rdfc_base + DFC_CTL_CLIP_EN, 0x1, 1, 0);
	dpe_set_reg(rdfc_base + DFC_ICG_MODULE, 0x1, 1, 0);
}

static void dpe_aif_config(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *aif_ch_base = ctx->base + aif_offset[ch];

	dpe_set_reg(aif_ch_base, 0x0, 1, 0);
	dpe_set_reg(aif_ch_base, mid_array[ch], 4, 4);
}

static void dpe_mif_config(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *mif_ch_base = ctx->base + mif_offset[ch];

	dpe_set_reg(mif_ch_base + MIF_CTRL1, 0x1, 1, 5);
}

static void dpe_smmu_config_off(struct dpe_hw_ctx *ctx, u32 ch)
{
	void __iomem *smmu_base = ctx->base + DPE_SMMU_OFFSET;
	int i, index;

	for (i = 0; i < dpe_smmu_chn_sid_num[ch]; i++) {
		index = dpe_smmu_smrx_idx[ch] + i;
		dpe_set_reg(smmu_base + SMMU_SMRx_NS + index * 0x4, 1, 32, 0);
	}
}

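/*
 * Point the read channel at the new framebuffer and push the whole
 * pipeline (AIF -> MIF -> RDMA -> RDFC -> overlay) under the mctl
 * mutex, then enable the LDI. Only the physically contiguous CMA
 * path is used, with the SMMU configured off for the channel. Note
 * that src_x is not applied to the scanout address, so the source
 * rectangle is assumed to start at the left edge of the framebuffer.
 */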
static void dpe_update_channel(struct kirin_plane *kplane,
			       struct drm_framebuffer *fb, int crtc_x,
			       int crtc_y, unsigned int crtc_w,
			       unsigned int crtc_h, u32 src_x,
			       u32 src_y, u32 src_w, u32 src_h)
{
	struct dpe_hw_ctx *ctx = kplane->hw_ctx;
	struct drm_gem_cma_object *obj = drm_fb_cma_get_gem_obj(fb, 0);
	struct drm_rect rect;
	u32 bpp;
	u32 stride;
	u32 display_addr;
	u32 hal_fmt;
	u32 ch = DPE_CH0;

	bpp = fb->format->cpp[0];
	stride = fb->pitches[0];

	display_addr = (u32)obj->paddr + src_y * stride;

	rect.x1 = 0;
	rect.x2 = src_w - 1;
	rect.y1 = 0;
	rect.y2 = src_h - 1;
	hal_fmt = dpe_get_format(fb->format->format);

	dpe_mctl_lock(ctx);
	dpe_aif_config(ctx, ch);
	dpe_mif_config(ctx, ch);
	dpe_smmu_config_off(ctx, ch);

	dpe_rdma_config(ctx, &rect, display_addr, hal_fmt, bpp, ch);
	dpe_rdfc_config(ctx, &rect, hal_fmt, bpp, ch);
	dpe_ovl_config(ctx, &rect, ctx->hdisplay, ctx->vdisplay);

	dpe_mctl_ov_config(ctx, ch);
	dpe_mctl_sys_config(ctx, ch);
	dpe_mctl_unlock(ctx);
	dpe_unflow_handler(ctx);

	dpe_enable_ldi(ctx);
}

static void dpe_plane_atomic_update(struct drm_plane *plane,
				    struct drm_plane_state *old_state)
{
	struct drm_plane_state *state = plane->state;
	struct kirin_plane *kplane = to_kirin_plane(plane);

	if (!state->fb) {
		state->visible = false;
		return;
	}

	dpe_update_channel(kplane, state->fb, state->crtc_x, state->crtc_y,
			   state->crtc_w, state->crtc_h,
			   state->src_x >> 16, state->src_y >> 16,
			   state->src_w >> 16, state->src_h >> 16);
}

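/*
 * Validate a plane update: the pixel format must be supported, the
 * source rectangle (16.16 fixed point) must match the CRTC rectangle
 * (no scaling), fit inside the framebuffer, and lie within the
 * adjusted mode.
 */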
static int dpe_plane_atomic_check(struct drm_plane *plane,
				  struct drm_plane_state *state)
{
	struct drm_framebuffer *fb = state->fb;
	struct drm_crtc *crtc = state->crtc;
	struct drm_crtc_state *crtc_state;
	u32 src_x = state->src_x >> 16;
	u32 src_y = state->src_y >> 16;
	u32 src_w = state->src_w >> 16;
	u32 src_h = state->src_h >> 16;
	int crtc_x = state->crtc_x;
	int crtc_y = state->crtc_y;
	u32 crtc_w = state->crtc_w;
	u32 crtc_h = state->crtc_h;
	u32 fmt;

	if (!crtc || !fb)
		return 0;

	fmt = dpe_get_format(fb->format->format);
	if (fmt == DPE_UNSUPPORT)
		return -EINVAL;

	crtc_state = drm_atomic_get_crtc_state(state->state, crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	if (src_w != crtc_w || src_h != crtc_h) {
		DRM_ERROR("scaling is not supported\n");
		return -EINVAL;
	}

	if (src_x + src_w > fb->width ||
	    src_y + src_h > fb->height)
		return -EINVAL;

	if (crtc_x < 0 || crtc_y < 0)
		return -EINVAL;

	if (crtc_x + crtc_w > crtc_state->adjusted_mode.hdisplay ||
	    crtc_y + crtc_h > crtc_state->adjusted_mode.vdisplay)
		return -EINVAL;

	return 0;
}

const struct drm_plane_helper_funcs dpe_plane_helper_funcs = {
	.atomic_check = dpe_plane_atomic_check,
	.atomic_update = dpe_plane_atomic_update,
};

const struct drm_plane_funcs dpe_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

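/*
 * IRQ handler: acknowledge all pending status bits first, then act on
 * the unmasked ones. VSYNC feeds the DRM vblank machinery; an LDI
 * underflow is reported once and masked until the next plane update
 * re-enables it via dpe_unflow_handler().
 */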
static irqreturn_t dpe_irq_handler(int irq, void *data)
{
	struct dpe_hw_ctx *ctx = data;
	struct drm_crtc *crtc = ctx->crtc;
	void __iomem *base = ctx->base;

	u32 isr_s1;
	u32 isr_s2;
	u32 isr_s2_dpp;
	u32 isr_s2_smmu;
	u32 mask;

	isr_s1 = readl(base + GLB_CPU_PDP_INTS);
	isr_s2 = readl(base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INTS);
	isr_s2_dpp = readl(base + DPE_DPP_OFFSET + DPP_INTS);
	isr_s2_smmu = readl(base + DPE_SMMU_OFFSET + SMMU_INTSTAT_NS);

	writel(isr_s2_smmu, base + DPE_SMMU_OFFSET + SMMU_INTCLR_NS);
	writel(isr_s2_dpp, base + DPE_DPP_OFFSET + DPP_INTS);
	writel(isr_s2, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INTS);
	writel(isr_s1, base + GLB_CPU_PDP_INTS);

	isr_s1 &= ~(readl(base + GLB_CPU_PDP_INT_MSK));
	isr_s2 &= ~(readl(base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK));
	isr_s2_dpp &= ~(readl(base + DPE_DPP_OFFSET + DPP_INT_MSK));

	if (isr_s2 & BIT_VSYNC)
		drm_crtc_handle_vblank(crtc);

	if (isr_s2 & BIT_LDI_UNFLOW) {
		mask = readl(base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);
		mask |= BIT_LDI_UNFLOW;
		writel(mask, base + DPE_LDI0_OFFSET + LDI_CPU_ITF_INT_MSK);

		DRM_ERROR("ldi underflow!\n");
	}

	return IRQ_HANDLED;
}

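/*
 * Map the register regions, look up the clocks from DT, set the
 * default clock rates and install the (initially disabled) interrupt
 * handler. Returns the context or an ERR_PTR on failure.
 */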
static void *dpe_hw_ctx_alloc(struct platform_device *pdev,
			      struct drm_crtc *crtc)
{
	struct dpe_hw_ctx *ctx = NULL;
	struct device *dev = &pdev->dev;
	struct device_node *np = pdev->dev.of_node;
	int ret = 0;

	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		DRM_ERROR("failed to alloc dpe_hw_ctx\n");
		return ERR_PTR(-ENOMEM);
	}

	ctx->base = of_iomap(np, 0);
	if (!ctx->base) {
		DRM_ERROR("failed to get dpe base resource.\n");
		return ERR_PTR(-ENXIO);
	}

	ctx->noc_base = of_iomap(np, 4);
	if (!ctx->noc_base) {
		DRM_ERROR("failed to get noc_base resource.\n");
		return ERR_PTR(-ENXIO);
	}

	ctx->irq = irq_of_parse_and_map(np, 0);
	if (ctx->irq <= 0) {
		DRM_ERROR("failed to get irq_pdp resource.\n");
		return ERR_PTR(-ENXIO);
	}

	DRM_INFO("dpe irq = %d.\n", ctx->irq);

	ctx->dpe_mmbuf_clk = devm_clk_get(dev, "clk_dss_axi_mm");
	if (IS_ERR(ctx->dpe_mmbuf_clk)) {
		DRM_ERROR("failed to parse dpe_mmbuf_clk\n");
		return ERR_PTR(-ENODEV);
	}

	ctx->dpe_axi_clk = devm_clk_get(dev, "aclk_dss");
	if (IS_ERR(ctx->dpe_axi_clk)) {
		DRM_ERROR("failed to parse dpe_axi_clk\n");
		return ERR_PTR(-ENODEV);
	}

	ctx->dpe_pclk_clk = devm_clk_get(dev, "pclk_dss");
	if (IS_ERR(ctx->dpe_pclk_clk)) {
		DRM_ERROR("failed to parse dpe_pclk_clk\n");
		return ERR_PTR(-ENODEV);
	}

	ctx->dpe_pri_clk = devm_clk_get(dev, "clk_edc0");
	if (IS_ERR(ctx->dpe_pri_clk)) {
		DRM_ERROR("failed to parse dpe_pri_clk\n");
		return ERR_PTR(-ENODEV);
	}

	ret = clk_set_rate(ctx->dpe_pri_clk, DEFAULT_DPE_CORE_CLK_07V_RATE);
	if (ret < 0) {
		DRM_ERROR("dpe_pri_clk clk_set_rate(%lu) failed, error=%d!\n",
			  DEFAULT_DPE_CORE_CLK_07V_RATE, ret);
		return ERR_PTR(-EINVAL);
	}

	ctx->dpe_pxl0_clk = devm_clk_get(dev, "clk_ldi0");
	if (IS_ERR(ctx->dpe_pxl0_clk)) {
		DRM_ERROR("failed to parse dpe_pxl0_clk\n");
		return ERR_PTR(-ENODEV);
	}

	ret = clk_set_rate(ctx->dpe_pxl0_clk, DPE_MAX_PXL0_CLK_144M);
	if (ret < 0) {
		DRM_ERROR("dpe_pxl0_clk clk_set_rate(%lu) failed, error=%d!\n",
			  DPE_MAX_PXL0_CLK_144M, ret);
		return ERR_PTR(-EINVAL);
	}

	ctx->crtc = crtc;
	ret = devm_request_irq(dev, ctx->irq, dpe_irq_handler,
			       IRQF_SHARED, dev->driver->name, ctx);
	if (ret)
		return ERR_PTR(-EIO);

	disable_irq(ctx->irq);

	return ctx;
}

static void dpe_hw_ctx_cleanup(void *hw_ctx)
{
	struct dpe_hw_ctx *ctx = hw_ctx;

	/* clocks and the irq are devm-managed; only the mappings remain */
	iounmap(ctx->noc_base);
	iounmap(ctx->base);
}

extern void dsi_set_output_client(struct drm_device *dev);

static void kirin_fbdev_output_poll_changed(struct drm_device *dev)
{
	dsi_set_output_client(dev);
}

static const struct drm_mode_config_funcs dpe_mode_config_funcs = {
	.fb_create = drm_gem_fb_create,
	.output_poll_changed = kirin_fbdev_output_poll_changed,
	.atomic_check = drm_atomic_helper_check,
	.atomic_commit = drm_atomic_helper_commit,
};

DEFINE_DRM_GEM_CMA_FOPS(kirin_drm_fops);

static struct drm_driver dpe_driver = {
	.driver_features	= DRIVER_GEM | DRIVER_MODESET |
				  DRIVER_ATOMIC | DRIVER_RENDER,

	.date			= "20170309",
	.fops			= &kirin_drm_fops,
	.gem_free_object_unlocked = drm_gem_cma_free_object,
	.gem_vm_ops		= &drm_gem_cma_vm_ops,
	.dumb_create		= drm_gem_cma_dumb_create_internal,
	.prime_handle_to_fd	= drm_gem_prime_handle_to_fd,
	.prime_fd_to_handle	= drm_gem_prime_fd_to_handle,
	.gem_prime_export	= drm_gem_prime_export,
	.gem_prime_import	= drm_gem_prime_import,
	.gem_prime_get_sg_table	= drm_gem_cma_prime_get_sg_table,
	.gem_prime_import_sg_table = drm_gem_cma_prime_import_sg_table,
	.gem_prime_vmap		= drm_gem_cma_prime_vmap,
	.gem_prime_vunmap	= drm_gem_cma_prime_vunmap,
	.gem_prime_mmap		= drm_gem_cma_prime_mmap,

	.name			= "kirin",
	.desc			= "Hisilicon Kirin SoCs' DRM Driver",
	.major			= 1,
	.minor			= 0,
};

const struct kirin_drm_data dpe_driver_data = {
	.register_connects = true,
	.num_planes = DPE_CH_NUM,
	.prim_plane = DPE_CH0,

	.channel_formats = dpe_channel_formats,
	.channel_formats_cnt = ARRAY_SIZE(dpe_channel_formats),
	.config_max_width = 4096,
	.config_max_height = 4096,

	.driver = &dpe_driver,

	.crtc_helper_funcs = &dpe_crtc_helper_funcs,
	.crtc_funcs = &dpe_crtc_funcs,
	.plane_helper_funcs = &dpe_plane_helper_funcs,
	.plane_funcs = &dpe_plane_funcs,
	.mode_config_funcs = &dpe_mode_config_funcs,

	.alloc_hw_ctx = dpe_hw_ctx_alloc,
	.cleanup_hw_ctx = dpe_hw_ctx_cleanup,
};
1234