1 /*
2 * Allwinner SoCs g2d driver.
3 *
4 * Copyright (C) 2016 Allwinner.
5 *
6 * This file is licensed under the terms of the GNU General Public
7 * License version 2. This program is licensed "as is" without any
8 * warranty of any kind, whether express or implied.
9 */
10
11 #include "g2d_driver_i.h"
12 #include <sunxi-g2d.h>
13
14 #define CREATE_TRACE_POINTS
15 #include "g2d_trace.h"
16
17 #if defined(CONFIG_SUNXI_SYNCFENCE)
18 extern int syncfence_init(void);
19 extern void syncfence_exit(void);
20 #endif
21
/* alloc based on 4K byte */
/*
 * Round x up to the next 4 KiB boundary.  The argument is parenthesized
 * so the macro expands correctly for compound expressions such as
 * G2D_BYTE_ALIGN(a | b) (the original unparenthesized form bound the
 * internal '+' tighter than the caller's operator).
 */
#define G2D_BYTE_ALIGN(x) ((((x) + (4*1024-1)) >> 12) << 12)
/* Take g2d work memory from the DMA coherent pool (see g2d_malloc). */
#define ALLOC_USING_DMA
/* slots handed out by g2d_mem_request()/g2d_mem_release() */
static struct info_mem g2d_mem[MAX_G2D_MEM_INDEX];
static int g2d_mem_sel;	/* NOTE(review): not referenced in this chunk */
/* current blit scan order; read and written under global_lock */
static enum g2d_scan_order scan_order;
static struct mutex global_lock;
29
/* char-device plumbing (class/cdev/devno/device) */
static struct class *g2d_class;
static struct cdev *g2d_cdev;
static dev_t devid;
static struct device *g2d_dev;
/* device passed to dma_buf_attach() in g2d_dma_map() */
static struct device *dmabuf_dev;
/* wait queue + finish flag shared between submitters and the irq handler */
__g2d_drv_t g2d_ext_hd;
/* global driver info: clocks, io base, open count, mutex */
__g2d_info_t para;

/* runtime switches toggled through the sysfs "attr" group below */
u32 dbg_info;
u32 time_info;
40
/* Book-keeping for one mapped dma-buf; filled in by g2d_dma_map(). */
struct dmabuf_item {
	struct list_head list;
	int fd;				/* userspace fd the buffer came from */
	struct dma_buf *buf;
	struct dma_buf_attachment *attachment;
	struct sg_table *sgt;
	dma_addr_t dma_addr;		/* device address of the first sg entry */
	unsigned long long id;
};
50
#if !defined(CONFIG_OF)
/* Fallback MMIO window + irq line when there is no device tree. */
static struct resource g2d_resource[2] = {

	[0] = {
	       .start = SUNXI_MP_PBASE,
	       .end = SUNXI_MP_PBASE + 0x000fffff,	/* 1 MiB register window */
	       .flags = IORESOURCE_MEM,
	       },
	[1] = {
	       .start = INTC_IRQNO_DE_MIX,
	       .end = INTC_IRQNO_DE_MIX,
	       .flags = IORESOURCE_IRQ,
	       },
};
#endif
66
/* sysfs show for "debug": report the global debug flag as "debug=<n>". */
static ssize_t g2d_debug_show(struct device *dev,
			      struct device_attribute *attr, char *buf)
{
	return sprintf(buf, "debug=%d\n", dbg_info);
}
72
g2d_debug_store(struct device * dev,struct device_attribute * attr,const char * buf,size_t count)73 static ssize_t g2d_debug_store(struct device *dev,
74 struct device_attribute *attr,
75 const char *buf, size_t count)
76 {
77 if (strncasecmp(buf, "1", 1) == 0)
78 dbg_info = 1;
79 else if (strncasecmp(buf, "0", 1) == 0)
80 dbg_info = 0;
81 else
82 WARNING("Error input!\n");
83
84 return count;
85 }
86
/* /sys/.../attr/debug (rw 0660): toggles verbose logging via dbg_info */
static DEVICE_ATTR(debug, 0660,
		   g2d_debug_show, g2d_debug_store);
89
/* sysfs show for "func_runtime": report the timing-statistics flag. */
static ssize_t g2d_func_runtime_show(struct device *dev,
				     struct device_attribute *attr, char *buf)
{
	return sprintf(buf, "func_runtime=%d\n", time_info);
}
95
g2d_func_runtime_store(struct device * dev,struct device_attribute * attr,const char * buf,size_t count)96 static ssize_t g2d_func_runtime_store(struct device *dev,
97 struct device_attribute *attr,
98 const char *buf, size_t count)
99 {
100 if (strncasecmp(buf, "1", 1) == 0)
101 time_info = 1;
102 else if (strncasecmp(buf, "0", 1) == 0)
103 time_info = 0;
104 else
105 WARNING("Error input!\n");
106
107 return count;
108 }
109
/* /sys/.../attr/func_runtime (rw 0660): toggles timing stats via time_info */
static DEVICE_ATTR(func_runtime, 0660,
		   g2d_func_runtime_show, g2d_func_runtime_store);
112
g2d_standby_store(struct device * dev,struct device_attribute * attr,const char * buf,size_t count)113 static ssize_t g2d_standby_store(struct device *dev,
114 struct device_attribute *attr,
115 const char *buf, size_t count)
116 {
117 if (strncasecmp(buf, "suspend", 7) == 0) {
118 pr_info("[G2D]:self suspend\n");
119 g2d_suspend(NULL);
120 } else if (strncasecmp(buf, "resume", 6) == 0) {
121 pr_info("[G2D]:self resume\n");
122 g2d_resume(NULL);
123 } else
124 WARNING("Error input!\n");
125
126 return count;
127 }
128
/* /sys/.../attr/standby (write-only): accepts "suspend"/"resume" */
static DEVICE_ATTR(standby, 0660, NULL, g2d_standby_store);
130
/* attributes exported together under the "attr" sysfs group */
static struct attribute *g2d_attributes[] = {
	&dev_attr_debug.attr,
	&dev_attr_func_runtime.attr,
	&dev_attr_standby.attr,
	NULL
};
137
/* group registered on the g2d device; appears as the "attr" directory */
static struct attribute_group g2d_attribute_group = {
	.name = "attr",
	.attrs = g2d_attributes
};
142
g2d_clock_enable(const __g2d_info_t * info)143 static int g2d_clock_enable(const __g2d_info_t *info)
144 {
145 int ret = 0;
146 if (info->reset) {
147 ret = reset_control_deassert(info->reset);
148 if (ret != 0) {
149 pr_err("[G2D] deassert error\n");
150 return ret;
151 }
152 }
153
154 if (info->bus_clk) {
155 ret |= clk_prepare_enable(info->bus_clk);
156 }
157
158 if (info->clk) {
159 if (info->clk_parent) {
160 clk_set_parent(info->clk, info->clk_parent);
161 }
162 ret |= clk_prepare_enable(info->clk);
163 }
164 if (info->mbus_clk) {
165 ret |= clk_prepare_enable(info->mbus_clk);
166 }
167 if (ret != 0)
168 pr_err("[G2D] clock enable error\n");
169
170 return ret;
171 }
172
g2d_clock_disable(const __g2d_info_t * info)173 static int g2d_clock_disable(const __g2d_info_t *info)
174 {
175 if (info->clk)
176 clk_disable(info->clk);
177 if (info->bus_clk)
178 clk_disable(info->bus_clk);
179 if (info->mbus_clk)
180 clk_disable(info->mbus_clk);
181 if (info->reset)
182 reset_control_assert(info->reset);
183 return 0;
184 }
185
186 #ifdef G2D_V2X_SUPPORT
g2d_dma_map(int fd,struct dmabuf_item * item)187 static int g2d_dma_map(int fd, struct dmabuf_item *item)
188 {
189 struct dma_buf *dmabuf;
190 struct dma_buf_attachment *attachment;
191 struct sg_table *sgt;
192 int ret = -1;
193
194 G2D_TRACE_BEGIN("g2d_dma_map");
195 if (fd < 0) {
196 pr_err("[G2D]dma_buf_id(%d) is invalid\n", fd);
197 goto exit;
198 }
199 dmabuf = dma_buf_get(fd);
200 if (IS_ERR(dmabuf)) {
201 pr_err("[G2D]dma_buf_get failed, fd=%d\n", fd);
202 goto exit;
203 }
204
205 attachment = dma_buf_attach(dmabuf, dmabuf_dev);
206 if (IS_ERR(attachment)) {
207 pr_err("[G2D]dma_buf_attach failed\n");
208 goto err_buf_put;
209 }
210 sgt = dma_buf_map_attachment(attachment, DMA_TO_DEVICE);
211 if (IS_ERR_OR_NULL(sgt)) {
212 pr_err("[G2D]dma_buf_map_attachment failed\n");
213 goto err_buf_detach;
214 }
215
216 item->fd = fd;
217 item->buf = dmabuf;
218 item->sgt = sgt;
219 item->attachment = attachment;
220 item->dma_addr = sg_dma_address(sgt->sgl);
221 ret = 0;
222 goto exit;
223
224 err_buf_detach:
225 dma_buf_detach(dmabuf, attachment);
226 err_buf_put:
227 dma_buf_put(dmabuf);
228 exit:
229 G2D_TRACE_END("");
230 return ret;
231 }
232
g2d_dma_unmap(struct dmabuf_item * item)233 static void g2d_dma_unmap(struct dmabuf_item *item)
234 {
235 G2D_TRACE_BEGIN("g2d_dma_unmap");
236 dma_buf_unmap_attachment(item->attachment, item->sgt, DMA_TO_DEVICE);
237 dma_buf_detach(item->buf, item->attachment);
238 dma_buf_put(item->buf);
239 G2D_TRACE_END("");
240 }
241 #endif
242
static struct g2d_format_attr fmt_attr_tbl[] = {
	/*
	 * One row per supported G2D_FORMAT_*; columns in order:
	 *   format, bits,
	 *   hor_rsample(u,v) - horizontal chroma subsampling divisors,
	 *   ver_rsample(u,v) - vertical chroma subsampling divisors,
	 *   uvc              - U/V interleaved in a single plane,
	 *   interleave,
	 *   factor, div
	 * NOTE(review): factor/div appear to encode bytes-per-pixel as a
	 * ratio (4/1 for ARGB8888, 3/2 for YUV420 planar) — confirm
	 * against the consumers of struct g2d_format_attr.
	 */
	{ G2D_FORMAT_ARGB8888, 8, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_ABGR8888, 8, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_RGBA8888, 8, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_BGRA8888, 8, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_XRGB8888, 8, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_XBGR8888, 8, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_RGBX8888, 8, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_BGRX8888, 8, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_RGB888, 8, 1, 1, 1, 1, 0, 1, 3, 1},
	{ G2D_FORMAT_BGR888, 8, 1, 1, 1, 1, 0, 1, 3, 1},
	{ G2D_FORMAT_RGB565, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_BGR565, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_ARGB4444, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_ABGR4444, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_RGBA4444, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_BGRA4444, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_ARGB1555, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_ABGR1555, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_RGBA5551, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_BGRA5551, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_ARGB2101010, 10, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_ABGR2101010, 10, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_RGBA1010102, 10, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_BGRA1010102, 10, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_IYUV422_V0Y1U0Y0, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_IYUV422_Y1V0Y0U0, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_IYUV422_U0Y1V0Y0, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_IYUV422_Y1U0Y0V0, 8, 1, 1, 1, 1, 0, 1, 2, 1},
	{ G2D_FORMAT_YUV422_PLANAR, 8, 2, 2, 1, 1, 0, 0, 2, 1},
	{ G2D_FORMAT_YUV420_PLANAR, 8, 2, 2, 2, 2, 0, 0, 3, 2},
	{ G2D_FORMAT_YUV411_PLANAR, 8, 4, 4, 1, 1, 0, 0, 3, 2},
	{ G2D_FORMAT_YUV422UVC_U1V1U0V0, 8, 2, 2, 1, 1, 1, 0, 2, 1},
	{ G2D_FORMAT_YUV422UVC_V1U1V0U0, 8, 2, 2, 1, 1, 1, 0, 2, 1},
	{ G2D_FORMAT_YUV420UVC_U1V1U0V0, 8, 2, 2, 2, 2, 1, 0, 3, 2},
	{ G2D_FORMAT_YUV420UVC_V1U1V0U0, 8, 2, 2, 2, 2, 1, 0, 3, 2},
	{ G2D_FORMAT_YUV411UVC_U1V1U0V0, 8, 4, 4, 1, 1, 1, 0, 3, 2},
	{ G2D_FORMAT_YUV411UVC_V1U1V0U0, 8, 4, 4, 1, 1, 1, 0, 3, 2},
	{ G2D_FORMAT_Y8, 8, 1, 1, 1, 1, 0, 0, 1, 1},
	{ G2D_FORMAT_YVU10_444, 10, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_YUV10_444, 10, 1, 1, 1, 1, 0, 1, 4, 1},
	{ G2D_FORMAT_YVU10_P210, 10, 2, 2, 1, 1, 0, 0, 4, 1},
	{ G2D_FORMAT_YVU10_P010, 10, 2, 2, 2, 2, 0, 0, 3, 1},
};
296
g2d_set_info(g2d_image_enh * g2d_img,struct dmabuf_item * item)297 s32 g2d_set_info(g2d_image_enh *g2d_img, struct dmabuf_item *item)
298 {
299 s32 ret = -1;
300 u32 i = 0;
301 u32 len = ARRAY_SIZE(fmt_attr_tbl);
302 u32 y_width, y_height, u_width, u_height;
303 u32 y_pitch, u_pitch;
304 u32 y_size, u_size;
305
306 g2d_img->laddr[0] = item->dma_addr;
307
308 if (g2d_img->format >= G2D_FORMAT_MAX) {
309 pr_err("%s, format 0x%x is out of range\n", __func__,
310 g2d_img->format);
311 goto exit;
312 }
313
314 for (i = 0; i < len; ++i) {
315
316 if (fmt_attr_tbl[i].format == g2d_img->format) {
317 y_width = g2d_img->width;
318 y_height = g2d_img->height;
319 u_width = y_width/fmt_attr_tbl[i].hor_rsample_u;
320 u_height = y_height/fmt_attr_tbl[i].ver_rsample_u;
321
322 y_pitch = G2DALIGN(y_width, g2d_img->align[0]);
323 u_pitch = G2DALIGN(u_width * (fmt_attr_tbl[i].uvc + 1),
324 g2d_img->align[1]);
325
326 y_size = y_pitch * y_height;
327 u_size = u_pitch * u_height;
328 g2d_img->laddr[1] = g2d_img->laddr[0] + y_size;
329 g2d_img->laddr[2] = g2d_img->laddr[0] + y_size + u_size;
330
331 if (g2d_img->format == G2D_FORMAT_YUV420_PLANAR) {
332 /* v */
333 g2d_img->laddr[1] = g2d_img->laddr[0] + y_size + u_size;
334 g2d_img->laddr[2] = g2d_img->laddr[0] + y_size; /* u */
335 }
336
337 ret = 0;
338 break;
339 }
340 }
341 if (ret != 0)
342 pr_err("%s, format 0x%x is invalid\n", __func__,
343 g2d_img->format);
344 exit:
345 return ret;
346
347 }
348
drv_g2d_init(void)349 __s32 drv_g2d_init(void)
350 {
351 g2d_init_para init_para;
352
353 DBG("drv_g2d_init\n");
354 init_para.g2d_base = (unsigned long) para.io;
355 memset(&g2d_ext_hd, 0, sizeof(__g2d_drv_t));
356 init_waitqueue_head(&g2d_ext_hd.queue);
357 g2d_init(&init_para);
358
359 return 0;
360 }
361
/*
 * g2d_malloc - allocate @bytes_num bytes of g2d work memory.
 * @bytes_num: requested size in bytes; zero is rejected.
 * @phy_addr:  out parameter receiving the physical/DMA address.
 *
 * Returns the kernel virtual address of the allocation, or NULL on
 * failure.  With ALLOC_USING_DMA the size is rounded up to 4 KiB and
 * taken from the DMA coherent pool; otherwise whole pages are taken
 * from the page allocator and the physical address is derived with
 * virt_to_phys().
 *
 * NOTE(review): in the DMA path @phy_addr is written through a
 * (dma_addr_t *) cast — this assumes sizeof(uintptr_t) ==
 * sizeof(dma_addr_t); verify on configs with 64-bit dma_addr_t and
 * 32-bit pointers.
 */
void *g2d_malloc(__u32 bytes_num, uintptr_t *phy_addr)
{
	void *address = NULL;

#ifdef ALLOC_USING_DMA
	u32 actual_bytes;

	if (bytes_num != 0) {
		/* round up to the driver's 4 KiB allocation granule */
		actual_bytes = G2D_BYTE_ALIGN(bytes_num);

		address = dma_alloc_coherent(para.dev, actual_bytes,
					     (dma_addr_t *) phy_addr,
					     GFP_KERNEL);
		if (address) {
			DBG("dma_alloc_coherent ok, address=0x%p, size=0x%x\n",
			    (void *)(*(unsigned long *)phy_addr), bytes_num);
			return address;
		}
		ERR("dma_alloc_coherent fail, size=0x%x\n", bytes_num);
		return NULL;
	}
	ERR("%s size is zero\n", __func__);
#else
	unsigned map_size = 0;
	struct page *page;

	if (bytes_num != 0) {
		map_size = PAGE_ALIGN(bytes_num);
		page = alloc_pages(GFP_KERNEL, get_order(map_size));
		if (page != NULL) {
			address = page_address(page);
			if (address == NULL) {
				/* highmem page with no mapping: give it back */
				free_pages((unsigned long)(page),
					   get_order(map_size));
				ERR("page_address fail!\n");
				return NULL;
			}
			*phy_addr = virt_to_phys(address);
			return address;
		}
		ERR("alloc_pages fail!\n");
		return NULL;
	}
	ERR("%s size is zero\n", __func__);
#endif

	return NULL;
}
410
/*
 * g2d_free - release memory obtained from g2d_malloc().
 * @virt_addr: kernel virtual address returned by g2d_malloc()
 * @phy_addr:  physical/DMA address returned via g2d_malloc()'s out param
 * @size:      the size originally requested (re-aligned here to match
 *             the rounded-up allocation)
 *
 * NOTE(review): the DMA path aligns with PAGE_ALIGN while g2d_malloc
 * used G2D_BYTE_ALIGN — identical only when PAGE_SIZE == 4 KiB; confirm
 * on larger-page configs.
 */
void g2d_free(void *virt_addr, void *phy_addr, unsigned int size)
{
#ifdef ALLOC_USING_DMA

	u32 actual_bytes;

	actual_bytes = PAGE_ALIGN(size);
	if (phy_addr && virt_addr)
		dma_free_coherent(para.dev, actual_bytes, virt_addr,
				  (dma_addr_t) phy_addr);
#else
	unsigned map_size = PAGE_ALIGN(size);
	unsigned page_size = map_size;

	if (virt_addr == NULL)
		return;

	free_pages((unsigned long)virt_addr, get_order(page_size));
#endif
}
431
g2d_get_free_mem_index(void)432 __s32 g2d_get_free_mem_index(void)
433 {
434 __u32 i = 0;
435
436 for (i = 0; i < MAX_G2D_MEM_INDEX; i++) {
437 if (g2d_mem[i].b_used == 0)
438 return i;
439 }
440 return -1;
441 }
442
/*
 * Allocate a zeroed buffer of @size bytes and record it in the first
 * free g2d_mem[] slot.
 *
 * Returns the slot index on success, -EINVAL when no slot is free, or
 * -ENOMEM when the allocation fails.
 */
int g2d_mem_request(__u32 size)
{
	__s32 sel;
	uintptr_t phy_addr;
	void *virt;

	sel = g2d_get_free_mem_index();
	if (sel < 0) {
		ERR("g2d_get_free_mem_index fail!\n");
		return -EINVAL;
	}

	virt = g2d_malloc(size, &phy_addr);
	if (virt == NULL) {
		ERR("fail to alloc reserved memory!\n");
		return -ENOMEM;
	}

	memset(virt, 0, size);
	g2d_mem[sel].virt_addr = virt;
	g2d_mem[sel].phy_addr = phy_addr;
	g2d_mem[sel].mem_len = size;
	g2d_mem[sel].b_used = 1;

	INFO("map_g2d_memory[%d]: pa=%08lx va=%p size:%x\n", sel,
	     g2d_mem[sel].phy_addr, g2d_mem[sel].virt_addr, size);
	return sel;
}
470
/*
 * Free the reserved-memory slot @sel previously handed out by
 * g2d_mem_request().
 *
 * Bug fix: @sel arrives from the ioctl path and was used to index
 * g2d_mem[] without a bounds check, so an out-of-range value read and
 * cleared memory past the array.  It is now validated first.
 *
 * Returns 0 on success, -EINVAL for an invalid or unused slot.
 */
int g2d_mem_release(__u32 sel)
{
	if (sel >= MAX_G2D_MEM_INDEX) {
		ERR("invalid mem index in g2d_mem_release,%d\n", sel);
		return -EINVAL;
	}

	if (g2d_mem[sel].b_used == 0) {
		ERR("mem not used in g2d_mem_release,%d\n", sel);
		return -EINVAL;
	}

	g2d_free((void *)g2d_mem[sel].virt_addr, (void *)g2d_mem[sel].phy_addr,
		 g2d_mem[sel].mem_len);
	memset(&g2d_mem[sel], 0, sizeof(struct info_mem));

	return 0;
}
484
/*
 * mmap handler: maps vm_pgoff — interpreted as a raw page frame number
 * supplied by userspace — into the caller's address space with
 * write-combined attributes.
 *
 * NOTE(review): the pfn comes straight from userspace and is not
 * validated against the buffers this driver handed out; as written any
 * opener can map arbitrary physical memory.  Restrict the pfn range to
 * g2d_mem[] allocations before shipping.
 */
int g2d_mmap(struct file *file, struct vm_area_struct *vma)
{
	unsigned long mypfn = vma->vm_pgoff;
	unsigned long vmsize = vma->vm_end - vma->vm_start;

	/* offset was consumed as the pfn itself; clear it for the remap */
	vma->vm_pgoff = 0;

	vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);
	if (remap_pfn_range(vma, vma->vm_start, mypfn,
			    vmsize, vma->vm_page_prot))
		return -EAGAIN;

	return 0;
}
499
g2d_open(struct inode * inode,struct file * file)500 int g2d_open(struct inode *inode, struct file *file)
501 {
502 mutex_lock(¶.mutex);
503 para.user_cnt++;
504 if (para.user_cnt == 1) {
505 g2d_clock_enable(¶);
506 para.opened = true;
507 #ifdef G2D_V2X_SUPPORT
508 g2d_bsp_open();
509 #endif
510 }
511
512 mutex_unlock(¶.mutex);
513 return 0;
514 }
515
g2d_release(struct inode * inode,struct file * file)516 int g2d_release(struct inode *inode, struct file *file)
517 {
518 mutex_lock(¶.mutex);
519 para.user_cnt--;
520 if (para.user_cnt == 0) {
521 g2d_clock_disable(¶);
522 para.opened = false;
523 #ifdef G2D_V2X_SUPPORT
524 g2d_bsp_close();
525 #endif
526 }
527
528 mutex_unlock(¶.mutex);
529
530 mutex_lock(&global_lock);
531 scan_order = G2D_SM_TDLR;
532 mutex_unlock(&global_lock);
533
534 return 0;
535 }
536
/*
 * Interrupt handler: decide which engine finished (mixer vs. rotate on
 * V2X hardware, mixer vs. command queue otherwise), clear/reset it and
 * wake whoever blocks in g2d_wait_cmd_finish().
 */
irqreturn_t g2d_handle_irq(int irq, void *dev_id)
{
#ifdef G2D_V2X_SUPPORT
	__u32 mixer_irq_flag, rot_irq_flag;

	mixer_irq_flag = mixer_irq_query();
	rot_irq_flag = rot_irq_query();

	/* NOTE(review): a query result of 0 is treated as "this engine
	 * raised the interrupt" — confirm against the semantics of
	 * mixer_irq_query()/rot_irq_query().
	 */
	if (mixer_irq_flag == 0) {
		g2d_mixer_reset();
		g2d_ext_hd.finish_flag = 1;
		wake_up(&g2d_ext_hd.queue);
	} else if (rot_irq_flag == 0) {
		g2d_rot_reset();
		g2d_ext_hd.finish_flag = 1;
		wake_up(&g2d_ext_hd.queue);
	}
#else
	__u32 mod_irq_flag, cmd_irq_flag;

	mod_irq_flag = mixer_get_irq();
	cmd_irq_flag = mixer_get_irq0();
	if (mod_irq_flag & G2D_FINISH_IRQ) {
		mixer_clear_init();
		g2d_ext_hd.finish_flag = 1;
		wake_up(&g2d_ext_hd.queue);
	} else if (cmd_irq_flag & G2D_FINISH_IRQ) {
		mixer_clear_init0();
		g2d_ext_hd.finish_flag = 1;
		wake_up(&g2d_ext_hd.queue);
	}
#endif
	return IRQ_HANDLED;
}
571
/*
 * Hardware-layer init: hand the MMIO register base to the mixer.
 * (Parameter renamed locally so it no longer shadows the file-scope
 * `para`.)
 */
int g2d_init(g2d_init_para *init_para)
{
	mixer_set_reg_base(init_para->g2d_base);

	return 0;
}
578
/* Hardware-layer teardown hook; nothing to release, always succeeds. */
int g2d_exit(void)
{
	return 0;
}
585
/*
 * Block until the irq handler sets finish_flag or 100 ms pass.
 *
 * On timeout the engine is reset (full bsp reset on V2X, irq-state
 * clear otherwise), the flag is forced so other waiters are released,
 * and -1 is returned.  On success the flag is consumed (cleared) and
 * 0 is returned.
 */
int g2d_wait_cmd_finish(void)
{
	long timeout = 100; /* 100ms */

	timeout = wait_event_timeout(g2d_ext_hd.queue,
				     g2d_ext_hd.finish_flag == 1,
				     msecs_to_jiffies(timeout));
	if (timeout <= 0) {
#ifdef G2D_V2X_SUPPORT
		g2d_bsp_reset();
#else
		mixer_clear_init();
		mixer_clear_init0();
#endif
		pr_warn("G2D irq pending flag timeout\n");
		/* force the flag and wake any other waiter stuck behind us */
		g2d_ext_hd.finish_flag = 1;
		wake_up(&g2d_ext_hd.queue);
		return -1;
	}
	g2d_ext_hd.finish_flag = 0;

	return 0;
}
609
/*
 * g2d_blit - legacy blit entry point: validate and clamp the source
 * rectangle and destination position against their images, then submit
 * the job to the mixer.
 *
 * Returns 0 on success, -EINVAL for rectangles lying completely outside
 * their image, or the mixer_blt() result.
 */
int g2d_blit(g2d_blt *para)
{
	__s32 err = 0;
	__u32 tmp_w, tmp_h;

	/* 90/270 degree rotation swaps the effective dest width/height */
	if ((para->flag & G2D_BLT_ROTATE90) ||
	    (para->flag & G2D_BLT_ROTATE270)) {
		tmp_w = para->src_rect.h;
		tmp_h = para->src_rect.w;
	} else {
		tmp_w = para->src_rect.w;
		tmp_h = para->src_rect.h;
	}
	/* check the parameter valid: reject rects fully off-image on
	 * either axis (negative offset larger than the rect, or positive
	 * offset past the image edge)
	 */
	if (((para->src_rect.x < 0) &&
	     ((-para->src_rect.x) > para->src_rect.w)) ||
	    ((para->src_rect.y < 0) &&
	     ((-para->src_rect.y) > para->src_rect.h)) ||
	    ((para->dst_x < 0) &&
	     ((-para->dst_x) > tmp_w)) ||
	    ((para->dst_y < 0) &&
	     ((-para->dst_y) > tmp_h)) ||
	    ((para->src_rect.x > 0) &&
	     (para->src_rect.x > para->src_image.w - 1)) ||
	    ((para->src_rect.y > 0) &&
	     (para->src_rect.y > para->src_image.h - 1)) ||
	    ((para->dst_x > 0) &&
	     (para->dst_x > para->dst_image.w - 1)) ||
	    ((para->dst_y > 0) && (para->dst_y > para->dst_image.h - 1))) {
		pr_warn("invalid blit parameter setting");
		return -EINVAL;
	}
	/* clamp the partially off-screen source rect to the source image */
	if (((para->src_rect.x < 0) &&
	     ((-para->src_rect.x) < para->src_rect.w))) {
		para->src_rect.w = para->src_rect.w + para->src_rect.x;
		para->src_rect.x = 0;
	} else if ((para->src_rect.x + para->src_rect.w)
		   > para->src_image.w) {
		para->src_rect.w = para->src_image.w - para->src_rect.x;
	}
	if (((para->src_rect.y < 0) &&
	     ((-para->src_rect.y) < para->src_rect.h))) {
		para->src_rect.h = para->src_rect.h + para->src_rect.y;
		para->src_rect.y = 0;
	} else if ((para->src_rect.y + para->src_rect.h)
		   > para->src_image.h) {
		para->src_rect.h = para->src_image.h - para->src_rect.y;
	}

	/* clamp against the destination; NOTE(review): this writes
	 * src_rect using the rotated dims (tmp_w/tmp_h) — confirm this is
	 * the intended behavior for the rotate-90/270 cases
	 */
	if (((para->dst_x < 0) && ((-para->dst_x) < tmp_w))) {
		para->src_rect.w = tmp_w + para->dst_x;
		para->src_rect.x = (-para->dst_x);
		para->dst_x = 0;
	} else if ((para->dst_x + tmp_w) > para->dst_image.w) {
		para->src_rect.w = para->dst_image.w - para->dst_x;
	}
	if (((para->dst_y < 0) && ((-para->dst_y) < tmp_h))) {
		para->src_rect.h = tmp_h + para->dst_y;
		para->src_rect.y = (-para->dst_y);
		para->dst_y = 0;
	} else if ((para->dst_y + tmp_h) > para->dst_image.h)
		para->src_rect.h = para->dst_image.h - para->dst_y;

	g2d_ext_hd.finish_flag = 0;

	/* Add support inverted order copy, however,
	 * hardware have a bug when reciving y coordinate,
	 * it use (y + height) rather than (y) on inverted
	 * order mode, so here adjust it before pass it to hardware.
	 */
	mutex_lock(&global_lock);
	if (scan_order > G2D_SM_TDRL)
		para->dst_y += para->src_rect.h;
	mutex_unlock(&global_lock);

	err = mixer_blt(para, scan_order);

	return err;
}
689
/*
 * g2d_fill - legacy fill entry point: validate and clamp the target
 * rectangle to the destination image, then submit to the mixer.
 *
 * Returns 0 on success, -EINVAL for rectangles fully off-image, or the
 * mixer_fillrectangle() result.
 */
int g2d_fill(g2d_fillrect *para)
{
	__s32 err = 0;

	/* check the parameter valid: reject rects fully outside the image */
	if (((para->dst_rect.x < 0) &&
	     ((-para->dst_rect.x) > para->dst_rect.w)) ||
	    ((para->dst_rect.y < 0) &&
	     ((-para->dst_rect.y) > para->dst_rect.h)) ||
	    ((para->dst_rect.x > 0) &&
	     (para->dst_rect.x > para->dst_image.w - 1)) ||
	    ((para->dst_rect.y > 0) &&
	     (para->dst_rect.y > para->dst_image.h - 1))) {
		pr_warn("invalid fillrect parameter setting");
		return -EINVAL;
	}
	/* clamp a partially off-screen rect into the image bounds */
	if (((para->dst_rect.x < 0) &&
	     ((-para->dst_rect.x) < para->dst_rect.w))) {
		para->dst_rect.w = para->dst_rect.w + para->dst_rect.x;
		para->dst_rect.x = 0;
	} else if ((para->dst_rect.x + para->dst_rect.w)
		   > para->dst_image.w) {
		para->dst_rect.w = para->dst_image.w - para->dst_rect.x;
	}
	if (((para->dst_rect.y < 0) &&
	     ((-para->dst_rect.y) < para->dst_rect.h))) {
		para->dst_rect.h = para->dst_rect.h + para->dst_rect.y;
		para->dst_rect.y = 0;
	} else if ((para->dst_rect.y + para->dst_rect.h)
		   > para->dst_image.h)
		para->dst_rect.h = para->dst_image.h - para->dst_rect.y;

	g2d_ext_hd.finish_flag = 0;
	err = mixer_fillrectangle(para);

	return err;
}
727
/*
 * g2d_stretchblit - legacy scaling blit: validate and clamp both the
 * source and destination rectangles to their images, then submit to
 * the mixer.
 *
 * Returns 0 on success, -EINVAL for rectangles fully off-image, or the
 * mixer_stretchblt() result.
 */
int g2d_stretchblit(g2d_stretchblt *para)
{
	__s32 err = 0;

	/* check the parameter valid: reject rects fully off their image */
	if (((para->src_rect.x < 0) &&
	     ((-para->src_rect.x) > para->src_rect.w)) ||
	    ((para->src_rect.y < 0) &&
	     ((-para->src_rect.y) > para->src_rect.h)) ||
	    ((para->dst_rect.x < 0) &&
	     ((-para->dst_rect.x) > para->dst_rect.w)) ||
	    ((para->dst_rect.y < 0) &&
	     ((-para->dst_rect.y) > para->dst_rect.h)) ||
	    ((para->src_rect.x > 0) &&
	     (para->src_rect.x > para->src_image.w - 1)) ||
	    ((para->src_rect.y > 0) &&
	     (para->src_rect.y > para->src_image.h - 1)) ||
	    ((para->dst_rect.x > 0) &&
	     (para->dst_rect.x > para->dst_image.w - 1)) ||
	    ((para->dst_rect.y > 0) &&
	     (para->dst_rect.y > para->dst_image.h - 1))) {
		pr_warn("invalid stretchblit parameter setting");
		return -EINVAL;
	}
	/* clamp the source rect into the source image */
	if (((para->src_rect.x < 0) &&
	     ((-para->src_rect.x) < para->src_rect.w))) {
		para->src_rect.w = para->src_rect.w + para->src_rect.x;
		para->src_rect.x = 0;
	} else if ((para->src_rect.x + para->src_rect.w)
		   > para->src_image.w) {
		para->src_rect.w = para->src_image.w - para->src_rect.x;
	}
	if (((para->src_rect.y < 0) &&
	     ((-para->src_rect.y) < para->src_rect.h))) {
		para->src_rect.h = para->src_rect.h + para->src_rect.y;
		para->src_rect.y = 0;
	} else if ((para->src_rect.y + para->src_rect.h)
		   > para->src_image.h) {
		para->src_rect.h = para->src_image.h - para->src_rect.y;
	}

	/* clamp the destination rect into the destination image */
	if (((para->dst_rect.x < 0) &&
	     ((-para->dst_rect.x) < para->dst_rect.w))) {
		para->dst_rect.w = para->dst_rect.w + para->dst_rect.x;
		para->dst_rect.x = 0;
	} else if ((para->dst_rect.x + para->dst_rect.w)
		   > para->dst_image.w) {
		para->dst_rect.w = para->dst_image.w - para->dst_rect.x;
	}
	if (((para->dst_rect.y < 0) &&
	     ((-para->dst_rect.y) < para->dst_rect.h))) {
		para->dst_rect.h = para->dst_rect.h + para->dst_rect.y;
		para->dst_rect.y = 0;
	} else if ((para->dst_rect.y + para->dst_rect.h)
		   > para->dst_image.h) {
		para->dst_rect.h = para->dst_image.h - para->dst_rect.y;
	}

	g2d_ext_hd.finish_flag = 0;

	/* Add support inverted order copy, however,
	 * hardware have a bug when reciving y coordinate,
	 * it use (y + height) rather than (y) on inverted
	 * order mode, so here adjust it before pass it to hardware.
	 */

	mutex_lock(&global_lock);
	if (scan_order > G2D_SM_TDRL)
		para->dst_rect.y += para->src_rect.h;
	mutex_unlock(&global_lock);

	err = mixer_stretchblt(para, scan_order);

	return err;
}
803
804 #ifdef G2D_V2X_SUPPORT
/*
 * g2d_fill_h - hardware fill via the extended (g2d_image_enh) interface.
 *
 * Validates and clamps the clip rectangle, resolves the destination
 * buffer (dma-buf fd unless use_phy_addr is set), and runs the fill.
 *
 * Bug fixes vs. original:
 *  - parameter validation now runs before any allocation, so the
 *    -EINVAL path can no longer leak dst_item;
 *  - a failed kmalloc() now returns -ENOMEM instead of falling through
 *    to EXIT with ret still 0 (reported success on failure).
 */
int g2d_fill_h(g2d_fillrect_h *para)
{
	__s32 ret = 0;
	struct dmabuf_item *dst_item = NULL;

	/* check the parameter valid (before allocating anything) */
	if (((para->dst_image_h.clip_rect.x < 0) &&
	     ((-para->dst_image_h.clip_rect.x) >
	      para->dst_image_h.clip_rect.w)) ||
	    ((para->dst_image_h.clip_rect.y < 0) &&
	     ((-para->dst_image_h.clip_rect.y) >
	      para->dst_image_h.clip_rect.h)) ||
	    ((para->dst_image_h.clip_rect.x > 0) &&
	     (para->dst_image_h.clip_rect.x > para->dst_image_h.width - 1))
	    || ((para->dst_image_h.clip_rect.y > 0) &&
		(para->dst_image_h.clip_rect.y >
		 para->dst_image_h.height - 1))) {
		pr_err("invalid fillrect parameter setting\n");
		return -EINVAL;
	}

	if (!para->dst_image_h.use_phy_addr) {
		dst_item = kmalloc(sizeof(struct dmabuf_item),
				   GFP_KERNEL | __GFP_ZERO);
		if (dst_item == NULL) {
			pr_err("[G2D]malloc memory of size %u fail!\n",
			       (unsigned int)sizeof(struct dmabuf_item));
			ret = -ENOMEM;
			goto EXIT;
		}
	}

	/* clamp a partially off-screen clip rect into the image bounds */
	if (((para->dst_image_h.clip_rect.x < 0) &&
	     ((-para->dst_image_h.clip_rect.x) <
	      para->dst_image_h.clip_rect.w))) {
		para->dst_image_h.clip_rect.w =
		    para->dst_image_h.clip_rect.w +
		    para->dst_image_h.clip_rect.x;
		para->dst_image_h.clip_rect.x = 0;
	} else if ((para->dst_image_h.clip_rect.x +
		    para->dst_image_h.clip_rect.w)
		   > para->dst_image_h.width) {
		para->dst_image_h.clip_rect.w =
		    para->dst_image_h.width -
		    para->dst_image_h.clip_rect.x;
	}
	if (((para->dst_image_h.clip_rect.y < 0) &&
	     ((-para->dst_image_h.clip_rect.y) <
	      para->dst_image_h.clip_rect.h))) {
		para->dst_image_h.clip_rect.h =
		    para->dst_image_h.clip_rect.h +
		    para->dst_image_h.clip_rect.y;
		para->dst_image_h.clip_rect.y = 0;
	} else if ((para->dst_image_h.clip_rect.y +
		    para->dst_image_h.clip_rect.h)
		   > para->dst_image_h.height) {
		para->dst_image_h.clip_rect.h =
		    para->dst_image_h.height -
		    para->dst_image_h.clip_rect.y;
	}

	para->dst_image_h.bbuff = 1;
	para->dst_image_h.gamut = G2D_BT709;
	para->dst_image_h.mode = 0;

	g2d_ext_hd.finish_flag = 0;

	if (!para->dst_image_h.use_phy_addr) {
		ret = g2d_dma_map(para->dst_image_h.fd, dst_item);
		if (ret != 0) {
			pr_err("[G2D]map cur_item fail!\n");
			goto FREE_DST;
		}

		g2d_set_info(&para->dst_image_h, dst_item);
	}

	ret = g2d_fillrectangle(&para->dst_image_h, para->dst_image_h.color);

	if (ret)
		pr_warn("G2D FILLRECTANGLE Failed!\n");
	if (!para->dst_image_h.use_phy_addr)
		g2d_dma_unmap(dst_item);
FREE_DST:
	if (!para->dst_image_h.use_phy_addr)
		kfree(dst_item);
EXIT:
	return ret;
}
892
/*
 * g2d_blit_h - hardware blit via the extended (g2d_image_enh) interface.
 *
 * Validates and clamps both clip rectangles, resolves the source and
 * destination buffers (dma-buf fds unless use_phy_addr is set), then
 * runs the bitblt.
 *
 * Bug fixes vs. original:
 *  - parameter validation now runs before any allocation, so the
 *    -EINVAL path can no longer leak src_item/dst_item;
 *  - failed kmalloc() calls now return -ENOMEM instead of falling
 *    through to the exit labels with ret still 0 (reported success on
 *    failure).
 */
int g2d_blit_h(g2d_blt_h *para)
{
	__s32 ret = 0;
	struct dmabuf_item *src_item = NULL;
	struct dmabuf_item *dst_item = NULL;

	/* check the parameter valid (before allocating anything) */
	if (((para->src_image_h.clip_rect.x < 0) &&
	     ((-para->src_image_h.clip_rect.x) >
	      para->src_image_h.clip_rect.w)) ||
	    ((para->src_image_h.clip_rect.y < 0) &&
	     ((-para->src_image_h.clip_rect.y) >
	      para->src_image_h.clip_rect.h)) ||
	    ((para->src_image_h.clip_rect.x > 0) &&
	     (para->src_image_h.clip_rect.x >
	      para->src_image_h.width - 1)) ||
	    ((para->src_image_h.clip_rect.y > 0) &&
	     (para->src_image_h.clip_rect.y >
	      para->src_image_h.height - 1)) ||
	    ((para->dst_image_h.clip_rect.x > 0) &&
	     (para->dst_image_h.clip_rect.x >
	      para->dst_image_h.width - 1)) ||
	    ((para->dst_image_h.clip_rect.y > 0) &&
	     (para->dst_image_h.clip_rect.y > para->dst_image_h.height - 1))) {
		pr_err("invalid bitblit parameter setting\n");
		return -EINVAL;
	}

	if (!para->src_image_h.use_phy_addr) {
		src_item = kmalloc(sizeof(struct dmabuf_item),
				   GFP_KERNEL | __GFP_ZERO);
		if (src_item == NULL) {
			pr_err("[G2D]malloc memory of size %u fail!\n",
			       (unsigned int)sizeof(struct dmabuf_item));
			ret = -ENOMEM;
			goto EXIT;
		}
	}

	if (!para->dst_image_h.use_phy_addr) {
		dst_item = kmalloc(sizeof(struct dmabuf_item),
				   GFP_KERNEL | __GFP_ZERO);
		if (dst_item == NULL) {
			pr_err("[G2D]malloc memory of size %u fail!\n",
			       (unsigned int)sizeof(struct dmabuf_item));
			ret = -ENOMEM;
			goto FREE_SRC;
		}
	}

	/* clamp the source clip rect into the source image */
	if (((para->src_image_h.clip_rect.x < 0) &&
	     ((-para->src_image_h.clip_rect.x) <
	      para->src_image_h.clip_rect.w))) {
		para->src_image_h.clip_rect.w =
		    para->src_image_h.clip_rect.w +
		    para->src_image_h.clip_rect.x;
		para->src_image_h.clip_rect.x = 0;
	} else if ((para->src_image_h.clip_rect.x +
		    para->src_image_h.clip_rect.w)
		   > para->src_image_h.width) {
		para->src_image_h.clip_rect.w =
		    para->src_image_h.width -
		    para->src_image_h.clip_rect.x;
	}
	if (((para->src_image_h.clip_rect.y < 0) &&
	     ((-para->src_image_h.clip_rect.y) <
	      para->src_image_h.clip_rect.h))) {
		para->src_image_h.clip_rect.h =
		    para->src_image_h.clip_rect.h +
		    para->src_image_h.clip_rect.y;
		para->src_image_h.clip_rect.y = 0;
	} else if ((para->src_image_h.clip_rect.y +
		    para->src_image_h.clip_rect.h)
		   > para->src_image_h.height) {
		para->src_image_h.clip_rect.h =
		    para->src_image_h.height -
		    para->src_image_h.clip_rect.y;
	}

	/* clamp the destination clip rect into the destination image */
	if (((para->dst_image_h.clip_rect.x < 0) &&
	     ((-para->dst_image_h.clip_rect.x) <
	      para->dst_image_h.clip_rect.w))) {
		para->dst_image_h.clip_rect.w =
		    para->dst_image_h.clip_rect.w +
		    para->dst_image_h.clip_rect.x;
		para->dst_image_h.clip_rect.x = 0;
	} else if ((para->dst_image_h.clip_rect.x +
		    para->dst_image_h.clip_rect.w)
		   > para->dst_image_h.width) {
		para->dst_image_h.clip_rect.w =
		    para->dst_image_h.width -
		    para->dst_image_h.clip_rect.x;
	}
	if (((para->dst_image_h.clip_rect.y < 0) &&
	     ((-para->dst_image_h.clip_rect.y) <
	      para->dst_image_h.clip_rect.h))) {
		para->dst_image_h.clip_rect.h =
		    para->dst_image_h.clip_rect.h +
		    para->dst_image_h.clip_rect.y;
		para->dst_image_h.clip_rect.y = 0;
	} else if ((para->dst_image_h.clip_rect.y +
		    para->dst_image_h.clip_rect.h)
		   > para->dst_image_h.height) {
		para->dst_image_h.clip_rect.h =
		    para->dst_image_h.height -
		    para->dst_image_h.clip_rect.y;
	}

	g2d_ext_hd.finish_flag = 0;

	/* Add support inverted order copy, however,
	 * hardware have a bug when reciving y coordinate,
	 * it use (y + height) rather than (y) on inverted
	 * order mode, so here adjust it before pass it to hardware.
	 */

	para->src_image_h.bpremul = 0;
	para->src_image_h.bbuff = 1;
	para->src_image_h.gamut = G2D_BT709;

	para->dst_image_h.bpremul = 0;
	para->dst_image_h.bbuff = 1;
	para->dst_image_h.gamut = G2D_BT709;

	if (!para->src_image_h.use_phy_addr) {
		ret = g2d_dma_map(para->src_image_h.fd, src_item);
		if (ret != 0) {
			pr_err("[G2D]map cur_item fail!\n");
			goto FREE_DST;
		}
		g2d_set_info(&para->src_image_h, src_item);
	}

	if (!para->dst_image_h.use_phy_addr) {
		ret = g2d_dma_map(para->dst_image_h.fd, dst_item);
		if (ret != 0) {
			pr_err("[G2D]map dst_item fail!\n");
			goto SRC_DMA_UNMAP;
		}
		g2d_set_info(&para->dst_image_h, dst_item);
	}

	G2D_TRACE_BEGIN("g2d_bsp_bitblt");
	ret = g2d_bsp_bitblt(&para->src_image_h,
			     &para->dst_image_h, para->flag_h);
	G2D_TRACE_END("");

	if (ret)
		pr_warn("G2D BITBLT Failed!\n");

	if (!para->dst_image_h.use_phy_addr)
		g2d_dma_unmap(dst_item);
SRC_DMA_UNMAP:
	if (!para->src_image_h.use_phy_addr)
		g2d_dma_unmap(src_item);
FREE_DST:
	if (!para->dst_image_h.use_phy_addr)
		kfree(dst_item);
FREE_SRC:
	if (!para->src_image_h.use_phy_addr)
		kfree(src_item);
EXIT:
	return ret;
}
1054
g2d_bld_h(g2d_bld * para)1055 int g2d_bld_h(g2d_bld *para)
1056 {
1057 __s32 ret = 0;
1058 struct dmabuf_item *src_item = NULL;
1059 struct dmabuf_item *dst_item = NULL;
1060
1061 if (!para->src_image[0].use_phy_addr) {
1062
1063 src_item = kmalloc(sizeof(struct dmabuf_item),
1064 GFP_KERNEL | __GFP_ZERO);
1065 if (src_item == NULL) {
1066 pr_err("malloc memory of size %u fail!\n",
1067 (unsigned int)sizeof(struct dmabuf_item));
1068 goto EXIT;
1069 }
1070 }
1071
1072 if (!para->dst_image.use_phy_addr) {
1073 dst_item = kmalloc(sizeof(struct dmabuf_item),
1074 GFP_KERNEL | __GFP_ZERO);
1075 if (dst_item == NULL) {
1076 pr_err("malloc memory of size %u fail!\n",
1077 (unsigned int)sizeof(struct dmabuf_item));
1078 goto FREE_SRC;
1079 }
1080 }
1081
1082 /* check the parameter valid */
1083 if (((para->src_image[0].clip_rect.x < 0) &&
1084 ((-para->src_image[0].clip_rect.x) >
1085 para->src_image[0].clip_rect.w)) ||
1086 ((para->src_image[0].clip_rect.y < 0) &&
1087 ((-para->src_image[0].clip_rect.y) >
1088 para->src_image[0].clip_rect.h)) ||
1089 ((para->src_image[0].clip_rect.x > 0) &&
1090 (para->src_image[0].clip_rect.x >
1091 para->src_image[0].width - 1)) ||
1092 ((para->src_image[0].clip_rect.y > 0) &&
1093 (para->src_image[0].clip_rect.y >
1094 para->src_image[0].height - 1)) ||
1095 ((para->dst_image.clip_rect.x > 0) &&
1096 (para->dst_image.clip_rect.x > para->dst_image.width - 1))
1097 || ((para->dst_image.clip_rect.y > 0) &&
1098 (para->dst_image.clip_rect.y >
1099 para->dst_image.height - 1))) {
1100 pr_err("invalid blit parameter setting\n");
1101 return -EINVAL;
1102 }
1103 if (((para->src_image[0].clip_rect.x < 0) &&
1104 ((-para->src_image[0].clip_rect.x) <
1105 para->src_image[0].clip_rect.w))) {
1106 para->src_image[0].clip_rect.w =
1107 para->src_image[0].clip_rect.w +
1108 para->src_image[0].clip_rect.x;
1109 para->src_image[0].clip_rect.x = 0;
1110 } else if ((para->src_image[0].clip_rect.x +
1111 para->src_image[0].clip_rect.w)
1112 > para->src_image[0].width) {
1113 para->src_image[0].clip_rect.w =
1114 para->src_image[0].width -
1115 para->src_image[0].clip_rect.x;
1116 }
1117 if (((para->src_image[0].clip_rect.y < 0) &&
1118 ((-para->src_image[0].clip_rect.y) <
1119 para->src_image[0].clip_rect.h))) {
1120 para->src_image[0].clip_rect.h =
1121 para->src_image[0].clip_rect.h +
1122 para->src_image[0].clip_rect.y;
1123 para->src_image[0].clip_rect.y = 0;
1124 } else if ((para->src_image[0].clip_rect.y +
1125 para->src_image[0].clip_rect.h)
1126 > para->src_image[0].height) {
1127 para->src_image[0].clip_rect.h =
1128 para->src_image[0].height -
1129 para->src_image[0].clip_rect.y;
1130 }
1131
1132 para->src_image[0].bpremul = 0;
1133 para->src_image[0].bbuff = 1;
1134 para->src_image[0].gamut = G2D_BT709;
1135
1136 para->dst_image.bpremul = 0;
1137 para->dst_image.bbuff = 1;
1138 para->dst_image.gamut = G2D_BT709;
1139
1140 g2d_ext_hd.finish_flag = 0;
1141
1142 if (!para->src_image[0].use_phy_addr) {
1143 ret = g2d_dma_map(para->src_image[0].fd, src_item);
1144 if (ret != 0) {
1145 pr_err("[G2D]map src_item fail!\n");
1146 goto FREE_DST;
1147 }
1148 g2d_set_info(¶->src_image[0], src_item);
1149 }
1150 if (!para->dst_image.use_phy_addr) {
1151 ret = g2d_dma_map(para->dst_image.fd, dst_item);
1152 if (ret != 0) {
1153 pr_err("[G2D]map dst_item fail!\n");
1154 goto SRC_DMA_UNMAP;
1155 }
1156 g2d_set_info(¶->dst_image, dst_item);
1157 }
1158 ret = g2d_bsp_bld(¶->src_image[0], ¶->dst_image,
1159 para->bld_cmd, ¶->ck_para);
1160
1161 if (ret)
1162 pr_warn("G2D BITBLT Failed!\n");
1163
1164 if (!para->dst_image.use_phy_addr)
1165 g2d_dma_unmap(dst_item);
1166 SRC_DMA_UNMAP:
1167 if (!para->src_image[0].use_phy_addr)
1168 g2d_dma_unmap(src_item);
1169 FREE_DST:
1170 if (!para->dst_image.use_phy_addr)
1171 kfree(dst_item);
1172 FREE_SRC:
1173 if (!para->src_image[0].use_phy_addr)
1174 kfree(src_item);
1175 EXIT:
1176 return ret;
1177 }
1178
g2d_maskblt_h(g2d_maskblt * para)1179 int g2d_maskblt_h(g2d_maskblt *para)
1180 {
1181 __s32 ret = 0;
1182 struct dmabuf_item *src_item = NULL;
1183 struct dmabuf_item *ptn_item = NULL;
1184 struct dmabuf_item *mask_item = NULL;
1185 struct dmabuf_item *dst_item = NULL;
1186
1187 if (!para->src_image_h.use_phy_addr) {
1188
1189 src_item = kmalloc(sizeof(struct dmabuf_item),
1190 GFP_KERNEL | __GFP_ZERO);
1191 if (src_item == NULL) {
1192 pr_err("malloc memory of size %u fail!\n",
1193 (unsigned int)sizeof(struct dmabuf_item));
1194 goto EXIT;
1195 }
1196 ptn_item = kmalloc(sizeof(struct dmabuf_item),
1197 GFP_KERNEL | __GFP_ZERO);
1198 if (ptn_item == NULL) {
1199 pr_err("malloc memory of size %u fail!\n",
1200 (unsigned int)sizeof(struct dmabuf_item));
1201 goto FREE_SRC;
1202 }
1203
1204 mask_item = kmalloc(sizeof(struct dmabuf_item),
1205 GFP_KERNEL | __GFP_ZERO);
1206 if (mask_item == NULL) {
1207 pr_err("malloc memory of size %u fail!\n",
1208 (unsigned int)sizeof(struct dmabuf_item));
1209 goto FREE_PTN;
1210 }
1211 dst_item = kmalloc(sizeof(struct dmabuf_item),
1212 GFP_KERNEL | __GFP_ZERO);
1213 if (dst_item == NULL) {
1214 pr_err("malloc memory of size %u fail!\n",
1215 (unsigned int)(unsigned int)sizeof(
1216 struct dmabuf_item));
1217 goto FREE_MASK;
1218 }
1219 }
1220 /* check the parameter valid */
1221 if (((para->dst_image_h.clip_rect.x < 0) &&
1222 ((-para->dst_image_h.clip_rect.x) >
1223 para->dst_image_h.clip_rect.w)) ||
1224 ((para->dst_image_h.clip_rect.y < 0) &&
1225 ((-para->dst_image_h.clip_rect.y) >
1226 para->dst_image_h.clip_rect.h)) ||
1227 ((para->dst_image_h.clip_rect.x > 0) &&
1228 (para->dst_image_h.clip_rect.x >
1229 para->dst_image_h.width - 1)) ||
1230 ((para->dst_image_h.clip_rect.y > 0) &&
1231 (para->dst_image_h.clip_rect.y > para->dst_image_h.height - 1))) {
1232 pr_err("invalid maskblt parameter setting\n");
1233 return -EINVAL;
1234 }
1235 if (((para->dst_image_h.clip_rect.x < 0) &&
1236 ((-para->dst_image_h.clip_rect.x) <
1237 para->dst_image_h.clip_rect.w))) {
1238 para->dst_image_h.clip_rect.w =
1239 para->dst_image_h.clip_rect.w +
1240 para->dst_image_h.clip_rect.x;
1241 para->dst_image_h.clip_rect.x = 0;
1242 } else if ((para->dst_image_h.clip_rect.x +
1243 para->dst_image_h.clip_rect.w)
1244 > para->dst_image_h.width) {
1245 para->dst_image_h.clip_rect.w =
1246 para->dst_image_h.width -
1247 para->dst_image_h.clip_rect.x;
1248 }
1249 if (((para->dst_image_h.clip_rect.y < 0) &&
1250 ((-para->dst_image_h.clip_rect.y) <
1251 para->dst_image_h.clip_rect.h))) {
1252 para->dst_image_h.clip_rect.h =
1253 para->dst_image_h.clip_rect.h +
1254 para->dst_image_h.clip_rect.y;
1255 para->dst_image_h.clip_rect.y = 0;
1256 } else if ((para->dst_image_h.clip_rect.y +
1257 para->dst_image_h.clip_rect.h)
1258 > para->dst_image_h.height) {
1259 para->dst_image_h.clip_rect.h =
1260 para->dst_image_h.height -
1261 para->dst_image_h.clip_rect.y;
1262 }
1263
1264 if (!para->src_image_h.use_phy_addr) {
1265 ret = g2d_dma_map(para->src_image_h.fd, src_item);
1266 if (ret != 0) {
1267 pr_err("[G2D]map src_item fail!\n");
1268 goto FREE_DST;
1269 }
1270 ret = g2d_dma_map(para->ptn_image_h.fd, ptn_item);
1271 if (ret != 0) {
1272 pr_err("[G2D]map ptn_item fail!\n");
1273 goto SRC_DMA_UNMAP;
1274 }
1275 ret = g2d_dma_map(para->mask_image_h.fd, mask_item);
1276 if (ret != 0) {
1277 pr_err("[G2D]map mask_item fail!\n");
1278 goto PTN_DMA_UNMAP;
1279 }
1280 ret = g2d_dma_map(para->dst_image_h.fd, dst_item);
1281 if (ret != 0) {
1282 pr_err("[G2D]map dst_item fail!\n");
1283 goto MASK_DMA_UNMAP;
1284 }
1285
1286 g2d_set_info(¶->src_image_h, src_item);
1287 g2d_set_info(¶->ptn_image_h, ptn_item);
1288 g2d_set_info(¶->mask_image_h, mask_item);
1289 g2d_set_info(¶->dst_image_h, dst_item);
1290 }
1291
1292 para->src_image_h.bbuff = 1;
1293 para->src_image_h.gamut = G2D_BT709;
1294
1295 para->ptn_image_h.bbuff = 1;
1296 para->ptn_image_h.gamut = G2D_BT709;
1297
1298 para->mask_image_h.bbuff = 1;
1299 para->mask_image_h.gamut = G2D_BT709;
1300
1301 para->dst_image_h.bbuff = 1;
1302 para->dst_image_h.gamut = G2D_BT709;
1303
1304 g2d_ext_hd.finish_flag = 0;
1305
1306 ret =
1307 g2d_bsp_maskblt(¶->src_image_h, ¶->ptn_image_h,
1308 ¶->mask_image_h, ¶->dst_image_h,
1309 para->back_flag, para->fore_flag);
1310
1311 if (ret)
1312 pr_warn("G2D MASKBLT Failed!\n");
1313 if (!para->src_image_h.use_phy_addr)
1314 g2d_dma_unmap(dst_item);
1315 MASK_DMA_UNMAP:
1316 if (!para->src_image_h.use_phy_addr)
1317 g2d_dma_unmap(mask_item);
1318 PTN_DMA_UNMAP:
1319 if (!para->src_image_h.use_phy_addr)
1320 g2d_dma_unmap(ptn_item);
1321 SRC_DMA_UNMAP:
1322 if (!para->src_image_h.use_phy_addr)
1323 g2d_dma_unmap(src_item);
1324 FREE_DST:
1325 if (!para->src_image_h.use_phy_addr)
1326 kfree(dst_item);
1327 FREE_MASK:
1328 if (!para->src_image_h.use_phy_addr)
1329 kfree(mask_item);
1330 FREE_PTN:
1331 if (!para->src_image_h.use_phy_addr)
1332 kfree(ptn_item);
1333 FREE_SRC:
1334 if (!para->src_image_h.use_phy_addr)
1335 kfree(src_item);
1336 EXIT:
1337 return ret;
1338 }
1339 #endif
1340
1341 /*
1342 int g2d_set_palette_table(g2d_palette *para)
1343 {
1344
1345 if ((para->pbuffer == NULL) || (para->size < 0) ||
1346 (para->size > 1024)) {
1347 pr_warn("para invalid in mixer_set_palette\n");
1348 return -1;
1349 }
1350
1351 mixer_set_palette(para);
1352
1353 return 0;
1354 }
1355 */
1356
1357 /*
1358 int g2d_cmdq(unsigned int para)
1359 {
1360 __s32 err = 0;
1361
1362 g2d_ext_hd.finish_flag = 0;
1363 err = mixer_cmdq(para);
1364
1365 return err;
1366 }
1367 */
1368
g2d_ioctl_mutex_lock(void)1369 void g2d_ioctl_mutex_lock(void)
1370 {
1371 if (!mutex_trylock(¶.mutex))
1372 mutex_lock(¶.mutex);
1373 }
1374
g2d_ioctl_mutex_unlock(void)1375 void g2d_ioctl_mutex_unlock(void)
1376 {
1377 mutex_unlock(¶.mutex);
1378 }
1379
g2d_ioctl(struct file * file,unsigned int cmd,unsigned long arg)1380 long g2d_ioctl(struct file *file, unsigned int cmd, unsigned long arg)
1381 {
1382 __s32 ret = 0;
1383 unsigned int size;
1384 unsigned int sel;
1385 struct timespec64 test_start, test_end;
1386 unsigned int runtime;
1387
1388 if (time_info == 1)
1389 ktime_get_real_ts64(&test_start);
1390
1391 if (!mutex_trylock(¶.mutex))
1392 mutex_lock(¶.mutex);
1393 switch (cmd) {
1394
1395 /* Proceed to the operation */
1396 case G2D_CMD_BITBLT:{
1397 g2d_blt blit_para;
1398
1399 if (copy_from_user(&blit_para, (g2d_blt *) arg,
1400 sizeof(g2d_blt))) {
1401 ret = -EFAULT;
1402 goto err_noput;
1403 }
1404 ret = g2d_blit(&blit_para);
1405 break;
1406 }
1407 case G2D_CMD_FILLRECT:{
1408 g2d_fillrect fill_para;
1409
1410 if (copy_from_user(&fill_para, (g2d_fillrect *) arg,
1411 sizeof(g2d_fillrect))) {
1412 ret = -EFAULT;
1413 goto err_noput;
1414 }
1415 ret = g2d_fill(&fill_para);
1416 break;
1417 }
1418 case G2D_CMD_STRETCHBLT:{
1419 g2d_stretchblt stre_para;
1420
1421 if (copy_from_user(&stre_para, (g2d_stretchblt *) arg,
1422 sizeof(g2d_stretchblt))) {
1423 ret = -EFAULT;
1424 goto err_noput;
1425 }
1426 ret = g2d_stretchblit(&stre_para);
1427 break;
1428 }
1429 /* case G2D_CMD_PALETTE_TBL:{
1430 g2d_palette pale_para;
1431
1432 if (copy_from_user(&pale_para, (g2d_palette *)arg,
1433 sizeof(g2d_palette))) {
1434 ret = -EFAULT;
1435 goto err_noput;
1436 }
1437 ret = g2d_set_palette_table(&pale_para);
1438 break;
1439 }
1440 case G2D_CMD_QUEUE:{
1441 unsigned int cmdq_addr;
1442
1443 if (copy_from_user(&cmdq_addr,
1444 (unsigned int *)arg, sizeof(unsigned int))) {
1445 ret = -EFAULT;
1446 goto err_noput;
1447 }
1448 ret = g2d_cmdq(cmdq_addr);
1449 break;
1450 }
1451 */
1452 #ifdef G2D_V2X_SUPPORT
1453 case G2D_CMD_BITBLT_H:{
1454 g2d_blt_h blit_para;
1455 if (copy_from_user(&blit_para, (g2d_blt_h *) arg,
1456 sizeof(g2d_blt_h))) {
1457 pr_err("[G2D]BITBLT copy from user failed!\n");
1458 ret = -EFAULT;
1459 goto err_noput;
1460 }
1461 ret = g2d_blit_h(&blit_para);
1462 break;
1463 }
1464 case G2D_CMD_FILLRECT_H:{
1465 g2d_fillrect_h fill_para;
1466
1467 if (copy_from_user(&fill_para, (g2d_fillrect_h *) arg,
1468 sizeof(g2d_fillrect_h))) {
1469 ret = -EFAULT;
1470 goto err_noput;
1471 }
1472 ret = g2d_fill_h(&fill_para);
1473 break;
1474 }
1475 case G2D_CMD_BLD_H:{
1476 g2d_bld bld_para;
1477
1478 if (copy_from_user(&bld_para, (g2d_bld *) arg,
1479 sizeof(g2d_bld))) {
1480 ret = -EFAULT;
1481 goto err_noput;
1482 }
1483 ret = g2d_bld_h(&bld_para);
1484 break;
1485 }
1486 case G2D_CMD_MASK_H:{
1487 g2d_maskblt mask_para;
1488
1489 if (copy_from_user(&mask_para, (g2d_maskblt *) arg,
1490 sizeof(g2d_maskblt))) {
1491 ret = -EFAULT;
1492 goto err_noput;
1493 }
1494 ret = g2d_maskblt_h(&mask_para);
1495 break;
1496 }
1497 #endif
1498 /* just management memory for test */
1499 case G2D_CMD_MEM_REQUEST:
1500 get_user(size, (unsigned int __user *)arg);
1501 ret = g2d_mem_request(size);
1502 break;
1503
1504 case G2D_CMD_MEM_RELEASE:
1505 get_user(sel, (unsigned int __user *)arg);
1506 ret = g2d_mem_release(sel);
1507 break;
1508
1509 case G2D_CMD_MEM_SELIDX:
1510 get_user(sel, (unsigned int __user *)arg);
1511 g2d_mem_sel = sel;
1512 break;
1513
1514 case G2D_CMD_MEM_GETADR:
1515 get_user(sel, (unsigned int __user *)arg);
1516 if (g2d_mem[sel].b_used) {
1517 ret = g2d_mem[sel].phy_addr;
1518 } else {
1519 ERR("mem not used in G2D_CMD_MEM_GETADR\n");
1520 ret = -1;
1521 }
1522 break;
1523
1524 case G2D_CMD_INVERTED_ORDER:
1525 {
1526 if (arg > G2D_SM_DTRL) {
1527 ERR("scan mode is err.\n");
1528 ret = -EINVAL;
1529 goto err_noput;
1530 }
1531
1532 mutex_lock(&global_lock);
1533 scan_order = arg;
1534 mutex_unlock(&global_lock);
1535 break;
1536 }
1537
1538 /* Invalid IOCTL call */
1539 default:
1540 return -EINVAL;
1541 }
1542
1543 err_noput:
1544 mutex_unlock(¶.mutex);
1545
1546 if (time_info == 1) {
1547 ktime_get_real_ts64(&test_end);
1548 runtime = (test_end.tv_sec - test_start.tv_sec) * 1000000 +
1549 (test_end.tv_nsec - test_start.tv_nsec) / NSEC_PER_USEC;
1550 pr_info("%s:use %u us!\n", __func__, runtime);
1551 }
1552 return ret;
1553 }
1554
/* Character-device entry points for the g2d node. */
static const struct file_operations g2d_fops = {
	.owner = THIS_MODULE,
	.open = g2d_open,
	.release = g2d_release,
	.unlocked_ioctl = g2d_ioctl,
#ifdef CONFIG_COMPAT
	/* NOTE(review): reuses the native handler for 32-bit userspace;
	 * this is only correct if every ioctl argument layout is identical
	 * under compat — TODO confirm. */
	.compat_ioctl = g2d_ioctl,
#endif
	.mmap = g2d_mmap,
};
1565
/* 32-bit DMA mask installed on the dma-buf device in g2d_probe(). */
static u64 sunxi_g2d_dma_mask = DMA_BIT_MASK(32);
g2d_probe(struct platform_device * pdev)1567 static int g2d_probe(struct platform_device *pdev)
1568 {
1569 #if !defined(CONFIG_OF)
1570 int size;
1571 struct resource *res;
1572 #endif
1573 int ret = 0;
1574 __g2d_info_t *info = NULL;
1575
1576 info = ¶
1577 info->dev = &pdev->dev;
1578 dmabuf_dev = &pdev->dev;
1579 dmabuf_dev->dma_mask = &sunxi_g2d_dma_mask;
1580 dmabuf_dev->coherent_dma_mask = DMA_BIT_MASK(32);
1581 platform_set_drvdata(pdev, info);
1582
1583 #if !defined(CONFIG_OF)
1584 /* get the memory region */
1585 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1586 if (res == NULL) {
1587 ERR("failed to get memory register\n");
1588 ret = -ENXIO;
1589 goto dealloc_fb;
1590 }
1591
1592 size = (res->end - res->start) + 1;
1593 /* map the memory */
1594 info->io = ioremap(res->start, size);
1595 if (info->io == NULL) {
1596 ERR("iorGmap() of register failed\n");
1597 ret = -ENXIO;
1598 goto dealloc_fb;
1599 }
1600 #else
1601 info->io = of_iomap(pdev->dev.of_node, 0);
1602 if (info->io == NULL) {
1603 ERR("iormap() of register failed\n");
1604 ret = -ENXIO;
1605 goto dealloc_fb;
1606 }
1607 #endif
1608
1609 #if !defined(CONFIG_OF)
1610 /* get the irq */
1611 res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1612 if (res == NULL) {
1613 ERR("failed to get irq resource\n");
1614 ret = -ENXIO;
1615 goto release_regs;
1616 }
1617 info->irq = res->start;
1618 #else
1619 info->irq = irq_of_parse_and_map(pdev->dev.of_node, 0);
1620 if (!info->irq) {
1621 ERR("irq_of_parse_and_map irq fail for transform\n");
1622 ret = -ENXIO;
1623 goto release_regs;
1624 }
1625 #endif
1626
1627 /* request the irq */
1628 ret = request_irq(info->irq, g2d_handle_irq, 0,
1629 dev_name(&pdev->dev), NULL);
1630 if (ret) {
1631 ERR("failed to install irq resource\n");
1632 goto release_regs;
1633 }
1634 #if defined(CONFIG_OF)
1635 /* clk init */
1636 info->clk = devm_clk_get(&pdev->dev, "g2d");
1637 if (IS_ERR(info->clk)) {
1638 ERR("fail to get clk\n");
1639 ret = PTR_ERR(info->clk);
1640 goto out_dispose_mapping;
1641
1642 } else {
1643 info->clk_parent = clk_get_parent(info->clk);
1644 info->bus_clk = devm_clk_get(&pdev->dev, "bus");
1645 info->mbus_clk = devm_clk_get(&pdev->dev, "mbus_g2d");
1646 info->reset = devm_reset_control_get(&pdev->dev, NULL);
1647 }
1648 #endif
1649
1650 drv_g2d_init();
1651 mutex_init(&info->mutex);
1652 mutex_init(&global_lock);
1653
1654 ret = sysfs_create_group(&g2d_dev->kobj, &g2d_attribute_group);
1655 if (ret < 0)
1656 WARNING("sysfs_create_file fail!\n");
1657
1658 return 0;
1659
1660 out_dispose_mapping:
1661 #if !defined(CONFIG_OF)
1662 irq_dispose_mapping(info->irq);
1663 #endif
1664 release_regs:
1665 #if !defined(CONFIG_OF)
1666 iounmap(info->io);
1667 #endif
1668 dealloc_fb:
1669 platform_set_drvdata(pdev, NULL);
1670
1671 return ret;
1672 }
1673
g2d_remove(struct platform_device * pdev)1674 static int g2d_remove(struct platform_device *pdev)
1675 {
1676 __g2d_info_t *info = platform_get_drvdata(pdev);
1677
1678 free_irq(info->irq, NULL);
1679 #if !defined(CONFIG_OF)
1680 iounmap(info->io);
1681 #endif
1682 platform_set_drvdata(pdev, NULL);
1683
1684 sysfs_remove_group(&g2d_dev->kobj, &g2d_attribute_group);
1685
1686 INFO("Driver unloaded succesfully.\n");
1687 return 0;
1688 }
1689
g2d_suspend(struct device * dev)1690 static int g2d_suspend(struct device *dev)
1691 {
1692 INFO("%s.\n", __func__);
1693 mutex_lock(¶.mutex);
1694 if (para.opened) {
1695 g2d_clock_disable(¶);
1696 #ifdef G2D_V2X_SUPPORT
1697 g2d_bsp_close();
1698 #endif
1699 }
1700 mutex_unlock(¶.mutex);
1701 INFO("g2d_suspend succesfully.\n");
1702
1703 return 0;
1704 }
1705
g2d_resume(struct device * dev)1706 static int g2d_resume(struct device *dev)
1707 {
1708 INFO("%s.\n", __func__);
1709 mutex_lock(¶.mutex);
1710 if (para.opened) {
1711 g2d_clock_enable(¶);
1712 #ifdef G2D_V2X_SUPPORT
1713 g2d_bsp_open();
1714 #endif
1715 }
1716 mutex_unlock(¶.mutex);
1717 INFO("g2d_resume succesfully.\n");
1718
1719 return 0;
1720 }
1721
/* Power-management callbacks wired into the platform driver below. */
static const struct dev_pm_ops g2d_pm_ops = {
	.suspend = g2d_suspend,
	.resume = g2d_resume,
};
#if !defined(CONFIG_OF)
/* Static platform device for non-device-tree builds; its resources come
 * from the g2d_resource[] table at the top of the file. */
struct platform_device g2d_device = {

	.name = "g2d",
	.id = -1,
	.num_resources = ARRAY_SIZE(g2d_resource),
	.resource = g2d_resource,
	.dev = {

		 },
};
#else
/* Device-tree match table for OF-based probing. */
static const struct of_device_id sunxi_g2d_match[] = {
	{.compatible = "allwinner,sunxi-g2d",},
	{},
};
#endif
1743
1744 static struct platform_driver g2d_driver = {
1745 .probe = g2d_probe,
1746 .remove = g2d_remove,
1747 .driver = {
1748
1749 .owner = THIS_MODULE,
1750 .name = "g2d",
1751 .pm = &g2d_pm_ops,
1752 .of_match_table = sunxi_g2d_match,
1753 },
1754 };
1755
g2d_module_init(void)1756 int __init g2d_module_init(void)
1757 {
1758 int ret = 0, err;
1759
1760 alloc_chrdev_region(&devid, 0, 1, "g2d_chrdev");
1761 g2d_cdev = cdev_alloc();
1762 cdev_init(g2d_cdev, &g2d_fops);
1763 g2d_cdev->owner = THIS_MODULE;
1764 err = cdev_add(g2d_cdev, devid, 1);
1765 if (err) {
1766 ERR("I was assigned major number %d.\n", MAJOR(devid));
1767 return -1;
1768 }
1769
1770 g2d_class = class_create(THIS_MODULE, "g2d");
1771 if (IS_ERR(g2d_class)) {
1772 ERR("create class error\n");
1773 return -1;
1774 }
1775
1776 g2d_dev = device_create(g2d_class, NULL, devid, NULL, "g2d");
1777 #if !defined(CONFIG_OF)
1778 ret = platform_device_register(&g2d_device);
1779 #endif
1780 if (ret == 0)
1781 ret = platform_driver_register(&g2d_driver);
1782
1783 #if defined(CONFIG_SUNXI_SYNCFENCE)
1784 syncfence_init();
1785 #endif
1786
1787 INFO("Module initialized.major:%d\n", MAJOR(devid));
1788 return ret;
1789 }
1790
g2d_module_exit(void)1791 static void __exit g2d_module_exit(void)
1792 {
1793 INFO("g2d_module_exit\n");
1794 /* kfree(g2d_ext_hd.g2d_finished_sem); */
1795
1796 #if defined(CONFIG_SUNXI_SYNCFENCE)
1797 syncfence_exit();
1798 #endif
1799
1800 platform_driver_unregister(&g2d_driver);
1801 #if !defined(CONFIG_OF)
1802 platform_device_unregister(&g2d_device);
1803 #endif
1804 device_destroy(g2d_class, devid);
1805 class_destroy(g2d_class);
1806
1807 cdev_del(g2d_cdev);
1808 }
#ifdef CONFIG_ARCH_SUN8IW11P1
/* registered at subsys_initcall level on sun8iw11 — presumably so the
 * device exists before later init stages need it; TODO confirm */
subsys_initcall(g2d_module_init);
#else
module_init(g2d_module_init);
#endif
module_exit(g2d_module_exit);

MODULE_AUTHOR("yupu_tang");
MODULE_AUTHOR("tyle <tyle@allwinnertech.com>");
MODULE_DESCRIPTION("g2d driver");
MODULE_LICENSE("GPL");
1820