1 /*
2 * Copyright (C) 2021 HiSilicon (Shanghai) Technologies CO., LIMITED.
3 *
4 * This program is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU General Public License
6 * as published by the Free Software Foundation; either version 2
7 * of the License, or (at your option) any later version.
8 *
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
13 *
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
17 */
18
19 #include "hifb_main.h"
20 #include "hi_debug.h"
21 #include "securec.h"
22 #include "hifb_vou_drv.h"
23 #include "hifb_vou_graphics.h"
24 #include "hifb_proc.h"
25 #include "hifb.h"
26 #include "hifb_blit.h"
27 #include "hifb_comm.h"
28 #include "hifb_init.h"
29 #include "hi_tde_ext.h"
30 #include "mm_ext.h"
31 #include "proc_ext.h"
32 #include "sys_ext.h"
33
34 #ifndef CONFIG_HI_VO_FB_SEPARATE
35 #include "vgs_ext.h"
36 #endif
37 #ifdef CONFIG_DRM_HISI_HISMART
38 #include "drm_gfx_func_ext.h"
39 #endif
40
41 #define mkstr(exp) #exp
42 #define mkmarcotostr(exp) mkstr(exp)
43
44 #define hifb_min_width(layer_id) g_drv_ops.capability[layer_id].u32MinWidth
45 #define hifb_min_height(layer_id) g_drv_ops.capability[layer_id].u32MinHeight
46
47 #define HIFB_CMAP_LEN 256
48 #define HIFB_ROTBUF_NAME_LEN 16
49
50 #define HIFB_NUMBLE 4
51 #define HIFB_ACFARAM 12
52 #define HIFB_ACTEMP 12
53
54 #define COMPRESS_HEADER_STRIDE 16
55 #define COMPRESS_STRIDE_ALIGNMENT 16
56
57 #ifdef __HuaweiLite__
58 #define fb_info hifb_info
59 extern void __init_waitqueue_head(wait_queue_head_t *wait);
60 extern void __wake_up_interruptible(wait_queue_head_t *wait);
61 #endif
62
63 typedef hi_s32(*drv_hifb_ioctl_func)(struct fb_info *info, unsigned long arg);
64
65 typedef struct {
66 hi_u32 cmd;
67 drv_hifb_ioctl_func func;
68 } drv_hifb_ioctl_func_item;
69
70 #ifdef CONFIG_HIFB_SOFT_IRQ_SUPPORT
71 static hi_bool g_hifb_soft_irq_register = HI_FALSE;
72 #endif
char *g_video; /* video-option string set via set_video_name(); backing buffer is 64 bytes per original note — TODO confirm owner keeps it alive */
bool g_bUpdateRotateRect = HI_FALSE; /* written by set_update_rotate_rect(), read by hifb_update_rotate_rect() */
75
76 #define PAGE_SIZE_ALIGN_MAX ((~0ul - PAGE_SIZE) / 1024)
77
78 #define is_soft_cursor() (g_soft_cursor == HI_TRUE)
79
80 #ifdef __HuaweiLite__
81 #define in_atomic() (HI_FALSE)
82 #endif
83
84 #define HIFB_ALIGNMENT 0xf
85
86 #define HIFB_ALIGN 16
87
88 /*
89 * the interface to operate the chip
90 * Collection of properties and methods,filled in hifb_init
91 */
static hifb_drv_ops g_drv_ops; /* chip operation table, populated in hifb_init (see comment above) */

static hi_bool g_soft_cursor = HI_FALSE;  /* HI_TRUE: cursor drawn in software, see is_soft_cursor() */
static hi_bool g_display_on = HI_FALSE;   /* HI_TRUE: display enabled at load — presumably from the "off"/"on" g_display option; TODO confirm */
static hi_bool g_hifb_register = HI_TRUE; /* HI_FALSE: skip framebuffer registration — TODO confirm against init path */
#ifdef CONFIG_HI_VO_FB_SEPARATE
static hi_bool g_hifb_irq_register = HI_FALSE; /* tracks whether the separate VO irq has been requested — TODO confirm */
#endif
100 #define VO_INVALID_DEV (-1)
101
102 #define HIFB_INTMSK_HD0_VTTHD1 0x1
103 #define HIFB_INTMSK_HD0_VTTHD2 0x2
104 #define HIFB_INTMSK_HD0_VTTHD3 0x4
105 #define HIFB_INTMSK_HD1_VTTHD2 0x20
106 #define HIFB_INTMSK_HD1_VTTHD3 0x40
107 #define DRV_HIFB_IOCTL_CMD_NUM_MAX 151
108 #define DRV_HIFB_IOCTL_FUNC_ITEM_NUM_MAX 45
109
110 #ifdef CONFIG_DRIVERS_HDF_DISP
111 #undef DRV_HIFB_IOCTL_CMD_NUM_MAX
112 #undef DRV_HIFB_IOCTL_FUNC_ITEM_NUM_MAX
113 #define DRV_HIFB_IOCTL_CMD_NUM_MAX 153
114 #define DRV_HIFB_IOCTL_FUNC_ITEM_NUM_MAX 47
115 #endif
116
117 #define DEV_NAME "hifb"
118 #if HICHIP == HI3519A_V100
119 unsigned int g_fifb_irq = VOU1_IRQ_NR;
120 #elif HICHIP == HI3559A_V100
121 unsigned int g_fifb_irq = VO1_IRQ_NR;
122 #elif HICHIP == HI3516C_V500
123 unsigned int g_fifb_irq = VO1_IRQ_NR;
124 #endif
125
126 #ifdef __HuaweiLite__
127 hi_u32 g_fifb_soft_irq = 83;
128 #else
129 hi_u32 g_fifb_soft_irq = 0;
130 #endif
131
132 static hi_s32 hifb_parse_cfg(hi_void);
133 static hi_s32 hifb_overlay_probe(hi_u32 layer_id);
134 static hi_void hifb_overlay_cleanup(hi_u32 layer_id, hi_bool unregister);
135 #ifdef __HuaweiLite__
136 static hi_s32 hifb_pan_display(struct fb_vtable_s *vtable, struct fb_overlayinfo_s *oinfo);
137 #else
138 static hi_s32 hifb_pan_display(struct fb_var_screeninfo *var, struct fb_info *info);
139 #endif
140 #ifndef __HuaweiLite__
141 static HIFB_COLOR_FMT_E hifb_getfmtbyargb(struct fb_bitfield *red, struct fb_bitfield *green, struct fb_bitfield *blue,
142 struct fb_bitfield *transp, hi_u32 color_depth);
143 #endif
144 hi_void hifb_buf_freemem(hi_u64 phyaddr);
145 hi_u64 hifb_buf_allocmem(const hi_char *buf_name, hi_u32 size, hi_ulong layer_size, const hi_char *mmz_name);
146
147 static hi_s32 hifb_wait_regconfig_work(hi_u32 layer_id);
148 static hi_s32 hifb_freeccanbuf(hifb_par *par);
149 static hi_void hifb_set_dispbufinfo(hi_u32 layer_id);
150 static hi_void hifb_set_bufmode(hi_u32 layer_id, HIFB_LAYER_BUF_E layer_buf_mode);
151 static inline hi_void hifb_get_bufmode(hifb_par *par, HIFB_LAYER_BUF_E *buf_mode);
152 static hi_s32 hifb_onrefresh(hifb_par *par, hi_void __user *argp);
153 static hi_s32 hifb_refresh_0buf(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf);
154 static hi_s32 hifb_refresh_1buf(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf);
155 static hi_s32 hifb_refresh_2buf(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf);
156 static hi_s32 hifb_refresh_2buf_immediate_display(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf);
157 static hi_s32 hifb_set_mirrormode(hifb_par *par, HIFB_MIRROR_MODE_E mirror_mode);
158 static hi_s32 hifb_set_rotatemode(struct fb_info *info, HIFB_ROTATE_MODE_E rotate_mode);
159 #ifdef CURSOR
160 static inline hi_void hifb_set_cursorinfo(hifb_par *par, hifb_cursor_info *cursor_info);
161 static inline hi_void hifb_get_cursorinfo(hifb_par *par, hifb_cursor_info *cursor_info);
162 static hi_s32 hifb_cursor_attach(hi_u32 cursor_id, hi_u32 layer_id);
163 static hi_s32 hifb_cursor_detach(hi_u32 layer_id);
164 static hi_s32 hifb_cursor_changepos(hi_u32 cursor_id, HIFB_POINT_S pos);
165 static hi_s32 hifb_cursor_changestate(hifb_par *cursor_par, hi_bool show);
166 static hi_s32 hifb_cursor_putinfo(hifb_par *cursor_par, HIFB_CURSOR_S *cursor);
167 #endif
168 static hi_s32 hifb_onputlayerinfo(struct fb_info *info, hifb_par *par, const hi_void __user *argp);
169 static hi_void hifb_get_layerinfo(hifb_par *par, HIFB_LAYER_INFO_S *layer_info);
170 static inline hi_void hifb_get_antiflickerlevel(hifb_par *par,
171 HIFB_LAYER_ANTIFLICKER_LEVEL_E *pen_antiflicker_level);
172 static hi_void hifb_set_antiflickerlevel(hi_u32 layer_id, HIFB_LAYER_ANTIFLICKER_LEVEL_E antiflicker_level);
173 static inline hi_void hifb_get_fmt(hifb_par *par, HIFB_COLOR_FMT_E *color_format);
174 static inline hi_void hifb_set_fmt(hifb_par *par, HIFB_COLOR_FMT_E color_fmt);
175 static inline hi_void hifb_set_alpha(hifb_par *par, HIFB_ALPHA_S *alpha);
176 static inline hi_void hifb_get_alpha(hifb_par *par, HIFB_ALPHA_S *alpha);
177 static inline hi_void hifb_set_key(hifb_par *par, hifb_colorkeyex *key);
178 static inline hi_void hifb_get_key(hifb_par *par, hifb_colorkeyex *key);
179 static inline hi_void hifb_get_layerpos(hifb_par *par, HIFB_POINT_S *pos);
180 static hi_void hifb_get_screensize(hifb_par *par, hi_u32 *width, hi_u32 *height);
181 static hi_s32 hifb_set_screensize(hifb_par *par, hi_u32 *width, hi_u32 *height);
182 static hi_void hifb_get_maxscreensize(hifb_par *par, hi_u32 *width, hi_u32 *height);
183 static hi_void hifb_get_dispsize(hifb_par *par, hi_u32 *width, hi_u32 *height);
184 static inline hi_void hifb_get_premul(hifb_par *par, hi_bool *premul);
185 static inline hi_bool hifb_get_show(hifb_par *par);
186 static inline hi_void hifb_set_show(hifb_par *par, hi_bool show);
187 static hi_void hifb_set_layerpos(hifb_par *par, HIFB_POINT_S *pos);
188 #if (HICHIP == HI3516E_V200)
189 static hi_s32 hifb_tde_rotate_callback(hi_u32 call_mod_id, hi_u32 call_dev_id, hi_u32 call_chn_id, hi_void *job_data);
190 static hi_s32 hifb_tde_callback(hi_u32 call_mod_id, hi_u32 call_dev_id, hi_u32 call_chn_id, hi_void *job_data);
191 #else
192 static hi_s32 hifb_tde_rotate_callback(const hi_void *paraml, const hi_void *paramr);
193 static hi_s32 hifb_tde_callback(const hi_void *paraml, const hi_void *paramr);
194 #endif
195 static hi_s32 hifb_vo_callback(const hi_void *paraml, const hi_void *paramr);
196 static hi_s32 hifb_wbc_callback(const hi_void *paraml, const hi_void *paramr);
197 static hi_void hifb_get_idledispbuf(hifb_par *par, hi_u64 *phy_addr);
198 static hi_void hifb_get_workdispbuf(hifb_par *par, hi_u64 *phy_addr);
199 static inline hi_void hifb_get_workcursorbuf(hifb_par *par, hi_u64 *phy_addr);
200 #ifdef CURSOR
201 static hi_void hifb_get_idlecursorbuf(hifb_par *par, hi_u64 *phy_addr);
202 #endif
203 static inline hi_bool hifb_is_interlace(hifb_par *par);
204 static hi_s32 hifb_set_dynamic_range_display(hifb_par *par, HIFB_DYNAMIC_RANGE_E dynamic_range);
205
206 hi_s32 hifb_read_proc(struct osal_proc_dir_entry *entry);
207 hi_s32 hifb_write_proc(struct osal_proc_dir_entry *entry, const char *buf, int count, long long *);
208
209 static hi_s32 hifb_get_colorkey(struct fb_info *info, unsigned long arg);
210 static hi_s32 hifb_set_colorkey(struct fb_info *info, unsigned long arg);
211 static hi_s32 hifb_get_layer_alpha(struct fb_info *info, unsigned long arg);
212 static hi_s32 hifb_set_layer_alpha(struct fb_info *info, unsigned long arg);
213 static hi_s32 hifb_get_screen_origin_pos(struct fb_info *info, unsigned long arg);
214 static hi_s32 hifb_set_screen_origin_pos(struct fb_info *info, unsigned long arg);
215 static hi_s32 hifb_get_deflicker(struct fb_info *info, unsigned long arg);
216 static hi_s32 hifb_set_deflicker(struct fb_info *info, unsigned long arg);
217 static hi_s32 hifb_get_vblank(struct fb_info *info, unsigned long arg);
218 static hi_s32 hifb_show_layer(struct fb_info *info, unsigned long arg);
219 static hi_s32 hifb_get_layer_show_state(struct fb_info *info, unsigned long arg);
220 static hi_s32 hifb_get_capability(struct fb_info *info, unsigned long arg);
221 static hi_s32 hifb_set_sursor_info(struct fb_info *info, unsigned long arg);
222 static hi_s32 hifb_get_sursor_info(struct fb_info *info, unsigned long arg);
223 static hi_s32 hifb_set_cursor_state(struct fb_info *info, unsigned long arg);
224 static hi_s32 hifb_get_cursor_state(struct fb_info *info, unsigned long arg);
225 static hi_s32 hifb_set_cursor_pos(struct fb_info *info, unsigned long arg);
226 static hi_s32 hifb_get_cursor_pos(struct fb_info *info, unsigned long arg);
227 static hi_s32 hifb_set_cursor_colorkey(struct fb_info *info, unsigned long arg);
228 static hi_s32 hifb_get_cursor_colorkey(struct fb_info *info, unsigned long arg);
229 static hi_s32 hifb_set_cursor_alpha(struct fb_info *info, unsigned long arg);
230 static hi_s32 hifb_get_cursor_alpha(struct fb_info *info, unsigned long arg);
231 static hi_s32 hifb_set_attch_cursor(struct fb_info *info, unsigned long arg);
232 static hi_s32 hifb_set_detach_cursor(struct fb_info *info, unsigned long arg);
233 static hi_s32 hifb_set_layer_info(struct fb_info *info, unsigned long arg);
234 static hi_s32 hifb_get_layer_info(struct fb_info *info, unsigned long arg);
235 static hi_s32 hifb_get_canvas_buffer(struct fb_info *info, unsigned long arg);
236 static hi_s32 hifb_refresh_layer(struct fb_info *info, unsigned long arg);
237 static hi_s32 hifb_wait_refresh_finish(struct fb_info *info, unsigned long arg);
238 static hi_s32 hifb_set_mirror_mode(struct fb_info *info, unsigned long arg);
239 static hi_s32 hifb_get_mirror_mode(struct fb_info *info, unsigned long arg);
240 static hi_s32 hifb_set_rotate_mode(struct fb_info *info, unsigned long arg);
241 static hi_s32 hifb_get_rotate_mode(struct fb_info *info, unsigned long arg);
242 static hi_s32 hifb_set_screen_size(struct fb_info *info, unsigned long arg);
243 static hi_s32 hifb_get_screen_size(struct fb_info *info, unsigned long arg);
244 static hi_s32 hifb_flip_surface(struct fb_info *info, unsigned long arg);
245 static hi_s32 hifb_set_compression_mode(struct fb_info *info, unsigned long arg);
246 static hi_s32 hifb_get_compression_mode(struct fb_info *info, unsigned long arg);
247 static hi_s32 hifb_set_mddrdetect(struct fb_info *info, unsigned long arg);
248 static hi_s32 hifb_get_mddrdetect(struct fb_info *info, unsigned long arg);
249 static hi_s32 hifb_set_dynamic_range(struct fb_info *info, unsigned long arg);
250 static hi_s32 hifb_get_dynamic_range(struct fb_info *info, unsigned long arg);
251 static hi_s32 drv_hifb_create(struct fb_info *info, unsigned long arg);
252 static hi_s32 drv_hifb_release(struct fb_info *info, unsigned long arg);
253 #ifdef CONFIG_DRIVERS_HDF_DISP
254 static hi_s32 hdf_panel_set_powerstatus(struct fb_info *info, unsigned long arg);
255 static hi_s32 hdf_panel_set_backlight(struct fb_info *info, unsigned long arg);
256 #endif
257
258
259 #if HICHIP == HI3516E_V200
set_hifb_irq(unsigned int temp_hifb_irq)260 hi_void set_hifb_irq(unsigned int temp_hifb_irq)
261 {
262 return;
263 }
264 #else
set_hifb_irq(unsigned int temp_hifb_irq)265 hi_void set_hifb_irq(unsigned int temp_hifb_irq)
266 {
267 g_fifb_irq = temp_hifb_irq;
268 }
269 #endif
270
set_hifb_soft_irq(unsigned int temp_hifb_soft_irq)271 hi_void set_hifb_soft_irq(unsigned int temp_hifb_soft_irq)
272 {
273 g_fifb_soft_irq = temp_hifb_soft_irq;
274 }
275
set_update_rotate_rect(bool temp_update_rotate_rect)276 hi_void set_update_rotate_rect(bool temp_update_rotate_rect)
277 {
278 g_bUpdateRotateRect = temp_update_rotate_rect;
279 }
280
set_video_name(char * temp_video)281 hi_void set_video_name(char *temp_video)
282 {
283 g_video = temp_video;
284 }
285
/*
 * Name : g_drv_hifb_ctl_num
 * Desc : per-ioctl-command index into g_drv_hifb_ioctl_func
 *        (0 means the command has no handler)
 */
/* For each ioctl cmd number: index of its handler in g_drv_hifb_ioctl_func; 0 = unsupported. */
static hi_s32 g_drv_hifb_ctl_num[DRV_HIFB_IOCTL_CMD_NUM_MAX] = {
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 0, 0, 7, 8,
    9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 0, 0, 0, 0,
    25, 26, 0, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 0, 0, 41,
    42, 0, 0, 0, 0, 0, 0, 0, 0, 43, 44,
#ifdef CONFIG_DRIVERS_HDF_DISP
    45, 46,
#endif
};
303
/*
 * ioctl dispatch table: pairs each FBIO* command with its handler.
 * Entry 0 ({0, HI_NULL}) is the "unsupported" sentinel; entries are
 * presumably selected via g_drv_hifb_ctl_num — dispatch code not in view.
 */
static drv_hifb_ioctl_func_item g_drv_hifb_ioctl_func[DRV_HIFB_IOCTL_FUNC_ITEM_NUM_MAX] = {
    {0, HI_NULL},
    {FBIOGET_COLORKEY_HIFB, hifb_get_colorkey}, {FBIOPUT_COLORKEY_HIFB, hifb_set_colorkey},
    {FBIOGET_ALPHA_HIFB, hifb_get_layer_alpha}, {FBIOPUT_ALPHA_HIFB, hifb_set_layer_alpha},
    {FBIOGET_SCREEN_ORIGIN_HIFB, hifb_get_screen_origin_pos},
    {FBIOPUT_SCREEN_ORIGIN_HIFB, hifb_set_screen_origin_pos},
    {FBIOGET_DEFLICKER_HIFB, hifb_get_deflicker}, {FBIOPUT_DEFLICKER_HIFB, hifb_set_deflicker},
    {FBIOGET_VBLANK_HIFB, hifb_get_vblank},
    {FBIOPUT_SHOW_HIFB, hifb_show_layer}, {FBIOGET_SHOW_HIFB, hifb_get_layer_show_state},
    {FBIOGET_CAPABILITY_HIFB, hifb_get_capability},
    {FBIOPUT_CURSOR_INFO, hifb_set_sursor_info}, {FBIOGET_CURSOR_INFO, hifb_get_sursor_info},
    {FBIOPUT_CURSOR_STATE, hifb_set_cursor_state}, {FBIOGET_CURSOR_STATE, hifb_get_cursor_state},
    {FBIOPUT_CURSOR_POS, hifb_set_cursor_pos}, {FBIOGET_CURSOR_POS, hifb_get_cursor_pos},
    {FBIOPUT_CURSOR_COLORKEY, hifb_set_cursor_colorkey},
    {FBIOGET_CURSOR_COLORKEY, hifb_get_cursor_colorkey},
    {FBIOPUT_CURSOR_ALPHA, hifb_set_cursor_alpha}, {FBIOGET_CURSOR_ALPHA, hifb_get_cursor_alpha},
    {FBIOPUT_CURSOR_ATTCHCURSOR, hifb_set_attch_cursor},
    {FBIOPUT_CURSOR_DETACHCURSOR, hifb_set_detach_cursor},
    {FBIOPUT_LAYER_INFO, hifb_set_layer_info}, {FBIOGET_LAYER_INFO, hifb_get_layer_info},
    {FBIOGET_CANVAS_BUFFER, hifb_get_canvas_buffer}, {FBIO_REFRESH, hifb_refresh_layer},
    {FBIO_WAITFOR_FREFRESH_DONE, hifb_wait_refresh_finish},
    {FBIOPUT_MIRROR_MODE, hifb_set_mirror_mode}, {FBIOGET_MIRROR_MODE, hifb_get_mirror_mode},
    {FBIOPUT_ROTATE_MODE, hifb_set_rotate_mode}, {FBIOGET_ROTATE_MODE, hifb_get_rotate_mode},
    {FBIOPUT_SCREENSIZE, hifb_set_screen_size}, {FBIOGET_SCREENSIZE, hifb_get_screen_size},
    {FBIOFLIP_SURFACE, hifb_flip_surface}, {FBIOPUT_COMPRESSION_HIFB, hifb_set_compression_mode},
    {FBIOGET_COMPRESSION_HIFB, hifb_get_compression_mode},
    {FBIOPUT_MDDRDETECT_HIFB, hifb_set_mddrdetect}, {FBIOGET_MDDRDETECT_HIFB, hifb_get_mddrdetect},
    {FBIOPUT_DYNAMIC_RANGE_HIFB, hifb_set_dynamic_range},
    {FBIOGET_DYNAMIC_RANGE_HIFB, hifb_get_dynamic_range},
    {FBIO_CREATE_LAYER, drv_hifb_create},
    {FBIO_DESTROY_LAYER, drv_hifb_release},
#ifdef CONFIG_DRIVERS_HDF_DISP
    {FBIO_PANEL_SET_POWERSTATUS, hdf_panel_set_powerstatus},
    {FBIO_PANEL_SET_BACKLIGHT, hdf_panel_set_backlight},
#endif
};
340
341 #ifndef __HuaweiLite__
hifb_get_smem_start(struct fb_info * info)342 static hi_ulong hifb_get_smem_start(struct fb_info *info)
343 {
344 return (hi_ulong)info->fix.smem_start;
345 }
346
hifb_get_screen_base(struct fb_info * info)347 static hi_s8 *hifb_get_screen_base(struct fb_info *info)
348 {
349 return (hi_s8 *)info->screen_base;
350 }
351
hifb_get_xres(struct fb_info * info)352 static hi_u32 hifb_get_xres(struct fb_info *info)
353 {
354 return (hi_u32)info->var.xres;
355 }
356
hifb_get_yres(struct fb_info * info)357 static hi_u32 hifb_get_yres(struct fb_info *info)
358 {
359 return (hi_u32)info->var.yres;
360 }
361
hifb_get_xres_virtual(struct fb_info * info)362 static hi_u32 hifb_get_xres_virtual(struct fb_info *info)
363 {
364 return (hi_u32)info->var.xres_virtual;
365 }
366
hifb_get_yres_virtual(struct fb_info * info)367 static hi_u32 hifb_get_yres_virtual(struct fb_info *info)
368 {
369 return (hi_u32)info->var.yres_virtual;
370 }
371
hifb_get_smem_len(struct fb_info * info)372 static hi_u32 hifb_get_smem_len(struct fb_info *info)
373 {
374 return (hi_u32)info->fix.smem_len;
375 }
376
hifb_get_line_length(struct fb_info * info)377 static hi_u32 hifb_get_line_length(struct fb_info *info)
378 {
379 return (hi_u32)info->fix.line_length;
380 }
381
hifb_get_bits_per_pixel(struct fb_info * info)382 static hi_u32 hifb_get_bits_per_pixel(struct fb_info *info)
383 {
384 return (hi_u32)info->var.bits_per_pixel;
385 }
386
hifb_get_yoffset(struct fb_info * info)387 static hi_u32 hifb_get_yoffset(struct fb_info *info)
388 {
389 return (hi_u32)info->var.yoffset;
390 }
391
hifb_get_xoffset(struct fb_info * info)392 static hi_u32 hifb_get_xoffset(struct fb_info *info)
393 {
394 return (hi_u32)info->var.xoffset;
395 }
396
/* Default fixed screen info per layer type; indexed like g_default_var (HD/SD/AD/cursor). */
static struct fb_fix_screeninfo g_default_fix[HIFB_LAYER_TYPE_BUTT] = {
    /* HD layer */
    {
        .id = "hifb", /* string identifier */
        .type = FB_TYPE_PACKED_PIXELS, /* FB type */
        .visual = FB_VISUAL_TRUECOLOR,
        .xpanstep = 1,
        .ypanstep = 1,
        .ywrapstep = 0,
        /* The number of bytes in one line(width * 4 (32 bits per pixels / 8 bits per byte)) */
        .line_length = HIFB_HD_DEF_STRIDE,
        .accel = FB_ACCEL_NONE,
        .mmio_len = 0,
        .mmio_start = 0,
    },
    /* SD layer */
    {
        .id = "hifb",
        .type = FB_TYPE_PACKED_PIXELS,
        .visual = FB_VISUAL_TRUECOLOR,
        .xpanstep = 1,
        .ypanstep = 1,
        .ywrapstep = 0,
        .line_length = HIFB_SD_DEF_STRIDE,
        .accel = FB_ACCEL_NONE,
        .mmio_len = 0,
        .mmio_start = 0,
    },
    /* AD layer (uses SD stride) */
    {
        .id = "hifb",
        .type = FB_TYPE_PACKED_PIXELS,
        .visual = FB_VISUAL_TRUECOLOR,
        .xpanstep = 1,
        .ypanstep = 1,
        .ywrapstep = 0,
        .line_length = HIFB_SD_DEF_STRIDE,
        .accel = FB_ACCEL_NONE,
        .mmio_len = 0,
        .mmio_start = 0,
    },
    /* cursor layer (uses HD stride) */
    {
        .id = "hifb",
        .type = FB_TYPE_PACKED_PIXELS,
        .visual = FB_VISUAL_TRUECOLOR,
        .xpanstep = 1,
        .ypanstep = 1,
        .ywrapstep = 0,
        .line_length = HIFB_HD_DEF_STRIDE,
        .accel = FB_ACCEL_NONE,
        .mmio_len = 0,
        .mmio_start = 0,
    }
};
448
/* Default variable screen info per layer type; bitfields describe ARGB1555. */
static struct fb_var_screeninfo g_default_var[HIFB_LAYER_TYPE_BUTT] = {
    /* for HD layer */
    {
        .xres = HIFB_HD_DEF_WIDTH,
        .yres = HIFB_HD_DEF_HEIGHT,
        .xres_virtual = HIFB_HD_DEF_WIDTH,
        .yres_virtual = HIFB_HD_DEF_HEIGHT,
        .xoffset = 0,
        .yoffset = 0,
        .bits_per_pixel = HIFB_DEF_DEPTH,
        .red = {10, 5, 0}, /* ARGB1555 layout */
        .green = {5, 5, 0},
        .blue = {0, 5, 0},
        .transp = {15, 1, 0},
        .activate = FB_ACTIVATE_NOW,
        /* pixel clock in ps (pico seconds); -1 marks it unknown — NOTE(review): field is unsigned, wraps to max */
        .pixclock = -1,
        .left_margin = 0, /* time from sync to picture */
        .right_margin = 0, /* time from picture to sync */
        .upper_margin = 0, /* time from sync to picture */
        .lower_margin = 0,
        .hsync_len = 0, /* length of horizontal sync */
        .vsync_len = 0, /* length of vertical sync */
    },
    /* for SD layer */
    {
        .xres = HIFB_SD_DEF_WIDTH,
        .yres = HIFB_SD_DEF_HEIGHT,
        .xres_virtual = HIFB_SD_DEF_WIDTH,
        .yres_virtual = HIFB_SD_DEF_HEIGHT,
        .xoffset = 0,
        .yoffset = 0,
        .bits_per_pixel = HIFB_DEF_DEPTH,
        .red = {10, 5, 0},
        .green = {5, 5, 0},
        .blue = {0, 5, 0},
        .transp = {15, 1, 0},
        .activate = FB_ACTIVATE_NOW,
        .pixclock = -1, /* pixel clock in ps (pico seconds) */
        .left_margin = 0, /* time from sync to picture */
        .right_margin = 0, /* time from picture to sync */
        .upper_margin = 0, /* time from sync to picture */
        .lower_margin = 0,
        .hsync_len = 0, /* length of horizontal sync */
        .vsync_len = 0, /* length of vertical sync */
    },
    /* for AD layer */
    {
        .xres = HIFB_AD_DEF_WIDTH,
        .yres = HIFB_AD_DEF_HEIGHT,
        .xres_virtual = HIFB_AD_DEF_WIDTH,
        .yres_virtual = HIFB_AD_DEF_HEIGHT,
        .xoffset = 0,
        .yoffset = 0,
        .bits_per_pixel = HIFB_DEF_DEPTH,
        .red = {10, 5, 0},
        .green = {5, 5, 0},
        .blue = {0, 5, 0},
        .transp = {15, 1, 0},
        .activate = FB_ACTIVATE_NOW,
        .pixclock = -1, /* pixel clock in ps (pico seconds) */
        .left_margin = 0, /* time from sync to picture */
        .right_margin = 0, /* time from picture to sync */
        .upper_margin = 0, /* time from sync to picture */
        .lower_margin = 0,
        .hsync_len = 0, /* length of horizontal sync */
        .vsync_len = 0, /* length of vertical sync */
    },
    /* for cursor layer */
    {
        .xres = HIFB_CURSOR_DEF_WIDTH,
        .yres = HIFB_CURSOR_DEF_HEIGHT,
        .xres_virtual = HIFB_CURSOR_DEF_WIDTH,
        .yres_virtual = HIFB_CURSOR_DEF_HEIGHT,
        .xoffset = 0,
        .yoffset = 0,
        .bits_per_pixel = HIFB_DEF_DEPTH,
        .red = {10, 5, 0},
        .green = {5, 5, 0},
        .blue = {0, 5, 0},
        .transp = {15, 1, 0},
        .activate = FB_ACTIVATE_NOW,
        .pixclock = -1, /* pixel clock in ps (pico seconds) */
        .left_margin = 0, /* time from sync to picture */
        .right_margin = 0, /* time from picture to sync */
        .upper_margin = 0, /* time from sync to picture */
        .lower_margin = 0,
        .hsync_len = 0, /* length of horizontal sync */
        .vsync_len = 0, /* length of vertical sync */
    }
};
539 #else
hifb_get_smem_start(struct hifb_info * info)540 static hi_ulong hifb_get_smem_start(struct hifb_info *info)
541 {
542 return (hi_ulong)(uintptr_t)info->oinfo.fbmem;
543 }
544
hifb_get_screen_base(struct hifb_info * info)545 static hi_s8 *hifb_get_screen_base(struct hifb_info *info)
546 {
547 return (hi_s8 *)info->oinfo.fbmem;
548 }
549
hifb_get_xres(struct hifb_info * info)550 static hi_u32 hifb_get_xres(struct hifb_info *info)
551 {
552 return (hi_u32)info->vinfo.xres;
553 }
554
hifb_get_yres(struct hifb_info * info)555 static hi_u32 hifb_get_yres(struct hifb_info *info)
556 {
557 return (hi_u32)info->vinfo.yres;
558 }
559
hifb_get_xres_virtual(struct hifb_info * info)560 static hi_u32 hifb_get_xres_virtual(struct hifb_info *info)
561 {
562 return (hi_u32)info->oinfo.sarea.w;
563 }
564
hifb_get_yres_virtual(struct hifb_info * info)565 static hi_u32 hifb_get_yres_virtual(struct hifb_info *info)
566 {
567 return (hi_u32)info->oinfo.sarea.h;
568 }
569
hifb_get_smem_len(struct hifb_info * info)570 static hi_u32 hifb_get_smem_len(struct hifb_info *info)
571 {
572 return (hi_u32)info->oinfo.fblen;
573 }
574
hifb_get_line_length(struct hifb_info * info)575 static hi_u32 hifb_get_line_length(struct hifb_info *info)
576 {
577 return (hi_u32)info->oinfo.stride;
578 }
579
hifb_get_bits_per_pixel(struct hifb_info * info)580 static hi_u32 hifb_get_bits_per_pixel(struct hifb_info *info)
581 {
582 return (hi_u32)info->oinfo.bpp;
583 }
584
hifb_get_yoffset(struct hifb_info * info)585 static hi_u32 hifb_get_yoffset(struct hifb_info *info)
586 {
587 return (hi_u32)info->oinfo.sarea.y;
588 }
589
hifb_get_xoffset(struct hifb_info * info)590 static hi_u32 hifb_get_xoffset(struct hifb_info *info)
591 {
592 return (hi_u32)info->oinfo.sarea.x;
593 }
594
/* Per-layer-type default screen parameters, consumed by hifb_screeninfo_init(). */
struct hifb_screeninfo {
    uint8_t fmt;            /* see FB_FMT_* */
    fb_coord_t xres;        /* Horizontal resolution in pixel columns */
    fb_coord_t yres;        /* Vertical resolution in pixel rows */
    struct fb_area_s sarea; /* Selected area within the overlay */
    fb_coord_t stride;      /* Length of a line in bytes */
    uint8_t bpp;            /* Bits per pixel */
    uint32_t accl;          /* Supported hardware acceleration */
    uint8_t nplanes;        /* Number of color planes supported */
    uint8_t noverlays;      /* Number of overlays supported */
};
606
/* Default screen info per layer type (HD/SD/AD/cursor); all default to ARGB1555. */
static struct hifb_screeninfo g_default_info[HIFB_LAYER_TYPE_BUTT] = {
    /* for HD layer */
    {
        .fmt = HIFB_FMT_ARGB1555,
        .xres = HIFB_HD_DEF_WIDTH,
        .yres = HIFB_HD_DEF_HEIGHT,
        .sarea = {0, 0, HIFB_HD_DEF_WIDTH, HIFB_HD_DEF_HEIGHT},
        .stride = HIFB_HD_DEF_STRIDE,
        .bpp = HIFB_DEF_DEPTH,
        .accl = 0,
        .nplanes = 1,
        .noverlays = 1,
    },
    /* for SD layer */
    {
        .fmt = HIFB_FMT_ARGB1555,
        .xres = HIFB_SD_DEF_WIDTH,
        .yres = HIFB_SD_DEF_HEIGHT,
        .sarea = {0, 0, HIFB_SD_DEF_WIDTH, HIFB_SD_DEF_HEIGHT},
        .stride = HIFB_SD_DEF_STRIDE,
        .bpp = HIFB_DEF_DEPTH,
        .accl = 0,
        .nplanes = 1,
        .noverlays = 1,
    },
    /* for AD layer */
    {
        .fmt = HIFB_FMT_ARGB1555,
        .xres = HIFB_AD_DEF_WIDTH,
        .yres = HIFB_AD_DEF_HEIGHT,
        .sarea = {0, 0, HIFB_AD_DEF_WIDTH, HIFB_AD_DEF_HEIGHT},
        .stride = HIFB_AD_DEF_STRIDE,
        .bpp = HIFB_DEF_DEPTH,
        .accl = 0,
        .nplanes = 1,
        .noverlays = 1,
    },
    /* for cursor layer */
    {
        .fmt = HIFB_FMT_ARGB1555,
        .xres = HIFB_CURSOR_DEF_WIDTH,
        .yres = HIFB_CURSOR_DEF_HEIGHT,
        .sarea = {0, 0, HIFB_CURSOR_DEF_WIDTH, HIFB_CURSOR_DEF_HEIGHT},
        .stride = HIFB_CURSOR_DEF_STRIDE,
        .bpp = HIFB_DEF_DEPTH,
        .accl = 0,
        .nplanes = 1,
        .noverlays = 1,
    },
};
657
hifb_screeninfo_init(struct hifb_info * info,struct hifb_screeninfo * sinfo)658 static void hifb_screeninfo_init(struct hifb_info *info, struct hifb_screeninfo *sinfo)
659 {
660 info->vinfo.xres = sinfo->xres;
661 info->vinfo.yres = sinfo->yres;
662 info->oinfo.sarea = sinfo->sarea;
663 info->oinfo.bpp = sinfo->bpp;
664 info->oinfo.stride = sinfo->stride;
665 info->oinfo.accl = sinfo->accl;
666 info->vinfo.nplanes = sinfo->nplanes;
667 info->vinfo.noverlays = sinfo->noverlays;
668 };
669 #endif
670
671 /*
672 * Name : g_argb_bit_field
673 * Desc : bit fields of each color format in HIFB_COLOR_FMT_E,
674 * the order must be the same as that of HIFB_COLOR_FMT_E
675 */
676 #ifndef __HuaweiLite__
static hifb_argb_bitinfo g_argb_bit_field[] = {
    /* RGB565 (bit layout {offset, length, msb_right}) */
    {
        .red = {11, 5, 0},
        .green = {5, 6, 0},
        .blue = {0, 5, 0},
        .transp = {0, 0, 0},
    },
    /* RGB888 */
    {
        .red = {16, 8, 0},
        .green = {8, 8, 0},
        .blue = {0, 8, 0},
        .transp = {0, 0, 0},
    },
    /* KRGB444 */
    {
        .red = {8, 4, 0},
        .green = {4, 4, 0},
        .blue = {0, 4, 0},
        .transp = {0, 0, 0},
    },
    /* KRGB555 */
    {
        .red = {10, 5, 0},
        .green = {5, 5, 0},
        .blue = {0, 5, 0},
        .transp = {0, 0, 0},
    },
    /* KRGB888 */
    {
        .red = {16, 8, 0},
        .green = {8, 8, 0},
        .blue = {0, 8, 0},
        .transp = {0, 0, 0},
    },
    /* ARGB4444 */
    {
        .red = {8, 4, 0},
        .green = {4, 4, 0},
        .blue = {0, 4, 0},
        .transp = {12, 4, 0},
    },
    /* ARGB1555 */
    {
        .red = {10, 5, 0},
        .green = {5, 5, 0},
        .blue = {0, 5, 0},
        .transp = {15, 1, 0},
    },
    /* ARGB8888 */
    {
        .red = {16, 8, 0},
        .green = {8, 8, 0},
        .blue = {0, 8, 0},
        .transp = {24, 8, 0},
    },
    /* ARGB8565 */
    {
        .red = {11, 5, 0},
        .green = {5, 6, 0},
        .blue = {0, 5, 0},
        .transp = {16, 8, 0},
    },
    /* RGBA4444 */
    {
        .red = {12, 4, 0},
        .green = {8, 4, 0},
        .blue = {4, 4, 0},
        .transp = {0, 4, 0},
    },
    /* RGBA5551 */
    {
        .red = {11, 5, 0},
        .green = {6, 5, 0},
        .blue = {1, 5, 0},
        .transp = {0, 1, 0},
    },
    /* RGBA5658 */
    {
        .red = {19, 5, 0},
        .green = {13, 6, 0},
        .blue = {8, 5, 0},
        .transp = {0, 8, 0},
    },
    /* RGBA8888 */
    {
        .red = {24, 8, 0},
        .green = {16, 8, 0},
        .blue = {8, 8, 0},
        .transp = {0, 8, 0},
    },
    /* BGR565 */
    {
        .red = {0, 5, 0},
        .green = {5, 6, 0},
        .blue = {11, 5, 0},
        .transp = {0, 0, 0},
    },
    /* BGR888 */
    {
        .red = {0, 8, 0},
        .green = {8, 8, 0},
        .blue = {16, 8, 0},
        .transp = {0, 0, 0},
    },
    /* ABGR4444 */
    {
        .red = {0, 4, 0},
        .green = {4, 4, 0},
        .blue = {8, 4, 0},
        .transp = {12, 4, 0},
    },
    /* ABGR1555 */
    {
        .red = {0, 5, 0},
        .green = {5, 5, 0},
        .blue = {10, 5, 0},
        .transp = {15, 1, 0},
    },
    /* ABGR8888 */
    {
        .red = {0, 8, 0},
        .green = {8, 8, 0},
        .blue = {16, 8, 0},
        .transp = {24, 8, 0},
    },
    /* ABGR8565 */
    {
        .red = {0, 5, 0},
        .green = {5, 6, 0},
        .blue = {11, 5, 0},
        .transp = {16, 8, 0},
    },
    /* KBGR444 16bpp */
    {
        .red = {0, 4, 0},
        .green = {4, 4, 0},
        .blue = {8, 4, 0},
        .transp = {0, 0, 0},
    },
    /* KBGR555 16bpp */
    {
        .red = {0, 5, 0},
        .green = {5, 5, 0},
        .blue = {10, 5, 0},
        .transp = {0, 0, 0},
    },
    /* KBGR888 32bpp */
    {
        .red = {0, 8, 0},
        .green = {8, 8, 0},
        .blue = {16, 8, 0},
        .transp = {0, 0, 0},
    },

    /* 1bpp */
    {
        .red = {0, 1, 0},
        .green = {0, 1, 0},
        .blue = {0, 1, 0},
        .transp = {0, 0, 0},
    },
    /* 2bpp */
    {
        .red = {0, 2, 0},
        .green = {0, 2, 0},
        .blue = {0, 2, 0},
        .transp = {0, 0, 0},
    },
    /* 4bpp */
    {
        .red = {0, 4, 0},
        .green = {0, 4, 0},
        .blue = {0, 4, 0},
        .transp = {0, 0, 0},
    },
    /* 8bpp */
    {
        .red = {0, 8, 0},
        .green = {0, 8, 0},
        .blue = {0, 8, 0},
        .transp = {0, 0, 0},
    },
    /* ACLUT44 */
    {
        .red = {4, 4, 0},
        .green = {4, 4, 0},
        .blue = {4, 4, 0},
        .transp = {0, 4, 0},
    },
    /* ACLUT88 */
    {
        .red = {8, 8, 0},
        .green = {8, 8, 0},
        .blue = {8, 8, 0},
        .transp = {0, 8, 0},
    }
};
875 #endif
876 static hi_void hifb_version(hi_void);
877 #ifdef CURSOR
878 static hi_void hifb_cursor_calcdispinfo(hi_u32 cursor_id, hifb_par *par, const HIFB_POINT_S *cur_new_pos);
879 #endif
880 static hi_s32 hifb_refresh(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf, HIFB_LAYER_BUF_E buf_mode);
881 static hi_void hifb_select_antiflicker_mode(hifb_par *par);
882 #ifndef __HuaweiLite__
883 static hi_s32 hifb_setcolreg(unsigned regno, unsigned red, unsigned green,
884 unsigned blue, unsigned transp, struct fb_info *info);
885 #endif
886
static hifb_layer g_layer[HIFB_MAX_LAYER_NUM]; /* per-layer runtime state for every graphics layer */
888
889 #ifndef __HuaweiLite__
890
891 #endif
892
static char g_display[8] = "off"; /* module option string (8-byte buffer): "on" opens the VO display */

static char g_softcursor[8] = "off"; /* module option string (8-byte buffer): "on" selects the software cursor */
896
hifb_update_rotate_rect(hi_void)897 bool hifb_update_rotate_rect(hi_void)
898 {
899 return g_bUpdateRotateRect;
900 }
901
902 /*
903 * Name : hifb_completely
904 * Desc : fill buffer, it is faster than memset_s.
905 * See : Called by hifb_overlay_probe
906 */
907 #ifndef __HuaweiLite__
/*
 * Fill the first n bytes of the layer's framebuffer with fill_data:
 * the TDE-coverable region is filled by hardware, any remainder by memset_s.
 * NOTE(review): the return type is hi_u32 but HI_FAILURE is conventionally
 * negative; callers compare against the macros so this works, yet hi_s32
 * would be cleaner — confirm before changing.
 */
static hi_u32 hifb_completely(struct fb_info *info, hi_u32 fill_data, size_t n)
{
    hi_s32 ret;
    HIFB_BUFFER_S dst_img;
    hi_s8 *rest = HI_NULL;
    const hi_u32 can_pitch = HIFB_TDE_DISTINCT_WIDTH * 4; /* 4 alg data */

    if (info == HI_NULL) {
        hifb_error("The pointer is HI_NULL\n");
        return HI_FAILURE;
    }
    /*
     * Assume that the image width is 3840, the format is ARBG8888, and the image is filled by TDE.
     * Since the size of the image that TDE can handle is limited,
     * the width and height are limited to HIFB_TDE_DISTINCT_WIDTH
     * and HIFB_TDE_DISTINCT_HEIGHT, Exceeded parts are cleared using memset_s
     * Configure the target image, pitch, format, width and height, physical address (pointing to the memory start
     * address)
     */
    dst_img.stCanvas.u64PhyAddr = hifb_get_smem_start(info);
    dst_img.stCanvas.u32Pitch = can_pitch;
    dst_img.stCanvas.enFmt = HIFB_FMT_ABGR8888;
    dst_img.stCanvas.u32Width = HIFB_TDE_DISTINCT_WIDTH;
    /* whole lines covered by n; any partial tail line falls through to memset_s below */
    dst_img.stCanvas.u32Height = n / can_pitch;

    if (dst_img.stCanvas.u32Height > HIFB_TDE_DISTINCT_HEIGHT) {
        dst_img.stCanvas.u32Height = HIFB_TDE_DISTINCT_HEIGHT;
    }

    if (dst_img.stCanvas.u32Height != 0) {
        ret = hifb_drv_fill(&dst_img, fill_data);
        if (ret != HI_SUCCESS) {
            hifb_error("hifb_drv_fill fail!\n");
            return HI_FAILURE;
        }
    }

    /*
     * The memory not in the canvas is set by memset_s.
     * NOTE(review): memset_s fills bytes, so a multi-byte fill_data pattern is
     * only reproduced correctly when all four bytes are equal (e.g. 0x00000000
     * or 0xFFFFFFFF) — confirm callers only pass such values.
     */
    if ((n - dst_img.stCanvas.u32Height * can_pitch) != 0) {
        rest = hifb_get_screen_base(info) + can_pitch * (dst_img.stCanvas.u32Height);
        (hi_void)memset_s(rest, n - dst_img.stCanvas.u32Height * can_pitch, fill_data,
                          n - dst_img.stCanvas.u32Height * can_pitch);
    }
    return HI_SUCCESS;
}
953 #endif
954
955 /*
956 * Name : hifb_alloc_compress_buffer
957 * Desc : the compression is TRUE, the memory is allocated and the address is put in the compressed information of the
958 * private data.
959 */
hifb_alloc_compress_buffer(hifb_par * par)960 hi_s32 hifb_alloc_compress_buffer(hifb_par *par)
961 {
962 hi_u32 buf_height;
963 hi_u32 buf_stride;
964 hi_u32 header_size;
965 hi_u32 buf_size;
966 hi_u64 buf_phy_addr;
967 hi_u32 layer_index;
968
969 if (par == HI_NULL) {
970 return HI_FAILURE;
971 }
972
973 layer_index = par->layer_id;
974 if (layer_index >= HIFB_MAX_LAYER_NUM) {
975 return HI_FAILURE;
976 }
977
978 buf_height = par->display_info.display_height;
979 /* 2 alg data */
980 buf_stride = (par->display_info.display_width * 2 + COMPRESS_STRIDE_ALIGNMENT - 1) &
981 (~(COMPRESS_STRIDE_ALIGNMENT - 1));
982 header_size = COMPRESS_HEADER_STRIDE * buf_height;
983 buf_size = buf_stride * buf_height + header_size;
984 /* 2 alg data 16 size buffer */
985 buf_phy_addr = hifb_buf_allocmem("Compress buffer", 16, 2 * buf_size, hifb_get_layer_mmz_names(layer_index));
986 if (buf_phy_addr == 0) {
987 hifb_trace(HI_DBG_ERR, "Mmz Malloc Compress buffer fail\n");
988 return HI_FAILURE;
989 }
990
991 par->compress_info.ar_buf_phy_addr = buf_phy_addr;
992 par->compress_info.gb_buf_phy_addr = buf_phy_addr + buf_size;
993 par->compress_info.header_size = header_size;
994 par->compress_info.stride = buf_stride;
995
996 return HI_SUCCESS;
997 }
998
hifb_free_compress_buffer(hifb_par * par)999 hi_void hifb_free_compress_buffer(hifb_par *par)
1000 {
1001 if (par == HI_NULL) {
1002 return;
1003 }
1004
1005 if (par->compress_info.ar_buf_phy_addr != 0) {
1006 hifb_buf_freemem(par->compress_info.ar_buf_phy_addr);
1007 par->compress_info.ar_buf_phy_addr = 0;
1008 }
1009 }
1010
1011 /*
1012 * Name : hifb_set_dcmp_info
1013 * Desc : set the decompression info.
1014 */
/*
 * Name : hifb_set_dcmp_info
 * Desc : build the decompression (dcmp) configuration for a layer from its
 *        refresh/compress private data and hand it to the drv level. On any
 *        inconsistency (missing plane address, unsupported pixel format, or
 *        drv rejection) hardware decompression is disabled instead.
 */
hi_void hifb_set_dcmp_info(hifb_par *par)
{
    volatile hifb_compress_info *compress_info = HI_NULL;
    hifb_refresh_info *refresh_info = HI_NULL;
    fb_vo_graphic_dcmp_info dcmp_info;
    HIFB_COLOR_FMT_E hifb_color_fmt;

    if (par == HI_NULL) {
        return;
    }

    compress_info = &par->compress_info;
    refresh_info = &par->refresh_info;

    /* Without both the AR and GB plane addresses there is nothing to decompress. */
    if ((refresh_info->screen_addr == 0) || (refresh_info->gb_screen_addr == 0)) {
        /* close the dcmp */
        g_drv_ops.hifb_drv_enable_dcmp(par->layer_id, HI_FALSE);
        hifb_trace(HI_DBG_WARN, "Compression is opened, but compressed buffer phyaddr for refreshing is HI_NULL(0)\n");
        return;
    }

    /*
     * Decompressing information from compressed information: compression channels AR, GB address, and its size size0,
     * size1
     */
    hifb_color_fmt = refresh_info->user_buffer.stCanvas.enFmt;
    switch (hifb_color_fmt) {
        case HIFB_FMT_ARGB4444:
            dcmp_info.pixel_fmt = FB_VO_INPUTFMT_ARGB_4444;
            break;
        case HIFB_FMT_ARGB1555:
            dcmp_info.pixel_fmt = FB_VO_INPUTFMT_ARGB_1555;
            break;
        case HIFB_FMT_ARGB8888:
            dcmp_info.pixel_fmt = FB_VO_INPUTFMT_ARGB_8888;
            break;
        default:
            /* only the three ARGB formats above are decompressible */
            hifb_trace(HI_DBG_ERR, "Pixel format(%d) is invalid!\n", hifb_color_fmt);
            return;
    }
    dcmp_info.ar_phy_addr = hifb_drv_get_dcmp_offset_addr(refresh_info->screen_addr);
    dcmp_info.gb_phy_addr = hifb_drv_get_dcmp_offset_addr(refresh_info->gb_screen_addr);
    dcmp_info.width = refresh_info->user_buffer.stCanvas.u32Width;
    dcmp_info.height = refresh_info->user_buffer.stCanvas.u32Height;
    dcmp_info.stride = refresh_info->disp_buf_info.stride;
    dcmp_info.frame_size0 = compress_info->frame_size0;
    dcmp_info.frame_size1 = compress_info->frame_size1;
    dcmp_info.is_lossless_a = HI_FALSE; /* Whether lossless,FALSE:Lossy,TRUE:lossless */
    dcmp_info.is_lossless = HI_FALSE; /* Whether lossless,FALSE:Lossy,TRUE:lossless */

    /* Decompressing information to drv level */
    if (g_drv_ops.hifb_drv_set_dcmp_info(par->layer_id, &dcmp_info) == HI_FALSE) {
        /* close dcmp */
        g_drv_ops.hifb_drv_enable_dcmp(par->layer_id, HI_FALSE);
    }
}
1071
1072 #ifdef CONFIG_HI_VO_FB_SEPARATE
1073 static int hifb_interrupt_route(hi_s32 irq, hi_void *dev_id);
1074 #endif
1075
1076 #ifdef CONFIG_HIFB_SOFT_IRQ_SUPPORT
1077 static hi_s32 hifb_soft_interrupt(hi_s32 irq, hi_void *dev_id);
1078 #endif
1079
hifb_debug_info_init(hi_void)1080 static hi_s32 hifb_debug_info_init(hi_void)
1081 {
1082 hi_u32 i;
1083 hi_s32 j;
1084 const hi_char *entry_name[5] = { /* 5 max */
1085 "hifb0",
1086 "hifb1",
1087 "hifb2",
1088 "hifb3",
1089 "hifb4"
1090 };
1091
1092 hifb_proc_init();
1093 /* initialize fb file according the config */
1094 for (i = 0; i < g_drv_ops.layer_count; i++) {
1095 /* register the layer */
1096 if (hifb_overlay_probe(i) == HI_SUCCESS) {
1097 #ifdef CONFIG_HI_PROC_SHOW_SUPPORT
1098 /* create a proc entry in 'hifb' for the layer */
1099 hifb_proc_add_module(entry_name[i], 16, hifb_read_proc, hifb_write_proc, /* 16 name size */
1100 g_layer[i].info);
1101 #endif
1102 } else {
1103 /* roll back hifb0~hifbi-1's proc entry, remove them. */
1104 j = (hi_s32)i;
1105 for (j = i - 1; j >= 0; j--) {
1106 #ifdef CONFIG_HI_PROC_SHOW_SUPPORT
1107 /* destroy a proc entry in 'hifb' for the layer */
1108 hifb_proc_remove_module(entry_name[j]);
1109 #endif
1110 /* unregister the layer */
1111 hifb_overlay_cleanup(j, HI_TRUE);
1112 }
1113 osal_printk("ERROR: Load hifb.ko FAILED!\n");
1114 return HI_FAILURE;
1115 }
1116 }
1117 return HI_SUCCESS;
1118 }
1119
hifb_init_process(hi_void)1120 static hi_s32 hifb_init_process(hi_void)
1121 {
1122 /* open vo */
1123 if (g_display_on) {
1124 g_drv_ops.hifb_drv_open_display();
1125 }
1126
1127 /* initial adoption layer */
1128 if (g_drv_ops.hifb_drv_init() != HI_SUCCESS) {
1129 hifb_error("drv init failed\n");
1130 return HI_FAILURE;
1131 }
1132
1133 hifb_drv_set_tde_callback(hifb_tde_callback);
1134 hifb_drv_set_tde_rotate_callback(hifb_tde_rotate_callback);
1135
1136 return HI_SUCCESS;
1137 }
1138
/*
 * Register the HIFB interrupt handlers: the VO route interrupt (when
 * CONFIG_HI_VO_FB_SEPARATE) and the soft interrupt (when
 * CONFIG_HIFB_SOFT_IRQ_SUPPORT). On any failure, whatever was registered so
 * far is released again, so the function is all-or-nothing.
 */
static hi_s32 hifb_request_irq(hi_void)
{
#ifdef CONFIG_HI_VO_FB_SEPARATE
    /* the handler pointer doubles as the dev_id cookie used by osal_free_irq */
    if (osal_request_irq(g_fifb_irq, hifb_interrupt_route, HI_NULL, "HIFB Int", hifb_interrupt_route)) {
        g_hifb_register = HI_FALSE;
        hifb_error("request_irq for Hifb failure!\n");
#ifdef CONFIG_HIFB_SOFT_IRQ_SUPPORT
        if (g_hifb_soft_irq_register == HI_TRUE) {
            osal_free_irq(g_fifb_soft_irq, hifb_soft_interrupt);
            g_hifb_soft_irq_register = HI_FALSE;
        }
#endif
        /* NOTE(review): g_hifb_irq_register only becomes HI_TRUE after the
         * request above succeeds, so this branch looks unreachable here --
         * kept defensively; confirm before removing. */
        if (g_hifb_irq_register == HI_TRUE) {
            osal_free_irq(g_fifb_irq, hifb_interrupt_route);
            g_hifb_irq_register = HI_FALSE;
        }
        return HI_FAILURE;
    }
    g_hifb_irq_register = HI_TRUE;
#endif

#ifdef CONFIG_HIFB_SOFT_IRQ_SUPPORT
    if (osal_request_irq(g_fifb_soft_irq, hifb_soft_interrupt, HI_NULL, "HIFB_SOFT_INT", hifb_soft_interrupt)) {
        g_hifb_register = HI_FALSE;
        hifb_error("request_irq for hifb soft irq failure!\n");
        if (g_hifb_soft_irq_register == HI_TRUE) {
            osal_free_irq(g_fifb_soft_irq, hifb_soft_interrupt);
            g_hifb_soft_irq_register = HI_FALSE;
        }
#ifdef CONFIG_HI_VO_FB_SEPARATE
        /* roll back the route interrupt registered above */
        if (g_hifb_irq_register == HI_TRUE) {
            osal_free_irq(g_fifb_irq, hifb_interrupt_route);
            g_hifb_irq_register = HI_FALSE;
        }
#endif
        return HI_FAILURE;
    }
    g_hifb_soft_irq_register = HI_TRUE;
#endif
    return HI_SUCCESS;
}
1180
/*
 * Error path for hifb_init after hifb_init_process succeeded: undo the
 * adaptation-layer init and unhook the TDE callbacks.
 */
static hi_void hifb_init_do_err2(hi_void)
{
#ifdef __HuaweiLite__
    g_drv_ops.hifb_drv_deinit();
#else
#ifndef CONFIG_HI_HIFB_VGS
    /* NOTE(review): with CONFIG_HI_HIFB_VGS the drv deinit is skipped here --
     * presumably handled elsewhere; confirm. */
    g_drv_ops.hifb_drv_deinit();

#endif
#endif
    hifb_drv_set_tde_callback(HI_NULL);
    hifb_drv_set_tde_rotate_callback(HI_NULL);
}
1194
hifb_init_do_err1(hi_void)1195 static hi_void hifb_init_do_err1(hi_void)
1196 {
1197 hi_u32 i;
1198 /* close vo */
1199 if (g_display_on) {
1200 g_drv_ops.hifb_drv_close_display();
1201 }
1202
1203 for (i = 0; i < g_drv_ops.layer_count; i++) {
1204 hifb_overlay_cleanup(i, HI_TRUE);
1205 }
1206
1207 #ifdef CONFIG_HI_PROC_SHOW_SUPPORT
1208 hifb_proc_remove_all_module();
1209 #endif
1210 osal_printk("ERROR: Load hifb.ko FAILED!\n");
1211 return;
1212 }
1213
/*
 * Function    : hifb_init
 * Description : initialize the framebuffer. This function is called when
 *               loading the module.
 * Return      : 0 on success; otherwise, an error code.
 */
1220 #ifdef __HuaweiLite__
hi_s32 hifb_init(hi_void *args)
{
    /* args carries the module parameters (vram sizes, cursor/display flags) */
    if (hifb_get_module_para(args) != HI_SUCCESS) {
        return HI_FAILURE;
    }
#ifdef CONFIG_HI_PROC_SHOW_SUPPORT
    hifb_proc_init();
#endif
    /* get the chip operation method */
    hifb_drv_get_ops(&g_drv_ops);

    /* parse the \arg video && g_softcursor && g_display string */
    if (hifb_parse_cfg() < 0) {
        hifb_error("Usage: insmod hifb.ko video=\"hifb: vrami_size:xxx, vramj_size: xxx, ...\"\n");
        hifb_error("i, j means layer id, xxx means layer size in kbytes!\n");
        hifb_error("example: insmod hifb.ko video=\"hifb: vram0_size: 810, vram1_size: 810\"\n\n");
        return HI_FAILURE;
    }

    /* register one framebuffer (and proc entry) per configured layer */
    if (hifb_debug_info_init() != HI_SUCCESS) {
        return HI_FAILURE;
    }

    /* open vo and initial layer */
    if (hifb_init_process() != HI_SUCCESS) {
        goto err1;
    }
    if (hifb_request_irq() != HI_SUCCESS) {
        goto err2;
    }

    /* show version */
    hifb_version();
    osal_printk("Load hifb.ko OK!\n");
    return 0;

err2:
    /* undo hifb_init_process */
    hifb_init_do_err2();
err1:
    /* undo layer registration and display open */
    hifb_init_do_err1();
    return HI_FAILURE;
}
1263 #else
hi_s32 hifb_init(hi_void)
{
    /* make dir 'hifb' under proc/graphics */
#ifdef CONFIG_HI_PROC_SHOW_SUPPORT
    hifb_proc_init();
#endif

    /* get the chip operation method */
    hifb_drv_get_ops(&g_drv_ops);
    /* parse the \arg video && g_softcursor && g_display string */
    if (hifb_parse_cfg() < 0) {
        hifb_error("Usage:insmod hifb.ko video=\"hifb:vrami_size:xxx,vramj_size:xxx,...\"\n");
        hifb_error("i,j means layer id, xxx means layer size in kbytes!\n");
        hifb_error("example:insmod hifb.ko video=\"hifb:vram0_size:1620,vram1_size:1620\"\n\n");
        g_hifb_register = HI_FALSE;
        osal_printk("ERROR: Load hifb.ko FAILED!\n");
        return HI_FAILURE;
    }

    /* initialize fb file according the config */
    if (hifb_debug_info_init() != HI_SUCCESS) {
        g_hifb_register = HI_FALSE;
        return HI_FAILURE;
    }

    /* open vo and initial layer */
    if (hifb_init_process() != HI_SUCCESS) {
        goto err1;
    }

    /*
     * Interrupt registration interrupt is placed at the end.
     * Since the interrupt will be opened in the boot screen,
     * it will respond when loading ko, and the context is used in the interrupt
     */
    if (hifb_request_irq() != HI_SUCCESS) {
        goto err2;
    }
    /* show version */
    hifb_version();

    /* vsync / pts helper devices; return values deliberately ignored */
    (void)hifb_vsync_init();
    (void)hifb_pts_init();

#ifdef CONFIG_DRM_HISI_HISMART
    drm_hal_gfx_dev_register();
#endif

    osal_printk("load hifb.ko OK!\n");

    return 0;

err2:
    /* undo hifb_init_process */
    hifb_init_do_err2();
err1:
    /* undo layer registration and display open */
    hifb_init_do_err1();
    g_hifb_register = HI_FALSE;
    return HI_FAILURE;
}
1323 #endif
1324
/*
 * Tear down everything hifb_init set up: unregister the DRM device, release
 * the interrupt handlers, unhook the TDE callbacks, deinit the drv layer,
 * close the display, and remove every registered framebuffer / proc entry.
 * Called when the module is unloaded.
 */
hi_void hifb_cleanup(hi_void)
{
    hi_u32 i;

#ifdef CONFIG_DRM_HISI_HISMART
    drm_hal_gfx_dev_unregister();
#endif

#ifdef CONFIG_HIFB_SOFT_IRQ_SUPPORT
    if (g_hifb_soft_irq_register == HI_TRUE) {
        osal_free_irq(g_fifb_soft_irq, hifb_soft_interrupt);
        g_hifb_soft_irq_register = HI_FALSE;
    }
#endif

#ifdef CONFIG_HI_VO_FB_SEPARATE
    if (g_hifb_irq_register == HI_TRUE) {
        osal_free_irq(g_fifb_irq, hifb_interrupt_route);
        g_hifb_irq_register = HI_FALSE;
    }
#endif

    /* the rest only applies if module load fully succeeded (g_hifb_register) */
    if (g_hifb_register) {
        hifb_pts_exit();
        hifb_vsync_exit();

        hifb_drv_set_tde_callback(HI_NULL);
        hifb_drv_set_tde_rotate_callback(HI_NULL);

        g_drv_ops.hifb_drv_deinit();

        if (g_display_on) {
            g_drv_ops.hifb_drv_close_display();
        }

        /* remove all entry under dir 'hifb' */
#ifdef CONFIG_HI_PROC_SHOW_SUPPORT
        hifb_proc_remove_all_module();

        /* remove dir 'hifb' under proc */
#endif
        for (i = 0; i < g_drv_ops.layer_count; i++) {
            hifb_overlay_cleanup(i, HI_TRUE);
        }
    }
    osal_printk("unload hifb.ko ... ok!\n");
    return;
}
1373
1374 #ifdef CONFIG_HI_VO_FB_SEPARATE
1375 static hifb_main_gfx_cb g_registered_cb = HI_NULL;
1376
/*
 * Register the graphics callback invoked from the VO / soft interrupt.
 * NOTE(review): layer_id and type are currently ignored -- a single global
 * callback serves every layer and interface type; confirm per-layer
 * registration is not expected by callers.
 */
int hifb_main_reg_callback(hi_u32 layer_id, hifb_main_intf_type type, hifb_main_gfx_cb cb)
{
    /* fix: mark intentionally unused parameters, matching file convention */
    hi_unused(layer_id);
    hi_unused(type);
    g_registered_cb = cb;
    return 0;
}
1382 #endif
1383
1384 #ifdef CONFIG_HI_VO_FB_SEPARATE
/*
 * VO interrupt route handler: determine which display device raised the
 * interrupt, then walk every graphics layer and either forward the event to
 * the registered DRM callback, run the per-layer VO callback (vertical
 * timing), or wake the refresh worker (frame start).
 */
static int hifb_interrupt_route(hi_s32 irq, hi_void *dev_id)
{
    hi_u32 int_status = 0;
    hifb_vo_dev vo_dev = VO_INVALID_DEV;
    hi_s32 i, ret;
    struct fb_info *info = HI_NULL;
    hifb_par *par = HI_NULL;
    hi_u32 vtthd_flag = 0x0;     /* vertical-timing interrupt mask for the device */
    hi_u32 frm_start_flag = 0x0; /* frame-start interrupt mask for the device */

    hi_unused(irq);
    hi_unused(dev_id);
    g_drv_ops.hifb_drv_graphics_get_int(&int_status);
    /*
     * HIFB_DRV_GraphicsClearINTStatus will clear all reported interrupts.
     * If devices' interrupt are reported at the same time,
     * Will cause only one device to be processed, the interruption of other devices is ignored,
     * so it cannot be cleared here.
     * All reported interrupts can only be cleared one by one in HIFB_DRV_GraphicsGetINTDev
     */
    hifb_trace(HI_DBG_DEBUG, "intstatus : %u\n", int_status);

    ret = g_drv_ops.hifb_drv_graphics_get_int_dev(int_status, &vo_dev);
    if (ret != HI_SUCCESS) {
        return OSAL_IRQ_HANDLED;
    }

    /* Graphic layer interrupt g_display processing */
    if ((hi_s32)vo_dev == VO_INVALID_DEV) {
        hifb_error("unknown dev:%d \n", vo_dev);
        return OSAL_IRQ_HANDLED;
    }

    /* select the interrupt masks that belong to the reporting device */
    if (vo_dev == VO_DEV_DHD0) {
        vtthd_flag = HIFB_INTMSK_HD0_VTTHD2;
        frm_start_flag = HIFB_INTMSK_HD0_VTTHD3;
    } else if (vo_dev == VO_DEV_DHD1) {
        vtthd_flag = HIFB_INTMSK_HD1_VTTHD2;
        frm_start_flag = HIFB_INTMSK_HD1_VTTHD3;
    }

    /* Handling all graphics layers on this device */
    for (i = 0; i < VO_MAX_GRAPHICS_LAYER_NUM; ++i) {
        /* NOTE(review): g_layer[i].info is assumed non-NULL for every index --
         * confirm all layers were probed before interrupts were enabled. */
        info = g_layer[i].info;
        par = (hifb_par *)(info->par);

        // vo interrupt, inform drm module
        if (int_status & vtthd_flag) {
            if (g_registered_cb != HI_NULL) {
                /* DRM path: one callback handles the event; stop here */
                g_registered_cb(i, HIFB_MAIN_INTF_VO, HI_NULL);
                return OSAL_IRQ_HANDLED;
            }
        }

        /* If the layer is not open, do not act or if the layer is bound to the device */
        if ((par->layer_open == HI_FALSE) || (vo_dev != graphic_drv_get_bind_dev(i))) {
            continue;
        }

        /*
         * 1. The frame start interrupt and the vertical timing interrupt appear
         * in the interrupt status register at the same time.
         * Frame start interrupts and vertical timing interrupts
         * are processed one by one and cannot be processed at the same time because
         * interrupts are cleared one by one, and the interrupt processing order
         * and the clearing order should be also consistent.
         * The first clear must be processed first, then the post-cleared
         * 2, as long as there is a vertical timing interrupt, it is necessary to drive the g_display
         */
        if (int_status & vtthd_flag) { /* VO vertical timing interrupt */
#ifndef CONFIG_HIFB_SOFT_IRQ_SUPPORT
            par->refresh_info.do_refresh_job = HI_FALSE;
            hifb_vo_callback(&i, HI_NULL);
#endif
        } else if (int_status & frm_start_flag) { /* VO frame start interrupt */
            /* wake any thread waiting in the refresh path */
            par->refresh_info.do_refresh_job = HI_TRUE;
            wake_up_interruptible(&(par->do_refresh_job));
        }
    }
    return OSAL_IRQ_HANDLED;
}
1466 #endif
1467
1468 #ifdef CONFIG_HIFB_SOFT_IRQ_SUPPORT
/*
 * Soft interrupt handler: for each graphics layer, mask the soft interrupt
 * source and either forward the event to the registered DRM callback (which
 * ends processing for this interrupt) or run the per-layer VO callback.
 */
static hi_s32 hifb_soft_interrupt(hi_s32 irq, hi_void *dev_id)
{
    hi_s32 i;
    struct fb_info *info = HI_NULL;
    hifb_par *par = HI_NULL;
    hi_unused(irq);
    hi_unused(dev_id);
    for (i = 0; i < VO_MAX_GRAPHICS_LAYER_NUM; ++i) {
        hifb_hal_soft_int_en(HI_FALSE);
        /* NOTE(review): g_layer[i].info is assumed non-NULL for every index --
         * confirm all layers were probed before this irq was requested. */
        info = g_layer[i].info;
        par = (hifb_par *)(info->par);
        if (g_registered_cb != HI_NULL) {
            /* DRM path: one callback handles the event; stop here */
            g_registered_cb(i, HIFB_MAIN_INTF_VO, HI_NULL);
            return OSAL_IRQ_HANDLED;
        }
        par->refresh_info.do_refresh_job = HI_FALSE;
        if (hifb_vo_callback(&i, HI_NULL) != HI_SUCCESS) {
            hifb_error("hifb_vo_callback HI_FAILURE \r\n");
        }
    }
    return OSAL_IRQ_HANDLED;
}
1491 #endif
1492
hifb_open_check_param(struct fb_info * info)1493 static hi_s32 hifb_open_check_param(struct fb_info *info)
1494 {
1495 hifb_par *par = HI_NULL;
1496 if ((info == HI_NULL) || (info->par == HI_NULL)) {
1497 return HI_FAILURE;
1498 }
1499
1500 par = (hifb_par *)info->par;
1501 if (par->layer_id >= HIFB_MAX_LAYER_NUM) {
1502 hifb_error("layer %d is not supported!\n", par->layer_id);
1503 return HI_FAILURE;
1504 }
1505
1506 /* assure layer is legal */
1507 if (!g_drv_ops.capability[par->layer_id].bLayerSupported) {
1508 hifb_error("layer %d is not supported!\n", par->layer_id);
1509 return HI_FAILURE;
1510 }
1511
1512 return HI_SUCCESS;
1513 }
1514
hifb_open_is_cursor_layer(struct fb_info * info)1515 static hi_bool hifb_open_is_cursor_layer(struct fb_info *info)
1516 {
1517 hifb_par *par = HI_NULL;
1518 hi_u32 layer_id;
1519 HIFB_ALPHA_S alpha = {
1520 HI_TRUE, HI_FALSE, HIFB_ALPHA_TRANSPARENT,
1521 HIFB_ALPHA_OPAQUE, HIFB_ALPHA_OPAQUE, 0
1522 };
1523 if ((info == HI_NULL) || (info->par == HI_NULL)) {
1524 hifb_error("%s:%d: Unexpected null pointer\n", __FUNCTION__, __LINE__);
1525 return HI_FALSE;
1526 }
1527 par = (hifb_par *)info->par;
1528 layer_id = par->layer_id;
1529
1530 if (is_cursor_layer(par->layer_id) && is_soft_cursor()) {
1531 if (!atomic_read(&par->ref_count)) {
1532 (hi_void)memset_s(par, sizeof(hifb_par), 0, sizeof(hifb_par));
1533
1534 par->layer_id = layer_id;
1535 atomic_set(&par->ref_count, 0);
1536
1537 /* Configuring alpha properties */
1538 hifb_set_alpha(par, &alpha);
1539 }
1540 atomic_inc(&par->ref_count);
1541
1542 return HI_TRUE;
1543 }
1544 return HI_FALSE;
1545 }
1546
1547 #ifdef __HuaweiLite__
hifb_open_default_param(struct fb_info * info,HIFB_COLOR_FMT_E * color_format)1548 static hi_s32 hifb_open_default_param(struct fb_info *info, HIFB_COLOR_FMT_E *color_format)
1549 {
1550 hifb_par *par = HI_NULL;
1551 hi_u32 layer_id;
1552 par = (hifb_par *)info->par;
1553 layer_id = par->layer_id;
1554
1555 if (is_hd_layer(layer_id)) {
1556 hifb_screeninfo_init(info, &g_default_info[HIFB_LAYER_TYPE_HD]);
1557 } else if (is_sd_layer(layer_id)) {
1558 hifb_screeninfo_init(info, &g_default_info[HIFB_LAYER_TYPE_SD]);
1559 } else if (is_ad_layer(layer_id)) {
1560 hifb_screeninfo_init(info, &g_default_info[HIFB_LAYER_TYPE_AD]);
1561 } else if (is_cursor_layer(layer_id)) {
1562 hifb_screeninfo_init(info, &g_default_info[HIFB_LAYER_TYPE_CURSOR]);
1563 } else {
1564 hifb_error("error layer id:%u\n", par->layer_id);
1565 }
1566 *color_format = info->vinfo.fmt;
1567 if (*color_format == HIFB_FMT_BUTT) {
1568 hifb_error("Invalid default color format!\n");
1569 return HI_FAILURE;
1570 }
1571
1572 info->oinfo.stride = (info->oinfo.sarea.w * (info->oinfo.bpp >> 3) + HIFB_ALIGNMENT) & /* 8 is bits 2^3 */
1573 (~HIFB_ALIGNMENT);
1574
1575 return HI_SUCCESS;
1576 }
1577 #else
hifb_open_default_param(struct fb_info * info,HIFB_COLOR_FMT_E * color_format)1578 static hi_s32 hifb_open_default_param(struct fb_info *info, HIFB_COLOR_FMT_E *color_format)
1579 {
1580 hifb_par *par = HI_NULL;
1581 hi_u32 layer_id;
1582 struct fb_var_screeninfo *var = HI_NULL;
1583 struct fb_fix_screeninfo *fix = HI_NULL;
1584
1585 var = &info->var;
1586 fix = &info->fix;
1587
1588 par = (hifb_par *)info->par;
1589 layer_id = par->layer_id;
1590
1591 if (is_hd_layer(layer_id)) {
1592 *var = g_default_var[HIFB_LAYER_TYPE_HD];
1593 } else if (is_sd_layer(layer_id)) {
1594 *var = g_default_var[HIFB_LAYER_TYPE_SD];
1595 } else if (is_ad_layer(layer_id)) {
1596 *var = g_default_var[HIFB_LAYER_TYPE_AD];
1597 } else if (is_cursor_layer(layer_id)) {
1598 *var = g_default_var[HIFB_LAYER_TYPE_CURSOR];
1599 } else {
1600 hifb_error("error layer id:%u\n", par->layer_id);
1601 }
1602 /* transform colorfmt form bitfiled to hifb fmt, and record it */
1603 *color_format = hifb_getfmtbyargb(&var->red, &var->green, &var->blue,
1604 &var->transp, var->bits_per_pixel);
1605 if (*color_format == HIFB_FMT_BUTT) {
1606 hifb_error("Invalid default color format!\n");
1607 return HI_FAILURE;
1608 }
1609 /* By default it is width*4 (32 bits per pixel / 8(2^3)) */
1610 fix->line_length = (var->xres_virtual * (var->bits_per_pixel >> 3) + HIFB_ALIGNMENT) & /* 8 is bits 2^3 */
1611 (~HIFB_ALIGNMENT);
1612 return HI_SUCCESS;
1613 }
1614 #endif
1615
hifb_open_init_display(struct fb_info * info,hi_u32 layer_id,hifb_osd_data * osd_data)1616 static hi_void hifb_open_init_display(struct fb_info *info, hi_u32 layer_id, hifb_osd_data *osd_data)
1617 {
1618 hifb_par *par = HI_NULL;
1619 hifb_display_info *display_info = HI_NULL;
1620 par = (hifb_par *)info->par;
1621
1622 (hi_void)memset_s(par, sizeof(hifb_par), 0, sizeof(hifb_par));
1623 par->layer_id = layer_id;
1624 atomic_set(&par->ref_count, 0);
1625
1626 display_info = &par->display_info;
1627 display_info->display_width = hifb_get_xres(info);
1628 display_info->display_height = hifb_get_yres(info);
1629 display_info->screen_width = hifb_get_xres(info);
1630 display_info->screen_height = hifb_get_yres(info);
1631 display_info->vir_x_res = hifb_get_xres_virtual(info);
1632 display_info->vir_y_res = hifb_get_yres_virtual(info);
1633 display_info->x_res = hifb_get_xres(info);
1634 display_info->y_res = hifb_get_yres(info);
1635 display_info->max_screen_width = osd_data->screen_width;
1636 display_info->max_screen_height = osd_data->screen_height;
1637 display_info->mirror_mode = HIFB_MIRROR_NONE;
1638 display_info->rotate_mode = HIFB_ROTATE_NONE;
1639 par->rotate_vb = 0;
1640
1641 init_waitqueue_head(&(par->vbl_event));
1642 init_waitqueue_head(&(par->do_refresh_job));
1643
1644 return;
1645 }
1646
/*
 * Default configuration for a freshly opened layer: buffer mode, alpha,
 * display-buffer info, color format, anti-flicker, and (for soft-cursor
 * setups) the cursor canvas -- all set under the layer spinlock -- then the
 * defaults are pushed down to the drv level outside the lock.
 */
static hi_void hifb_open_init_config(struct fb_info *info, hi_u32 layer_id, hifb_osd_data *osd_data,
                                     HIFB_COLOR_FMT_E color_format, hifb_cursor_info *cursor_info)
{
    hifb_par *par = HI_NULL;
    /* default alpha: pixel alpha enabled, global alpha disabled */
    HIFB_ALPHA_S alpha = {
        HI_TRUE, HI_FALSE, HIFB_ALPHA_TRANSPARENT,
        HIFB_ALPHA_OPAQUE, HIFB_ALPHA_OPAQUE, 0
    };
    hifb_display_info *display_info = HI_NULL;
    unsigned long lock_flag;
    par = (hifb_par *)info->par;
    display_info = &par->display_info;

    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    hifb_set_bufmode(par->layer_id, HIFB_LAYER_BUF_BUTT);
    hifb_set_alpha(par, &alpha);
    hifb_set_dispbufinfo(layer_id);
    hifb_set_fmt(par, color_format);
    /* Anti-flicker when interlaced */
    display_info->need_antiflicker = (osd_data->scan_mode == HIFB_SCANMODE_I) ? (HI_TRUE) : (HI_FALSE);
    hifb_set_antiflickerlevel(par->layer_id, HIFB_LAYER_ANTIFLICKER_AUTO);

#ifdef CURSOR
    if (is_soft_cursor() && !is_cursor_layer(layer_id)) {
        /* the soft-cursor canvas lives right after the layer's video memory */
        cursor_info->cursor.stCursor.u64PhyAddr = hifb_get_smem_start(info) + hifb_get_smem_len(info);
        cursor_info->cursor.stCursor.u32Pitch = HIFB_SOFTCURSOR_STRIDE;
        cursor_info->cursor.stCursor.enFmt = color_format;
        hifb_set_cursorinfo(par, cursor_info);
    }
#endif
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
    /* push the defaults down to the hardware layer (outside the spinlock) */
    g_drv_ops.hifb_drv_layer_default_setting(layer_id);
    g_drv_ops.hifb_drv_set_layer_alpha(layer_id, par->alpha);
    g_drv_ops.hifb_drv_set_layer_data_fmt(layer_id, par->color_format);
}
1682
/*
 * Compression-related defaults for a freshly opened layer: read back the DDR
 * detect zone config (when supported), clear the decompression state, and
 * program the layer's rect / stride / premul / colorkey / address registers
 * from the framebuffer geometry.
 */
static hi_void hifb_open_init_compress(struct fb_info *info)
{
    hifb_par *par = HI_NULL;
    volatile hifb_compress_info *compress_info = HI_NULL;
    hifb_refresh_info *refresh_info = HI_NULL;
    hifb_display_info *display_info = HI_NULL;
    HIFB_RECT rect;
    hi_u32 layer_id;
    par = (hifb_par *)info->par;
    display_info = &par->display_info;
    layer_id = par->layer_id;
    compress_info = &par->compress_info;
    refresh_info = &par->refresh_info;
#ifdef MDDRDETECT
    /* Memory detection is open by default. This function is not supported at this time. */
    g_drv_ops.hifb_drv_get_ddr_zone_cfg(layer_id, (hi_u32*)&compress_info->start_section,
                                        (hi_u32*)&compress_info->zone_nums);
#endif
    /* Decompression information settings */
    compress_info->new_start_section = compress_info->start_section;
    compress_info->new_zone_nums = compress_info->zone_nums;
    compress_info->clear_zone = HI_FALSE;

    g_drv_ops.hifb_drv_enable_dcmp(layer_id, HI_FALSE); /* Clear decompression status */

    /* full-screen rect from the current framebuffer resolution */
    rect.x = 0;
    rect.y = 0;
    rect.w = hifb_get_xres(info);
    rect.h = hifb_get_yres(info);

    g_drv_ops.hifb_drv_set_layer_rect(par->layer_id, &rect, &rect);
    g_drv_ops.hifb_drv_set_layer_src_image_reso(par->layer_id, &rect);

    g_drv_ops.hifb_drv_set_layer_stride(par->layer_id, hifb_get_line_length(info));

    g_drv_ops.hifb_drv_set_pre_mul(par->layer_id, display_info->premul);
    if (g_drv_ops.capability[layer_id].bKeyRgb || g_drv_ops.capability[layer_id].bKeyAlpha) {
        g_drv_ops.hifb_drv_set_layer_key_mask(par->layer_id, &par->ckey);
    }

    /* Set to memory address */
    g_drv_ops.hifb_drv_set_layer_addr(par->layer_id, hifb_get_smem_start(info));
    refresh_info->screen_addr = hifb_get_smem_start(info);
    refresh_info->gb_screen_addr = 0x0; /* Clear the decompression address */
    compress_info->layer_addr_update = HI_TRUE;

    return;
}
1731
/*
 * Final step of a layer open: register the VO/WBC interrupt callbacks, mark
 * the layer shown and open under the spinlock, and enable the hardware
 * layer so VO can decide whether to configure CSC.
 */
static hi_void hifb_open_init_finish(struct fb_info *info)
{
    hifb_par *par = HI_NULL;
    volatile hifb_compress_info *compress_info = HI_NULL;
    unsigned long lock_flag;

    par = (hifb_par *)info->par;
    compress_info = &par->compress_info;

#ifndef CONFIG_HI_VO_FB_SEPARATE
    /* The soft interrupt handlers are registered in the context of the graphics layer. */
    g_drv_ops.hifb_drv_set_int_callback(HIFB_INTTYPE_VO, hifb_vo_callback,
                                        par->layer_id, &par->layer_id);
#endif
    g_drv_ops.hifb_drv_set_int_callback(HIFB_INTTYPE_WBC, hifb_wbc_callback,
                                        par->layer_id, &par->layer_id);

    hifb_spin_lock_irqsave(&par->lock, lock_flag);

    /* compression starts disabled; enabled later on request */
    compress_info->compress_open = HI_FALSE;

    /* modifying brackets the parameter change for readers on the irq side */
    par->modifying = HI_TRUE;
    hifb_set_show(par, HI_TRUE);
    par->param_modify_mask = HIFB_LAYER_PARAMODIFY_SHOW;
    par->modifying = HI_FALSE;
    par->layer_open = HI_TRUE;

    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

    /* enable layer for vo to decide config csc or not */
    g_drv_ops.hifb_drv_enable_layer(par->layer_id, HI_TRUE);

    return;
}
1766
/*
 * First-open bring-up for a regular (non soft-cursor) layer: graphics init,
 * OSD data query, default var/fix setup, private-data init, lock init,
 * default configuration, compression init and callback registration.
 * All of this runs only while the reference count is still zero; later opens
 * are no-ops here. Returns HI_SUCCESS or HI_FAILURE.
 */
static hi_s32 hifb_open_start(struct fb_info *info)
{
    hifb_par *par = HI_NULL;
    hifb_cursor_info cursor_info;
    hifb_osd_data osd_data = {0};
    HIFB_COLOR_FMT_E color_format;
    hi_u32 layer_id;

    par = (hifb_par *)info->par;
    layer_id = par->layer_id;

    if (!atomic_read(&par->ref_count)) {
#if (HICHIP == HI3516E_V200)
        hifb_graphics_init();
#else
        vou_graphics_init();
#endif
        (hi_void)memset_s(&cursor_info, sizeof(hifb_cursor_info), 0, sizeof(hifb_cursor_info));
        if (g_drv_ops.hifb_drv_get_osd_data(layer_id, &osd_data) != HI_SUCCESS) {
            hifb_error("Failed to get osd data!\n");
            return HI_FAILURE;
        }

        /* Configure the layer's default variable parameter var_info according to the type of layer */
        if (g_drv_ops.hifb_open_layer(layer_id) != HI_SUCCESS) {
            hifb_error("Open graphic layer %u# failed!\n", layer_id);
            return HI_FAILURE;
        }

        /* get default var and fix */
        if (hifb_open_default_param(info, &color_format) != HI_SUCCESS) {
            return HI_FAILURE;
        }

        /* Initialize the display information in private data */
        hifb_open_init_display(info, layer_id, &osd_data);

        /* Initialization lock */
        hifb_spin_lock_init(&par->lock);

        /* hifb set bufmode\alpha\displaybufinfo\fmt */
        hifb_open_init_config(info, layer_id, &osd_data, color_format, &cursor_info);

        /* hifb compress init */
        hifb_open_init_compress(info);

        /* hifb set callback and init finish */
        hifb_open_init_finish(info);
    }
    return HI_SUCCESS;
}
1818
1819 /*
1820 * Function : hifb_open
1821 * Description : open the framebuffer and using the default parameter to set the layer
1822 struct fb_info *info
1823 * Return : return 0
1824 */
1825 #ifdef __HuaweiLite__
1826 void *g_hifb_info_ptr = HI_NULL;
static hi_s32 hifb_open (struct fb_vtable_s *vtable)
#else
static hi_s32 hifb_open (struct fb_info *info, hi_s32 user)
#endif
{
#ifdef __HuaweiLite__
    struct hifb_info *info = (struct hifb_info *)vtable;
#endif
    hifb_par *par = HI_NULL;

    /* check input param */
    if (hifb_open_check_param(info) != HI_SUCCESS) {
        return HI_FAILURE;
    }
    par = (hifb_par *)info->par;

    /* check layer is cursor and is soft cursor */
    if (hifb_open_is_cursor_layer(info) == HI_TRUE) {
        /* soft-cursor layers need no full bring-up; refcount already bumped */
        return HI_SUCCESS;
    }

    /* open the layer first (full bring-up happens only on the first open) */
    if (hifb_open_start(info) != HI_SUCCESS) {
        return HI_FAILURE;
    }

    /* increase reference count */
    atomic_inc(&par->ref_count);
#ifdef __HuaweiLite__
    g_hifb_info_ptr = (void *)info;
#else
    hi_unused(user);
#endif
    return HI_SUCCESS;
}
1862
1863 /*
1864 * Function : hifb_release
1865 * Description : open the framebuffer and disable the layer
1866 struct fb_info *info
1867 * Return : return 0 if succeed, otherwise return -EINVAL
1868 */
/*
 * Tear down a layer's display state on final close: hide the layer,
 * disable compression bookkeeping, mask WBC interrupts, disable and
 * close the hardware layer, unhook interrupt callbacks and free the
 * canvas buffer. Safe no-op when par is HI_NULL.
 */
hi_void hifb_release_info(hifb_par *par)
{
    unsigned long lock_flag;
    if (par == HI_NULL) {
        return;
    }

    /* Every driver hook used below must exist; otherwise abort the teardown. */
    if ((g_drv_ops.hifb_drv_enable_wbc_int == HI_NULL) || (g_drv_ops.hifb_drv_enable_layer == HI_NULL) ||
        (g_drv_ops.hifb_drv_updata_layer_reg == HI_NULL) || (g_drv_ops.hifb_drv_set_int_callback == HI_NULL) ||
        (g_drv_ops.hifb_close_layer == HI_NULL)) {
        hifb_error("hifb_release_info failed!\n");
        return;
    }
#ifdef MDDRDETECT
    if (g_drv_ops.hifb_drv_clear_ddr_dectect_zone == HI_NULL) {
        hifb_error("hifb_release_info failed!\n");
        return;
    }
#endif
    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    hifb_set_show(par, HI_FALSE);

    /* Hard-cursor layers skip the hardware teardown below. */
    if ((!is_cursor_layer(par->layer_id)) || (!is_soft_cursor())) {
        /* disable the compress */
        if (par->compress_info.compress_open) {
            /* No memory alloc, no need to release */
            par->compress_info.compress_open = HI_FALSE;
        }

        /* clear wbc interrupt when close fb */
        g_drv_ops.hifb_drv_enable_wbc_int(par->layer_id, HI_FALSE);
        g_drv_ops.hifb_drv_enable_layer(par->layer_id, HI_FALSE);
        g_drv_ops.hifb_drv_updata_layer_reg(par->layer_id);

        /* Unhook both the VO and the WBC interrupt callbacks for this layer. */
        g_drv_ops.hifb_drv_set_int_callback(HIFB_INTTYPE_VO, HI_NULL, par->layer_id, HI_NULL);
        g_drv_ops.hifb_drv_set_int_callback(HIFB_INTTYPE_WBC, HI_NULL, par->layer_id, HI_NULL);

        g_drv_ops.hifb_close_layer(par->layer_id);
        /* Clear memory detection area when closing graphics layer */
#ifdef MDDRDETECT
        g_drv_ops.hifb_drv_clear_ddr_dectect_zone(par->compress_info.start_section, par->compress_info.zone_nums);
#endif
        par->compress_info.start_section = 0;
        par->compress_info.zone_nums = 0;
        par->compress_info.new_start_section = 0;
        par->compress_info.new_zone_nums = 0;
        par->compress_info.clear_zone = HI_TRUE;

        /* MMzFree in hifb_freeccanbuf has a sleep function call, you need to release the spin lock first. */
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
        hifb_freeccanbuf(par);
        hifb_spin_lock_irqsave(&par->lock, lock_flag);
    }
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
}
1924
#ifdef __HuaweiLite__
static hi_s32 hifb_release(struct fb_vtable_s *vtable)
#else
static hi_s32 hifb_release(struct fb_info *info, hi_s32 user)
#endif
{
#ifdef __HuaweiLite__
    struct hifb_info *info = (struct hifb_info *)vtable;
#endif
    hifb_par *par = HI_NULL;
    hi_u32 mem_len;
    hi_s8 *screen_base = HI_NULL;
#ifndef __HuaweiLite__
    hi_unused(user);
#endif
    if (info == HI_NULL) {
        return HI_FAILURE;
    }
    if (info->par == HI_NULL) {
        return HI_FAILURE;
    }
    par = (hifb_par *)info->par;

    /* Only the last close (ref_count hits zero) performs the teardown. */
    if (atomic_dec_and_test(&par->ref_count)) {
        hifb_release_info(par);

        /* Wipe the frame buffer so stale pixels are not shown on the next open. */
        screen_base = hifb_get_screen_base(info);
        mem_len = hifb_get_smem_len(info);
        if ((screen_base != HI_NULL) && (mem_len != 0)) {
            (hi_void)memset_s(screen_base, mem_len, 0, mem_len);
        }
#ifdef CONFIG_HI_VO_FB_SEPARATE
        vou_graphics_deinit();
        par->layer_open = HI_FALSE;
#endif
    }

    return 0;
}
1964
1965 /* Soft mouse needs to filter out the hardware mouse interface */
hifb_cmdfilter(hi_u32 cmd)1966 hi_bool hifb_cmdfilter(hi_u32 cmd)
1967 {
1968 hi_bool filter = HI_FALSE;
1969 switch (cmd) {
1970 case FBIO_REFRESH:
1971 case FBIOGET_CANVAS_BUFFER:
1972 case FBIOPUT_LAYER_INFO:
1973 case FBIOGET_LAYER_INFO:
1974 case FBIOGET_ALPHA_HIFB:
1975 case FBIOPUT_ALPHA_HIFB:
1976 case FBIOGET_DEFLICKER_HIFB:
1977 case FBIOPUT_DEFLICKER_HIFB:
1978 case FBIOGET_COLORKEY_HIFB:
1979 case FBIOPUT_COLORKEY_HIFB:
1980 case FBIOPUT_SCREENSIZE:
1981 case FBIOGET_SCREENSIZE:
1982 case FBIOGET_SCREEN_ORIGIN_HIFB:
1983 case FBIOPUT_SCREEN_ORIGIN_HIFB:
1984 case FBIOGET_VBLANK_HIFB:
1985 case FBIOPUT_SHOW_HIFB:
1986 case FBIOGET_SHOW_HIFB:
1987 case FBIO_WAITFOR_FREFRESH_DONE:
1988 case FBIOPUT_COMPRESSION_HIFB:
1989 case FBIOGET_COMPRESSION_HIFB:
1990 case FBIOFLIP_SURFACE:
1991
1992 #ifdef MDDRDETECT
1993 case FBIOPUT_MDDRDETECT_HIFB:
1994 case FBIOGET_MDDRDETECT_HIFB:
1995 #endif
1996 {
1997 filter = HI_TRUE; /* These interfaces need to be filtered */
1998 break;
1999 }
2000 default:
2001 break;
2002 }
2003
2004 return filter;
2005 }
2006
2007 #ifdef CURSOR
hifb_check_cusor_phyaddr(HIFB_SURFACE_S * cursor)2008 static hi_s32 hifb_check_cusor_phyaddr(HIFB_SURFACE_S *cursor)
2009 {
2010 HIFB_SURFACE_S *cursor_sur = cursor;
2011 hi_u64 len;
2012 if (cursor == HI_NULL) {
2013 return HI_FAILURE;
2014 }
2015
2016 len = cursor_sur->u32Pitch * cursor_sur->u32Height;
2017 return cmpi_check_mmz_phy_addr(cursor_sur->u64PhyAddr, len);
2018 }
2019 #endif
2020
/*
 * Common pre-checks run before every hifb ioctl: valid info/par pointers,
 * layer id in range, non-NULL user argument (except for cmds that take
 * none), layer supported by the chip, and soft-cursor command filtering.
 * Returns HI_SUCCESS when the ioctl may proceed, HI_FAILURE otherwise.
 */
static hi_s32 hifb_ioctl_check_param(struct fb_info *info, hi_u32 cmd, hi_void *argp)
{
    hifb_par *par = HI_NULL;
    hi_u32 layer_id;

    if ((info == HI_NULL) || (info->par == HI_NULL)) {
        return HI_FAILURE;
    }
    par = (hifb_par *)info->par;
    layer_id = par->layer_id;
    if (layer_id >= HIFB_MAX_LAYER_NUM) {
        return HI_FAILURE;
    }

    /* These commands legitimately take no argument; every other cmd needs argp. */
    if ((argp == HI_NULL) && (cmd != FBIOGET_VBLANK_HIFB) &&
        (cmd != FBIO_WAITFOR_FREFRESH_DONE) && (cmd != FBIO_CREATE_LAYER) && (cmd != FBIO_DESTROY_LAYER)
#ifdef CONFIG_DRIVERS_HDF_DISP
        && (cmd != FBIO_PANEL_SET_POWERSTATUS) && (cmd != FBIO_PANEL_SET_BACKLIGHT)
#endif
        ) {
        hifb_error("HI_NULL arg!\n");
        return HI_FAILURE;
    }

    /* A soft-cursor layer is exempt from the bLayerSupported capability check. */
    if ((!g_drv_ops.capability[layer_id].bLayerSupported) &&
        (!is_cursor_layer(layer_id) || (!is_soft_cursor()))) {
        hifb_error("not support layer %u!\n", layer_id);
        return HI_FAILURE;
    }

    /* Turn on the soft mouse function, and it is the mouse layer, you need to filter the hard interface. */
    if ((is_soft_cursor()) && (is_cursor_layer(layer_id))) {
        if (hifb_cmdfilter(cmd)) {
            hifb_error("soft cursor layer %u not support this function!\n", layer_id);
            return HI_FAILURE;
        }
    }
    return HI_SUCCESS;
}
2060
hifb_get_colorkey(struct fb_info * info,unsigned long arg)2061 static hi_s32 hifb_get_colorkey(struct fb_info *info, unsigned long arg)
2062 {
2063 HIFB_COLORKEY_S colorkey = {0};
2064 hifb_colorkeyex colorkey_ex = {0};
2065 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2066 hifb_par *par = HI_NULL;
2067 par = (hifb_par *)info->par;
2068
2069 if ((!g_drv_ops.capability[par->layer_id].bKeyRgb) &&
2070 (!g_drv_ops.capability[par->layer_id].bKeyAlpha)) {
2071 hifb_error("Layer %u doesn't support colorkey!\n", par->layer_id);
2072 return HI_FAILURE;
2073 }
2074
2075 hifb_get_key(par, &colorkey_ex);
2076
2077 colorkey.bKeyEnable = colorkey_ex.key_enable;
2078 colorkey.u32Key = colorkey_ex.key;
2079
2080 return osal_copy_to_user(argp, &colorkey, sizeof(HIFB_COLORKEY_S));
2081 }
2082
hifb_set_colorkey(struct fb_info * info,unsigned long arg)2083 static hi_s32 hifb_set_colorkey(struct fb_info *info, unsigned long arg)
2084 {
2085 HIFB_COLORKEY_S colorkey;
2086 hifb_colorkeyex colorkey_ex;
2087 unsigned long lock_flag;
2088 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2089 hifb_par *par = (hifb_par *)info->par;
2090 hifb_display_info *display_info = &par->display_info;
2091
2092 if (osal_copy_from_user(&colorkey, argp, sizeof(HIFB_COLORKEY_S))) {
2093 return -EFAULT;
2094 }
2095
2096 if ((colorkey.bKeyEnable != HI_TRUE) && (colorkey.bKeyEnable != HI_FALSE)) {
2097 hifb_error("bKeyEnable(%d) should be HI_TRUE or HI_FALSE!\n", colorkey.bKeyEnable);
2098 return HI_FAILURE;
2099 }
2100
2101 if ((!g_drv_ops.capability[par->layer_id].bKeyRgb) && (!g_drv_ops.capability[par->layer_id].bKeyAlpha)) {
2102 hifb_error("Layer %u doesn't support colorkey!\n", par->layer_id);
2103 return HI_FAILURE;
2104 }
2105
2106 if ((colorkey.bKeyEnable) && (display_info->premul)) {
2107 hifb_error("colorkey and premul couldn't take effect at the same time!\n");
2108 return HI_FAILURE;
2109 }
2110
2111 colorkey_ex.key = colorkey.u32Key;
2112 colorkey_ex.key_enable = colorkey.bKeyEnable;
2113 /* 8 is bits */
2114 if (hifb_get_bits_per_pixel(info) <= 8) {
2115 if (colorkey.u32Key >= (hi_u32)(2 << hifb_get_bits_per_pixel(info))) { /* 2 bit colorkey */
2116 hifb_error("The key :%u is out of range the palette: %u!\n",
2117 colorkey.u32Key, 2 << hifb_get_bits_per_pixel(info)); /* 2 bit colorkey */
2118 return HI_FAILURE;
2119 }
2120 colorkey_ex.blue_max = colorkey_ex.blue_min = info->cmap.blue[colorkey.u32Key];
2121 colorkey_ex.green_max = colorkey_ex.green_min = info->cmap.green[colorkey.u32Key];
2122 colorkey_ex.red_max = colorkey_ex.red_min = info->cmap.red[colorkey.u32Key];
2123 } else {
2124 #ifdef __HuaweiLite__
2125 g_drv_ops.hifb_drv_color_convert(info, &colorkey_ex);
2126 #else
2127 g_drv_ops.hifb_drv_color_convert(&info->var, &colorkey_ex);
2128 #endif
2129 }
2130
2131 hifb_spin_lock_irqsave(&par->lock, lock_flag);
2132 par->modifying = HI_TRUE;
2133 hifb_set_key(par, &colorkey_ex);
2134 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_COLORKEY;
2135 par->modifying = HI_FALSE;
2136 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
2137 return HI_SUCCESS;
2138 }
2139
hifb_get_layer_alpha(struct fb_info * info,unsigned long arg)2140 static hi_s32 hifb_get_layer_alpha(struct fb_info *info, unsigned long arg)
2141 {
2142 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2143 hifb_par *par = HI_NULL;
2144 HIFB_ALPHA_S alpha = {0};
2145
2146 par = (hifb_par *)info->par;
2147 hifb_get_alpha(par, &alpha);
2148 return osal_copy_to_user(argp, &alpha, sizeof(HIFB_ALPHA_S));
2149 }
2150
hifb_set_layer_alpha(struct fb_info * info,unsigned long arg)2151 static hi_s32 hifb_set_layer_alpha(struct fb_info *info, unsigned long arg)
2152 {
2153 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2154 hifb_par *par = HI_NULL;
2155 unsigned long lock_flag;
2156 HIFB_ALPHA_S alpha = {0};
2157
2158 par = (hifb_par *)info->par;
2159 if (osal_copy_from_user(&alpha, argp, sizeof(HIFB_ALPHA_S))) {
2160 return -EFAULT;
2161 }
2162
2163 if ((alpha.bAlphaEnable != HI_TRUE) && (alpha.bAlphaEnable != HI_FALSE)) {
2164 hifb_error("alpha.bAlphaEnable should be HI_TRUE or HI_FALSE!\n");
2165 return HI_FAILURE;
2166 }
2167
2168 if ((alpha.bAlphaChannel != HI_TRUE) && (alpha.bAlphaChannel != HI_FALSE)) {
2169 hifb_error("alpha.bAlphaChannel should be HI_TRUE or HI_FALSE!\n");
2170 return HI_FAILURE;
2171 }
2172
2173 hifb_spin_lock_irqsave(&par->lock, lock_flag);
2174 par->modifying = HI_TRUE;
2175 hifb_set_alpha(par, &alpha);
2176 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_ALPHA;
2177
2178 par->modifying = HI_FALSE;
2179 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
2180 return HI_SUCCESS;
2181 }
2182
hifb_get_screen_origin_pos(struct fb_info * info,unsigned long arg)2183 static hi_s32 hifb_get_screen_origin_pos(struct fb_info *info, unsigned long arg)
2184 {
2185 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2186 hifb_par *par = HI_NULL;
2187
2188 HIFB_POINT_S pos = {0};
2189
2190 par = (hifb_par *)info->par;
2191 hifb_get_layerpos(par, &pos);
2192
2193 return osal_copy_to_user(argp, &pos, sizeof(HIFB_POINT_S));
2194 }
2195
hifb_set_screen_origin_pos(struct fb_info * info,unsigned long arg)2196 static hi_s32 hifb_set_screen_origin_pos(struct fb_info *info, unsigned long arg)
2197 {
2198 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2199 hifb_par *par = HI_NULL;
2200 HIFB_POINT_S expected_pos;
2201 hi_u32 layer_id;
2202 unsigned long lock_flag;
2203 hi_bool is_interlace = HI_FALSE;
2204 par = (hifb_par *)info->par;
2205 layer_id = par->layer_id;
2206 is_interlace = hifb_is_interlace(par);
2207
2208 if (is_cursor_layer(layer_id) && is_soft_cursor()) {
2209 hifb_error("you shouldn't set soft cursor origin by this cmd, try FBIOPUT_CURSOR_POS");
2210 return HI_FAILURE;
2211 }
2212
2213 if (osal_copy_from_user(&expected_pos, argp, sizeof(HIFB_POINT_S))) {
2214 return -EFAULT;
2215 }
2216
2217 if (expected_pos.s32XPos < 0 || expected_pos.s32YPos < 0) {
2218 hifb_error("It's not supported to set start pos of layer to negative!\n");
2219 return HI_FAILURE;
2220 }
2221
2222 if (is_interlace && ((expected_pos.s32YPos % 2) != 0)) { /* 2 Judge parity */
2223 hifb_error("s32YPos should be even for interlace vodev!\n");
2224 return HI_FAILURE;
2225 }
2226
2227 hifb_spin_lock_irqsave(&par->lock, lock_flag);
2228 par->modifying = HI_TRUE;
2229
2230 /* Record the old location first */
2231 hifb_set_layerpos(par, &expected_pos);
2232 par->modifying = HI_FALSE;
2233 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
2234
2235 return HI_SUCCESS;
2236 }
2237
hifb_get_deflicker(struct fb_info * info,unsigned long arg)2238 static hi_s32 hifb_get_deflicker(struct fb_info *info, unsigned long arg)
2239 {
2240 hifb_par *par = HI_NULL;
2241 hi_unused(arg);
2242 par = (hifb_par *)info->par;
2243 if (!g_drv_ops.capability[par->layer_id].u32HDefLevel &&
2244 !g_drv_ops.capability[par->layer_id].u32VDefLevel) {
2245 hifb_error("deflicker is not supported!\n");
2246 return -EPERM;
2247 }
2248
2249 /* There is currently no chip that needs to support this feature, so comment out first. */
2250 return HI_SUCCESS;
2251 }
2252
hifb_set_deflicker(struct fb_info * info,unsigned long arg)2253 static hi_s32 hifb_set_deflicker(struct fb_info *info, unsigned long arg)
2254 {
2255 hifb_par *par = HI_NULL;
2256 hi_unused(arg);
2257 par = (hifb_par *)info->par;
2258 if (!g_drv_ops.capability[par->layer_id].u32HDefLevel &&
2259 !g_drv_ops.capability[par->layer_id].u32VDefLevel) {
2260 hifb_error("deflicker is not supported!\n");
2261 return -EPERM;
2262 }
2263 return HI_SUCCESS;
2264 }
2265
hifb_get_vblank(struct fb_info * info,unsigned long arg)2266 static hi_s32 hifb_get_vblank(struct fb_info *info, unsigned long arg)
2267 {
2268 hifb_par *par = HI_NULL;
2269 hi_unused(arg);
2270 par = (hifb_par *)info->par;
2271 if (hifb_wait_regconfig_work(par->layer_id)) {
2272 hifb_error("It is not support VBL!\n");
2273 return -EPERM;
2274 }
2275
2276 return HI_SUCCESS;
2277 }
2278
hifb_show_layer(struct fb_info * info,unsigned long arg)2279 static hi_s32 hifb_show_layer(struct fb_info *info, unsigned long arg)
2280 {
2281 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2282 hifb_par *par = HI_NULL;
2283 hi_bool is_show = HI_FALSE;
2284 unsigned long lock_flag;
2285 hi_u32 layer_id;
2286
2287 par = (hifb_par *)info->par;
2288 layer_id = par->layer_id;
2289 if (is_cursor_layer(layer_id) && is_soft_cursor()) {
2290 hifb_error("you shouldn't show sot cursor by this cmd, try FBIOPUT_CURSOR_STATE!");
2291 return HI_FAILURE;
2292 }
2293
2294 if (osal_copy_from_user(&is_show, argp, sizeof(hi_bool))) {
2295 return -EFAULT;
2296 }
2297
2298 if ((is_show != HI_TRUE) && (is_show != HI_FALSE)) {
2299 hifb_error("bShow(%d) should be HI_TRUE or HI_FALSE!\n", is_show);
2300 return HI_FAILURE;
2301 }
2302
2303 if (is_show == hifb_get_show(par)) {
2304 hifb_dbg_info("The layer is show(%d) now!\n", par->show);
2305 return HI_SUCCESS;
2306 }
2307
2308 hifb_spin_lock_irqsave(&par->lock, lock_flag);
2309 par->modifying = HI_TRUE;
2310
2311 hifb_set_show(par, is_show);
2312 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_SHOW;
2313
2314 par->modifying = HI_FALSE;
2315 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
2316
2317 return HI_SUCCESS;
2318 }
2319
hifb_get_layer_show_state(struct fb_info * info,unsigned long arg)2320 static hi_s32 hifb_get_layer_show_state(struct fb_info *info, unsigned long arg)
2321 {
2322 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2323 hifb_par *par = HI_NULL;
2324 hi_bool is_show = HI_FALSE;
2325 par = (hifb_par *)info->par;
2326 is_show = par->show;
2327 return osal_copy_to_user(argp, &is_show, sizeof(hi_bool));
2328 }
2329
hifb_get_capability(struct fb_info * info,unsigned long arg)2330 static hi_s32 hifb_get_capability(struct fb_info *info, unsigned long arg)
2331 {
2332 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2333 hifb_par *par = HI_NULL;
2334 HIFB_CAPABILITY_S capability = {0};
2335
2336 par = (hifb_par *)info->par;
2337 capability = g_drv_ops.capability[par->layer_id];
2338 return osal_copy_to_user(argp, (hi_void *)&capability, sizeof(HIFB_CAPABILITY_S));
2339 }
2340
hifb_set_sursor_info(struct fb_info * info,unsigned long arg)2341 static hi_s32 hifb_set_sursor_info(struct fb_info *info, unsigned long arg)
2342 {
2343 #ifdef CURSOR
2344 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2345 hifb_par *par = HI_NULL;
2346 HIFB_CURSOR_S cursor_info;
2347
2348 par = (hifb_par *)info->par;
2349 (hi_void)memset_s(&cursor_info, sizeof(HIFB_CURSOR_S), 0, sizeof(HIFB_CURSOR_S));
2350 if (!is_soft_cursor()) {
2351 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2352 return HI_FAILURE;
2353 }
2354
2355 if (!is_cursor_layer(par->layer_id)) {
2356 hifb_error("layer %d is not cursor layer!\n", par->layer_id);
2357 return HI_FAILURE;
2358 }
2359
2360 if (osal_copy_from_user(&cursor_info, argp, sizeof(HIFB_CURSOR_S))) {
2361 return -EFAULT;
2362 }
2363
2364 if (hifb_check_cusor_phyaddr(&cursor_info.stCursor) == HI_FAILURE) {
2365 hifb_error("cursor mmz phy addr 0x%llx invalid.\n", cursor_info.stCursor.u64PhyAddr);
2366 return HI_FAILURE;
2367 }
2368
2369 if (hifb_cursor_putinfo(par, &cursor_info) != HI_SUCCESS) {
2370 return HI_FAILURE;
2371 }
2372
2373 return HI_SUCCESS;
2374 #else
2375 hifb_error("the soft cursor operation is unsupported!\n");
2376 return HI_FAILURE;
2377 #endif
2378 }
2379
hifb_get_sursor_info(struct fb_info * info,unsigned long arg)2380 static hi_s32 hifb_get_sursor_info(struct fb_info *info, unsigned long arg)
2381 {
2382 #ifdef CURSOR
2383 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2384 hifb_par *par = HI_NULL;
2385 hifb_cursor_info cursor_info = {0};
2386 HIFB_CURSOR_S cursor = {0};
2387
2388 par = (hifb_par *)info->par;
2389 if (!is_soft_cursor()) {
2390 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2391 return HI_FAILURE;
2392 }
2393
2394 if (!is_cursor_layer(par->layer_id)) {
2395 hifb_error("layer %u is not cursor layer!\n", par->layer_id);
2396 return HI_FAILURE;
2397 }
2398 hifb_get_cursorinfo(par, &cursor_info);
2399 cursor = cursor_info.cursor;
2400 return osal_copy_to_user(argp, &cursor, sizeof(HIFB_CURSOR_S));
2401 #else
2402
2403 hifb_error("the soft cursor operation is unsupported!\n");
2404 return HI_FAILURE;
2405 #endif
2406 }
2407
hifb_set_cursor_state(struct fb_info * info,unsigned long arg)2408 static hi_s32 hifb_set_cursor_state(struct fb_info *info, unsigned long arg)
2409 {
2410 #ifdef CURSOR
2411 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2412 hifb_par *par = HI_NULL;
2413 hi_bool is_show = HI_FALSE;
2414
2415 par = (hifb_par *)info->par;
2416 if (!is_soft_cursor()) {
2417 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2418 return HI_FAILURE;
2419 }
2420
2421 if (!is_cursor_layer(par->layer_id)) {
2422 hifb_error("layer %u is not cursor layer!\n", par->layer_id);
2423 return HI_FAILURE;
2424 }
2425 if (osal_copy_from_user(&is_show, argp, sizeof(hi_bool))) {
2426 return -EFAULT;
2427 }
2428
2429 if (hifb_cursor_changestate(par, is_show) != HI_SUCCESS) {
2430 return HI_FAILURE;
2431 }
2432
2433 return HI_SUCCESS;
2434 #else
2435 hifb_error("the soft cursor operation is unsupported!\n");
2436 return HI_FAILURE;
2437 #endif
2438 }
2439
hifb_get_cursor_state(struct fb_info * info,unsigned long arg)2440 static hi_s32 hifb_get_cursor_state(struct fb_info *info, unsigned long arg)
2441 {
2442 #ifdef CURSOR
2443 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2444 hifb_par *par = HI_NULL;
2445 hi_bool is_show = HI_FALSE;
2446 par = (hifb_par *)info->par;
2447 if (!is_soft_cursor()) {
2448 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2449 return HI_FAILURE;
2450 }
2451
2452 if (!is_cursor_layer(par->layer_id)) {
2453 hifb_error("layer %u is not cursor layer!\n", par->layer_id);
2454 return HI_FAILURE;
2455 }
2456 is_show = hifb_get_show(par);
2457
2458 return osal_copy_to_user(argp, &is_show, sizeof(hi_bool));
2459 #else
2460 hifb_error("the soft cursor operation is unsupported!\n");
2461 return HI_FAILURE;
2462 #endif
2463 }
2464
hifb_set_cursor_pos(struct fb_info * info,unsigned long arg)2465 static hi_s32 hifb_set_cursor_pos(struct fb_info *info, unsigned long arg)
2466 {
2467 #ifdef CURSOR
2468 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2469 hifb_par *par = HI_NULL;
2470 HIFB_POINT_S pos = {0};
2471
2472 par = (hifb_par *)info->par;
2473 if (!is_soft_cursor()) {
2474 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2475 return HI_FAILURE;
2476 }
2477
2478 if (!is_cursor_layer(par->layer_id)) {
2479 hifb_error("layer %u is not cursor layer!\n", par->layer_id);
2480 return HI_FAILURE;
2481 }
2482 if (osal_copy_from_user(&pos, argp, sizeof(HIFB_POINT_S))) {
2483 return -EFAULT;
2484 }
2485
2486 if (hifb_cursor_changepos(par->layer_id, pos) != HI_SUCCESS) {
2487 return HI_FAILURE;
2488 }
2489
2490 return HI_SUCCESS;
2491 #else
2492 hifb_error("the soft cursor operation is unsupported!\n");
2493 return HI_FAILURE;
2494 #endif
2495 }
2496
hifb_get_cursor_pos(struct fb_info * info,unsigned long arg)2497 static hi_s32 hifb_get_cursor_pos(struct fb_info *info, unsigned long arg)
2498 {
2499 #ifdef CURSOR
2500 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2501 hifb_par *par = HI_NULL;
2502 HIFB_POINT_S pos = {0};
2503
2504 par = (hifb_par *)info->par;
2505 if (!is_soft_cursor()) {
2506 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2507 return HI_FAILURE;
2508 }
2509 if (!is_cursor_layer(par->layer_id)) {
2510 hifb_error("layer %u is not cursor layer!\n", par->layer_id);
2511 return HI_FAILURE;
2512 }
2513 hifb_get_layerpos(par, &pos);
2514
2515 return osal_copy_to_user(argp, &pos, sizeof(HIFB_POINT_S));
2516 #else
2517 hifb_error("the soft cursor operation is unsupported!\n");
2518 return HI_FAILURE;
2519 #endif
2520 }
2521
hifb_set_cursor_colorkey(struct fb_info * info,unsigned long arg)2522 static hi_s32 hifb_set_cursor_colorkey(struct fb_info *info, unsigned long arg)
2523 {
2524 #ifdef CURSOR
2525 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2526 hifb_par *par = HI_NULL;
2527 HIFB_COLORKEY_S colorkey;
2528 hifb_colorkeyex colorkey_ex;
2529 HIFB_COLOR_FMT_E color_format;
2530
2531 par = (hifb_par *)info->par;
2532 if (!is_soft_cursor()) {
2533 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2534 return HI_FAILURE;
2535 }
2536
2537 if (!is_cursor_layer(par->layer_id)) {
2538 hifb_error("layer %u is not cursor layer!\n", par->layer_id);
2539 return HI_FAILURE;
2540 }
2541 if (osal_copy_from_user(&colorkey, argp, sizeof(HIFB_COLORKEY_S))) {
2542 return -EFAULT;
2543 }
2544
2545 colorkey_ex.key = colorkey.u32Key;
2546 colorkey_ex.key_enable = colorkey.bKeyEnable;
2547 hifb_get_fmt(par, &color_format);
2548 #ifdef __HuaweiLite__
2549 g_drv_ops.hifb_drv_color_convert(info, &colorkey_ex);
2550 #else
2551 g_drv_ops.hifb_drv_color_convert(&info->var, &colorkey_ex);
2552 #endif
2553
2554 hifb_set_key(par, &colorkey_ex);
2555
2556 return HI_SUCCESS;
2557 #else
2558 hifb_error("the soft cursor operation is unsupported!\n");
2559 return HI_FAILURE;
2560 #endif
2561 }
2562
hifb_get_cursor_colorkey(struct fb_info * info,unsigned long arg)2563 static hi_s32 hifb_get_cursor_colorkey(struct fb_info *info, unsigned long arg)
2564 {
2565 #ifdef CURSOR
2566 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2567 hifb_par *par = HI_NULL;
2568 HIFB_COLORKEY_S colorkey = {0};
2569 hifb_colorkeyex colorkey_ex = {0};
2570
2571 par = (hifb_par *)info->par;
2572 if (!is_soft_cursor()) {
2573 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2574 return HI_FAILURE;
2575 }
2576
2577 if (!is_cursor_layer(par->layer_id)) {
2578 hifb_error("layer %d is not cursor layer!\n", par->layer_id);
2579 return HI_FAILURE;
2580 }
2581 hifb_get_key(par, &colorkey_ex);
2582
2583 colorkey.bKeyEnable = colorkey_ex.key_enable;
2584 colorkey.u32Key = colorkey_ex.key;
2585 return osal_copy_to_user(argp, &(colorkey), sizeof(HIFB_COLORKEY_S));
2586 #else
2587 hifb_error("the soft cursor operation is unsupported!\n");
2588 return HI_FAILURE;
2589 #endif
2590 }
2591
hifb_set_cursor_alpha(struct fb_info * info,unsigned long arg)2592 static hi_s32 hifb_set_cursor_alpha(struct fb_info *info, unsigned long arg)
2593 {
2594 #ifdef CURSOR
2595 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2596 hifb_par *par = HI_NULL;
2597 HIFB_ALPHA_S alpha = {0};
2598
2599 par = (hifb_par *)info->par;
2600 if (!is_soft_cursor()) {
2601 hifb_error("you shouldn't set hard cursor alpha by this cmd!\n");
2602 return HI_FAILURE;
2603 }
2604
2605 if (osal_copy_from_user(&alpha, argp, sizeof(HIFB_ALPHA_S))) {
2606 return -EFAULT;
2607 }
2608
2609 if ((alpha.bAlphaEnable != HI_TRUE) && (alpha.bAlphaEnable != HI_FALSE)) {
2610 hifb_error("pstAlphaInfo->bAlphaEnable(%d) should be HI_TRUE or HI_FALSE!\n", alpha.bAlphaEnable);
2611 return HI_FAILURE;
2612 }
2613
2614 if ((alpha.bAlphaChannel != HI_TRUE) && (alpha.bAlphaChannel != HI_FALSE)) {
2615 hifb_error("pstAlphaInfo->bAlphaChannel(%d) should be HI_TRUE or HI_FALSE!\n", alpha.bAlphaChannel);
2616 return HI_FAILURE;
2617 }
2618
2619 hifb_set_alpha(par, &alpha);
2620
2621 return HI_SUCCESS;
2622 #else
2623 hifb_error("the soft cursor operation is unsupported!\n");
2624 return HI_FAILURE;
2625 #endif
2626 }
2627
hifb_get_cursor_alpha(struct fb_info * info,unsigned long arg)2628 static hi_s32 hifb_get_cursor_alpha(struct fb_info *info, unsigned long arg)
2629 {
2630 #ifdef CURSOR
2631 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2632 hifb_par *par = HI_NULL;
2633 HIFB_ALPHA_S alpha = {0};
2634
2635 par = (hifb_par *)info->par;
2636 if (!is_soft_cursor()) {
2637 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2638 return HI_FAILURE;
2639 }
2640 if (!is_cursor_layer(par->layer_id)) {
2641 hifb_error("layer %d is not cursor layer!\n", par->layer_id);
2642 return HI_FAILURE;
2643 }
2644 hifb_get_alpha(par, &alpha);
2645
2646 return osal_copy_to_user(argp, &alpha, sizeof(HIFB_ALPHA_S));
2647 #else
2648 hifb_error("the soft cursor operation is unsupported!\n");
2649 return HI_FAILURE;
2650 #endif
2651 }
2652
hifb_set_attch_cursor(struct fb_info * info,unsigned long arg)2653 static hi_s32 hifb_set_attch_cursor(struct fb_info *info, unsigned long arg)
2654 {
2655 #ifdef CURSOR
2656 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2657 hifb_par *par = HI_NULL;
2658 hi_u32 layer_id = 0;
2659
2660 par = (hifb_par *)info->par;
2661 if (!is_soft_cursor()) {
2662 hifb_error("you shouldn't attach hard cursor by this cmd!\n");
2663 return HI_FAILURE;
2664 }
2665
2666 if (!is_cursor_layer(par->layer_id)) {
2667 hifb_error("layer %d is not cursor layer!\n", par->layer_id);
2668 return HI_FAILURE;
2669 }
2670 if (osal_copy_from_user(&layer_id, argp, sizeof(hi_u32))) {
2671 return -EFAULT;
2672 }
2673 if ((layer_id >= g_drv_ops.layer_count) ||
2674 (g_drv_ops.capability[layer_id].bLayerSupported == HI_FALSE)) {
2675 hifb_error("LayerId %d should be in [0, %u) or it is not supported!\n", layer_id,
2676 g_drv_ops.layer_count);
2677 return -EFAULT;
2678 }
2679
2680 if (is_cursor_layer(layer_id)) {
2681 hifb_error("Don't allow attach cursor to itself or other cursor layer!\n");
2682 return HI_FAILURE;
2683 }
2684
2685 return hifb_cursor_attach(par->layer_id, layer_id);
2686 #else
2687 hifb_error("the soft cursor operation is unsupported!\n");
2688 return HI_FAILURE;
2689 #endif
2690 }
2691
hifb_set_detach_cursor(struct fb_info * info,unsigned long arg)2692 static hi_s32 hifb_set_detach_cursor(struct fb_info *info, unsigned long arg)
2693 {
2694 #ifdef CURSOR
2695 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2696 hifb_par *par = HI_NULL;
2697 hi_u32 layer_id = 0;
2698
2699 par = (hifb_par *)info->par;
2700 if (!is_soft_cursor()) {
2701 hifb_error("you shouldn't detach hard cursor by this cmd!\n");
2702 return HI_FAILURE;
2703 }
2704
2705 if (!is_cursor_layer(par->layer_id)) {
2706 hifb_error("layer %d is not cursor layer!\n", par->layer_id);
2707 return HI_FAILURE;
2708 }
2709 if (osal_copy_from_user(&layer_id, argp, sizeof(hi_u32))) {
2710 return -EFAULT;
2711 }
2712
2713 if ((layer_id >= g_drv_ops.layer_count) ||
2714 (g_drv_ops.capability[layer_id].bLayerSupported == HI_FALSE)) {
2715 hifb_error("LayerId %d should be in [0, %u) or it is not supported!\n", layer_id,
2716 g_drv_ops.layer_count);
2717 return -EFAULT;
2718 }
2719
2720 if (is_cursor_layer(layer_id)) {
2721 hifb_error("Detach cursor to itself or other cursor layer!\n");
2722 return HI_FAILURE;
2723 }
2724 return hifb_cursor_detach(layer_id);
2725 #else
2726 hifb_error("the soft cursor operation is unsupported!\n");
2727 return HI_FAILURE;
2728 #endif
2729 }
2730
hifb_set_layer_info(struct fb_info * info,unsigned long arg)2731 static hi_s32 hifb_set_layer_info(struct fb_info *info, unsigned long arg)
2732 {
2733 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2734 hifb_par *par = HI_NULL;
2735
2736 par = (hifb_par *)info->par;
2737 return hifb_onputlayerinfo(info, par, argp);
2738 }
2739
hifb_get_layer_info(struct fb_info * info,unsigned long arg)2740 static hi_s32 hifb_get_layer_info(struct fb_info *info, unsigned long arg)
2741 {
2742 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2743 hifb_par *par = HI_NULL;
2744 HIFB_LAYER_INFO_S layer_info = {0};
2745 par = (hifb_par *)info->par;
2746 hifb_get_layerinfo(par, &layer_info);
2747 return osal_copy_to_user(argp, &layer_info, sizeof(HIFB_LAYER_INFO_S));
2748 }
2749
hifb_get_canvas_buffer(struct fb_info * info,unsigned long arg)2750 static hi_s32 hifb_get_canvas_buffer(struct fb_info *info, unsigned long arg)
2751 {
2752 hi_s32 ret;
2753 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2754 hifb_par *par = HI_NULL;
2755 HIFB_BUFFER_S buf = {0};
2756 hifb_refresh_info *refresh_info = HI_NULL;
2757
2758 par = (hifb_par *)info->par;
2759 refresh_info = &par->refresh_info;
2760 ret = memcpy_s(&(buf.stCanvas), sizeof(HIFB_SURFACE_S), &(par->canvas_sur), sizeof(HIFB_SURFACE_S));
2761 hifb_unequal_eok_return(ret);
2762 ret = memcpy_s(&(buf.UpdateRect), sizeof(HIFB_RECT), &(refresh_info->user_buffer.UpdateRect),
2763 sizeof(HIFB_RECT));
2764 hifb_unequal_eok_return(ret);
2765 if (osal_copy_to_user(argp, &(buf), sizeof(HIFB_BUFFER_S))) {
2766 return -EFAULT;
2767 }
2768 return HI_SUCCESS;
2769 }
2770
hifb_refresh_layer(struct fb_info * info,unsigned long arg)2771 static hi_s32 hifb_refresh_layer(struct fb_info *info, unsigned long arg)
2772 {
2773 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2774 hifb_par *par = (hifb_par *)info->par;
2775 return hifb_onrefresh(par, argp);
2776 }
2777
hifb_wait_refresh_finish(struct fb_info * info,unsigned long arg)2778 static hi_s32 hifb_wait_refresh_finish(struct fb_info *info, unsigned long arg)
2779 {
2780 hifb_par *par = HI_NULL;
2781 HIFB_LAYER_BUF_E buf_mode;
2782 hi_s32 ret;
2783 hi_unused(arg);
2784 par = (hifb_par *)info->par;
2785 hifb_get_bufmode(par, &buf_mode);
2786 if ((buf_mode != HIFB_LAYER_BUF_NONE) && (buf_mode != HIFB_LAYER_BUF_BUTT)) {
2787 /* 80 is timeout */
2788 ret = wait_event_interruptible_timeout(par->vbl_event, par->vblflag, osal_msecs_to_jiffies(80));
2789 if (ret < 0) {
2790 return -ERESTARTSYS;
2791 }
2792 } else {
2793 hifb_error("doesn't support FBIO_WAITFOR_FREFRESH_DONE operation when in standard mode" \
2794 "or HI_FB_LAYER_BUF_NONE!\n");
2795 return HI_FAILURE;
2796 }
2797 return HI_SUCCESS;
2798 }
2799
hifb_set_mirror_mode(struct fb_info * info,unsigned long arg)2800 static hi_s32 hifb_set_mirror_mode(struct fb_info *info, unsigned long arg)
2801 {
2802 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2803 hifb_par *par = HI_NULL;
2804 HIFB_MIRROR_MODE_E mirror_mode;
2805 par = (hifb_par *)info->par;
2806 if (osal_copy_from_user(&mirror_mode, argp, sizeof(HIFB_MIRROR_MODE_E))) {
2807 return -EFAULT;
2808 }
2809 if (hifb_set_mirrormode(par, mirror_mode) != HI_SUCCESS) {
2810 return HI_FAILURE;
2811 }
2812 return HI_SUCCESS;
2813 }
2814
hifb_get_mirror_mode(struct fb_info * info,unsigned long arg)2815 static hi_s32 hifb_get_mirror_mode(struct fb_info *info, unsigned long arg)
2816 {
2817 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2818 hifb_par *par = HI_NULL;
2819 hifb_display_info *display_info = HI_NULL;
2820 HIFB_LAYER_BUF_E buf_mode = 0;
2821 HIFB_MIRROR_MODE_E mirror_mode;
2822
2823 par = (hifb_par *)info->par;
2824 display_info = &par->display_info;
2825 hifb_get_bufmode(par, &buf_mode);
2826 mirror_mode = display_info->mirror_mode;
2827 if ((buf_mode == HIFB_LAYER_BUF_BUTT) || (buf_mode == HIFB_LAYER_BUF_NONE)) {
2828 hifb_error("doesn't support FBIOGET_MIRROR_MODE operation when in standard mode" \
2829 "or HI_FB_LAYER_BUF_NONE!\n");
2830 return HI_FAILURE;
2831 }
2832 if (osal_copy_to_user(argp, &mirror_mode, sizeof(HIFB_MIRROR_MODE_E))) {
2833 return -EFAULT;
2834 }
2835 return HI_SUCCESS;
2836 }
2837
hifb_set_rotate_mode(struct fb_info * info,unsigned long arg)2838 static hi_s32 hifb_set_rotate_mode(struct fb_info *info, unsigned long arg)
2839 {
2840 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2841 HIFB_ROTATE_MODE_E rotate_mode;
2842
2843 if (osal_copy_from_user(&rotate_mode, argp, sizeof(HIFB_ROTATE_MODE_E))) {
2844 return -EFAULT;
2845 /* Reset compression information */
2846 }
2847 if (hifb_set_rotatemode(info, rotate_mode) != HI_SUCCESS) {
2848 return HI_FAILURE;
2849 }
2850
2851 return HI_SUCCESS;
2852 }
2853
hifb_get_rotate_mode(struct fb_info * info,unsigned long arg)2854 static hi_s32 hifb_get_rotate_mode(struct fb_info *info, unsigned long arg)
2855 {
2856 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2857 hifb_par *par = HI_NULL;
2858 hifb_display_info *display_info = HI_NULL;
2859 HIFB_LAYER_BUF_E buf_mode = 0;
2860 HIFB_ROTATE_MODE_E rotate_mode;
2861
2862 par = (hifb_par *)info->par;
2863 display_info = &par->display_info;
2864 hifb_get_bufmode(par, &buf_mode);
2865
2866 if ((buf_mode == HIFB_LAYER_BUF_BUTT) || (buf_mode == HIFB_LAYER_BUF_NONE)) {
2867 hifb_error("doesn't support FBIOGET_ROTATE_MODE operation when in standard mode" \
2868 "or HIFB_LAYER_BUF_NONE!\n");
2869 return HI_FAILURE;
2870 }
2871 rotate_mode = display_info->rotate_mode;
2872 if (osal_copy_to_user(argp, &rotate_mode, sizeof(HIFB_ROTATE_MODE_E))) {
2873 return -EFAULT;
2874 }
2875 return HI_SUCCESS;
2876 }
2877
hifb_set_screen_size(struct fb_info * info,unsigned long arg)2878 static hi_s32 hifb_set_screen_size(struct fb_info *info, unsigned long arg)
2879 {
2880 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2881 hifb_par *par = HI_NULL;
2882 HIFB_SIZE_S screen_size;
2883 HIFB_SIZE_S max_screen_size = {0};
2884 HIFB_POINT_S pos = {0};
2885 unsigned long lock_flag;
2886
2887 par = (hifb_par *)info->par;
2888 if (is_cursor_layer(par->layer_id) && is_soft_cursor()) {
2889 hifb_error("you shouldn't set soft cursor screensize!");
2890 return HI_FAILURE;
2891 }
2892
2893 if (osal_copy_from_user(&screen_size, argp, sizeof(HIFB_SIZE_S))) {
2894 return -EFAULT;
2895 }
2896 if ((screen_size.u32Width == 0) || (screen_size.u32Height == 0)) {
2897 hifb_error("screen width(%u) height(%u) shouldn't be 0\n", screen_size.u32Width, screen_size.u32Height);
2898 return HI_FAILURE;
2899 }
2900 if (screen_size.u32Width % 2 || screen_size.u32Height % 2) { /* 2 for align */
2901 hifb_error("stScreenSize (%u, %u) should align to 2!\n", screen_size.u32Width, screen_size.u32Height);
2902 return HI_FAILURE;
2903 }
2904 hifb_get_maxscreensize(par, &max_screen_size.u32Width, &max_screen_size.u32Height);
2905 hifb_get_layerpos(par, &pos);
2906 if (screen_size.u32Width > max_screen_size.u32Width - pos.s32XPos) {
2907 hifb_warning("the sum of u32Width(%u) and s32XPos(%d) larger than Vodev screen width(%u),"\
2908 "width will be changed!\n",
2909 screen_size.u32Width,
2910 pos.s32XPos,
2911 max_screen_size.u32Width);
2912 screen_size.u32Width = max_screen_size.u32Width - pos.s32XPos;
2913 }
2914 if (screen_size.u32Height > max_screen_size.u32Height - pos.s32YPos) {
2915 hifb_warning("the sum of u32Height(%u) and s32YPos(%d) larger than Vodev screen height(%u),"\
2916 "width will be changed!\n",
2917 screen_size.u32Height,
2918 pos.s32YPos,
2919 max_screen_size.u32Height);
2920 screen_size.u32Height = max_screen_size.u32Height - pos.s32YPos;
2921 }
2922 hifb_spin_lock_irqsave(&par->lock, lock_flag);
2923 par->modifying = HI_TRUE;
2924 if (hifb_set_screensize(par, &screen_size.u32Width, &screen_size.u32Height) == HI_SUCCESS) {
2925 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_OUTRECT;
2926 }
2927 par->modifying = HI_FALSE;
2928 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
2929 return HI_SUCCESS;
2930 }
2931
hifb_get_screen_size(struct fb_info * info,unsigned long arg)2932 static hi_s32 hifb_get_screen_size(struct fb_info *info, unsigned long arg)
2933 {
2934 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
2935 hifb_par *par = HI_NULL;
2936 HIFB_SIZE_S screen_size = {0};
2937 par = (hifb_par *)info->par;
2938 hifb_get_screensize(par, &screen_size.u32Width, &screen_size.u32Height);
2939 return osal_copy_to_user(argp, &screen_size, sizeof(HIFB_SIZE_S));
2940 }
2941
flip_surface_check_param(struct fb_info * info,HIFB_SURFACEEX_S * surface_ex)2942 static hi_s32 flip_surface_check_param(struct fb_info *info, HIFB_SURFACEEX_S *surface_ex)
2943 {
2944 hifb_par *par = HI_NULL;
2945 hifb_display_info *display_info = HI_NULL;
2946 unsigned long addr;
2947 unsigned long smem_end;
2948 par = (hifb_par *)info->par;
2949 display_info = &par->display_info;
2950 if (surface_ex->stColorkey.bKeyEnable != HI_TRUE && surface_ex->stColorkey.bKeyEnable != HI_FALSE) {
2951 hifb_error("colorkey.enable(%d) should be HI_TRUE or HI_FALSE!\n", surface_ex->stColorkey.bKeyEnable);
2952 return HI_FAILURE;
2953 }
2954
2955 if (surface_ex->stAlpha.bAlphaEnable != HI_TRUE && surface_ex->stAlpha.bAlphaEnable != HI_FALSE) {
2956 hifb_error("alpha.alpha_en(%d) should be HI_TRUE or HI_FALSE!\n", surface_ex->stAlpha.bAlphaEnable);
2957 return HI_FAILURE;
2958 }
2959
2960 if (surface_ex->stAlpha.bAlphaChannel != HI_TRUE && surface_ex->stAlpha.bAlphaChannel != HI_FALSE) {
2961 hifb_error("alpha.alpha_chn_en (%d) should be HI_TRUE or HI_FALSE!\n",
2962 surface_ex->stAlpha.bAlphaChannel);
2963 return HI_FAILURE;
2964 }
2965
2966 if ((surface_ex->stColorkey.bKeyEnable) && (!g_drv_ops.capability[par->layer_id].bKeyRgb) &&
2967 (!g_drv_ops.capability[par->layer_id].bKeyAlpha)) {
2968 hifb_error("Layer %d doesn't support colorkey!\n", par->layer_id);
2969 return HI_FAILURE;
2970 }
2971
2972 if (surface_ex->stColorkey.bKeyEnable && display_info->premul) {
2973 hifb_error("colorkey and premul couldn't take effect at the same time!\n");
2974 return HI_FAILURE;
2975 }
2976
2977 addr = (unsigned long)surface_ex->u64PhyAddr;
2978 smem_end = hifb_get_smem_start(info) + hifb_get_smem_len(info) - hifb_get_yres(info) *
2979 hifb_get_line_length(info);
2980 if ((addr < hifb_get_smem_start(info)) || (addr > smem_end)) {
2981 hifb_error("the addr is out of range!\n");
2982 return HI_FAILURE;
2983 }
2984
2985 if (hifb_get_line_length(info) == 0) {
2986 return HI_FAILURE;
2987 }
2988
2989 if (hifb_get_bits_per_pixel(info) == 0) {
2990 return HI_FAILURE;
2991 }
2992
2993 return HI_SUCCESS;
2994 }
2995
2996 #ifdef __HuaweiLite__
/*
 * HuaweiLite variant: convert the flip physical address into x/y pixel
 * offsets, pan the display to them, and fill in the colorkey description
 * (palette lookup for <=8 bpp, driver color conversion otherwise).
 * Returns HI_SUCCESS or HI_FAILURE.
 */
static hi_s32 flip_surface_pan_display(struct fb_info *info, HIFB_SURFACEEX_S *surface_ex,
    hifb_colorkeyex *colorkey_ex)
{
    hi_s32 ret;
    unsigned long addr;
    unsigned int differ;       /* byte offset of the flip address into video memory */
    unsigned int x_offset;     /* pan offset in pixels */
    unsigned int y_offset;     /* pan offset in lines */
    hi_u32 x_value, y_value;
    struct fb_overlayinfo_s oinfo;
    addr = (unsigned long)surface_ex->u64PhyAddr;
    differ = addr - hifb_get_smem_start(info);

    x_value = hifb_get_bits_per_pixel(info);
    y_value = hifb_get_line_length(info);
    if ((x_value == 0) || (y_value == 0)) {
        return HI_FAILURE;
    }
    /* 3 is 8 bit: bytes-within-line * 8 / bpp gives the pixel offset */
    x_offset = ((differ % hifb_get_line_length(info)) << 3) / x_value;
    y_offset = differ / y_value;

    /* Pan on a local copy first; commit to info->oinfo only on success. */
    ret = memcpy_s(&oinfo, sizeof(oinfo), &info->oinfo, sizeof(struct fb_overlayinfo_s));
    hifb_unequal_eok_return(ret);
    oinfo.sarea.x = x_offset;
    oinfo.sarea.y = y_offset;

    if (hifb_pan_display((struct fb_vtable_s *)info, &oinfo) < 0) {
        hifb_error("error!\n");
        return HI_FAILURE;
    }

    info->oinfo.sarea.x = x_offset;
    info->oinfo.sarea.y = y_offset;
    colorkey_ex->key = surface_ex->stColorkey.u32Key;
    colorkey_ex->key_enable = surface_ex->stColorkey.bKeyEnable;
    if (hifb_get_bits_per_pixel(info) <= 8) { /* 8 bit: palette-indexed formats */
        /* NOTE(review): the bound is 2 << bpp (= 2^(bpp+1)); a palette of 2^bpp
         * entries would be 1 << bpp — confirm whether this is intentional. */
        if (surface_ex->stColorkey.u32Key >= (hi_u32)(2 << hifb_get_bits_per_pixel(info))) { /* 2 bit Colorkey */
            hifb_error("The key :%u is out of range the palette: %u!\n",
                surface_ex->stColorkey.u32Key, 2 << hifb_get_bits_per_pixel(info)); /* 2 bit Colorkey */
            return HI_FAILURE;
        }
        /* Key matches a single palette entry: min == max for each channel. */
        colorkey_ex->blue_max = colorkey_ex->blue_min = info->cmap.blue[surface_ex->stColorkey.u32Key];
        colorkey_ex->green_max = colorkey_ex->green_min = info->cmap.green[surface_ex->stColorkey.u32Key];
        colorkey_ex->red_max = colorkey_ex->red_min = info->cmap.red[surface_ex->stColorkey.u32Key];
    } else {
        g_drv_ops.hifb_drv_color_convert(info, colorkey_ex);
    }
    return HI_SUCCESS;
}
3047 #else
/*
 * Linux variant: convert the flip physical address into x/y pixel offsets,
 * pan the display via fb_pan_display(), and fill in the colorkey description
 * (palette lookup for <=8 bpp, driver color conversion otherwise).
 * Returns HI_SUCCESS or HI_FAILURE.
 */
static hi_s32 flip_surface_pan_display(struct fb_info *info, HIFB_SURFACEEX_S *surface_ex,
    hifb_colorkeyex *colorkey_ex)
{
    hi_s32 ret;
    unsigned long addr;
    unsigned int differ;       /* byte offset of the flip address into video memory */
    unsigned int x_offset;     /* pan offset in pixels */
    unsigned int y_offset;     /* pan offset in lines */
    hi_u32 x_value, y_value;
    struct fb_var_screeninfo var;
    addr = (unsigned long)surface_ex->u64PhyAddr;
    differ = addr - hifb_get_smem_start(info);

    x_value = hifb_get_bits_per_pixel(info);
    y_value = hifb_get_line_length(info);
    if ((x_value == 0) || (y_value == 0)) {
        return HI_FAILURE;
    }
    /* 8 bit (2^3): bytes-within-line * 8 / bpp gives the pixel offset */
    x_offset = ((differ % hifb_get_line_length(info)) << 3) / x_value;
    y_offset = differ / y_value;

    /* Pan on a local copy of var so a failed pan leaves info->var untouched. */
    ret = memcpy_s(&var, sizeof(var), &info->var, sizeof(var));
    hifb_unequal_eok_return(ret);
    var.xoffset = x_offset;
    var.yoffset = y_offset;

    if (fb_pan_display(info, &var) < 0) {
        hifb_error("pan_display error!\n");
        return HI_FAILURE;
    }

    colorkey_ex->key = surface_ex->stColorkey.u32Key;
    colorkey_ex->key_enable = surface_ex->stColorkey.bKeyEnable;

    if (hifb_get_bits_per_pixel(info) <= 8) { /* 8 bit: palette-indexed formats */
        /* NOTE(review): the bound is 2 << bpp (= 2^(bpp+1)); a palette of 2^bpp
         * entries would be 1 << bpp — confirm whether this is intentional. */
        if (surface_ex->stColorkey.u32Key >= (hi_u32)(2 << hifb_get_bits_per_pixel(info))) { /* 2 bit Colorkey */
            hifb_error("The key :%u is out of range the palette: %u!\n",
                surface_ex->stColorkey.u32Key, 2 << hifb_get_bits_per_pixel(info)); /* 2 bit Colorkey */
            return HI_FAILURE;
        }

        /* Key matches a single palette entry: min == max for each channel. */
        colorkey_ex->blue_max = colorkey_ex->blue_min = info->cmap.blue[surface_ex->stColorkey.u32Key];
        colorkey_ex->green_max = colorkey_ex->green_min = info->cmap.green[surface_ex->stColorkey.u32Key];
        colorkey_ex->red_max = colorkey_ex->red_min = info->cmap.red[surface_ex->stColorkey.u32Key];
    } else {
        g_drv_ops.hifb_drv_color_convert(&info->var, colorkey_ex);
    }

    return HI_SUCCESS;
}
3099 #endif
3100
hifb_flip_surface(struct fb_info * info,unsigned long arg)3101 static hi_s32 hifb_flip_surface(struct fb_info *info, unsigned long arg)
3102 {
3103 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
3104 hifb_par *par = HI_NULL;
3105 HIFB_SURFACEEX_S surface_ex;
3106 unsigned long lock_flag;
3107 hifb_colorkeyex colorkey_ex = {0};
3108
3109 par = (hifb_par *)info->par;
3110
3111 if (is_cursor_layer(par->layer_id) && is_soft_cursor()) {
3112 hifb_error("you shouldn't use FBIOFLIP_SURFACE for soft cursor!");
3113 return HI_FAILURE;
3114 }
3115
3116 if (osal_copy_from_user(&surface_ex, argp, sizeof(HIFB_SURFACEEX_S))) {
3117 return -EFAULT;
3118 }
3119 if (flip_surface_check_param(info, &surface_ex) != HI_SUCCESS) {
3120 return HI_FAILURE;
3121 }
3122 if (flip_surface_pan_display(info, &surface_ex, &colorkey_ex) != HI_SUCCESS) {
3123 return HI_FAILURE;
3124 }
3125 hifb_spin_lock_irqsave(&par->lock, lock_flag);
3126 par->modifying = HI_TRUE;
3127 hifb_set_alpha(par, &surface_ex.stAlpha);
3128 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_ALPHA;
3129 if (g_drv_ops.capability[par->layer_id].bKeyRgb ||
3130 g_drv_ops.capability[par->layer_id].bKeyAlpha) {
3131 hifb_set_key(par, &colorkey_ex);
3132 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_COLORKEY;
3133 }
3134 par->modifying = HI_FALSE;
3135 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
3136
3137 return HI_SUCCESS;
3138 }
3139
/*
 * Turn compression on for the layer after checking the preconditions:
 * ARGB-only pixel format, no mirror/rotate, and a double-buffer refresh mode.
 * On success, marks the whole display as the region to (re)compress.
 * Returns HI_SUCCESS or HI_FAILURE with an error log.
 */
static hi_s32 set_compression_process(struct fb_info *info)
{
    hifb_par *par = HI_NULL;
    volatile hifb_compress_info *compress_info = HI_NULL;
    hifb_display_info *display_info = HI_NULL;
    HIFB_LAYER_BUF_E buf_mode;
    unsigned long lock_flag;

    par = (hifb_par *)info->par;
    compress_info = &par->compress_info;
    display_info = &par->display_info;

    /* Compression hardware only handles these ARGB formats. */
    if (par->color_format != HIFB_FMT_ARGB8888 && par->color_format != HIFB_FMT_ARGB1555 &&
        par->color_format != HIFB_FMT_ARGB4444) {
        hifb_error("compression only support pixel format (ARGB8888,ARGB1555,ARGB4444)\n");
        return HI_FAILURE;
    }
    /* Mirror/rotate paths bypass the compressor, so they are mutually exclusive. */
    if ((display_info->mirror_mode != HIFB_MIRROR_NONE) ||
        (display_info->rotate_mode != HIFB_ROTATE_NONE)) {
        hifb_error("Can't do compression when mirror or rotate!\n");
        return HI_FAILURE;
    }
    hifb_get_bufmode(par, &buf_mode);

    /* FB uses frame decompression, can not be displayed while refreshing, so only supports double buf mode */
    if ((buf_mode != HIFB_LAYER_BUF_DOUBLE) && (buf_mode != HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
        hifb_error("only HI_FB_LAYER_BUF_DOUBLE or HI_FB_LAYER_BUF_DOUBLE_IMMEDIATE mode support compress!\n");
        return HI_FAILURE;
    }

    /* Record the entire image as the area to be compressed */
    /* Written under the layer lock so the ISR sees a consistent snapshot. */
    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    compress_info->compress_rect.x = 0;
    compress_info->compress_rect.y = 0;
    compress_info->compress_rect.w = display_info->display_width;
    compress_info->compress_rect.h = display_info->display_height;
    compress_info->update_finished = HI_TRUE;
    compress_info->delay = HI_TRUE;
    compress_info->compress_open = HI_TRUE;
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

    return HI_SUCCESS;
}
3183
/*
 * Toggle layer compression to the requested state. Enabling delegates to
 * set_compression_process(); disabling clears the open flag, waits for the
 * next register-config interrupt, then frees the compression buffer and
 * resets the pending update rectangle.
 * Returns HI_SUCCESS, or HI_FAILURE on a bad flag / failed enable.
 */
static hi_s32 set_compression_start(struct fb_info *info, hi_bool is_compress)
{
    hifb_par *par = HI_NULL;
    volatile hifb_compress_info *compress_info = HI_NULL;
    unsigned long lock_flag;
    par = (hifb_par *)info->par;
    compress_info = &par->compress_info;
    if (is_compress != HI_TRUE && is_compress != HI_FALSE) {
        hifb_error("compress(%d) should be HI_TRUE or HI_FALSE!\n", is_compress);
        return HI_FAILURE;
    }
    /* No-op when the requested state matches the current one. */
    if (is_compress != compress_info->compress_open) {
        if (is_compress) {
            if (set_compression_process(info) != HI_SUCCESS) {
                return HI_FAILURE;
            }
        } else {
            hifb_spin_lock_irqsave(&par->lock, lock_flag);
            compress_info->compress_open = HI_FALSE;
            hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
            /* Waiting for an interrupt before proceeding to the next two operations */
            hifb_wait_regconfig_work(par->layer_id);
            hifb_free_compress_buffer(par);
            /* Reset compression information */
            compress_info->update_rect.w = 0;
            compress_info->update_rect.h = 0;
        }
    }
    return HI_SUCCESS;
}
3214
hifb_set_compression_mode(struct fb_info * info,unsigned long arg)3215 static hi_s32 hifb_set_compression_mode(struct fb_info *info, unsigned long arg)
3216 {
3217 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
3218 hifb_par *par = HI_NULL;
3219 hi_bool is_compress = HI_FALSE;
3220 par = (hifb_par *)info->par;
3221 if (!g_drv_ops.capability[par->layer_id].bDcmp) {
3222 hifb_error("Layer %d doesn't support compression operation!\n", par->layer_id);
3223 return HI_FAILURE;
3224 }
3225
3226 if (osal_copy_from_user(&is_compress, argp, sizeof(hi_bool))) {
3227 return -EFAULT;
3228 }
3229
3230 /* check detect zone */
3231 if (set_compression_start(info, is_compress) != HI_SUCCESS) {
3232 return HI_FAILURE;
3233 }
3234 return HI_SUCCESS;
3235 }
3236
hifb_get_compression_mode(struct fb_info * info,unsigned long arg)3237 static hi_s32 hifb_get_compression_mode(struct fb_info *info, unsigned long arg)
3238 {
3239 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
3240 hifb_par *par = HI_NULL;
3241 volatile hifb_compress_info *compress_info = HI_NULL;
3242 hi_bool is_compress = HI_FALSE;
3243 par = (hifb_par *)info->par;
3244 compress_info = &par->compress_info;
3245 if (!g_drv_ops.capability[par->layer_id].bDcmp) {
3246 hifb_warning("Layer %d doesn't support get compression!\n", par->layer_id);
3247 } else {
3248 is_compress = compress_info->compress_open;
3249 }
3250 return osal_copy_to_user(argp, &is_compress, sizeof(hi_bool));
3251 }
3252
/*
 * FBIOPUT_MDDRDETECT handler: configure the DDR-detect zone for the layer.
 * Validates driver callbacks, layer capability, and the requested zone
 * against the current one, then stages the new zone parameters for the ISR
 * to pick up. Compiled out (always fails) without MDDRDETECT.
 */
static hi_s32 hifb_set_mddrdetect(struct fb_info *info, unsigned long arg)
{
#ifdef MDDRDETECT
    hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
    hifb_par *par = HI_NULL;
    volatile hifb_compress_info *compress_info = HI_NULL;
    HIFB_DDRZONE_S ddr_zone_para = {0};
    unsigned long lock_flag;
    par = (hifb_par *)info->par;
    compress_info = &par->compress_info;

    /* Both driver callbacks must be implemented before we can proceed. */
    if ((g_drv_ops.hifb_drv_is_layer_support_ddr_detect == HI_NULL) ||
        (g_drv_ops.hifb_drv_check_ddr_dectect_zone == HI_NULL)) {
        hifb_error("ptr is HI_NULL\n");
        return HI_FAILURE;
    }

    if (g_drv_ops.hifb_drv_is_layer_support_ddr_detect(par->layer_id) == HI_FALSE) {
        hifb_error("Layer %d doesn't support DDR detect PUT.\n", par->layer_id);
        return HI_FAILURE;
    }
    if (!g_drv_ops.capability[par->layer_id].bDcmp) {
        hifb_error("Layer %d doesn't support DDR detect!\n", par->layer_id);
        return HI_FAILURE;
    }
    if (osal_copy_from_user(&ddr_zone_para, argp, sizeof(HIFB_DDRZONE_S))) {
        return -EFAULT;
    }
    /* The requested zone must not conflict with the currently active one. */
    if (g_drv_ops.hifb_drv_check_ddr_dectect_zone(par->layer_id, ddr_zone_para.start_section,
        ddr_zone_para.zone_nums, compress_info->start_section, compress_info->zone_nums) == HI_FAILURE) {
        hifb_error("Layer %d DDR detect zone ERR!\n", par->layer_id);

        return HI_FAILURE;
    }

    hifb_spin_lock_irqsave(&par->lock, lock_flag);

    /* If you repeat the settings, return directly */
    if (ddr_zone_para.start_section == compress_info->start_section &&
        ddr_zone_para.zone_nums == compress_info->zone_nums) {
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

        return HI_SUCCESS;
    }

    /* Stage the new zone; zone_nums == 0 means "clear the zone". */
    compress_info->new_start_section = ddr_zone_para.start_section;
    compress_info->new_zone_nums = ddr_zone_para.zone_nums;
    compress_info->clear_zone = (ddr_zone_para.zone_nums == 0) ? HI_TRUE : HI_FALSE;
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

    return HI_SUCCESS;
#else
    hi_unused(info);
    hi_unused(arg);
    hifb_error("the set mddrdetect operation is unsupported!\n");
    return HI_FAILURE;
#endif
}
3311
hifb_get_mddrdetect(struct fb_info * info,unsigned long arg)3312 static hi_s32 hifb_get_mddrdetect(struct fb_info *info, unsigned long arg)
3313 {
3314 #ifdef MDDRDETECT
3315 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
3316 hifb_par *par = HI_NULL;
3317 volatile hifb_compress_info *compress_info = HI_NULL;
3318 HIFB_DDRZONE_S ddr_zone_para = {0};
3319 unsigned long lock_flag;
3320 par = (hifb_par *)info->par;
3321 compress_info = &par->compress_info;
3322 if (g_drv_ops.hifb_drv_is_layer_support_ddr_detect(par->layer_id) == HI_FALSE) {
3323 hifb_error("Layer %d doesn't support DDR detect GET.\n", par->layer_id);
3324 return HI_FAILURE;
3325 }
3326 if (!g_drv_ops.capability[par->layer_id].bDcmp) {
3327 hifb_error("Layer %d doesn't support DDR detect!\n", par->layer_id);
3328 return HI_FAILURE;
3329 }
3330
3331 hifb_spin_lock_irqsave(&par->lock, lock_flag);
3332 ddr_zone_para.start_section = compress_info->new_start_section;
3333 ddr_zone_para.zone_nums = compress_info->new_zone_nums;
3334 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
3335
3336 return osal_copy_to_user(argp, &ddr_zone_para, sizeof(HIFB_DDRZONE_S));
3337 #else
3338 hi_unused(info);
3339 hi_unused(arg);
3340 hifb_error("the get mddrdetect operation is unsupported!\n");
3341 return HI_FAILURE;
3342 #endif
3343 }
3344
hifb_set_dynamic_range(struct fb_info * info,unsigned long arg)3345 static hi_s32 hifb_set_dynamic_range(struct fb_info *info, unsigned long arg)
3346 {
3347 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
3348 hifb_par *par = HI_NULL;
3349 HIFB_DYNAMIC_RANGE_E dynamic_range = HIFB_DYNAMIC_RANGE_BUTT;
3350 par = (hifb_par *)info->par;
3351 if (g_drv_ops.capability[par->layer_id].bGHDR == HI_FALSE) {
3352 hifb_error("Layer %d# doesn't support HDR function.\n", par->layer_id);
3353 return HI_FAILURE;
3354 }
3355 if (osal_copy_from_user(&dynamic_range, argp, sizeof(HIFB_DYNAMIC_RANGE_E))) {
3356 return -EFAULT;
3357 }
3358 if (hifb_set_dynamic_range_display(par, dynamic_range) != HI_SUCCESS) {
3359 return HI_FAILURE;
3360 }
3361 return HI_SUCCESS;
3362 }
3363
hifb_get_dynamic_range(struct fb_info * info,unsigned long arg)3364 static hi_s32 hifb_get_dynamic_range(struct fb_info *info, unsigned long arg)
3365 {
3366 hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
3367 hifb_par *par = HI_NULL;
3368 HIFB_DYNAMIC_RANGE_E dynamic_range;
3369 hifb_display_info *display_info = HI_NULL;
3370 par = (hifb_par *)info->par;
3371 display_info = &par->display_info;
3372 if (g_drv_ops.capability[par->layer_id].bGHDR == HI_FALSE) {
3373 hifb_error("Layer %d# doesn't support HDR function.\n", par->layer_id);
3374 return HI_FAILURE;
3375 }
3376 dynamic_range = display_info->dynamic_range;
3377 if (osal_copy_to_user(argp, &dynamic_range, sizeof(HIFB_DYNAMIC_RANGE_E))) {
3378 return -EFAULT;
3379 }
3380 return HI_SUCCESS;
3381 }
3382
drv_hifb_create(struct fb_info * info,unsigned long arg)3383 static hi_s32 drv_hifb_create(struct fb_info *info, unsigned long arg)
3384 {
3385 hifb_par *par = HI_NULL;
3386 hi_u32 layer_id;
3387 unsigned long lock_flag;
3388 hi_unused(arg);
3389 par = (hifb_par *)info->par;
3390 if (par == HI_NULL) {
3391 hifb_error("par is NULL failed!\n");
3392 return HI_FAILURE;
3393 }
3394 layer_id = par->layer_id;
3395
3396 /* when unbind/bind */
3397 if (g_drv_ops.hifb_open_layer(layer_id) != HI_SUCCESS) {
3398 hifb_error("Open graphic layer %u# failed!\n", layer_id);
3399 return HI_FAILURE;
3400 }
3401
3402 if (g_drv_ops.hifb_drv_enable_layer(layer_id, HI_TRUE) != HI_SUCCESS) {
3403 hifb_error("enable graphic layer %u# failed!\n", layer_id);
3404 return HI_FAILURE;
3405 }
3406 hifb_spin_lock_irqsave(&par->lock, lock_flag);
3407 par->show = HI_TRUE;
3408 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
3409 return HI_SUCCESS;
3410 }
3411
3412 #ifdef CONFIG_DRIVERS_HDF_DISP
hdf_panel_set_powerstatus(struct fb_info * info,unsigned long arg)3413 static hi_s32 hdf_panel_set_powerstatus(struct fb_info *info, unsigned long arg)
3414 {
3415 int32_t ret = HI_FAILURE;
3416
3417 hifb_dbg_info("%s cmd = %d enter\n", __func__, arg);
3418 switch (arg) {
3419 case POWER_STATUS_ON:
3420 ret = DispOn(info->node);
3421 hifb_dbg_info("%s cmd = %d\n", __func__, arg);
3422 break;
3423 case POWER_STATUS_STANDBY:
3424 case POWER_STATUS_SUSPEND:
3425 case POWER_STATUS_OFF:
3426 ret = DispOff(info->node);
3427 hifb_dbg_info("%s cmd = %d\n", __func__, arg);
3428 break;
3429 default:
3430 hifb_error("%s cmd not support\n", __func__);
3431 break;
3432 }
3433 return ret;
3434 }
3435
hdf_panel_set_backlight(struct fb_info * info,unsigned long arg)3436 static hi_s32 hdf_panel_set_backlight(struct fb_info *info, unsigned long arg)
3437 {
3438 int32_t ret = HI_FAILURE;
3439
3440 ret = SetDispBacklight(info->node, arg);
3441 return ret;
3442 }
3443 #endif
3444
drv_hifb_release(struct fb_info * info,unsigned long arg)3445 static hi_s32 drv_hifb_release(struct fb_info *info, unsigned long arg)
3446 {
3447 hifb_par *par = HI_NULL;
3448 hi_u32 layer_id;
3449 unsigned long lock_flag;
3450 hi_unused(arg);
3451 par = (hifb_par *)info->par;
3452 if (par == HI_NULL) {
3453 hifb_error("par is NULL failed!\n");
3454 return HI_FAILURE;
3455 }
3456 layer_id = par->layer_id;
3457
3458 if (g_drv_ops.hifb_drv_enable_layer(layer_id, HI_FALSE) != HI_SUCCESS) {
3459 hifb_error("disable graphic layer %u# failed!\n", layer_id);
3460 return HI_FAILURE;
3461 }
3462 hifb_spin_lock_irqsave(&par->lock, lock_flag);
3463 par->show = HI_FALSE;
3464 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
3465 return HI_SUCCESS;
3466 }
3467
3468 #ifdef __HuaweiLite__
hifb_ioctl_liteos_standard_io(struct fb_info * info,hi_u32 cmd,unsigned long arg,hi_bool * is_continue)3469 static hi_s32 hifb_ioctl_liteos_standard_io(struct fb_info *info, hi_u32 cmd, unsigned long arg, hi_bool *is_continue)
3470 {
3471 hi_s32 ret;
3472 /* for FBIOPUT_SCREENINFO_HIFB */
3473 struct hifb_info *info_temp = (struct hifb_info *)(hi_uintptr_t)arg;
3474 struct hifb_info info_copy = {0};
3475 /* for FBIOPAN_DISPLAY_HIFB */
3476 struct fb_overlayinfo_s *oinfo = (struct fb_overlayinfo_s *)(hi_uintptr_t)arg;
3477
3478 switch (cmd) {
3479 case FBIOGET_SCREENINFO_HIFB:
3480 ret = memcpy_s((void *)(hi_uintptr_t)arg, sizeof(struct hifb_info), info, sizeof(struct hifb_info));
3481 hifb_unequal_eok_return(ret);
3482 break;
3483 case FBIOPUT_SCREENINFO_HIFB:
3484 ret = memcpy_s(&info_copy, sizeof(info_copy), info, sizeof(info_copy));
3485 hifb_unequal_eok_return(ret);
3486 if (info->vtable.fb_set_par) {
3487 info->vinfo.xres = info_temp->vinfo.xres;
3488 info->vinfo.yres = info_temp->vinfo.yres;
3489 info->vinfo.fmt = info_temp->vinfo.fmt;
3490 info->oinfo.sarea.w = info_temp->oinfo.sarea.w;
3491 info->oinfo.sarea.h = info_temp->oinfo.sarea.h;
3492 info->oinfo.sarea.x = info_temp->oinfo.sarea.x;
3493 info->oinfo.sarea.y = info_temp->oinfo.sarea.y;
3494 info->oinfo.bpp = info_temp->oinfo.bpp;
3495 if (info->vtable.fb_set_par(&info->vtable) != HI_SUCCESS) {
3496 hifb_error("Put screeninfo error! ret=%d\n", ret);
3497 ret = memcpy_s(info, sizeof(struct hifb_info), &info_copy, sizeof(info_copy));
3498 hifb_unequal_eok_return(ret);
3499 return HI_FAILURE;
3500 }
3501 hifb_pan_display(&info->vtable, &info->oinfo);
3502 }
3503 break;
3504 case FBIOPAN_DISPLAY_HIFB:
3505 if (info->vtable.fb_pan_display) {
3506 if (info->vtable.fb_pan_display(&info->vtable, oinfo) != HI_SUCCESS) {
3507 hifb_error("Put screeninfo error!");
3508 ret = memcpy_s(info, sizeof(struct hifb_info), &info_copy, sizeof(info_copy));
3509 hifb_unequal_eok_return(ret);
3510 return HI_FAILURE;
3511 }
3512 info->oinfo.sarea.x = oinfo->sarea.x;
3513 info->oinfo.sarea.y = oinfo->sarea.y;
3514 }
3515 break;
3516 default:
3517 *is_continue = HI_TRUE;
3518 break;
3519 }
3520 return HI_SUCCESS;
3521 }
3522 #endif
3523
3524 /*
3525 * Function : hifb_ioctl
3526 * Description : set the colorkey or alpha for overlay
3527 * Return : return 0 if succeed, otherwise return error code
3528 */
3529 #ifdef __HuaweiLite__
static hi_s32 hifb_ioctl(struct fb_vtable_s *vtable, int cmd, unsigned long arg)
#else
static hi_s32 hifb_ioctl(struct fb_info *info, hi_u32 cmd, unsigned long arg)
#endif
{
    /* _IOC_NR extracts the command index used to look up the dispatch table. */
    hi_u8 hifb_cmd = _IOC_NR(((hi_u32)cmd));
    hi_void __user *argp = (hi_void __user *)(hi_uintptr_t)arg;
#ifdef __HuaweiLite__
    struct hifb_info *info = (struct hifb_info *)vtable;
    hi_bool is_continue = HI_FALSE;
#endif
    if (hifb_ioctl_check_param(info, cmd, argp) != HI_SUCCESS) {
        return HI_FAILURE;
    }
#ifdef __HuaweiLite__
    /* LiteOS standard fb ioctls are handled first; is_continue signals fall-through. */
    if (hifb_ioctl_liteos_standard_io(info, cmd, arg, &is_continue) != HI_SUCCESS) {
        return HI_FAILURE;
    }
    if (is_continue != HI_TRUE) {
        return HI_SUCCESS;
    }
#endif
    /* Bounds-check both the command index and the table slot it maps to. */
    if ((hifb_cmd < 1) || (hifb_cmd >= DRV_HIFB_IOCTL_CMD_NUM_MAX) || (g_drv_hifb_ctl_num[hifb_cmd] < 1) ||
        (g_drv_hifb_ctl_num[hifb_cmd] >= DRV_HIFB_IOCTL_FUNC_ITEM_NUM_MAX)) {
        return HI_FAILURE;
    }
    if (g_drv_hifb_ioctl_func[g_drv_hifb_ctl_num[hifb_cmd]].func == HI_NULL) {
        return HI_FAILURE;
    }
    /* The full ioctl code must match the table entry, not just its _IOC_NR index. */
#ifdef __HuaweiLite__
    if ((hi_u32)cmd != g_drv_hifb_ioctl_func[g_drv_hifb_ctl_num[hifb_cmd]].cmd) {
#else
    if (cmd != g_drv_hifb_ioctl_func[g_drv_hifb_ctl_num[hifb_cmd]].cmd) {
#endif
        hifb_error("the command:0x%x is unsupported!\n", cmd);
        return HI_FAILURE;
    }
    return g_drv_hifb_ioctl_func[g_drv_hifb_ctl_num[hifb_cmd]].func(info, arg);
}
3569
3570 #ifdef CONFIG_COMPAT
3571 static hi_s32 hifb_compat_ioctl(struct fb_info *info, unsigned cmd, unsigned long arg)
3572 {
3573 return hifb_ioctl(info, cmd, arg);
3574 }
3575 #endif
3576
3577 static hi_void hifb_version(hi_void)
3578 {
3579 /* 80 size Use "strings hifb.ko | grep "HIFB_MAIN_VERSION"" to get the version */
3580 hi_char hifb_version[80] =
3581 "HIFB_MAIN_VERSION[" mkmarcotostr(HIFB_MAIN_VERSION) "] Build Time[" __DATE__ ", "__TIME__ "]";
3582 hifb_dbg_info("%s\n", hifb_version);
3583 hi_unused(hifb_version);
3584 }
3585
3586 #ifndef __HuaweiLite__
3587 static hi_s32 hifb_bitfieldcmp(struct fb_bitfield x, struct fb_bitfield y)
3588 {
3589 if ((x.offset == y.offset) && (x.length == y.length) && (x.msb_right == y.msb_right)) {
3590 return 0;
3591 } else {
3592 return -1;
3593 }
3594 }
3595
3596 static hi_u32 hifb_getbppbyfmt(HIFB_COLOR_FMT_E color_fmt)
3597 {
3598 switch (color_fmt) {
3599 case HIFB_FMT_RGB565:
3600 case HIFB_FMT_KRGB444:
3601 case HIFB_FMT_KRGB555:
3602 case HIFB_FMT_ARGB4444:
3603 case HIFB_FMT_ARGB1555:
3604 case HIFB_FMT_RGBA4444:
3605 case HIFB_FMT_RGBA5551:
3606 case HIFB_FMT_ACLUT88:
3607 case HIFB_FMT_BGR565:
3608 case HIFB_FMT_ABGR1555:
3609 case HIFB_FMT_ABGR4444:
3610 case HIFB_FMT_KBGR444:
3611 case HIFB_FMT_KBGR555:
3612 return 16; /* 16 is bpp fmt */
3613 case HIFB_FMT_RGB888:
3614 case HIFB_FMT_ARGB8565:
3615 case HIFB_FMT_RGBA5658:
3616 case HIFB_FMT_ABGR8565:
3617 case HIFB_FMT_BGR888:
3618 return 24; /* 24 is bpp fmt */
3619 case HIFB_FMT_KRGB888:
3620 case HIFB_FMT_ARGB8888:
3621 case HIFB_FMT_RGBA8888:
3622 case HIFB_FMT_ABGR8888:
3623 case HIFB_FMT_KBGR888:
3624 return 32; /* 32 is bpp fmt */
3625 case HIFB_FMT_1BPP:
3626 return 1;
3627 case HIFB_FMT_2BPP:
3628 return 2; /* 2 is bpp fmt */
3629 case HIFB_FMT_4BPP:
3630 return 4; /* 4 is bpp fmt */
3631 case HIFB_FMT_8BPP:
3632 case HIFB_FMT_ACLUT44:
3633 return 8; /* 8 is bpp fmt */
3634 default:
3635 return 0;
3636 }
3637 }
3638
3639 static HIFB_COLOR_FMT_E hifb_getfmtbyargb(struct fb_bitfield *red, struct fb_bitfield *green, struct fb_bitfield *blue,
3640 struct fb_bitfield *transp, hi_u32 color_depth)
3641 {
3642 hi_u32 i;
3643 hi_u32 bpp;
3644
3645 if ((red == HI_NULL) || (green == HI_NULL) || (blue == HI_NULL) || (transp == HI_NULL)) {
3646 return HIFB_FMT_BUTT;
3647 }
3648
3649 /* not support color palette low than 8 bit */
3650 if (color_depth < 8) {
3651 return HIFB_FMT_BUTT;
3652 }
3653 /* not support color palette low than 8 bit */
3654 if (color_depth == 8) {
3655 return HIFB_FMT_8BPP;
3656 }
3657
3658 /*
3659 * Find the pixel format (HIFB_ARGB_BITINFO_S) corresponding to the given red,
3660 * green, and blue bit field information and the number of bits per pixel (bpp)
3661 */
3662 for (i = 0; i < sizeof(g_argb_bit_field) / sizeof(hifb_argb_bitinfo); i++) {
3663 if ((hifb_bitfieldcmp(*red, g_argb_bit_field[i].red) == 0) &&
3664 (hifb_bitfieldcmp(*green, g_argb_bit_field[i].green) == 0) &&
3665 (hifb_bitfieldcmp(*blue, g_argb_bit_field[i].blue) == 0) &&
3666 (hifb_bitfieldcmp(*transp, g_argb_bit_field[i].transp) == 0)) {
3667 bpp = hifb_getbppbyfmt(i);
3668 if (bpp == color_depth) {
3669 return i;
3670 }
3671 }
3672 }
3673 i = HIFB_FMT_BUTT;
3674 return i;
3675 }
3676 #endif
3677
3678 hi_s32 hifb_check_mem_enough(struct fb_info *info, hi_u32 pitch, hi_u32 height)
3679 {
3680 hi_u32 buffer_num = 0;
3681 hi_u32 buffer_size;
3682 hifb_par *par = HI_NULL;
3683 hifb_refresh_info *refresh_info = HI_NULL;
3684 if (info == HI_NULL) {
3685 return HI_FAILURE;
3686 }
3687 if (info->par == HI_NULL) {
3688 return HI_FAILURE;
3689 }
3690 par = (hifb_par *)info->par;
3691 refresh_info = &par->refresh_info;
3692
3693 switch (refresh_info->buf_mode) {
3694 case HIFB_LAYER_BUF_DOUBLE:
3695 case HIFB_LAYER_BUF_DOUBLE_IMMEDIATE:
3696 buffer_num = 2; /* 2 buffer num */
3697 break;
3698 case HIFB_LAYER_BUF_ONE:
3699 buffer_num = 1;
3700 break;
3701 default:
3702 return HI_SUCCESS;
3703 }
3704 /* The interface setting requires uBuffersize, the actual memory size info->fix.smem_len */
3705 buffer_size = buffer_num * pitch * height;
3706 if (hifb_get_smem_len(info) >= buffer_size) {
3707 return HI_SUCCESS;
3708 }
3709 hifb_error("memory is not enough! now is %u u32Pitch %u u32Height %u expect %u\n", hifb_get_smem_len(info),
3710 pitch, height, buffer_size);
3711 return HI_FAILURE;
3712 }
3713
3714 /* Address check only for uncompressed data */
3715 static hi_s32 hifb_check_phyaddr(HIFB_BUFFER_S *canvas_buf)
3716 {
3717 HIFB_BUFFER_S *can_buf = canvas_buf;
3718 hi_u64 len;
3719 if (canvas_buf == HI_NULL) {
3720 return HI_FAILURE;
3721 }
3722
3723 len = (hi_u64)can_buf->stCanvas.u32Pitch * can_buf->stCanvas.u32Height;
3724
3725 return cmpi_check_mmz_phy_addr(can_buf->stCanvas.u64PhyAddr, len);
3726 }
3727
3728 #ifndef __HuaweiLite__
/*
 * Check that the pixel format described by var's ARGB bit fields and depth
 * is both a known HIFB format and supported by this layer's capability table.
 * Returns HI_SUCCESS, -EINVAL for unknown/unsupported formats, HI_FAILURE on bad args.
 */
static hi_s32 hifb_check_fmt(struct fb_var_screeninfo *var, struct fb_info *info)
{
    HIFB_COLOR_FMT_E fmt;
    hifb_par *par = HI_NULL;
    hi_u32 layer_id;
    if ((info == HI_NULL) || (var == HI_NULL)) {
        return HI_FAILURE;
    }
    if (info->par == HI_NULL) {
        return HI_FAILURE;
    }
    par = (hifb_par *)info->par;
    layer_id = par->layer_id;

    /* resolve the bit-field layout to a HIFB format enum */
    fmt = hifb_getfmtbyargb(&var->red, &var->green, &var->blue, &var->transp, var->bits_per_pixel);
    if (fmt == HIFB_FMT_BUTT) {
        hifb_error("Unknown fmt(offset, length) r:(%u, %u, %u) , g:(%u, %u, %u), b(%u, %u, %u), \
            a(%u, %u, %u), bpp:%u!\n",
            var->red.offset, var->red.length, var->red.msb_right, var->green.offset, var->green.length,
            var->green.msb_right, var->blue.offset, var->blue.length, var->blue.msb_right, var->transp.offset,
            var->transp.length, var->transp.msb_right, var->bits_per_pixel);
        return -EINVAL;
    }

    /* the format must also be enabled in this layer's hardware capability table */
    if (!g_drv_ops.capability[layer_id].bColFmt[fmt]) {
        hifb_error("Unsupported PIXEL FORMAT!\n");
        return -EINVAL;
    }

    return HI_SUCCESS;
}
3760 #endif
3761
3762 /*
3763 * Name : hifb_buf_map
3764 * Desc : Memory mapping, which generates virtual addresses based on physical address mappings.
3765 */
3766 hi_void *hifb_buf_map(hi_u64 phy_addr, hi_u32 size)
3767 {
3768 return cmpi_remap_nocache(phy_addr, size);
3769 }
3770
/* Release a virtual mapping previously created by hifb_buf_map(). */
hi_void hifb_buf_ummap(hi_void *viraddr)
{
    cmpi_unmap(viraddr);
    return;
}
3776
3777 #ifndef __HuaweiLite__
3778 static hi_s32 hifb_check_output(struct fb_var_screeninfo *var, struct fb_info *info)
3779 {
3780 hifb_par *par = HI_NULL;
3781 hi_u32 layer_id;
3782 par = (hifb_par *)info->par;
3783 layer_id = par->layer_id;
3784 /* 2 is be divided with no remainder */
3785 if (((var->yres % 2) != 0) && (hifb_is_interlace(par))) {
3786 hifb_error("yres(%d) of layer_id %d should be even when vodev output is interlace\n", var->yres,
3787 layer_id);
3788 hi_unused(layer_id);
3789 return HI_FAILURE;
3790 }
3791 return HI_SUCCESS;
3792 }
3793
3794 static hi_s32 hifb_check_virtual_resolution(struct fb_var_screeninfo *var, struct fb_info *info)
3795 {
3796 hifb_par *par = HI_NULL;
3797 hi_u32 layer_id;
3798 par = (hifb_par *)info->par;
3799 layer_id = par->layer_id;
3800
3801 if (var->xres < hifb_min_width(layer_id)) {
3802 hifb_error("xres(%d) of layer_id %d can't be less than min_width(%d)\n", var->xres,
3803 layer_id, hifb_min_width(layer_id));
3804 return HI_FAILURE;
3805 }
3806 if (var->yres < hifb_min_height(layer_id)) {
3807 hifb_error("yres(%d) of layer_id %d can't be less than min_height(%d)\n", var->yres,
3808 layer_id, hifb_min_height(layer_id));
3809 return HI_FAILURE;
3810 }
3811
3812 if (var->xres > var->xres_virtual) {
3813 hifb_error("xres(%d) of layer_id %d should be less than xres_virtual(%d)\n", var->xres,
3814 layer_id, var->xres_virtual);
3815 return HI_FAILURE;
3816 }
3817 if (var->yres > var->yres_virtual) {
3818 hifb_error("yres(%d) of layer_id %d should be less than yres_virtual(%d)\n", var->yres,
3819 layer_id, var->yres_virtual);
3820 return HI_FAILURE;
3821 }
3822 return HI_SUCCESS;
3823 }
3824
3825 static hi_s32 hifb_check_offset(struct fb_var_screeninfo *var, struct fb_info *info)
3826 {
3827 hifb_par *par = HI_NULL;
3828 hi_u32 layer_id;
3829 par = (hifb_par *)info->par;
3830 layer_id = par->layer_id;
3831
3832 if ((var->xoffset + var->xres > var->xres_virtual) ||
3833 (var->xoffset > var->xres_virtual)) {
3834 hifb_error("the sum of layer%d's xoffset(%d) and xres(%d) should be less than xres_virtual(%d)\n",
3835 layer_id, var->xoffset, var->xres, var->xres_virtual);
3836 return -EINVAL;
3837 }
3838
3839 if ((var->yoffset + var->yres > var->yres_virtual) ||
3840 (var->yoffset > var->yres_virtual)) {
3841 hifb_error("the sum of layer%d's yoffset(%d) and yres(%d) should be less than yres_virtual(%d)\n",
3842 layer_id, var->yoffset, var->yres, var->yres_virtual);
3843 return -EINVAL;
3844 }
3845 hi_unused(layer_id);
3846 return HI_SUCCESS;
3847 }
3848
3849 static hi_s32 hifb_check_total(struct fb_var_screeninfo *var, struct fb_info *info)
3850 {
3851 hi_u32 hor_total;
3852 hi_u32 ver_total;
3853 hifb_par *par = HI_NULL;
3854 hi_u32 layer_id;
3855 par = (hifb_par *)info->par;
3856 layer_id = par->layer_id;
3857
3858 hor_total = var->left_margin + var->xres + var->right_margin + var->hsync_len;
3859 if (hor_total == 0) {
3860 hifb_error("the sum of layer%d's left_margin(%d),xres(%d),right_margin(%d),hsync_len(%d) can't be 0\n",
3861 layer_id, var->left_margin, var->xres, var->right_margin, var->hsync_len);
3862 return HI_FAILURE;
3863 }
3864 ver_total = var->yres + var->lower_margin + var->vsync_len + var->upper_margin;
3865 if (ver_total == 0) {
3866 hifb_error("the sum of layer%d's left_margin(%d),xres(%d),right_margin(%d),hsync_len(%d) can't be 0\n",
3867 layer_id, var->upper_margin, var->yres, var->lower_margin, var->vsync_len);
3868 return HI_FAILURE;
3869 }
3870 hi_unused(layer_id);
3871 return HI_SUCCESS;
3872 }
3873
3874 /*
3875 * Function : hifb_check_var
3876 * Description : check if the parameter for framebuffer is supported.
3877 * Return : return 0, if the parameter is supported, otherwise,return error
3878 */
3879 static hi_s32 hifb_check_var(struct fb_var_screeninfo *var, struct fb_info *info)
3880 {
3881 hifb_par *par = HI_NULL;
3882 hi_u32 expected_len;
3883 hi_u32 layer_id;
3884
3885 if ((info == HI_NULL) || (var == HI_NULL) || (info->par == HI_NULL)) {
3886 return HI_FAILURE;
3887 }
3888
3889 par = (hifb_par *)info->par;
3890 layer_id = par->layer_id;
3891
3892 if (is_cursor_layer(par->layer_id) && is_soft_cursor()) {
3893 hifb_error("cursor layer doesn't support this operation!\n");
3894 return HI_FAILURE;
3895 }
3896
3897 if (hifb_check_fmt(var, info) != HI_SUCCESS) {
3898 return HI_FAILURE;
3899 }
3900
3901 /*
3902 * For interlaced output check
3903 * the actual height of the layer must be an even number
3904 * Progressive output without this limit
3905 */
3906 if (hifb_check_output(var, info) != HI_SUCCESS) {
3907 return HI_FAILURE;
3908 }
3909 /*
3910 * for virtual resolution check
3911 * virtual resolution can't be less than minimal resolution
3912 */
3913 if (hifb_check_virtual_resolution(var, info) != HI_SUCCESS) {
3914 return HI_FAILURE;
3915 }
3916 /* check if the offset is valid */
3917 if (hifb_check_offset(var, info) != HI_SUCCESS) {
3918 return HI_FAILURE;
3919 }
3920 /*
3921 * for hor_total and ver_total check
3922 * The FB driver in the Linux kernel will use u32HTotal and u32VTotal as divisors
3923 * so they cannot be 0
3924 */
3925 if (hifb_check_total(var, info) != HI_SUCCESS) {
3926 return HI_FAILURE;
3927 }
3928
3929 hifb_dbg_info("xres:%d, yres:%d, xres_virtual:%d, yres_virtual:%d\n", var->xres, var->yres,
3930 var->xres_virtual, var->yres_virtual);
3931 /* for mem len check */
3932 expected_len = var->yres_virtual * ((((var->xres_virtual * var->bits_per_pixel) >> 3) + /* 8 bit (2^3) */
3933 HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT));
3934
3935 if (info->fix.smem_len && (expected_len > info->fix.smem_len)) {
3936 hifb_error("layer %d don't has enough mem! expected: %d KBytes, real:%d KBytes\n", layer_id,
3937 expected_len / 1024, info->fix.smem_len / 1024); /* 1024 (2^10) */
3938 return -EINVAL;
3939 }
3940
3941 hi_unused(layer_id);
3942 return HI_SUCCESS;
3943 }
3944 #endif
3945
/*
 * Derive the two display-buffer physical addresses from the layer's video
 * memory: with room for only one buffer both slots alias the same address;
 * with room for two they split the memory into front/back halves.
 */
static hi_void hifb_set_dispbufinfo(hi_u32 layer_id)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)(info->par);
    hifb_refresh_info *refresh_info = &par->refresh_info;
    hifb_dispbuf_info *disp_buf_info = &refresh_info->disp_buf_info;

    /*
     * there's a limit from hardware that screen buf should be 16 bytes aligned,maybe it's proper
     * to get this info from drv adapter
     */
    hi_u32 buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
    if (hifb_get_smem_len(info) == 0) {
        /* no video memory attached yet; leave addresses untouched */
        return;
    } else if ((hifb_get_smem_len(info) >= buf_size) && (hifb_get_smem_len(info) < buf_size * 2)) { /* 2 size expand */
        /* only one buffer fits: both slots point at the same memory */
        disp_buf_info->phy_addr[0] = hifb_get_smem_start(info);
        disp_buf_info->phy_addr[1] = hifb_get_smem_start(info);
    } else if (hifb_get_smem_len(info) >= buf_size * 2) { /* 2 size expand */
        /* two buffers fit: second slot starts one aligned buffer further */
        disp_buf_info->phy_addr[0] = hifb_get_smem_start(info);
        disp_buf_info->phy_addr[1] = hifb_get_smem_start(info) + buf_size;
    }
    return;
}
3969
3970 /*
3971 * Name : hifb_get_dcmp_framesize
3972 * Desc : Read two numbers from the virtual address.
3973 */
3974 static hi_void hifb_get_dcmp_framesize(HIFB_BUFFER_S *dst, hi_u32 *ar_size, hi_u32 *gb_size,
3975 const hi_u8 *screen_base, hi_u32 offset)
3976 {
3977 hi_unused(dst);
3978 *ar_size = *(hi_u64 *)screen_base;
3979 /* Screen_base cannot be converted to any type except hi_u8* */
3980 *gb_size = *(hi_u64 *)(screen_base + offset);
3981 }
3982
/*
 * Build the destination canvas for a single-buffer refresh.
 * For 90/270-degree rotation the canvas is transposed and targets the
 * dedicated rotation buffer; otherwise it targets the display buffer
 * currently selected for the interrupt handler (plus the compression
 * half-buffer when compression is enabled).
 * Returns HI_FAILURE when rotation is requested with an unsupported format.
 */
static hi_s32 hifb_refresh_1buf_prepare_dst(HIFB_BUFFER_S *dst_rect, hifb_par *par,
    hifb_display_info *display_info, hifb_dispbuf_info *display_buf_info, struct fb_info *info)
{
    hi_u32 bytes_per_pixel = 2; /* default assumes a 16-bit format */
    /* one display buffer, rounded up to the 16-byte hardware alignment */
    hi_u32 buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);

    dst_rect->stCanvas.enFmt = par->color_format;
    dst_rect->stCanvas.u32Height = display_info->display_height;
    dst_rect->stCanvas.u32Width = display_info->display_width;
    dst_rect->stCanvas.u32Pitch = hifb_get_line_length(info);

    if (display_info->rotate_mode == HIFB_ROTATE_90 || display_info->rotate_mode == HIFB_ROTATE_270) {
        /* 90/270 rotation transposes the canvas dimensions */
        dst_rect->stCanvas.u32Height = display_info->display_width;
        dst_rect->stCanvas.u32Width = display_info->display_height;
        if (dst_rect->stCanvas.enFmt == HIFB_FMT_ARGB1555 ||
            dst_rect->stCanvas.enFmt == HIFB_FMT_ARGB4444) {
            bytes_per_pixel = 2; /* 2 bytes per pixel */
        } else if (dst_rect->stCanvas.enFmt == HIFB_FMT_ARGB8888) {
            bytes_per_pixel = 4; /* 4 bytes per pixel */
        }
        /* re-align the pitch for the transposed width */
        dst_rect->stCanvas.u32Pitch = ((bytes_per_pixel * dst_rect->stCanvas.u32Width + HIFB_ALIGN - 1) /
                                       HIFB_ALIGN) * HIFB_ALIGN;

        /* rotation is only implemented for the three ARGB formats */
        if ((par->color_format != HIFB_FMT_ARGB4444) && (par->color_format != HIFB_FMT_ARGB1555) &&
            (par->color_format != HIFB_FMT_ARGB8888)) {
            hifb_error("The rotate mode only support HI_FB_FORMAT_ARGB4444,HI_FB_FORMAT_ARGB1555,"\
                       "HI_FB_FORMAT_ARGB8888 which is %d\n!\n", par->color_format);
            return HI_FAILURE;
        }

        dst_rect->stCanvas.u64PhyAddr = par->rotate_vb;
    } else {
        /*
         * The target address is selected as the display buf configured for the interrupt,
         * which is indicated by index for interrupt.
         */
        dst_rect->stCanvas.u64PhyAddr = display_buf_info->phy_addr[display_buf_info->index_for_int];
        /* If compression is not open, you do not have to configure a compressed address for this refresh */
        if (par->compress_info.compress_open) {
            /* Use half of the video memory instead of another buffer ,2 is size */
            dst_rect->stCanvas.u64GBPhyAddr = dst_rect->stCanvas.u64PhyAddr + buf_size / 2;
        }
    }
    return HI_SUCCESS;
}
4028
/*
 * Program the screen display address for a single-buffer refresh.
 * If the OSD hardware is not already scanning out phy_addr[0], flag the
 * change under the layer spinlock so the interrupt handler picks up the
 * new address (HIFB_LAYER_PARAMODIFY_DISPLAYADDR).
 */
static hi_void hifb_refresh_1buf_prepare_addr(hifb_refresh_info *refresh_info, hi_u32 layer_id,
    hifb_osd_data *osd_data, struct fb_info *info, HIFB_BUFFER_S *dst_rect)
{
    unsigned long lock_flag;
    hifb_par *par = (hifb_par *)info->par;
    hifb_dispbuf_info *display_buf_info = &refresh_info->disp_buf_info;
    hi_u32 buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) &
                      (~HIFB_ALIGNMENT);
    /* 1buf does not support compression, close compression configuration */
    refresh_info->disp_buf_info.compress = HI_FALSE;

    /* query the address the OSD hardware is currently displaying */
    g_drv_ops.hifb_drv_get_osd_data(layer_id, osd_data);

    if (osd_data->buffer_phy_addr != par->refresh_info.disp_buf_info.phy_addr[0] &&
        display_buf_info->phy_addr[0]) {
        /* modifying + param_modify_mask are read by the interrupt handler; guard with the layer lock */
        hifb_spin_lock_irqsave(&par->lock, lock_flag);
        par->modifying = HI_TRUE;
        /* Notify the interrupt handler to modify the display address. */
        par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_DISPLAYADDR;

        /*
         * The buf address in the display information is configured to
         * the screen display address for refreshing the screen.
         */
        refresh_info->screen_addr = display_buf_info->phy_addr[display_buf_info->index_for_int];
        /* If compression is not open, you do not have to configure a compressed address for this refresh */
        if (par->compress_info.compress_open) {
            /* Use half of the video memory instead of another buffer */
            refresh_info->gb_screen_addr = refresh_info->screen_addr + buf_size / 2; /* 2 part */
        }
        /* NOTE(review): stride assumes 4 bytes/pixel (ARGB8888) for all formats — confirm intended */
        display_buf_info->stride = 4 * dst_rect->stCanvas.u32Width; /* 4 for argb 8888 */

        par->modifying = HI_FALSE;
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
    }
}
4065
4066 static hi_void hifb_refresh_1buf_prepare_opt(hifb_blit_opt *blit_opt, hifb_par *par)
4067 {
4068 blit_opt->call_back = HI_TRUE;
4069 /* Non-blocking mode */
4070 blit_opt->block = HI_FALSE;
4071
4072 if (par->display_info.antiflicker_mode == HIFB_ANTIFLICKER_TDE) {
4073 blit_opt->antiflicker_level = HIFB_LAYER_ANTIFLICKER_NONE;
4074 }
4075
4076 blit_opt->region_deflicker = HI_TRUE;
4077
4078 blit_opt->param = &(par->layer_id);
4079
4080 if (par->display_info.rotate_mode == HIFB_ROTATE_180) {
4081 blit_opt->mirror_mode = HIFB_MIRROR_BOTH;
4082 } else {
4083 blit_opt->mirror_mode = par->display_info.mirror_mode;
4084 }
4085 }
4086
4087 static hi_void hifb_refresh_1buf_prepare_global_refresh(HIFB_BUFFER_S *canvas_buf, HIFB_BUFFER_S *dst_rect,
4088 hifb_blit_opt *blit_opt)
4089 {
4090 if (canvas_buf->stCanvas.u32Height != dst_rect->stCanvas.u32Height ||
4091 canvas_buf->stCanvas.u32Width != dst_rect->stCanvas.u32Width) {
4092 /* Rotate 0 or 180 degrees, zoomed, then global refresh */
4093 blit_opt->scale = HI_TRUE;
4094
4095 dst_rect->UpdateRect.x = 0;
4096 dst_rect->UpdateRect.y = 0;
4097 dst_rect->UpdateRect.w = dst_rect->stCanvas.u32Width;
4098 dst_rect->UpdateRect.h = dst_rect->stCanvas.u32Height;
4099 } else {
4100 /* Rotate 0 or 180 degrees, no zoomed, then partial refresh */
4101 dst_rect->UpdateRect = canvas_buf->UpdateRect;
4102 }
4103 }
4104
/*
 * Record the compression update rectangle for this refresh (under the layer
 * lock) and set the blit's compress flag to match the layer's compress state.
 */
static hi_void hifb_refresh_1buf_prepare_compress(volatile hifb_compress_info *compress_info,
    HIFB_BUFFER_S *dst_rect, hifb_par *par, hifb_blit_opt *blit_opt)
{
    hi_s32 ret;
    unsigned long lock_flag;
    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    if (compress_info->compress_open) {
        /*
         * This is just updating the refresh area. The refresh flag is first set to FALSE to
         * indicate that the TDE has not been moved yet, and is set to TRUE in the TDE callback.
         */
        ret = memcpy_s((void *)&compress_info->update_rect, sizeof(HIFB_RECT), &dst_rect->UpdateRect,
                       sizeof(HIFB_RECT));
        /* on copy failure the macro unlocks and returns without touching the flags */
        hifb_unlock_unequal_eok_return_void(ret, &par->lock, lock_flag);
        compress_info->update_finished = HI_FALSE;
        blit_opt->compress = HI_TRUE;
    } else {
        blit_opt->compress = HI_FALSE;
    }

    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
}
4127
4128 static hi_s32 hifb_refresh_1buf_blit(HIFB_BUFFER_S *canvas_buf, HIFB_BUFFER_S *dst_rect, hifb_blit_opt *blit_opt,
4129 hifb_dispbuf_info *display_buf_info)
4130 {
4131 hi_tde_export_func *tde_export_func = HI_NULL;
4132
4133 tde_export_func = func_entry(hi_tde_export_func, HI_ID_TDE);
4134 if ((tde_export_func == HI_NULL) || (tde_export_func->drv_tde_module_begin_job == HI_NULL) ||
4135 (tde_export_func->drv_tde_module_rotate == HI_NULL) ||
4136 (tde_export_func->drv_tde_module_end_job == HI_NULL)) {
4137 hifb_error("can't get TDE export function, it may be TDE module has not been inserted!\n");
4138 return HI_FAILURE;
4139 }
4140 /*
4141 * The user buf is used as the source by blit, and the user buf is moved to the display buf with
4142 * the target set (with the target showing the buff address) as the target.
4143 */
4144 if (hifb_drv_blit(canvas_buf, dst_rect, blit_opt, HI_TRUE, &display_buf_info->refresh_handle) < 0) {
4145 return HI_FAILURE;
4146 }
4147 return HI_SUCCESS;
4148 }
4149
/*
 * Refresh the layer in single-buffer mode: build the destination canvas,
 * program the display address, prepare blit options / compression state,
 * submit the TDE blit and record the user buffer for later use.
 * This function has a lock operation, so you can't call it if the caller
 * already holds the layer lock.
 */
static hi_s32 hifb_refresh_1buf(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)info->par;
    hifb_display_info *display_info = &par->display_info;
    hifb_refresh_info *refresh_info = &par->refresh_info;
    hifb_dispbuf_info *display_buf_info = &refresh_info->disp_buf_info;
    volatile hifb_compress_info *compress_info = &par->compress_info;
    hifb_osd_data osd_data;
    hi_s32 ret;
    hifb_blit_opt blit_opt = {0};
    HIFB_BUFFER_S dst_rect;

    if (canvas_buf == HI_NULL) {
        return HI_FAILURE;
    }

    /* destination canvas (rotation buffer or interrupt-selected display buffer) */
    ret = hifb_refresh_1buf_prepare_dst(&dst_rect, par, display_info, display_buf_info, info);
    if (ret != HI_SUCCESS) {
        return ret;
    }

    /* flag the new display address for the interrupt handler if it changed */
    hifb_refresh_1buf_prepare_addr(refresh_info, layer_id, &osd_data, info, &dst_rect);

    hifb_refresh_1buf_prepare_opt(&blit_opt, par);

    /* global refresh when scaling, partial refresh otherwise */
    hifb_refresh_1buf_prepare_global_refresh(canvas_buf, &dst_rect, &blit_opt);

    hifb_refresh_1buf_prepare_compress(compress_info, &dst_rect, par, &blit_opt);

    ret = hifb_refresh_1buf_blit(canvas_buf, &dst_rect, &blit_opt, display_buf_info);
    if (ret != HI_SUCCESS) {
        return ret;
    }

    /* remember the user buffer descriptor for subsequent operations */
    ret = memcpy_s(&(refresh_info->user_buffer), sizeof(HIFB_BUFFER_S), canvas_buf, sizeof(HIFB_BUFFER_S));
    hifb_unequal_eok_return(ret);
    return HI_SUCCESS;
}
4190
4191 /* unit rect */
4192 static void hifb_unite_rect(HIFB_RECT *dst_rect, HIFB_RECT *src_rect)
4193 {
4194 hi_s32 ret;
4195 HIFB_RECT rect;
4196 rect.x = (dst_rect->x < src_rect->x) ? dst_rect->x : src_rect->x;
4197 rect.y = (dst_rect->y < src_rect->y) ? dst_rect->y : src_rect->y;
4198 rect.w = ((dst_rect->x + dst_rect->w) > (src_rect->x + src_rect->w)) ?
4199 (dst_rect->x + dst_rect->w - rect.x) : (src_rect->x + src_rect->w - rect.x);
4200 rect.h = ((dst_rect->y + dst_rect->h) > (src_rect->y + src_rect->h)) ?
4201 (dst_rect->y + dst_rect->h - rect.y) : (src_rect->y + src_rect->h - rect.y);
4202 ret = memcpy_s(dst_rect, sizeof(HIFB_RECT), &rect, sizeof(HIFB_RECT));
4203 hifb_unequal_eok_return_void(ret);
4204 return;
4205 }
4206
4207 /* check these two rectangle cover each other */
4208 static hi_bool hifb_iscontain(HIFB_RECT parent_rect, HIFB_RECT child_rect)
4209 {
4210 HIFB_POINT_S point;
4211 point.s32XPos = child_rect.x;
4212 point.s32YPos = child_rect.y;
4213 if ((point.s32XPos < parent_rect.x) || (point.s32XPos > (parent_rect.x + parent_rect.w)) ||
4214 (point.s32YPos < parent_rect.y) || (point.s32YPos > (parent_rect.y + parent_rect.h))) {
4215 return HI_FALSE;
4216 }
4217 point.s32XPos = child_rect.x + child_rect.w;
4218 point.s32YPos = child_rect.y + child_rect.h;
4219 if ((point.s32XPos < parent_rect.x) || (point.s32XPos > (parent_rect.x + parent_rect.w)) ||
4220 (point.s32YPos < parent_rect.y) || (point.s32YPos > (parent_rect.y + parent_rect.h))) {
4221 return HI_FALSE;
4222 }
4223 return HI_TRUE;
4224 }
4225
/*
 * Build the background (target) canvas for a double-buffer refresh.
 * For 90/270 rotation the canvas is transposed, requires an ARGB format and
 * a loaded TDE module, and targets the rotation buffer; otherwise it targets
 * the idle display buffer (plus the compression half-buffer when enabled).
 * Also clears the pending-flip state from the previous refresh.
 */
static hi_s32 refresh_2buf_prepare_back_buf(struct fb_info *info, HIFB_BUFFER_S *back_buf, hi_u32 *bytes_per_pixel)
{
    hifb_par *par = (hifb_par *)info->par;
    hifb_refresh_info *refresh_info = &par->refresh_info;
    hifb_display_info *display_info = &par->display_info;
    volatile hifb_compress_info *compress_info = &par->compress_info;
    hi_tde_export_func *tde_export_func = HI_NULL;
    hi_u32 buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
    if (refresh_info->disp_buf_info.need_flip == HI_TRUE) {
        /* the previous frame was never flipped to the screen; warn but continue */
        hifb_error("Layer(%d) refresh again before display another buf, maybe refresh too fast !\n", par->layer_id);
    }

    refresh_info->disp_buf_info.need_flip = HI_FALSE;
    refresh_info->disp_buf_info.refresh_handle = 0;

    back_buf->stCanvas.enFmt = par->color_format;
    back_buf->stCanvas.u32Height = display_info->display_height;
    back_buf->stCanvas.u32Width = display_info->display_width;
    back_buf->stCanvas.u32Pitch = hifb_get_line_length(info);

    if (display_info->rotate_mode == HIFB_ROTATE_90 || display_info->rotate_mode == HIFB_ROTATE_270) {
        /* 90/270 rotation transposes the canvas dimensions */
        back_buf->stCanvas.u32Width = display_info->display_height;
        back_buf->stCanvas.u32Height = display_info->display_width;
        if (back_buf->stCanvas.enFmt == HIFB_FMT_ARGB1555 || back_buf->stCanvas.enFmt == HIFB_FMT_ARGB4444) {
            *bytes_per_pixel = 2; /* 2 bytes per pixel */
        } else if (back_buf->stCanvas.enFmt == HIFB_FMT_ARGB8888) {
            *bytes_per_pixel = 4; /* 4 bytes per pixel */
        }
        /* re-align the pitch for the transposed width */
        back_buf->stCanvas.u32Pitch = ((*bytes_per_pixel * back_buf->stCanvas.u32Width + HIFB_ALIGN - 1) / HIFB_ALIGN) *
                                      HIFB_ALIGN;

        /* rotation is only implemented for the three ARGB formats */
        if ((par->color_format != HIFB_FMT_ARGB4444) && (par->color_format != HIFB_FMT_ARGB1555) &&
            (par->color_format != HIFB_FMT_ARGB8888)) {
            hifb_error("The rotate mode only support HIFB_FMT_ARGB4444,HIFB_FMT_ARGB1555," \
                       "HIFB_FMT_ARGB8888 which is %d\n!\n", par->color_format);
            return HI_FAILURE;
        }

        /* rotation is executed by the TDE module; bail out if it is not loaded */
        tde_export_func = func_entry(hi_tde_export_func, HI_ID_TDE);
        if ((tde_export_func == HI_NULL) || (tde_export_func->drv_tde_module_begin_job == HI_NULL) ||
            (tde_export_func->drv_tde_module_rotate == HI_NULL) ||
            (tde_export_func->drv_tde_module_end_job == HI_NULL)) {
            hifb_error("can't get TDE export function, it may be TDE module has not been inserted!\n");
            return HI_FAILURE;
        }

        back_buf->stCanvas.u64PhyAddr = par->rotate_vb;
    } else {
        /* Set the background buf as the target, get the free buf to the background buf */
        hifb_get_idledispbuf(par, (hi_u64*)(&back_buf->stCanvas.u64PhyAddr));
        if (compress_info->compress_open) {
            /*
             * Use half of the video memory instead of another buffer. Currently only 59A on the himpp
             * platform needs to be configured with this address. Other chips do not need to be configured.
             */
            back_buf->stCanvas.u64GBPhyAddr = back_buf->stCanvas.u64PhyAddr + buf_size / 2; /* 2 alg data */
        }
    }
    return HI_SUCCESS;
}
4286
/*
 * Compute the update rectangle in destination (back buffer) coordinates.
 * When the canvas and back buffer differ in size the source rect is scaled
 * through the TDE helper and the blit is marked as scaling; otherwise the
 * caller's update rect is used as-is.
 */
static hi_void refresh_2buf_get_new_rect(HIFB_BUFFER_S *canvas_buf, HIFB_BUFFER_S *back_buf,
    HIFB_RECT *new_union_rect, hifb_blit_opt *blit_opt)
{
    hi_tde_rect src_rect = {0};
    hi_tde_rect dst_rect = {0};
    hi_tde_rect inner_src_rect = {0};
    hi_tde_rect inner_dst_rect = {0};

    if (canvas_buf->stCanvas.u32Height != back_buf->stCanvas.u32Height ||
        canvas_buf->stCanvas.u32Width != back_buf->stCanvas.u32Width) {
        /* map the user's update rect from canvas space into back-buffer space */
        src_rect.width = canvas_buf->stCanvas.u32Width;
        src_rect.height = canvas_buf->stCanvas.u32Height;
        dst_rect.width = back_buf->stCanvas.u32Width;
        dst_rect.height = back_buf->stCanvas.u32Height;
        inner_src_rect.pos_x = canvas_buf->UpdateRect.x;
        inner_src_rect.pos_y = canvas_buf->UpdateRect.y;
        inner_src_rect.width = (hi_u32)canvas_buf->UpdateRect.w;
        inner_src_rect.height = (hi_u32)canvas_buf->UpdateRect.h;
        tde_cal_scale_rect_hifb(&src_rect, &dst_rect, &inner_src_rect, &inner_dst_rect);

        new_union_rect->x = inner_dst_rect.pos_x;
        new_union_rect->y = inner_dst_rect.pos_y;
        new_union_rect->w = (hi_s32)inner_dst_rect.width;
        new_union_rect->h = (hi_s32)inner_dst_rect.height;
        blit_opt->scale = HI_TRUE;
    } else {
        /* same size: the update rect carries over unchanged */
        *new_union_rect = canvas_buf->UpdateRect;
    }
    return;
}
4317
4318 static hi_void refresh_2buf_blit_init_buf(hifb_display_info *display_info, HIFB_BUFFER_S *fore_buf,
4319 HIFB_BUFFER_S *back_buf, hi_u32 bytes_per_pixel)
4320 {
4321 fore_buf->stCanvas.u32Width = display_info->display_width;
4322 fore_buf->stCanvas.u32Height = display_info->display_height;
4323 fore_buf->stCanvas.u32Pitch = ((bytes_per_pixel * fore_buf->stCanvas.u32Width + (HIFB_ALIGN - 1)) / HIFB_ALIGN) *
4324 HIFB_ALIGN;
4325 fore_buf->UpdateRect.x = 0;
4326 fore_buf->UpdateRect.y = 0;
4327 fore_buf->UpdateRect.w = fore_buf->stCanvas.u32Width;
4328 fore_buf->UpdateRect.h = fore_buf->stCanvas.u32Height;
4329
4330 back_buf->UpdateRect.x = 0;
4331 back_buf->UpdateRect.y = 0;
4332 back_buf->UpdateRect.w = back_buf->stCanvas.u32Width;
4333 back_buf->UpdateRect.h = back_buf->stCanvas.u32Height;
4334 return;
4335 }
4336
/*
 * Synchronise the idle (back) buffer with the buffer currently on screen.
 * After a flip, the newly idle buffer is stale: copy (or rotate) the working
 * buffer's accumulated union rect into it, then clear the union rect and the
 * fliped flag. Skipped when no flip happened yet or the OSD is not yet
 * scanning the working buffer (unless compression is on).
 */
static hi_s32 refresh_2buf_blit(hifb_par *par, hi_u64 osd_buf_addr, HIFB_BUFFER_S *back_buf,
    HIFB_RECT *new_union_rect, hi_u32 bytes_per_pixel)
{
    hi_s32 ret;
    hifb_blit_opt tmp = {0};
    hifb_rotate_opt rot_tmp = {0};
    hifb_refresh_info *refresh_info = &par->refresh_info;
    hifb_display_info *display_info = &par->display_info;
    HIFB_BUFFER_S fore_buf = {0};
    hi_u64 work_buf_addr = 0;

    hifb_get_workdispbuf(par, &work_buf_addr);

    /* nothing to synchronise before the first flip, or while the OSD lags behind */
    if ((refresh_info->disp_buf_info.fliped == HI_FALSE) ||
        ((osd_buf_addr != work_buf_addr) && (par->compress_info.compress_open == HI_FALSE))) {
        return HI_SUCCESS;
    }

    /* Background as a target pointing to an idle buf */
    ret = memcpy_s(&fore_buf, sizeof(HIFB_BUFFER_S), back_buf, sizeof(HIFB_BUFFER_S));
    hifb_unequal_eok_return(ret);
    /* Foreground as a source points to the buf at work */
    hifb_get_workdispbuf(par, (hi_u64*)(&fore_buf.stCanvas.u64PhyAddr));

    /* The union rect is also used as an update area for the foreground and background. */
    ret = memcpy_s(&fore_buf.UpdateRect, sizeof(HIFB_RECT), &refresh_info->disp_buf_info.union_rect, sizeof(HIFB_RECT));
    hifb_unequal_eok_return(ret);
    ret = memcpy_s(&back_buf->UpdateRect, sizeof(HIFB_RECT), &fore_buf.UpdateRect, sizeof(HIFB_RECT));
    hifb_unequal_eok_return(ret);
    (hi_void)memset_s(&tmp, sizeof(hifb_blit_opt), 0x0, sizeof(hifb_blit_opt));
    /* blit with union rect */
    if ((display_info->rotate_mode != HIFB_ROTATE_90) &&
        (display_info->rotate_mode != HIFB_ROTATE_270)) {
        /* skip the copy when the new update rect fully covers the stale union rect */
        if (hifb_iscontain(*new_union_rect, refresh_info->disp_buf_info.union_rect) == HI_FALSE) {
            if (hifb_drv_blit(&fore_buf, back_buf, &tmp, HI_TRUE, HI_NULL) < 0) {
                hifb_error("blit err!\n");
                return HI_FAILURE;
            }
        }
    } else {
        /* the working buffer is rotated on screen; rotate back in the opposite direction */
        if (display_info->rotate_mode == HIFB_ROTATE_90) {
            rot_tmp.rotate_mode = HIFB_ROTATE_270;
        } else {
            rot_tmp.rotate_mode = HIFB_ROTATE_90;
        }
        /* init buf */
        refresh_2buf_blit_init_buf(display_info, &fore_buf, back_buf, bytes_per_pixel);
        if (hifb_drv_rotate(&fore_buf, back_buf, &rot_tmp, HI_TRUE) < 0) {
            hifb_error("blit err!\n");
            return HI_FAILURE;
        }
    }

    /* clear union rect */
    (hi_void)memset_s(&(refresh_info->disp_buf_info.union_rect), sizeof(HIFB_RECT), 0, sizeof(HIFB_RECT));

    refresh_info->disp_buf_info.fliped = HI_FALSE;
    return HI_SUCCESS;
}
4396
4397 static hi_void refresh_2buf_prepare_opt(hifb_par *par, HIFB_BUFFER_S *canvas_buf, HIFB_BUFFER_S *back_buf,
4398 hi_u32 bytes_per_pixel, hifb_blit_opt *blit_opt)
4399 {
4400 hifb_display_info *display_info = &par->display_info;
4401 blit_opt->call_back = HI_TRUE;
4402 blit_opt->param = &(par->layer_id);
4403
4404 if (display_info->antiflicker_mode == HIFB_ANTIFLICKER_TDE) {
4405 blit_opt->antiflicker_level = HIFB_LAYER_ANTIFLICKER_NONE;
4406 }
4407
4408 if (blit_opt->scale == HI_TRUE) {
4409 /* actual area, calculate by TDE, here is just use for let pass the test */
4410 back_buf->UpdateRect.x = 0;
4411 back_buf->UpdateRect.y = 0;
4412 back_buf->UpdateRect.w = back_buf->stCanvas.u32Width;
4413 back_buf->UpdateRect.h = back_buf->stCanvas.u32Height;
4414 } else {
4415 back_buf->UpdateRect = canvas_buf->UpdateRect;
4416 }
4417
4418 if (par->display_info.rotate_mode == HIFB_ROTATE_90 ||
4419 par->display_info.rotate_mode == HIFB_ROTATE_270) {
4420 back_buf->stCanvas.u32Height = par->display_info.display_width;
4421 back_buf->stCanvas.u32Width = par->display_info.display_height;
4422 back_buf->stCanvas.u32Pitch = ((bytes_per_pixel * back_buf->stCanvas.u32Width +
4423 (HIFB_ALIGN - 1)) / HIFB_ALIGN) * HIFB_ALIGN;
4424 back_buf->UpdateRect = canvas_buf->UpdateRect;
4425 }
4426
4427 blit_opt->region_deflicker = HI_TRUE;
4428
4429 if (display_info->rotate_mode == HIFB_ROTATE_180) {
4430 blit_opt->mirror_mode = HIFB_MIRROR_BOTH;
4431 } else {
4432 blit_opt->mirror_mode = display_info->mirror_mode;
4433 }
4434 return;
4435 }
4436
/*
 * Record the compression update rectangle for this refresh (under the layer
 * lock) and set the blit's compress flag to match the layer's compress state.
 */
static hi_void refresh_2buf_prepare_compress(hifb_par *par, HIFB_BUFFER_S *back_buf, hifb_blit_opt *blit_opt)
{
    volatile hifb_compress_info *compress_info = &par->compress_info;
    unsigned long lock_flag;
    hi_s32 ret;

    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    if (par->compress_info.compress_open) {
        /*
         * This is just updating the refresh area. The refresh flag is first set to FALSE
         * to indicate that the TDE has not been moved yet, and is set to TRUE in the TDE callback.
         */
        ret = memcpy_s((void *)&par->compress_info.update_rect, sizeof(HIFB_RECT), &back_buf->UpdateRect,
                       sizeof(HIFB_RECT));
        /* on copy failure the macro unlocks and returns without touching the flags */
        hifb_unlock_unequal_eok_return_void(ret, &par->lock, lock_flag);
        compress_info->update_finished = HI_FALSE;
        blit_opt->compress = HI_TRUE;
    } else {
        blit_opt->compress = HI_FALSE;
    }

    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
    return;
}
4461
4462 static inline hi_void refresh_2buf_update_rect(hifb_refresh_info *refresh_info, HIFB_RECT *new_union_rect)
4463 {
4464 hi_s32 ret;
4465 if ((refresh_info->disp_buf_info.union_rect.w == 0) || (refresh_info->disp_buf_info.union_rect.h == 0)) {
4466 ret = memcpy_s(&refresh_info->disp_buf_info.union_rect, sizeof(HIFB_RECT), new_union_rect, sizeof(HIFB_RECT));
4467 hifb_unequal_eok_return_void(ret);
4468 } else {
4469 hifb_unite_rect(&refresh_info->disp_buf_info.union_rect, new_union_rect);
4470 }
4471 }
4472
/* This function has a lock operation, so you can't call it if the caller has a lock operation. */
/*
 * Refresh a layer in double-buffer (2buf) mode: wait for a safe window, build
 * the back buffer, sync it with the previous content, then blit the user
 * canvas into it. Returns HI_SUCCESS or HI_FAILURE.
 */
static hi_s32 hifb_refresh_2buf(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)info->par;
    hifb_refresh_info *refresh_info = &par->refresh_info;
    hifb_blit_opt blit_opt = {0};
    HIFB_BUFFER_S back_buf = {0};
    HIFB_RECT new_union_rect = {0};
    unsigned long lock_flag;
    hi_u64 osd_buf_addr;
    hi_u32 bytes_per_pixel = 2; /* default 16-bit depth; updated by prepare_back_buf */
    hi_s32 ret;

    /*
     * Refresh task submitted between VO vertical timing interrupt and frame start interrupt
     * Will cause TDE/VGS to write the buffer being displayed, and a split screen will appear.
     * Blocked here, it is forbidden to submit the refresh task during this time.
     */
    ret = wait_event_interruptible_timeout(par->do_refresh_job, par->refresh_info.do_refresh_job,
                                           osal_msecs_to_jiffies(40)); /* 40 for timeout */
    if (ret < 0) {
        hifb_error("wait interrupt!\n");
    } else if (ret == 0) {
        /* timeout is logged but the refresh still proceeds */
        hifb_error("wait timeout!\n");
    }

    refresh_info->refresh_num++;

    /* get osd buffer addr */
    g_drv_ops.hifb_drv_get_layer_addr(layer_id, &osd_buf_addr);
    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    /* prepare back buf and get bytes_per_pixel */
    if (refresh_2buf_prepare_back_buf(info, &back_buf, &bytes_per_pixel) != HI_SUCCESS) {
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
        return HI_FAILURE;
    }
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
    /* according to the hw arithmetic, calculate source and dst fresh rectangle */
    refresh_2buf_get_new_rect(canvas_buf, &back_buf, &new_union_rect, &blit_opt);
    /*
     * We should check is address changed,
     * for make sure that the address configured to the hw register is in effec
     */
    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    /* refresh_2buf_blit */
    if (refresh_2buf_blit(par, osd_buf_addr, &back_buf, &new_union_rect, bytes_per_pixel) != HI_SUCCESS) {
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
        return HI_FAILURE;
    }
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

    /* update union rect */
    refresh_2buf_update_rect(refresh_info, &new_union_rect);

    /* prepare opt */
    refresh_2buf_prepare_opt(par, canvas_buf, &back_buf, bytes_per_pixel, &blit_opt);

    /* prepare compress */
    refresh_2buf_prepare_compress(par, &back_buf, &blit_opt);

    /* blit with refresh rect */
    ret = hifb_drv_blit(canvas_buf, &back_buf, &blit_opt, HI_TRUE, &refresh_info->disp_buf_info.refresh_handle);
    if (ret < 0) {
        hifb_error("blit err:0x%x!\n", ret);
        return HI_FAILURE;
    }

    /* remember the user buffer so the layer can be refreshed again later */
    ret = memcpy_s(&(refresh_info->user_buffer), sizeof(HIFB_BUFFER_S), canvas_buf, sizeof(HIFB_BUFFER_S));
    hifb_unequal_eok_return(ret);
    return HI_SUCCESS;
}
4545
4546 static hi_s32 hifb_wait_regconfig_work(hi_u32 layer_id)
4547 {
4548 hi_s32 ret;
4549 hifb_par *par = HI_NULL;
4550
4551 if (layer_id >= HIFB_MAX_LAYER_NUM) {
4552 return HI_FAILURE;
4553 }
4554 par = (hifb_par *)g_layer[layer_id].info->par;
4555 if (par == HI_NULL) {
4556 return HI_FAILURE;
4557 }
4558 par->vblflag = 0;
4559 /* Assuming TDE is fast enough, 40ms */
4560 ret = wait_event_interruptible_timeout(par->vbl_event, par->vblflag, osal_msecs_to_jiffies(40));
4561 if (ret < 0) {
4562 hifb_error("Wait vblank failed!");
4563 return HI_FAILURE;
4564 }
4565
4566 return HI_SUCCESS;
4567 }
4568
4569 static hi_s32 refresh_2buf_immediate_prepare_back_buf(struct fb_info *info, HIFB_BUFFER_S *back_buf,
4570 hi_u32 *bytes_per_pixel, hi_u32 index)
4571 {
4572 hifb_par *par = (hifb_par *)info->par;
4573 hifb_display_info *display_info = &par->display_info;
4574 hifb_refresh_info *refresh_info = &par->refresh_info;
4575 volatile hifb_compress_info *compress_info = &par->compress_info;
4576 hi_u32 buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
4577 hi_tde_export_func *tde_export_func = HI_NULL;
4578 if (refresh_info->disp_buf_info.need_flip == HI_TRUE) {
4579 hifb_error("Layer(%d) refresh again before display another buf, maybe refresh too fast !\n", par->layer_id);
4580 }
4581
4582 /* forbid changing display buffer in interrupt handle */
4583 refresh_info->disp_buf_info.fliped = HI_FALSE;
4584 refresh_info->disp_buf_info.need_flip = HI_FALSE;
4585 refresh_info->disp_buf_info.refresh_handle = 0;
4586
4587 back_buf->stCanvas.enFmt = par->color_format;
4588 back_buf->stCanvas.u32Height = display_info->display_height;
4589 back_buf->stCanvas.u32Width = display_info->display_width;
4590 back_buf->stCanvas.u32Pitch = hifb_get_line_length(info);
4591
4592 if (display_info->rotate_mode == HIFB_ROTATE_90 || display_info->rotate_mode == HIFB_ROTATE_270) {
4593 back_buf->stCanvas.u32Width = display_info->display_height;
4594 back_buf->stCanvas.u32Height = display_info->display_width;
4595 if (back_buf->stCanvas.enFmt == HIFB_FMT_ARGB1555 || back_buf->stCanvas.enFmt == HIFB_FMT_ARGB4444) {
4596 *bytes_per_pixel = 2; /* 2 depth per pixel */
4597 } else if (back_buf->stCanvas.enFmt == HIFB_FMT_ARGB8888) {
4598 *bytes_per_pixel = 4; /* 4 depth per pixel */
4599 }
4600 back_buf->stCanvas.u32Pitch = (((*bytes_per_pixel)*back_buf->stCanvas.u32Width + HIFB_ALIGN - 1) / HIFB_ALIGN) *
4601 HIFB_ALIGN;
4602 if ((par->color_format != HIFB_FMT_ARGB4444) && (par->color_format != HIFB_FMT_ARGB1555) &&
4603 (par->color_format != HIFB_FMT_ARGB8888)) {
4604 hifb_error("The rotate mode only support ARGB4444, ARGB1555 and ARGB8888. which is %d\n!\n",
4605 par->color_format);
4606 return HI_FAILURE;
4607 }
4608
4609 tde_export_func = func_entry(hi_tde_export_func, HI_ID_TDE);
4610 if ((tde_export_func == HI_NULL) || (tde_export_func->drv_tde_module_begin_job == HI_NULL) ||
4611 (tde_export_func->drv_tde_module_rotate == HI_NULL) ||
4612 (tde_export_func->drv_tde_module_end_job == HI_NULL)) {
4613 return HI_FAILURE;
4614 }
4615
4616 back_buf->stCanvas.u64PhyAddr = par->rotate_vb;
4617 } else {
4618 back_buf->stCanvas.u64PhyAddr = refresh_info->disp_buf_info.phy_addr[1 - index];
4619 if (compress_info->compress_open) {
4620 back_buf->stCanvas.u64GBPhyAddr = back_buf->stCanvas.u64PhyAddr + buf_size / 2; /* 2 buf size narrow */
4621 }
4622 }
4623 return HI_SUCCESS;
4624 }
4625
/*
 * Synchronize the back buffer with the front buffer before an immediate-mode
 * refresh. Without rotation, the front content is copied only when the new
 * refresh area does not already cover the previously refreshed area. With
 * 90/270 rotation, the front buffer is rotated back into the back buffer.
 * par->lock is dropped around the blocking blit call and re-acquired after.
 */
static hi_s32 refresh_2buf_imediate_blit(hifb_par *par, HIFB_BUFFER_S *back_buf, HIFB_RECT *new_union_rect,
                                         hi_u32 bytes_per_pixel, hi_u32 index)
{
    hifb_display_info *display_info = &par->display_info;
    hifb_refresh_info *refresh_info = &par->refresh_info;
    HIFB_BUFFER_S fore_buf = {0};
    hifb_rotate_opt rot_tmp = {0};
    hifb_blit_opt tmp_opt = {0};
    unsigned long lock_flag;
    hi_s32 ret;

    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    if (display_info->rotate_mode != HIFB_ROTATE_90 && display_info->rotate_mode != HIFB_ROTATE_270) {
        /*
         * because reverse, the 2 buffer needed to sync contain,
         * if the fresh area has cover last fresh area, then no need to sync
         */
        if (!hifb_iscontain(*new_union_rect, refresh_info->disp_buf_info.union_rect) &&
            refresh_info->disp_buf_info.union_rect.w && refresh_info->disp_buf_info.union_rect.h) {
            /* fore_buf describes the currently displayed (front) buffer */
            ret = memcpy_s(&fore_buf, sizeof(HIFB_BUFFER_S), back_buf, sizeof(HIFB_BUFFER_S));
            hifb_unlock_unequal_eok_return(ret, &par->lock, lock_flag);
            fore_buf.stCanvas.u64PhyAddr = refresh_info->disp_buf_info.phy_addr[index];
            ret = memcpy_s(&fore_buf.UpdateRect, sizeof(HIFB_RECT), &refresh_info->disp_buf_info.union_rect,
                           sizeof(HIFB_RECT));
            hifb_unlock_unequal_eok_return(ret, &par->lock, lock_flag);
            ret = memcpy_s(&back_buf->UpdateRect, sizeof(HIFB_RECT), &fore_buf.UpdateRect, sizeof(HIFB_RECT));
            hifb_unlock_unequal_eok_return(ret, &par->lock, lock_flag);
            /* drop the spinlock: hifb_drv_blit may block */
            hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
            if (hifb_drv_blit(&fore_buf, back_buf, &tmp_opt, HI_TRUE, HI_NULL) < 0) {
                return HI_FAILURE;
            }
            hifb_spin_lock_irqsave(&par->lock, lock_flag);
        }
    } else {
        ret = memcpy_s(&fore_buf, sizeof(HIFB_BUFFER_S), back_buf, sizeof(HIFB_BUFFER_S));
        hifb_unlock_unequal_eok_return(ret, &par->lock, lock_flag);
        fore_buf.stCanvas.u64PhyAddr = refresh_info->disp_buf_info.phy_addr[index];
        ret = memcpy_s(&fore_buf.UpdateRect, sizeof(HIFB_RECT), &refresh_info->disp_buf_info.union_rect,
                       sizeof(HIFB_RECT));
        hifb_unlock_unequal_eok_return(ret, &par->lock, lock_flag);
        ret = memcpy_s(&back_buf->UpdateRect, sizeof(HIFB_RECT), &fore_buf.UpdateRect, sizeof(HIFB_RECT));
        hifb_unlock_unequal_eok_return(ret, &par->lock, lock_flag);
        /* rotate in the opposite direction to restore the front buffer content */
        rot_tmp.rotate_mode = (display_info->rotate_mode == HIFB_ROTATE_90) ? (HIFB_ROTATE_270) : (HIFB_ROTATE_90);
        refresh_2buf_blit_init_buf(display_info, &fore_buf, back_buf, bytes_per_pixel);
        if (hifb_drv_rotate(&fore_buf, back_buf, &rot_tmp, HI_TRUE) < 0) {
            hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
            return HI_FAILURE;
        }
    }
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
    return HI_SUCCESS;
}
4678
4679 static hi_void refresh_2buf_imediate_prepare_opt(hifb_par *par, HIFB_BUFFER_S *canvas_buf, HIFB_BUFFER_S *back_buf,
4680 hi_u32 bytes_per_pixel, hifb_blit_opt *blit_opt)
4681 {
4682 hifb_display_info *display_info = &par->display_info;
4683 volatile hifb_compress_info *compress_info = &par->compress_info;
4684 hi_s32 ret;
4685 hi_unused(bytes_per_pixel);
4686
4687 blit_opt->call_back = HI_TRUE;
4688 blit_opt->param = &(par->layer_id);
4689 blit_opt->block = HI_TRUE;
4690
4691 if (display_info->antiflicker_mode == HIFB_ANTIFLICKER_TDE) {
4692 blit_opt->antiflicker_level = HIFB_LAYER_ANTIFLICKER_NONE;
4693 }
4694
4695 if (blit_opt->scale == HI_TRUE) {
4696 /* actual area, calculate by TDE, here is just use for let pass the test */
4697 back_buf->UpdateRect.x = 0;
4698 back_buf->UpdateRect.y = 0;
4699 back_buf->UpdateRect.w = back_buf->stCanvas.u32Width;
4700 back_buf->UpdateRect.h = back_buf->stCanvas.u32Height;
4701 } else {
4702 back_buf->UpdateRect = canvas_buf->UpdateRect;
4703 }
4704
4705 blit_opt->region_deflicker = HI_TRUE;
4706
4707 if (display_info->rotate_mode == HIFB_ROTATE_180) {
4708 blit_opt->mirror_mode = HIFB_MIRROR_BOTH;
4709 } else {
4710 blit_opt->mirror_mode = display_info->mirror_mode;
4711 }
4712
4713 if (par->compress_info.compress_open) {
4714 /*
4715 * This is just updating the refresh area. The refresh flag is first set to FALSE to
4716 * indicate that the TDE has not been moved yet, and is set to TRUE in the TDE callback.
4717 */
4718 ret = memcpy_s((void *)&par->compress_info.update_rect, sizeof(HIFB_RECT), &back_buf->UpdateRect,
4719 sizeof(HIFB_RECT));
4720 hifb_unequal_eok_return_void(ret);
4721 compress_info->update_finished = HI_FALSE;
4722 blit_opt->compress = HI_TRUE;
4723 }
4724 return;
4725 }
4726
4727 static hi_void refresh_2buf_imediate_backup_buffer(struct fb_info *info, HIFB_BUFFER_S *back_buf,
4728 HIFB_RECT *new_union_rect,
4729 hifb_blit_opt *blit_opt, hi_u32 *index)
4730 {
4731 hifb_par *par = (hifb_par *)info->par;
4732 hifb_refresh_info *refresh_info = &par->refresh_info;
4733 volatile hifb_compress_info *compress_info = &par->compress_info;
4734 hi_u32 buf_size;
4735 hi_u32 ar_size;
4736 hi_u32 gb_size;
4737 unsigned long lock_flag;
4738 hi_s32 ret;
4739
4740 buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
4741 hifb_spin_lock_irqsave(&par->lock, lock_flag);
4742 *index = 1 - *index;
4743 refresh_info->disp_buf_info.index_for_int = *index;
4744 par->modifying = HI_TRUE;
4745 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_DISPLAYADDR;
4746 refresh_info->screen_addr = refresh_info->disp_buf_info.phy_addr[*index];
4747 refresh_info->gb_screen_addr = refresh_info->screen_addr + buf_size / 2; /* 2 for half */
4748 refresh_info->disp_buf_info.compress = compress_info->compress_open;
4749 par->modifying = HI_FALSE;
4750
4751 if (par->compress_info.compress_open) {
4752 par->compress_info.update_finished = HI_TRUE;
4753 ret = memcpy_s((HIFB_RECT*)(&par->compress_info.update_rect), sizeof(HIFB_RECT), new_union_rect,
4754 sizeof(HIFB_RECT));
4755 hifb_unlock_unequal_eok_return_void(ret, &par->lock, lock_flag);
4756 /*
4757 * When blocking, if there is a callback,
4758 * still get FrameSize0 and FrameSize1 here as early as possible.
4759 */
4760 if ((blit_opt->block == HI_TRUE && blit_opt->call_back == HI_FALSE) ||
4761 (blit_opt->block == HI_TRUE && blit_opt->call_back == HI_TRUE)) {
4762 hifb_get_dcmp_framesize(back_buf, &ar_size, &gb_size,
4763 ((hi_u8 *)(hifb_get_screen_base(info)) +
4764 (*index) * buf_size), buf_size / 2); /* 2 for half */
4765
4766 compress_info->frame_size0 = ar_size;
4767 compress_info->frame_size1 = gb_size;
4768 } else if ((blit_opt->block == HI_FALSE && blit_opt->call_back == HI_TRUE)) {
4769 /* get u32AR_size and u32GB_size in callback function */
4770 } else {
4771 hifb_get_dcmp_framesize(back_buf, &ar_size, &gb_size,
4772 ((hi_u8 *)(hifb_get_screen_base(info)) +
4773 (*index) * buf_size), buf_size / 2); /* 2 for half */
4774 compress_info->frame_size0 = ar_size;
4775 compress_info->frame_size1 = gb_size;
4776 hifb_error("error:tde blit(do compress) block=%d,bCallBack=%d,FrameSize0 = %u,FrameSize1 = %u\n",
4777 blit_opt->block, blit_opt->call_back, compress_info->frame_size0,
4778 compress_info->frame_size1);
4779 }
4780 }
4781
4782 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
4783 return;
4784 }
4785
4786 /*
4787 * In this function we should wait the new contain has been show on the screen before return,
4788 * and the operations such as address configuration no needed do in interrupt handle
4789 */
4790 static hi_s32 hifb_refresh_2buf_immediate_display(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf)
4791 {
4792 struct fb_info *info = g_layer[layer_id].info;
4793 hifb_par *par = (hifb_par *)info->par;
4794 hifb_refresh_info *refresh_info = &par->refresh_info;
4795 hifb_blit_opt blit_opt = {0};
4796 hi_u32 index = refresh_info->disp_buf_info.index_for_int;
4797 HIFB_BUFFER_S back_buf = {0};
4798 hi_s32 ret;
4799 HIFB_RECT new_union_rect = {0};
4800 unsigned long lock_flag;
4801 hi_u32 bytes_per_pixel = 2; /* 2 byte */
4802
4803 /*
4804 * Refresh task submitted between VO vertical timing interrupt and frame start interrupt
4805 * Will cause TDE/VGS to write the buffer being displayed, and a split screen will appear.
4806 * Blocked here, it is forbidden to submit the refresh task during this time.
4807 */
4808 ret = wait_event_interruptible_timeout(par->do_refresh_job, par->refresh_info.do_refresh_job,
4809 osal_msecs_to_jiffies(40)); /* 40 for timeout */
4810 if (ret < 0) {
4811 osal_printk("Func:%s, Line:%d, wait.\n", __FUNCTION__, __LINE__);
4812 }
4813 /*
4814 * TDE use the blocking mode
4815 * Immediate refresh mode requires blocking mode, non-callback mode,
4816 * avoiding callback mode when scrolling subtitles.
4817 * Return immediately. Maybe the next time the user quickly call the interface refresh,
4818 * it may flush the last content.
4819 */
4820 refresh_info->refresh_num++;
4821 hifb_spin_lock_irqsave(&par->lock, lock_flag);
4822 /* prepare for back buf and get bytes per pixel */
4823 if (refresh_2buf_immediate_prepare_back_buf(info, &back_buf, &bytes_per_pixel, index) != HI_SUCCESS) {
4824 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
4825 return HI_FAILURE;
4826 }
4827 /* according to the hw arithmetic, calculate source and Dst fresh rectangle */
4828 refresh_2buf_get_new_rect(canvas_buf, &back_buf, &new_union_rect, &blit_opt);
4829 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
4830 /* blit */
4831 if (refresh_2buf_imediate_blit(par, &back_buf, &new_union_rect, bytes_per_pixel, index) != HI_SUCCESS) {
4832 return HI_FAILURE;
4833 }
4834
4835 hifb_spin_lock_irqsave(&par->lock, lock_flag);
4836 /* record the fresh area */
4837 ret = memcpy_s(&refresh_info->disp_buf_info.union_rect, sizeof(HIFB_RECT), &new_union_rect, sizeof(HIFB_RECT));
4838 hifb_unlock_unequal_eok_return(ret, &par->lock, lock_flag);
4839 /* prepare for opt */
4840 refresh_2buf_imediate_prepare_opt(par, canvas_buf, &back_buf, bytes_per_pixel, &blit_opt);
4841 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
4842
4843 /* blit with refresh rect */
4844 if (hifb_drv_blit(canvas_buf, &back_buf, &blit_opt, HI_TRUE, &refresh_info->disp_buf_info.refresh_handle) < 0) {
4845 return HI_FAILURE;
4846 }
4847
4848 /* set the backup buffer to register and show it */
4849 refresh_2buf_imediate_backup_buffer(info, &back_buf, &new_union_rect, &blit_opt, &index);
4850
4851 hifb_spin_lock_irqsave(&par->lock, lock_flag);
4852 ret = memcpy_s(&(refresh_info->user_buffer), sizeof(HIFB_BUFFER_S), canvas_buf, sizeof(HIFB_BUFFER_S));
4853 hifb_unlock_unequal_eok_return(ret, &par->lock, lock_flag);
4854 par->vblflag = 0;
4855 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
4856 /* wait the address register's configuration take effect before return */
4857 if (in_atomic() == HI_FALSE) {
4858 hifb_wait_regconfig_work(layer_id);
4859 }
4860
4861 return HI_SUCCESS;
4862 }
4863
4864 static hi_void hifb_refresh_again(hi_u32 layer_id)
4865 {
4866 struct fb_info *info = g_layer[layer_id].info;
4867 hifb_par *par = (hifb_par *)info->par;
4868 hifb_refresh_info *refresh_info = &par->refresh_info;
4869 HIFB_BUFFER_S canvas;
4870
4871 /* Prerequisites for the canvas to be refreshed */
4872 if (!(par->param_modify_mask & HIFB_LAYER_PARAMODIFY_INRECT)) {
4873 return;
4874 }
4875
4876 if (refresh_info->user_buffer.stCanvas.u64PhyAddr == 0) {
4877 return;
4878 }
4879
4880 if (refresh_info->buf_mode == HIFB_LAYER_BUF_NONE) {
4881 return;
4882 }
4883 /* Fills the canvas object with refresh information from private data for refresh. */
4884 canvas = refresh_info->user_buffer;
4885 canvas.UpdateRect.x = 0;
4886 canvas.UpdateRect.y = 0;
4887 canvas.UpdateRect.w = canvas.stCanvas.u32Width;
4888 canvas.UpdateRect.h = canvas.stCanvas.u32Height;
4889 hifb_refresh(layer_id, &canvas, refresh_info->buf_mode);
4890 }
4891
4892 static hi_s32 hifb_disp_check_param(struct fb_info *info, hi_u32 width, hi_u32 height)
4893 {
4894 hifb_par *par = (hifb_par *)info->par;
4895 HIFB_SIZE_S max_screen_size = {0};
4896 hi_u32 pitch;
4897
4898 if (!g_drv_ops.capability[par->layer_id].bVoScale) {
4899 hifb_get_maxscreensize(par, &max_screen_size.u32Width, &max_screen_size.u32Height);
4900 if ((width > max_screen_size.u32Width) || (height > max_screen_size.u32Height) ||
4901 (width > g_drv_ops.capability[par->layer_id].u32MaxWidth) ||
4902 (height > g_drv_ops.capability[par->layer_id].u32MaxHeight)) {
4903 hifb_error("(%u, %u)larger than the max size of the screen(%u, %u) or the layer(%u, %u)!\n",
4904 width, height, max_screen_size.u32Width, max_screen_size.u32Height,
4905 g_drv_ops.capability[par->layer_id].u32MaxWidth,
4906 g_drv_ops.capability[par->layer_id].u32MaxHeight);
4907 return HI_FAILURE;
4908 }
4909 }
4910 /* 3 is 8 bits */
4911 pitch = (((width * hifb_get_bits_per_pixel(info)) >> 3) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
4912 if (hifb_check_mem_enough(info, pitch, height) == HI_FAILURE) {
4913 hifb_error("++ setdispsize .memory is not enough!\n");
4914 return HI_FAILURE;
4915 }
4916 return HI_SUCCESS;
4917 }
4918
4919 /*
4920 * This function has a lock inside, and there is a call to the sleep function.
4921 * Before calling this function, you must first unlock the spin lock.
4922 */
4923 #ifdef __HuaweiLite__
/*
 * (HuaweiLite build) Set the layer's display (input rectangle) size.
 * Validates the size, updates the display info / stride / buffer info, and
 * re-selects the anti-flicker mode. Returns HI_SUCCESS or HI_FAILURE.
 */
static hi_s32 hifb_disp_setdispsize(hi_u32 layer_id, hi_u32 width, hi_u32 height)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)info->par;
    hifb_display_info *display_info = &par->display_info;
    hi_u32 pitch;
    unsigned long lock_flag;

    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    /* no-op when the size is unchanged */
    if ((display_info->display_width == width) && (display_info->display_height == height)) {
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
        return HI_SUCCESS;
    }
    /*
     * for width and height check
     * width and height should less than stMaxScreenSize
     */
    if (hifb_disp_check_param(info, width, height) != HI_SUCCESS) {
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
        return HI_FAILURE;
    }

    display_info->display_width = width;
    display_info->display_height = height;
    par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_INRECT;
    /* 3 is 8 bits */
    pitch = (((width * hifb_get_bits_per_pixel(info)) >> 3) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
    /* only grow the stride; keep visible resolution in sync with the new size */
    if (pitch > info->oinfo.stride) {
        info->oinfo.stride = pitch;
        info->oinfo.sarea.h = info->vinfo.yres = display_info->display_height;
        info->oinfo.sarea.w = info->vinfo.xres = display_info->display_width;
        par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_STRIDE;
    }

    hifb_set_dispbufinfo(layer_id);

    /* without VO scaling the screen size must follow the display size */
    if (!g_drv_ops.capability[par->layer_id].bVoScale) {
        display_info->screen_width = width;
        display_info->screen_height = height;
        par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_OUTRECT;
    }

    /*
     * here we need to think about how to resist flicker again,
     * we use VO do flicker resist before , but now if the display H size is the same as the screen,
     * VO will not do flicker resist, so should choose TDE to do flicker resist
     */
    hifb_select_antiflicker_mode(par);
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

    return HI_SUCCESS;
}
4976 #else
/*
 * (Linux build) Set the layer's display (input rectangle) size.
 * Validates the size, updates display info / var / fix / buffer info, and
 * re-selects the anti-flicker mode. Returns HI_SUCCESS or HI_FAILURE.
 */
static hi_s32 hifb_disp_setdispsize(hi_u32 layer_id, hi_u32 width, hi_u32 height)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)info->par;
    struct fb_var_screeninfo *var = &info->var;
    struct fb_fix_screeninfo *fix = &info->fix;
    hifb_display_info *display_info = &par->display_info;
    hi_u32 pitch;
    unsigned long lock_flag;

    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    /* no-op when the size is unchanged */
    if ((display_info->display_width == width) && (display_info->display_height == height)) {
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
        return HI_SUCCESS;
    }
    /*
     * for width and height check
     * width and height should less than stMaxScreenSize
     */
    if (hifb_disp_check_param(info, width, height) != HI_SUCCESS) {
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
        return HI_FAILURE;
    }

    display_info->display_width = width;
    display_info->display_height = height;
    par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_INRECT;
    /* 3 is 8 bits */
    pitch = (((width * hifb_get_bits_per_pixel(info)) >> 3) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
    /* only grow the line length, never shrink it */
    if (pitch > fix->line_length) {
        fix->line_length = pitch;
        par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_STRIDE;
    }
    /*
     * If the user calls FBIOPUT_LAYER_INFO to set display_width and display_height,then sync to xres yres,
     * Otherwise, there will be an error in the memory address in hifb_set_dispbufinfo
     */
    var->xres = display_info->display_width;
    var->yres = display_info->display_height;

    if (var->xres_virtual < display_info->display_width) {
        var->xres_virtual = display_info->display_width;
    }

    if (var->yres_virtual < display_info->display_height) {
        var->yres_virtual = display_info->display_height;
    }

    hifb_set_dispbufinfo(layer_id);

    /* without VO scaling the screen size must follow the display size */
    if (!g_drv_ops.capability[par->layer_id].bVoScale) {
        display_info->screen_width = width;
        display_info->screen_height = height;
        par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_OUTRECT;
    }

    /*
     * here we need to think about how to resist flicker again,
     * we use VO do flicker resist before , but now if the display H size is the same as the screen,
     * VO will not do flicker resist, so should choose TDE to do flicker resist
     */
    hifb_select_antiflicker_mode(par);
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

    return HI_SUCCESS;
}
5043 #endif
5044
5045 /*
5046 * we handle it by two case:
5047 * case 1 : if VO support Zoom, we only change screen size, g_display size keep not change
5048 * case 2: if VO can't support zoom, g_display size should keep the same as screen size
5049 */
5050 static hi_s32 hifb_disp_setscreensize(hi_u32 layer_id, hi_u32 width, hi_u32 height)
5051 {
5052 struct fb_info *info = g_layer[layer_id].info;
5053 hifb_par *par = (hifb_par *)info->par;
5054 hifb_display_info *display_info = &par->display_info;
5055
5056 /* If the chip does not support scaling, it is consistent with the process of setting the g_display area. */
5057 if (!g_drv_ops.capability[par->layer_id].bVoScale) {
5058 return hifb_disp_setdispsize(layer_id, width, height);
5059 }
5060
5061 display_info->screen_width = width;
5062 display_info->screen_height = height;
5063
5064 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_OUTRECT;
5065
5066 /*
5067 * Here we need to think about how to resist flicker again, we use VO do flicker resist before ,
5068 * but now if the g_display H size is the same as the screen, VO will not do flicker resist, so should choose
5069 * TDE to do flicker resist
5070 */
5071 hifb_select_antiflicker_mode(par);
5072
5073 return HI_SUCCESS;
5074 }
5075
/* Free an MMZ buffer by its physical address (thin wrapper over cmpi_mmz_free). */
hi_void hifb_buf_freemem(hi_u64 phyaddr)
{
    cmpi_mmz_free(phyaddr, HI_NULL);
}
5080
5081 static hi_s32 hifb_freeccanbuf(hifb_par *par)
5082 {
5083 HIFB_SURFACE_S *canvas_sur = HI_NULL;
5084 if (par == HI_NULL) {
5085 return HI_FAILURE;
5086 }
5087 canvas_sur = &par->canvas_sur;
5088
5089 if (canvas_sur->u64PhyAddr != 0) {
5090 hifb_buf_freemem(canvas_sur->u64PhyAddr);
5091 }
5092 canvas_sur->u64PhyAddr = 0;
5093
5094 if (par->rotate_vb != 0) {
5095 cmpi_mmz_free(par->rotate_vb, HI_NULL);
5096 par->rotate_vb = 0;
5097 }
5098
5099 return HI_SUCCESS;
5100 }
5101
5102 static hi_void set_par_stride(struct fb_info *info)
5103 {
5104 hifb_par *par = (hifb_par *)info->par;
5105 hifb_display_info *display_info = &par->display_info;
5106 hifb_refresh_info *refresh_info = &par->refresh_info;
5107 hi_u32 stride;
5108 hi_u64 display_addr;
5109
5110 stride = (((hifb_get_xres_virtual(info) * hifb_get_bits_per_pixel(info)) >> 3) + HIFB_ALIGNMENT) & /* 2^3 */
5111 (~HIFB_ALIGNMENT);
5112 if (stride != hifb_get_line_length(info) ||
5113 (hifb_get_yres(info) != display_info->y_res)) {
5114 #ifdef __HuaweiLite__
5115 info->oinfo.stride = stride;
5116 #else
5117 info->fix.line_length = stride;
5118 #endif
5119 hifb_set_dispbufinfo(par->layer_id);
5120 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_STRIDE;
5121 }
5122
5123 if ((stride * hifb_get_yoffset(info) + hifb_get_xoffset(info) *
5124 (hifb_get_bits_per_pixel(info) >> 3)) > hifb_get_smem_len(info)) { /* 3 is 8 bits */
5125 hifb_warning("y_offset=%u,x_offset=%u,invalid!\n", hifb_get_yoffset(info), hifb_get_xoffset(info));
5126 return;
5127 }
5128
5129 display_addr = (hifb_get_smem_start(info) + stride * hifb_get_yoffset(info)
5130 + hifb_get_xoffset(info) * (hifb_get_bits_per_pixel(info) >> 3)) & 0xfffffff0; /* 3 is 8 bits */
5131 if (display_addr != refresh_info->screen_addr) {
5132 refresh_info->screen_addr = display_addr;
5133 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_DISPLAYADDR;
5134 }
5135 return;
5136 }
5137
/*
 * Apply a changed x/y resolution to the layer: hide the layer when either
 * dimension is zero, then update the display and screen sizes. The spinlock
 * is released around the two helpers because they may sleep.
 */
static hi_s32 set_par_resolution(struct fb_info *info)
{
    hifb_par *par = (hifb_par *)info->par;
    hifb_display_info *display_info = &par->display_info;
    unsigned long lock_flag;

    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    if (hifb_get_xres(info) != display_info->x_res ||
        hifb_get_yres(info) != display_info->y_res) {
        /* a zero-sized layer cannot be shown */
        if ((hifb_get_xres(info) == 0) || (hifb_get_yres(info) == 0)) {
            if (par->show == HI_TRUE) {
                par->show = HI_FALSE;
                par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_SHOW;
            }
        }

        /*
         * The following two functions have a sleep operation, you must unlock before calling,
         * and lock the global variable inside the function.
         */
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

        if (hifb_disp_setdispsize(par->layer_id, hifb_get_xres(info), hifb_get_yres(info))
            != HI_SUCCESS) {
            return HI_FAILURE;
        }
        if (hifb_disp_setscreensize(par->layer_id, hifb_get_xres(info), hifb_get_yres(info))
            != HI_SUCCESS) {
            return HI_FAILURE;
        }

        hifb_spin_lock_irqsave(&par->lock, lock_flag);
    }

    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
    return HI_SUCCESS;
}
5175
5176 /*
5177 * Function : hifb_set_par
5178 * Description : set the variable parmater and make it use
5179 * Input : struct fb_info *info
5180 * Return : return 0
5181 */
#ifdef __HuaweiLite__
static hi_s32 hifb_set_par(struct fb_vtable_s *vtable)
#else
static hi_s32 hifb_set_par(struct fb_info *info)
#endif
{
#ifdef __HuaweiLite__
    struct hifb_info *info = (struct hifb_info *)vtable;
#endif
    hifb_par *par = (hifb_par *)info->par;
    hifb_display_info *display_info = &par->display_info;
    HIFB_COLOR_FMT_E format;
    unsigned long lock_flag;

    hifb_spin_lock_irqsave(&par->lock, lock_flag);

    /* flag in-progress modification for the interrupt handler */
    par->modifying = HI_TRUE;

    /* set the stride if stride change */
    set_par_stride(info);

    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
    /* If xres or yres change */
    if (set_par_resolution(info) != HI_SUCCESS) {
        return HI_FAILURE;
    }

#ifdef __HuaweiLite__
    format = info->vinfo.fmt;
#else
    /* derive the HIFB pixel format from the var's ARGB channel layout */
    format = hifb_getfmtbyargb(&info->var.red, &info->var.green, &info->var.blue,
                               &info->var.transp, hifb_get_bits_per_pixel(info));
#endif
    if ((par->color_format != format)) {
        /* format changed: the cached canvas buffer is stale, release it */
        hifb_freeccanbuf(par);
        hifb_set_fmt(par, format);
        par->cursor_info.cursor.stCursor.enFmt = format;
        par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_FMT;
    }

    hifb_spin_lock_irqsave(&par->lock, lock_flag);
    /* cache the applied resolution for later change detection */
    display_info->x_res = hifb_get_xres(info);
    display_info->y_res = hifb_get_yres(info);
    display_info->vir_x_res = hifb_get_xres_virtual(info);
    display_info->vir_y_res = hifb_get_yres_virtual(info);

    par->modifying = HI_FALSE;
    hifb_spin_unlock_irqrestore(&par->lock, lock_flag);

    return 0;
}
5233
5234 /*
5235 * Function : hifb_pan_display
5236 * Description : pan g_display.
5237 * Input : struct fb_var_screeninfo *var
5238 * Return : return 0
5239 */
5240 #ifdef __HuaweiLite__
5241 static hi_s32 hifb_pan_display(struct fb_vtable_s *vtable, struct fb_overlayinfo_s *oinfo)
5242 #else
5243 static hi_s32 hifb_pan_display(struct fb_var_screeninfo *var, struct fb_info *info)
5244 #endif
5245 {
5246 #ifdef __HuaweiLite__
5247 struct hifb_info *info = (struct hifb_info *)vtable;
5248 #endif
5249
5250 hifb_par *par = (hifb_par *)info->par;
5251 hifb_refresh_info *refresh_info = &par->refresh_info;
5252 hi_u64 display_addr;
5253 hi_u32 stride;
5254 HIFB_BUFFER_S canvas_buf;
5255 hi_s32 ret;
5256
5257 /* set the stride and g_display start address,3 is 8 bits, 0xf 0xfffffff0 mask */
5258 stride = (hifb_get_xres_virtual(info) * (hifb_get_bits_per_pixel(info) >> 3) + 0xf) & 0xfffffff0;
5259
5260 #ifdef __HuaweiLite__
5261 display_addr = (hifb_get_smem_start(info) + (hi_u64)stride * oinfo->sarea.y + (hi_u64)oinfo->sarea.x *
5262 (hifb_get_bits_per_pixel(info) >> 3)) & 0xfffffffffffffff0; /* 3 is 8 bits 0xfffffffffffffff0 mask */
5263 #else
5264 /* 3 is 8 bits */
5265 display_addr = (hifb_get_smem_start(info) + (hi_u64)stride * var->yoffset + (hi_u64)var->xoffset *
5266 (hifb_get_bits_per_pixel(info) >> 3)) & 0xfffffffffffffff0; /* 3 is 8 bits 0xfffffffffffffff0 mask */
5267 #endif
5268 canvas_buf.stCanvas.enFmt = par->color_format;
5269 canvas_buf.stCanvas.u64PhyAddr = display_addr;
5270 canvas_buf.stCanvas.u32Pitch = stride;
5271 canvas_buf.UpdateRect.x = 0;
5272 canvas_buf.UpdateRect.y = 0;
5273 #ifdef __HuaweiLite__
5274 canvas_buf.stCanvas.u32Width = info->vinfo.xres;
5275 canvas_buf.stCanvas.u32Height = info->vinfo.yres;
5276 canvas_buf.UpdateRect.w = info->vinfo.xres;
5277 canvas_buf.UpdateRect.h = info->vinfo.xres;
5278 g_layer[par->layer_id].info->activate = info->activate;
5279 #else
5280 canvas_buf.stCanvas.u32Width = hifb_get_xres(info);
5281 canvas_buf.stCanvas.u32Height = hifb_get_yres(info);
5282 canvas_buf.UpdateRect.w = hifb_get_xres(info);
5283 canvas_buf.UpdateRect.h = hifb_get_yres(info);
5284 g_layer[par->layer_id].info->var.activate = info->var.activate;
5285 #endif
5286 refresh_info->buf_mode = HIFB_LAYER_BUF_BUTT;
5287 ret = hifb_refresh_0buf(par->layer_id, &canvas_buf);
5288
5289 return ret;
5290 }
5291
5292 static inline hi_void hifb_get_fmt(hifb_par *par, HIFB_COLOR_FMT_E *color_format)
5293 {
5294 *color_format = par->color_format;
5295 return;
5296 }
5297
5298 static inline hi_void hifb_set_fmt(hifb_par *par, HIFB_COLOR_FMT_E color_fmt)
5299 {
5300 par->color_format = color_fmt;
5301
5302 return;
5303 }
5304
5305 static inline hi_void hifb_set_alpha(hifb_par *par, HIFB_ALPHA_S *alpha)
5306 {
5307 hi_s32 ret;
5308 ret = memcpy_s(&par->alpha, sizeof(HIFB_ALPHA_S), alpha, sizeof(HIFB_ALPHA_S));
5309 hifb_unequal_eok_return_void(ret);
5310 return;
5311 }
5312
5313 static inline hi_void hifb_get_alpha(hifb_par *par, HIFB_ALPHA_S *alpha)
5314 {
5315 hi_s32 ret;
5316 ret = memcpy_s(alpha, sizeof(HIFB_ALPHA_S), &par->alpha, sizeof(HIFB_ALPHA_S));
5317 hifb_unequal_eok_return_void(ret);
5318 return;
5319 }
5320
5321 static inline hi_void hifb_set_key(hifb_par *par, hifb_colorkeyex *key)
5322 {
5323 hi_s32 ret;
5324 ret = memcpy_s(&par->ckey, sizeof(hifb_colorkeyex), key, sizeof(hifb_colorkeyex));
5325 hifb_unequal_eok_return_void(ret);
5326 return;
5327 }
5328
5329 static inline hi_void hifb_get_key(hifb_par *par, hifb_colorkeyex *key)
5330 {
5331 hi_s32 ret;
5332 ret = memcpy_s(key, sizeof(hifb_colorkeyex), &par->ckey, sizeof(hifb_colorkeyex));
5333 hifb_unequal_eok_return_void(ret);
5334 return;
5335 }
5336
/*
 * Clamp the requested layer position so that at least a minimum-sized layer
 * window still fits on the video device screen, record the result in the
 * display info and mark the output rectangle as modified.
 */
static hi_void hifb_set_layerpos(hifb_par *par, HIFB_POINT_S *pos)
{
    hi_s32 x_pos;
    hi_s32 y_pos;
    SIZE_S max_screensize;
    hi_u32 layer_id;
    hifb_display_info *display_info = HI_NULL;
    if ((par == HI_NULL) || (pos == HI_NULL)) {
        return;
    }
    layer_id = par->layer_id;
    display_info = &par->display_info;

    /* Also refreshes display_info->max_screen_* from the driver OSD data. */
    hifb_get_maxscreensize(par, &max_screensize.u32Width, &max_screensize.u32Height);
    x_pos = pos->s32XPos;
    y_pos = pos->s32YPos;
    /* Clamp so that at least hifb_min_width pixels remain visible on screen. */
    if (x_pos > (hi_s32)(max_screensize.u32Width - hifb_min_width(layer_id))) {
        hifb_warning("the sum of s32XPos(%d) and min_width(%d) larger than Vodev screen width(%d)!\n", x_pos,
                     hifb_min_width(layer_id), max_screensize.u32Width);
        x_pos = max_screensize.u32Width - hifb_min_width(layer_id);
    }

    if (y_pos > (hi_s32)(max_screensize.u32Height - hifb_min_height(layer_id))) {
        hifb_warning("the sum of s32YPos(%d) and min_height(%d) larger than Vodev screen height(%d)!\n", y_pos,
                     hifb_min_height(layer_id), max_screensize.u32Height);
        y_pos = max_screensize.u32Height - hifb_min_height(layer_id);
    }

    /* NOTE(review): negative positions are not clamped to 0 here - presumably allowed; confirm with callers. */
    display_info->pos.s32XPos = x_pos;
    display_info->pos.s32YPos = y_pos;

    par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_OUTRECT;

    return;
}
5372
5373 #ifdef CURSOR
5374 static inline hi_void hifb_set_cursorinfo(hifb_par *par, hifb_cursor_info *cursor_info)
5375 {
5376 hi_s32 ret;
5377 ret = memcpy_s(&par->cursor_info, sizeof(hifb_cursor_info), cursor_info, sizeof(hifb_cursor_info));
5378 hifb_unequal_eok_return_void(ret);
5379 return;
5380 }
5381
5382 static inline hi_void hifb_get_cursorinfo(hifb_par *par, hifb_cursor_info *cursor_info)
5383 {
5384 hi_s32 ret;
5385 ret = memcpy_s(cursor_info, sizeof(hifb_cursor_info), &par->cursor_info, sizeof(hifb_cursor_info));
5386 hifb_unequal_eok_return_void(ret);
5387 return;
5388 }
5389 #endif
5390
5391 static inline hi_bool hifb_get_show(hifb_par *par)
5392 {
5393 if (par != HI_NULL) {
5394 return par->show;
5395 } else {
5396 return HI_FALSE;
5397 }
5398 }
5399
5400 static inline hi_void hifb_set_show(hifb_par *par, hi_bool show)
5401 {
5402 if (par != HI_NULL) {
5403 par->show = show;
5404 }
5405 }
5406
/*
 * Fill layer_info with a snapshot of the layer's public state: premultiply
 * flag, buffer mode, antiflicker level, position, display/screen size and
 * canvas size. Does nothing when either pointer is NULL.
 */
hi_void hifb_get_layerinfo(hifb_par *par, HIFB_LAYER_INFO_S *layer_info)
{
    HIFB_POINT_S pos = {0};
    if ((par != HI_NULL) && (layer_info != HI_NULL)) {
        hifb_get_premul(par, &layer_info->bPreMul);
        hifb_get_bufmode(par, &layer_info->BufMode);
        hifb_get_antiflickerlevel(par, &layer_info->eAntiflickerLevel);
        hifb_get_layerpos(par, &pos);
        layer_info->s32XPos = pos.s32XPos;
        layer_info->s32YPos = pos.s32YPos;
        hifb_get_dispsize(par, &layer_info->u32DisplayWidth, &layer_info->u32DisplayHeight);
        hifb_get_screensize(par, &layer_info->u32ScreenWidth, &layer_info->u32ScreenHeight);
        layer_info->u32CanvasWidth = par->canvas_sur.u32Width;
        layer_info->u32CanvasHeight = par->canvas_sur.u32Height;
        /* NOTE(review): BUTT mask presumably marks every field valid - confirm against HIFB_LAYERMASK defs. */
        layer_info->u32Mask = HIFB_LAYERMASK_BUTT;
    }
    return;
}
5425
5426 static hi_void hifb_get_idledispbuf(hifb_par *par, hi_u64 *phy_addr)
5427 {
5428 hifb_refresh_info *refresh_info = HI_NULL;
5429 hi_u32 index_for_int;
5430 if ((par == HI_NULL) || (phy_addr == HI_NULL)) {
5431 return;
5432 }
5433 refresh_info = &par->refresh_info;
5434 index_for_int = refresh_info->disp_buf_info.index_for_int;
5435 /*
5436 * Only the 2buf refresh mode allows to get free buf, otherwise it is the buf in the current
5437 * interrupt processing.
5438 */
5439 if ((refresh_info->buf_mode == HIFB_LAYER_BUF_DOUBLE) ||
5440 (refresh_info->buf_mode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
5441 *phy_addr = refresh_info->disp_buf_info.phy_addr[1 - index_for_int];
5442 } else {
5443 *phy_addr = refresh_info->disp_buf_info.phy_addr[index_for_int];
5444 }
5445
5446 return;
5447 }
5448
5449 static hi_void hifb_get_workdispbuf(hifb_par *par, hi_u64 *phy_addr)
5450 {
5451 hifb_refresh_info *refresh_info = HI_NULL;
5452 hi_u32 index_for_int;
5453 if ((par == HI_NULL) || (phy_addr == HI_NULL)) {
5454 return;
5455 }
5456 refresh_info = &par->refresh_info;
5457 index_for_int = refresh_info->disp_buf_info.index_for_int;
5458
5459 *phy_addr = refresh_info->disp_buf_info.phy_addr[index_for_int];
5460
5461 return;
5462 }
5463
5464 static hi_void hifb_get_dispsize(hifb_par *par, hi_u32 *width, hi_u32 *height)
5465 {
5466 hifb_display_info *display_info = HI_NULL;
5467 display_info = &par->display_info;
5468
5469 if ((width != HI_NULL) && (height != HI_NULL)) {
5470 *width = display_info->display_width;
5471 *height = display_info->display_height;
5472 }
5473
5474 return;
5475 }
5476
5477 #ifdef CURSOR
5478 static hi_void hifb_get_idlecursorbuf(hifb_par *par, hi_u64 *phy_addr)
5479 {
5480 hi_u32 index_for_int;
5481 if ((par == HI_NULL) || (phy_addr == HI_NULL)) {
5482 return;
5483 }
5484 index_for_int = par->refresh_info.disp_buf_info.index_for_int;
5485
5486 if ((par->refresh_info.buf_mode == HIFB_LAYER_BUF_DOUBLE) ||
5487 (par->refresh_info.buf_mode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
5488 *phy_addr = par->cursor_info.cursor.stCursor.u64PhyAddr +
5489 (HIFB_CURSOR_DEF_VRAM * 1024) * (hi_u64)(1 - index_for_int) / 2; /* 2 1024 size KB */
5490 } else {
5491 *phy_addr = par->cursor_info.cursor.stCursor.u64PhyAddr +
5492 (HIFB_CURSOR_DEF_VRAM * 1024) * (hi_u64)index_for_int / 2; /* 2 1024 size KB */
5493 }
5494
5495 return;
5496 }
5497 #endif
5498
5499 static inline hi_void hifb_get_workcursorbuf(hifb_par *par, hi_u64 *phy_addr)
5500 {
5501 hi_u32 index_for_int;
5502 index_for_int = par->refresh_info.disp_buf_info.index_for_int;
5503 *phy_addr = par->cursor_info.cursor.stCursor.u64PhyAddr +
5504 (HIFB_CURSOR_DEF_VRAM * 1024) * (hi_u64)index_for_int / 2; /* 2 1024 size KB */
5505
5506 return;
5507 }
5508
5509 static inline hi_void hifb_set_dispbufflip(hifb_par *par, hi_bool need_flip)
5510 {
5511 par->refresh_info.disp_buf_info.need_flip = need_flip;
5512
5513 return;
5514 }
5515
5516 static inline hi_void hifb_get_bufmode(hifb_par *par, HIFB_LAYER_BUF_E *buf_mode)
5517 {
5518 *buf_mode = par->refresh_info.buf_mode;
5519
5520 return;
5521 }
5522
5523 static inline hi_void hifb_get_premul(hifb_par *par, hi_bool *premul)
5524 {
5525 *premul = par->display_info.premul;
5526
5527 return;
5528 }
5529
5530 static inline hi_void hifb_get_antiflickerlevel(hifb_par *par, HIFB_LAYER_ANTIFLICKER_LEVEL_E *pen_antiflicker_level)
5531 {
5532 *pen_antiflicker_level = par->display_info.antiflicker_level;
5533
5534 return;
5535 }
5536
5537 static inline hi_void hifb_get_layerpos(hifb_par *par, HIFB_POINT_S *pos)
5538 {
5539 hi_s32 ret;
5540 ret = memcpy_s(pos, sizeof(HIFB_POINT_S), &par->display_info.pos, sizeof(HIFB_POINT_S));
5541 hifb_unequal_eok_return_void(ret);
5542 return;
5543 }
5544
5545 static hi_void hifb_get_screensize(hifb_par *par, hi_u32 *width, hi_u32 *height)
5546 {
5547 hifb_display_info *display_info = HI_NULL;
5548 display_info = &par->display_info;
5549
5550 if ((width != HI_NULL) && (height != HI_NULL)) {
5551 *width = display_info->screen_width;
5552 *height = display_info->screen_height;
5553 }
5554
5555 return;
5556 }
5557
/*
 * Set the layer's screen (output) size.
 * When the layer supports VO scaling the size is stored as requested;
 * otherwise the screen size must equal the display size and any mismatch
 * is rejected with HI_FAILURE.
 */
static hi_s32 hifb_set_screensize(hifb_par *par, hi_u32 *width, hi_u32 *height)
{
    hi_s32 ret;
    hifb_display_info *display_info = HI_NULL;

    ret = HI_SUCCESS;
    if ((par == HI_NULL) || (width == HI_NULL) || (height == HI_NULL)) {
        return HI_FAILURE;
    }
    display_info = &par->display_info;

    if (g_drv_ops.capability[par->layer_id].bVoScale) {
        display_info->screen_width = *width;
        display_info->screen_height = *height;
    } else {
        /* No scaler: the screen size is locked to the display size. */
        if ((display_info->display_width != *width) || (display_info->display_height != *height)) {
            hifb_error("ScreenSize(%u, %u) can't be different with DisplaySize(%u, %u) \
            when layer %d don't scale\n", *width, *height,
                       display_info->display_width, display_info->display_height, par->layer_id);
            ret = HI_FAILURE;
        }
    }

    return ret;
}
5583
/*
 * Report the maximum screen size of the video device the layer is bound to.
 * Refreshes display_info->max_screen_* from the driver OSD data when the
 * query succeeds; on failure the previously cached values are returned.
 * width/height may be NULL when the caller only wants the cache refreshed.
 */
static hi_void hifb_get_maxscreensize(hifb_par *par, hi_u32 *width, hi_u32 *height)
{
    hifb_osd_data layer_data;
    hifb_display_info *display_info = HI_NULL;
    if (par == HI_NULL) {
        return;
    }
    display_info = &par->display_info;

    if (g_drv_ops.hifb_drv_get_osd_data(par->layer_id, &layer_data) == HI_SUCCESS) {
        display_info->max_screen_width = layer_data.screen_width;
        display_info->max_screen_height = layer_data.screen_height;
    }

    if ((width != HI_NULL) && (height != HI_NULL)) {
        *width = display_info->max_screen_width;
        *height = display_info->max_screen_height;
    }

    return;
}
5605
5606 #if (HICHIP == HI3516E_V200)
5607 static hi_s32 hifb_tde_rotate_callback(hi_u32 call_mod_id, hi_u32 call_dev_id, hi_u32 call_chn_id, hi_void *job_data)
5608 {
5609 hi_u32 layer_id;
5610 hi_s32 tde_finish_handle;
5611 hifb_par *par = HI_NULL;
5612 hifb_refresh_info *refresh_info = HI_NULL;
5613 HIFB_LAYER_BUF_E buf_mode;
5614 unsigned long lockflag;
5615 vgs_job_data *callback_job_data = (vgs_job_data *)job_data;
5616 hi_unused(call_mod_id);
5617 hi_unused(call_dev_id);
5618 hi_unused(call_chn_id);
5619 layer_id = callback_job_data->private_data[0];
5620 tde_finish_handle = callback_job_data->private_data[1];
5621 if (layer_id <= g_drv_ops.layer_count) {
5622 par = (hifb_par *)(g_layer[layer_id].info->par);
5623 } else {
5624 hifb_error("u32LayerId = %u is invalid\n", layer_id);
5625 return HI_FAILURE;
5626 }
5627 refresh_info = &par->refresh_info;
5628 hifb_get_bufmode(par, &buf_mode);
5629
5630 hifb_spin_lock_irqsave(&par->lock, lockflag);
5631
5632 if (buf_mode == HIFB_LAYER_BUF_DOUBLE) {
5633 if (refresh_info->disp_buf_info.refresh_handle == tde_finish_handle) {
5634 /* Notify VO, it can be updated now */
5635 hifb_set_dispbufflip(par, HI_TRUE);
5636
5637 /* Compression is not supported when there is rotation */
5638 par->refresh_info.disp_buf_info.compress = HI_FALSE;
5639 } else {
5640 hifb_error("Layer(%d) refresh again before tde callback, maybe refresh too fast !\n", par->layer_id);
5641 }
5642 }
5643
5644 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
5645 return HI_SUCCESS;
5646 }
5647 #else
5648 static hi_s32 hifb_tde_rotate_callback(const hi_void *paraml, const hi_void *paramr)
5649 {
5650 hi_u32 layer_id = 0;
5651 hi_s32 tde_finish_handle;
5652 hifb_par *par = HI_NULL;
5653 hifb_refresh_info *refresh_info = HI_NULL;
5654 HIFB_LAYER_BUF_E buf_mode;
5655 unsigned long lockflag;
5656 hifb_tde_callback_param *param = (hifb_tde_callback_param *)paraml;
5657 tde_finish_handle = *(hi_s32 *)paramr;
5658 if (param != HI_NULL) {
5659 layer_id = param->layer_id;
5660 } else {
5661 hifb_error("param is HI_NULL\n");
5662 return HI_FAILURE;
5663 }
5664 if (layer_id <= g_drv_ops.layer_count) {
5665 par = (hifb_par *)(g_layer[layer_id].info->par);
5666 } else {
5667 kfree(param);
5668 param = HI_NULL;
5669 hifb_error("u32LayerId = %u is invalid\n", layer_id);
5670 return HI_FAILURE;
5671 }
5672 refresh_info = &par->refresh_info;
5673 hifb_get_bufmode(par, &buf_mode);
5674
5675 hifb_spin_lock_irqsave(&par->lock, lockflag);
5676
5677 if (buf_mode == HIFB_LAYER_BUF_DOUBLE) {
5678 if (refresh_info->disp_buf_info.refresh_handle == tde_finish_handle) {
5679 /* Notify VO, it can be updated now */
5680 hifb_set_dispbufflip(par, HI_TRUE);
5681
5682 /* Compression is not supported when there is rotation */
5683 par->refresh_info.disp_buf_info.compress = HI_FALSE;
5684 } else {
5685 hifb_error("Layer(%d) refresh again before tde callback, maybe refresh too fast !\n", param->layer_id);
5686 }
5687 }
5688
5689 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
5690 if (param != HI_NULL) {
5691 kfree(param);
5692 param = HI_NULL;
5693 }
5694 return HI_SUCCESS;
5695 }
5696 #endif
5697
5698 static hi_void tde_callback_src_init(hifb_par *par, HIFB_BUFFER_S *src_img)
5699 {
5700 hifb_display_info *display_info = &par->display_info;
5701 hi_u32 bytes_per_pixel = 2; /* 2 is PerPixel */
5702 src_img->stCanvas.u64PhyAddr = par->rotate_vb;
5703 src_img->stCanvas.u32Width = display_info->display_height;
5704 src_img->stCanvas.u32Height = display_info->display_width;
5705 src_img->UpdateRect.x = 0;
5706 src_img->UpdateRect.y = 0;
5707 src_img->UpdateRect.w = src_img->stCanvas.u32Width;
5708 src_img->UpdateRect.h = src_img->stCanvas.u32Height;
5709
5710 src_img->stCanvas.enFmt = par->color_format;
5711 if (src_img->stCanvas.enFmt == HIFB_FMT_ARGB1555 ||
5712 src_img->stCanvas.enFmt == HIFB_FMT_ARGB4444) {
5713 bytes_per_pixel = 2; /* 2 is PerPixel */
5714 } else if (src_img->stCanvas.enFmt == HIFB_FMT_ARGB8888) {
5715 bytes_per_pixel = 4; /* 4 is PerPixel */
5716 }
5717 src_img->stCanvas.u32Pitch = ((bytes_per_pixel * src_img->stCanvas.u32Width + HIFB_ALIGN - 1) / HIFB_ALIGN) *
5718 HIFB_ALIGN;
5719 return;
5720 }
5721
/*
 * Fill the rotation destination image. The destination buffer depends on
 * the refresh mode: the idle buffer for 2buf mode, the working buffer for
 * 1buf mode, and the other buffer for 2buf-immediate mode. The canvas takes
 * the display size and current pixel format; the stride is HIFB_ALIGN-aligned.
 */
static hi_void tde_callback_dst_init(hifb_par *par, HIFB_BUFFER_S *dst_img, HIFB_LAYER_BUF_E buf_mode)
{
    hi_u32 bytes_per_pixel = 2; /* 2 is PerPixel */
    hifb_refresh_info *refresh_info = &par->refresh_info;
    hifb_display_info *display_info = &par->display_info;
    if (buf_mode == HIFB_LAYER_BUF_DOUBLE) {
        hifb_get_idledispbuf(par, (hi_u64*)(&dst_img->stCanvas.u64PhyAddr));
    } else if (buf_mode == HIFB_LAYER_BUF_ONE) {
        dst_img->stCanvas.u64PhyAddr =
            refresh_info->disp_buf_info.phy_addr[refresh_info->disp_buf_info.index_for_int];
    } else if (buf_mode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE) {
        dst_img->stCanvas.u64PhyAddr =
            refresh_info->disp_buf_info.phy_addr[1 - refresh_info->disp_buf_info.index_for_int];
    }
    /* NOTE(review): for other buffer modes u64PhyAddr is left unset - presumably unreachable; confirm. */
    dst_img->stCanvas.u32Width = display_info->display_width;
    dst_img->stCanvas.u32Height = display_info->display_height;
    dst_img->stCanvas.enFmt = par->color_format;
    dst_img->UpdateRect.x = 0;
    dst_img->UpdateRect.y = 0;
    dst_img->UpdateRect.w = dst_img->stCanvas.u32Width;
    dst_img->UpdateRect.h = dst_img->stCanvas.u32Height;
    if (dst_img->stCanvas.enFmt == HIFB_FMT_ARGB1555 ||
        dst_img->stCanvas.enFmt == HIFB_FMT_ARGB4444) {
        bytes_per_pixel = 2; /* 2 is PerPixel */
    } else if (dst_img->stCanvas.enFmt == HIFB_FMT_ARGB8888) {
        bytes_per_pixel = 4; /* 4 is PerPixel */
    }
    /* Round the stride up to the HIFB_ALIGN boundary. */
    dst_img->stCanvas.u32Pitch = ((bytes_per_pixel * dst_img->stCanvas.u32Width + HIFB_ALIGN - 1) / HIFB_ALIGN) *
                                 HIFB_ALIGN;
    return;
}
5753
5754 static hi_void tde_callback_with_rotate(hifb_par *par, HIFB_LAYER_BUF_E buf_mode)
5755 {
5756 HIFB_BUFFER_S src_img;
5757 HIFB_BUFFER_S dst_img;
5758 hifb_rotate_opt rot_opt;
5759 hi_bool is_refresh_screen = HI_TRUE;
5760 hifb_display_info *display_info = &par->display_info;
5761 hifb_refresh_info *refresh_info = &par->refresh_info;
5762
5763 /* fill src image info */
5764 tde_callback_src_init(par, &src_img);
5765
5766 /* fill dst image info */
5767 tde_callback_dst_init(par, &dst_img, buf_mode);
5768
5769 /* fill rot option info */
5770 rot_opt.rotate_mode = display_info->rotate_mode;
5771 /*
5772 * Note: After turning on the callback, the rotation job callback function may not be called.
5773 * Need to wait until the next tde task is called, the direct phenomenon is: if the last run
5774 * In the use case, the rotation angle of 90 or 270 is set and the tde rotation branch is entered.
5775 * Unloading ko and loading ko will result in core dump.
5776 */
5777 rot_opt.call_back = (buf_mode == HIFB_LAYER_BUF_DOUBLE) ? HI_TRUE : HI_FALSE;
5778
5779 /* In the interrupt, the blocking mode is not allowed, so the non-blocking mode is used. */
5780 rot_opt.block = HI_FALSE;
5781 rot_opt.param = &(par->layer_id);
5782
5783 /* Fill bRefreshScreen.Not used now */
5784 is_refresh_screen = HI_TRUE;
5785 refresh_info->disp_buf_info.refresh_handle = hifb_drv_rotate(&src_img, &dst_img, &rot_opt,
5786 is_refresh_screen);
5787 return;
5788 }
5789
5790 #if (HICHIP == HI3516E_V200)
/*
 * Handle TDE blit completion when no rotation is involved (HI3516E_V200).
 * In 2buf mode, flags the display buffer for flipping once the finished
 * handle matches the pending refresh. When compression is enabled, reads
 * the compressed frame sizes from the 16-byte header at the start of the
 * just-finished buffer and publishes them for the interrupt routine.
 * Caller holds par->lock.
 */
static hi_void tde_callback_without_rotate(hifb_par *par, HIFB_LAYER_BUF_E buf_mode,
                                           hi_void *job_data)
{
    hi_u32 ar_size, gb_size, buf_size, index_for_int;
    hifb_refresh_info *refresh_info = &par->refresh_info;
    struct fb_info *info = HI_NULL;
    vgs_job_data *callback_job_data = (vgs_job_data *)job_data;
    hi_s32 tde_finish_handle = callback_job_data->private_data[1];
    info = g_layer[callback_job_data->private_data[0]].info;
    /* Per-buffer size: one line stride times height, rounded up to HIFB_ALIGNMENT. */
    buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
    index_for_int = refresh_info->disp_buf_info.index_for_int;
    if (buf_mode == HIFB_LAYER_BUF_DOUBLE) {
        if (refresh_info->disp_buf_info.refresh_handle == tde_finish_handle) {
            /* Notify VO, can be updated */
            hifb_set_dispbufflip(par, HI_TRUE);
        } else {
            hifb_error("Layer(%d) refresh again before tde callback, maybe refresh too fast !\n", par->layer_id);
        }
    }

    if (par->compress_info.compress_open) {
        /*
         * In the case of compression,
         * the size of the compressed data stored in the first 16 bytes is obtained.
         */
        if ((buf_mode == HIFB_LAYER_BUF_ONE) && (refresh_info->disp_buf_info.refresh_handle == tde_finish_handle)) {
            hifb_get_dcmp_framesize(HI_NULL, &ar_size, &gb_size,
                                    (hi_u8 *)(hifb_get_screen_base(info)), buf_size / 2); /* 2 buff size */
        } else if ((buf_mode == HIFB_LAYER_BUF_DOUBLE && refresh_info->disp_buf_info.refresh_handle ==
                    tde_finish_handle) || (buf_mode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
            /* Read from the buffer that was just drawn (the one not in interrupt use). */
            hifb_get_dcmp_framesize(HI_NULL, &ar_size, &gb_size, ((hi_u8 *)(hifb_get_screen_base(info)) +
                                    (1 - index_for_int) * buf_size), buf_size / 2); /* 2 buff size */
        } else {
            ar_size = 0;
            gb_size = 0;
            hifb_error("%s,%d,Layer(%d) buffer mode %d is supported,ARSize and GBSize are set to %u,%u\n",
                       __FUNCTION__, __LINE__, (hi_u32)callback_job_data->private_data[0], buf_mode, ar_size, gb_size);
        }
        par->compress_info.frame_size0 = ar_size;
        par->compress_info.frame_size1 = gb_size;
        par->compress_info.update_finished = HI_TRUE;
    }
    return;
}
5835 #else
5836 static hi_void tde_callback_without_rotate(hifb_par *par, HIFB_LAYER_BUF_E buf_mode,
5837 const hi_void *paraml, const hi_void *paramr)
5838 {
5839 hi_u32 ar_size, gb_size, buf_size, index_for_int;
5840 hifb_refresh_info *refresh_info = &par->refresh_info;
5841 struct fb_info *info = HI_NULL;
5842 hifb_tde_callback_param *param = (hifb_tde_callback_param *)paraml;
5843 hi_s32 tde_finish_handle = *(hi_s32 *)paramr;
5844 info = g_layer[param->layer_id].info;
5845 buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
5846 index_for_int = refresh_info->disp_buf_info.index_for_int;
5847 if (buf_mode == HIFB_LAYER_BUF_DOUBLE) {
5848 if (refresh_info->disp_buf_info.refresh_handle == tde_finish_handle) {
5849 /* Notify VO, can be updated */
5850 hifb_set_dispbufflip(par, HI_TRUE);
5851 par->refresh_info.disp_buf_info.compress = param->compress;
5852 } else {
5853 hifb_error("Layer(%d) refresh again before tde callback, maybe refresh too fast !\n", par->layer_id);
5854 }
5855 }
5856
5857 if (par->compress_info.compress_open) {
5858 /*
5859 * In the case of compression,
5860 * the size of the compressed data stored in the first 16 bytes is obtained.
5861 */
5862 if ((buf_mode == HIFB_LAYER_BUF_ONE) && (refresh_info->disp_buf_info.refresh_handle == tde_finish_handle)) {
5863 hifb_get_dcmp_framesize(HI_NULL, &ar_size, &gb_size,
5864 ((hi_u8 *)(hifb_get_screen_base(info))), buf_size / 2); /* 2 buff size */
5865 } else if ((buf_mode == HIFB_LAYER_BUF_DOUBLE && refresh_info->disp_buf_info.refresh_handle ==
5866 tde_finish_handle) || (buf_mode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
5867 hifb_get_dcmp_framesize(HI_NULL, &ar_size, &gb_size, ((hi_u8 *)(hifb_get_screen_base(info)) +
5868 (1 - index_for_int) * buf_size), buf_size / 2); /* 2 buff size */
5869 } else {
5870 ar_size = 0;
5871 gb_size = 0;
5872 hifb_error("%s,%d,Layer(%d) buffer mode %d is supported,ARSize and GBSize are set to %d,%d\n",
5873 __FUNCTION__, __LINE__, param->layer_id, buf_mode, ar_size, gb_size);
5874 }
5875 par->compress_info.frame_size0 = ar_size;
5876 par->compress_info.frame_size1 = gb_size;
5877 par->compress_info.update_finished = HI_TRUE;
5878 }
5879 return;
5880 }
5881 #endif
5882
5883 #if (HICHIP == HI3516E_V200)
5884 static hi_s32 hifb_tde_callback(hi_u32 call_mod_id, hi_u32 call_dev_id, hi_u32 call_chn_id, hi_void *job_data)
5885 {
5886 hi_u32 layer_id;
5887 hi_s32 tde_finish_handle;
5888 struct fb_info *info = HI_NULL;
5889 hifb_par *par = HI_NULL;
5890 hifb_refresh_info *refresh_info = HI_NULL;
5891 hifb_display_info *display_info = HI_NULL;
5892 HIFB_LAYER_BUF_E buf_mode;
5893 unsigned long lock_flag;
5894 vgs_job_data *callback_job_data = (vgs_job_data *)job_data;
5895 hi_unused(call_mod_id);
5896 hi_unused(call_dev_id);
5897 hi_unused(call_chn_id);
5898
5899 layer_id = callback_job_data->private_data[0];
5900 tde_finish_handle = callback_job_data->private_data[1];
5901
5902 if (layer_id <= g_drv_ops.layer_count) {
5903 info = g_layer[layer_id].info;
5904 } else {
5905 hifb_error("layer_id = %u is invalid\n", layer_id);
5906 return HI_FAILURE;
5907 }
5908 par = (hifb_par *)(info->par);
5909 refresh_info = &par->refresh_info;
5910 display_info = &par->display_info;
5911
5912 hifb_dbg_info("tde callback blit handle:%x, end handle:%x\n",
5913 refresh_info->disp_buf_info.refresh_handle, tde_finish_handle);
5914
5915 hifb_get_bufmode(par, &buf_mode);
5916
5917 hifb_spin_lock_irqsave(&par->lock, lock_flag);
5918
5919 if ((display_info->rotate_mode == HIFB_ROTATE_90 || display_info->rotate_mode == HIFB_ROTATE_270)) {
5920 /* hifb callback with rotate */
5921 tde_callback_with_rotate(par, buf_mode);
5922 } else {
5923 /* hifb callback without rotate */
5924 tde_callback_without_rotate(par, buf_mode, job_data);
5925 }
5926
5927 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
5928
5929 hi_unused(refresh_info);
5930 hi_unused(tde_finish_handle);
5931 return HI_SUCCESS;
5932 }
5933
5934 #else
5935 static hi_s32 hifb_tde_callback(const hi_void *paraml, const hi_void *paramr)
5936 {
5937 hi_u32 layer_id = 0;
5938 hi_s32 tde_finish_handle;
5939 struct fb_info *info = HI_NULL;
5940 hifb_par *par = HI_NULL;
5941 hifb_refresh_info *refresh_info = HI_NULL;
5942 hifb_display_info *display_info = HI_NULL;
5943 HIFB_LAYER_BUF_E buf_mode;
5944 unsigned long lock_flag;
5945 hifb_tde_callback_param *param = (hifb_tde_callback_param *)paraml;
5946 tde_finish_handle = *(hi_s32 *)paramr;
5947
5948 /* here get a wrong layer_id, make hifb core dump here */
5949 if (param != HI_NULL) {
5950 layer_id = param->layer_id;
5951 } else {
5952 hifb_error("paraml is HI_NULL\n");
5953 return HI_FAILURE;
5954 }
5955
5956 if (layer_id <= g_drv_ops.layer_count) {
5957 info = g_layer[layer_id].info;
5958 } else {
5959 kfree(param);
5960 param = HI_NULL;
5961 hifb_error("layer_id = %u is invalid\n", layer_id);
5962 return HI_FAILURE;
5963 }
5964 par = (hifb_par *)(info->par);
5965 refresh_info = &par->refresh_info;
5966 display_info = &par->display_info;
5967
5968 hifb_dbg_info("tde callback blit handle:%x, end handle:%x\n",
5969 refresh_info->disp_buf_info.refresh_handle, tde_finish_handle);
5970
5971 hifb_get_bufmode(par, &buf_mode);
5972
5973 hifb_spin_lock_irqsave(&par->lock, lock_flag);
5974
5975 if ((display_info->rotate_mode == HIFB_ROTATE_90 || display_info->rotate_mode == HIFB_ROTATE_270)) {
5976 /* hifb callback with rotate */
5977 tde_callback_with_rotate(par, buf_mode);
5978 } else {
5979 /* hifb callback without rotate */
5980 tde_callback_without_rotate(par, buf_mode, paraml, paramr);
5981 }
5982
5983 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
5984
5985 if (param != HI_NULL) {
5986 kfree(param);
5987 param = HI_NULL;
5988 }
5989 return HI_SUCCESS;
5990 }
5991 #endif
5992
5993 static inline hi_bool hifb_is_interlace(hifb_par *par)
5994 {
5995 hifb_osd_data osd_data = {0};
5996 if (par != HI_NULL) {
5997 g_drv_ops.hifb_drv_get_osd_data(par->layer_id, &osd_data);
5998 }
5999 return (osd_data.scan_mode == HIFB_SCANMODE_I);
6000 }
6001
/*
 * Allocate an MMZ buffer of layer_size bytes for a framebuffer layer.
 * Tries the named MMZ pool first and falls back to the default pool on
 * failure. Returns the physical address, or 0 on any error.
 * The size parameter is unused (kept for interface compatibility).
 */
hi_u64 hifb_buf_allocmem(const hi_char *buf_name, hi_u32 size, hi_ulong layer_size, const hi_char *mmz_name)
{
    hi_u64 addr;
    hi_unused(size);
    /* Reject empty requests and anything above 1 GiB (0x40000000). */
    if ((layer_size == 0) || (layer_size > 0x40000000)) {
        return 0;
    }

    addr = cmpi_mmz_malloc((hi_char *)mmz_name, (hi_char *)buf_name, layer_size);
    /* Named pool failed: retry once in the default (anonymous) MMZ pool. */
    if ((addr == MMB_ADDR_INVALID) && (mmz_name != HI_NULL)) {
        addr = cmpi_mmz_malloc(HI_NULL, (hi_char *)buf_name, layer_size);
    }

    if (addr == MMB_ADDR_INVALID) {
        hifb_error("alloc mem failed!\n");
        return 0;
    }

    return addr;
}
6022
/*
 * Switch the layer's refresh buffer mode. When leaving 0-buf mode the
 * stride and format may have been changed by the user, so both are flagged
 * for re-programming on the next interrupt.
 */
static hi_void hifb_set_bufmode(hi_u32 layer_id, HIFB_LAYER_BUF_E layer_buf_mode)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)info->par;
    hifb_refresh_info *refresh_info = &par->refresh_info;

    /* in 0 buf mode ,maybe the stride or fmt will be changed! */
    if ((refresh_info->buf_mode == HIFB_LAYER_BUF_NONE) && (refresh_info->buf_mode != layer_buf_mode)) {
        /* NOTE(review): modifying/mask are updated without par->lock here - confirm callers hold it. */
        par->modifying = HI_TRUE;

        par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_STRIDE;

        par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_FMT;

        par->modifying = HI_FALSE;
    }

    refresh_info->buf_mode = layer_buf_mode;
}
6042
6043 /*
6044 * choose the module to do flicker resiting, TDE or VOU ? the rule is as this ,the module should do flicker resisting
6045 * who has do scaling
6046 */
6047 static hi_void hifb_select_antiflicker_mode(hifb_par *par)
6048 {
6049 hifb_display_info *display_info = HI_NULL;
6050 if (par == HI_NULL) {
6051 return;
6052 }
6053 display_info = &par->display_info;
6054
6055 /* if the usr's configuration is no needed to do flicker resisting, so no needed to do it */
6056 if (display_info->antiflicker_level == HIFB_LAYER_ANTIFLICKER_NONE) {
6057 display_info->antiflicker_mode = HIFB_ANTIFLICKER_NONE;
6058 } else {
6059 /* current standard no needed to do flicker resisting */
6060 if (!display_info->need_antiflicker) {
6061 display_info->antiflicker_mode = HIFB_ANTIFLICKER_NONE;
6062 } else {
6063 /* VO has done scaling , so should do flicker resisting at the same time */
6064 if ((display_info->display_width != display_info->screen_width) ||
6065 (display_info->display_height != display_info->screen_height)) {
6066 display_info->antiflicker_mode = HIFB_ANTIFLICKER_VO;
6067 } else {
6068 display_info->antiflicker_mode = HIFB_ANTIFLICKER_TDE;
6069 }
6070 }
6071 }
6072 }
6073
6074 static hi_void hifb_set_antiflickerlevel(hi_u32 layer_id, HIFB_LAYER_ANTIFLICKER_LEVEL_E antiflicker_level)
6075 {
6076 struct fb_info *info = g_layer[layer_id].info;
6077 hifb_par *par = (hifb_par *)info->par;
6078 hifb_display_info *display_info = &par->display_info;
6079
6080 display_info->antiflicker_level = antiflicker_level;
6081 hifb_select_antiflicker_mode(par);
6082
6083 return;
6084 }
6085
6086 #ifdef CURSOR
6087 /* restore or update cursor backup */
/*
 * Back up the display content that lies under the cursor rectangle into the
 * cursor backup buffer, so it can be restored when the cursor moves.
 * The source is either the user-supplied buffer (0-buf mode) or the working
 * display buffer; in double-buffer modes the idle pair is backed up as well.
 */
static hi_void hifb_cursor_bakup(hi_u32 layer_id)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)info->par;
    hi_s32 ret;
    HIFB_BUFFER_S cursor_buf, display_buf;

    hifb_blit_opt blit_opt = {0};

    (hi_void)memset_s(&blit_opt, sizeof(hifb_blit_opt), 0, sizeof(hifb_blit_opt));

    if (par->refresh_info.buf_mode == HIFB_LAYER_BUF_NONE) {
        /* 0-buf mode: copy from the user's canvas; bail out if none was set. */
        if (par->refresh_info.user_buffer.stCanvas.u64PhyAddr == 0) {
            return;
        }

        ret = memcpy_s(&display_buf.stCanvas, sizeof(HIFB_SURFACE_S), &(par->refresh_info.user_buffer.stCanvas),
                       sizeof(HIFB_SURFACE_S));
        hifb_unequal_eok_return_void(ret);
    } else {
        /* Buffered modes: copy from the working display buffer. */
        display_buf.stCanvas.enFmt = par->color_format;
        display_buf.stCanvas.u32Height = par->display_info.display_height;
        display_buf.stCanvas.u32Width = par->display_info.display_width;
        display_buf.stCanvas.u32Pitch = hifb_get_line_length(info);
        hifb_get_workdispbuf(par, &display_buf.stCanvas.u64PhyAddr);
    }

    ret = memcpy_s(&cursor_buf.stCanvas, sizeof(HIFB_SURFACE_S), &par->cursor_info.cursor.stCursor,
                   sizeof(HIFB_SURFACE_S));
    hifb_unequal_eok_return_void(ret);

    /* The backup is written at the backup buffer's origin ... */
    ret = memcpy_s(&cursor_buf.UpdateRect, sizeof(HIFB_RECT), &(par->cursor_info.rect_in_disp_buf), sizeof(HIFB_RECT));
    hifb_unequal_eok_return_void(ret);
    cursor_buf.UpdateRect.x = 0;
    cursor_buf.UpdateRect.y = 0;
    hifb_get_workcursorbuf(par, &cursor_buf.stCanvas.u64PhyAddr);

    /* ... and read from the cursor's rectangle within the display buffer. */
    ret = memcpy_s(&display_buf.UpdateRect, sizeof(HIFB_RECT), &(par->cursor_info.rect_in_disp_buf), sizeof(HIFB_RECT));
    hifb_unequal_eok_return_void(ret);

    blit_opt.mirror_mode = par->display_info.mirror_mode;

    if (hifb_drv_blit(&display_buf, &cursor_buf, &blit_opt, HI_FALSE, HI_NULL) < 0) {
        hifb_error("blit err! 1\n");
        return;
    }
    if ((par->refresh_info.buf_mode == HIFB_LAYER_BUF_DOUBLE) ||
        (par->refresh_info.buf_mode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
        /* Double-buffer modes: back up the idle buffer pair as well. */
        hifb_get_idledispbuf(par, &display_buf.stCanvas.u64PhyAddr);
        hifb_get_idlecursorbuf(par, &cursor_buf.stCanvas.u64PhyAddr);
        if (hifb_drv_blit(&display_buf, &cursor_buf, &blit_opt, HI_FALSE, HI_NULL) < 0) {
            hifb_error("blit err! 2\n");
            return;
        }
    }
    return;
}
6145
/*
 * Blit the cursor image onto the layer's display buffer(s).
 *
 * cursor_par supplies the cursor canvas, alpha and colorkey; par (the layer
 * identified by layer_id) supplies the destination buffer, mirror mode and
 * the precomputed cursor rectangle (rect_in_disp_buf / pos_in_cursor, see
 * hifb_cursor_calcdispinfo).  In double-buffer modes both the idle and the
 * work display buffers are updated.
 *
 * Returns HI_SUCCESS on success, HI_FAILURE on blit/copy failure or when
 * 0-buffer mode has no user buffer yet.
 */
static hi_s32 cursor_show_blit(hi_u32 layer_id, hifb_par *cursor_par)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = info->par;
    hifb_blit_opt blit_opt = {0};
    HIFB_BUFFER_S cursor_buf;
    HIFB_BUFFER_S display_buf;
    hi_s32 ret;

    (hi_void)memset_s(&blit_opt, sizeof(hifb_blit_opt), 0, sizeof(hifb_blit_opt));

    /* source canvas is the cursor image; blend controls come from the cursor layer */
    cursor_buf.stCanvas = cursor_par->cursor_info.cursor.stCursor;
    blit_opt.alpha = cursor_par->alpha;
    blit_opt.ckey = cursor_par->ckey;
    blit_opt.mirror_mode = par->display_info.mirror_mode;

    if (par->refresh_info.buf_mode == HIFB_LAYER_BUF_NONE) {
        /* 0-buffer mode: draw directly into the user-provided buffer */
        if (par->refresh_info.user_buffer.stCanvas.u64PhyAddr == 0) {
            return HI_FAILURE;
        }
        ret = memcpy_s(&display_buf, sizeof(HIFB_BUFFER_S), &(par->refresh_info.user_buffer), sizeof(HIFB_BUFFER_S));
        hifb_unequal_eok_return(ret);
    } else {
        /* buffered modes: draw into the idle display buffer first */
        display_buf.stCanvas.enFmt = par->color_format;
        display_buf.stCanvas.u32Height = par->display_info.display_height;
        display_buf.stCanvas.u32Width = par->display_info.display_width;
        display_buf.stCanvas.u32Pitch = hifb_get_line_length(info);
        hifb_get_idledispbuf(par, (hi_u64*)&display_buf.stCanvas.u64PhyAddr);
    }

    /* source rect inside the cursor image: skip the clipped-off part (pos_in_cursor) */
    ret = memcpy_s(&cursor_buf.UpdateRect, sizeof(HIFB_RECT), &(par->cursor_info.rect_in_disp_buf), sizeof(HIFB_RECT));
    hifb_unequal_eok_return(ret);
    cursor_buf.UpdateRect.x = par->cursor_info.pos_in_cursor.s32XPos;
    cursor_buf.UpdateRect.y = par->cursor_info.pos_in_cursor.s32YPos;

    /* destination rect in the display buffer */
    ret = memcpy_s(&display_buf.UpdateRect, sizeof(HIFB_RECT), &(par->cursor_info.rect_in_disp_buf), sizeof(HIFB_RECT));
    hifb_unequal_eok_return(ret);

    if (hifb_drv_blit(&cursor_buf, &display_buf, &blit_opt, HI_FALSE, HI_NULL) < 0) {
        hifb_error("blit err! 6\n");
        return HI_FAILURE;
    }

    if ((par->refresh_info.buf_mode == HIFB_LAYER_BUF_DOUBLE) ||
        (par->refresh_info.buf_mode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
        /* double buffering: repeat the blit on the work (displayed) buffer */
        hifb_get_workdispbuf(par, (hi_u64*)&display_buf.stCanvas.u64PhyAddr);
        if (hifb_drv_blit(&cursor_buf, &display_buf, &blit_opt, HI_FALSE, HI_NULL) < 0) {
            hifb_error("blit err!\n");
            return HI_FAILURE;
        }
    }
    return HI_SUCCESS;
}
6199
/*
 * Draw the attached cursor on layer <layer_id>.
 *
 * Sequence: recompute the cursor's clipped display rectangle, back up the
 * pixels it will cover (hifb_cursor_bakup), then blit the cursor image
 * (cursor_show_blit).  When compression is open, the whole display area is
 * marked dirty after giving the TDE time to finish the blit.
 *
 * Returns HI_FAILURE when the layer has no attached/shown cursor, no cursor
 * image is set, or a blit fails; HI_SUCCESS otherwise (including when the
 * cursor layer itself is currently hidden).
 */
static hi_s32 hifb_cursor_show(hi_u32 layer_id)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = info->par;

    struct fb_info *cursor_info = HI_NULL;
    hifb_par *cursor_par = HI_NULL;

    if (!par->cursor_info.attached || !par->show) {
        hifb_dbg_info("Cursor should be attached to layer%u \n", layer_id);
        return HI_FAILURE;
    }

    cursor_info = g_layer[par->cursor_info.attached_cursor_id].info;
    cursor_par = cursor_info->par;

    /* cursor layer hidden: nothing to draw, not an error */
    if (!cursor_par->show) {
        return HI_SUCCESS;
    }

    if (cursor_par->cursor_info.cursor.stCursor.u64PhyAddr == 0) {
        hifb_dbg_info("No cusor img set!\n");
        return HI_FAILURE;
    }

    /* clip the cursor against the display and record rect_in_disp_buf/pos_in_cursor */
    hifb_cursor_calcdispinfo(cursor_par->layer_id, par, &cursor_par->display_info.pos);

    /* save the pixels the cursor is about to cover */
    hifb_cursor_bakup(layer_id);

    /* fill cursor_buf,display_buf,blit_opt, and blit */
    if (cursor_show_blit(layer_id, cursor_par) != HI_SUCCESS) {
        return HI_FAILURE;
    }

    /* NOTE(review): par->show is already non-zero here (checked on entry),
     * so this assignment looks redundant — confirm before removing. */
    par->show = HI_TRUE;

    if (par->compress_info.compress_open) {
        par->compress_info.update_finished = HI_FALSE;
        /* 40 Waiting for TDE to complete the blit */
        schedule_timeout(osal_msecs_to_jiffies(40));
        par->compress_info.update_rect.x = 0;
        par->compress_info.update_rect.y = 0;
        par->compress_info.update_rect.w = par->display_info.display_width;
        par->compress_info.update_rect.h = par->display_info.display_height;
        par->compress_info.update_finished = HI_TRUE;
    }
    return HI_SUCCESS;
}
6248
/*
 * Restore the display-buffer pixels previously saved by hifb_cursor_bakup,
 * i.e. erase the cursor from the layer's display buffer(s).
 *
 * The source is the cursor backup buffer (read from offset (0,0)); the
 * destination is rect_in_disp_buf inside the display buffer.  In
 * double-buffer modes both the idle and the work buffers are restored.
 * cursor_par is accepted for signature symmetry with cursor_show_blit but
 * is not used.
 */
static hi_s32 cursor_hide_blit(hi_u32 layer_id, hifb_par *cursor_par)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = info->par;
    hifb_blit_opt blit_opt = {0};
    HIFB_BUFFER_S cursor_buf, display_buf;
    hi_s32 ret;
    hi_unused(cursor_par);
    (hi_void)memset_s(&blit_opt, sizeof(hifb_blit_opt), 0, sizeof(hifb_blit_opt));

    cursor_buf.stCanvas = par->cursor_info.cursor.stCursor;

    if (par->refresh_info.buf_mode == HIFB_LAYER_BUF_NONE) {
        /* 0-buffer mode: restore directly into the user-provided buffer */
        if (par->refresh_info.user_buffer.stCanvas.u64PhyAddr == 0) {
            hifb_dbg_info("No user buf send to fb in 0 buf mode!\n");
            return HI_FAILURE;
        }
        ret = memcpy_s(&display_buf, sizeof(HIFB_BUFFER_S), &(par->refresh_info.user_buffer), sizeof(HIFB_BUFFER_S));
        hifb_unequal_eok_return(ret);
    } else {
        /* buffered modes: restore the idle display buffer first */
        display_buf.stCanvas.enFmt = par->color_format;
        display_buf.stCanvas.u32Height = par->display_info.display_height;
        display_buf.stCanvas.u32Width = par->display_info.display_width;
        display_buf.stCanvas.u32Pitch = hifb_get_line_length(info);
        hifb_get_idledispbuf(par, (hi_u64*)&display_buf.stCanvas.u64PhyAddr);
    }

    /* read the backup starting at its origin */
    ret = memcpy_s(&cursor_buf.UpdateRect, sizeof(HIFB_RECT), &(par->cursor_info.rect_in_disp_buf), sizeof(HIFB_RECT));
    hifb_unequal_eok_return(ret);
    cursor_buf.UpdateRect.x = 0;
    cursor_buf.UpdateRect.y = 0;

    /* write back to the rect the cursor occupied on screen */
    ret = memcpy_s(&display_buf.UpdateRect, sizeof(HIFB_RECT), &(par->cursor_info.rect_in_disp_buf), sizeof(HIFB_RECT));
    hifb_unequal_eok_return(ret);

    hifb_get_idlecursorbuf(par, (hi_u64*)&cursor_buf.stCanvas.u64PhyAddr);

    blit_opt.mirror_mode = par->display_info.mirror_mode;

    if (hifb_drv_blit(&cursor_buf, &display_buf, &blit_opt, HI_FALSE, HI_NULL) < 0) {
        hifb_error("blit err!\n");
        return HI_FAILURE;
    }
    if ((par->refresh_info.buf_mode == HIFB_LAYER_BUF_DOUBLE) ||
        (par->refresh_info.buf_mode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
        /* double buffering: also restore the work buffer from the work backup */
        hifb_get_workdispbuf(par, (hi_u64*)&display_buf.stCanvas.u64PhyAddr);
        hifb_get_workcursorbuf(par, (hi_u64*)&cursor_buf.stCanvas.u64PhyAddr);

        if (hifb_drv_blit(&cursor_buf, &display_buf, &blit_opt, HI_FALSE, HI_NULL) < 0) {
            hifb_error("blit err!\n");
            return HI_FAILURE;
        }
    }
    return HI_SUCCESS;
}
6304
6305 static hi_s32 hifb_cursor_hide(hi_u32 layer_id)
6306 {
6307 struct fb_info *info = g_layer[layer_id].info;
6308 hifb_par *par = info->par;
6309 struct fb_info *cursor_info = HI_NULL;
6310 hifb_par *cursor_par = HI_NULL;
6311
6312 if (!par->cursor_info.attached) {
6313 hifb_dbg_info("Cursor should be attached to layer%u \n", layer_id);
6314 return HI_FAILURE;
6315 }
6316
6317 cursor_info = g_layer[par->cursor_info.attached_cursor_id].info;
6318 cursor_par = (hifb_par *)cursor_info->par;
6319
6320 if (!cursor_par->show || !par->show) {
6321 return HI_SUCCESS;
6322 }
6323
6324 /* fill cursor_buf,display_buf,blit_opt, and blit */
6325 if (cursor_hide_blit(layer_id, cursor_par) != HI_SUCCESS) {
6326 return HI_FAILURE;
6327 }
6328
6329 if (par->compress_info.compress_open) {
6330 par->compress_info.update_finished = HI_FALSE;
6331 /* 40 Waiting for TDE to complete the blit */
6332 schedule_timeout(osal_msecs_to_jiffies(40));
6333 par->compress_info.update_rect.x = 0;
6334 par->compress_info.update_rect.y = 0;
6335 par->compress_info.update_rect.w = par->display_info.display_width;
6336 par->compress_info.update_rect.h = par->display_info.display_height;
6337 par->compress_info.update_finished = HI_TRUE;
6338 }
6339 return HI_SUCCESS;
6340 }
6341
6342 static hi_s32 hifb_cursor_attach(hi_u32 cursor_id, hi_u32 layer_id)
6343 {
6344 hi_u32 cnt;
6345
6346 struct fb_info *info = g_layer[layer_id].info;
6347 hifb_par *par = (hifb_par *)info->par;
6348
6349 struct fb_info *cursorinfo = g_layer[cursor_id].info;
6350 hifb_par *cursor_par = (hifb_par *)cursorinfo->par;
6351
6352 unsigned long lock_flag;
6353
6354 cnt = atomic_read(&par->ref_count);
6355 if (cnt == 0) {
6356 hifb_error("failed to attach layer! The layer :%u is not opened!\n", layer_id);
6357 return HI_FAILURE;
6358 }
6359
6360 if (hifb_is_clutfmt(par->color_format)) {
6361 hifb_error("failed to attach layer! The layer format is clut not supported!\n");
6362 return HI_FAILURE;
6363 }
6364
6365 if (cursor_par->cursor_info.cursor.stCursor.u64PhyAddr == 0) {
6366 hifb_error("failed to attach layer! The cursor info is not set yet!\n");
6367 return HI_FAILURE;
6368 }
6369
6370 if (par->cursor_info.attached) {
6371 if (par->cursor_info.attached_cursor_id == cursor_id) {
6372 return HI_SUCCESS;
6373 } else {
6374 hifb_error("The layer has attached another cursor, you should detach it first!\n");
6375 return HI_FAILURE;
6376 }
6377 }
6378
6379 par->cursor_info.attached = 1;
6380 par->cursor_info.attached_cursor_id = cursor_id;
6381
6382 if ((par->cursor_info.cursor.stCursor.u32Height > par->display_info.display_height) ||
6383 (par->cursor_info.cursor.stCursor.u32Width > par->display_info.display_width)) {
6384 return HI_FAILURE;
6385 }
6386
6387 par->cursor_info.cursor.stCursor.u32Height = cursor_par->cursor_info.cursor.stCursor.u32Height;
6388 par->cursor_info.cursor.stCursor.u32Width = cursor_par->cursor_info.cursor.stCursor.u32Width;
6389 par->cursor_info.cursor.stHotPos = cursor_par->cursor_info.cursor.stHotPos;
6390
6391 /* when cursor attach to layer, we use the position calculate before */
6392 hifb_spin_lock_irqsave(&par->lock, lock_flag);
6393 if (hifb_cursor_show(layer_id) != HI_SUCCESS) {
6394 hifb_error("hifb_cursor_show HI_FAILURE\r\n");
6395 }
6396 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
6397 return HI_SUCCESS;
6398 }
6399
6400 static hi_s32 hifb_cursor_detach(hi_u32 layer_id)
6401 {
6402 struct fb_info *info = g_layer[layer_id].info;
6403 hifb_par *par = (hifb_par *)info->par;
6404 if (par == HI_NULL) {
6405 return HI_FAILURE;
6406 }
6407 if (!par->cursor_info.attached) {
6408 return HI_SUCCESS;
6409 }
6410
6411 if (hifb_cursor_hide(layer_id) != HI_SUCCESS) {
6412 hifb_error("hifb_cursor_hide HI_FAILURE\r\n");
6413 }
6414
6415 par->cursor_info.attached = HI_FALSE;
6416 return HI_SUCCESS;
6417 }
6418
6419 static hi_void cursor_calc_display_rect(hifb_par *par, const HIFB_POINT_S* cursor_new_pos, HIFB_RECT *rect)
6420 {
6421 hi_tde_rect src_rect = {0};
6422 hi_tde_rect dst_rect = {0};
6423 hi_tde_rect in_src_rect = {0};
6424 hi_tde_rect in_dst_rect = {0};
6425 if ((par->refresh_info.user_buffer.stCanvas.u32Height && par->refresh_info.user_buffer.stCanvas.u32Width &&
6426 (par->refresh_info.user_buffer.stCanvas.u32Height != par->display_info.display_height)) ||
6427 (par->refresh_info.user_buffer.stCanvas.u32Width && par->refresh_info.user_buffer.stCanvas.u32Height &&
6428 (par->refresh_info.user_buffer.stCanvas.u32Width != par->display_info.display_width))) {
6429 src_rect.width = par->refresh_info.user_buffer.stCanvas.u32Width;
6430 src_rect.height = par->refresh_info.user_buffer.stCanvas.u32Height;
6431 dst_rect.width = par->display_info.display_width;
6432 dst_rect.height = par->display_info.display_height;
6433 in_src_rect.pos_x = cursor_new_pos->s32XPos;
6434 in_src_rect.pos_y = cursor_new_pos->s32YPos;
6435 tde_cal_scale_rect_hifb(&src_rect, &dst_rect, &in_src_rect, &in_dst_rect);
6436 rect->x = in_dst_rect.pos_x;
6437 rect->y = in_dst_rect.pos_y;
6438 } else {
6439 rect->x = cursor_new_pos->s32XPos;
6440 rect->y = cursor_new_pos->s32YPos;
6441 }
6442
6443 if ((par->display_info.display_width < 1) || (par->display_info.display_height < 1) {
6444 return;
6445 }
6446
6447 if (rect->x > (hi_s32)par->display_info.display_width) {
6448 rect->x = (hi_s32)(par->display_info.display_width - 1);
6449 }
6450
6451 if (rect->y > (hi_s32)par->display_info.display_height) {
6452 rect->y = (hi_s32)(par->display_info.display_height - 1);
6453 }
6454 return;
6455 }
6456
6457 /*
6458 * calculate cusor display position info such as the start position of copying,
6459 * display buffer position, width of copying.
6460 */
6461 static hi_void hifb_cursor_calcdispinfo(hi_u32 cursor_id, hifb_par *par, const HIFB_POINT_S *cursor_new_pos)
6462 {
6463 struct fb_info *info = HI_NULL;
6464 hifb_par *cursor_par = HI_NULL;
6465 HIFB_POINT_S pos = {0};
6466 HIFB_RECT rect = {0};
6467
6468 info = g_layer[cursor_id].info;
6469 cursor_par = (hifb_par *)info->par;
6470
6471 if ((par == HI_NULL) || (cursor_new_pos == HI_NULL)) {
6472 return;
6473 }
6474
6475 /* calculate in display rect:(x,y) */
6476 cursor_calc_display_rect(par, cursor_new_pos, &rect);
6477 rect.x -= cursor_par->cursor_info.cursor.stHotPos.s32XPos;
6478 rect.y -= cursor_par->cursor_info.cursor.stHotPos.s32YPos;
6479
6480 /* calculate in display rect:(w,h) */
6481 rect.w = (hi_s32)cursor_par->cursor_info.cursor.stCursor.u32Width;
6482 rect.h = (hi_s32)cursor_par->cursor_info.cursor.stCursor.u32Height;
6483 if ((hi_s32)rect.x < 0) {
6484 rect.x = 0;
6485 pos.s32XPos = cursor_par->cursor_info.cursor.stHotPos.s32XPos;
6486 rect.w -= pos.s32XPos;
6487 }
6488
6489 if ((hi_s32)rect.y < 0) {
6490 rect.y = 0;
6491 pos.s32YPos = cursor_par->cursor_info.cursor.stHotPos.s32YPos;
6492 rect.h -= pos.s32YPos;
6493 }
6494
6495 if (rect.x + rect.w > (hi_s32)par->display_info.display_width) {
6496 rect.w = (hi_s32)(par->display_info.display_width - rect.x);
6497 }
6498
6499 if (rect.y + rect.h > (hi_s32)par->display_info.display_height) {
6500 rect.h = (hi_s32)(par->display_info.display_height - rect.y);
6501 }
6502
6503 par->cursor_info.pos_in_cursor = pos;
6504 par->cursor_info.rect_in_disp_buf = rect;
6505
6506 return;
6507 }
6508
6509 static hi_s32 hifb_cursor_changepos(hi_u32 cursor_id, HIFB_POINT_S pos)
6510 {
6511 struct fb_info *cursorinfo = g_layer[cursor_id].info;
6512 hifb_par *cursor_par = (hifb_par *)cursorinfo->par;
6513 hi_size max_screensize = {0};
6514 hi_u32 i;
6515
6516 if ((pos.s32XPos < 0) || (pos.s32YPos < 0)) {
6517 hifb_error("the cursor pos(%d,%d) out of range !\n", pos.s32XPos, pos.s32YPos);
6518 return HI_FAILURE;
6519 }
6520
6521 /* pos no change */
6522 if ((pos.s32XPos == cursor_par->display_info.pos.s32XPos) &&
6523 (pos.s32YPos == cursor_par->display_info.pos.s32YPos)) {
6524 return HI_SUCCESS;
6525 }
6526 hifb_get_maxscreensize(cursor_par, &max_screensize.width, &max_screensize.height);
6527 if (pos.x_pos > max_screensize.width - hifb_min_width(cursor_id)) {
6528 hifb_warnng("the sum of x_pos(%d) and min_width(%d) larger than Vodev screen width(%d)!\n",
6529 pos.x_pos, hifb_min_width(cursor_id), max_screensize.width);
6530 pos.x_pos = max_screensize.width - hifb_min_width(cursor_id);
6531 }
6532
6533 if (pos.y_pos > max_screensize.height - hifb_min_height(cursor_id)) {
6534 hifb_warnng("the sum of y_pos(%d) and min_height(%d) larger than Vodev screen height(%d)!\n",
6535 pos.y_pos, hifb_min_height(cursor_id), max_screensize.height);
6536 pos.y_pos = max_screensize.height - hifb_min_height(cursor_id);
6537 }
6538
6539 cursor_par->display_info.pos.s32XPos = pos.s32XPos;
6540 cursor_par->display_info.pos.s32YPos = pos.s32YPos;
6541
6542 if (!cursor_par->show) {
6543 return HI_FAILURE;
6544 }
6545
6546 /* process all layers attached to cursor */
6547 for (i = 0; i < g_drv_ops.layer_count; i++) {
6548 struct fb_info *info = g_layer[i].info;
6549 hifb_par *par = (hifb_par *)info->par;
6550
6551 if (is_cursor_layer(i)) {
6552 continue;
6553 }
6554
6555 if (!par->cursor_info.attached) {
6556 continue;
6557 }
6558 if (hifb_cursor_hide(i) != HI_SUCCESS) {
6559 hifb_error("hifb_cursor_hide HI_FAILURE\r\n");
6560 }
6561 if (hifb_cursor_show(i) != HI_SUCCESS) {
6562 hifb_error("hifb_cursor_show HI_FAILURE\r\n");
6563 }
6564 }
6565
6566 return HI_SUCCESS;
6567 }
6568
/*
 * Show or hide the cursor on every non-cursor layer.
 *
 * cursor_par: private data of the cursor layer (must have a cursor image set).
 * show:       HI_TRUE to draw the cursor, HI_FALSE to erase it.
 *
 * Returns HI_FAILURE on bad arguments or missing cursor image, HI_SUCCESS
 * otherwise (per-layer show/hide failures are ignored).
 */
hi_s32 hifb_cursor_changestate(hifb_par *cursor_par, hi_bool show)
{
    hi_u32 i;
    if (cursor_par == HI_NULL) {
        return HI_FAILURE;
    }

    if (!cursor_par->cursor_info.cursor.stCursor.u64PhyAddr) {
        hifb_error("The cursor image addr is HI_NULL!\n");
        return HI_FAILURE;
    }

    /* no change to state , return */
    if (show == cursor_par->show) {
        return HI_SUCCESS;
    }

    for (i = 0; i < g_drv_ops.layer_count; i++) {
        if (is_cursor_layer(i)) {
            continue;
        }

        if (show) {
            /* must be set before the blit: hifb_cursor_show() skips hidden cursors */
            cursor_par->show = HI_TRUE;
            hifb_cursor_show(i);
        } else {
            /* note: show flag is left set during hide so hifb_cursor_hide() can erase */
            hifb_cursor_hide(i);
        }
    }

    cursor_par->show = show;

    return HI_SUCCESS;
}
6603
6604 static hi_s32 cursor_info_check(HIFB_CURSOR_S* cursor)
6605 {
6606 if (cursor->stCursor.u32Width == 0 || cursor->stCursor.u32Height == 0) {
6607 hifb_error("cursor's width or height shouldn't be 0!\n");
6608 return HI_FAILURE;
6609 }
6610
6611 if (cursor->stCursor.u32Pitch == 0) {
6612 hifb_error("cursor's pitch shouldn't be 0!\n");
6613 return HI_FAILURE;
6614 }
6615
6616 if (cursor->stCursor.enFmt == HIFB_FMT_BUTT) {
6617 hifb_error("unknown color format!\n");
6618 return HI_FAILURE;
6619 }
6620 return HI_SUCCESS;
6621 }
6622
/*
 * Store a new cursor image/hotspot into the cursor layer's private data.
 *
 * A zero physical address clears the stored cursor and returns HI_SUCCESS.
 * Otherwise the surface is validated, the layer's color format and (on
 * Linux) the fb_var bitfields are updated, the size is capped to the
 * maximum cursor dimensions, the hotspot is range-checked, and finally the
 * whole HIFB_CURSOR_S is copied into cursor_par.
 *
 * Note: cursor may be modified in place (size capping).
 */
static hi_s32 hifb_cursor_putinfo(hifb_par *cursor_par, HIFB_CURSOR_S *cursor)
{
    hi_s32 ret;

    if (cursor->stCursor.u64PhyAddr == 0) {
        /* address 0 means "unset the cursor image"; treated as success */
        hifb_error("cursor image addr is equal to 0!\n");
        cursor_par->cursor_info.cursor.stCursor.u64PhyAddr = 0;
        return HI_SUCCESS;
    }
    /* cursor info check:width,height,pitch,format */
    if (cursor_info_check(cursor) != HI_SUCCESS) {
        return HI_FAILURE;
    }

    cursor_par->color_format = cursor->stCursor.enFmt;
    /* the format must index into the ARGB bit-field table below */
    if (cursor_par->color_format >= (sizeof(g_argb_bit_field) / sizeof(g_argb_bit_field[0]))) {
        hifb_error("unknown color fmt!\n");
        return HI_FAILURE;
    }
#ifndef __HuaweiLite__
    /* propagate the per-channel bit layout into the fbdev var screeninfo */
    ret = memcpy_s(&g_layer[cursor_par->layer_id].info->var.red, sizeof(struct fb_bitfield),
                   &g_argb_bit_field[cursor_par->color_format].red, sizeof(struct fb_bitfield));
    hifb_unequal_eok_return(ret);
    ret = memcpy_s(&g_layer[cursor_par->layer_id].info->var.green, sizeof(struct fb_bitfield),
                   &g_argb_bit_field[cursor_par->color_format].green, sizeof(struct fb_bitfield));
    hifb_unequal_eok_return(ret);
    ret = memcpy_s(&g_layer[cursor_par->layer_id].info->var.blue, sizeof(struct fb_bitfield),
                   &g_argb_bit_field[cursor_par->color_format].blue, sizeof(struct fb_bitfield));
    hifb_unequal_eok_return(ret);
    ret = memcpy_s(&g_layer[cursor_par->layer_id].info->var.transp, sizeof(struct fb_bitfield),
                   &g_argb_bit_field[cursor_par->color_format].transp, sizeof(struct fb_bitfield));
    hifb_unequal_eok_return(ret);
#endif
    /* change hotx or hoty will result in cursor position change */
    if (cursor->stCursor.u32Height > HIFB_MAX_CURSOR_HEIGHT) {
        cursor->stCursor.u32Height = HIFB_MAX_CURSOR_HEIGHT;
    }

    if (cursor->stCursor.u32Width > HIFB_MAX_CURSOR_WIDTH) {
        cursor->stCursor.u32Width = HIFB_MAX_CURSOR_WIDTH;
    }

    /* hotspot must lie inside the (possibly capped) cursor image */
    if ((cursor->stHotPos.s32XPos < 0) || (cursor->stHotPos.s32XPos > (hi_s32)cursor->stCursor.u32Width) ||
        (cursor->stHotPos.s32YPos < 0) || (cursor->stHotPos.s32YPos > (hi_s32)cursor->stCursor.u32Height)) {
        hifb_error("hotpos err!\n");
        return HI_FAILURE;
    }

    if ((cursor->stHotPos.s32XPos != cursor_par->cursor_info.cursor.stHotPos.s32XPos) ||
        (cursor->stHotPos.s32YPos != cursor_par->cursor_info.cursor.stHotPos.s32YPos)) {
        cursor_par->cursor_info.cursor.stHotPos.s32XPos = cursor->stHotPos.s32XPos;
        cursor_par->cursor_info.cursor.stHotPos.s32YPos = cursor->stHotPos.s32YPos;
    }

    ret = memcpy_s(&(cursor_par->cursor_info.cursor), sizeof(HIFB_CURSOR_S), cursor, sizeof(HIFB_CURSOR_S));
    hifb_unequal_eok_return(ret);
    return HI_SUCCESS;
}
6681 #endif
6682
6683 /* Does the chip support scaling */
6684 static hi_bool hifb_check_imagezoomenable(hi_u32 layer_id, HIFB_RECT *in_rect, HIFB_RECT *out_rect)
6685 {
6686 hi_bool is_layer_support_zoom_out = HI_TRUE;
6687 hi_bool need_zoom = HI_FALSE;
6688
6689 if ((in_rect == HI_NULL) || (out_rect == HI_NULL)) {
6690 return HI_FALSE;
6691 }
6692 if (!g_drv_ops.capability[layer_id].bVoScale) {
6693 return HI_FALSE;
6694 }
6695
6696 if (g_drv_ops.hifb_drv_is_layer_support_zoom_out) {
6697 is_layer_support_zoom_out = g_drv_ops.hifb_drv_is_layer_support_zoom_out(layer_id);
6698 }
6699
6700 /* The chip does not support zoomout, and the current image output size is smaller than the input size. */
6701 if ((is_layer_support_zoom_out == HI_FALSE) && ((out_rect->w < in_rect->w) || (out_rect->h < in_rect->h))) {
6702 hifb_error("HIFB layer%u not support zoomout, please check the g_display and screen size!\n", layer_id);
6703 return HI_FALSE;
6704 }
6705
6706 /* The chip zoomin ratio exceeds the maximum allowed by the chip, and the error is returned. */
6707 if ((out_rect->w > (in_rect->w * HIFB_MAX_ZOOMIN)) || (out_rect->h > (in_rect->h * HIFB_MAX_ZOOMIN))) {
6708 hifb_error("HIFB layer%u in_size(%d, %d) and out_size(%d, %d) do out of ZoomRatio[1, %d]!!\n", layer_id,
6709 in_rect->w, in_rect->h, out_rect->w, out_rect->h, VPSS_MAX_ZOOMIN);
6710 return HI_FALSE;
6711 }
6712
6713 need_zoom = ((out_rect->w != in_rect->w) || (out_rect->h != in_rect->h));
6714
6715 if (need_zoom && (in_rect->w > HIFB_LINE_BUF)) {
6716 hifb_error("HIFB layer%u in width: %u is bigger than %d, will not zoom in!!\n", layer_id, in_rect->w,
6717 HIFB_LINE_BUF);
6718 return HI_FALSE;
6719 }
6720
6721 return HI_TRUE;
6722 }
6723
/*
 * Configure decompression (DCMP) for the layer according to the per-frame
 * compression flag carried in the refresh info, enabling dynamic switching
 * between compressed and non-compressed display.
 */
hi_void hifb_dcmp_config(hi_u32 layer_id)
{
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)(info->par);
    hifb_refresh_info *refresh_info = &par->refresh_info;
    hi_bool dcmp_state_en = HI_FALSE;

    /*
     * In order to support compressed and non-compressed dynamic switching, you cannot use
     * pstCompressInfo->bCompressOpen to determine whether to open compression.
     * To use the compression information that follows the frame, and whether or not
     * to flip to decide whether to switch compression or non-compression
     */
    if (refresh_info->disp_buf_info.compress) {
        /* See hifb_drv_enable_dcmp */
        g_drv_ops.hifb_drv_enable_dcmp(layer_id, HI_TRUE);
        /* See hifb_drv_get_dcmp_enable_state */
        g_drv_ops.hifb_drv_get_dcmp_enable_state(layer_id, &dcmp_state_en);
        /* only program the DCMP registers once the hardware reports it enabled */
        if (dcmp_state_en) {
            hifb_set_dcmp_info(par);
        }
    } else {
        /*
         * Set the address and stride to ensure that the compression will g_display correctly when switched to
         * non-compressed
         */
        g_drv_ops.hifb_drv_set_layer_addr(layer_id, refresh_info->screen_addr);
        if ((refresh_info->buf_mode == HIFB_LAYER_BUF_NONE) && refresh_info->user_buffer.stCanvas.u64PhyAddr) {
            /* 0-buffer mode with a user canvas: use the canvas pitch */
            g_drv_ops.hifb_drv_set_layer_stride(layer_id, refresh_info->user_buffer.stCanvas.u32Pitch);
        } else {
            g_drv_ops.hifb_drv_set_layer_stride(layer_id, hifb_get_line_length(info));
        }

        g_drv_ops.hifb_drv_enable_dcmp(layer_id, HI_FALSE);
        g_drv_ops.hifb_drv_enable_wbc(layer_id, HI_FALSE);
    }
}
6761
6762 /* Callback function for VO vertical timing interrupt */
6763 static hi_s32 callback_get_osd_data(hifb_par *par, hi_u32 layer_id)
6764 {
6765 hifb_osd_data layer_data = {0};
6766 hifb_display_info *display_info = HI_NULL;
6767 hifb_refresh_info *refresh_info = HI_NULL;
6768
6769 display_info = &par->display_info;
6770 refresh_info = &par->refresh_info;
6771
6772 if (g_drv_ops.hifb_drv_get_osd_data(par->layer_id, &layer_data) != HI_SUCCESS) {
6773 hifb_error("failed to get layer%d's osd data!\n", par->layer_id);
6774 return HI_FAILURE;
6775 }
6776 display_info->max_screen_width = layer_data.screen_width;
6777 display_info->max_screen_height = layer_data.screen_height;
6778
6779 if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_ALPHA) {
6780 g_drv_ops.hifb_drv_set_layer_alpha(layer_id, par->alpha);
6781 par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_ALPHA;
6782 }
6783
6784 if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_COLORKEY) {
6785 g_drv_ops.hifb_drv_set_layer_key_mask(layer_id, &par->ckey);
6786 par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_COLORKEY;
6787 }
6788
6789 if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_FMT) {
6790 if ((refresh_info->buf_mode == HIFB_LAYER_BUF_NONE) &&
6791 refresh_info->user_buffer.stCanvas.u64PhyAddr) {
6792 g_drv_ops.hifb_drv_set_layer_data_fmt(layer_id, refresh_info->user_buffer.stCanvas.enFmt);
6793 } else {
6794 g_drv_ops.hifb_drv_set_layer_data_fmt(layer_id, par->color_format);
6795 }
6796 par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_FMT;
6797 }
6798 return HI_SUCCESS;
6799 }
6800
6801 static hi_void callback_modify_dynamic_range(hifb_par *par, hi_u32 layer_id)
6802 {
6803 hifb_display_info *display_info = HI_NULL;
6804 hifb_refresh_info *refresh_info = HI_NULL;
6805 struct fb_info *info = HI_NULL;
6806 HIFB_DEFLICKER_S deflicker;
6807
6808 display_info = &par->display_info;
6809 refresh_info = &par->refresh_info;
6810 info = g_layer[layer_id].info;
6811 /* NEW modify graphic dynamic range */
6812 if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_DYNAMICRANGE) {
6813 /* Modified to HDR10 */
6814 if (display_info->dynamic_range == HIFB_DYNAMIC_RANGE_HDR10) {
6815 /* hifb_drv_enable_ghdr */
6816 if (g_drv_ops.hifb_drv_enable_ghdr) g_drv_ops.hifb_drv_enable_ghdr(layer_id, HI_TRUE);
6817 } else if (display_info->dynamic_range == HIFB_DYNAMIC_RANGE_SDR8 ||
6818 display_info->dynamic_range == HIFB_DYNAMIC_RANGE_SDR10) {
6819 /* See hifb_drv_enable_ghdr */
6820 if (g_drv_ops.hifb_drv_enable_ghdr) g_drv_ops.hifb_drv_enable_ghdr(layer_id, HI_FALSE);
6821 }
6822 /*
6823 * After the modification is completed, the mask is cleared, just like the interrupt
6824 * processing mechanism. When the interrupt is processed, the interrupt is cleared.
6825 */
6826 par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_DYNAMICRANGE;
6827 }
6828
6829 if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_BMUL) {
6830 if (g_drv_ops.hifb_drv_set_pre_mul(layer_id, display_info->premul) == HI_FAILURE) {
6831 if (par->display_info.premul == HI_TRUE) {
6832 par->display_info.premul = HI_FALSE;
6833 } else {
6834 par->display_info.premul = HI_TRUE;
6835 }
6836 }
6837 par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_BMUL;
6838 }
6839
6840 if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_STRIDE) {
6841 if ((refresh_info->buf_mode == HIFB_LAYER_BUF_NONE) &&
6842 refresh_info->user_buffer.stCanvas.u64PhyAddr) {
6843 g_drv_ops.hifb_drv_set_layer_stride(layer_id, refresh_info->user_buffer.stCanvas.u32Pitch);
6844 } else {
6845 g_drv_ops.hifb_drv_set_layer_stride(layer_id, hifb_get_line_length(info));
6846 }
6847 par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_STRIDE;
6848 }
6849
6850 if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_ANTIFLICKERLEVEL) {
6851 deflicker.pu8HDfCoef = par->h_dfcoef;
6852 deflicker.pu8VDfCoef = par->v_dfcoef;
6853 deflicker.u32HDfLevel = par->h_dflevel;
6854 deflicker.u32VDfLevel = par->v_dflevel;
6855 g_drv_ops.hifb_drv_set_layer_de_flicker(layer_id, deflicker);
6856 par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_ANTIFLICKERLEVEL;
6857 }
6858 return;
6859 }
6860
6861 static hi_void callback_init_rect(HIFB_RECT *in_rect, HIFB_RECT *out_rect,
6862 hifb_display_info *display_info)
6863 {
6864 in_rect->x = display_info->pos.s32XPos;
6865 in_rect->y = display_info->pos.s32YPos;
6866 in_rect->w = (hi_s32)display_info->display_width;
6867 in_rect->h = (hi_s32)display_info->display_height;
6868
6869 out_rect->x = display_info->pos.s32XPos;
6870 out_rect->y = display_info->pos.s32YPos;
6871 out_rect->w = display_info->screen_width;
6872 out_rect->h = display_info->screen_height;
6873
6874 if (out_rect->x + out_rect->w > (hi_s32)display_info->max_screen_width) {
6875 out_rect->w = (hi_s32)(display_info->max_screen_width - out_rect->x);
6876 }
6877
6878 if (out_rect->y + out_rect->h > (hi_s32)display_info->max_screen_height) {
6879 out_rect->h = (hi_s32)(display_info->max_screen_height - out_rect->y);
6880 }
6881
6882 /*
6883 * after cut off, the input rectangle keep rate with output rectangle
6884 * Prevent the occurrence of the divide-by-zero error
6885 */
6886 if ((display_info->screen_width != 0) && (display_info->screen_height != 0)) {
6887 in_rect->w = in_rect->w * out_rect->w / (hi_s32)display_info->screen_width;
6888 in_rect->h = in_rect->h * out_rect->h / (hi_s32)display_info->screen_height;
6889 }
6890 return;
6891 }
6892
/*
 * Handle pending input/output rectangle changes flagged in
 * par->param_modify_mask.  Builds the in/out rects, decides whether the VO
 * scaler may be used (hifb_check_imagezoomenable) and programs the layer
 * rect, source image resolution and zoom-module state accordingly, then
 * clears both flags.
 */
static hi_void callback_modify_sizes(hifb_par *par, hi_u32 layer_id)
{
    hifb_display_info *display_info = HI_NULL;
    HIFB_RECT in_rect = {0};
    HIFB_RECT out_rect = {0};
    hi_bool zme_en = HI_FALSE;

    display_info = &par->display_info;
    /* Handles requests to modify input and output sizes. */
    if ((par->param_modify_mask & HIFB_LAYER_PARAMODIFY_INRECT) ||
        (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_OUTRECT)) {
        /* if VO can't support scaling, we set the screen's H/W as the same as display */
        if (!g_drv_ops.capability[layer_id].bVoScale) {
            display_info->screen_width = display_info->display_width;
            display_info->screen_height = display_info->display_height;
        }

        /* for init rect */
        callback_init_rect(&in_rect, &out_rect, display_info);

        if (hifb_check_imagezoomenable(layer_id, &in_rect, &out_rect) == HI_TRUE) {
            /*
             * If you want to go through the zoom module, you need to correct it to 2 alignment,
             * otherwise it will appear abnormal.
             */
            in_rect.w = HI_ALIGN_DOWN(in_rect.w, 2); /* 2 size data */
            in_rect.h = HI_ALIGN_DOWN(in_rect.h, 2); /* 2 size data */

            if (g_drv_ops.hifb_drv_set_layer_rect) {
                g_drv_ops.hifb_drv_set_layer_rect(layer_id, &in_rect, &out_rect);
            }

            if (g_drv_ops.hifb_drv_set_layer_src_image_reso) {
                g_drv_ops.hifb_drv_set_layer_src_image_reso(layer_id, &in_rect);
            }

            /* See out width and height not equal to in width and height,then enable zme. */
            zme_en = (out_rect.w != in_rect.w || out_rect.h != in_rect.h);
            if (g_drv_ops.hifb_drv_enable_zme) {
                g_drv_ops.hifb_drv_enable_zme(layer_id, &in_rect, &out_rect, zme_en);
            }
        } else {
            /*
             * If scaling is not enabled,
             * the input size is used as the output size and the zoom module is closed.
             */
            if (g_drv_ops.hifb_drv_set_layer_rect) {
                g_drv_ops.hifb_drv_set_layer_rect(layer_id, &in_rect, &in_rect);
            }

            if (g_drv_ops.hifb_drv_set_layer_src_image_reso) {
                g_drv_ops.hifb_drv_set_layer_src_image_reso(layer_id, &in_rect);
            }

            if (g_drv_ops.hifb_drv_enable_zme) {
                g_drv_ops.hifb_drv_enable_zme(layer_id, &in_rect, &out_rect, HI_FALSE);
            }
        }

        /* Processing completed, clear mask */
        par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_INRECT;
        par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_OUTRECT;
    }
    return;
}
6958
/*
 * Reprogram the layer's pixel format and stride when the corresponding
 * flags are set in par->param_modify_mask.  In 0-buffer mode with a user
 * canvas, the canvas format/pitch take precedence over the layer's own.
 *
 * NOTE(review): unlike callback_get_osd_data / callback_modify_dynamic_range,
 * this handler does NOT clear the FMT/STRIDE mask bits — confirm whether
 * they are intentionally left set for another consumer.
 */
static hi_void callback_set_fmt_and_stride(hifb_par *par, hi_u32 layer_id)
{
    struct fb_info *info = HI_NULL;
    hifb_refresh_info *refresh_info = HI_NULL;

    info = g_layer[layer_id].info;
    refresh_info = &par->refresh_info;
    if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_FMT) {
        if ((refresh_info->buf_mode == HIFB_LAYER_BUF_NONE) &&
            refresh_info->user_buffer.stCanvas.u64PhyAddr) {
            g_drv_ops.hifb_drv_set_layer_data_fmt(layer_id, refresh_info->user_buffer.stCanvas.enFmt);
        } else {
            g_drv_ops.hifb_drv_set_layer_data_fmt(layer_id, par->color_format);
        }
    }

    if (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_STRIDE) {
        if ((refresh_info->buf_mode == HIFB_LAYER_BUF_NONE) &&
            refresh_info->user_buffer.stCanvas.u64PhyAddr) {
            g_drv_ops.hifb_drv_set_layer_stride(layer_id, refresh_info->user_buffer.stCanvas.u32Pitch);
        } else {
            g_drv_ops.hifb_drv_set_layer_stride(layer_id, hifb_get_line_length(info));
        }
    }
    return;
}
6985
static hi_void callback_update_refresh_info(hifb_par *par, hi_u32 layer_id)
{
    /*
     * Double-buffer flip handler, executed in VO interrupt context with
     * par->lock already held by the caller (vo_callback_process chain).
     * When HIFB_LAYER_BUF_DOUBLE mode has a pending flip, toggle the
     * work/free buffers, program the hardware display address, and update
     * the pan offsets so user space sees the currently displayed buffer.
     */
    hifb_refresh_info *refresh_info = HI_NULL;
    struct fb_info *info = HI_NULL;
    volatile hifb_compress_info *compress_info = HI_NULL;
    hi_u32 index, buf_size;

    refresh_info = &par->refresh_info;
    index = refresh_info->disp_buf_info.index_for_int;
    info = g_layer[layer_id].info;
    /* Size of one display buffer (line_length * yres) rounded via HIFB_ALIGNMENT. */
    buf_size = ((hifb_get_line_length(info) * hifb_get_yres(info)) + HIFB_ALIGNMENT) & (~HIFB_ALIGNMENT);
    compress_info = &par->compress_info;
    if ((refresh_info->buf_mode == HIFB_LAYER_BUF_DOUBLE) &&
        (refresh_info->disp_buf_info.need_flip == HI_TRUE)) {
        /* Work buf to change to free buf. Take free buf to display (index toggles 0 <-> 1). */
        index = 1 - index;
        refresh_info->disp_buf_info.index_for_int = index;
        /*
         * The display address is set to the address of the free buf,
         * which is set to the screen buf address differently from 0buf.
         * gb_screen_addr is placed half a buffer past the screen address.
         */
        g_drv_ops.hifb_drv_set_layer_addr(layer_id, refresh_info->disp_buf_info.phy_addr[index]);
        refresh_info->screen_addr = refresh_info->disp_buf_info.phy_addr[index];
        refresh_info->gb_screen_addr = refresh_info->screen_addr + buf_size / 2; /* 2 alg data */
        compress_info->layer_addr_update = HI_TRUE;
#ifdef __HuaweiLite__
        /*
         * Derive the pan offsets from the new buffer address.
         * NOTE(review): oinfo.fbmem is treated here as the physical base the
         * buffer addresses are relative to — confirm against the allocation.
         */
        if (info->oinfo.stride != 0) {
            info->oinfo.sarea.y = (par->refresh_info.disp_buf_info.phy_addr[index] -
                (hi_u32)(uintptr_t)info->oinfo.fbmem) / info->oinfo.stride;
            if ((info->oinfo.bpp >> 3) != 0) { /* 3 is 8 bits */
                info->oinfo.sarea.x = ((par->refresh_info.disp_buf_info.phy_addr[index] -
                    (hi_u32)(uintptr_t)info->oinfo.fbmem) % info->oinfo.stride) /
                    (info->oinfo.bpp >> 3); /* 3 is 8 bits */
            }
        }
#else
        /* Derive var.xoffset / var.yoffset from the new buffer address. */
        info->var.yoffset = osal_div_u64((refresh_info->disp_buf_info.phy_addr[index] -
            hifb_get_smem_start(info)), hifb_get_line_length(info));
        if ((hifb_get_line_length(info) != 0) && ((hifb_get_bits_per_pixel(info)>> 3) != 0)) { /* 3 is 8 bits */
            info->var.xoffset = ((hi_ulong)(refresh_info->disp_buf_info.phy_addr[index] -
                hifb_get_smem_start(info)) % hifb_get_line_length(info)) /
                (hifb_get_bits_per_pixel(info) >> 3); /* 3 is 8 bits */
        }
#endif
        /* Mark the flip done so the refresh path knows the buffer swapped. */
        refresh_info->disp_buf_info.fliped = HI_TRUE;
        refresh_info->disp_buf_info.need_flip = HI_FALSE;
        refresh_info->disp_buf_info.int_pic_num++;
    }
    return;
}
7036
static hi_void callback_modify_address(hifb_par *par, hi_u32 layer_id)
{
    /*
     * Apply a pending display-address change (HIFB_LAYER_PARAMODIFY_DISPLAYADDR)
     * from interrupt context: push format/stride, re-apply the layer rects when
     * needed, program the new screen address, then recompute which of the two
     * display buffers the address falls in before delegating flip handling to
     * callback_update_refresh_info(). Caller holds par->lock.
     */
    hifb_refresh_info *refresh_info = HI_NULL;
    hifb_display_info *display_info = HI_NULL;
    volatile hifb_compress_info *compress_info = HI_NULL;
    struct fb_info *info = HI_NULL;
    HIFB_RECT in_rect = {0};
    HIFB_RECT out_rect = {0};

    refresh_info = &par->refresh_info;
    display_info = &par->display_info;
    compress_info = &par->compress_info;
    info = g_layer[layer_id].info;
    /* No pending address change: only the buffer-flip bookkeeping is needed. */
    if (!(par->param_modify_mask & HIFB_LAYER_PARAMODIFY_DISPLAYADDR)) {
        /* according to the index, decide which buf set to the screen */
        callback_update_refresh_info(par, layer_id);
        return;
    }
    /* SetLayerDataFmt and SetLayerStride */
    callback_set_fmt_and_stride(par, layer_id);

    if ((par->param_modify_mask & HIFB_LAYER_PARAMODIFY_INRECT) ||
        (par->param_modify_mask & HIFB_LAYER_PARAMODIFY_OUTRECT)) {
        /* if VO can't support scaling, we set the screen's H/W as the same as display */
        if (!g_drv_ops.capability[layer_id].bVoScale) {
            display_info->screen_width = display_info->display_width;
            display_info->screen_height = display_info->display_height;
        }

        /* for init rect */
        callback_init_rect(&in_rect, &out_rect, display_info);

        if (hifb_check_imagezoomenable(layer_id, &in_rect, &out_rect) == HI_TRUE) {
            g_drv_ops.hifb_drv_set_layer_rect(layer_id, &in_rect, &out_rect);
            if (g_drv_ops.hifb_drv_set_layer_src_image_reso) {
                g_drv_ops.hifb_drv_set_layer_src_image_reso(layer_id, &in_rect);
            }
        }
    }

    g_drv_ops.hifb_drv_set_layer_addr(layer_id, refresh_info->screen_addr);

    par->param_modify_mask &= ~HIFB_LAYER_PARAMODIFY_DISPLAYADDR;
    compress_info->layer_addr_update = HI_TRUE;

    /*
     * With two distinct display buffers, work out which buffer the new screen
     * address points into so index_for_int stays consistent with the hardware.
     * NOTE(review): the second range uses phy_addr[0] + smem_len as the upper
     * bound — assumes buffer 1 follows buffer 0 inside the same smem region;
     * confirm against the buffer allocation layout.
     */
    if ((refresh_info->disp_buf_info.phy_addr[0] != refresh_info->disp_buf_info.phy_addr[1]) &&
        (refresh_info->disp_buf_info.phy_addr[0])) {
        if (refresh_info->screen_addr >= refresh_info->disp_buf_info.phy_addr[0] &&
            refresh_info->screen_addr < refresh_info->disp_buf_info.phy_addr[1]) {
            refresh_info->disp_buf_info.index_for_int = 0;
        } else if ((refresh_info->screen_addr >= refresh_info->disp_buf_info.phy_addr[1]) &&
                   (refresh_info->screen_addr < (refresh_info->disp_buf_info.phy_addr[0] +
                   hifb_get_smem_len(info)))) {
            refresh_info->disp_buf_info.index_for_int = 1;
        }
    }
    /* according to the index, decide which buf set to the screen */
    callback_update_refresh_info(par, layer_id);
    return;
}
7098
7099 static hi_s32 vo_callback_process(hifb_par *par, hi_u32 layer_id, hi_bool *is_continue)
7100 {
7101 unsigned long lock_flag;
7102
7103 hifb_spin_lock_irqsave(&par->lock, lock_flag);
7104 /* If not displayed, close the graphics layer and exit */
7105 if (par->show == HI_FALSE) {
7106 if (g_drv_ops.hifb_drv_enable_layer) {
7107 g_drv_ops.hifb_drv_enable_layer(layer_id, HI_FALSE);
7108 }
7109 if (g_drv_ops.hifb_drv_updata_layer_reg) {
7110 g_drv_ops.hifb_drv_updata_layer_reg(layer_id);
7111 }
7112 *is_continue = HI_FALSE;
7113 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
7114 return HI_SUCCESS;
7115 }
7116 if (layer_id == 0 || layer_id == 1) {
7117 #ifndef __HuaweiLite__
7118 hifb_vsync_notify(); /* vsync signal */
7119 #endif
7120 }
7121
7122 /* Non-modified status, modified, can be modified */
7123 if (!par->modifying) {
7124 /*
7125 * 1.Get osd data
7126 * 2.Set layer alpha
7127 * 3.Set layer keymask
7128 * 4.Set layer data fmt
7129 */
7130 if (callback_get_osd_data(par, layer_id) != HI_SUCCESS) {
7131 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
7132 return HI_FAILURE;
7133 }
7134
7135 /*
7136 * 1.NEW modify graphic dynamic range
7137 * 2.Set premul
7138 * 3.Set layer stride
7139 * 4.Set layer deflicker
7140 */
7141 callback_modify_dynamic_range(par, layer_id);
7142
7143 /*
7144 * Handles requests to modify input and output sizes
7145 * Set layer rect
7146 * Set layer srcImage reso
7147 * if enable ZME or not
7148 */
7149 callback_modify_sizes(par, layer_id);
7150
7151 /*
7152 * The display address is refreshed and the display address is modified
7153 * set layer data fmt
7154 * set layer stride
7155 * set layer rect
7156 * set layer addr
7157 */
7158 callback_modify_address(par, layer_id);
7159
7160 /* hifb_drv_enable_layer */
7161 g_drv_ops.hifb_drv_enable_layer(layer_id, par->show);
7162 }
7163 /* Decompression configuration */
7164 hifb_dcmp_config(layer_id);
7165
7166 /* hifb_drv_updata_layer_reg */
7167 g_drv_ops.hifb_drv_updata_layer_reg(layer_id);
7168 *is_continue = HI_TRUE;
7169 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
7170 return HI_SUCCESS;
7171 }
7172
7173 #ifndef CONFIG_HI_VO_FB_SEPARATE
7174 static hi_s32 vo_callback_unseparate(hifb_par *par, const hi_void *paramr, hi_bool *is_continue)
7175 {
7176 hi_u32 vtthd_flag;
7177 hi_u32 frm_start_flag;
7178 hi_u32 int_status;
7179
7180 if (paramr == HI_NULL) {
7181 hifb_error("HI_NULL pointer !\n");
7182 return HI_FAILURE;
7183 }
7184 int_status = *(hi_u32 *)paramr;
7185 vtthd_flag = HIFB_INTMSK_HD0_VTTHD1;
7186 frm_start_flag = HIFB_INTMSK_HD0_VTTHD3;
7187
7188 if (int_status & frm_start_flag) {
7189 par->refresh_info.do_refresh_job = HI_TRUE;
7190 wake_up_interruptible(&(par->do_refresh_job));
7191 *is_continue = HI_FALSE;
7192 return HI_SUCCESS;
7193 } else if (int_status & vtthd_flag) {
7194 par->refresh_info.do_refresh_job = HI_FALSE;
7195 }
7196 *is_continue = HI_TRUE;
7197 return HI_SUCCESS;
7198 }
7199 #endif
7200
7201 /* Callback function for VO vertical timing interrupt */
7202 static hi_s32 hifb_vo_callback(const hi_void *paraml, const hi_void *paramr)
7203 {
7204 hi_u32 *layer_id = HI_NULL;
7205 struct fb_info *info = HI_NULL;
7206 hifb_par *par = HI_NULL;
7207 hi_bool is_continue = HI_FALSE;
7208
7209 if (paraml == HI_NULL) {
7210 return HI_FAILURE;
7211 }
7212 layer_id = (hi_u32 *)paraml;
7213 if (*layer_id >= HIFB_MAX_LAYER_NUM) {
7214 return HI_FAILURE;
7215 }
7216 info = g_layer[*layer_id].info;
7217 if (info == HI_NULL) {
7218 return HI_FAILURE;
7219 }
7220 if (info->par == HI_NULL) {
7221 return HI_FAILURE;
7222 }
7223
7224 par = (hifb_par *)(info->par);
7225 #ifndef CONFIG_HI_VO_FB_SEPARATE
7226 if (vo_callback_unseparate(par, paramr, &is_continue) != HI_SUCCESS) {
7227 return HI_FAILURE;
7228 }
7229 if (is_continue != HI_TRUE) {
7230 return HI_SUCCESS;
7231 }
7232 #else
7233 hi_unused(paramr);
7234 #endif
7235
7236 if (vo_callback_process(par, *layer_id, &is_continue) != HI_SUCCESS) {
7237 return HI_FAILURE;
7238 }
7239 if (is_continue != HI_TRUE) {
7240 return HI_SUCCESS;
7241 }
7242
7243 /* Field blanking mark */
7244 par->vblflag = 1;
7245 wake_up_interruptible(&(par->vbl_event));
7246
7247 return HI_SUCCESS;
7248 }
7249
7250 /* Compression completion interrupt callback function */
7251 static hi_s32 hifb_wbc_callback(const hi_void *paraml, const hi_void *paramr)
7252 {
7253 hi_u32 *layer_id = HI_NULL;
7254 struct fb_info *info = HI_NULL;
7255 hifb_par *par = HI_NULL;
7256 unsigned long lockflag;
7257 hi_bool wbc_stop = HI_FALSE;
7258 hi_unused(paramr);
7259 if (paraml == HI_NULL) {
7260 return HI_FAILURE;
7261 }
7262 layer_id = (hi_u32 *)paraml;
7263 if (*layer_id >= HIFB_MAX_LAYER_NUM) {
7264 return HI_FAILURE;
7265 }
7266 info = g_layer[*layer_id].info;
7267 par = (hifb_par *)(info->par);
7268
7269 hifb_spin_lock_irqsave(&par->lock, lockflag);
7270
7271 g_drv_ops.hifb_drv_get_wbc_stop_state(*layer_id, &wbc_stop);
7272 if (wbc_stop) {
7273 hifb_dbg_info("layer %u wbc autostop!\n", *layer_id);
7274 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
7275 return HI_SUCCESS;
7276 }
7277
7278 /* close compression */
7279 g_drv_ops.hifb_drv_enable_wbc(*layer_id, HI_FALSE);
7280
7281 /* Switch to compressed g_display */
7282 hifb_set_dcmp_info(par);
7283 g_drv_ops.hifb_drv_enable_dcmp(*layer_id, HI_TRUE);
7284 g_drv_ops.hifb_drv_updata_layer_reg(*layer_id);
7285
7286 /* Clear compression area and compression flag */
7287 (hi_void)memset_s((void *)&par->compress_info.compress_rect, sizeof(HIFB_RECT), 0, sizeof(HIFB_RECT));
7288
7289 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
7290
7291 return HI_SUCCESS;
7292 }
7293
static hi_s32 hifb_refresh_0buf(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf)
{
    /*
     * Refresh in HIFB_LAYER_BUF_NONE mode: the user's canvas buffer is
     * displayed directly (no driver-side working buffers). Records the new
     * screen address / pitch / format / dynamic range under par->lock so the
     * VO interrupt picks the changes up, then optionally waits for the
     * register update when FB_ACTIVATE_VBL is requested.
     */
    struct fb_info *info = g_layer[layer_id].info;
    hifb_par *par = (hifb_par *)info->par;
    hifb_refresh_info *refresh_info = &par->refresh_info;
    volatile hifb_compress_info *compress_info = &par->compress_info;
    unsigned long lockflag;
    /* Adopt the canvas size as the display size. */
    hi_s32 ret = hifb_disp_setdispsize(layer_id, canvas_buf->stCanvas.u32Width, canvas_buf->stCanvas.u32Height);
    if (ret != HI_SUCCESS) {
        return HI_FAILURE;
    }

    hifb_spin_lock_irqsave(&par->lock, lockflag);

    /* Compression needs driver-owned double buffers, impossible in 0-buf mode. */
    if (compress_info->compress_open) {
        hifb_error("only HIFB_LAYER_BUF_DOUBLE or HIFB_LAYER_BUF_DOUBLE_IMMEDIATE mode support compress!\n");
        hifb_spin_unlock_irqrestore(&par->lock, lockflag);
        return HI_FAILURE;
    }

    refresh_info->disp_buf_info.compress = HI_FALSE;

    par->modifying = HI_TRUE;
    /* modify by wxl : if change flush type between 2buffer and 0 buffer, the addr couldn't be changed. */
    par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_DISPLAYADDR;
    /*
     * The graphic address is taken from the canvas of the user data and
     * filled in the screen address of the refresh information.
     */
    refresh_info->screen_addr = canvas_buf->stCanvas.u64PhyAddr;

    par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_STRIDE;
    refresh_info->user_buffer.stCanvas.u32Pitch = canvas_buf->stCanvas.u32Pitch;

    par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_FMT;
    refresh_info->user_buffer.stCanvas.enFmt = canvas_buf->stCanvas.enFmt;

    /* NEW */
    par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_DYNAMICRANGE;
    refresh_info->user_buffer.stCanvas.enDynamicRange = canvas_buf->stCanvas.enDynamicRange;

    /* NEW feature: zme */
    par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_OUTRECT;

    hifb_spin_unlock_irqrestore(&par->lock, lockflag);
    /*
     * In hifb_disp_setdispsize, it is possible that kmalloc
     * allocates memory in a non-atomic manner, so the lock must be released first.
     */
    hifb_spin_lock_irqsave(&par->lock, lockflag);
    par->modifying = HI_FALSE;
    ret = memcpy_s(&(refresh_info->user_buffer), sizeof(HIFB_BUFFER_S), canvas_buf, sizeof(HIFB_BUFFER_S));
    hifb_unlock_unequal_eok_return(ret, &par->lock, lockflag);
    /*
     * NOTE(review): compress_open was checked HI_FALSE above; this branch can
     * only run if the state changed while the lock was briefly dropped —
     * confirm whether that is intended.
     */
    if (par->compress_info.compress_open) {
        ret = memcpy_s((void *)&par->compress_info.update_rect, sizeof(HIFB_RECT), &canvas_buf->UpdateRect,
            sizeof(HIFB_RECT));
        hifb_unlock_unequal_eok_return(ret, &par->lock, lockflag);
        par->compress_info.update_finished = HI_TRUE;
    }

    par->vblflag = 0;
    hifb_spin_unlock_irqrestore(&par->lock, lockflag);

    /* if the flag "FB_ACTIVATE_VBL" has been set, we should wait for register update finish */
#ifdef __HuaweiLite__
    if (!in_atomic()) {
#else
    if (!in_atomic() && (info->var.activate & FB_ACTIVATE_VBL)) {
#endif
        hifb_wait_regconfig_work(layer_id);
    }

    return HI_SUCCESS;
}
7368
7369 static hi_void refresh_is_cursor_overlay(hifb_par *par, HIFB_BUFFER_S *canvas_buf, hi_bool *is_overlay)
7370 {
7371 hifb_par *cursor_par = HI_NULL;
7372 hifb_cursor_info *cursor_info = &par->cursor_info;
7373 HIFB_RECT rc_cursor;
7374 cursor_par = (hifb_par *)g_layer[par->cursor_info.attached_cursor_id].info->par;
7375 if (cursor_info->attached != 0) {
7376 rc_cursor.x = cursor_par->display_info.pos.s32XPos -
7377 cursor_par->cursor_info.cursor.stHotPos.s32XPos;
7378 rc_cursor.y = cursor_par->display_info.pos.s32YPos -
7379 cursor_par->cursor_info.cursor.stHotPos.s32YPos;
7380 rc_cursor.w = cursor_par->cursor_info.cursor.stCursor.u32Width;
7381 rc_cursor.h = cursor_par->cursor_info.cursor.stCursor.u32Height;
7382
7383 /* check the cusor overlay with refresh area */
7384 if (cursor_par->show &&
7385 (((rc_cursor.x >= canvas_buf->UpdateRect.x &&
7386 rc_cursor.x <= canvas_buf->UpdateRect.x + canvas_buf->UpdateRect.w)) ||
7387 (rc_cursor.x < canvas_buf->UpdateRect.x &&
7388 rc_cursor.x + rc_cursor.w >= canvas_buf->UpdateRect.x))) {
7389 if (((rc_cursor.y >= canvas_buf->UpdateRect.y &&
7390 rc_cursor.y <= canvas_buf->UpdateRect.y + canvas_buf->UpdateRect.h)) ||
7391 (rc_cursor.y < canvas_buf->UpdateRect.y &&
7392 rc_cursor.y + rc_cursor.h >= canvas_buf->UpdateRect.y)) {
7393 *is_overlay = HI_TRUE;
7394 }
7395 }
7396 }
7397 return;
7398 }
7399
7400 /*
7401 * Name : hifb_refresh
7402 * Desc : It is refreshed according to the canvas information and the layer's buf refresh mode.
7403 * It is called indirectly when setting the layer attr.
7404 * See : references hifb_refresh_again,hifb_onrefresh
7405 * calls hifb_refresh_0buf,hifb_refresh_1buf,hifb_refresh_2buf
7406 */
7407 static hi_s32 hifb_refresh(hi_u32 layer_id, HIFB_BUFFER_S *canvas_buf, HIFB_LAYER_BUF_E buf_mode)
7408 {
7409 hi_s32 ret = HI_FAILURE;
7410 hifb_par *par = (hifb_par *)g_layer[layer_id].info->par;
7411 hi_bool is_overlay = HI_FALSE; /* is the cusor overlay with refresh area */
7412
7413 if (canvas_buf == HI_NULL) {
7414 return HI_FAILURE;
7415 }
7416
7417 /*
7418 * For cursor layer
7419 * you can quary whether cursor attach to a certain layer for general layer
7420 * when bAttched not zero, we should check whether the cusor overlay with refresh area or not
7421 */
7422 refresh_is_cursor_overlay(par, canvas_buf, &is_overlay);
7423
7424 #ifdef CURSOR
7425 if (is_overlay && is_soft_cursor()) {
7426 hifb_cursor_hide(layer_id);
7427 }
7428 #endif
7429 switch (buf_mode) {
7430 case HIFB_LAYER_BUF_DOUBLE:
7431 ret = hifb_refresh_2buf(layer_id, canvas_buf);
7432 break;
7433 case HIFB_LAYER_BUF_ONE:
7434 ret = hifb_refresh_1buf(layer_id, canvas_buf);
7435 break;
7436 case HIFB_LAYER_BUF_NONE:
7437 ret = hifb_refresh_0buf(layer_id, canvas_buf);
7438 break;
7439 case HIFB_LAYER_BUF_DOUBLE_IMMEDIATE:
7440 ret = hifb_refresh_2buf_immediate_display(layer_id, canvas_buf);
7441 break;
7442 default:
7443 break;
7444 }
7445 #ifdef CURSOR
7446 if (is_overlay && is_soft_cursor()) {
7447 hifb_cursor_show(layer_id);
7448 }
7449 #endif
7450 return ret;
7451 }
7452
7453 #ifdef __HuaweiLite__
/* Release a hifb_info allocated by hifb_alloc(). Safe to call with NULL:
 * free(NULL) is a defined no-op, so no explicit guard is needed. */
static void hifb_free(struct hifb_info *info)
{
    free(info);
}
7460
7461 static struct hifb_info *hifb_alloc(size_t size)
7462 {
7463 hi_s32 hifb_info_len = sizeof(struct hifb_info);
7464 struct hifb_info *info = HI_NULL;
7465
7466 hifb_info_len = ALIGN(hifb_info_len, sizeof(unsigned long));
7467
7468 info = (struct hifb_info *)malloc(hifb_info_len + size);
7469 if (info == HI_NULL) {
7470 return HI_NULL;
7471 }
7472 (hi_void)memset_s(info, (hifb_info_len + size), 0, (hifb_info_len + size));
7473
7474 info->par = (void *)((char *)info + hifb_info_len);
7475
7476 return info;
7477 }
7478 #endif
7479
7480 #ifdef __HuaweiLite__
static hi_s32 alloc_new_canvas_buffer(struct fb_info *info, HIFB_LAYER_INFO_S *layer_info)
{
    /*
     * HuaweiLite variant: allocate a canvas buffer of
     * u32CanvasWidth x u32CanvasHeight from the layer's MMZ pool, zero it,
     * and record its geometry in par->canvas_sur.
     */
    hifb_par *par = (hifb_par *)info->par;
    HIFB_SURFACE_S *canvas_surface = &par->canvas_sur;
    hi_u32 layer_size;
    hi_u32 pitch;
    hi_char *buf = HI_NULL;
    hi_char name[16]; /* 16 max length */
    struct fb_info *info_temp = HI_NULL;
    /*
     * Applying fb_info without private data is not for registration,
     * but for the convenience of passing to hifb_completely to empty the memory.
     */
    info_temp = hifb_alloc(0);
    if (info_temp == HI_NULL) {
        return HI_FAILURE;
    }

    /* Row pitch rounded up to 16-byte alignment. */
    pitch = (((layer_info->u32CanvasWidth * hifb_get_bits_per_pixel(info)) >> 3) + 15) >> 4; /* 3 4 15 alg data */
    pitch = pitch << 4; /* 4 alg data */

    layer_size = pitch * layer_info->u32CanvasHeight;
    /* alloc new buffer */
    if (snprintf_s(name, sizeof(name), 13, "hifb_canvas%01u", par->layer_id) < 0) { /* 13:for char length */
        hifb_error("%s:%d:snprintf_s failure\n", __FUNCTION__, __LINE__);
        hifb_free(info_temp);
        return HI_FAILURE;
    }
    /* 16 max length */
    canvas_surface->u64PhyAddr = hifb_buf_allocmem(name, 16, layer_size, hifb_get_layer_mmz_names(par->layer_id));
    if (canvas_surface->u64PhyAddr == 0) {
        hifb_error("alloc canvas buffer no mem");
        hifb_free(info_temp);
        return HI_FAILURE;
    }

    buf = (hi_char *)hifb_buf_map(canvas_surface->u64PhyAddr, layer_size);
    if (buf == HI_NULL) {
        hifb_error("map canvas buffer failed!\n");
        hifb_buf_freemem(canvas_surface->u64PhyAddr);
        hifb_free(info_temp);
        return HI_FAILURE;
    }
    /* hifb_completely is faster than memset_s */
    /*
     * NOTE(review): the physical address is cast to a pointer and cleared
     * directly, while the mapping obtained in "buf" is left untouched —
     * presumably valid only with a flat phys==virt layout on LiteOS; confirm.
     */
    info_temp->oinfo.fbmem = (hi_void *)(uintptr_t)canvas_surface->u64PhyAddr;
    (hi_void)memset_s(info_temp->oinfo.fbmem, layer_size, 0x0, layer_size);

    hifb_buf_ummap(buf);

    hifb_dbg_info("alloc new memory for canvas buffer success\n");
    /* Publish the new canvas geometry; format follows the layer's current one. */
    canvas_surface->u32Width = layer_info->u32CanvasWidth;
    canvas_surface->u32Height = layer_info->u32CanvasHeight;
    canvas_surface->u32Pitch = pitch;
    par->canvas_sur.enFmt = info->vinfo.fmt;
    hifb_free(info_temp);

    return HI_SUCCESS;
}
7540 #else
static hi_s32 alloc_new_canvas_buffer(struct fb_info *info, HIFB_LAYER_INFO_S *layer_info)
{
    /*
     * Linux variant: allocate a canvas buffer of
     * u32CanvasWidth x u32CanvasHeight from the layer's MMZ pool, clear it
     * through a throwaway fb_info handed to hifb_completely, and record its
     * geometry in par->canvas_sur.
     */
    hifb_par *par = (hifb_par *)info->par;
    HIFB_SURFACE_S *canvas_surface = &par->canvas_sur;
    hi_u32 layer_size;
    hi_u32 pitch;
    hi_char *buf = HI_NULL;
    hi_char name[16]; /* 16 max length */
    struct fb_info *info_temp = HI_NULL;
    /*
     * Applying fb_info without private data is not for registration,
     * but for the convenience of passing to hifb_completely to empty the memory.
     */
    info_temp = framebuffer_alloc(0, HI_NULL);
    if (info_temp == HI_NULL) {
        return HI_FAILURE;
    }

    /* Row pitch rounded up to 16-byte alignment. */
    pitch = (((layer_info->u32CanvasWidth * hifb_get_bits_per_pixel(info)) >> 3) + 15) >> 4; /* 3 4 15 alg data */
    pitch = pitch << 4; /* 4 alg data */

    layer_size = pitch * layer_info->u32CanvasHeight;
    /* alloc new buffer */
    if (snprintf_s(name, sizeof(name), 13, "hifb_canvas%01u", par->layer_id) < 0) { /* 13:for char length */
        hifb_error("%s:%d:snprintf_s failure\n", __FUNCTION__, __LINE__);
        framebuffer_release(info_temp);
        return HI_FAILURE;
    }
    /* 16 max length */
    canvas_surface->u64PhyAddr = hifb_buf_allocmem(name, 16, layer_size, hifb_get_layer_mmz_names(par->layer_id));
    if (canvas_surface->u64PhyAddr == 0) {
        hifb_error("alloc canvas buffer no mem");
        framebuffer_release(info_temp);
        return HI_FAILURE;
    }

    buf = (hi_char *)hifb_buf_map(canvas_surface->u64PhyAddr, layer_size);
    if (buf == HI_NULL) {
        hifb_error("map canvas buffer failed!\n");
        hifb_buf_freemem(canvas_surface->u64PhyAddr);
        framebuffer_release(info_temp);
        return HI_FAILURE;
    }
    /* hifb_completely is faster than memset_s */
    info_temp->fix.smem_start = canvas_surface->u64PhyAddr;
    info_temp->screen_base = buf;
    /* Clearing failure is logged but not fatal: the buffer is still usable. */
    if (hifb_completely(info_temp, 0, layer_size) != HI_SUCCESS) {
        hifb_error("hifb_memset failed!\n");
    }
    hifb_buf_ummap(buf);

    hifb_dbg_info("alloc new memory for canvas buffer success\n");
    /* Publish the new canvas geometry; format derived from the var bitfields. */
    canvas_surface->u32Width = layer_info->u32CanvasWidth;
    canvas_surface->u32Height = layer_info->u32CanvasHeight;
    canvas_surface->u32Pitch = pitch;
    canvas_surface->enFmt = hifb_getfmtbyargb(&info->var.red, &info->var.green, &info->var.blue,
                                              &info->var.transp, info->var.bits_per_pixel);
    framebuffer_release(info_temp);
    return HI_SUCCESS;
}
7602 #endif
7603
7604 static hi_s32 hifb_alloccanbuf(struct fb_info *info, HIFB_LAYER_INFO_S *layer_info)
7605 {
7606 hifb_par *par = HI_NULL;
7607 HIFB_SURFACE_S *canvas_surface = HI_NULL;
7608
7609 if ((info == HI_NULL) || (layer_info == HI_NULL)) {
7610 return HI_FAILURE;
7611 }
7612 par = (hifb_par *)info->par;
7613 canvas_surface = &par->canvas_sur;
7614
7615 if (!(layer_info->u32Mask & HIFB_LAYERMASK_CANVASSIZE)) {
7616 return HI_SUCCESS;
7617 }
7618
7619 /* if with old canvas buffer */
7620 if (canvas_surface->u64PhyAddr != 0) {
7621 /* if old is the same with new , then return, else free the old buffer */
7622 if ((layer_info->u32CanvasWidth == canvas_surface->u32Width) &&
7623 (layer_info->u32CanvasHeight == canvas_surface->u32Height)) {
7624 hifb_dbg_info("mem size is the same , no need alloc new memory");
7625 return HI_SUCCESS;
7626 }
7627
7628 /* free new old buffer */
7629 hifb_dbg_info("free old canvas buffer\n");
7630 hifb_freeccanbuf(par);
7631 }
7632 if (layer_info->u32CanvasWidth > HIFB_4K_DEF_WIDTH || layer_info->u32CanvasHeight > HIFB_4K_DEF_HEIGHT) {
7633 hifb_dbg_info("unsupported too large w(%d) and h(%d)\n",
7634 layer_info->u32CanvasWidth, layer_info->u32CanvasHeight);
7635 return HI_FAILURE;
7636 }
7637
7638 /* new canvas buffer */
7639 if (alloc_new_canvas_buffer(info, layer_info) != HI_SUCCESS) {
7640 return HI_FAILURE;
7641 }
7642 return HI_SUCCESS;
7643 }
7644
7645 static hi_s32 onrefresh_check_param(hifb_par* par, HIFB_BUFFER_S *canvas_buf, HIFB_LAYER_BUF_E buf_mode)
7646 {
7647 hi_unused(par);
7648 if (canvas_buf->stCanvas.enFmt >= HIFB_FMT_BUTT) {
7649 return HI_FAILURE;
7650 }
7651
7652 /* Check the legality of physical address and size */
7653 if (hifb_check_phyaddr(canvas_buf) == HI_FAILURE) {
7654 hifb_error("mmz phy addr 0x%x invalid.\n", canvas_buf->stCanvas.enFmt);
7655 return HI_FAILURE;
7656 }
7657
7658 if (buf_mode == HIFB_LAYER_BUF_BUTT) {
7659 hifb_error("doesn't support FBIO_REFRESH operation when refresh mode is HI_FB_LAYER_BUF_BUTT!\n");
7660 return HI_FAILURE;
7661 }
7662
7663 if ((canvas_buf->UpdateRect.x >= (hi_s32)canvas_buf->stCanvas.u32Width) ||
7664 (canvas_buf->UpdateRect.y >= (hi_s32)canvas_buf->stCanvas.u32Height) ||
7665 (canvas_buf->UpdateRect.w <= 0) || (canvas_buf->UpdateRect.h <= 0)) {
7666 hifb_error("rect error: update rect:(%d,%d,%d,%d), canvas range:(%d,%d)\n",
7667 canvas_buf->UpdateRect.x, canvas_buf->UpdateRect.y,
7668 canvas_buf->UpdateRect.w, canvas_buf->UpdateRect.h,
7669 canvas_buf->stCanvas.u32Width, canvas_buf->stCanvas.u32Height);
7670 return HI_FAILURE;
7671 }
7672
7673 return HI_SUCCESS;
7674 }
7675
static hi_s32 onrefresh_get_canvas_buf(hifb_par *par, hi_void __user *argp, HIFB_BUFFER_S *canvas_buf)
{
    /*
     * Copy the HIFB_BUFFER_S descriptor from user space into canvas_buf and
     * validate it against the layer's current buffering mode: clamp the
     * update rectangle to the canvas, and check format/alignment constraints
     * (hardware path for 0-buffer mode, TDE path otherwise).
     * Returns HI_SUCCESS, HI_FAILURE, -EFAULT (bad user pointer) or -EINVAL
     * (unsupported pixel format).
     */
    unsigned long lock_flag;
    HIFB_LAYER_BUF_E buf_mode;
    if (osal_copy_from_user(canvas_buf, argp, sizeof(HIFB_BUFFER_S))) {
        return -EFAULT;
    }

    hifb_get_bufmode(par, &buf_mode);
    /*
     * check canvas format
     * check canvas phyaddr
     * check buf_mode
     * check canvas update rect legality
     */
    if (onrefresh_check_param(par, canvas_buf, buf_mode) != HI_SUCCESS) {
        return HI_FAILURE;
    }

    /* Clamp the update rectangle so it stays inside the canvas. */
    if (canvas_buf->UpdateRect.x + canvas_buf->UpdateRect.w > (hi_s32)canvas_buf->stCanvas.u32Width) {
        canvas_buf->UpdateRect.w = canvas_buf->stCanvas.u32Width - canvas_buf->UpdateRect.x;
    }

    if (canvas_buf->UpdateRect.y + canvas_buf->UpdateRect.h > (hi_s32)canvas_buf->stCanvas.u32Height) {
        canvas_buf->UpdateRect.h = canvas_buf->stCanvas.u32Height - canvas_buf->UpdateRect.y;
    }
    if (buf_mode == HIFB_LAYER_BUF_NONE) {
        /* Check if the format of the canvas supported or not by HiFB */
        if ((par->layer_id >= HIFB_MAX_LAYER_NUM) || (canvas_buf->stCanvas.enFmt >= HIFB_FMT_BUTT)) {
            return HI_FAILURE;
        }

        if (!g_drv_ops.capability[par->layer_id].bColFmt[canvas_buf->stCanvas.enFmt]) {
            hifb_error("Unsupported PIXEL FORMAT!\n");
            return -EINVAL;
        }
        /*
         * there's a limit from hardware that the start address of screen buf
         * should be 16byte aligned.
         */
        hifb_spin_lock_irqsave(&par->lock, lock_flag);
        if ((canvas_buf->stCanvas.u64PhyAddr & 0xf) || (canvas_buf->stCanvas.u32Pitch & 0xf)) { /* 0xf mask */
            hifb_error("addr 0x%llx or pitch: 0x%x is not 16 bytes align !\n",
                       canvas_buf->stCanvas.u64PhyAddr, canvas_buf->stCanvas.u32Pitch);
            hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
            return HI_FAILURE;
        }
        hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
    } else {
        /*
         * Check if the format of the canvas and the format of
         * HiFB belong to the category that TDE can support
         */
        if ((!hifb_tde_is_support_fmt(canvas_buf->stCanvas.enFmt)) ||
            (!hifb_tde_is_support_fmt(par->color_format))) {
            hifb_error("Unsupported PIXEL FORMAT!CanvasFmt:%d,par->enColFmt:%d\n", canvas_buf->stCanvas.enFmt,
                       par->color_format);
            return -EINVAL;
        }
    }
    return HI_SUCCESS;
}
7739
7740 static hi_s32 hifb_onrefresh(hifb_par* par, hi_void __user *argp)
7741 {
7742 hi_s32 ret;
7743 HIFB_BUFFER_S canvas_buf;
7744 HIFB_LAYER_BUF_E buf_mode;
7745 hifb_display_info *diplay_info = &par->display_info;
7746
7747 if (is_cursor_layer(par->layer_id) && is_soft_cursor()) {
7748 hifb_warning("you shouldn't refresh cursor layer!\n");
7749 return HI_SUCCESS;
7750 }
7751
7752 if (argp == HI_NULL) {
7753 hifb_error("HI_NULL arg!\n");
7754 return -EINVAL;
7755 }
7756
7757 /* get canvas buffer and check legality */
7758 ret = onrefresh_get_canvas_buf(par, argp, &canvas_buf);
7759 if (ret != HI_SUCCESS) {
7760 return HI_FAILURE;
7761 }
7762
7763 hifb_get_bufmode(par, &buf_mode);
7764 if ((buf_mode == HIFB_LAYER_BUF_NONE) && (diplay_info->mirror_mode != HIFB_MIRROR_NONE)) {
7765 hifb_error("Can't do mirror when the layer buf is none!\n");
7766 return -EINVAL;
7767 }
7768
7769 if ((buf_mode == HIFB_LAYER_BUF_NONE) && (diplay_info->rotate_mode != HIFB_ROTATE_NONE)) {
7770 hifb_error("Can't do rotate when the layer buf is none!\n");
7771 return -EINVAL;
7772 }
7773
7774 ret = hifb_refresh(par->layer_id, &canvas_buf, buf_mode);
7775
7776 return ret;
7777 }
7778
7779 static hi_s32 hifb_set_mirrormode(hifb_par *par, HIFB_MIRROR_MODE_E mirror_mode)
7780 {
7781 hifb_display_info *display_info = &par->display_info;
7782 volatile hifb_compress_info *compress_info = &par->compress_info;
7783 HIFB_LAYER_BUF_E buf_mode;
7784
7785 hifb_get_bufmode(par, &buf_mode);
7786
7787 if ((buf_mode == HIFB_LAYER_BUF_BUTT) || (buf_mode == HIFB_LAYER_BUF_NONE)) {
7788 hifb_error("doesn't support FBIOPUT_MIRROR_MODE operation when in standard mode or HIFB_LAYER_BUF_NONE!\n");
7789 return HI_FAILURE;
7790 }
7791
7792 if (mirror_mode >= HIFB_MIRROR_BUTT) {
7793 hifb_error("The input mirror mode is wrong!\n");
7794 return -EINVAL;
7795 }
7796
7797 if ((mirror_mode != HIFB_MIRROR_NONE) && (display_info->rotate_mode != HIFB_ROTATE_NONE)) {
7798 hifb_error("Can't do mirror when rotate!\n");
7799 return -EINVAL;
7800 }
7801
7802 if ((mirror_mode != HIFB_MIRROR_NONE) && (compress_info->compress_open == HI_TRUE)) {
7803 hifb_error("Can't do mirror when compression is on!\n");
7804 return -EINVAL;
7805 }
7806
7807 display_info->mirror_mode = mirror_mode;
7808 return HI_SUCCESS;
7809 }
7810
7811 static hi_s32 rotate_vb_init(struct fb_info *info)
7812 {
7813 hifb_par *par = (hifb_par *)info->par;
7814 hifb_display_info *display_info = &par->display_info;
7815 hi_void *mmz_name = HI_NULL;
7816 hi_u32 size;
7817 hi_char name[HIFB_ROTBUF_NAME_LEN];
7818 hi_u64 *vir_rotate_vb = HI_NULL;
7819 hi_mpp_chn chn;
7820 chn.mod_id = HI_ID_FB;
7821 chn.dev_id = par->layer_id;
7822 chn.chn_id = 0;
7823
7824 size = hifb_get_line_length(info) * (((display_info->display_height + 15) / 16) * 16); /* 15 16 align */
7825
7826 if (func_entry(sys_export_func, HI_ID_SYS)->pfn_get_mmz_name(&chn, &mmz_name)) {
7827 hifb_error("get mmz name fail!\n");
7828 return ENOMEM;
7829 }
7830
7831 if (snprintf_s(name, sizeof(name), 13, "hifb%01d_rotate", par->layer_id) < 0) { /* 13:for char length */
7832 hifb_error("%s:%d:snprintf_s failure\n", __FUNCTION__, __LINE__);
7833 return HI_FAILURE;
7834 }
7835 par->rotate_vb = cmpi_mmz_malloc(mmz_name, name, size);
7836 if (par->rotate_vb == 0) {
7837 hifb_error("get buffer fail,size %d !\n", size);
7838 return ENOMEM;
7839 }
7840
7841 vir_rotate_vb = cmpi_remap_nocache(par->rotate_vb, size);
7842 if (vir_rotate_vb == HI_NULL) {
7843 hifb_error("get buffer fail,size %d !\n", size);
7844 cmpi_mmz_free(par->rotate_vb, HI_NULL);
7845 return ENOMEM;
7846 }
7847
7848 (hi_void)memset_s(vir_rotate_vb, size, 0x0, size);
7849
7850 cmpi_unmap(vir_rotate_vb);
7851 return HI_SUCCESS;
7852 }
7853
7854 static hi_s32 hifb_set_rotatemode(struct fb_info *info, HIFB_ROTATE_MODE_E rotate_mode)
7855 {
7856 hifb_par *par = (hifb_par *)info->par;
7857 hifb_display_info *display_info = &par->display_info;
7858 HIFB_LAYER_BUF_E buf_mode;
7859 volatile hifb_compress_info *compress_info = &par->compress_info;
7860
7861 if (!hifb_drv_support_rotate()) {
7862 hifb_error("can't support rotate\n!\n");
7863 return -EINVAL;
7864 }
7865 hifb_get_bufmode(par, &buf_mode);
7866
7867 if ((buf_mode == HIFB_LAYER_BUF_BUTT) || (buf_mode == HIFB_LAYER_BUF_NONE)) {
7868 hifb_error("doesn't support FBIOPUT_ROTATE_MODE operation when in standard mode or HIFB_LAYER_BUF_NONE!\n");
7869 return HI_FAILURE;
7870 }
7871
7872 if ((par->color_format != HIFB_FMT_ARGB4444) && (par->color_format != HIFB_FMT_ARGB1555) &&
7873 (par->color_format != HIFB_FMT_ARGB8888)) {
7874 hifb_error("The rotate mode only support HIFB_FMT_ARGB4444 and HIFB_FMT_ARGB1555 which is %d\n!\n",
7875 par->color_format);
7876 return -EINVAL;
7877 }
7878
7879 if (rotate_mode >= HIFB_ROTATE_BUTT) {
7880 hifb_error("The input rotate mode is wrong!\n");
7881 return -EINVAL;
7882 }
7883
7884 if (compress_info->compress_open && (rotate_mode != HIFB_ROTATE_NONE)) {
7885 hifb_error("Can't rotate when in compress mode!\n");
7886 return -EINVAL;
7887 }
7888
7889 if ((rotate_mode != HIFB_ROTATE_NONE) && (display_info->mirror_mode != HIFB_MIRROR_NONE)) {
7890 hifb_error("Can't do rotate when mirror!\n");
7891 return -EINVAL;
7892 }
7893
7894 if (((rotate_mode == HIFB_ROTATE_90) || (rotate_mode == HIFB_ROTATE_270)) && (par->rotate_vb == 0)) {
7895 if (rotate_vb_init(info) != HI_SUCCESS) {
7896 return ENOMEM;
7897 }
7898 }
7899
7900 display_info->rotate_mode = rotate_mode;
7901
7902 return HI_SUCCESS;
7903 }
7904
7905 /*
7906 * Name : hifb_check_layerinfo
7907 * Desc : check layer information: buf refresh mode,position,canvas width
7908 and height, display width and height, screen width and height.
7909 */
7910 static hi_s32 hifb_check_layerinfo(HIFB_LAYER_INFO_S *layer_info)
7911 {
7912 if (layer_info == HI_NULL) {
7913 return HI_FAILURE;
7914 }
7915
7916 if (layer_info->u32Mask & HIFB_LAYERMASK_BUFMODE) {
7917 if (layer_info->BufMode > HIFB_LAYER_BUF_DOUBLE_IMMEDIATE) {
7918 hifb_error("BufMode(%d) is error, should between %d and %d\n", layer_info->BufMode, HIFB_LAYER_BUF_DOUBLE,
7919 HIFB_LAYER_BUF_DOUBLE_IMMEDIATE);
7920 return HI_FAILURE;
7921 }
7922 }
7923
7924 /* Detection of anti-flicker mode */
7925 if (layer_info->u32Mask & HIFB_LAYERMASK_ANTIFLICKER_MODE) {
7926 if (layer_info->eAntiflickerLevel > HIFB_LAYER_ANTIFLICKER_AUTO) {
7927 hifb_error("eAntiflickerLevel(%d) is error, should between %d and %d\n", layer_info->eAntiflickerLevel,
7928 HIFB_LAYER_ANTIFLICKER_NONE, HIFB_LAYER_ANTIFLICKER_AUTO);
7929 return HI_FAILURE;
7930 }
7931 }
7932
7933 /* check the width and height */
7934 if (layer_info->u32Mask & HIFB_LAYERMASK_DISPSIZE) {
7935 if (layer_info->u32DisplayWidth % 2 || layer_info->u32DisplayHeight % 2) { /* 2 align to */
7936 hifb_error("Disaplay W(%u) and H(%u) should align to 2!\n", layer_info->u32DisplayWidth,
7937 layer_info->u32DisplayHeight);
7938 return HI_FAILURE;
7939 }
7940 }
7941
7942 if (layer_info->u32Mask & HIFB_LAYERMASK_SCREENSIZE) {
7943 if (layer_info->u32ScreenWidth % 2 || layer_info->u32ScreenHeight % 2) { /* 2 align to */
7944 hifb_error("Screenaplay W(%u) and H(%u) should align to 2!\n", layer_info->u32ScreenWidth,
7945 layer_info->u32ScreenHeight);
7946 return HI_FAILURE;
7947 }
7948 }
7949
7950 /* check pre-multiplier value. */
7951 if (layer_info->u32Mask & HIFB_LAYERMASK_BMUL) {
7952 if ((layer_info->bPreMul != HI_TRUE) && (layer_info->bPreMul != HI_FALSE)) {
7953 hifb_error("pstLayerInfo->bPreMul should be HI_TRUE or HI_FALSE but it is %d\n", layer_info->bPreMul);
7954 return HI_FAILURE;
7955 }
7956 }
7957 return HI_SUCCESS;
7958 }
7959
7960 static hi_s32 check_display_size(struct fb_info *info, hifb_par* par,
7961 HIFB_LAYER_INFO_S *layer_info, hi_bool is_interlace)
7962 {
7963 hi_u32 pitch;
7964 if (!g_drv_ops.capability[par->layer_id].bVoScale) {
7965 /*
7966 * If the chip does not support scaling,
7967 * if both the display size and the screen size are set , both must be equal.
7968 */
7969 if ((layer_info->u32Mask & HIFB_LAYERMASK_DISPSIZE) &&
7970 (layer_info->u32Mask & HIFB_LAYERMASK_SCREENSIZE)) {
7971 if ((layer_info->u32DisplayWidth != layer_info->u32ScreenWidth) ||
7972 (layer_info->u32DisplayHeight != layer_info->u32ScreenHeight)) {
7973 hifb_error("DisplaySize(%u, %u) can't be different with ScreenSize(%u, %u) \
7974 when layer %u don't scale\n", layer_info->u32DisplayWidth, layer_info->u32DisplayHeight,
7975 layer_info->u32ScreenWidth, layer_info->u32ScreenHeight, par->layer_id);
7976 return HI_FAILURE;
7977 }
7978 }
7979 }
7980
7981 /* Modify the display size, the memory size has changed, limited by the size of the memory */
7982 if (layer_info->u32Mask & HIFB_LAYERMASK_DISPSIZE) {
7983 pitch = (layer_info->u32DisplayWidth * hifb_get_bits_per_pixel(info)) >> 3; /* 3 for 8 bit */
7984 pitch = (pitch + 0xf) & 0xfffffff0; /* 0xf 0xfffffff0 mask */
7985 if (hifb_check_mem_enough(info, pitch, layer_info->u32DisplayHeight) == HI_FAILURE) {
7986 hifb_error("memory is not enough!\n");
7987 return HI_FAILURE;
7988 }
7989
7990 if (layer_info->u32DisplayWidth == 0 || layer_info->u32DisplayHeight == 0) {
7991 hifb_error("display width/height shouldn't be 0!\n");
7992 return HI_FAILURE;
7993 }
7994 /*
7995 * For interlaced output, the height of the layer must be even.
7996 * Progressive output without this limit.
7997 */
7998 if (is_interlace && ((layer_info->u32DisplayHeight % 2) != 0)) { /* 2 for align */
7999 hifb_error("display_height(%d) of u32LayerId %d should be even when vodev output is interlace\n",
8000 layer_info->u32DisplayHeight, par->layer_id);
8001 return HI_FAILURE;
8002 }
8003 }
8004 return HI_SUCCESS;
8005 }
8006
8007 static hi_s32 onputlayerinfo_check_size(struct fb_info *info, hifb_par* par,
8008 HIFB_LAYER_INFO_S *layer_info, hi_bool is_interlace)
8009 {
8010 /* Check the display size */
8011 if (check_display_size(info, par, layer_info, is_interlace) != HI_SUCCESS) {
8012 return HI_FAILURE;
8013 }
8014
8015 /* Check the canvas size */
8016 if (layer_info->u32Mask & HIFB_LAYERMASK_CANVASSIZE) {
8017 if ((layer_info->u32CanvasWidth == 0) || (layer_info->u32CanvasHeight == 0)) {
8018 hifb_error("canvas width/height shouldn't be 0\n");
8019 return HI_FAILURE;
8020 }
8021 }
8022
8023 /* Check the screen size */
8024 if (layer_info->u32Mask & HIFB_LAYERMASK_SCREENSIZE) {
8025 if ((layer_info->u32CanvasWidth == 0) || (layer_info->u32CanvasHeight == 0)) {
8026 hifb_error("screen width/height shouldn't be 0\n");
8027 return HI_FAILURE;
8028 }
8029 /*
8030 * For interlaced output, the height of the layer must be even.
8031 * Progressive output without this limit.
8032 */
8033 if (is_interlace && ((layer_info->u32ScreenHeight % 2) != 0)) { /* 2 for align */
8034 hifb_error("screen_height(%d) of u32LayerId %d should be even when vodev output is interlace\n",
8035 layer_info->u32ScreenHeight, par->layer_id);
8036 return HI_FAILURE;
8037 }
8038 }
8039 return HI_SUCCESS;
8040 }
8041
8042 static hi_s32 onputlayerinfo_check_buf_mode(struct fb_info *info, hifb_par* par,
8043 HIFB_LAYER_INFO_S *layer_info, hi_bool is_interlace)
8044 {
8045 volatile hifb_compress_info *compress_info = &par->compress_info;
8046 hi_u32 layer_size;
8047 /* Modify the display buf mode, the memory size has changed, limited by the size of the memory */
8048 if (layer_info->u32Mask & HIFB_LAYERMASK_BUFMODE) {
8049 if (layer_info->BufMode == HIFB_LAYER_BUF_ONE) {
8050 layer_size = hifb_get_line_length(info) * hifb_get_yres(info);
8051 } else if ((layer_info->BufMode == HIFB_LAYER_BUF_DOUBLE) ||
8052 (layer_info->BufMode == HIFB_LAYER_BUF_DOUBLE_IMMEDIATE)) {
8053 layer_size = 2 * hifb_get_line_length(info) * hifb_get_yres(info); /* 2 length data */
8054 } else {
8055 layer_size = 0;
8056 }
8057
8058 if (layer_size > hifb_get_smem_len(info)) {
8059 /*
8060 * layer real memory size:%d KBytes, expected:%d KBtyes
8061 * real:hifb_get_smem_len(info)/1024, expectde:layer_size/1024
8062 */
8063 hifb_error("No enough mem!\n");
8064 return HI_FAILURE;
8065 }
8066
8067 /* If compression is enabled, only 2buf mode can be set. */
8068 if (compress_info->compress_open) {
8069 if (layer_info->BufMode != HIFB_LAYER_BUF_DOUBLE &&
8070 layer_info->BufMode != HIFB_LAYER_BUF_DOUBLE_IMMEDIATE) {
8071 hifb_error("only HIFB_LAYER_BUF_DOUBLE/HIFB_LAYER_BUF_DOUBLE_IMMEDIATE support compress!\n");
8072 return HI_FAILURE;
8073 }
8074 }
8075 }
8076
8077 /* if x>width or y>height ,how to deal with: see nothing in screen or return failure. */
8078 if (layer_info->u32Mask & HIFB_LAYERMASK_POS) {
8079 if ((layer_info->s32XPos < 0) || (layer_info->s32YPos < 0)) {
8080 hifb_error("It's not supported to set start pos of layer to negative!\n");
8081 return HI_FAILURE;
8082 }
8083 /*
8084 * For interlaced output, the start of the layer must be even.
8085 * Progressive output without this limit.
8086 */
8087 if (is_interlace && (layer_info->s32YPos % 2 != 0)) { /* 2 for align */
8088 hifb_error("s32YPos should be even for interlace vodev!\n");
8089 return HI_FAILURE;
8090 }
8091 }
8092 return HI_SUCCESS;
8093 }
8094
8095 static hi_s32 onputlayerinfo_check_premult(hifb_par* par, HIFB_LAYER_INFO_S *layer_info)
8096 {
8097 /* Limit the pre-multiplication and color value. */
8098 if ((layer_info->u32Mask & HIFB_LAYERMASK_BMUL) && par->ckey.key_enable) {
8099 hifb_error("Colorkey and premul couldn't take effect at same time!\n");
8100 return HI_FAILURE;
8101 }
8102
8103 /* return HI_FAILURE, not allow to set pre-mult mode when the color format is ARGB1555 or ARGB4444 */
8104 if (layer_info->u32Mask & HIFB_LAYERMASK_BMUL) {
8105 if ((layer_info->bPreMul == HI_TRUE) &&
8106 (par->color_format == HIFB_FMT_ARGB4444 || par->color_format == HIFB_FMT_ARGB1555)) {
8107 hifb_error("not allow to set pre-mult mode when the color format is ARGB1555 or ARGB4444\n");
8108 return HI_FAILURE;
8109 }
8110 }
8111 /* not allow to set pre-mult mode when the GlobalAlpha is 1 */
8112 if (layer_info->u32Mask & HIFB_LAYERMASK_BMUL) {
8113 if ((layer_info->bPreMul == HI_TRUE) &&
8114 (par->alpha.bAlphaChannel == HI_TRUE && par->alpha.u8GlobalAlpha == 1)) {
8115 hifb_error("not allow to set pre-mult mode when the GlobalAlpha is 1\n");
8116 return HI_FAILURE;
8117 }
8118 }
8119 return HI_SUCCESS;
8120 }
8121
8122 static hi_void onputlayerinfo_set_with_mask(hifb_par* par, HIFB_LAYER_INFO_S *layer_info)
8123 {
8124 hifb_display_info *display_info = &par->display_info;
8125 HIFB_POINT_S pos;
8126
8127 if (layer_info->u32Mask & HIFB_LAYERMASK_BMUL) {
8128 display_info->premul = layer_info->bPreMul;
8129 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_BMUL;
8130 }
8131
8132 if (layer_info->u32Mask & HIFB_LAYERMASK_BUFMODE) {
8133 hifb_set_bufmode(par->layer_id, layer_info->BufMode);
8134 }
8135
8136 if (layer_info->u32Mask & HIFB_LAYERMASK_POS) {
8137 pos.s32XPos = layer_info->s32XPos;
8138 pos.s32YPos = layer_info->s32YPos;
8139 hifb_set_layerpos(par, &pos);
8140 }
8141
8142 if (layer_info->u32Mask & HIFB_LAYERMASK_ANTIFLICKER_MODE) {
8143 hifb_set_antiflickerlevel(par->layer_id, layer_info->eAntiflickerLevel);
8144 }
8145 return;
8146 }
8147
8148 static hi_s32 onputlayerinfo_process(hifb_par* par, HIFB_LAYER_INFO_S *layer_info)
8149 {
8150 hifb_refresh_info *refresh_info = &par->refresh_info;
8151 hi_s32 ret = HI_SUCCESS;
8152 unsigned long lock_flag;
8153
8154 hifb_spin_lock_irqsave(&par->lock, lock_flag);
8155 par->modifying = HI_TRUE;
8156
8157 /* deal with layer_info->mask */
8158 onputlayerinfo_set_with_mask(par, layer_info);
8159
8160 /* set screen size in the scaling mode */
8161 if (g_drv_ops.capability[par->layer_id].bVoScale) {
8162 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
8163 /*
8164 * The following two functions have a sleep operation inside,
8165 * you must unlock before calling, and lock the global amount inside the function.
8166 */
8167 if (layer_info->u32Mask & HIFB_LAYERMASK_SCREENSIZE) {
8168 ret = hifb_disp_setscreensize(par->layer_id, layer_info->u32ScreenWidth,
8169 layer_info->u32ScreenHeight);
8170 if (ret != HI_SUCCESS) {
8171 return ret;
8172 }
8173 }
8174 if (layer_info->u32Mask & HIFB_LAYERMASK_DISPSIZE) {
8175 ret = hifb_disp_setdispsize(par->layer_id, layer_info->u32DisplayWidth,
8176 layer_info->u32DisplayHeight);
8177 if (ret != HI_SUCCESS) {
8178 return ret;
8179 }
8180 }
8181 hifb_spin_lock_irqsave(&par->lock, lock_flag);
8182 } else {
8183 /* no scaling mode, no buffer mode, screen size and display size can be set */
8184 if (refresh_info->buf_mode != HIFB_LAYER_BUF_NONE) {
8185 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
8186 if (layer_info->u32Mask & HIFB_LAYERMASK_SCREENSIZE) {
8187 ret = hifb_disp_setscreensize(par->layer_id, layer_info->u32ScreenWidth,
8188 layer_info->u32ScreenHeight);
8189 }
8190 if (ret != HI_SUCCESS) {
8191 return ret;
8192 }
8193 if (layer_info->u32Mask & HIFB_LAYERMASK_DISPSIZE) {
8194 ret = hifb_disp_setdispsize(par->layer_id, layer_info->u32DisplayWidth,
8195 layer_info->u32DisplayHeight);
8196 }
8197 if (ret != HI_SUCCESS) {
8198 return ret;
8199 }
8200 hifb_spin_lock_irqsave(&par->lock, lock_flag);
8201 }
8202 }
8203
8204 par->modifying = HI_FALSE;
8205 hifb_spin_unlock_irqrestore(&par->lock, lock_flag);
8206 return HI_SUCCESS;
8207 }
8208
8209 static hi_s32 hifb_onputlayerinfo(struct fb_info *info, hifb_par* par, const hi_void __user *argp)
8210 {
8211 hi_s32 ret = HI_SUCCESS;
8212 HIFB_LAYER_INFO_S layer_info;
8213 hi_bool is_interlace = HI_FALSE;
8214
8215 if (is_cursor_layer(par->layer_id) && is_soft_cursor()) {
8216 hifb_warning("you shouldn't put cursor layer info!");
8217 return HI_SUCCESS;
8218 }
8219
8220 if (argp == HI_NULL) {
8221 hifb_error("HI_NULL arg!\n");
8222 return -EINVAL;
8223 }
8224
8225 if (osal_copy_from_user(&layer_info, argp, sizeof(HIFB_LAYER_INFO_S))) {
8226 return -EFAULT;
8227 }
8228
8229 ret = hifb_check_layerinfo(&layer_info);
8230 if (ret != HI_SUCCESS) {
8231 return HI_FAILURE;
8232 }
8233
8234 /*
8235 * Check the display size
8236 * Check the canvas size
8237 * Check the screen size
8238 */
8239 is_interlace = hifb_is_interlace(par);
8240 if (onputlayerinfo_check_size(info, par, &layer_info, is_interlace) != HI_SUCCESS) {
8241 return HI_FAILURE;
8242 }
8243
8244 /*
8245 * Check when modify buf mode
8246 * Check when modify pos
8247 */
8248 if (onputlayerinfo_check_buf_mode(info, par, &layer_info, is_interlace) != HI_SUCCESS) {
8249 return HI_FAILURE;
8250 }
8251
8252 /* Check pre-mult */
8253 if (onputlayerinfo_check_premult(par, &layer_info) != HI_SUCCESS) {
8254 return HI_FAILURE;
8255 }
8256 /*
8257 * avoid modifying register in vo isr before all params has been recorded!
8258 * In vo irq,flag bModifying will be checked.
8259 */
8260 ret = onputlayerinfo_process(par, &layer_info);
8261 if (ret != HI_SUCCESS) {
8262 return ret;
8263 }
8264
8265 ret = hifb_alloccanbuf(info, &layer_info);
8266 if (ret != HI_SUCCESS) {
8267 /*
8268 * There is no error returned here, because the user can also
8269 * specify this memory; in addition, even if the allocation is successful,
8270 * The user also needs to call FBIOGET_CANVAS_BUF to get it to operate.
8271 */
8272 hifb_warning("alloc canvas buffer failed\n");
8273 }
8274
8275 hifb_refresh_again(par->layer_id);
8276
8277 return ret;
8278 }
8279
8280 static hi_s32 hifb_dosetcolreg(unsigned regno, unsigned red, unsigned green, unsigned blue, unsigned transp,
8281 struct fb_info *info, hi_bool update_finished_reg)
8282 {
8283 hifb_par *par = (hifb_par *)info->par;
8284
8285 hi_u32 argb = ((transp & 0xff) << 24) | ((red & 0xff) << 16) | /* 16 24 alg data 0xff mask */
8286 ((green & 0xff) << 8) | (blue & 0xff); /* 8 alg data 0xff mask */
8287
8288 if (regno > 255) { /* 255 is larger than */
8289 hifb_warning("regno: %d, larger than 255!\n", regno);
8290 return HI_FAILURE;
8291 }
8292
8293 g_drv_ops.hifb_drv_set_color_reg(par->layer_id, regno, argb, update_finished_reg);
8294 return HI_SUCCESS;
8295 }
8296
#ifndef __HuaweiLite__
/*
 * fb_ops.fb_setcolreg hook (Linux only): write a single palette entry
 * and commit it to hardware immediately (HI_TRUE = update finished).
 */
static hi_s32 hifb_setcolreg(unsigned regno, unsigned red, unsigned green, unsigned blue, unsigned transp,
                             struct fb_info *info)
{
    return hifb_dosetcolreg(regno, red, green, blue, transp, info, HI_TRUE);
}
#endif
8304
8305 #ifdef __HuaweiLite__
8306 static hi_s32 hifb_setcmap(struct fb_cmap_s *cmap, struct fb_info *info)
8307 #else
8308 static hi_s32 hifb_setcmap(struct fb_cmap *cmap, struct fb_info *info)
8309 #endif
8310 {
8311 hi_u32 i;
8312 hi_s32 start;
8313 unsigned short *red = HI_NULL;
8314 unsigned short *green = HI_NULL;
8315 unsigned short *blue = HI_NULL;
8316 unsigned short *transp = HI_NULL;
8317 unsigned short hred, hgreen, hblue;
8318 unsigned short htransp = 0xffff; /* 0xffff The initial value */
8319 hifb_par *par = (hifb_par *)info->par;
8320
8321 if (is_cursor_layer(par->layer_id)) {
8322 return -EINVAL;
8323 }
8324
8325 if (!g_drv_ops.capability[par->layer_id].bCmap) {
8326 /* AE6D03519, delete this color map warning! */
8327 return -EPERM;
8328 }
8329
8330 red = (unsigned short *)cmap->red;
8331 green = (unsigned short *)cmap->green;
8332 blue = (unsigned short *)cmap->blue;
8333 transp = (unsigned short *)cmap->transp;
8334
8335 #ifdef __HuaweiLite__
8336 start = cmap->first;
8337 #else
8338 start = cmap->start;
8339 #endif
8340 for (i = 0; i < cmap->len; i++) {
8341 hred = *red++;
8342 hgreen = *green++;
8343 hblue = *blue++;
8344 if (transp != HI_NULL) {
8345 htransp = *transp++;
8346 }
8347
8348 if (i < (hi_u32)(cmap->len - 1)) {
8349 if (hifb_dosetcolreg(start++, hred, hgreen, hblue, htransp, info, HI_FALSE)) {
8350 break;
8351 }
8352 } else {
8353 if (hifb_dosetcolreg(start++, hred, hgreen, hblue, htransp, info, HI_TRUE)) {
8354 break;
8355 }
8356 }
8357 }
8358 return 0;
8359 }
8360
#ifdef CONFIG_FB_CFB_IMAGEBLIT
/* fb_ops.fb_imageblit hook: delegate to the generic cfb software blitter. */
static hi_void hifb_imageblit(struct fb_info *p, const struct fb_image *image)
{
    cfb_imageblit(p, image);
}
#endif
8367
8368 #ifdef __HuaweiLite__
8369 #define HIFB_MAX_LAYER_NUM 1
8370 static struct hifb_info *g_hifb[HIFB_MAX_LAYER_NUM] = {HI_NULL};
8371
/*
 * Copy the red/green/blue (and optional transp) channels of src into
 * dst's storage, for the case src->first > dst->first (src starts inside
 * dst's range).  cmap_start points at dst's red channel; the channels
 * are laid out back to back, cmap_size bytes apart.
 */
static hi_s32 copy_rgb_to_cmap(hi_u8 *cmap_start, const struct fb_cmap_s *src, struct fb_cmap_s *dst)
{
    hi_s32 offset;
    hi_s32 copy_size;
    int cmap_size;
    hi_s32 ret;

    cmap_size = dst->len * sizeof(u_short); /* bytes per channel in dst */
    offset = src->first - dst->first;       /* offset in map entries */
    /* clamp the copy so it never runs past the end of dst */
    copy_size = (src->len > (dst->len - offset)) ? (dst->len - offset) : src->len;

    if (copy_size <= 0) {
        return -EINVAL;
    }
    copy_size *= sizeof(u_short);

    /*
     * NOTE(review): 'offset' counts u_short entries but is added to a
     * byte pointer below; if the channel buffers hold u_short values this
     * looks like it should be 'offset * sizeof(u_short)'.  Confirm
     * against the fb_cmap_s layout before changing.
     */
    ret = memcpy_s(cmap_start + offset, copy_size, src->red, copy_size);
    hifb_unequal_eok_return(ret);
    cmap_start += cmap_size;
    ret = memcpy_s(cmap_start + offset, copy_size, src->green, copy_size);
    hifb_unequal_eok_return(ret);
    cmap_start += cmap_size;
    ret = memcpy_s(cmap_start + offset, copy_size, src->blue, copy_size);
    hifb_unequal_eok_return(ret);
    cmap_start += cmap_size;
    /* transp is optional on both sides */
    if (src->transp && dst->transp) {
        ret = memcpy_s(cmap_start + offset, copy_size, src->transp, copy_size);
        hifb_unequal_eok_return(ret);
    }
    return HI_SUCCESS;
}
8403
/*
 * Copy a color map from src into dst's storage, honoring the overlap of
 * their [first, first+len) ranges.  dst's red channel is used as the
 * base of its channel storage; the channels follow cmap_size bytes apart.
 */
static int fb_copy_cmap(const struct fb_cmap_s *src, struct fb_cmap_s *dst)
{
    int offset;
    int copy_size;
    hi_u8 *cmap_start = HI_NULL;
    int cmap_size;
    int ret;

    if ((src == HI_NULL) || (dst == HI_NULL)) {
        return -EINVAL;
    }

    cmap_start = dst->red;
    if (cmap_start == HI_NULL)
        return -EINVAL;

    cmap_size = dst->len * sizeof(u_short); /* bytes per channel in dst */

    if (src->first > dst->first) {
        /* src starts inside dst: delegate to the offset-into-dst variant */
        ret = copy_rgb_to_cmap(cmap_start, src, dst);
        if (ret != HI_SUCCESS) {
            return HI_FAILURE;
        }
    } else {
        /* dst starts inside src: skip the leading src entries instead */
        offset = dst->first - src->first;   /* offset in map entries */
        copy_size = ((src->len - offset) > dst->len) ? dst->len : src->len - offset;

        if (copy_size <= 0) {
            return -EINVAL;
        }
        copy_size *= sizeof(u_short);

        /*
         * NOTE(review): 'offset' counts u_short entries; whether
         * 'src->red + offset' advances by entries or bytes depends on the
         * declared type of the fb_cmap_s channel pointers — confirm it
         * matches copy_rgb_to_cmap's treatment of the same offset.
         */
        ret = memcpy_s(cmap_start, copy_size, src->red + offset, copy_size);
        hifb_unequal_eok_return(ret);
        cmap_start += cmap_size;
        ret = memcpy_s(cmap_start, copy_size, src->green + offset, copy_size);
        hifb_unequal_eok_return(ret);
        cmap_start += cmap_size;
        ret = memcpy_s(cmap_start, copy_size, src->blue + offset, copy_size);
        hifb_unequal_eok_return(ret);
        cmap_start += cmap_size;
        /* transp is optional on both sides */
        if (src->transp && dst->transp) {
            ret = memcpy_s(cmap_start, copy_size, src->transp + offset, copy_size);
            hifb_unequal_eok_return(ret);
        }
    }
    return 0;
}
8452
8453 int fb_set_cmap(struct fb_cmap_s *cmap, struct hifb_info *info)
8454 {
8455 int ret;
8456
8457 if (cmap == HI_NULL) {
8458 return -EINVAL;
8459 }
8460
8461 if (info == HI_NULL)
8462 return -EINVAL;
8463
8464 ret = hifb_setcmap(cmap, info);
8465 if (ret == 0)
8466 fb_copy_cmap(cmap, &info->cmap);
8467
8468 return ret;
8469 }
8470
8471 FAR struct fb_vtable_s *up_fbgetvplane(int display, int vplane)
8472 {
8473 (void)vplane;
8474 if (g_hifb[display] == HI_NULL)
8475 return HI_NULL;
8476
8477 return &g_hifb[display]->vtable;
8478 }
8479
8480 static int hifb_getvideoinfo(FAR struct fb_vtable_s *vtable, FAR struct fb_videoinfo_s *vinfo)
8481 {
8482 struct hifb_info *info = (struct hifb_info *)vtable;
8483 hi_s32 ret;
8484
8485 if ((info == HI_NULL) || (vinfo == HI_NULL))
8486 return -EINVAL;
8487
8488 ret = memcpy_s(vinfo, sizeof(struct fb_videoinfo_s), &info->vinfo, sizeof(struct fb_videoinfo_s));
8489 hifb_unequal_eok_return(ret);
8490 return 0;
8491 }
8492
8493 static int hifb_getplaneinfo(FAR struct fb_vtable_s *vtable, int planeno, FAR struct fb_planeinfo_s *pinfo)
8494 {
8495 struct hifb_info *info = (struct hifb_info *)vtable;
8496 hi_unused(planeno);
8497 if ((info == HI_NULL) || (pinfo == HI_NULL))
8498 return -EINVAL;
8499
8500 pinfo->fbmem = info->oinfo.fbmem;
8501 pinfo->fblen = info->oinfo.fblen;
8502 pinfo->display = info->oinfo.overlay;
8503 pinfo->bpp = info->oinfo.bpp;
8504 pinfo->stride = info->oinfo.stride;
8505
8506 return 0;
8507 }
8508
8509 static int hifb_getoverlayinfo(FAR struct fb_vtable_s *vtable, int overlayno, FAR struct fb_overlayinfo_s *oinfo)
8510 {
8511 struct hifb_info *info = (struct hifb_info *)vtable;
8512 hi_s32 ret;
8513 hi_unused(overlayno);
8514 if ((info == HI_NULL) || (oinfo == HI_NULL))
8515 return -EINVAL;
8516
8517 ret = memcpy_s(oinfo, sizeof(struct fb_overlayinfo_s), &info->oinfo, sizeof(struct fb_overlayinfo_s));
8518 hifb_unequal_eok_return(ret);
8519
8520 return 0;
8521 }
8522
8523 static hi_s32 fbinitialize_alloc_mem(struct hifb_info *info)
8524 {
8525 hi_char name[16] = {'\0'}; /* 16 is The length of the array */
8526 hifb_par *par = HI_NULL;
8527 par = (hifb_par *)(info->par);
8528
8529 if (g_layer[par->layer_id].layer_size != 0) {
8530 /* initialize the fix screen info */
8531 if (is_hd_layer(par->layer_id)) {
8532 hifb_screeninfo_init(info, &g_default_info[HIFB_LAYER_TYPE_HD]);
8533 } else if (is_sd_layer(par->layer_id)) {
8534 hifb_screeninfo_init(info, &g_default_info[HIFB_LAYER_TYPE_SD]);
8535 } else if (is_ad_layer(par->layer_id)) {
8536 hifb_screeninfo_init(info, &g_default_info[HIFB_LAYER_TYPE_AD]);
8537 } else if (is_cursor_layer(par->layer_id)) {
8538 hifb_screeninfo_init(info, &g_default_info[HIFB_LAYER_TYPE_CURSOR]);
8539 }
8540 if (snprintf_s(name, sizeof(name), 12, "hifb_layer%01u", par->layer_id) < 0) { /* 12:for char length */
8541 hifb_error("%s:%d:snprintf_s failure\n", __FUNCTION__, __LINE__);
8542 return HI_FAILURE;
8543 }
8544 /* 16 is The length of the array */
8545 info->oinfo.fbmem = (void *)(hi_uintptr_t)hifb_buf_allocmem(name, 16, (g_layer[par->layer_id].layer_size +
8546 /* 1024 Into the system */
8547 g_layer[par->layer_id].curosr_buf_size) * 1024, hifb_get_layer_mmz_names(par->layer_id));
8548 if (info->oinfo.fbmem == HI_NULL) {
8549 hifb_error("%s:failed to malloc the video memory, size: %ld KBtyes!\n", name,
8550 (g_layer[par->layer_id].layer_size + g_layer[par->layer_id].curosr_buf_size));
8551 hifb_free(info);
8552 g_layer[par->layer_id].info = HI_NULL;
8553 return HI_FAILURE;
8554 }
8555 /* u32LayerSize is KB */
8556 info->oinfo.fblen = g_layer[par->layer_id].layer_size * 1024; /* 1024 layer size */
8557
8558 /* initialize the virtual address and clear memory */
8559 info->oinfo.fbmem = hifb_buf_map((hi_u32)(uintptr_t)info->oinfo.fbmem,
8560 (g_layer[par->layer_id].layer_size +
8561 g_layer[par->layer_id].curosr_buf_size) * 1024); /* 1024 buf size */
8562 if (info->oinfo.fbmem == HI_NULL) {
8563 hifb_warning("Failed to call map video memory,size:0x%x, start: 0x%llx\n",\
8564 info->oinfo.fblen, (hi_u64)(uintptr_t)info->oinfo.fbmem);
8565 } else {
8566 (hi_void)memset_s(info->oinfo.fbmem, info->oinfo.fblen, 0, info->oinfo.fblen);
8567 }
8568 }
8569 return HI_SUCCESS;
8570 }
8571
8572 int up_fbinitialize(int display)
8573 {
8574 hi_s32 ret;
8575 struct hifb_info *info = HI_NULL;
8576 hifb_par *par = HI_NULL;
8577
8578 /* Creates a new frame buffer info structure. reserves hifb_par for driver private data (info->par) */
8579 info = hifb_alloc(sizeof(hifb_par));
8580 if (info == HI_NULL) {
8581 hifb_error("failed to malloc the hifb_info!\n");
8582 return -ENOMEM;
8583 }
8584
8585 /* save the info pointer in global pointer array */
8586 g_layer[display].info = info;
8587 info->oinfo.overlay = display;
8588 info->vtable.fb_open = hifb_open;
8589 info->vtable.fb_release = hifb_release;
8590 info->vtable.fb_set_par = hifb_set_par;
8591 info->vtable.fb_pan_display = hifb_pan_display;
8592 info->vtable.fb_ioctl = hifb_ioctl;
8593 info->vtable.getvideoinfo = hifb_getvideoinfo;
8594 info->vtable.getplaneinfo = hifb_getplaneinfo;
8595 info->vtable.getoverlayinfo = hifb_getoverlayinfo;
8596 info->vtable.getcmap = HI_NULL;
8597 info->vtable.putcmap = HI_NULL;
8598
8599 par = (hifb_par *)(info->par);
8600 (hi_void)memset_s(par, sizeof(hifb_par), 0, sizeof(hifb_par));
8601 par->layer_id = display;
8602 par->color_format = HIFB_FMT_ARGB1555;
8603 info->vinfo.fmt = HIFB_FMT_ARGB1555;
8604
8605 /* It's not need to alloc mem for cursor layer if use g_softcursor */
8606 ret = fbinitialize_alloc_mem(info);
8607 if (ret != HI_SUCCESS) {
8608 return ret;
8609 }
8610
8611 g_hifb[display] = info;
8612
8613 return 0;
8614 }
8615
8616 void up_fbuninitialize(int display)
8617 {
8618 struct hifb_info *info = HI_NULL;
8619
8620 /* get framebuffer info structure pointer */
8621 info = g_layer[display].info;
8622 if (info != HI_NULL) {
8623 if (info->oinfo.fbmem) {
8624 hifb_buf_freemem((hi_u64)(uintptr_t)info->oinfo.fbmem);
8625 }
8626
8627 hifb_free(info);
8628 g_layer[display].info = HI_NULL;
8629 g_hifb[display] = HI_NULL;
8630 }
8631 }
8632 #endif
8633
8634 static hi_s32 hifb_set_dynamic_range_display(hifb_par *par, HIFB_DYNAMIC_RANGE_E dynamic_range)
8635 {
8636 if (dynamic_range >= HIFB_DYNAMIC_RANGE_BUTT) {
8637 hifb_error("The input dynamic range value is wrong!\n");
8638 return -EINVAL;
8639 }
8640
8641 par->display_info.dynamic_range = dynamic_range;
8642 return HI_SUCCESS;
8643 }
8644
#if HICHIP == HI3559A_V100
#ifdef CONFIG_DMA_SHARED_BUFFER
/* fb_ops.fb_dmabuf_export hook: export the layer's entire video memory as a dma-buf. */
struct dma_buf *hifb_dmabuf_export(struct fb_info *info)
{
    return hifb_memblock_export(hifb_get_smem_start(info), hifb_get_smem_len(info), 0);
}
#endif
#endif
8653
8654 /*
8655 * Name : g_hi_fb_ops
8656 * Desc : fb struct
8657 * See : hifb_overlay_probe
8658 */
#ifndef __HuaweiLite__
/* Linux framebuffer entry points for a hifb overlay; installed in hifb_overlay_probe. */
static struct fb_ops g_hi_fb_ops = {
    .owner = THIS_MODULE,
    .fb_open = hifb_open,
    .fb_release = hifb_release,
    .fb_check_var = hifb_check_var,
    .fb_set_par = hifb_set_par,
    .fb_pan_display = hifb_pan_display,
    .fb_ioctl = hifb_ioctl,
    .fb_setcolreg = hifb_setcolreg,
    .fb_setcmap = hifb_setcmap,
#ifdef CONFIG_COMPAT
    /* 32-bit ioctls from a 64-bit kernel */
    .fb_compat_ioctl = hifb_compat_ioctl,
#endif
#ifdef CONFIG_FB_CFB_IMAGEBLIT
    .fb_imageblit = hifb_imageblit,
#endif
#if HICHIP == HI3559A_V100
#ifdef CONFIG_DMA_SHARED_BUFFER
    .fb_dmabuf_export = hifb_dmabuf_export,
#endif
#endif
};
#endif
8683
8684 /*
8685 * Function : hifb_overlay_cleanup
8686 * Description : releae the resource for certain framebuffer
8687 */
8688 #ifdef __HuaweiLite__
8689 static hi_void hifb_overlay_cleanup(hi_u32 layer_id, hi_bool unregister)
8690 {
8691 hi_s32 ret;
8692 hi_unused(unregister);
8693 ret = fb_unregister(layer_id);
8694 if (ret < 0) {
8695 hifb_error("hifb overlay cleanup fail!\n");
8696 }
8697
8698 return;
8699 }
8700 #else
8701 static hi_void hifb_overlay_cleanup(hi_u32 layer_id, hi_bool unregister)
8702 {
8703 struct fb_info *info = HI_NULL;
8704 /* get framebuffer info structure pointer */
8705 info = g_layer[layer_id].info;
8706 if (info != HI_NULL) {
8707 if (hifb_get_screen_base(info)) {
8708 hifb_buf_ummap(hifb_get_screen_base(info));
8709 }
8710
8711 if (hifb_get_smem_start(info)) {
8712 hifb_buf_freemem(hifb_get_smem_start(info));
8713 }
8714
8715 if (unregister) {
8716 unregister_framebuffer(info);
8717 }
8718
8719 framebuffer_release(info);
8720 g_layer[layer_id].info = HI_NULL;
8721 }
8722
8723 return;
8724 }
8725 #endif
8726
8727 #ifndef __HuaweiLite__
/*
 * Allocate and map the video memory for a layer during hifb_overlay_probe
 * (Linux variant).  Picks default fix/var screen info by layer type,
 * allocates (layer_size + cursor_buf_size) KB from the layer's mmz, maps
 * it and clears it.  On failure the caller cleans up via
 * hifb_overlay_cleanup (which frees fix->smem_start if it was set).
 */
static hi_s32 overlay_probe_alloc_mem(struct fb_info *info, struct fb_fix_screeninfo *fix,
                                      struct fb_var_screeninfo *var)
{
    hifb_par *par = HI_NULL;
    hi_u32 layer_id;
    hi_char name[16]; /* 16 for length of name */

    par = (hifb_par *)(info->par);
    layer_id = par->layer_id;

    if (g_layer[layer_id].layer_size != 0) {
        /* initialize the fix screen info: defaults depend on the layer type */
        if (is_hd_layer(layer_id)) {
            *fix = g_default_fix[HIFB_LAYER_TYPE_HD];
            *var = g_default_var[HIFB_LAYER_TYPE_HD];
        } else if (is_sd_layer(layer_id)) {
            *fix = g_default_fix[HIFB_LAYER_TYPE_SD];
            *var = g_default_var[HIFB_LAYER_TYPE_SD];
        } else if (is_ad_layer(layer_id)) {
            *fix = g_default_fix[HIFB_LAYER_TYPE_AD];
            *var = g_default_var[HIFB_LAYER_TYPE_AD];
        } else if (is_cursor_layer(layer_id)) {
            *fix = g_default_fix[HIFB_LAYER_TYPE_CURSOR];
            *var = g_default_var[HIFB_LAYER_TYPE_CURSOR];
        }
        if (snprintf_s(name, sizeof(name), 12, "hifb_layer%01u", layer_id) < 0) { /* 12:for char length */
            hifb_error("%s:%d:snprintf_s failure\n", __FUNCTION__, __LINE__);
            return HI_FAILURE;
        }
        /* sizes in g_layer are kept in KB, hence the * 1024 below */
        fix->smem_start = hifb_buf_allocmem(name, 16, /* 16 buffer size */
            (g_layer[layer_id].layer_size + g_layer[layer_id].curosr_buf_size) * 1024, /* 1024 for 1k */
            hifb_get_layer_mmz_names(layer_id));
        if (fix->smem_start == 0) {
            hifb_error("%s:failed to malloc the video memory, size: %ld KBtyes!\n", name,
                       (g_layer[layer_id].layer_size + g_layer[layer_id].curosr_buf_size));
            return HI_FAILURE;
        }
        /* u32LayerSize is KB */
        fix->smem_len = g_layer[layer_id].layer_size * 1024; /* 1024 for 1k */

        /* initialize the virtual address and clear memory */
        info->screen_base = hifb_buf_map(fix->smem_start, (g_layer[layer_id].layer_size +
                                         g_layer[layer_id].curosr_buf_size) * 1024); /* 1024 for 1k */

        if (info->screen_base == HI_NULL) {
            hifb_error("Failed to call map video memory,size:0x%x, start: 0x%lx\n",
                       fix->smem_len, fix->smem_start);
            return HI_FAILURE;
        }

        /* Initialize the memory to 0. Call TDE to do it. */
#if (HICHIP == HI3516E_V200)
        (hi_void)memset_s(info->screen_base, hifb_get_smem_len(info), 0x0, hifb_get_smem_len(info));
#else
        hifb_completely(info, 0, hifb_get_smem_len(info));
#endif
    }
    return HI_SUCCESS;
}
8787 #endif
8788
8789 /*
8790 * Function : hifb_overlay_probe
8791 * Description : initialize the framebuffer for the overlay and set
8792 */
8793 #ifdef __HuaweiLite__
8794 static hi_s32 __init hifb_overlay_probe(hi_u32 layer_id)
8795 {
8796 hi_s32 ret = fb_register(layer_id, 0);
8797 if (ret < 0) {
8798 hifb_error("failed to register_framebuffer!\n");
8799 ret = -EINVAL;
8800 goto ERR;
8801 }
8802
8803 return HI_SUCCESS;
8804
8805 ERR:
8806 hifb_overlay_cleanup(layer_id, HI_FALSE);
8807
8808 return ret;
8809 }
8810 #else
/*
 * Linux variant: allocate the fb_info (with hifb_par private data), set
 * up its default screen info and video memory, then register it with the
 * framebuffer core.  On any failure after allocation, hifb_overlay_cleanup
 * releases everything acquired so far.
 */
static hi_s32 __init hifb_overlay_probe(hi_u32 layer_id)
{
    hi_s32 ret;
    struct fb_info *info = HI_NULL;
    struct fb_fix_screeninfo *fix = HI_NULL;
    struct fb_var_screeninfo *var = HI_NULL;
    hifb_par *par = HI_NULL;

    /*
     * The size of the size represents the private data space of the device,
     * and the par of fb_info points to the private space, that is,
     * info->par already points to the memory space
     */
    info = framebuffer_alloc(sizeof(hifb_par), HI_NULL);
    if (info == HI_NULL) {
        hifb_error("failed to malloc the fb_info!\n");
        return -ENOMEM;
    }
    fix = &info->fix;
    var = &info->var;
    /* save the info pointer in global pointer array */
    g_layer[layer_id].info = info;

    info->flags = FBINFO_FLAG_DEFAULT | FBINFO_HWACCEL_YPAN | FBINFO_HWACCEL_XPAN;
    /* fbops members in fb_info point to s_sthifbops, so open, release, ioctl, etc. can get fb_info. */
    info->fbops = &g_hi_fb_ops;

    /* reset the private data and pick the default pixel format */
    par = (hifb_par *)(info->par);
    (hi_void)memset_s(par, sizeof(hifb_par), 0, sizeof(hifb_par));
    par->layer_id = layer_id;
    par->color_format = HIFB_FMT_ARGB1555;

    if (snprintf_s(fix->id, sizeof(fix->id), 5, "ovl%01u", layer_id) < 0) { /* 5:for char length */
        hifb_error("%s:%d:snprintf_s failure\n", __FUNCTION__, __LINE__);
        ret = HI_FAILURE;
        goto ERR;
    }
    /* It's not need to alloc mem for cursor layer if use softcursor */
    ret = overlay_probe_alloc_mem(info, fix, var);
    if (ret != HI_SUCCESS) {
        goto ERR;
    }
    ret = register_framebuffer(info);
    if (ret < 0) {
        hifb_error("failed to register_framebuffer!layerid = %d, s32Ret = %d\n", layer_id, ret);
        ret = -EINVAL;
        goto ERR;
    }

    hifb_dbg_info("succeed in registering the fb%d: %s frame buffer device\n",
                  info->node, fix->id);

    return HI_SUCCESS;

ERR:
    /* releases the memory and the fb_info acquired above */
    hifb_overlay_cleanup(layer_id, HI_FALSE);

    return ret;
}
8870 #endif
8871
8872 /*
8873 * Function : hifb_get_vram_size
8874 * Description : parse the parameter string and get the size. if
8875 the parameter is invalid, the size is default value.
8876 * Input : const char* pstr the string for the vram size
8877 * Return : the video memory size
8878 */
8879 static unsigned long hifb_get_vram_size(char *pstr)
8880 {
8881 hi_s32 str_is_valid = HI_TRUE;
8882 unsigned long vram_size = 0;
8883 unsigned long vram_size_temp;
8884 char *ptr = pstr;
8885
8886 if ((ptr == HI_NULL) || (*ptr == '\0')) {
8887 return 0;
8888 }
8889
8890 /* check if the string is valid */
8891 while (*ptr != '\0') {
8892 if (*ptr == ',') {
8893 break;
8894 } else if ((!((*ptr) >= '0' && (*ptr) <= '9')) && (*ptr != 'X') && (*ptr != 'x') &&
8895 ((*ptr > 'f' && *ptr <= 'z') || (*ptr > 'F' && *ptr <= 'Z'))) {
8896 str_is_valid = HI_FALSE;
8897 break;
8898 }
8899
8900 ptr++;
8901 }
8902
8903 if (str_is_valid) {
8904 #ifdef __HuaweiLite__
8905 vram_size = strtoul(pstr, (char **)HI_NULL, 0);
8906 #else
8907 vram_size = simple_strtoul(pstr, (char **)HI_NULL, 0);
8908 #endif
8909 if (vram_size > PAGE_SIZE_ALIGN_MAX) {
8910 hifb_error("vram_size(%lu)( > %lu) is overflow, it will be set to %u!\n", vram_size, PAGE_SIZE_ALIGN_MAX,
8911 0);
8912 vram_size = 0;
8913 }
8914
8915 vram_size_temp = vram_size;
8916 /* make the size PAGE_SIZE align */
8917 vram_size = ((vram_size * 1024 + PAGE_SIZE - 1) & PAGE_MASK) / 1024; /* 2^10 1024 */
8918 if (vram_size_temp != vram_size) {
8919 hifb_error("vram_size(%lu) if not align in 4, it will be set to %lu!\n", vram_size_temp, vram_size);
8920 }
8921 }
8922 return vram_size;
8923 }
8924
8925 static hi_void parse_cfg_start(hi_char **sc_str)
8926 {
8927 /* judge the cursor if use soft or hard layer */
8928 if (!strcmp("off", g_softcursor)) {
8929 g_soft_cursor = HI_FALSE;
8930 } else {
8931 g_soft_cursor = HI_TRUE;
8932 }
8933
8934 /* judge the g_display is need on */
8935 if (!strcmp("on", g_display)) {
8936 g_display_on = HI_TRUE;
8937 } else {
8938 g_display_on = HI_FALSE;
8939 }
8940
8941 /* get the string before next varm */
8942 *sc_str = strstr(g_video, "vram");
8943 hifb_dbg_info("video:%s\n", g_video);
8944
8945 return;
8946 }
8947
8948 static hi_void parse_cfg_change_layer_size(hi_char *sc_str, hi_u32 layer_id)
8949 {
8950 hi_ulong layer_size;
8951 layer_size = hifb_get_vram_size(sc_str);
8952
8953 if (g_drv_ops.capability[layer_id].bLayerSupported) {
8954 if (is_cursor_layer(layer_id)) {
8955 if (!is_soft_cursor()) {
8956 g_layer[layer_id].layer_size = layer_size;
8957 }
8958 } else {
8959 g_layer[layer_id].layer_size = layer_size;
8960 if (is_soft_cursor() && layer_size) {
8961 g_layer[layer_id].curosr_buf_size = HIFB_CURSOR_DEF_VRAM;
8962 }
8963 }
8964 }
8965 return;
8966 }
8967
/*
 * Convert the decimal layer-id string in 'number' and validate it against the
 * driver capabilities. 'size' is unused (kept for the caller's signature).
 * Returns HI_SUCCESS, or HI_FAILURE when the id is out of range or the layer
 * is not supported by this chip.
 */
static hi_s32 parse_cfg_change_layer_id(hi_u32 *layer_id, hi_char *number, hi_u32 size)
{
    hi_unused(size);
#ifdef __HuaweiLite__
    *layer_id = strtoul(number, (char **)HI_NULL, 10); /* 10 base data */
#else
    *layer_id = simple_strtoul(number, (char **)HI_NULL, 10); /* 10 base data */
#endif
    /* the id must name one of the layers this driver actually exposes */
    if (*layer_id >= g_drv_ops.layer_count) {
        hifb_error("Layer %u is in module_param---video out of range!\n", *layer_id);
        return HI_FAILURE;
    }

    if (!g_drv_ops.capability[*layer_id].bLayerSupported) {
        hifb_error("Layer %u unsupported, so module_param---video can't contain vram_size for it!\n",
                   *layer_id);
        return HI_FAILURE;
    }
    return HI_SUCCESS;
}
8988
8989 /*
8990 * Name : hifb_parse_cfg
8991 * Desc : Parse the parameters.
8992 * See : hifb_overlay_probe
8993 */
8994 static hi_s32 hifb_parse_cfg(hi_void)
8995 {
8996 hi_char *sc_str = HI_NULL;
8997 hi_char number[HIFB_NUMBLE] = {0};
8998 hi_u32 i, j;
8999 hi_u32 layer_id;
9000 hi_char ac_param[HIFB_ACFARAM] = {0};
9001 hi_char ac_temp[HIFB_ACTEMP] = {0};
9002 hi_bool is_param_valid = HI_FALSE;
9003
9004 /*
9005 * 1.judge the cursor if use soft or hard layer
9006 * 2.judge the display is need on
9007 * 3.get the string before next varm
9008 */
9009 parse_cfg_start(&sc_str);
9010
9011 /* parse cfg process */
9012 while (sc_str != HI_NULL) {
9013 /* parse the layer id and save it in a string */
9014 i = 0;
9015
9016 /* if the number of graphics layers is 10 or more, the string is risky */
9017 for (j = 0; j < g_drv_ops.layer_count; j++) {
9018 if (snprintf_s(ac_param, sizeof(ac_param), 11, "vram%01u_size", j) < 0) { /* 11:for char length */
9019 hifb_error("%s:%d:snprintf_s failure!\n", __FUNCTION__, __LINE__);
9020 return HI_FAILURE;
9021 }
9022 if (strncpy_s(ac_temp, sizeof(ac_temp), sc_str, 10) != EOK) { /* 10 cpy num */
9023 hifb_error("%s:%d:strncpy_s failure!\n", __FUNCTION__, __LINE__);
9024 return HI_FAILURE;
9025 }
9026 if (!strcmp(ac_param, ac_temp)) {
9027 is_param_valid = HI_TRUE;
9028 }
9029 }
9030 if (!is_param_valid) {
9031 hifb_error("insmod parameter is invalid!\n");
9032 return HI_FAILURE;
9033 }
9034 /* skip "vram" */
9035 sc_str += HIFB_NUMBLE;
9036 while (*sc_str != '_') {
9037 /* i>1 means layer id is bigger than 100, it's obviously out of range! */
9038 if (i > 1) {
9039 hifb_error("layer id is out of range!\n");
9040 return -1;
9041 }
9042
9043 number[i] = *sc_str;
9044 i++;
9045 sc_str++;
9046 }
9047
9048 number[i] = '\0';
9049
9050 /* change the layer id string into digital and assure it's legal */
9051 if (parse_cfg_change_layer_id(&layer_id, number, HIFB_NUMBLE) != HI_SUCCESS) {
9052 return HI_FAILURE;
9053 }
9054
9055 sc_str += sizeof("size") + i;
9056 /* get the layer size string and change it to digital */
9057 parse_cfg_change_layer_size(sc_str, layer_id);
9058
9059 /* get next layer string */
9060 sc_str = strstr(sc_str, "vram");
9061 }
9062
9063 return HI_SUCCESS;
9064 }
9065
9066 #ifdef CONFIG_HI_PROC_SHOW_SUPPORT
/* Pixel-format names for /proc output; order must track the color-format enum
 * used for par->color_format (the "BUTT" sentinel terminates the table). */
static const hi_char *g_fmt_name[] = {
    "RGB565", "RGB888", "KRGB444", "KRGB555", "KRGB888", "ARGB4444", "ARGB1555", "ARGB8888", "ARGB8565",
    "RGBA4444", "RGBA5551", "RGBA5658", "RGBA8888", "1BPP", "2BPP", "4BPP", "8BPP", "ACLUT44",
    "ACLUT88", "PUYVY", "PYUYV", "PYVYU", "YUV888", "AYUV8888", "YUVA8888", "BUTT"};

/* Human-readable layer names for /proc output, indexed by layer id. */
static const hi_char *g_layer_name[] = {"layer_0", "layer_1", "layer_2", "layer_3"};
9073 hi_s32 hifb_print_softcursor_proc(struct fb_info *info, osal_proc_entry_t *p, hi_void *v)
9074 {
9075 hifb_par *par = (hifb_par *)info->par;
9076 const hi_char *layer_name = HI_NULL;
9077 hi_unused(v);
9078 if (par->layer_id >= sizeof(g_layer_name) / sizeof(hi_char *)) {
9079 layer_name = "unknown layer";
9080 } else {
9081 layer_name = g_layer_name[par->layer_id];
9082 }
9083
9084 osal_seq_printf(p, "layer name \t: %s \n", layer_name);
9085 osal_seq_printf(p, "Show State \t :%s\n", par->show ? "ON" : "OFF");
9086 osal_seq_printf(p, "referecce count \t :%d\n", atomic_read(&par->ref_count));
9087 osal_seq_printf(p, "position \t :(%d, %d)\n", par->display_info.pos.s32XPos,
9088 par->display_info.pos.s32YPos);
9089 osal_seq_printf(p, "ColorFormat: \t :%s\n", g_fmt_name[par->color_format]);
9090 osal_seq_printf(p, "Alpha Enable \t :%s\n", par->alpha.bAlphaEnable ? "ON" : "OFF");
9091 osal_seq_printf(p, "AlphaChannel Enable \t :%s\n", par->alpha.bAlphaChannel ? "ON" : "OFF");
9092 osal_seq_printf(p, "Alpha0, Alpha1 \t :%u, %u\n", par->alpha.u8Alpha0, par->alpha.u8Alpha1);
9093 osal_seq_printf(p, "Alpha Global \t :%u\n", par->alpha.u8GlobalAlpha);
9094 osal_seq_printf(p, "Colorkey Enable \t :%s\n", par->ckey.key_enable ? "ON" : "OFF");
9095 osal_seq_printf(p, "Colorkey value \t :0x%x\n", par->ckey.key);
9096 osal_seq_printf(p, "cursor hot pos(x, y) \t :(%d, %d)\n", par->cursor_info.cursor.stHotPos.s32XPos,
9097 par->cursor_info.cursor.stHotPos.s32YPos);
9098 return 0;
9099 }
9100
9101 static hi_void print_cursor_proc(osal_proc_entry_t *p, hifb_par *par)
9102 {
9103 osal_seq_printf(p, "attach cursor id: \t :%u \n", par->cursor_info.attached_cursor_id);
9104 osal_seq_printf(p, "backup cursor addr \t :0x%llu\n", par->cursor_info.cursor.stCursor.u64PhyAddr);
9105 osal_seq_printf(p, "backup cursor fmt \t :%s\n", g_fmt_name[par->cursor_info.cursor.stCursor.enFmt]);
9106 osal_seq_printf(p, "backup cursor stride \t :%u\n", par->cursor_info.cursor.stCursor.u32Pitch);
9107 osal_seq_printf(p, "backup cursor (w, h) \t :(%u, %u)\n",
9108 par->cursor_info.cursor.stCursor.u32Width, par->cursor_info.cursor.stCursor.u32Height);
9109 osal_seq_printf(p, "cursor rect in g_display buffer \t :(%d, %d, %d, %d)\n",
9110 par->cursor_info.rect_in_disp_buf.x, par->cursor_info.rect_in_disp_buf.y,
9111 par->cursor_info.rect_in_disp_buf.w, par->cursor_info.rect_in_disp_buf.h);
9112 osal_seq_printf(p, "cursor pos in cursor image \t :(%d, %d)\n",
9113 par->cursor_info.pos_in_cursor.s32XPos, par->cursor_info.pos_in_cursor.s32YPos);
9114 return;
9115 }
9116
/*
 * Dump the user canvas / refresh state (update rect, canvas geometry,
 * compression and DDR-detect status) into the layer's /proc entry.
 */
static hi_void print_canvas_proc(osal_proc_entry_t *p, hifb_par *par)
{
    hifb_refresh_info *refresh_info = &par->refresh_info;
    volatile hifb_compress_info *compress_info = &par->compress_info;

    /*
     * NOTE(review): the updated-address sum adds y * UpdateRect.w + x without
     * a bytes-per-pixel factor or the canvas pitch — confirm this matches the
     * intended address arithmetic for the canvas buffer.
     */
    osal_seq_printf(p, "canavas updated addr \t :0x%llx\n", refresh_info->user_buffer.stCanvas.u64PhyAddr +
                    refresh_info->user_buffer.UpdateRect.y * refresh_info->user_buffer.UpdateRect.w +
                    refresh_info->user_buffer.UpdateRect.x);
    osal_seq_printf(p, "canavas updated (w, h) \t :%d,%d \n", refresh_info->user_buffer.UpdateRect.w,
                    refresh_info->user_buffer.UpdateRect.h);
    osal_seq_printf(p, "canvas width \t :%u\n", refresh_info->user_buffer.stCanvas.u32Width);
    osal_seq_printf(p, "canvas height \t :%u\n", refresh_info->user_buffer.stCanvas.u32Height);
    osal_seq_printf(p, "canvas pitch \t :%u\n", refresh_info->user_buffer.stCanvas.u32Pitch);
    osal_seq_printf(p, "canvas format \t :%s\n", g_fmt_name[refresh_info->user_buffer.stCanvas.enFmt]);
    osal_seq_printf(p, "IsCompress \t :%s\n", compress_info->compress_open ? "YES" : "NO");
    /* DDR detect is reported only when compression is on and no buffering mode is active */
    osal_seq_printf(p, "Is DDR Dettect \t :%s\n", (compress_info->compress_open &&
        compress_info->zone_nums && (refresh_info->buf_mode == HIFB_LAYER_BUF_NONE ||
        refresh_info->buf_mode == HIFB_LAYER_BUF_BUTT)) ? "YES" : "NO");
    osal_seq_printf(p, "DDR Detect Zones \t :%d\n", (compress_info->compress_open &&
        (refresh_info->buf_mode == HIFB_LAYER_BUF_NONE || refresh_info->buf_mode == HIFB_LAYER_BUF_BUTT)) ?
        compress_info->zone_nums : 0);
    osal_seq_printf(p, "PreMul Enable \t :%s\n", par->display_info.premul ? "ON" : "OFF");
    return;
}
9141
/*
 * Dump the display-buffer state (buffer mode, addresses, geometry, 2-buffer
 * flip bookkeeping) into the layer's /proc entry.
 */
static hi_void print_display_proc(osal_proc_entry_t *p, hifb_par *par)
{
    /* NOTE(review): buf_mode[] is indexed by refresh_info->buf_mode directly;
     * this assumes the enum never exceeds the 6 entries below — confirm. */
    const hi_char *buf_mode[] = {
        "triple", "double", "single", "triple( no frame discarded)", "fence", "unknown"
    };
    hifb_display_info *display_info = &par->display_info;
    hifb_refresh_info *refresh_info = &par->refresh_info;

    osal_seq_printf(p, "Display Buffer mode(+UsrBuf)\t :%s\n", buf_mode[refresh_info->buf_mode]);
    osal_seq_printf(p, "Displaying addr (register) \t :0x%llx\n", refresh_info->screen_addr);
    osal_seq_printf(p, "g_display buffer[0] addr \t :0x%llx\n", refresh_info->disp_buf_info.phy_addr[0]);
    osal_seq_printf(p, "g_display buffer[1] addr \t :0x%llx\n", refresh_info->disp_buf_info.phy_addr[1]);
    osal_seq_printf(p, "Be PreMul Mode: \t :%s\n", display_info->premul == HI_TRUE ? "YES" : "NO");
    osal_seq_printf(p, "displayrect \t :(%u, %u)\n", display_info->display_width,
                    display_info->display_height);
    osal_seq_printf(p, "screenrect \t :(%u, %u)\n", display_info->screen_width,
                    display_info->screen_height);
    osal_seq_printf(p, "device max resolution \t :%u, %u\n", display_info->max_screen_width,
                    display_info->max_screen_height);
    osal_seq_printf(p, "IsNeedFlip(2buf) \t :%s\n", refresh_info->disp_buf_info.need_flip ? "YES" : "NO");
    osal_seq_printf(p, "BufferIndexDisplaying(2buf)\t :%u\n", refresh_info->disp_buf_info.index_for_int);
    osal_seq_printf(p, "refresh request num(2buf) \t :%u\n", refresh_info->refresh_num);
    osal_seq_printf(p, "switch buf num(2buf) \t :%u\n", refresh_info->disp_buf_info.int_pic_num);
    osal_seq_printf(p, "union rect (2buf) \t :(%d,%d,%d,%d)\n",
                    refresh_info->disp_buf_info.union_rect.x, refresh_info->disp_buf_info.union_rect.y,
                    refresh_info->disp_buf_info.union_rect.w, refresh_info->disp_buf_info.union_rect.h);
    return;
}
9170
9171 static hi_void print_common_proc(osal_proc_entry_t *p, struct fb_info *info, const hi_char *layer_name)
9172 {
9173 hifb_par *par = (hifb_par *)info->par;
9174 hifb_display_info *display_info = &par->display_info;
9175 hifb_refresh_info *refresh_info = &par->refresh_info;
9176 const hi_char *antiflicer_level[] = {"NONE", "LOW", "MIDDLE", "HIGH", "AUTO", "ERROR"};
9177 const hi_char *mirror_mode[] = {"NONE", "HORIZONTAL", "VERTICAL", "BOTH", "unknown"};
9178 const hi_char *dynamic_range[] = {"SDR8", "SDR10", "HDR10", "HLG", "SLF", "unknown"};
9179 const hi_char *anti_mode[] = {"NONE", "TDE", "VOU", "ERROR"};
9180 const hi_char *rotation_mode[] = {"0", "90", "180", "270", "-"};
9181
9182 osal_seq_printf(p, "\n[HIFB] Version: ["MPP_VERSION"], Build Time["__DATE__", "__TIME__"]\n");
9183 osal_seq_printf(p, "\n");
9184
9185 osal_seq_printf(p, "layer name \t :%s \n", layer_name);
9186 osal_seq_printf(p, "Open count \t :%d\n", atomic_read(&par->ref_count));
9187 osal_seq_printf(p, "Show state \t :%s\n", par->show ? "ON" : "OFF");
9188 osal_seq_printf(p, "Start position \t :(%d, %d)\n", display_info->pos.s32XPos,
9189 display_info->pos.s32YPos);
9190 osal_seq_printf(p, "xres, yres \t :(%u, %u)\n", hifb_get_xres(info), hifb_get_yres(info));
9191 osal_seq_printf(p, "xres_virtual, yres_virtual \t :(%u, %u)\n", hifb_get_xres_virtual(info),
9192 hifb_get_yres_virtual(info));
9193 osal_seq_printf(p, "xoffset, yoffset \t :(%u, %u)\n", hifb_get_xoffset(info), hifb_get_yoffset(info));
9194 osal_seq_printf(p, "fix.line_length \t :%u\n", hifb_get_line_length(info));
9195 osal_seq_printf(p, "Mem size: \t :%u KB\n", hifb_get_smem_len(info) / 1024); /* 2^10 1024 */
9196 osal_seq_printf(p, "Layer Scale (hw): \t :%s \n", g_drv_ops.capability[par->layer_id].bVoScale ? "YES" : "NO");
9197 osal_seq_printf(p, "ColorFormat: \t :%s\n", g_fmt_name[par->color_format]);
9198 osal_seq_printf(p, "Alpha Enable \t :%s\n", par->alpha.bAlphaEnable ? "ON" : "OFF");
9199 osal_seq_printf(p, "AlphaChannel Enable \t :%s\n", par->alpha.bAlphaChannel ? "ON" : "OFF");
9200 osal_seq_printf(p, "Alpha0, Alpha1 \t :%u, %u\n", par->alpha.u8Alpha0, par->alpha.u8Alpha1);
9201 osal_seq_printf(p, "Alpha Global \t :%u\n", par->alpha.u8GlobalAlpha);
9202 osal_seq_printf(p, "Colorkey Enable \t :%s\n", par->ckey.key_enable ? "ON" : "OFF");
9203 osal_seq_printf(p, "Colorkey value \t :0x%x\n", par->ckey.key);
9204 osal_seq_printf(p, "Mirror Mode: \t :%s\n", mirror_mode[display_info->mirror_mode]);
9205 osal_seq_printf(p, "Dynamic Range: \t :%s\n", dynamic_range[display_info->dynamic_range]);
9206 osal_seq_printf(p, "Deflicker Mode: \t :%s\n", anti_mode[display_info->antiflicker_mode]);
9207 osal_seq_printf(p, "Rotation mode: \t :%s\n", rotation_mode[display_info->rotate_mode]);
9208 osal_seq_printf(p, "Deflicker Level: \t :%s\n", antiflicer_level[display_info->antiflicker_level]);
9209 osal_seq_printf(p, "HiFB mode: \t :%s\n", HIFB_LAYER_BUF_BUTT ==
9210 refresh_info->buf_mode ? "STANDARD" : "EXTEND");
9211 return;
9212 }
9213
9214 hi_s32 hifb_print_layer_proc(struct fb_info *info, osal_proc_entry_t *p, hi_void *v)
9215 {
9216 hifb_par *par = (hifb_par *)info->par;
9217 hifb_display_info *display_info = &par->display_info;
9218 const hi_char *layer_name = HI_NULL;
9219 hi_unused(v);
9220 if (par->layer_id >= sizeof(g_layer_name) / sizeof(hi_char *)) {
9221 layer_name = "unknown layer";
9222 } else {
9223 layer_name = g_layer_name[par->layer_id];
9224 }
9225
9226 if (display_info->antiflicker_mode > HIFB_ANTIFLICKER_BUTT) {
9227 display_info->antiflicker_mode = HIFB_ANTIFLICKER_BUTT;
9228 }
9229
9230 if (display_info->antiflicker_level > HIFB_LAYER_ANTIFLICKER_BUTT) {
9231 display_info->antiflicker_level = HIFB_LAYER_ANTIFLICKER_BUTT;
9232 }
9233
9234 print_common_proc(p, info, layer_name);
9235
9236 print_display_proc(p, par);
9237
9238 print_canvas_proc(p, par);
9239
9240 if (par->cursor_info.attached && is_soft_cursor()) {
9241 print_cursor_proc(p, par);
9242 }
9243 return HI_SUCCESS;
9244 }
9245
9246 hi_s32 hifb_read_proc(osal_proc_entry_t *entry)
9247 {
9248 struct fb_info *info = HI_NULL;
9249 hifb_par *par = HI_NULL;
9250 if (entry == HI_NULL) {
9251 return HI_FAILURE;
9252 }
9253 info = (struct fb_info *)(entry->private);
9254 if (info == HI_NULL) {
9255 return HI_FAILURE;
9256 }
9257 par = (hifb_par *)info->par;
9258 if (par == HI_NULL) {
9259 return HI_FAILURE;
9260 }
9261
9262 if (!is_cursor_layer(par->layer_id) || !is_soft_cursor()) {
9263 return hifb_print_layer_proc(info, entry, HI_NULL);
9264 } else {
9265 return hifb_print_softcursor_proc(info, entry, HI_NULL);
9266 }
9267
9268 return HI_SUCCESS;
9269 }
9270
9271 #ifndef __HuaweiLite__
9272 static hi_void parse_procmd_help(hifb_par *par, const hi_char *cmd)
9273 {
9274 if (strncmp("help", cmd, 4) == 0) { /* 4 is length */
9275 osal_printk("help info:\n");
9276 osal_printk("echo cmd > proc file\n");
9277 osal_printk("hifb support cmd:\n");
9278 osal_printk("show:show layer\n");
9279 osal_printk("hide:hide layer\n");
9280 osal_printk("For example, if you want to hide layer 0,you can input:\n");
9281 osal_printk(" echo hide > /proc/umap/hifb0\n");
9282 } else {
9283 hifb_error("u32LayerID %d doesn't support cmd:%s, use help cmd to show help info!\n", par->layer_id, cmd);
9284 }
9285 return;
9286 }
9287
9288 static hi_void hifb_parse_proccmd(osal_proc_entry_t *p, hi_u32 layer_id, const hi_char *cmd)
9289 {
9290 struct fb_info *info = g_layer[layer_id].info;
9291 hifb_par *par = (hifb_par *)info->par;
9292 hi_s32 cnt;
9293 unsigned long lockflag;
9294 hi_unused(p);
9295 hifb_spin_lock_irqsave(&par->lock, lockflag);
9296 cnt = atomic_read(&par->ref_count);
9297
9298 if (strncmp("show", cmd, 4) == 0) { /* 4 is length */
9299 if (cnt == 0) {
9300 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
9301 hifb_error("err:u32LayerId %d no open!\n", par->layer_id);
9302 return;
9303 }
9304
9305 if (is_cursor_layer(par->layer_id) && is_soft_cursor()) {
9306 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
9307 hifb_error("cursor layer %d doesn't support this cmd!\n", par->layer_id);
9308 return;
9309 }
9310
9311 if (!par->show) {
9312 par->modifying = HI_TRUE;
9313 par->show = HI_TRUE;
9314 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_SHOW;
9315 par->modifying = HI_FALSE;
9316 }
9317 } else if (strncmp("hide", cmd, 4) == 0) { /* 4 is length */
9318 if (cnt == 0) {
9319 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
9320 hifb_error("err:u32LayerId %d no open!\n", par->layer_id);
9321 return;
9322 }
9323
9324 if (is_cursor_layer(par->layer_id) && is_soft_cursor()) {
9325 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
9326 hifb_error("cursor layer %d doesn't support this cmd!\n", par->layer_id);
9327 return;
9328 }
9329
9330 if (par->show) {
9331 par->modifying = HI_TRUE;
9332 par->show = HI_FALSE;
9333 par->param_modify_mask |= HIFB_LAYER_PARAMODIFY_SHOW;
9334 par->modifying = HI_FALSE;
9335 }
9336 }
9337
9338 hifb_spin_unlock_irqrestore(&par->lock, lockflag);
9339
9340 parse_procmd_help(par, cmd);
9341 return;
9342 }
9343 #endif
9344
9345 hi_s32 hifb_write_proc(osal_proc_entry_t *entry, const char *buf, int count, long long *ppos)
9346 {
9347 #ifndef __HuaweiLite__
9348 #define TMP_BUF_LEN 32
9349
9350 struct fb_info *info = HI_NULL;
9351 hifb_par *par = HI_NULL;
9352 char tmp_buf[TMP_BUF_LEN] = {0};
9353 int len;
9354
9355 if (entry == HI_NULL) {
9356 return -ENOSYS;
9357 }
9358
9359 if (count <= 0) {
9360 return -ENOSYS;
9361 }
9362
9363 len = (count >= TMP_BUF_LEN) ? TMP_BUF_LEN : count;
9364
9365 if (osal_copy_from_user(tmp_buf, buf, len))
9366 return HI_FAILURE;
9367
9368 tmp_buf[len - 1] = '\0';
9369
9370 info = (struct fb_info *)(entry->private);
9371 if (info == HI_NULL) {
9372 return HI_FAILURE;
9373 }
9374 par = (hifb_par *)(info->par);
9375 if (par == HI_NULL) {
9376 return HI_FAILURE;
9377 }
9378
9379 hifb_parse_proccmd(entry, par->layer_id, (hi_char*)tmp_buf);
9380 if (memset_s((hi_void *)tmp_buf, TMP_BUF_LEN, 0, count) != EOK) {
9381 hifb_error("%s:%d:memset_s failed!\n", __FUNCTION__, __LINE__);
9382 return HI_FAILURE;
9383 }
9384 #endif
9385 hi_unused(ppos);
9386 hi_unused(entry);
9387 hi_unused(buf);
9388 return count;
9389 }
9390 #endif
9391
/* for linux */
#ifndef __HuaweiLite__
#ifndef CONFIG_HI_VO_FB_SEPARATE
/* for CONFIG_HI_VO_FB_SEPARATE=n in linux */
#ifdef MODULE
/* built as a loadable module: standard module init/exit hooks with GPL tag */
module_init(hifb_init);
module_exit(hifb_cleanup);
MODULE_LICENSE("GPL");
#else
/* built into the kernel image: initialize during subsystem init */
subsys_initcall(hifb_init);
#endif
#endif

#endif
9406