1 /*
2 * drivers/amlogic/amports/vh265.c
3 *
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
14 * more details.
15 *
16 */
17 #define DEBUG
18 #include <linux/kernel.h>
19 #include <linux/module.h>
20 #include <linux/types.h>
21 #include <linux/errno.h>
22 #include <linux/interrupt.h>
23 #include <linux/semaphore.h>
24 #include <linux/delay.h>
25 #include <linux/timer.h>
26 #include <linux/kfifo.h>
27 #include <linux/kthread.h>
28 #include <linux/sched/clock.h>
29 #include <linux/platform_device.h>
30 #include <linux/amlogic/media/vfm/vframe.h>
31 #include <linux/amlogic/media/utils/amstream.h>
32 #include <linux/amlogic/media/utils/vformat.h>
33 #include <linux/amlogic/media/frame_sync/ptsserv.h>
34 #include <linux/amlogic/media/canvas/canvas.h>
35 #include <linux/amlogic/media/vfm/vframe.h>
36 #include <linux/amlogic/media/vfm/vframe_provider.h>
37 #include <linux/amlogic/media/vfm/vframe_receiver.h>
38 #include <linux/dma-mapping.h>
39 #include <linux/dma-map-ops.h>
40 #include <linux/slab.h>
41 #include <linux/mm.h>
42 #include <linux/amlogic/tee.h>
43 #include "../../../stream_input/amports/amports_priv.h"
44 #include <linux/amlogic/media/codec_mm/codec_mm.h>
45 #include "../utils/decoder_mmu_box.h"
46 #include "../utils/decoder_bmmu_box.h"
47 #include "../utils/config_parser.h"
48 #include "../utils/firmware.h"
49 #include "../../../common/chips/decoder_cpu_ver_info.h"
50 #include "../utils/vdec_v4l2_buffer_ops.h"
51 #include <media/v4l2-mem2mem.h>
52
53 #define HEVC_8K_LFTOFFSET_FIX
54
55 #define CONSTRAIN_MAX_BUF_NUM
56
57 #define SWAP_HEVC_UCODE
58 #define DETREFILL_ENABLE
59
60 #define AGAIN_HAS_THRESHOLD
61 /*#define TEST_NO_BUF*/
62 #define HEVC_PIC_STRUCT_SUPPORT
63 #define MULTI_INSTANCE_SUPPORT
64 #define USE_UNINIT_SEMA
65
66 /* .buf_size = 0x100000*16,
67 //4k2k , 0x100000 per buffer */
68 /* 4096x2304 , 0x120000 per buffer */
69 #define MPRED_8K_MV_BUF_SIZE (0x120000*4)
70 #define MPRED_4K_MV_BUF_SIZE (0x120000)
71 #define MPRED_MV_BUF_SIZE (0x40000)
72
73 #define MMU_COMPRESS_HEADER_SIZE 0x48000
74 #define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
75
76 #define MAX_FRAME_4K_NUM 0x1200
77 #define MAX_FRAME_8K_NUM (0x1200*4)
78
79 //#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)
80 #define H265_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_7
81
82 #define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
83
84 #define HEVC_CM_HEADER_START_ADDR 0x3628
85 #define HEVC_SAO_MMU_VH1_ADDR 0x363b
86 #define HEVC_SAO_MMU_VH0_ADDR 0x363a
87
88 #define HEVC_DBLK_CFGB 0x350b
89 #define HEVCD_MPP_DECOMP_AXIURG_CTL 0x34c7
90 #define SWAP_HEVC_OFFSET (3 * 0x1000)
91
92 #define MEM_NAME "codec_265"
93 /* #include <mach/am_regs.h> */
94 #include <linux/amlogic/media/utils/vdec_reg.h>
95
96 #include "../utils/vdec.h"
97 #include "../utils/amvdec.h"
98 #include <linux/amlogic/media/video_sink/video.h>
99 #include <linux/amlogic/media/codec_mm/configs.h>
100
101 #define SEND_LMEM_WITH_RPM
102 #define SUPPORT_10BIT
103 /* #define ERROR_HANDLE_DEBUG */
104
105 #ifndef STAT_KTHREAD
106 #define STAT_KTHREAD 0x40
107 #endif
108
109 #ifdef MULTI_INSTANCE_SUPPORT
110 #define MAX_DECODE_INSTANCE_NUM 9
111 #define MULTI_DRIVER_NAME "ammvdec_h265"
112 #endif
113 #define DRIVER_NAME "amvdec_h265"
114 #define MODULE_NAME "amvdec_h265"
115 #define DRIVER_HEADER_NAME "amvdec_h265_header"
116
117 #define PUT_INTERVAL (HZ/100)
118 #define ERROR_SYSTEM_RESET_COUNT 200
119
120 #define PTS_NORMAL 0
121 #define PTS_NONE_REF_USE_DURATION 1
122
123 #define PTS_MODE_SWITCHING_THRESHOLD 3
124 #define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
125
126 #define DUR2PTS(x) ((x)*90/96)
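/* Worked example (illustrative): frame durations in this driver are kept in
 * 1/96000 s units (96000/fps), while PTS runs at 90 kHz, so
 * DUR2PTS(3840) = 3840 * 90 / 96 = 3600, i.e. the 40 ms duration of a
 * 25 fps frame expressed in 90 kHz ticks.
 */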
127
128 #define MAX_SIZE_8K (8192 * 4608)
129 #define MAX_SIZE_4K (4096 * 2304)
130
131 #define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
132 #define IS_4K_SIZE(w, h) (((w) * (h)) > (1920*1088))
133
134 #define SEI_UserDataITU_T_T35 4
135 #define INVALID_IDX -1 /* Invalid buffer index.*/
136
137 static struct semaphore h265_sema;
138
139 struct hevc_state_s;
140 static int hevc_print(struct hevc_state_s *hevc,
141 int debug_flag, const char *fmt, ...);
142 static int hevc_print_cont(struct hevc_state_s *hevc,
143 int debug_flag, const char *fmt, ...);
144 static int vh265_vf_states(struct vframe_states *states, void *);
145 static struct vframe_s *vh265_vf_peek(void *);
146 static struct vframe_s *vh265_vf_get(void *);
147 static void vh265_vf_put(struct vframe_s *, void *);
148 static int vh265_event_cb(int type, void *data, void *private_data);
149
150 static int vh265_stop(struct hevc_state_s *hevc);
151 #ifdef MULTI_INSTANCE_SUPPORT
152 static int vmh265_stop(struct hevc_state_s *hevc);
153 static s32 vh265_init(struct vdec_s *vdec);
154 static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask);
155 static void reset_process_time(struct hevc_state_s *hevc);
156 static void start_process_time(struct hevc_state_s *hevc);
157 static void restart_process_time(struct hevc_state_s *hevc);
158 static void timeout_process(struct hevc_state_s *hevc);
159 #else
160 static s32 vh265_init(struct hevc_state_s *hevc);
161 #endif
162 static void vh265_prot_init(struct hevc_state_s *hevc);
163 static int vh265_local_init(struct hevc_state_s *hevc);
164 static void vh265_check_timer_func(struct timer_list *timer);
165 static void config_decode_mode(struct hevc_state_s *hevc);
166
167 static const char vh265_dec_id[] = "vh265-dev";
168
169 #define PROVIDER_NAME "decoder.h265"
170 #define MULTI_INSTANCE_PROVIDER_NAME "vdec.h265"
171
172 static const struct vframe_operations_s vh265_vf_provider = {
173 .peek = vh265_vf_peek,
174 .get = vh265_vf_get,
175 .put = vh265_vf_put,
176 .event_cb = vh265_event_cb,
177 .vf_states = vh265_vf_states,
178 };
179
180 static struct vframe_provider_s vh265_vf_prov;
181
182 static u32 bit_depth_luma;
183 static u32 bit_depth_chroma;
184 static u32 video_signal_type;
185 static int start_decode_buf_level = 0x8000;
186 static unsigned int decode_timeout_val = 200;
187
188 static u32 run_ready_min_buf_num = 2;
189 static u32 disable_ip_mode;
190 /*data_resend_policy:
191 bit 0: in stream-based mode, resend data when the decode buffer runs empty
192 */
193 static u32 data_resend_policy = 1;
194
195 #define VIDEO_SIGNAL_TYPE_AVAILABLE_MASK 0x20000000
196 /*
197 static const char * const video_format_names[] = {
198 "component", "PAL", "NTSC", "SECAM",
199 "MAC", "unspecified", "unspecified", "unspecified"
200 };
201
202 static const char * const color_primaries_names[] = {
203 "unknown", "bt709", "undef", "unknown",
204 "bt470m", "bt470bg", "smpte170m", "smpte240m",
205 "film", "bt2020"
206 };
207
208 static const char * const transfer_characteristics_names[] = {
209 "unknown", "bt709", "undef", "unknown",
210 "bt470m", "bt470bg", "smpte170m", "smpte240m",
211 "linear", "log100", "log316", "iec61966-2-4",
212 "bt1361e", "iec61966-2-1", "bt2020-10", "bt2020-12",
213 "smpte-st-2084", "smpte-st-428"
214 };
215
216 static const char * const matrix_coeffs_names[] = {
217 "GBR", "bt709", "undef", "unknown",
218 "fcc", "bt470bg", "smpte170m", "smpte240m",
219 "YCgCo", "bt2020nc", "bt2020c"
220 };
221 */
222 #ifdef SUPPORT_10BIT
223 #define HEVC_CM_BODY_START_ADDR 0x3626
224 #define HEVC_CM_BODY_LENGTH 0x3627
225 #define HEVC_CM_HEADER_LENGTH 0x3629
226 #define HEVC_CM_HEADER_OFFSET 0x362b
227 #define HEVC_SAO_CTRL9 0x362d
228 #define LOSLESS_COMPRESS_MODE
229 /* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
230 /* double_write_mode:
231 * 0, no double write;
232 * 1, 1:1 ratio;
233 * 2, (1/4):(1/4) ratio;
234 * 3, (1/4):(1/4) ratio, with both compressed frame included
235 * 4, (1/2):(1/2) ratio;
236 * 0x10, double write only
237 * 0x100, if > 1080p,use mode 4,else use mode 1;
238 * 0x200, if > 1080p,use mode 2,else use mode 1;
239 * 0x300, if > 720p, use mode 4, else use mode 1;
240 */
241 static u32 double_write_mode;
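/* Illustrative mapping (see get_double_write_mode() below): with
 * double_write_mode = 0x100, a 3840x2160 stream (>1080p) uses mode 4,
 * i.e. a (1/2):(1/2) scaled double-write output, while a 1920x1080
 * stream falls back to mode 1 (1:1).
 */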
242
243 /*#define DECOMP_HEADR_SURGENT*/
244
245 static u32 mem_map_mode; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
246 static u32 enable_mem_saving = 1;
247 static u32 workaround_enable;
248 static u32 force_w_h;
249 #endif
250 static u32 force_fps;
251 static u32 pts_unstable;
252 #define H265_DEBUG_BUFMGR 0x01
253 #define H265_DEBUG_BUFMGR_MORE 0x02
254 #define H265_DEBUG_DETAIL 0x04
255 #define H265_DEBUG_REG 0x08
256 #define H265_DEBUG_MAN_SEARCH_NAL 0x10
257 #define H265_DEBUG_MAN_SKIP_NAL 0x20
258 #define H265_DEBUG_DISPLAY_CUR_FRAME 0x40
259 #define H265_DEBUG_FORCE_CLK 0x80
260 #define H265_DEBUG_SEND_PARAM_WITH_REG 0x100
261 #define H265_DEBUG_NO_DISPLAY 0x200
262 #define H265_DEBUG_DISCARD_NAL 0x400
263 #define H265_DEBUG_OUT_PTS 0x800
264 #define H265_DEBUG_DUMP_PIC_LIST 0x1000
265 #define H265_DEBUG_PRINT_SEI 0x2000
266 #define H265_DEBUG_PIC_STRUCT 0x4000
267 #define H265_DEBUG_HAS_AUX_IN_SLICE 0x8000
268 #define H265_DEBUG_DIS_LOC_ERROR_PROC 0x10000
269 #define H265_DEBUG_DIS_SYS_ERROR_PROC 0x20000
270 #define H265_NO_CHANG_DEBUG_FLAG_IN_CODE 0x40000
271 #define H265_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
272 #define H265_DEBUG_HW_RESET 0x100000
273 #define H265_CFG_CANVAS_IN_DECODE 0x200000
274 #define H265_DEBUG_DV 0x400000
275 #define H265_DEBUG_NO_EOS_SEARCH_DONE 0x800000
276 #define H265_DEBUG_NOT_USE_LAST_DISPBUF 0x1000000
277 #define H265_DEBUG_IGNORE_CONFORMANCE_WINDOW 0x2000000
278 #define H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP 0x4000000
279 #ifdef MULTI_INSTANCE_SUPPORT
280 #define PRINT_FLAG_ERROR 0x0
281 #define IGNORE_PARAM_FROM_CONFIG 0x08000000
282 #define PRINT_FRAMEBASE_DATA 0x10000000
283 #define PRINT_FLAG_VDEC_STATUS 0x20000000
284 #define PRINT_FLAG_VDEC_DETAIL 0x40000000
285 #define PRINT_FLAG_V4L_DETAIL 0x80000000
286 #endif
287
288 #define BUF_POOL_SIZE 32
289 #define MAX_BUF_NUM 24
290 #define MAX_REF_PIC_NUM 24
291 #define MAX_REF_ACTIVE 16
292
293 #ifdef MV_USE_FIXED_BUF
294 #define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1)
295 #define VF_BUFFER_IDX(n) (n)
296 #define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
297 #else
298 #define BMMU_MAX_BUFFERS (BUF_POOL_SIZE + 1 + MAX_REF_PIC_NUM)
299 #define VF_BUFFER_IDX(n) (n)
300 #define BMMU_WORKSPACE_ID (BUF_POOL_SIZE)
301 #define MV_BUFFER_IDX(n) (BUF_POOL_SIZE + 1 + n)
302 #endif
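/* Resulting bmmu slot layout when MV_USE_FIXED_BUF is not defined
 * (illustrative, with BUF_POOL_SIZE = 32 and MAX_REF_PIC_NUM = 24):
 * slots 0..31 hold frame buffers, slot 32 is the workspace, and
 * slots 33..56 hold the per-picture MV buffers (57 slots in total).
 */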
303
304 #define HEVC_MV_INFO 0x310d
305 #define HEVC_QP_INFO 0x3137
306 #define HEVC_SKIP_INFO 0x3136
307
308 const u32 h265_version = 201602101;
309 static u32 debug_mask = 0xffffffff;
310 static u32 log_mask;
311 static u32 debug;
312 static u32 radr;
313 static u32 rval;
314 static u32 dbg_cmd;
315 static u32 dump_nal;
316 static u32 dbg_skip_decode_index;
317 static u32 endian = 0xff0;
318 #ifdef ERROR_HANDLE_DEBUG
319 static u32 dbg_nal_skip_flag;
320 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
321 static u32 dbg_nal_skip_count;
322 #endif
323 /*for debug*/
324 /*
325 udebug_flag:
326 bit 0, enable ucode print
327 bit 1, enable ucode detail print
328 bit [31:16] not 0, pos to dump lmem
329 bit 2, pop bits to lmem
330 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
331 */
332 static u32 udebug_flag;
333 /*
334 when udebug_flag[1:0] is not 0
335 udebug_pause_pos not 0,
336 pause position
337 */
338 static u32 udebug_pause_pos;
339 /*
340 when udebug_flag[1:0] is not 0
341 and udebug_pause_pos is not 0,
342 pause only when DEBUG_REG2 is equal to this val
343 */
344 static u32 udebug_pause_val;
345
346 static u32 udebug_pause_decode_idx;
347
348 static u32 decode_pic_begin;
349 static uint slice_parse_begin;
350 static u32 step;
351 static bool is_reset;
352
353 #ifdef CONSTRAIN_MAX_BUF_NUM
354 static u32 run_ready_max_vf_only_num;
355 static u32 run_ready_display_q_num;
356 /*0: not check
357 0xff: work_pic_num
358 */
359 static u32 run_ready_max_buf_num = 0xff;
360 #endif
361
362 static u32 dynamic_buf_num_margin = 7;
363 static u32 buf_alloc_width;
364 static u32 buf_alloc_height;
365
366 static u32 max_buf_num = 16;
367 static u32 buf_alloc_size;
368 /*static u32 re_config_pic_flag;*/
369 /*
370 *bit[0]: 0,
371 *bit[1]: 0, always release cma buffer when stop
372 *bit[1]: 1, never release cma buffer when stop
373 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
374 *do not release cma buffer if blackout is not 1
375 *
376 *bit[2]: 0, when start decoding, check current displayed buffer
377 * (only for buffer decoded by h265) if blackout is 0
378 * 1, do not check current displayed buffer
379 *
380 *bit[3]: 1, if blackout is not 1, do not release current
381 * displayed cma buffer always.
382 */
383 /* set to 1 for fast play;
384 * set to 8 for other case of "keep last frame"
385 */
386 static u32 buffer_mode = 1;
387
388 /* buffer_mode_dbg: debug only*/
389 static u32 buffer_mode_dbg = 0xffff0000;
390 /**/
391 /*
392 *bit[1:0]PB_skip_mode: 0, start decoding at begin;
393 *1, start decoding after first I;
394 *2, only decode and display none error picture;
395 *3, start decoding and display after IDR,etc
396 *bit[31:16] PB_skip_count_after_decoding (decoding but not display),
397 *only for mode 0 and 1.
398 */
399 static u32 nal_skip_policy = 2;
400
401 /*
402 *bit 0, 1: only display I picture;
403 *bit 1, 1: only decode I picture;
404 */
405 static u32 i_only_flag;
406
407 /*
408 bit 0, fast output first I picture
409 */
410 static u32 fast_output_enable = 1;
411
412 static u32 frmbase_cont_bitlevel = 0x60;
413
414 /*
415 use_cma: 1, use both reserved memory and cma for buffers
416 2, only use cma for buffers
417 */
418 static u32 use_cma = 2;
419
420 #define AUX_BUF_ALIGN(adr) ((adr + 0xf) & (~0xf))
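/* e.g. AUX_BUF_ALIGN(0x1001) = 0x1010: aux buffer addresses are rounded up
 * to the next 16-byte boundary.
 */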
421 static u32 prefix_aux_buf_size = (16 * 1024);
422 static u32 suffix_aux_buf_size;
423
424 static u32 max_decoding_time;
425 /*
426 *error handling
427 */
428 /*error_handle_policy:
429 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
430 *1, skip error_skip_nal_count nals before error recovery;
431 *bit 1 (valid only when bit0 == 1):
432 *1, wait vps/sps/pps after error recovery;
433 *bit 2 (valid only when bit0 == 0):
434 *0, auto search after error recovery (hevc_recover() called);
435 *1, manual search after error recovery
436 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
437 *
438 *bit 4: 0, set error_mark after reset/recover
439 * 1, do not set error_mark after reset/recover
440 *bit 5: 0, check total lcu for every picture
441 * 1, do not check total lcu
442 *bit 6: 0, do not check head error
443 * 1, check head error
444 *
445 */
446
447 static u32 error_handle_policy;
448 static u32 error_skip_nal_count = 6;
449 static u32 error_handle_threshold = 30;
450 static u32 error_handle_nal_skip_threshold = 10;
451 static u32 error_handle_system_threshold = 30;
452 static u32 interlace_enable = 1;
453 static u32 fr_hint_status;
454
455 /*
456 *parser_sei_enable:
457 * bit 0, sei;
458 * bit 1, sei_suffix (fill aux buf)
459 * bit 2, fill sei to aux buf (when bit 0 is 1)
460 * bit 8, debug flag
461 */
462 static u32 parser_sei_enable;
463 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
464 static u32 parser_dolby_vision_enable = 1;
465 static u32 dolby_meta_with_el;
466 static u32 dolby_el_flush_th = 2;
467 #endif
468 /* this is only for h265 mmu enable */
469
470 static u32 mmu_enable = 1;
471 static u32 mmu_enable_force;
472 static u32 work_buf_size;
473 static unsigned int force_disp_pic_index;
474 static unsigned int disp_vframe_valve_level;
475 static int pre_decode_buf_level = 0x1000;
476 static unsigned int pic_list_debug;
477 #ifdef HEVC_8K_LFTOFFSET_FIX
478 /* performance_profile: bit 0, multi slice in ucode
479 */
480 static unsigned int performance_profile = 1;
481 #endif
482 #ifdef MULTI_INSTANCE_SUPPORT
483 static unsigned int max_decode_instance_num
484 = MAX_DECODE_INSTANCE_NUM;
485 static unsigned int decode_frame_count[MAX_DECODE_INSTANCE_NUM];
486 static unsigned int display_frame_count[MAX_DECODE_INSTANCE_NUM];
487 static unsigned int max_process_time[MAX_DECODE_INSTANCE_NUM];
488 static unsigned int max_get_frame_interval[MAX_DECODE_INSTANCE_NUM];
489 static unsigned int run_count[MAX_DECODE_INSTANCE_NUM];
490 static unsigned int input_empty[MAX_DECODE_INSTANCE_NUM];
491 static unsigned int not_run_ready[MAX_DECODE_INSTANCE_NUM];
492 static unsigned int ref_frame_mark_flag[MAX_DECODE_INSTANCE_NUM] =
493 {1, 1, 1, 1, 1, 1, 1, 1, 1};
494
495 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
496 static unsigned char get_idx(struct hevc_state_s *hevc);
497 #endif
498
499 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
500 static u32 dv_toggle_prov_name;
501
502 static u32 dv_debug;
503
504 static u32 force_bypass_dvenl;
505 #endif
506 #endif
507
508
509 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
510 #define get_dbg_flag(hevc) ((debug_mask & (1 << hevc->index)) ? debug : 0)
511 #define get_dbg_flag2(hevc) ((debug_mask & (1 << get_idx(hevc))) ? debug : 0)
512 #define is_log_enable(hevc) ((log_mask & (1 << hevc->index)) ? 1 : 0)
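/* e.g. with debug_mask = 0x2, only the instance whose hevc->index is 1 sees
 * the 'debug' flags; every other instance gets 0 from get_dbg_flag().
 */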
513 #else
514 #define get_dbg_flag(hevc) debug
515 #define get_dbg_flag2(hevc) debug
516 #define is_log_enable(hevc) (log_mask ? 1 : 0)
517 #define get_valid_double_write_mode(hevc) double_write_mode
518 #define get_buf_alloc_width(hevc) buf_alloc_width
519 #define get_buf_alloc_height(hevc) buf_alloc_height
520 #define get_dynamic_buf_num_margin(hevc) dynamic_buf_num_margin
521 #endif
522 #define get_buffer_mode(hevc) buffer_mode
523
524
525 DEFINE_SPINLOCK(lock);
526 struct task_struct *h265_task = NULL;
527 #undef DEBUG_REG
528 #ifdef DEBUG_REG
529 void WRITE_VREG_DBG(unsigned adr, unsigned val)
530 {
531 if (debug & H265_DEBUG_REG)
532 pr_info("%s(%x, %x)\n", __func__, adr, val);
533 WRITE_VREG(adr, val);
534 }
535
536 #undef WRITE_VREG
537 #define WRITE_VREG WRITE_VREG_DBG
538 #endif
539 extern u32 trickmode_i;
540
541 static DEFINE_MUTEX(vh265_mutex);
542
543 static DEFINE_MUTEX(vh265_log_mutex);
544
545 //static struct vdec_info *gvs;
546
547 static u32 without_display_mode;
548
549 /**************************************************
550 *
551 *h265 buffer management include
552 *
553 ***************************************************
554 */
555 enum NalUnitType {
556 NAL_UNIT_CODED_SLICE_TRAIL_N = 0, /* 0 */
557 NAL_UNIT_CODED_SLICE_TRAIL_R, /* 1 */
558
559 NAL_UNIT_CODED_SLICE_TSA_N, /* 2 */
560 /* Current name in the spec: TSA_R */
561 NAL_UNIT_CODED_SLICE_TLA, /* 3 */
562
563 NAL_UNIT_CODED_SLICE_STSA_N, /* 4 */
564 NAL_UNIT_CODED_SLICE_STSA_R, /* 5 */
565
566 NAL_UNIT_CODED_SLICE_RADL_N, /* 6 */
567 /* Current name in the spec: RADL_R */
568 NAL_UNIT_CODED_SLICE_DLP, /* 7 */
569
570 NAL_UNIT_CODED_SLICE_RASL_N, /* 8 */
571 /* Current name in the spec: RASL_R */
572 NAL_UNIT_CODED_SLICE_TFD, /* 9 */
573
574 NAL_UNIT_RESERVED_10,
575 NAL_UNIT_RESERVED_11,
576 NAL_UNIT_RESERVED_12,
577 NAL_UNIT_RESERVED_13,
578 NAL_UNIT_RESERVED_14,
579 NAL_UNIT_RESERVED_15,
580
581 /* Current name in the spec: BLA_W_LP */
582 NAL_UNIT_CODED_SLICE_BLA, /* 16 */
583 /* Current name in the spec: BLA_W_DLP */
584 NAL_UNIT_CODED_SLICE_BLANT, /* 17 */
585 NAL_UNIT_CODED_SLICE_BLA_N_LP, /* 18 */
586 /* Current name in the spec: IDR_W_DLP */
587 NAL_UNIT_CODED_SLICE_IDR, /* 19 */
588 NAL_UNIT_CODED_SLICE_IDR_N_LP, /* 20 */
589 NAL_UNIT_CODED_SLICE_CRA, /* 21 */
590 NAL_UNIT_RESERVED_22,
591 NAL_UNIT_RESERVED_23,
592
593 NAL_UNIT_RESERVED_24,
594 NAL_UNIT_RESERVED_25,
595 NAL_UNIT_RESERVED_26,
596 NAL_UNIT_RESERVED_27,
597 NAL_UNIT_RESERVED_28,
598 NAL_UNIT_RESERVED_29,
599 NAL_UNIT_RESERVED_30,
600 NAL_UNIT_RESERVED_31,
601
602 NAL_UNIT_VPS, /* 32 */
603 NAL_UNIT_SPS, /* 33 */
604 NAL_UNIT_PPS, /* 34 */
605 NAL_UNIT_ACCESS_UNIT_DELIMITER, /* 35 */
606 NAL_UNIT_EOS, /* 36 */
607 NAL_UNIT_EOB, /* 37 */
608 NAL_UNIT_FILLER_DATA, /* 38 */
609 NAL_UNIT_SEI, /* 39 Prefix SEI */
610 NAL_UNIT_SEI_SUFFIX, /* 40 Suffix SEI */
611 NAL_UNIT_RESERVED_41,
612 NAL_UNIT_RESERVED_42,
613 NAL_UNIT_RESERVED_43,
614 NAL_UNIT_RESERVED_44,
615 NAL_UNIT_RESERVED_45,
616 NAL_UNIT_RESERVED_46,
617 NAL_UNIT_RESERVED_47,
618 NAL_UNIT_UNSPECIFIED_48,
619 NAL_UNIT_UNSPECIFIED_49,
620 NAL_UNIT_UNSPECIFIED_50,
621 NAL_UNIT_UNSPECIFIED_51,
622 NAL_UNIT_UNSPECIFIED_52,
623 NAL_UNIT_UNSPECIFIED_53,
624 NAL_UNIT_UNSPECIFIED_54,
625 NAL_UNIT_UNSPECIFIED_55,
626 NAL_UNIT_UNSPECIFIED_56,
627 NAL_UNIT_UNSPECIFIED_57,
628 NAL_UNIT_UNSPECIFIED_58,
629 NAL_UNIT_UNSPECIFIED_59,
630 NAL_UNIT_UNSPECIFIED_60,
631 NAL_UNIT_UNSPECIFIED_61,
632 NAL_UNIT_UNSPECIFIED_62,
633 NAL_UNIT_UNSPECIFIED_63,
634 NAL_UNIT_INVALID,
635 };
636
637 /* --------------------------------------------------- */
638 /* Amrisc Software Interrupt */
639 /* --------------------------------------------------- */
640 #define AMRISC_STREAM_EMPTY_REQ 0x01
641 #define AMRISC_PARSER_REQ 0x02
642 #define AMRISC_MAIN_REQ 0x04
643
644 /* --------------------------------------------------- */
645 /* HEVC_DEC_STATUS define */
646 /* --------------------------------------------------- */
647 #define HEVC_DEC_IDLE 0x0
648 #define HEVC_NAL_UNIT_VPS 0x1
649 #define HEVC_NAL_UNIT_SPS 0x2
650 #define HEVC_NAL_UNIT_PPS 0x3
651 #define HEVC_NAL_UNIT_CODED_SLICE_SEGMENT 0x4
652 #define HEVC_CODED_SLICE_SEGMENT_DAT 0x5
653 #define HEVC_SLICE_DECODING 0x6
654 #define HEVC_NAL_UNIT_SEI 0x7
655 #define HEVC_SLICE_SEGMENT_DONE 0x8
656 #define HEVC_NAL_SEARCH_DONE 0x9
657 #define HEVC_DECPIC_DATA_DONE 0xa
658 #define HEVC_DECPIC_DATA_ERROR 0xb
659 #define HEVC_SEI_DAT 0xc
660 #define HEVC_SEI_DAT_DONE 0xd
661 #define HEVC_NAL_DECODE_DONE 0xe
662 #define HEVC_OVER_DECODE 0xf
663
664 #define HEVC_DATA_REQUEST 0x12
665
666 #define HEVC_DECODE_BUFEMPTY 0x20
667 #define HEVC_DECODE_TIMEOUT 0x21
668 #define HEVC_SEARCH_BUFEMPTY 0x22
669 #define HEVC_DECODE_OVER_SIZE 0x23
670 #define HEVC_DECODE_BUFEMPTY2 0x24
671 #define HEVC_FIND_NEXT_PIC_NAL 0x50
672 #define HEVC_FIND_NEXT_DVEL_NAL 0x51
673
674 #define HEVC_DUMP_LMEM 0x30
675
676 #define HEVC_4k2k_60HZ_NOT_SUPPORT 0x80
677 #define HEVC_DISCARD_NAL 0xf0
678 #define HEVC_ACTION_DEC_CONT 0xfd
679 #define HEVC_ACTION_ERROR 0xfe
680 #define HEVC_ACTION_DONE 0xff
681
682 /* --------------------------------------------------- */
683 /* Include "parser_cmd.h" */
684 /* --------------------------------------------------- */
685 #define PARSER_CMD_SKIP_CFG_0 0x0000090b
686
687 #define PARSER_CMD_SKIP_CFG_1 0x1b14140f
688
689 #define PARSER_CMD_SKIP_CFG_2 0x001b1910
690
691 #define PARSER_CMD_NUMBER 37
692
693 /**************************************************
694 *
695 *h265 buffer management
696 *
697 ***************************************************
698 */
699 /* #define BUFFER_MGR_ONLY */
700 /* #define CONFIG_HEVC_CLK_FORCED_ON */
701 /* #define ENABLE_SWAP_TEST */
702 #define MCRCC_ENABLE
703 #define INVALID_POC 0x80000000
704
705 #define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
706 #define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
707 #define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
708 #define HEVC_VPS_BUFFER HEVC_ASSIST_SCRATCH_3
709 #define HEVC_SPS_BUFFER HEVC_ASSIST_SCRATCH_4
710 #define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
711 #define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
712 #define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
713 #define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
714 #define HEVC_sao_mem_unit HEVC_ASSIST_SCRATCH_9
715 #define HEVC_SAO_ABV HEVC_ASSIST_SCRATCH_A
716 #define HEVC_sao_vb_size HEVC_ASSIST_SCRATCH_B
717 #define HEVC_SAO_VB HEVC_ASSIST_SCRATCH_C
718 #define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
719 #define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
720 #define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
721 #define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
722 #ifdef ENABLE_SWAP_TEST
723 #define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
724 #endif
725
726 /*#define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M*/
727 /*#define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N*/
728 #define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
729 /*do not define ENABLE_SWAP_TEST*/
730 #define HEVC_AUX_ADR HEVC_ASSIST_SCRATCH_L
731 #define HEVC_AUX_DATA_SIZE HEVC_ASSIST_SCRATCH_M
732
733 #define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
734 #define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
735 /*
736 *ucode parser/search control
737 *bit 0: 0, header auto parse; 1, header manual parse
738 *bit 1: 0, auto skip for non-seamless stream; 1, no skip
739 *bit [3:2]: valid when bit1==0;
740 *0, auto skip nal before first vps/sps/pps/idr;
741 *1, auto skip nal before first vps/sps/pps
742 *2, auto skip nal before first vps/sps/pps,
743 * and not decode until the first I slice (with slice address of 0)
744 *
745 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
746 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
747 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
748 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
749 *bit [17]: for NAL_SEI when bit0 is 0:
750 * 0, do not parse/fetch SEI in ucode;
751 * 1, parse/fetch SEI in ucode
752 *bit [18]: for NAL_SEI_SUFFIX when bit0 is 0:
753 * 0, do not fetch NAL_SEI_SUFFIX to aux buf;
754 * 1, fetch NAL_SEI_SUFFIX data to aux buf
755 *bit [19]:
756 * 0, parse NAL_SEI in ucode
757 * 1, fetch NAL_SEI to aux buf
758 *bit [20]: for DOLBY_VISION_META
759 * 0, do not fetch DOLBY_VISION_META to aux buf
760 * 1, fetch DOLBY_VISION_META to aux buf
761 */
762 #define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
763 /*read only*/
764 #define CUR_NAL_UNIT_TYPE HEVC_ASSIST_SCRATCH_J
765 /*
766 [15 : 8] rps_set_id
767 [7 : 0] start_decoding_flag
768 */
769 #define HEVC_DECODE_INFO HEVC_ASSIST_SCRATCH_1
770 /*set before start decoder*/
771 #define HEVC_DECODE_MODE HEVC_ASSIST_SCRATCH_J
772 #define HEVC_DECODE_MODE2 HEVC_ASSIST_SCRATCH_H
773 #define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
774
775 #define DECODE_MODE_SINGLE 0x0
776 #define DECODE_MODE_MULTI_FRAMEBASE 0x1
777 #define DECODE_MODE_MULTI_STREAMBASE 0x2
778 #define DECODE_MODE_MULTI_DVBAL 0x3
779 #define DECODE_MODE_MULTI_DVENL 0x4
780
781 #define MAX_INT 0x7FFFFFFF
782
783 #define RPM_BEGIN 0x100
784 #define modification_list_cur 0x148
785 #define RPM_END 0x180
786
787 #define RPS_USED_BIT 14
788 /* MISC_FLAG0 */
789 #define PCM_LOOP_FILTER_DISABLED_FLAG_BIT 0
790 #define PCM_ENABLE_FLAG_BIT 1
791 #define LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT 2
792 #define PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 3
793 #define DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT 4
794 #define PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 5
795 #define DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT 6
796 #define SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT 7
797 #define SLICE_SAO_LUMA_FLAG_BIT 8
798 #define SLICE_SAO_CHROMA_FLAG_BIT 9
799 #define SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT 10
800
801 union param_u {
802 struct {
803 unsigned short data[RPM_END - RPM_BEGIN];
804 } l;
805 struct {
806 /* from ucode lmem, do not change this struct */
807 unsigned short CUR_RPS[0x10];
808 unsigned short num_ref_idx_l0_active;
809 unsigned short num_ref_idx_l1_active;
810 unsigned short slice_type;
811 unsigned short slice_temporal_mvp_enable_flag;
812 unsigned short dependent_slice_segment_flag;
813 unsigned short slice_segment_address;
814 unsigned short num_title_rows_minus1;
815 unsigned short pic_width_in_luma_samples;
816 unsigned short pic_height_in_luma_samples;
817 unsigned short log2_min_coding_block_size_minus3;
818 unsigned short log2_diff_max_min_coding_block_size;
819 unsigned short log2_max_pic_order_cnt_lsb_minus4;
820 unsigned short POClsb;
821 unsigned short collocated_from_l0_flag;
822 unsigned short collocated_ref_idx;
823 unsigned short log2_parallel_merge_level;
824 unsigned short five_minus_max_num_merge_cand;
825 unsigned short sps_num_reorder_pics_0;
826 unsigned short modification_flag;
827 unsigned short tiles_enabled_flag;
828 unsigned short num_tile_columns_minus1;
829 unsigned short num_tile_rows_minus1;
830 unsigned short tile_width[12];
831 unsigned short tile_height[8];
832 unsigned short misc_flag0;
833 unsigned short pps_beta_offset_div2;
834 unsigned short pps_tc_offset_div2;
835 unsigned short slice_beta_offset_div2;
836 unsigned short slice_tc_offset_div2;
837 unsigned short pps_cb_qp_offset;
838 unsigned short pps_cr_qp_offset;
839 unsigned short first_slice_segment_in_pic_flag;
840 unsigned short m_temporalId;
841 unsigned short m_nalUnitType;
842
843 unsigned short vui_num_units_in_tick_hi;
844 unsigned short vui_num_units_in_tick_lo;
845 unsigned short vui_time_scale_hi;
846 unsigned short vui_time_scale_lo;
847 unsigned short bit_depth;
848 unsigned short profile_etc;
849 unsigned short sei_frame_field_info;
850 unsigned short video_signal_type;
851 unsigned short modification_list[0x20];
852 unsigned short conformance_window_flag;
853 unsigned short conf_win_left_offset;
854 unsigned short conf_win_right_offset;
855 unsigned short conf_win_top_offset;
856 unsigned short conf_win_bottom_offset;
857 unsigned short chroma_format_idc;
858 unsigned short color_description;
859 unsigned short aspect_ratio_idc;
860 unsigned short sar_width;
861 unsigned short sar_height;
862 unsigned short sps_max_dec_pic_buffering_minus1_0;
863 } p;
864 };
865
866 #define RPM_BUF_SIZE (0x80*2)
867 /* non mmu mode lmem size : 0x400, mmu mode : 0x500*/
868 #define LMEM_BUF_SIZE (0x500 * 2)
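/* RPM_BUF_SIZE covers the 0x80 16-bit parameter words exposed through
 * union param_u below (RPM_END - RPM_BEGIN = 0x80 entries * 2 bytes).
 */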
869
870 struct buff_s {
871 u32 buf_start;
872 u32 buf_size;
873 u32 buf_end;
874 };
875
876 struct BuffInfo_s {
877 u32 max_width;
878 u32 max_height;
879 unsigned int start_adr;
880 unsigned int end_adr;
881 struct buff_s ipp;
882 struct buff_s sao_abv;
883 struct buff_s sao_vb;
884 struct buff_s short_term_rps;
885 struct buff_s vps;
886 struct buff_s sps;
887 struct buff_s pps;
888 struct buff_s sao_up;
889 struct buff_s swap_buf;
890 struct buff_s swap_buf2;
891 struct buff_s scalelut;
892 struct buff_s dblk_para;
893 struct buff_s dblk_data;
894 struct buff_s dblk_data2;
895 struct buff_s mmu_vbh;
896 struct buff_s cm_header;
897 struct buff_s mpred_above;
898 #ifdef MV_USE_FIXED_BUF
899 struct buff_s mpred_mv;
900 #endif
901 struct buff_s rpm;
902 struct buff_s lmem;
903 };
904 #define WORK_BUF_SPEC_NUM 3
905 static struct BuffInfo_s amvh265_workbuff_spec[WORK_BUF_SPEC_NUM] = {
906 {
907 /* 8M bytes */
908 .max_width = 1920,
909 .max_height = 1088,
910 .ipp = {
911 /* IPP work space calculation :
912 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
913 */
914 .buf_size = 0x4000,
915 },
916 .sao_abv = {
917 .buf_size = 0x30000,
918 },
919 .sao_vb = {
920 .buf_size = 0x30000,
921 },
922 .short_term_rps = {
923 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
924 * total 64x16x2 = 2048 bytes (0x800)
925 */
926 .buf_size = 0x800,
927 },
928 .vps = {
929 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
930 * total 0x0800 bytes
931 */
932 .buf_size = 0x800,
933 },
934 .sps = {
935 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
936 * total 0x0800 bytes
937 */
938 .buf_size = 0x800,
939 },
940 .pps = {
941 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
942 * total 0x2000 bytes
943 */
944 .buf_size = 0x2000,
945 },
946 .sao_up = {
947 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
948 * each has 16 bytes total 0x2800 bytes
949 */
950 .buf_size = 0x2800,
951 },
952 .swap_buf = {
953 /* 256cyclex64bit = 2K bytes 0x800
954 * (only 144 cycles valid)
955 */
956 .buf_size = 0x800,
957 },
958 .swap_buf2 = {
959 .buf_size = 0x800,
960 },
961 .scalelut = {
962 /* support up to 32 SCALELUT 1024x32 =
963 * 32Kbytes (0x8000)
964 */
965 .buf_size = 0x8000,
966 },
967 .dblk_para = {
968 #ifdef SUPPORT_10BIT
969 .buf_size = 0x40000,
970 #else
971 /* DBLK -> Max 256(4096/16) LCU, each para
972 *512bytes(total:0x20000), data 1024bytes(total:0x40000)
973 */
974 .buf_size = 0x20000,
975 #endif
976 },
977 .dblk_data = {
978 .buf_size = 0x40000,
979 },
980 .dblk_data2 = {
981 .buf_size = 0x40000,
982 }, /*dblk data for adapter*/
983 .mmu_vbh = {
984 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
985 },
986 #if 0
987 .cm_header = {/* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
988 .buf_size = MMU_COMPRESS_HEADER_SIZE *
989 (MAX_REF_PIC_NUM + 1),
990 },
991 #endif
992 .mpred_above = {
993 .buf_size = 0x8000,
994 },
995 #ifdef MV_USE_FIXED_BUF
996 .mpred_mv = {/* 1080p, 0x40000 per buffer */
997 .buf_size = 0x40000 * MAX_REF_PIC_NUM,
998 },
999 #endif
1000 .rpm = {
1001 .buf_size = RPM_BUF_SIZE,
1002 },
1003 .lmem = {
1004 .buf_size = 0x500 * 2,
1005 }
1006 },
1007 {
1008 .max_width = 4096,
1009 .max_height = 2048,
1010 .ipp = {
1011 /* IPP work space calculation :
1012 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1013 */
1014 .buf_size = 0x4000,
1015 },
1016 .sao_abv = {
1017 .buf_size = 0x30000,
1018 },
1019 .sao_vb = {
1020 .buf_size = 0x30000,
1021 },
1022 .short_term_rps = {
1023 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
1024 * total 64x16x2 = 2048 bytes (0x800)
1025 */
1026 .buf_size = 0x800,
1027 },
1028 .vps = {
1029 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
1030 * total 0x0800 bytes
1031 */
1032 .buf_size = 0x800,
1033 },
1034 .sps = {
1035 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
1036 * total 0x0800 bytes
1037 */
1038 .buf_size = 0x800,
1039 },
1040 .pps = {
1041 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
1042 * total 0x2000 bytes
1043 */
1044 .buf_size = 0x2000,
1045 },
1046 .sao_up = {
1047 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
1048 * each has 16 bytes total 0x2800 bytes
1049 */
1050 .buf_size = 0x2800,
1051 },
1052 .swap_buf = {
1053 /* 256cyclex64bit = 2K bytes 0x800
1054 * (only 144 cycles valid)
1055 */
1056 .buf_size = 0x800,
1057 },
1058 .swap_buf2 = {
1059 .buf_size = 0x800,
1060 },
1061 .scalelut = {
1062 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
1063 * (0x8000)
1064 */
1065 .buf_size = 0x8000,
1066 },
1067 .dblk_para = {
1068 /* DBLK -> Max 256(4096/16) LCU, each para
1069 * 512bytes(total:0x20000),
1070 * data 1024bytes(total:0x40000)
1071 */
1072 .buf_size = 0x20000,
1073 },
1074 .dblk_data = {
1075 .buf_size = 0x80000,
1076 },
1077 .dblk_data2 = {
1078 .buf_size = 0x80000,
1079 }, /*dblk data for adapter*/
1080 .mmu_vbh = {
1081 .buf_size = 0x5000, /*2*16*2304/4, 4K*/
1082 },
1083 #if 0
1084 .cm_header = {/*0x44000 = ((1088*2*1024*4)/32/4)*(32/8)*/
1085 .buf_size = MMU_COMPRESS_HEADER_SIZE *
1086 (MAX_REF_PIC_NUM + 1),
1087 },
1088 #endif
1089 .mpred_above = {
1090 .buf_size = 0x8000,
1091 },
1092 #ifdef MV_USE_FIXED_BUF
1093 .mpred_mv = {
1094 /* .buf_size = 0x100000*16,
1095 //4k2k , 0x100000 per buffer */
1096 /* 4096x2304 , 0x120000 per buffer */
1097 .buf_size = MPRED_4K_MV_BUF_SIZE * MAX_REF_PIC_NUM,
1098 },
1099 #endif
1100 .rpm = {
1101 .buf_size = RPM_BUF_SIZE,
1102 },
1103 .lmem = {
1104 .buf_size = 0x500 * 2,
1105 }
1106 },
1107
1108 {
1109 .max_width = 4096*2,
1110 .max_height = 2048*2,
1111 .ipp = {
1112 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
1113 .buf_size = 0x4000*2,
1114 },
1115 .sao_abv = {
1116 .buf_size = 0x30000*2,
1117 },
1118 .sao_vb = {
1119 .buf_size = 0x30000*2,
1120 },
1121 .short_term_rps = {
1122 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
1123 .buf_size = 0x800,
1124 },
1125 .vps = {
1126 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
1127 .buf_size = 0x800,
1128 },
1129 .sps = {
1130 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
1131 .buf_size = 0x800,
1132 },
1133 .pps = {
1134 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
1135 .buf_size = 0x2000,
1136 },
1137 .sao_up = {
1138 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
1139 .buf_size = 0x2800*2,
1140 },
1141 .swap_buf = {
1142 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
1143 .buf_size = 0x800,
1144 },
1145 .swap_buf2 = {
1146 .buf_size = 0x800,
1147 },
1148 .scalelut = {
1149 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
1150 .buf_size = 0x8000*2,
1151 },
1152 .dblk_para = {.buf_size = 0x40000*2, }, // dblk parameter
1153 .dblk_data = {.buf_size = 0x80000*2, }, // dblk data for left/top
1154 .dblk_data2 = {.buf_size = 0x80000*2, }, // dblk data for adapter
1155 .mmu_vbh = {
1156 .buf_size = 0x5000*2, //2*16*2304/4, 4K
1157 },
1158 #if 0
1159 .cm_header = {
1160 .buf_size = MMU_COMPRESS_8K_HEADER_SIZE *
1161 MAX_REF_PIC_NUM, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
1162 },
1163 #endif
1164 .mpred_above = {
1165 .buf_size = 0x8000*2,
1166 },
1167 #ifdef MV_USE_FIXED_BUF
1168 .mpred_mv = {
1169 .buf_size = MPRED_8K_MV_BUF_SIZE * MAX_REF_PIC_NUM, //4k2k , 0x120000 per buffer
1170 },
1171 #endif
1172 .rpm = {
1173 .buf_size = RPM_BUF_SIZE,
1174 },
1175 .lmem = {
1176 .buf_size = 0x500 * 2,
1177 },
1178 }
1179 };
1180
1181 static void init_buff_spec(struct hevc_state_s *hevc,
1182 struct BuffInfo_s *buf_spec)
1183 {
1184 buf_spec->ipp.buf_start = buf_spec->start_adr;
1185 buf_spec->sao_abv.buf_start =
1186 buf_spec->ipp.buf_start + buf_spec->ipp.buf_size;
1187
1188 buf_spec->sao_vb.buf_start =
1189 buf_spec->sao_abv.buf_start + buf_spec->sao_abv.buf_size;
1190 buf_spec->short_term_rps.buf_start =
1191 buf_spec->sao_vb.buf_start + buf_spec->sao_vb.buf_size;
1192 buf_spec->vps.buf_start =
1193 buf_spec->short_term_rps.buf_start +
1194 buf_spec->short_term_rps.buf_size;
1195 buf_spec->sps.buf_start =
1196 buf_spec->vps.buf_start + buf_spec->vps.buf_size;
1197 buf_spec->pps.buf_start =
1198 buf_spec->sps.buf_start + buf_spec->sps.buf_size;
1199 buf_spec->sao_up.buf_start =
1200 buf_spec->pps.buf_start + buf_spec->pps.buf_size;
1201 buf_spec->swap_buf.buf_start =
1202 buf_spec->sao_up.buf_start + buf_spec->sao_up.buf_size;
1203 buf_spec->swap_buf2.buf_start =
1204 buf_spec->swap_buf.buf_start + buf_spec->swap_buf.buf_size;
1205 buf_spec->scalelut.buf_start =
1206 buf_spec->swap_buf2.buf_start + buf_spec->swap_buf2.buf_size;
1207 buf_spec->dblk_para.buf_start =
1208 buf_spec->scalelut.buf_start + buf_spec->scalelut.buf_size;
1209 buf_spec->dblk_data.buf_start =
1210 buf_spec->dblk_para.buf_start + buf_spec->dblk_para.buf_size;
1211 buf_spec->dblk_data2.buf_start =
1212 buf_spec->dblk_data.buf_start + buf_spec->dblk_data.buf_size;
1213 buf_spec->mmu_vbh.buf_start =
1214 buf_spec->dblk_data2.buf_start + buf_spec->dblk_data2.buf_size;
1215 buf_spec->mpred_above.buf_start =
1216 buf_spec->mmu_vbh.buf_start + buf_spec->mmu_vbh.buf_size;
1217 #ifdef MV_USE_FIXED_BUF
1218 buf_spec->mpred_mv.buf_start =
1219 buf_spec->mpred_above.buf_start +
1220 buf_spec->mpred_above.buf_size;
1221
1222 buf_spec->rpm.buf_start =
1223 buf_spec->mpred_mv.buf_start +
1224 buf_spec->mpred_mv.buf_size;
1225 #else
1226 buf_spec->rpm.buf_start =
1227 buf_spec->mpred_above.buf_start +
1228 buf_spec->mpred_above.buf_size;
1229 #endif
1230 buf_spec->lmem.buf_start =
1231 buf_spec->rpm.buf_start +
1232 buf_spec->rpm.buf_size;
1233 buf_spec->end_adr =
1234 buf_spec->lmem.buf_start +
1235 buf_spec->lmem.buf_size;
1236
1237 if (hevc && get_dbg_flag2(hevc)) {
1238 hevc_print(hevc, 0,
1239 "%s workspace (%x %x) size = %x\n", __func__,
1240 buf_spec->start_adr, buf_spec->end_adr,
1241 buf_spec->end_adr - buf_spec->start_adr);
1242
1243 hevc_print(hevc, 0,
1244 "ipp.buf_start :%x\n",
1245 buf_spec->ipp.buf_start);
1246 hevc_print(hevc, 0,
1247 "sao_abv.buf_start :%x\n",
1248 buf_spec->sao_abv.buf_start);
1249 hevc_print(hevc, 0,
1250 "sao_vb.buf_start :%x\n",
1251 buf_spec->sao_vb.buf_start);
1252 hevc_print(hevc, 0,
1253 "short_term_rps.buf_start :%x\n",
1254 buf_spec->short_term_rps.buf_start);
1255 hevc_print(hevc, 0,
1256 "vps.buf_start :%x\n",
1257 buf_spec->vps.buf_start);
1258 hevc_print(hevc, 0,
1259 "sps.buf_start :%x\n",
1260 buf_spec->sps.buf_start);
1261 hevc_print(hevc, 0,
1262 "pps.buf_start :%x\n",
1263 buf_spec->pps.buf_start);
1264 hevc_print(hevc, 0,
1265 "sao_up.buf_start :%x\n",
1266 buf_spec->sao_up.buf_start);
1267 hevc_print(hevc, 0,
1268 "swap_buf.buf_start :%x\n",
1269 buf_spec->swap_buf.buf_start);
1270 hevc_print(hevc, 0,
1271 "swap_buf2.buf_start :%x\n",
1272 buf_spec->swap_buf2.buf_start);
1273 hevc_print(hevc, 0,
1274 "scalelut.buf_start :%x\n",
1275 buf_spec->scalelut.buf_start);
1276 hevc_print(hevc, 0,
1277 "dblk_para.buf_start :%x\n",
1278 buf_spec->dblk_para.buf_start);
1279 hevc_print(hevc, 0,
1280 "dblk_data.buf_start :%x\n",
1281 buf_spec->dblk_data.buf_start);
1282 hevc_print(hevc, 0,
1283 "dblk_data2.buf_start :%x\n",
1284 buf_spec->dblk_data2.buf_start);
1285 hevc_print(hevc, 0,
1286 "mpred_above.buf_start :%x\n",
1287 buf_spec->mpred_above.buf_start);
1288 #ifdef MV_USE_FIXED_BUF
1289 hevc_print(hevc, 0,
1290 "mpred_mv.buf_start :%x\n",
1291 buf_spec->mpred_mv.buf_start);
1292 #endif
1293 if ((get_dbg_flag2(hevc)
1294 &
1295 H265_DEBUG_SEND_PARAM_WITH_REG)
1296 == 0) {
1297 hevc_print(hevc, 0,
1298 "rpm.buf_start :%x\n",
1299 buf_spec->rpm.buf_start);
1300 }
1301 }
1302
1303 }
1304
1305 enum SliceType {
1306 B_SLICE,
1307 P_SLICE,
1308 I_SLICE
1309 };
1310
1311 /*USE_BUF_BLOCK*/
1312 struct BUF_s {
1313 ulong start_adr;
1314 u32 size;
1315 u32 luma_size;
1316 ulong header_addr;
1317 u32 header_size;
1318 int used_flag;
1319 ulong v4l_ref_buf_addr;
1320 ulong chroma_addr;
1321 u32 chroma_size;
1322 } /*BUF_t */;
1323
1324 /* level 6, 6.1 maximum slice number is 800; other is 200 */
1325 #define MAX_SLICE_NUM 800
1326 struct PIC_s {
1327 int index;
1328 int scatter_alloc;
1329 int BUF_index;
1330 int mv_buf_index;
1331 int POC;
1332 int decode_idx;
1333 int slice_type;
1334 int RefNum_L0;
1335 int RefNum_L1;
1336 int num_reorder_pic;
1337 int stream_offset;
1338 unsigned char referenced;
1339 unsigned char output_mark;
1340 unsigned char recon_mark;
1341 unsigned char output_ready;
1342 unsigned char error_mark;
1343 //dis_mark = 0:discard mark,dis_mark = 1:no discard mark
1344 unsigned char dis_mark;
1345 /**/ int slice_idx;
1346 int m_aiRefPOCList0[MAX_SLICE_NUM][16];
1347 int m_aiRefPOCList1[MAX_SLICE_NUM][16];
1348 /*buffer */
1349 unsigned int header_adr;
1350 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1351 unsigned char dv_enhance_exist;
1352 #endif
1353 char *aux_data_buf;
1354 int aux_data_size;
1355 unsigned long cma_alloc_addr;
1356 struct page *alloc_pages;
1357 unsigned int mpred_mv_wr_start_addr;
1358 unsigned int mc_y_adr;
1359 unsigned int mc_u_v_adr;
1360 #ifdef SUPPORT_10BIT
1361 /*unsigned int comp_body_size;*/
1362 unsigned int dw_y_adr;
1363 unsigned int dw_u_v_adr;
1364 #endif
1365 int mc_canvas_y;
1366 int mc_canvas_u_v;
1367 int width;
1368 int height;
1369
1370 int y_canvas_index;
1371 int uv_canvas_index;
1372 #ifdef MULTI_INSTANCE_SUPPORT
1373 struct canvas_config_s canvas_config[2];
1374 #endif
1375 #ifdef SUPPORT_10BIT
1376 int mem_saving_mode;
1377 u32 bit_depth_luma;
1378 u32 bit_depth_chroma;
1379 #endif
1380 #ifdef LOSLESS_COMPRESS_MODE
1381 unsigned int losless_comp_body_size;
1382 #endif
1383 unsigned char pic_struct;
1384 int vf_ref;
1385
1386 u32 pts;
1387 u64 pts64;
1388 u64 timestamp;
1389
1390 u32 aspect_ratio_idc;
1391 u32 sar_width;
1392 u32 sar_height;
1393 u32 double_write_mode;
1394 u32 video_signal_type;
1395 unsigned short conformance_window_flag;
1396 unsigned short conf_win_left_offset;
1397 unsigned short conf_win_right_offset;
1398 unsigned short conf_win_top_offset;
1399 unsigned short conf_win_bottom_offset;
1400 unsigned short chroma_format_idc;
1401
1402 /* picture qos information */
1403 int max_qp;
1404 int avg_qp;
1405 int min_qp;
1406 int max_skip;
1407 int avg_skip;
1408 int min_skip;
1409 int max_mv;
1410 int min_mv;
1411 int avg_mv;
1412
1413 u32 hw_decode_time;
1414 u32 frame_size; // For frame base mode
1415 bool vframe_bound;
1416 bool ip_mode;
1417 u32 stream_frame_size; //for stream base
1418 } /*PIC_t */;
1419
1420 #define MAX_TILE_COL_NUM 10
1421 #define MAX_TILE_ROW_NUM 20
1422 struct tile_s {
1423 int width;
1424 int height;
1425 int start_cu_x;
1426 int start_cu_y;
1427
1428 unsigned int sao_vb_start_addr;
1429 unsigned int sao_abv_start_addr;
1430 };
1431
1432 #define SEI_MASTER_DISPLAY_COLOR_MASK 0x00000001
1433 #define SEI_CONTENT_LIGHT_LEVEL_MASK 0x00000002
1434 #define SEI_HDR10PLUS_MASK 0x00000004
1435
1436 #define VF_POOL_SIZE 32
1437
1438 #ifdef MULTI_INSTANCE_SUPPORT
1439 #define DEC_RESULT_NONE 0
1440 #define DEC_RESULT_DONE 1
1441 #define DEC_RESULT_AGAIN 2
1442 #define DEC_RESULT_CONFIG_PARAM 3
1443 #define DEC_RESULT_ERROR 4
1444 #define DEC_INIT_PICLIST 5
1445 #define DEC_UNINIT_PICLIST 6
1446 #define DEC_RESULT_GET_DATA 7
1447 #define DEC_RESULT_GET_DATA_RETRY 8
1448 #define DEC_RESULT_EOS 9
1449 #define DEC_RESULT_FORCE_EXIT 10
1450 #define DEC_RESULT_FREE_CANVAS 11
1451
1452 static void vh265_work(struct work_struct *work);
1453 static void vh265_timeout_work(struct work_struct *work);
1454 static void vh265_notify_work(struct work_struct *work);
1455
1456 #endif
1457
1458 struct debug_log_s {
1459 struct list_head list;
1460 uint8_t data; /*will alloc more size*/
1461 };
1462
1463 struct hevc_state_s {
1464 #ifdef MULTI_INSTANCE_SUPPORT
1465 struct platform_device *platform_dev;
1466 void (*vdec_cb)(struct vdec_s *, void *);
1467 void *vdec_cb_arg;
1468 struct vframe_chunk_s *chunk;
1469 int dec_result;
1470 struct work_struct work;
1471 struct work_struct timeout_work;
1472 struct work_struct notify_work;
1473 struct work_struct set_clk_work;
1474 /* timeout handle */
1475 unsigned long int start_process_time;
1476 unsigned int last_lcu_idx;
1477 unsigned int decode_timeout_count;
1478 unsigned int timeout_num;
1479 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
1480 unsigned char switch_dvlayer_flag;
1481 unsigned char no_switch_dvlayer_count;
1482 unsigned char bypass_dvenl_enable;
1483 unsigned char bypass_dvenl;
1484 #endif
1485 unsigned char start_parser_type;
1486 /*start_decoding_flag:
1487 vps/pps/sps/idr info from ucode*/
1488 unsigned char start_decoding_flag;
1489 unsigned char rps_set_id;
1490 unsigned char eos;
1491 int pic_decoded_lcu_idx;
1492 u8 over_decode;
1493 u8 empty_flag;
1494 #endif
1495 struct vframe_s vframe_dummy;
1496 char *provider_name;
1497 int index;
1498 struct device *cma_dev;
1499 unsigned char m_ins_flag;
1500 unsigned char dolby_enhance_flag;
1501 unsigned long buf_start;
1502 u32 buf_size;
1503 u32 mv_buf_size;
1504
1505 struct BuffInfo_s work_space_buf_store;
1506 struct BuffInfo_s *work_space_buf;
1507
1508 u8 aux_data_dirty;
1509 u32 prefix_aux_size;
1510 u32 suffix_aux_size;
1511 void *aux_addr;
1512 void *rpm_addr;
1513 void *lmem_addr;
1514 dma_addr_t aux_phy_addr;
1515 dma_addr_t rpm_phy_addr;
1516 dma_addr_t lmem_phy_addr;
1517
1518 unsigned int pic_list_init_flag;
1519 unsigned int use_cma_flag;
1520
1521 unsigned short *rpm_ptr;
1522 unsigned short *lmem_ptr;
1523 unsigned short *debug_ptr;
1524 int debug_ptr_size;
1525 int pic_w;
1526 int pic_h;
1527 int lcu_x_num;
1528 int lcu_y_num;
1529 int lcu_total;
1530 int lcu_size;
1531 int lcu_size_log2;
1532 int lcu_x_num_pre;
1533 int lcu_y_num_pre;
1534 int first_pic_after_recover;
1535
1536 int num_tile_col;
1537 int num_tile_row;
1538 int tile_enabled;
1539 int tile_x;
1540 int tile_y;
1541 int tile_y_x;
1542 int tile_start_lcu_x;
1543 int tile_start_lcu_y;
1544 int tile_width_lcu;
1545 int tile_height_lcu;
1546
1547 int slice_type;
1548 unsigned int slice_addr;
1549 unsigned int slice_segment_addr;
1550
1551 unsigned char interlace_flag;
1552 unsigned char curr_pic_struct;
1553 unsigned char frame_field_info_present_flag;
1554
1555 unsigned short sps_num_reorder_pics_0;
1556 unsigned short misc_flag0;
1557 int m_temporalId;
1558 int m_nalUnitType;
1559 int TMVPFlag;
1560 int isNextSliceSegment;
1561 int LDCFlag;
1562 int m_pocRandomAccess;
1563 int plevel;
1564 int MaxNumMergeCand;
1565
1566 int new_pic;
1567 int new_tile;
1568 int curr_POC;
1569 int iPrevPOC;
1570 #ifdef MULTI_INSTANCE_SUPPORT
1571 int decoded_poc;
1572 struct PIC_s *decoding_pic;
1573 #endif
1574 int iPrevTid0POC;
1575 int list_no;
1576 int RefNum_L0;
1577 int RefNum_L1;
1578 int ColFromL0Flag;
1579 int LongTerm_Curr;
1580 int LongTerm_Col;
1581 int Col_POC;
1582 int LongTerm_Ref;
1583 #ifdef MULTI_INSTANCE_SUPPORT
1584 int m_pocRandomAccess_bak;
1585 int curr_POC_bak;
1586 int iPrevPOC_bak;
1587 int iPrevTid0POC_bak;
1588 unsigned char start_parser_type_bak;
1589 unsigned char start_decoding_flag_bak;
1590 unsigned char rps_set_id_bak;
1591 int pic_decoded_lcu_idx_bak;
1592 int decode_idx_bak;
1593 #endif
1594 struct PIC_s *cur_pic;
1595 struct PIC_s *col_pic;
1596 int skip_flag;
1597 int decode_idx;
1598 int slice_idx;
1599 unsigned char have_vps;
1600 unsigned char have_sps;
1601 unsigned char have_pps;
1602 unsigned char have_valid_start_slice;
1603 unsigned char wait_buf;
1604 unsigned char error_flag;
1605 unsigned int error_skip_nal_count;
1606 long used_4k_num;
1607
1608 unsigned char
1609 ignore_bufmgr_error; /* bit 0, for decoding;
1610 bit 1, for displaying
1611 bit 1 must be set if bit 0 is 1*/
1612 int PB_skip_mode;
1613 int PB_skip_count_after_decoding;
1614 #ifdef SUPPORT_10BIT
1615 int mem_saving_mode;
1616 #endif
1617 #ifdef LOSLESS_COMPRESS_MODE
1618 unsigned int losless_comp_body_size;
1619 #endif
1620 int pts_mode;
1621 int last_lookup_pts;
1622 int last_pts;
1623 u64 last_lookup_pts_us64;
1624 u64 last_pts_us64;
1625 u32 shift_byte_count_lo;
1626 u32 shift_byte_count_hi;
1627 int pts_mode_switching_count;
1628 int pts_mode_recovery_count;
1629
1630 int pic_num;
1631
1632 /**/
1633 union param_u param;
1634
1635 struct tile_s m_tile[MAX_TILE_ROW_NUM][MAX_TILE_COL_NUM];
1636
1637 struct timer_list timer;
1638 struct BUF_s m_BUF[BUF_POOL_SIZE];
1639 struct BUF_s m_mv_BUF[MAX_REF_PIC_NUM];
1640 struct PIC_s *m_PIC[MAX_REF_PIC_NUM];
1641
1642 DECLARE_KFIFO(newframe_q, struct vframe_s *, VF_POOL_SIZE);
1643 DECLARE_KFIFO(display_q, struct vframe_s *, VF_POOL_SIZE);
1644 DECLARE_KFIFO(pending_q, struct vframe_s *, VF_POOL_SIZE);
1645 struct vframe_s vfpool[VF_POOL_SIZE];
1646
1647 u32 stat;
1648 u32 frame_width;
1649 u32 frame_height;
1650 u32 frame_dur;
1651 u32 frame_ar;
1652 u32 bit_depth_luma;
1653 u32 bit_depth_chroma;
1654 u32 video_signal_type;
1655 u32 video_signal_type_debug;
1656 u32 saved_resolution;
1657 bool get_frame_dur;
1658 u32 error_watchdog_count;
1659 u32 error_skip_nal_wt_cnt;
1660 u32 error_system_watchdog_count;
1661
1662 #ifdef DEBUG_PTS
1663 unsigned long pts_missed;
1664 unsigned long pts_hit;
1665 #endif
1666 struct dec_sysinfo vh265_amstream_dec_info;
1667 unsigned char init_flag;
1668 unsigned char first_sc_checked;
1669 unsigned char uninit_list;
1670 u32 start_decoding_time;
1671
1672 int show_frame_num;
1673 #ifdef USE_UNINIT_SEMA
1674 struct semaphore h265_uninit_done_sema;
1675 #endif
1676 int fatal_error;
1677
1678
1679 u32 sei_present_flag;
1680 void *frame_mmu_map_addr;
1681 dma_addr_t frame_mmu_map_phy_addr;
1682 unsigned int mmu_mc_buf_start;
1683 unsigned int mmu_mc_buf_end;
1684 unsigned int mmu_mc_start_4k_adr;
1685 void *mmu_box;
1686 void *bmmu_box;
1687 int mmu_enable;
1688
1689 unsigned int dec_status;
1690
1691 /* data for SEI_MASTER_DISPLAY_COLOR */
1692 unsigned int primaries[3][2];
1693 unsigned int white_point[2];
1694 unsigned int luminance[2];
1695 /* data for SEI_CONTENT_LIGHT_LEVEL */
1696 unsigned int content_light_level[2];
1697
1698 struct PIC_s *pre_top_pic;
1699 struct PIC_s *pre_bot_pic;
1700
1701 #ifdef MULTI_INSTANCE_SUPPORT
1702 int double_write_mode;
1703 int dynamic_buf_num_margin;
1704 int start_action;
1705 int save_buffer_mode;
1706 #endif
1707 u32 i_only;
1708 struct list_head log_list;
1709 u32 ucode_pause_pos;
1710 u32 start_shift_bytes;
1711
1712 u32 vf_pre_count;
1713 u32 vf_get_count;
1714 u32 vf_put_count;
1715 #ifdef SWAP_HEVC_UCODE
1716 dma_addr_t mc_dma_handle;
1717 void *mc_cpu_addr;
1718 int swap_size;
1719 ulong swap_addr;
1720 #endif
1721 #ifdef DETREFILL_ENABLE
1722 dma_addr_t detbuf_adr;
1723 u16 *detbuf_adr_virt;
1724 u8 delrefill_check;
1725 #endif
1726 u8 head_error_flag;
1727 int valve_count;
1728 struct firmware_s *fw;
1729 int max_pic_w;
1730 int max_pic_h;
1731 #ifdef AGAIN_HAS_THRESHOLD
1732 u8 next_again_flag;
1733 u32 pre_parser_wr_ptr;
1734 #endif
1735 u32 ratio_control;
1736 u32 first_pic_flag;
1737 u32 decode_size;
1738 struct mutex chunks_mutex;
1739 int need_cache_size;
1740 u64 sc_start_time;
1741 u32 skip_first_nal;
1742 bool is_swap;
1743 bool is_4k;
1744 int frameinfo_enable;
1745 struct vframe_qos_s vframe_qos;
1746 bool is_used_v4l;
1747 void *v4l2_ctx;
1748 bool v4l_params_parsed;
1749 u32 mem_map_mode;
1750 u32 performance_profile;
1751 struct vdec_info *gvs;
1752 unsigned int res_ch_flag;
1753 bool ip_mode;
1754 u32 kpi_first_i_comming;
1755 u32 kpi_first_i_decoded;
1756 int sidebind_type;
1757 int sidebind_channel_id;
1758 u32 last_dec_pic_offset;
1759 u32 min_pic_size;
1760 u32 pts_continue_miss;
1761 u32 pts_lookup_margin;
1762 } /*hevc_stru_t */;
1763
1764 #ifdef AGAIN_HAS_THRESHOLD
1765 u32 again_threshold;
1766 #endif
1767 #ifdef SEND_LMEM_WITH_RPM
1768 #define get_lmem_params(hevc, ladr) \
1769 hevc->lmem_ptr[ladr - (ladr & 0x3) + 3 - (ladr & 0x3)]
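/* The index arithmetic above reverses the four 16-bit words of each 64-bit
 * group (offsets 0,1,2,3 map to 3,2,1,0), apparently to match the word order
 * of the ucode lmem dump. Illustrative: get_lmem_params(hevc, 5) reads
 * hevc->lmem_ptr[6].
 */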
1770
1771
1772 static int get_frame_mmu_map_size(void)
1773 {
1774 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
1775 return (MAX_FRAME_8K_NUM * 4);
1776
1777 return (MAX_FRAME_4K_NUM * 4);
1778 }
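/* Illustrative sizing note: at 4 bytes per 4 KB page, MAX_FRAME_4K_NUM
 * (0x1200 = 4608 pages, ~18 MB) matches the worst-case compressed body of a
 * 4096x2304 frame as computed by compute_losless_comp_body_size(); the 8K
 * case simply scales that by four.
 */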
1779
1780 static int is_oversize(int w, int h)
1781 {
1782 int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
1783 MAX_SIZE_8K : MAX_SIZE_4K;
1784
1785 if (w < 0 || h < 0)
1786 return true;
1787
1788 if (h != 0 && (w > max / h))
1789 return true;
1790
1791 return false;
1792 }
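/* Note: the limit is checked as (w > max / h) rather than (w * h > max) so
 * that bogus stream dimensions cannot overflow the multiplication.
 */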
1793
1794 void check_head_error(struct hevc_state_s *hevc)
1795 {
1796 #define pcm_enabled_flag 0x040
1797 #define pcm_sample_bit_depth_luma 0x041
1798 #define pcm_sample_bit_depth_chroma 0x042
1799 hevc->head_error_flag = 0;
1800 if ((error_handle_policy & 0x40) == 0)
1801 return;
1802 if (get_lmem_params(hevc, pcm_enabled_flag)) {
1803 uint16_t pcm_depth_luma = get_lmem_params(
1804 hevc, pcm_sample_bit_depth_luma);
1805 uint16_t pcm_sample_chroma = get_lmem_params(
1806 hevc, pcm_sample_bit_depth_chroma);
1807 if (pcm_depth_luma >
1808 hevc->bit_depth_luma ||
1809 pcm_sample_chroma >
1810 hevc->bit_depth_chroma) {
1811 hevc_print(hevc, 0,
1812 "error, pcm bit depth %d, %d is greater than normal bit depth %d, %d\n",
1813 pcm_depth_luma,
1814 pcm_sample_chroma,
1815 hevc->bit_depth_luma,
1816 hevc->bit_depth_chroma);
1817 hevc->head_error_flag = 1;
1818 }
1819 }
1820 }
1821 #endif
1822
1823 #ifdef SUPPORT_10BIT
1824 /* Lossless compression body buffer size 4K per 64x32 (jt) */
1825 static int compute_losless_comp_body_size(struct hevc_state_s *hevc,
1826 int width, int height, int mem_saving_mode)
1827 {
1828 int width_x64;
1829 int height_x32;
1830 int bsize;
1831
1832 width_x64 = width + 63;
1833 width_x64 >>= 6;
1834
1835 height_x32 = height + 31;
1836 height_x32 >>= 5;
1837 if (mem_saving_mode == 1 && hevc->mmu_enable)
1838 bsize = 3200 * width_x64 * height_x32;
1839 else if (mem_saving_mode == 1)
1840 bsize = 3072 * width_x64 * height_x32;
1841 else
1842 bsize = 4096 * width_x64 * height_x32;
1843
1844 return bsize;
1845 }
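/* Worked example (illustrative): 3840x2160 with mem_saving_mode == 0 gives
 * width_x64 = 60 and height_x32 = 68, so bsize = 4096 * 60 * 68 =
 * 16711680 bytes (~16 MB) for the compressed body.
 */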
1846
1847 /* Lossless compression header buffer size 32bytes per 128x64 (jt) */
1848 static int compute_losless_comp_header_size(int width, int height)
1849 {
1850 int width_x128;
1851 int height_x64;
1852 int hsize;
1853
1854 width_x128 = width + 127;
1855 width_x128 >>= 7;
1856
1857 height_x64 = height + 63;
1858 height_x64 >>= 6;
1859
1860 hsize = 32*width_x128*height_x64;
1861
1862 return hsize;
1863 }
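/* Worked example (illustrative): 3840x2160 gives width_x128 = 30 and
 * height_x64 = 34, so hsize = 32 * 30 * 34 = 32640 bytes of header.
 */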
1864 #endif
1865
1866 static int add_log(struct hevc_state_s *hevc,
1867 const char *fmt, ...)
1868 {
1869 #define HEVC_LOG_BUF 196
1870 struct debug_log_s *log_item;
1871 unsigned char buf[HEVC_LOG_BUF];
1872 int len = 0;
1873 va_list args;
1874 mutex_lock(&vh265_log_mutex);
1875 va_start(args, fmt);
1876 len = sprintf(buf, "<%ld> <%05d> ",
1877 jiffies, hevc->decode_idx);
1878 len += vsnprintf(buf + len,
1879 HEVC_LOG_BUF - len, fmt, args);
1880 va_end(args);
1881 log_item = kmalloc(
1882 sizeof(struct debug_log_s) + len,
1883 GFP_KERNEL);
1884 if (log_item) {
1885 INIT_LIST_HEAD(&log_item->list);
1886 strcpy(&log_item->data, buf);
1887 list_add_tail(&log_item->list,
1888 &hevc->log_list);
1889 }
1890 mutex_unlock(&vh265_log_mutex);
1891 return 0;
1892 }
1893
1894 static void dump_log(struct hevc_state_s *hevc)
1895 {
1896 int i = 0;
1897 struct debug_log_s *log_item, *tmp;
1898 mutex_lock(&vh265_log_mutex);
1899 list_for_each_entry_safe(log_item, tmp, &hevc->log_list, list) {
1900 hevc_print(hevc, 0,
1901 "[LOG%04d]%s\n",
1902 i++,
1903 &log_item->data);
1904 list_del(&log_item->list);
1905 kfree(log_item);
1906 }
1907 mutex_unlock(&vh265_log_mutex);
1908 }
1909
static unsigned char is_skip_decoding(struct hevc_state_s *hevc,
1911 struct PIC_s *pic)
1912 {
1913 if (pic->error_mark
1914 && ((hevc->ignore_bufmgr_error & 0x1) == 0))
1915 return 1;
1916 return 0;
1917 }
1918
static int get_pic_poc(struct hevc_state_s *hevc,
1920 unsigned int idx)
1921 {
1922 if (idx != 0xff
1923 && idx < MAX_REF_PIC_NUM
1924 && hevc->m_PIC[idx])
1925 return hevc->m_PIC[idx]->POC;
1926 return INVALID_POC;
1927 }
1928
1929 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
static int get_valid_double_write_mode(struct hevc_state_s *hevc)
1931 {
1932 return (hevc->m_ins_flag &&
1933 ((double_write_mode & 0x80000000) == 0)) ?
1934 hevc->double_write_mode :
1935 (double_write_mode & 0x7fffffff);
1936 }
1937
static int get_dynamic_buf_num_margin(struct hevc_state_s *hevc)
1939 {
1940 return (hevc->m_ins_flag &&
1941 ((dynamic_buf_num_margin & 0x80000000) == 0)) ?
1942 hevc->dynamic_buf_num_margin :
1943 (dynamic_buf_num_margin & 0x7fffffff);
1944 }
1945 #endif
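
/*
 * Note on the helpers above: bit 31 of the double_write_mode and
 * dynamic_buf_num_margin module parameters acts as a global override in the
 * multi-instance path; when it is set, the module parameter value (lower 31
 * bits) is used instead of the per-instance setting stored in hevc_state_s.
 */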
1946
static int get_double_write_mode(struct hevc_state_s *hevc)
1948 {
1949 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1950 int w = hevc->pic_w;
1951 int h = hevc->pic_h;
1952 u32 dw = 0x1; /*1:1*/
1953 switch (valid_dw_mode) {
1954 case 0x100:
1955 if (w > 1920 && h > 1088)
1956 dw = 0x4; /*1:2*/
1957 break;
1958 case 0x200:
1959 if (w > 1920 && h > 1088)
1960 dw = 0x2; /*1:4*/
1961 break;
1962 case 0x300:
1963 if (w > 1280 && h > 720)
1964 dw = 0x4; /*1:2*/
1965 break;
1966 default:
1967 dw = valid_dw_mode;
1968 break;
1969 }
1970 return dw;
1971 }
1972
static int v4l_parser_get_double_write_mode(struct hevc_state_s *hevc, int w, int h)
1974 {
1975 u32 valid_dw_mode = get_valid_double_write_mode(hevc);
1976 u32 dw = 0x1; /*1:1*/
1977 switch (valid_dw_mode) {
1978 case 0x100:
1979 if (w > 1920 && h > 1088)
1980 dw = 0x4; /*1:2*/
1981 break;
1982 case 0x200:
1983 if (w > 1920 && h > 1088)
1984 dw = 0x2; /*1:4*/
1985 break;
1986 case 0x300:
1987 if (w > 1280 && h > 720)
1988 dw = 0x4; /*1:2*/
1989 break;
1990 default:
1991 dw = valid_dw_mode;
1992 break;
1993 }
1994 return dw;
1995 }
1996
1997
static int get_double_write_ratio(struct hevc_state_s *hevc,
1999 int dw_mode)
2000 {
2001 int ratio = 1;
2002 if ((dw_mode == 2) ||
2003 (dw_mode == 3))
2004 ratio = 4;
2005 else if (dw_mode == 4)
2006 ratio = 2;
2007 return ratio;
2008 }
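
/*
 * Summary of the mapping implemented above: double-write modes 2 and 3
 * produce a 1:4 down-scaled copy (ratio 4), mode 4 produces a 1:2 copy
 * (ratio 2), and all other modes keep the original size (ratio 1).
 */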
2009 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
static unsigned char get_idx(struct hevc_state_s *hevc)
2011 {
2012 return hevc->index;
2013 }
2014 #endif
2015
2016 #undef pr_info
2017 #define pr_info printk
static int hevc_print(struct hevc_state_s *hevc,
2019 int flag, const char *fmt, ...)
2020 {
2021 #define HEVC_PRINT_BUF 256
2022 unsigned char buf[HEVC_PRINT_BUF];
2023 int len = 0;
2024 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2025 if (hevc == NULL ||
2026 (flag == 0) ||
2027 ((debug_mask &
2028 (1 << hevc->index))
2029 && (debug & flag))) {
2030 #endif
2031 va_list args;
2032
2033 va_start(args, fmt);
2034 if (hevc)
2035 len = sprintf(buf, "[%d]", hevc->index);
2036 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2037 pr_debug("%s", buf);
2038 va_end(args);
2039 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2040 }
2041 #endif
2042 return 0;
2043 }
2044
static int hevc_print_cont(struct hevc_state_s *hevc,
2046 int flag, const char *fmt, ...)
2047 {
2048 unsigned char buf[HEVC_PRINT_BUF];
2049 int len = 0;
2050 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2051 if (hevc == NULL ||
2052 (flag == 0) ||
2053 ((debug_mask &
2054 (1 << hevc->index))
2055 && (debug & flag))) {
2056 #endif
2057 va_list args;
2058
2059 va_start(args, fmt);
2060 vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
2061 pr_info("%s", buf);
2062 va_end(args);
2063 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
2064 }
2065 #endif
2066 return 0;
2067 }
2068
2069 static void put_mv_buf(struct hevc_state_s *hevc,
2070 struct PIC_s *pic);
2071
2072 static void update_vf_memhandle(struct hevc_state_s *hevc,
2073 struct vframe_s *vf, struct PIC_s *pic);
2074
2075 static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic);
2076
2077 static void release_aux_data(struct hevc_state_s *hevc,
2078 struct PIC_s *pic);
2079 static void release_pic_mmu_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2080
2081 #ifdef MULTI_INSTANCE_SUPPORT
static void backup_decode_state(struct hevc_state_s *hevc)
2083 {
2084 hevc->m_pocRandomAccess_bak = hevc->m_pocRandomAccess;
2085 hevc->curr_POC_bak = hevc->curr_POC;
2086 hevc->iPrevPOC_bak = hevc->iPrevPOC;
2087 hevc->iPrevTid0POC_bak = hevc->iPrevTid0POC;
2088 hevc->start_parser_type_bak = hevc->start_parser_type;
2089 hevc->start_decoding_flag_bak = hevc->start_decoding_flag;
2090 hevc->rps_set_id_bak = hevc->rps_set_id;
2091 hevc->pic_decoded_lcu_idx_bak = hevc->pic_decoded_lcu_idx;
2092 hevc->decode_idx_bak = hevc->decode_idx;
2093
2094 }
2095
static void restore_decode_state(struct hevc_state_s *hevc)
2097 {
2098 struct vdec_s *vdec = hw_to_vdec(hevc);
2099 if (!vdec_has_more_input(vdec)) {
2100 hevc->pic_decoded_lcu_idx =
2101 READ_VREG(HEVC_PARSER_LCU_START)
2102 & 0xffffff;
2103 return;
2104 }
2105 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
2106 "%s: discard pic index 0x%x\n",
2107 __func__, hevc->decoding_pic ?
2108 hevc->decoding_pic->index : 0xff);
2109 if (hevc->decoding_pic) {
2110 hevc->decoding_pic->error_mark = 0;
2111 hevc->decoding_pic->output_ready = 0;
2112 hevc->decoding_pic->output_mark = 0;
2113 hevc->decoding_pic->referenced = 0;
2114 hevc->decoding_pic->POC = INVALID_POC;
2115 put_mv_buf(hevc, hevc->decoding_pic);
2116 release_pic_mmu_buf(hevc, hevc->decoding_pic);
2117 release_aux_data(hevc, hevc->decoding_pic);
2118 hevc->decoding_pic = NULL;
2119 }
2120 hevc->decode_idx = hevc->decode_idx_bak;
2121 hevc->m_pocRandomAccess = hevc->m_pocRandomAccess_bak;
2122 hevc->curr_POC = hevc->curr_POC_bak;
2123 hevc->iPrevPOC = hevc->iPrevPOC_bak;
2124 hevc->iPrevTid0POC = hevc->iPrevTid0POC_bak;
2125 hevc->start_parser_type = hevc->start_parser_type_bak;
2126 hevc->start_decoding_flag = hevc->start_decoding_flag_bak;
2127 hevc->rps_set_id = hevc->rps_set_id_bak;
2128 hevc->pic_decoded_lcu_idx = hevc->pic_decoded_lcu_idx_bak;
2129
2130 if (hevc->pic_list_init_flag == 1)
2131 hevc->pic_list_init_flag = 0;
2132 /*if (hevc->decode_idx == 0)
2133 hevc->start_decoding_flag = 0;*/
2134
2135 hevc->slice_idx = 0;
2136 hevc->used_4k_num = -1;
2137 }
2138 #endif
2139
static void hevc_init_stru(struct hevc_state_s *hevc,
2141 struct BuffInfo_s *buf_spec_i)
2142 {
2143 int i;
2144 INIT_LIST_HEAD(&hevc->log_list);
2145 hevc->work_space_buf = buf_spec_i;
2146 hevc->prefix_aux_size = 0;
2147 hevc->suffix_aux_size = 0;
2148 hevc->aux_addr = NULL;
2149 hevc->rpm_addr = NULL;
2150 hevc->lmem_addr = NULL;
2151
2152 hevc->curr_POC = INVALID_POC;
2153
2154 hevc->pic_list_init_flag = 0;
2155 hevc->use_cma_flag = 0;
2156 hevc->decode_idx = 0;
2157 hevc->slice_idx = 0;
2158 hevc->new_pic = 0;
2159 hevc->new_tile = 0;
2160 hevc->iPrevPOC = 0;
2161 hevc->list_no = 0;
2162 /* int m_uiMaxCUWidth = 1<<7; */
2163 /* int m_uiMaxCUHeight = 1<<7; */
2164 hevc->m_pocRandomAccess = MAX_INT;
2165 hevc->tile_enabled = 0;
2166 hevc->tile_x = 0;
2167 hevc->tile_y = 0;
2168 hevc->iPrevTid0POC = 0;
2169 hevc->slice_addr = 0;
2170 hevc->slice_segment_addr = 0;
2171 hevc->skip_flag = 0;
2172 hevc->misc_flag0 = 0;
2173
2174 hevc->cur_pic = NULL;
2175 hevc->col_pic = NULL;
2176 hevc->wait_buf = 0;
2177 hevc->error_flag = 0;
2178 hevc->head_error_flag = 0;
2179 hevc->error_skip_nal_count = 0;
2180 hevc->have_vps = 0;
2181 hevc->have_sps = 0;
2182 hevc->have_pps = 0;
2183 hevc->have_valid_start_slice = 0;
2184
2185 hevc->pts_mode = PTS_NORMAL;
2186 hevc->last_pts = 0;
2187 hevc->last_lookup_pts = 0;
2188 hevc->last_pts_us64 = 0;
2189 hevc->last_lookup_pts_us64 = 0;
2190 hevc->pts_mode_switching_count = 0;
2191 hevc->pts_mode_recovery_count = 0;
2192
2193 hevc->PB_skip_mode = nal_skip_policy & 0x3;
2194 hevc->PB_skip_count_after_decoding = (nal_skip_policy >> 16) & 0xffff;
2195 if (hevc->PB_skip_mode == 0)
2196 hevc->ignore_bufmgr_error = 0x1;
2197 else
2198 hevc->ignore_bufmgr_error = 0x0;
2199
2200 if (hevc->is_used_v4l) {
2201 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2202 if (hevc->m_PIC[i] != NULL) {
				memset(hevc->m_PIC[i], 0, sizeof(struct PIC_s));
2204 hevc->m_PIC[i]->index = i;
2205 }
2206 }
2207 }
2208
2209 hevc->pic_num = 0;
2210 hevc->lcu_x_num_pre = 0;
2211 hevc->lcu_y_num_pre = 0;
2212 hevc->first_pic_after_recover = 0;
2213
2214 hevc->pre_top_pic = NULL;
2215 hevc->pre_bot_pic = NULL;
2216
2217 hevc->sei_present_flag = 0;
2218 hevc->valve_count = 0;
2219 hevc->first_pic_flag = 0;
2220 #ifdef MULTI_INSTANCE_SUPPORT
2221 hevc->decoded_poc = INVALID_POC;
2222 hevc->start_process_time = 0;
2223 hevc->last_lcu_idx = 0;
2224 hevc->decode_timeout_count = 0;
2225 hevc->timeout_num = 0;
2226 hevc->eos = 0;
2227 hevc->pic_decoded_lcu_idx = -1;
2228 hevc->over_decode = 0;
2229 hevc->used_4k_num = -1;
2230 hevc->start_decoding_flag = 0;
2231 hevc->rps_set_id = 0;
2232 backup_decode_state(hevc);
2233 #endif
2234 #ifdef DETREFILL_ENABLE
2235 hevc->detbuf_adr = 0;
2236 hevc->detbuf_adr_virt = NULL;
2237 #endif
2238 }
2239
2240 static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic);
2241 static int H265_alloc_mmu(struct hevc_state_s *hevc,
2242 struct PIC_s *new_pic, unsigned short bit_depth,
2243 unsigned int *mmu_index_adr);
2244
2245 #ifdef DETREFILL_ENABLE
2246 #define DETREFILL_BUF_SIZE (4 * 0x4000)
2247 #define HEVC_SAO_DBG_MODE0 0x361e
2248 #define HEVC_SAO_DBG_MODE1 0x361f
2249 #define HEVC_SAO_CTRL10 0x362e
2250 #define HEVC_SAO_CTRL11 0x362f
static int init_detrefill_buf(struct hevc_state_s *hevc)
2252 {
2253 if (hevc->detbuf_adr_virt)
2254 return 0;
2255
2256 hevc->detbuf_adr_virt =
2257 (void *)dma_alloc_coherent(amports_get_dma_device(),
2258 DETREFILL_BUF_SIZE, &hevc->detbuf_adr,
2259 GFP_KERNEL);
2260
2261 if (hevc->detbuf_adr_virt == NULL) {
2262 pr_err("%s: failed to alloc ETREFILL_BUF\n", __func__);
2263 return -1;
2264 }
2265 return 0;
2266 }
2267
static void uninit_detrefill_buf(struct hevc_state_s *hevc)
2269 {
2270 if (hevc->detbuf_adr_virt) {
2271 dma_free_coherent(amports_get_dma_device(),
2272 DETREFILL_BUF_SIZE, hevc->detbuf_adr_virt,
2273 hevc->detbuf_adr);
2274
2275 hevc->detbuf_adr_virt = NULL;
2276 hevc->detbuf_adr = 0;
2277 }
2278 }
2279
2280 /*
2281 * convert uncompressed frame buffer data from/to ddr
2282 */
static void convUnc8x4blk(uint16_t* blk8x4Luma,
2284 uint16_t* blk8x4Cb, uint16_t* blk8x4Cr, uint16_t* cmBodyBuf, int32_t direction)
2285 {
2286 if (direction == 0) {
2287 blk8x4Luma[3 + 0 * 8] = ((cmBodyBuf[0] >> 0)) & 0x3ff;
2288 blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6)
2289 | (cmBodyBuf[0] >> 10)) & 0x3ff;
2290 blk8x4Luma[3 + 2 * 8] = ((cmBodyBuf[1] >> 4)) & 0x3ff;
2291 blk8x4Luma[3 + 3 * 8] = ((cmBodyBuf[2] << 2)
2292 | (cmBodyBuf[1] >> 14)) & 0x3ff;
2293 blk8x4Luma[7 + 0 * 8] = ((cmBodyBuf[3] << 8)
2294 | (cmBodyBuf[2] >> 8)) & 0x3ff;
2295 blk8x4Luma[7 + 1 * 8] = ((cmBodyBuf[3] >> 2)) & 0x3ff;
2296 blk8x4Luma[7 + 2 * 8] = ((cmBodyBuf[4] << 4)
2297 | (cmBodyBuf[3] >> 12)) & 0x3ff;
2298 blk8x4Luma[7 + 3 * 8] = ((cmBodyBuf[4] >> 6)) & 0x3ff;
2299 blk8x4Cb [0 + 0 * 4] = ((cmBodyBuf[5] >> 0)) & 0x3ff;
2300 blk8x4Cr [0 + 0 * 4] = ((cmBodyBuf[6] << 6)
2301 | (cmBodyBuf[5] >> 10)) & 0x3ff;
2302 blk8x4Cb [0 + 1 * 4] = ((cmBodyBuf[6] >> 4)) & 0x3ff;
2303 blk8x4Cr [0 + 1 * 4] = ((cmBodyBuf[7] << 2)
2304 | (cmBodyBuf[6] >> 14)) & 0x3ff;
2305
2306 blk8x4Luma[0 + 0 * 8] = ((cmBodyBuf[0 + 8] >> 0)) & 0x3ff;
2307 blk8x4Luma[1 + 0 * 8] = ((cmBodyBuf[1 + 8] << 6) |
2308 (cmBodyBuf[0 + 8] >> 10)) & 0x3ff;
2309 blk8x4Luma[2 + 0 * 8] = ((cmBodyBuf[1 + 8] >> 4)) & 0x3ff;
2310 blk8x4Luma[0 + 1 * 8] = ((cmBodyBuf[2 + 8] << 2) |
2311 (cmBodyBuf[1 + 8] >> 14)) & 0x3ff;
2312 blk8x4Luma[1 + 1 * 8] = ((cmBodyBuf[3 + 8] << 8) |
2313 (cmBodyBuf[2 + 8] >> 8)) & 0x3ff;
2314 blk8x4Luma[2 + 1 * 8] = ((cmBodyBuf[3 + 8] >> 2)) & 0x3ff;
2315 blk8x4Luma[0 + 2 * 8] = ((cmBodyBuf[4 + 8] << 4) |
2316 (cmBodyBuf[3 + 8] >> 12)) & 0x3ff;
2317 blk8x4Luma[1 + 2 * 8] = ((cmBodyBuf[4 + 8] >> 6)) & 0x3ff;
2318 blk8x4Luma[2 + 2 * 8] = ((cmBodyBuf[5 + 8] >> 0)) & 0x3ff;
2319 blk8x4Luma[0 + 3 * 8] = ((cmBodyBuf[6 + 8] << 6) |
2320 (cmBodyBuf[5 + 8] >> 10)) & 0x3ff;
2321 blk8x4Luma[1 + 3 * 8] = ((cmBodyBuf[6 + 8] >> 4)) & 0x3ff;
2322 blk8x4Luma[2 + 3 * 8] = ((cmBodyBuf[7 + 8] << 2) |
2323 (cmBodyBuf[6 + 8] >> 14)) & 0x3ff;
2324
2325 blk8x4Luma[4 + 0 * 8] = ((cmBodyBuf[0 + 16] >> 0)) & 0x3ff;
2326 blk8x4Luma[5 + 0 * 8] = ((cmBodyBuf[1 + 16] << 6) |
2327 (cmBodyBuf[0 + 16] >> 10)) & 0x3ff;
2328 blk8x4Luma[6 + 0 * 8] = ((cmBodyBuf[1 + 16] >> 4)) & 0x3ff;
2329 blk8x4Luma[4 + 1 * 8] = ((cmBodyBuf[2 + 16] << 2) |
2330 (cmBodyBuf[1 + 16] >> 14)) & 0x3ff;
2331 blk8x4Luma[5 + 1 * 8] = ((cmBodyBuf[3 + 16] << 8) |
2332 (cmBodyBuf[2 + 16] >> 8)) & 0x3ff;
2333 blk8x4Luma[6 + 1 * 8] = ((cmBodyBuf[3 + 16] >> 2)) & 0x3ff;
2334 blk8x4Luma[4 + 2 * 8] = ((cmBodyBuf[4 + 16] << 4) |
2335 (cmBodyBuf[3 + 16] >> 12)) & 0x3ff;
2336 blk8x4Luma[5 + 2 * 8] = ((cmBodyBuf[4 + 16] >> 6)) & 0x3ff;
2337 blk8x4Luma[6 + 2 * 8] = ((cmBodyBuf[5 + 16] >> 0)) & 0x3ff;
2338 blk8x4Luma[4 + 3 * 8] = ((cmBodyBuf[6 + 16] << 6) |
2339 (cmBodyBuf[5 + 16] >> 10)) & 0x3ff;
2340 blk8x4Luma[5 + 3 * 8] = ((cmBodyBuf[6 + 16] >> 4)) & 0x3ff;
2341 blk8x4Luma[6 + 3 * 8] = ((cmBodyBuf[7 + 16] << 2) |
2342 (cmBodyBuf[6 + 16] >> 14)) & 0x3ff;
2343
2344 blk8x4Cb[1 + 0 * 4] = ((cmBodyBuf[0 + 24] >> 0)) & 0x3ff;
2345 blk8x4Cr[1 + 0 * 4] = ((cmBodyBuf[1 + 24] << 6) |
2346 (cmBodyBuf[0 + 24] >> 10)) & 0x3ff;
2347 blk8x4Cb[2 + 0 * 4] = ((cmBodyBuf[1 + 24] >> 4)) & 0x3ff;
2348 blk8x4Cr[2 + 0 * 4] = ((cmBodyBuf[2 + 24] << 2) |
2349 (cmBodyBuf[1 + 24] >> 14)) & 0x3ff;
2350 blk8x4Cb[3 + 0 * 4] = ((cmBodyBuf[3 + 24] << 8) |
2351 (cmBodyBuf[2 + 24] >> 8)) & 0x3ff;
2352 blk8x4Cr[3 + 0 * 4] = ((cmBodyBuf[3 + 24] >> 2)) & 0x3ff;
2353 blk8x4Cb[1 + 1 * 4] = ((cmBodyBuf[4 + 24] << 4) |
2354 (cmBodyBuf[3 + 24] >> 12)) & 0x3ff;
2355 blk8x4Cr[1 + 1 * 4] = ((cmBodyBuf[4 + 24] >> 6)) & 0x3ff;
2356 blk8x4Cb[2 + 1 * 4] = ((cmBodyBuf[5 + 24] >> 0)) & 0x3ff;
2357 blk8x4Cr[2 + 1 * 4] = ((cmBodyBuf[6 + 24] << 6) |
2358 (cmBodyBuf[5 + 24] >> 10)) & 0x3ff;
2359 blk8x4Cb[3 + 1 * 4] = ((cmBodyBuf[6 + 24] >> 4)) & 0x3ff;
2360 blk8x4Cr[3 + 1 * 4] = ((cmBodyBuf[7 + 24] << 2) |
2361 (cmBodyBuf[6 + 24] >> 14)) & 0x3ff;
2362 } else {
2363 cmBodyBuf[0 + 8 * 0] = (blk8x4Luma[3 + 1 * 8] << 10) |
2364 blk8x4Luma[3 + 0 * 8];
2365 cmBodyBuf[1 + 8 * 0] = (blk8x4Luma[3 + 3 * 8] << 14) |
2366 (blk8x4Luma[3 + 2 * 8] << 4) | (blk8x4Luma[3 + 1 * 8] >> 6);
2367 cmBodyBuf[2 + 8 * 0] = (blk8x4Luma[7 + 0 * 8] << 8) |
2368 (blk8x4Luma[3 + 3 * 8] >> 2);
2369 cmBodyBuf[3 + 8 * 0] = (blk8x4Luma[7 + 2 * 8] << 12) |
2370 (blk8x4Luma[7 + 1 * 8] << 2) | (blk8x4Luma[7 + 0 * 8] >>8);
2371 cmBodyBuf[4 + 8 * 0] = (blk8x4Luma[7 + 3 * 8] << 6) |
2372 (blk8x4Luma[7 + 2 * 8] >>4);
2373 cmBodyBuf[5 + 8 * 0] = (blk8x4Cr[0 + 0 * 4] << 10) |
2374 blk8x4Cb[0 + 0 * 4];
2375 cmBodyBuf[6 + 8 * 0] = (blk8x4Cr[0 + 1 * 4] << 14) |
2376 (blk8x4Cb[0 + 1 * 4] << 4) | (blk8x4Cr[0 + 0 * 4] >> 6);
2377 cmBodyBuf[7 + 8 * 0] = (0<< 8) | (blk8x4Cr[0 + 1 * 4] >> 2);
2378
2379 cmBodyBuf[0 + 8 * 1] = (blk8x4Luma[1 + 0 * 8] << 10) |
2380 blk8x4Luma[0 + 0 * 8];
2381 cmBodyBuf[1 + 8 * 1] = (blk8x4Luma[0 + 1 * 8] << 14) |
2382 (blk8x4Luma[2 + 0 * 8] << 4) | (blk8x4Luma[1 + 0 * 8] >> 6);
2383 cmBodyBuf[2 + 8 * 1] = (blk8x4Luma[1 + 1 * 8] << 8) |
2384 (blk8x4Luma[0 + 1 * 8] >> 2);
2385 cmBodyBuf[3 + 8 * 1] = (blk8x4Luma[0 + 2 * 8] << 12) |
2386 (blk8x4Luma[2 + 1 * 8] << 2) | (blk8x4Luma[1 + 1 * 8] >>8);
2387 cmBodyBuf[4 + 8 * 1] = (blk8x4Luma[1 + 2 * 8] << 6) |
2388 (blk8x4Luma[0 + 2 * 8] >>4);
2389 cmBodyBuf[5 + 8 * 1] = (blk8x4Luma[0 + 3 * 8] << 10) |
2390 blk8x4Luma[2 + 2 * 8];
2391 cmBodyBuf[6 + 8 * 1] = (blk8x4Luma[2 + 3 * 8] << 14) |
2392 (blk8x4Luma[1 + 3 * 8] << 4) | (blk8x4Luma[0 + 3 * 8] >> 6);
2393 cmBodyBuf[7 + 8 * 1] = (0<< 8) | (blk8x4Luma[2 + 3 * 8] >> 2);
2394
2395 cmBodyBuf[0 + 8 * 2] = (blk8x4Luma[5 + 0 * 8] << 10) |
2396 blk8x4Luma[4 + 0 * 8];
2397 cmBodyBuf[1 + 8 * 2] = (blk8x4Luma[4 + 1 * 8] << 14) |
2398 (blk8x4Luma[6 + 0 * 8] << 4) | (blk8x4Luma[5 + 0 * 8] >> 6);
2399 cmBodyBuf[2 + 8 * 2] = (blk8x4Luma[5 + 1 * 8] << 8) |
2400 (blk8x4Luma[4 + 1 * 8] >> 2);
2401 cmBodyBuf[3 + 8 * 2] = (blk8x4Luma[4 + 2 * 8] << 12) |
2402 (blk8x4Luma[6 + 1 * 8] << 2) | (blk8x4Luma[5 + 1 * 8] >>8);
2403 cmBodyBuf[4 + 8 * 2] = (blk8x4Luma[5 + 2 * 8] << 6) |
2404 (blk8x4Luma[4 + 2 * 8] >>4);
2405 cmBodyBuf[5 + 8 * 2] = (blk8x4Luma[4 + 3 * 8] << 10) |
2406 blk8x4Luma[6 + 2 * 8];
2407 cmBodyBuf[6 + 8 * 2] = (blk8x4Luma[6 + 3 * 8] << 14) |
2408 (blk8x4Luma[5 + 3 * 8] << 4) | (blk8x4Luma[4 + 3 * 8] >> 6);
2409 cmBodyBuf[7 + 8 * 2] = (0<< 8) | (blk8x4Luma[6 + 3 * 8] >> 2);
2410
2411 cmBodyBuf[0 + 8 * 3] = (blk8x4Cr[1 + 0 * 4] << 10) |
2412 blk8x4Cb[1 + 0 * 4];
2413 cmBodyBuf[1 + 8 * 3] = (blk8x4Cr[2 + 0 * 4] << 14) |
2414 (blk8x4Cb[2 + 0 * 4] << 4) | (blk8x4Cr[1 + 0 * 4] >> 6);
2415 cmBodyBuf[2 + 8 * 3] = (blk8x4Cb[3 + 0 * 4] << 8) |
2416 (blk8x4Cr[2 + 0 * 4] >> 2);
2417 cmBodyBuf[3 + 8 * 3] = (blk8x4Cb[1 + 1 * 4] << 12) |
2418 (blk8x4Cr[3 + 0 * 4] << 2) | (blk8x4Cb[3 + 0 * 4] >>8);
2419 cmBodyBuf[4 + 8 * 3] = (blk8x4Cr[1 + 1 * 4] << 6) |
2420 (blk8x4Cb[1 + 1 * 4] >>4);
2421 cmBodyBuf[5 + 8 * 3] = (blk8x4Cr[2 + 1 * 4] << 10) |
2422 blk8x4Cb[2 + 1 * 4];
2423 cmBodyBuf[6 + 8 * 3] = (blk8x4Cr[3 + 1 * 4] << 14) |
2424 (blk8x4Cb[3 + 1 * 4] << 4) | (blk8x4Cr[2 + 1 * 4] >> 6);
2425 cmBodyBuf[7 + 8 * 3] = (0 << 8) | (blk8x4Cr[3 + 1 * 4] >> 2);
2426 }
2427 }
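
/*
 * Packing illustration (derived from the unpacking code above): each group of
 * eight 16-bit DDR body words carries twelve 10-bit samples (120 of 128 bits).
 * For direction == 0 a sample that straddles a word boundary is reassembled
 * as, e.g.,
 *   blk8x4Luma[3 + 1 * 8] = ((cmBodyBuf[1] << 6) | (cmBodyBuf[0] >> 10)) & 0x3ff;
 * i.e. its low 6 bits come from the top of word 0 and its high 4 bits from
 * the bottom of word 1. Direction == 1 performs the inverse packing.
 */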
2428
static void corrRefillWithAmrisc (
2430 struct hevc_state_s *hevc,
2431 uint32_t cmHeaderBaseAddr,
2432 uint32_t picWidth,
2433 uint32_t ctuPosition)
2434 {
2435 int32_t i;
2436 uint16_t ctux = (ctuPosition>>16) & 0xffff;
2437 uint16_t ctuy = (ctuPosition>> 0) & 0xffff;
2438 int32_t aboveCtuAvailable = (ctuy) ? 1 : 0;
2439
2440 uint16_t cmBodyBuf[32 * 18];
2441
2442 uint32_t pic_width_x64_pre = picWidth + 0x3f;
2443 uint32_t pic_width_x64 = pic_width_x64_pre >> 6;
2444 uint32_t stride64x64 = pic_width_x64 * 128;
2445 uint32_t addr_offset64x64_abv = stride64x64 *
2446 (aboveCtuAvailable ? ctuy - 1 : ctuy) + 128 * ctux;
2447 uint32_t addr_offset64x64_cur = stride64x64*ctuy + 128 * ctux;
2448 uint32_t cmHeaderAddrAbv = cmHeaderBaseAddr + addr_offset64x64_abv;
2449 uint32_t cmHeaderAddrCur = cmHeaderBaseAddr + addr_offset64x64_cur;
2450 unsigned int tmpData32;
2451
2452 uint16_t blkBuf0Y[32];
2453 uint16_t blkBuf0Cb[8];
2454 uint16_t blkBuf0Cr[8];
2455 uint16_t blkBuf1Y[32];
2456 uint16_t blkBuf1Cb[8];
2457 uint16_t blkBuf1Cr[8];
2458 int32_t blkBufCnt = 0;
2459
2460 int32_t blkIdx;
2461
2462 WRITE_VREG(HEVC_SAO_CTRL10, cmHeaderAddrAbv);
2463 WRITE_VREG(HEVC_SAO_CTRL11, cmHeaderAddrCur);
2464 WRITE_VREG(HEVC_SAO_DBG_MODE0, hevc->detbuf_adr);
2465 WRITE_VREG(HEVC_SAO_DBG_MODE1, 2);
2466
2467 for (i = 0; i < 32 * 18; i++)
2468 cmBodyBuf[i] = 0;
2469
2470 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2471 "%s, %d\n", __func__, __LINE__);
2472 do {
2473 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2474 } while (tmpData32);
2475 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2476 "%s, %d\n", __func__, __LINE__);
2477
2478 hevc_print(hevc, H265_DEBUG_DETAIL,
2479 "cmBodyBuf from detbuf:\n");
2480 for (i = 0; i < 32 * 18; i++) {
2481 cmBodyBuf[i] = hevc->detbuf_adr_virt[i];
2482 if (get_dbg_flag(hevc) &
2483 H265_DEBUG_DETAIL) {
2484 if ((i & 0xf) == 0)
2485 hevc_print_cont(hevc, 0, "\n");
2486 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2487 }
2488 }
2489 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2490
2491 for (i = 0; i < 32; i++)
2492 blkBuf0Y[i] = 0;
2493 for (i = 0; i < 8; i++)
2494 blkBuf0Cb[i] = 0;
2495 for (i = 0; i < 8; i++)
2496 blkBuf0Cr[i] = 0;
2497 for (i = 0; i < 32; i++)
2498 blkBuf1Y[i] = 0;
2499 for (i = 0; i < 8; i++)
2500 blkBuf1Cb[i] = 0;
2501 for (i = 0; i < 8; i++)
2502 blkBuf1Cr[i] = 0;
2503
2504 for (blkIdx = 0; blkIdx < 18; blkIdx++) {
2505 int32_t inAboveCtu = (blkIdx<2) ? 1 : 0;
2506 int32_t restoreEnable = (blkIdx>0) ? 1 : 0;
2507 uint16_t* blkY = (blkBufCnt==0) ? blkBuf0Y : blkBuf1Y ;
2508 uint16_t* blkCb = (blkBufCnt==0) ? blkBuf0Cb : blkBuf1Cb;
2509 uint16_t* blkCr = (blkBufCnt==0) ? blkBuf0Cr : blkBuf1Cr;
2510 uint16_t* cmBodyBufNow = cmBodyBuf + (blkIdx * 32);
2511
2512 if (!aboveCtuAvailable && inAboveCtu)
2513 continue;
2514
2515 /* detRefillBuf --> 8x4block*/
2516 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 0);
2517
2518 if (restoreEnable) {
2519 blkY[3 + 0 * 8] = blkY[2 + 0 * 8] + 2;
2520 blkY[4 + 0 * 8] = blkY[1 + 0 * 8] + 3;
2521 blkY[5 + 0 * 8] = blkY[0 + 0 * 8] + 1;
2522 blkY[6 + 0 * 8] = blkY[0 + 0 * 8] + 2;
2523 blkY[7 + 0 * 8] = blkY[1 + 0 * 8] + 2;
2524 blkY[3 + 1 * 8] = blkY[2 + 1 * 8] + 1;
2525 blkY[4 + 1 * 8] = blkY[1 + 1 * 8] + 2;
2526 blkY[5 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2527 blkY[6 + 1 * 8] = blkY[0 + 1 * 8] + 2;
2528 blkY[7 + 1 * 8] = blkY[1 + 1 * 8] + 3;
2529 blkY[3 + 2 * 8] = blkY[2 + 2 * 8] + 3;
2530 blkY[4 + 2 * 8] = blkY[1 + 2 * 8] + 1;
2531 blkY[5 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2532 blkY[6 + 2 * 8] = blkY[0 + 2 * 8] + 3;
2533 blkY[7 + 2 * 8] = blkY[1 + 2 * 8] + 3;
2534 blkY[3 + 3 * 8] = blkY[2 + 3 * 8] + 0;
2535 blkY[4 + 3 * 8] = blkY[1 + 3 * 8] + 0;
2536 blkY[5 + 3 * 8] = blkY[0 + 3 * 8] + 1;
2537 blkY[6 + 3 * 8] = blkY[0 + 3 * 8] + 2;
2538 blkY[7 + 3 * 8] = blkY[1 + 3 * 8] + 1;
2539 blkCb[1 + 0 * 4] = blkCb[0 + 0 * 4];
2540 blkCb[2 + 0 * 4] = blkCb[0 + 0 * 4];
2541 blkCb[3 + 0 * 4] = blkCb[0 + 0 * 4];
2542 blkCb[1 + 1 * 4] = blkCb[0 + 1 * 4];
2543 blkCb[2 + 1 * 4] = blkCb[0 + 1 * 4];
2544 blkCb[3 + 1 * 4] = blkCb[0 + 1 * 4];
2545 blkCr[1 + 0 * 4] = blkCr[0 + 0 * 4];
2546 blkCr[2 + 0 * 4] = blkCr[0 + 0 * 4];
2547 blkCr[3 + 0 * 4] = blkCr[0 + 0 * 4];
2548 blkCr[1 + 1 * 4] = blkCr[0 + 1 * 4];
2549 blkCr[2 + 1 * 4] = blkCr[0 + 1 * 4];
2550 blkCr[3 + 1 * 4] = blkCr[0 + 1 * 4];
2551
2552 /*Store data back to DDR*/
2553 convUnc8x4blk(blkY, blkCb, blkCr, cmBodyBufNow, 1);
2554 }
2555
2556 blkBufCnt = (blkBufCnt==1) ? 0 : blkBufCnt + 1;
2557 }
2558
2559 hevc_print(hevc, H265_DEBUG_DETAIL,
2560 "cmBodyBuf to detbuf:\n");
2561 for (i = 0; i < 32 * 18; i++) {
2562 hevc->detbuf_adr_virt[i] = cmBodyBuf[i];
2563 if (get_dbg_flag(hevc) &
2564 H265_DEBUG_DETAIL) {
2565 if ((i & 0xf) == 0)
2566 hevc_print_cont(hevc, 0, "\n");
2567 hevc_print_cont(hevc, 0, "%02x ", cmBodyBuf[i]);
2568 }
2569 }
2570 hevc_print_cont(hevc, H265_DEBUG_DETAIL, "\n");
2571
2572 WRITE_VREG(HEVC_SAO_DBG_MODE1, 3);
2573 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2574 "%s, %d\n", __func__, __LINE__);
2575 do {
2576 tmpData32 = READ_VREG(HEVC_SAO_DBG_MODE1);
2577 } while (tmpData32);
2578 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2579 "%s, %d\n", __func__, __LINE__);
2580 }
2581
static void delrefill(struct hevc_state_s *hevc)
2583 {
	/*
	 * corrRefill
	 *
	 * HEVC_SAO_DBG_MODE0: picGlobalVariable
	 *   [31:30] error number
	 *   [29:20] error2 ([9:7] tilex, [6:0] ctuy)
	 *   [19:10] error1
	 *   [9:0]   error0
	 */
2591 uint32_t detResult = READ_VREG(HEVC_ASSIST_SCRATCH_3);
2592 uint32_t errorIdx;
2593 uint32_t errorNum = (detResult>>30);
2594
2595 if (detResult) {
2596 hevc_print(hevc, H265_DEBUG_BUFMGR,
2597 "[corrRefillWithAmrisc] detResult=%08x\n", detResult);
2598 for (errorIdx = 0; errorIdx < errorNum; errorIdx++) {
2599 uint32_t errorPos = errorIdx * 10;
2600 uint32_t errorResult = (detResult >> errorPos) & 0x3ff;
2601 uint32_t tilex = (errorResult >> 7) - 1;
2602 uint16_t ctux = hevc->m_tile[0][tilex].start_cu_x
2603 + hevc->m_tile[0][tilex].width - 1;
2604 uint16_t ctuy = (uint16_t)(errorResult & 0x7f);
2605 uint32_t ctuPosition = (ctux<< 16) + ctuy;
2606 hevc_print(hevc, H265_DEBUG_BUFMGR,
2607 "Idx:%d tilex:%d ctu(%d(0x%x), %d(0x%x))\n",
2608 errorIdx,tilex,ctux,ctux, ctuy,ctuy);
2609 corrRefillWithAmrisc(
2610 hevc,
2611 (uint32_t)hevc->cur_pic->header_adr,
2612 hevc->pic_w,
2613 ctuPosition);
2614 }
2615
2616 WRITE_VREG(HEVC_ASSIST_SCRATCH_3, 0); /*clear status*/
2617 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
2618 WRITE_VREG(HEVC_SAO_DBG_MODE1, 1);
2619 }
2620 }
2621 #endif
2622
static void get_rpm_param(union param_u *params)
2624 {
2625 int i;
2626 unsigned int data32;
2627
2628 for (i = 0; i < 128; i++) {
2629 do {
2630 data32 = READ_VREG(RPM_CMD_REG);
2631 /* hevc_print(hevc, 0, "%x\n", data32); */
2632 } while ((data32 & 0x10000) == 0);
2633 params->l.data[i] = data32 & 0xffff;
2634 /* hevc_print(hevc, 0, "%x\n", data32); */
2635 WRITE_VREG(RPM_CMD_REG, 0);
2636 }
2637 }
2638
static int get_free_buf_idx(struct hevc_state_s *hevc)
2640 {
2641 int index = INVALID_IDX;
2642 struct PIC_s *pic;
2643 int i;
2644
2645 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2646 pic = hevc->m_PIC[i];
2647 if (pic == NULL ||
2648 pic->index == -1 ||
2649 pic->BUF_index == -1)
2650 continue;
2651
2652 if (pic->output_mark == 0 &&
2653 pic->referenced == 0 &&
2654 pic->output_ready == 0 &&
2655 pic->cma_alloc_addr) {
2656 pic->output_ready = 1;
2657 index = i;
2658 break;
2659 }
2660 }
2661
2662 return index;
2663 }
2664
static struct PIC_s *get_pic_by_POC(struct hevc_state_s *hevc, int POC)
2666 {
2667 int i;
2668 struct PIC_s *pic;
2669 struct PIC_s *ret_pic = NULL;
2670 if (POC == INVALID_POC)
2671 return NULL;
2672 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2673 pic = hevc->m_PIC[i];
2674 if (pic == NULL || pic->index == -1 ||
2675 pic->BUF_index == -1)
2676 continue;
2677 if (pic->POC == POC) {
2678 if (ret_pic == NULL)
2679 ret_pic = pic;
2680 else {
2681 if (pic->decode_idx > ret_pic->decode_idx)
2682 ret_pic = pic;
2683 }
2684 }
2685 }
2686 return ret_pic;
2687 }
2688
static struct PIC_s *get_ref_pic_by_POC(struct hevc_state_s *hevc, int POC)
2690 {
2691 int i;
2692 struct PIC_s *pic;
2693 struct PIC_s *ret_pic = NULL;
2694
2695 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2696 pic = hevc->m_PIC[i];
2697 if (pic == NULL || pic->index == -1 ||
2698 pic->BUF_index == -1)
2699 continue;
2700 if ((pic->POC == POC) && (pic->referenced)) {
2701 if (ret_pic == NULL)
2702 ret_pic = pic;
2703 else {
2704 if (pic->decode_idx > ret_pic->decode_idx)
2705 ret_pic = pic;
2706 }
2707 }
2708 }
2709
2710 if (ret_pic == NULL) {
2711 if (get_dbg_flag(hevc)) {
2712 hevc_print(hevc, 0,
2713 "Wrong, POC of %d is not in referenced list\n",
2714 POC);
2715 }
2716 ret_pic = get_pic_by_POC(hevc, POC);
2717 }
2718 return ret_pic;
2719 }
2720
static unsigned int log2i(unsigned int val)
2722 {
2723 unsigned int ret = -1;
2724
2725 while (val != 0) {
2726 val >>= 1;
2727 ret++;
2728 }
2729 return ret;
2730 }
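
/*
 * Behavior note: log2i() returns floor(log2(val)), e.g. log2i(64) == 6 and
 * log2i(5) == 2; for val == 0 the loop body never runs and the initial
 * value (unsigned)-1 is returned.
 */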
2731
2732 static int init_buf_spec(struct hevc_state_s *hevc);
2733
static bool v4l_is_there_vframe_bound(struct hevc_state_s *hevc)
2735 {
2736 int i;
2737
2738 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2739 struct PIC_s *pic = hevc->m_PIC[i];
2740
2741 if (pic && pic->vframe_bound)
2742 return true;
2743 }
2744
2745 return false;
2746 }
2747
static void v4l_mmu_buffer_release(struct hevc_state_s *hevc)
2749 {
2750 int i;
2751
2752 /* release workspace */
2753 if (hevc->bmmu_box)
2754 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2755 BMMU_WORKSPACE_ID);
	/*
	 * A vframe and its fd are known to be related only once the vframe
	 * comes back to the driver. If playback exits, capture buffers still
	 * held by userspace must be released by the upper application when
	 * the fd is closed; the other buffers are released by the driver.
	 */
2762 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2763 struct PIC_s *pic = hevc->m_PIC[i];
2764
2765 if (pic && !pic->vframe_bound) {
2766 if (hevc->bmmu_box)
2767 decoder_bmmu_box_free_idx(hevc->bmmu_box,
2768 VF_BUFFER_IDX(i));
2769 if (hevc->mmu_box)
2770 decoder_mmu_box_free_idx(hevc->mmu_box, i);
2771
2772 hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
2773 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
2774 __func__, i, hevc->bmmu_box, hevc->mmu_box);
2775 }
2776 }
2777 }
2778
static void uninit_mmu_buffers(struct hevc_state_s *hevc)
2780 {
2781 if (hevc->is_used_v4l &&
2782 v4l_is_there_vframe_bound(hevc)) {
2783 if (get_double_write_mode(hevc) != 0x10) {
2784 v4l_mmu_buffer_release(hevc);
2785 return;
2786 }
2787 }
2788
2789 if (hevc->mmu_box)
2790 decoder_mmu_box_free(hevc->mmu_box);
2791 hevc->mmu_box = NULL;
2792
2793 if (hevc->bmmu_box)
2794 decoder_bmmu_box_free(hevc->bmmu_box);
2795 hevc->bmmu_box = NULL;
2796 }
static int init_mmu_buffers(struct hevc_state_s *hevc)
2798 {
2799 int tvp_flag = vdec_secure(hw_to_vdec(hevc)) ?
2800 CODEC_MM_FLAGS_TVP : 0;
2801 int buf_size = 64;
2802
2803 if ((hevc->max_pic_w * hevc->max_pic_h) > 0 &&
2804 (hevc->max_pic_w * hevc->max_pic_h) <= 1920*1088) {
2805 buf_size = 24;
2806 }
2807
2808 if (get_dbg_flag(hevc)) {
2809 hevc_print(hevc, 0, "%s max_w %d max_h %d\n",
2810 __func__, hevc->max_pic_w, hevc->max_pic_h);
2811 }
2812
2813 hevc->need_cache_size = buf_size * SZ_1M;
2814 hevc->sc_start_time = get_jiffies_64();
2815 if (hevc->mmu_enable
2816 && ((get_double_write_mode(hevc) & 0x10) == 0)) {
2817 hevc->mmu_box = decoder_mmu_box_alloc_box(DRIVER_NAME,
2818 hevc->index,
2819 MAX_REF_PIC_NUM,
2820 buf_size * SZ_1M,
2821 tvp_flag
2822 );
2823 if (!hevc->mmu_box) {
2824 pr_err("h265 alloc mmu box failed!!\n");
2825 return -1;
2826 }
2827 }
2828
2829 hevc->bmmu_box = decoder_bmmu_box_alloc_box(DRIVER_NAME,
2830 hevc->index,
2831 BMMU_MAX_BUFFERS,
2832 4 + PAGE_SHIFT,
2833 CODEC_MM_FLAGS_CMA_CLEAR |
2834 CODEC_MM_FLAGS_FOR_VDECODER |
2835 tvp_flag);
2836 if (!hevc->bmmu_box) {
2837 if (hevc->mmu_box)
2838 decoder_mmu_box_free(hevc->mmu_box);
2839 hevc->mmu_box = NULL;
2840 pr_err("h265 alloc mmu box failed!!\n");
2841 return -1;
2842 }
2843 return 0;
2844 }
2845
2846 struct buf_stru_s
2847 {
2848 int lcu_total;
2849 int mc_buffer_size_h;
2850 int mc_buffer_size_u_v_h;
2851 };
2852
2853 #ifndef MV_USE_FIXED_BUF
static void dealloc_mv_bufs(struct hevc_state_s *hevc)
2855 {
2856 int i;
2857 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2858 if (hevc->m_mv_BUF[i].start_adr) {
2859 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
2860 hevc_print(hevc, 0,
2861 "dealloc mv buf(%d) adr 0x%p size 0x%x used_flag %d\n",
2862 i, hevc->m_mv_BUF[i].start_adr,
2863 hevc->m_mv_BUF[i].size,
2864 hevc->m_mv_BUF[i].used_flag);
2865 decoder_bmmu_box_free_idx(
2866 hevc->bmmu_box,
2867 MV_BUFFER_IDX(i));
2868 hevc->m_mv_BUF[i].start_adr = 0;
2869 hevc->m_mv_BUF[i].size = 0;
2870 hevc->m_mv_BUF[i].used_flag = 0;
2871 }
2872 }
2873 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2874 if (hevc->m_PIC[i] != NULL)
2875 hevc->m_PIC[i]->mv_buf_index = -1;
2876 }
2877 }
2878
static int alloc_mv_buf(struct hevc_state_s *hevc, int i)
2880 {
2881 int ret = 0;
2882 /*get_cma_alloc_ref();*/ /*DEBUG_TMP*/
2883 if (decoder_bmmu_box_alloc_buf_phy
2884 (hevc->bmmu_box,
2885 MV_BUFFER_IDX(i), hevc->mv_buf_size,
2886 DRIVER_NAME,
2887 &hevc->m_mv_BUF[i].start_adr) < 0) {
2888 hevc->m_mv_BUF[i].start_adr = 0;
2889 ret = -1;
2890 } else {
2891 hevc->m_mv_BUF[i].size = hevc->mv_buf_size;
2892 hevc->m_mv_BUF[i].used_flag = 0;
2893 ret = 0;
2894 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
2895 hevc_print(hevc, 0,
2896 "MV Buffer %d: start_adr %p size %x\n",
2897 i,
2898 (void *)hevc->m_mv_BUF[i].start_adr,
2899 hevc->m_mv_BUF[i].size);
2900 }
2901 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_mv_BUF[i].start_adr)) {
2902 void *mem_start_virt;
2903 mem_start_virt =
2904 codec_mm_phys_to_virt(hevc->m_mv_BUF[i].start_adr);
2905 if (mem_start_virt) {
2906 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2907 codec_mm_dma_flush(mem_start_virt,
2908 hevc->m_mv_BUF[i].size, DMA_TO_DEVICE);
2909 } else {
2910 mem_start_virt = codec_mm_vmap(
2911 hevc->m_mv_BUF[i].start_adr,
2912 hevc->m_mv_BUF[i].size);
2913 if (mem_start_virt) {
2914 memset(mem_start_virt, 0, hevc->m_mv_BUF[i].size);
2915 codec_mm_dma_flush(mem_start_virt,
2916 hevc->m_mv_BUF[i].size,
2917 DMA_TO_DEVICE);
2918 codec_mm_unmap_phyaddr(mem_start_virt);
2919 } else {
					/* no virtual address for TVP playback;
					 * may need to be cleared by ucode.
					 */
2922 pr_err("ref %s mem_start_virt failed\n", __func__);
2923 }
2924 }
2925 }
2926 }
2927 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
2928 return ret;
2929 }
2930 #endif
2931
static int get_mv_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
2933 {
2934 #ifdef MV_USE_FIXED_BUF
2935 if (pic && pic->index >= 0) {
2936 if (IS_8K_SIZE(pic->width, pic->height)) {
2937 pic->mpred_mv_wr_start_addr =
2938 hevc->work_space_buf->mpred_mv.buf_start
2939 + (pic->index * MPRED_8K_MV_BUF_SIZE);
2940 } else {
2941 pic->mpred_mv_wr_start_addr =
2942 hevc->work_space_buf->mpred_mv.buf_start
2943 + (pic->index * MPRED_4K_MV_BUF_SIZE);
2944 }
2945 }
2946 return 0;
2947 #else
2948 int i;
2949 int ret = -1;
2950 int new_size;
2951 if (IS_8K_SIZE(pic->width, pic->height))
2952 new_size = MPRED_8K_MV_BUF_SIZE + 0x10000;
2953 else if (IS_4K_SIZE(pic->width, pic->height))
2954 new_size = MPRED_4K_MV_BUF_SIZE + 0x10000; /*0x120000*/
2955 else
2956 new_size = MPRED_MV_BUF_SIZE + 0x10000;
2957 if (new_size != hevc->mv_buf_size) {
2958 dealloc_mv_bufs(hevc);
2959 hevc->mv_buf_size = new_size;
2960 }
2961 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2962 if (hevc->m_mv_BUF[i].start_adr &&
2963 hevc->m_mv_BUF[i].used_flag == 0) {
2964 hevc->m_mv_BUF[i].used_flag = 1;
2965 ret = i;
2966 break;
2967 }
2968 }
2969 if (ret < 0) {
2970 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
2971 if (hevc->m_mv_BUF[i].start_adr == 0) {
2972 if (alloc_mv_buf(hevc, i) >= 0) {
2973 hevc->m_mv_BUF[i].used_flag = 1;
2974 ret = i;
2975 }
2976 break;
2977 }
2978 }
2979 }
2980
2981 if (ret >= 0) {
2982 pic->mv_buf_index = ret;
2983 pic->mpred_mv_wr_start_addr =
2984 (hevc->m_mv_BUF[ret].start_adr + 0xffff) &
2985 (~0xffff);
2986 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
2987 "%s => %d (0x%x) size 0x%x\n",
2988 __func__, ret,
2989 pic->mpred_mv_wr_start_addr,
2990 hevc->m_mv_BUF[ret].size);
2991
2992 } else {
2993 hevc_print(hevc, 0,
2994 "%s: Error, mv buf is not enough\n",
2995 __func__);
2996 }
2997 return ret;
2998
2999 #endif
3000 }
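
/*
 * Alignment note for the non-fixed-buffer path above: the MV write start
 * address is rounded up to a 64 KiB boundary, which is why each buffer is
 * over-allocated by 0x10000 bytes. For example, a start_adr of 0x12345678
 * becomes (0x12345678 + 0xffff) & ~0xffff = 0x12350000.
 */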
3001
static void put_mv_buf(struct hevc_state_s *hevc,
3003 struct PIC_s *pic)
3004 {
3005 #ifndef MV_USE_FIXED_BUF
3006 int i = pic->mv_buf_index;
3007 if (i < 0 || i >= MAX_REF_PIC_NUM) {
3008 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3009 "%s: index %d beyond range\n",
3010 __func__, i);
3011 return;
3012 }
3013 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
3014 "%s(%d): used_flag(%d)\n",
3015 __func__, i,
3016 hevc->m_mv_BUF[i].used_flag);
3017
3018 if (hevc->m_mv_BUF[i].start_adr &&
3019 hevc->m_mv_BUF[i].used_flag)
3020 hevc->m_mv_BUF[i].used_flag = 0;
3021 pic->mv_buf_index = -1;
3022 #endif
3023 }
3024
static int cal_current_buf_size(struct hevc_state_s *hevc,
3026 struct buf_stru_s *buf_stru)
3027 {
3028
3029 int buf_size;
3030 int pic_width = hevc->pic_w;
3031 int pic_height = hevc->pic_h;
3032 int lcu_size = hevc->lcu_size;
3033 int pic_width_lcu = (pic_width % lcu_size) ? pic_width / lcu_size +
3034 1 : pic_width / lcu_size;
3035 int pic_height_lcu = (pic_height % lcu_size) ? pic_height / lcu_size +
3036 1 : pic_height / lcu_size;
3037 /*SUPPORT_10BIT*/
3038 int losless_comp_header_size = compute_losless_comp_header_size
3039 (pic_width, pic_height);
3040 /*always alloc buf for 10bit*/
3041 int losless_comp_body_size = compute_losless_comp_body_size
3042 (hevc, pic_width, pic_height, 0);
3043 int mc_buffer_size = losless_comp_header_size
3044 + losless_comp_body_size;
3045 int mc_buffer_size_h = (mc_buffer_size + 0xffff) >> 16;
3046 int mc_buffer_size_u_v_h = 0;
3047
3048 int dw_mode = get_double_write_mode(hevc);
3049
3050 if (hevc->mmu_enable) {
3051 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3052 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3053 buf_size = ((MMU_COMPRESS_8K_HEADER_SIZE + 0xffff) >> 16)
3054 << 16;
3055 else
3056 buf_size = ((MMU_COMPRESS_HEADER_SIZE + 0xffff) >> 16)
3057 << 16;
3058 } else
3059 buf_size = 0;
3060
3061 if (dw_mode) {
3062 int pic_width_dw = pic_width /
3063 get_double_write_ratio(hevc, dw_mode);
3064 int pic_height_dw = pic_height /
3065 get_double_write_ratio(hevc, dw_mode);
3066
3067 int pic_width_lcu_dw = (pic_width_dw % lcu_size) ?
3068 pic_width_dw / lcu_size + 1 :
3069 pic_width_dw / lcu_size;
3070 int pic_height_lcu_dw = (pic_height_dw % lcu_size) ?
3071 pic_height_dw / lcu_size + 1 :
3072 pic_height_dw / lcu_size;
3073 int lcu_total_dw = pic_width_lcu_dw * pic_height_lcu_dw;
3074
3075 int mc_buffer_size_u_v = lcu_total_dw * lcu_size * lcu_size / 2;
3076 mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
3077 /*64k alignment*/
3078 buf_size += ((mc_buffer_size_u_v_h << 16) * 3);
3079 }
3080
3081 if ((!hevc->mmu_enable) &&
3082 ((dw_mode & 0x10) == 0)) {
		/* compressed mode without MMU: a buffer is needed
		 * for compressed reference decoding
		 */
3085 buf_size += (mc_buffer_size_h << 16);
3086 }
3087
3088 /*in case start adr is not 64k alignment*/
3089 if (buf_size > 0)
3090 buf_size += 0x10000;
3091
3092 if (buf_stru) {
3093 buf_stru->lcu_total = pic_width_lcu * pic_height_lcu;
3094 buf_stru->mc_buffer_size_h = mc_buffer_size_h;
3095 buf_stru->mc_buffer_size_u_v_h = mc_buffer_size_u_v_h;
3096 }
3097
	hevc_print(hevc, PRINT_FLAG_V4L_DETAIL,
		"pic width: %d, pic height: %d, header: %d, body: %d, size h: %d, size uvh: %d, buf size: %x\n",
3099 pic_width, pic_height, losless_comp_header_size,
3100 losless_comp_body_size, mc_buffer_size_h,
3101 mc_buffer_size_u_v_h, buf_size);
3102
3103 return buf_size;
3104 }
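
/*
 * Worked example (illustrative values, assuming mmu_enable == 0,
 * dw_mode == 1 and lcu_size == 64 for a 1920x1088 picture):
 * lcu_total_dw = 30 * 17 = 510, mc_buffer_size_u_v = 510 * 64 * 64 / 2 =
 * 1044480 bytes -> mc_buffer_size_u_v_h = 16, so the double-write planes
 * take (16 << 16) * 3 = 3 MiB; the compressed reference adds
 * (mc_buffer_size_h << 16) and a final 0x10000 pad covers unaligned start
 * addresses.
 */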
3105
static int v4l_alloc_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
3107 {
3108 int ret = -1;
3109 int i = pic->index;
3110 struct vdec_v4l2_buffer *fb = NULL;
3111
3112 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3113 return ret;
3114
3115 ret = vdec_v4l_get_buffer(hevc->v4l2_ctx, &fb);
3116 if (ret < 0) {
3117 hevc_print(hevc, 0, "[%d] H265 get buffer fail.\n",
3118 ((struct aml_vcodec_ctx *)(hevc->v4l2_ctx))->id);
3119 return ret;
3120 }
3121
3122 if (hevc->mmu_enable) {
3123 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3124 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3125 hevc->m_BUF[i].header_size =
3126 ALIGN(MMU_COMPRESS_8K_HEADER_SIZE, 0x10000);
3127 else
3128 hevc->m_BUF[i].header_size =
3129 ALIGN(MMU_COMPRESS_HEADER_SIZE, 0x10000);
3130
3131 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
3132 VF_BUFFER_IDX(i), hevc->m_BUF[i].header_size,
3133 DRIVER_NAME, &hevc->m_BUF[i].header_addr);
3134 if (ret < 0) {
3135 hevc_print(hevc, PRINT_FLAG_ERROR,
3136 "%s[%d], header size: %d, no mem fatal err\n",
3137 __func__, i, hevc->m_BUF[i].header_size);
3138 return ret;
3139 }
3140 }
3141
3142 hevc->m_BUF[i].used_flag = 0;
3143 hevc->m_BUF[i].v4l_ref_buf_addr = (ulong)fb;
3144 pic->cma_alloc_addr = hevc->m_BUF[i].v4l_ref_buf_addr;
3145 if (fb->num_planes == 1) {
3146 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3147 hevc->m_BUF[i].luma_size = fb->m.mem[0].offset;
3148 hevc->m_BUF[i].size = fb->m.mem[0].size;
3149 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3150 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3151 pic->dw_u_v_adr = pic->dw_y_adr + hevc->m_BUF[i].luma_size;
3152 } else if (fb->num_planes == 2) {
3153 hevc->m_BUF[i].start_adr = fb->m.mem[0].addr;
3154 hevc->m_BUF[i].luma_size = fb->m.mem[0].size;
3155 hevc->m_BUF[i].chroma_addr = fb->m.mem[1].addr;
3156 hevc->m_BUF[i].chroma_size = fb->m.mem[1].size;
3157 hevc->m_BUF[i].size = fb->m.mem[0].size + fb->m.mem[1].size;
3158 fb->m.mem[0].bytes_used = fb->m.mem[0].size;
3159 fb->m.mem[1].bytes_used = fb->m.mem[1].size;
3160 pic->dw_y_adr = hevc->m_BUF[i].start_adr;
3161 pic->dw_u_v_adr = hevc->m_BUF[i].chroma_addr;
3162 }
3163
3164 return ret;
3165 }
3166
static int alloc_buf(struct hevc_state_s *hevc)
3168 {
3169 int i;
3170 int ret = -1;
3171 int buf_size = cal_current_buf_size(hevc, NULL);
3172
3173 if (hevc->fatal_error & DECODER_FATAL_ERROR_NO_MEM)
3174 return ret;
3175
3176 for (i = 0; i < BUF_POOL_SIZE; i++) {
3177 if (hevc->m_BUF[i].start_adr == 0)
3178 break;
3179 }
3180 if (i < BUF_POOL_SIZE) {
3181 if (buf_size > 0) {
3182 ret = decoder_bmmu_box_alloc_buf_phy
3183 (hevc->bmmu_box,
3184 VF_BUFFER_IDX(i), buf_size,
3185 DRIVER_NAME,
3186 &hevc->m_BUF[i].start_adr);
3187 if (ret < 0) {
3188 hevc->m_BUF[i].start_adr = 0;
3189 if (i <= 8) {
3190 hevc->fatal_error |=
3191 DECODER_FATAL_ERROR_NO_MEM;
3192 hevc_print(hevc, PRINT_FLAG_ERROR,
3193 "%s[%d], size: %d, no mem fatal err\n",
3194 __func__, i, buf_size);
3195 }
3196 }
3197
3198 if (ret >= 0) {
3199 hevc->m_BUF[i].size = buf_size;
3200 hevc->m_BUF[i].used_flag = 0;
3201 ret = 0;
3202
3203 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3204 hevc_print(hevc, 0,
3205 "Buffer %d: start_adr %p size %x\n",
3206 i,
3207 (void *)hevc->m_BUF[i].start_adr,
3208 hevc->m_BUF[i].size);
3209 }
3210 /*flush the buffer make sure no cache dirty*/
3211 if (!vdec_secure(hw_to_vdec(hevc)) && (hevc->m_BUF[i].start_adr)) {
3212 void *mem_start_virt;
3213 mem_start_virt =
3214 codec_mm_phys_to_virt(hevc->m_BUF[i].start_adr);
3215 if (mem_start_virt) {
3216 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3217 codec_mm_dma_flush(mem_start_virt,
3218 hevc->m_BUF[i].size, DMA_TO_DEVICE);
3219 } else {
3220 mem_start_virt = codec_mm_vmap(
3221 hevc->m_BUF[i].start_adr,
3222 hevc->m_BUF[i].size);
3223 if (mem_start_virt) {
3224 memset(mem_start_virt, 0, hevc->m_BUF[i].size);
3225 codec_mm_dma_flush(mem_start_virt,
3226 hevc->m_BUF[i].size,
3227 DMA_TO_DEVICE);
3228 codec_mm_unmap_phyaddr(mem_start_virt);
3229 } else {
							/* no virtual address for TVP playback;
							 * may need to be cleared by ucode.
							 */
3232 pr_err("ref %s mem_start_virt failed\n", __func__);
3233 }
3234 }
3235 }
3236 }
3237 /*put_cma_alloc_ref();*/ /*DEBUG_TMP*/
3238 } else
3239 ret = 0;
3240 }
3241
3242 if (ret >= 0) {
3243 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3244 hevc_print(hevc, 0,
3245 "alloc buf(%d) for %d/%d size 0x%x) => %p\n",
3246 i, hevc->pic_w, hevc->pic_h,
3247 buf_size,
3248 hevc->m_BUF[i].start_adr);
3249 }
3250 } else {
3251 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3252 hevc_print(hevc, 0,
3253 "alloc buf(%d) for %d/%d size 0x%x) => Fail!!!\n",
3254 i, hevc->pic_w, hevc->pic_h,
3255 buf_size);
3256 }
3257 }
3258 return ret;
3259 }
3260
static void set_buf_unused(struct hevc_state_s *hevc, int i)
3262 {
3263 if (i >= 0 && i < BUF_POOL_SIZE)
3264 hevc->m_BUF[i].used_flag = 0;
3265 }
3266
static void dealloc_unused_buf(struct hevc_state_s *hevc)
3268 {
3269 int i;
3270 for (i = 0; i < BUF_POOL_SIZE; i++) {
3271 if (hevc->m_BUF[i].start_adr &&
3272 hevc->m_BUF[i].used_flag == 0) {
3273 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3274 hevc_print(hevc, 0,
3275 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3276 i, hevc->m_BUF[i].start_adr,
3277 hevc->m_BUF[i].size);
3278 }
3279 if (!hevc->is_used_v4l)
3280 decoder_bmmu_box_free_idx(
3281 hevc->bmmu_box,
3282 VF_BUFFER_IDX(i));
3283 hevc->m_BUF[i].start_adr = 0;
3284 hevc->m_BUF[i].size = 0;
3285 }
3286 }
3287 }
3288
static void dealloc_pic_buf(struct hevc_state_s *hevc,
3290 struct PIC_s *pic)
3291 {
3292 int i = pic->BUF_index;
3293 pic->BUF_index = -1;
3294 if (i >= 0 &&
3295 i < BUF_POOL_SIZE &&
3296 hevc->m_BUF[i].start_adr) {
3297 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3298 hevc_print(hevc, 0,
3299 "dealloc buf(%d) adr 0x%p size 0x%x\n",
3300 i, hevc->m_BUF[i].start_adr,
3301 hevc->m_BUF[i].size);
3302 }
3303
3304 if (!hevc->is_used_v4l)
3305 decoder_bmmu_box_free_idx(
3306 hevc->bmmu_box,
3307 VF_BUFFER_IDX(i));
3308 hevc->m_BUF[i].used_flag = 0;
3309 hevc->m_BUF[i].start_adr = 0;
3310 hevc->m_BUF[i].size = 0;
3311 }
3312 }
3313
static int get_work_pic_num(struct hevc_state_s *hevc)
3315 {
3316 int used_buf_num = 0;
3317 int sps_pic_buf_diff = 0;
3318
3319 if (get_dynamic_buf_num_margin(hevc) > 0) {
3320 if ((!hevc->sps_num_reorder_pics_0) &&
3321 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3322 /* the range of sps_num_reorder_pics_0 is in
3323 [0, sps_max_dec_pic_buffering_minus1_0] */
3324 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3325 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3326 } else
3327 used_buf_num = hevc->sps_num_reorder_pics_0
3328 + get_dynamic_buf_num_margin(hevc);
3329
3330 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3331 - hevc->sps_num_reorder_pics_0;
3332 #ifdef MULTI_INSTANCE_SUPPORT
		/*
		 * One more buffer is needed for multi-instance mode, as
		 * apply_ref_pic_set() has no chance to run and clear the
		 * referenced flag in some cases.
		 */
3338 if (hevc->m_ins_flag)
3339 used_buf_num++;
3340 #endif
3341 } else
3342 used_buf_num = max_buf_num;
3343
3344 if (hevc->save_buffer_mode)
3345 hevc_print(hevc, 0,
3346 "save buf _mode : dynamic_buf_num_margin %d ----> %d \n",
3347 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3348
3349 if (sps_pic_buf_diff >= 4)
3350 used_buf_num += sps_pic_buf_diff;
3351
3352 if (hevc->is_used_v4l) {
3353 /* for eos add more buffer to flush.*/
3354 used_buf_num++;
3355 }
3356
3357 if (used_buf_num > MAX_BUF_NUM)
3358 used_buf_num = MAX_BUF_NUM;
3359 return used_buf_num;
3360 }
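
/*
 * Sizing example (values assumed for illustration): with a dynamic buffer
 * margin of 7, sps_num_reorder_pics_0 == 4 and
 * sps_max_dec_pic_buffering_minus1_0 == 5, used_buf_num = 4 + 7 = 11, plus
 * one in multi-instance mode and one more for v4l EOS flushing;
 * sps_pic_buf_diff = 5 - 4 = 1 is below the threshold of 4, so nothing
 * further is added, and the result is finally clamped to MAX_BUF_NUM.
 */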
3361
static int v4l_parser_work_pic_num(struct hevc_state_s *hevc)
3363 {
3364 int used_buf_num = 0;
3365 int sps_pic_buf_diff = 0;
3366 pr_debug("margin = %d, sps_max_dec_pic_buffering_minus1_0 = %d, sps_num_reorder_pics_0 = %d\n",
3367 get_dynamic_buf_num_margin(hevc),
3368 hevc->param.p.sps_max_dec_pic_buffering_minus1_0,
3369 hevc->param.p.sps_num_reorder_pics_0);
3370 if (get_dynamic_buf_num_margin(hevc) > 0) {
3371 if ((!hevc->param.p.sps_num_reorder_pics_0) &&
3372 (hevc->param.p.sps_max_dec_pic_buffering_minus1_0)) {
3373 /* the range of sps_num_reorder_pics_0 is in
3374 [0, sps_max_dec_pic_buffering_minus1_0] */
3375 used_buf_num = get_dynamic_buf_num_margin(hevc) +
3376 hevc->param.p.sps_max_dec_pic_buffering_minus1_0;
3377 } else
3378 used_buf_num = hevc->param.p.sps_num_reorder_pics_0
3379 + get_dynamic_buf_num_margin(hevc);
3380
3381 sps_pic_buf_diff = hevc->param.p.sps_max_dec_pic_buffering_minus1_0
3382 - hevc->param.p.sps_num_reorder_pics_0;
3383 #ifdef MULTI_INSTANCE_SUPPORT
		/*
		 * One more buffer is needed for multi-instance mode, as
		 * apply_ref_pic_set() has no chance to run and clear the
		 * referenced flag in some cases.
		 */
3389 if (hevc->m_ins_flag)
3390 used_buf_num++;
3391 #endif
3392 } else
3393 used_buf_num = max_buf_num;
3394
3395 if (hevc->save_buffer_mode)
3396 hevc_print(hevc, 0,
3397 "save buf _mode : dynamic_buf_num_margin %d ----> %d \n",
3398 dynamic_buf_num_margin, hevc->dynamic_buf_num_margin);
3399
3400 if (sps_pic_buf_diff >= 4)
3401 {
3402 used_buf_num += 1;
3403 }
3404
3405 /* for eos add more buffer to flush.*/
3406 used_buf_num++;
3407
3408 if (used_buf_num > MAX_BUF_NUM)
3409 used_buf_num = MAX_BUF_NUM;
3410 return used_buf_num;
3411 }
3412
3413
static int get_alloc_pic_count(struct hevc_state_s *hevc)
3415 {
3416 int alloc_pic_count = 0;
3417 int i;
3418 struct PIC_s *pic;
3419 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3420 pic = hevc->m_PIC[i];
3421 if (pic && pic->index >= 0)
3422 alloc_pic_count++;
3423 }
3424 return alloc_pic_count;
3425 }
3426
static int v4l_config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3428 {
3429 int i = pic->index;
3430 int dw_mode = get_double_write_mode(hevc);
3431
3432 if (hevc->mmu_enable)
3433 pic->header_adr = hevc->m_BUF[i].header_addr;
3434
3435 pic->BUF_index = i;
3436 pic->POC = INVALID_POC;
3437 pic->mc_canvas_y = pic->index;
3438 pic->mc_canvas_u_v = pic->index;
3439
3440 if (dw_mode & 0x10) {
3441 pic->mc_canvas_y = (pic->index << 1);
3442 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3443 pic->mc_y_adr = pic->dw_y_adr;
3444 pic->mc_u_v_adr = pic->dw_u_v_adr;
3445 }
3446
3447 return 0;
3448 }
3449
static int config_pic(struct hevc_state_s *hevc, struct PIC_s *pic)
3451 {
3452 int ret = -1;
3453 int i;
3454 /*int lcu_size_log2 = hevc->lcu_size_log2;
3455 int MV_MEM_UNIT=lcu_size_log2==
3456 6 ? 0x100 : lcu_size_log2==5 ? 0x40 : 0x10;*/
3457 /*int MV_MEM_UNIT = lcu_size_log2 == 6 ? 0x200 : lcu_size_log2 ==
3458 5 ? 0x80 : 0x20;
3459 int mpred_mv_end = hevc->work_space_buf->mpred_mv.buf_start +
3460 hevc->work_space_buf->mpred_mv.buf_size;*/
3461 unsigned int y_adr = 0;
3462 struct buf_stru_s buf_stru;
3463 int buf_size = cal_current_buf_size(hevc, &buf_stru);
3464 int dw_mode = get_double_write_mode(hevc);
3465
3466 for (i = 0; i < BUF_POOL_SIZE; i++) {
3467 if (hevc->m_BUF[i].start_adr != 0 &&
3468 hevc->m_BUF[i].used_flag == 0 &&
3469 buf_size <= hevc->m_BUF[i].size) {
3470 hevc->m_BUF[i].used_flag = 1;
3471 break;
3472 }
3473 }
3474
3475 if (i >= BUF_POOL_SIZE)
3476 return -1;
3477
3478 if (hevc->mmu_enable) {
3479 pic->header_adr = hevc->m_BUF[i].start_adr;
3480 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) &&
3481 (IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
3482 y_adr = hevc->m_BUF[i].start_adr +
3483 MMU_COMPRESS_8K_HEADER_SIZE;
3484 else
3485 y_adr = hevc->m_BUF[i].start_adr +
3486 MMU_COMPRESS_HEADER_SIZE;
3487 } else
3488 y_adr = hevc->m_BUF[i].start_adr;
3489
3490 y_adr = ((y_adr + 0xffff) >> 16) << 16; /*64k alignment*/
3491
3492 pic->POC = INVALID_POC;
	/* ensure that get_pic_by_POC() does not return a buffer
	 * that has not been decoded yet
	 */
3495 pic->BUF_index = i;
3496
3497 if ((!hevc->mmu_enable) &&
3498 ((dw_mode & 0x10) == 0)
3499 ) {
3500 pic->mc_y_adr = y_adr;
3501 y_adr += (buf_stru.mc_buffer_size_h << 16);
3502 }
3503 pic->mc_canvas_y = pic->index;
3504 pic->mc_canvas_u_v = pic->index;
3505 if (dw_mode & 0x10) {
3506 pic->mc_y_adr = y_adr;
3507 pic->mc_u_v_adr = y_adr +
3508 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3509 pic->mc_canvas_y = (pic->index << 1);
3510 pic->mc_canvas_u_v = (pic->index << 1) + 1;
3511
3512 pic->dw_y_adr = pic->mc_y_adr;
3513 pic->dw_u_v_adr = pic->mc_u_v_adr;
3514 } else if (dw_mode) {
3515 pic->dw_y_adr = y_adr;
3516 pic->dw_u_v_adr = pic->dw_y_adr +
3517 ((buf_stru.mc_buffer_size_u_v_h << 16) << 1);
3518 }
3519
3520 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3521 hevc_print(hevc, 0,
3522 "%s index %d BUF_index %d mc_y_adr %x\n",
3523 __func__, pic->index,
3524 pic->BUF_index, pic->mc_y_adr);
3525 if (hevc->mmu_enable &&
3526 dw_mode)
3527 hevc_print(hevc, 0,
3528 "mmu double write adr %ld\n",
3529 pic->cma_alloc_addr);
3530 }
3531 ret = 0;
3532
3533 return ret;
3534 }
3535
static void init_pic_list(struct hevc_state_s *hevc)
3537 {
3538 int i;
3539 int init_buf_num = get_work_pic_num(hevc);
3540 int dw_mode = get_double_write_mode(hevc);
3541 struct vdec_s *vdec = hw_to_vdec(hevc);
	/* Decoder buffer allocation is deferred when working in V4L mode. */
3543 if (!hevc->is_used_v4l) {
3544 for (i = 0; i < init_buf_num; i++) {
3545 if (alloc_buf(hevc) < 0) {
3546 if (i <= 8) {
					/* if (i + 1) >= 9 buffers were already
					 * allocated, do not report an error.
					 */
3549 hevc->fatal_error |=
3550 DECODER_FATAL_ERROR_NO_MEM;
3551 }
3552 break;
3553 }
3554 }
3555 }
3556
3557 for (i = 0; i < init_buf_num; i++) {
3558 struct PIC_s *pic = hevc->m_PIC[i];
3559
3560 if (!pic) {
3561 pic = vmalloc(sizeof(struct PIC_s));
3562 if (pic == NULL) {
3563 hevc_print(hevc, 0,
3564 "%s: alloc pic %d fail!!!\n",
3565 __func__, i);
3566 break;
3567 }
3568 hevc->m_PIC[i] = pic;
3569 }
3570 memset(pic, 0, sizeof(struct PIC_s));
3571
3572 pic->index = i;
3573 pic->BUF_index = -1;
3574 pic->mv_buf_index = -1;
3575 if (vdec->parallel_dec == 1) {
3576 pic->y_canvas_index = -1;
3577 pic->uv_canvas_index = -1;
3578 }
3579
3580 pic->width = hevc->pic_w;
3581 pic->height = hevc->pic_h;
3582 pic->double_write_mode = dw_mode;
3583
		/* Canvas configuration is deferred when working in V4L mode. */
3585 if (!hevc->is_used_v4l) {
3586 if (config_pic(hevc, pic) < 0) {
3587 if (get_dbg_flag(hevc))
3588 hevc_print(hevc, 0,
3589 "Config_pic %d fail\n", pic->index);
3590 pic->index = -1;
3591 i++;
3592 break;
3593 }
3594
3595 if (pic->double_write_mode)
3596 set_canvas(hevc, pic);
3597 }
3598 }
3599 }
3600
static void uninit_pic_list(struct hevc_state_s *hevc)
3602 {
3603 struct vdec_s *vdec = hw_to_vdec(hevc);
3604 int i;
3605 #ifndef MV_USE_FIXED_BUF
3606 dealloc_mv_bufs(hevc);
3607 #endif
3608 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3609 struct PIC_s *pic = hevc->m_PIC[i];
3610
3611 if (pic) {
3612 if (vdec->parallel_dec == 1) {
3613 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
3614 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
3615 }
3616 release_aux_data(hevc, pic);
3617 vfree(pic);
3618 hevc->m_PIC[i] = NULL;
3619 }
3620 }
3621 }
3622
3623 #ifdef LOSLESS_COMPRESS_MODE
static void init_decode_head_hw(struct hevc_state_s *hevc)
3625 {
3626
3627 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
3628 unsigned int data32;
3629
3630 int losless_comp_header_size =
3631 compute_losless_comp_header_size(hevc->pic_w,
3632 hevc->pic_h);
3633 int losless_comp_body_size = compute_losless_comp_body_size(hevc,
3634 hevc->pic_w, hevc->pic_h, hevc->mem_saving_mode);
3635
3636 hevc->losless_comp_body_size = losless_comp_body_size;
3637
3638
3639 if (hevc->mmu_enable) {
3640 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1, (0x1 << 4));
3641 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, 0x0);
3642 } else {
3643 if (hevc->mem_saving_mode == 1)
3644 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3645 (1 << 3) | ((workaround_enable & 2) ? 1 : 0));
3646 else
3647 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
3648 ((workaround_enable & 2) ? 1 : 0));
3649 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2, (losless_comp_body_size >> 5));
3650 /*
3651 *WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);
3652 * //8-bit mode
3653 */
3654 }
3655 WRITE_VREG(HEVC_CM_BODY_LENGTH, losless_comp_body_size);
3656 WRITE_VREG(HEVC_CM_HEADER_OFFSET, losless_comp_body_size);
3657 WRITE_VREG(HEVC_CM_HEADER_LENGTH, losless_comp_header_size);
3658
3659 if (hevc->mmu_enable) {
3660 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR, buf_spec->mmu_vbh.buf_start);
3661 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR,
3662 buf_spec->mmu_vbh.buf_start +
3663 buf_spec->mmu_vbh.buf_size/2);
3664 data32 = READ_VREG(HEVC_SAO_CTRL9);
3665 data32 |= 0x1;
3666 WRITE_VREG(HEVC_SAO_CTRL9, data32);
3667
3668 /* use HEVC_CM_HEADER_START_ADDR */
3669 data32 = READ_VREG(HEVC_SAO_CTRL5);
3670 data32 |= (1<<10);
3671 WRITE_VREG(HEVC_SAO_CTRL5, data32);
3672 }
3673
3674 if (!hevc->m_ins_flag)
3675 hevc_print(hevc, 0,
3676 "%s: (%d, %d) body_size 0x%x header_size 0x%x\n",
3677 __func__, hevc->pic_w, hevc->pic_h,
3678 losless_comp_body_size, losless_comp_header_size);
3679
3680 }
3681 #endif
3682 #define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
3683
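/*
 * Load the per-picture buffer addresses into the decoder's ANC2AXI table:
 * the compressed header address (MMU mode) or luma address, plus the
 * chroma address when double-write-only mode is selected, then clear the
 * IPP canvas table and, when compressed output is in use, program the
 * decode head.
 */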
3684 static void init_pic_list_hw(struct hevc_state_s *hevc)
3685 {
3686 int i;
3687 int cur_pic_num = MAX_REF_PIC_NUM;
3688 int dw_mode = get_double_write_mode(hevc);
3689 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL)
3690 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR,
3691 (0x1 << 1) | (0x1 << 2));
3692 else
3693 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);
3694
3695 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3696 if (hevc->m_PIC[i] == NULL ||
3697 hevc->m_PIC[i]->index == -1) {
3698 cur_pic_num = i;
3699 break;
3700 }
3701 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3702 if (hevc->mmu_enable && ((dw_mode & 0x10) == 0))
3703 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3704 hevc->m_PIC[i]->header_adr>>5);
3705 else
3706 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3707 hevc->m_PIC[i]->mc_y_adr >> 5);
3708 } else
3709 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3710 hevc->m_PIC[i]->mc_y_adr |
3711 (hevc->m_PIC[i]->mc_canvas_y << 8) | 0x1);
3712 if (dw_mode & 0x10) {
3713 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL) {
3714 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA,
3715 hevc->m_PIC[i]->mc_u_v_adr >> 5);
3716 }
3717 else
3718 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
3719 hevc->m_PIC[i]->mc_u_v_adr |
3720 (hevc->m_PIC[i]->mc_canvas_u_v << 8)
3721 | 0x1);
3722 }
3723 }
3724 if (cur_pic_num == 0)
3725 return;
3726
3727 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x1);
3728
3729 /* Zero out canvas registers in IPP -- avoid simulation X */
3730 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3731 (0 << 8) | (0 << 1) | 1);
3732 for (i = 0; i < 32; i++)
3733 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR, 0);
3734
3735 #ifdef LOSLESS_COMPRESS_MODE
3736 if ((dw_mode & 0x10) == 0)
3737 init_decode_head_hw(hevc);
3738 #endif
3739
3740 }
3741
3742
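/*
 * Debug helper: print index, POC, reference/output state and geometry of
 * every allocated picture in m_PIC[].
 */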
3743 static void dump_pic_list(struct hevc_state_s *hevc)
3744 {
3745 int i;
3746 struct PIC_s *pic;
3747
3748 hevc_print(hevc, 0,
3749 "pic_list_init_flag is %d\r\n", hevc->pic_list_init_flag);
3750 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3751 pic = hevc->m_PIC[i];
3752 if (pic == NULL || pic->index == -1)
3753 continue;
3754 hevc_print_cont(hevc, 0,
3755 "index %d buf_idx %d mv_idx %d decode_idx:%d, POC:%d, referenced:%d, ",
3756 pic->index, pic->BUF_index,
3757 #ifndef MV_USE_FIXED_BUF
3758 pic->mv_buf_index,
3759 #else
3760 -1,
3761 #endif
3762 pic->decode_idx, pic->POC, pic->referenced);
3763 hevc_print_cont(hevc, 0,
3764 "num_reorder_pic:%d, output_mark:%d, error_mark:%d w/h %d,%d",
3765 pic->num_reorder_pic, pic->output_mark, pic->error_mark,
3766 pic->width, pic->height);
3767 hevc_print_cont(hevc, 0,
3768 "output_ready:%d, mv_wr_start %x vf_ref %d\n",
3769 pic->output_ready, pic->mpred_mv_wr_start_addr,
3770 pic->vf_ref);
3771 }
3772 }
3773
3774 static void clear_referenced_flag(struct hevc_state_s *hevc)
3775 {
3776 int i;
3777 struct PIC_s *pic;
3778 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3779 pic = hevc->m_PIC[i];
3780 if (pic == NULL || pic->index == -1)
3781 continue;
3782 if (pic->referenced) {
3783 pic->referenced = 0;
3784 put_mv_buf(hevc, pic);
3785 }
3786 }
3787 }
3788
3789 static void clear_poc_flag(struct hevc_state_s *hevc)
3790 {
3791 int i;
3792 struct PIC_s *pic;
3793 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3794 pic = hevc->m_PIC[i];
3795 if (pic == NULL || pic->index == -1)
3796 continue;
3797 pic->POC = INVALID_POC;
3798 }
3799 }
3800
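/*
 * Select the next picture to hand to the display path.  In I-only trick
 * mode (i_only & 0x4) the lowest decode_idx with output_mark wins;
 * otherwise the lowest POC wins, but it is only released once more than
 * num_reorder_pic undisplayed pictures are queued (or on flush), with a
 * fast path for the first I picture and extra gating for dual-layer
 * (DV master/slave) streams.
 */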
3801 static struct PIC_s *output_pic(struct hevc_state_s *hevc,
3802 unsigned char flush_flag)
3803 {
3804 int num_pic_not_yet_display = 0;
3805 int i, fisrt_pic_flag = 0;
3806 struct PIC_s *pic;
3807 struct PIC_s *pic_display = NULL;
3808 struct vdec_s *vdec = hw_to_vdec(hevc);
3809
3810 if (hevc->i_only & 0x4) {
3811 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3812 pic = hevc->m_PIC[i];
3813 if (pic == NULL ||
3814 (pic->index == -1) ||
3815 (pic->BUF_index == -1) ||
3816 (pic->POC == INVALID_POC))
3817 continue;
3818 if (pic->output_mark) {
3819 if (pic_display) {
3820 if (pic->decode_idx <
3821 pic_display->decode_idx)
3822 pic_display = pic;
3823
3824 } else
3825 pic_display = pic;
3826
3827 }
3828 }
3829 if (pic_display) {
3830 pic_display->output_mark = 0;
3831 pic_display->recon_mark = 0;
3832 pic_display->output_ready = 1;
3833 pic_display->referenced = 0;
3834 put_mv_buf(hevc, pic_display);
3835 }
3836 } else {
3837 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3838 pic = hevc->m_PIC[i];
3839 if (pic == NULL ||
3840 (pic->index == -1) ||
3841 (pic->BUF_index == -1) ||
3842 (pic->POC == INVALID_POC))
3843 continue;
3844 if (pic->output_mark)
3845 num_pic_not_yet_display++;
3846 if (pic->slice_type == 2 &&
3847 hevc->vf_pre_count == 0 &&
3848 fast_output_enable & 0x1) {
3849 /*fast output for first I picture*/
3850 pic->num_reorder_pic = 0;
3851 if (vdec->master || vdec->slave)
3852 pic_display = pic;
3853 fisrt_pic_flag = 1;
3854 hevc_print(hevc, 0, "VH265: output first frame\n");
3855 }
3856 }
3857
3858 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
3859 pic = hevc->m_PIC[i];
3860 if (pic == NULL ||
3861 (pic->index == -1) ||
3862 (pic->BUF_index == -1) ||
3863 (pic->POC == INVALID_POC))
3864 continue;
3865 if (pic->output_mark) {
3866 if (pic_display) {
3867 if (pic->POC < pic_display->POC)
3868 pic_display = pic;
3869 else if ((pic->POC == pic_display->POC)
3870 && (pic->decode_idx <
3871 pic_display->
3872 decode_idx))
3873 pic_display
3874 = pic;
3875
3876 } else
3877 pic_display = pic;
3878
3879 }
3880 }
3881 /* For DV, wait until cur_pic has received all of its data;
3882 some data may still arrive after the picture is output. */
3883 if ((vdec->master || vdec->slave)
3884 && (pic_display == hevc->cur_pic) && (!flush_flag)
3885 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
3886 && (hevc->bypass_dvenl && !dolby_meta_with_el)
3887 #endif
3888 && (!fisrt_pic_flag))
3889 pic_display = NULL;
3890
3891 if (pic_display) {
3892 if ((num_pic_not_yet_display >
3893 pic_display->num_reorder_pic)
3894 || flush_flag) {
3895 pic_display->output_mark = 0;
3896 pic_display->recon_mark = 0;
3897 pic_display->output_ready = 1;
3898 } else if (num_pic_not_yet_display >=
3899 (MAX_REF_PIC_NUM - 1)) {
3900 pic_display->output_mark = 0;
3901 pic_display->recon_mark = 0;
3902 pic_display->output_ready = 1;
3903 hevc_print(hevc, 0,
3904 "Warning, num_reorder_pic %d is byeond buf num\n",
3905 pic_display->num_reorder_pic);
3906 } else
3907 pic_display = NULL;
3908 }
3909 }
3910
3911 if (pic_display && hevc->sps_num_reorder_pics_0 &&
3912 (hevc->vf_pre_count == 1) && (hevc->first_pic_flag == 1)) {
3913 pic_display = NULL;
3914 hevc->first_pic_flag = 2;
3915 }
3916 return pic_display;
3917 }
3918
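/*
 * Write the reference-picture canvas table for the current slice: list0
 * for P/B slices and list1 for B slices, looked up by POC.  A missing or
 * mismatched-size reference marks the current picture as in error; an
 * error-marked reference propagates error_mark only when
 * ref_frame_mark_flag is set for this instance.
 */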
3919 static int config_mc_buffer(struct hevc_state_s *hevc, struct PIC_s *cur_pic)
3920 {
3921 int i;
3922 struct PIC_s *pic;
3923
3924 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3925 hevc_print(hevc, 0,
3926 "config_mc_buffer entered .....\n");
3927 if (cur_pic->slice_type != 2) { /* P and B pic */
3928 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3929 (0 << 8) | (0 << 1) | 1);
3930 for (i = 0; i < cur_pic->RefNum_L0; i++) {
3931 pic =
3932 get_ref_pic_by_POC(hevc,
3933 cur_pic->
3934 m_aiRefPOCList0[cur_pic->
3935 slice_idx][i]);
3936 if (pic) {
3937 if ((pic->width != hevc->pic_w) ||
3938 (pic->height != hevc->pic_h)) {
3939 hevc_print(hevc, 0,
3940 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3941 __func__, pic->POC,
3942 pic->width, pic->height);
3943 cur_pic->error_mark = 1;
3944 }
3945 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3946 cur_pic->error_mark = 1;
3947 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
3948 (pic->mc_canvas_u_v << 16)
3949 | (pic->mc_canvas_u_v
3950 << 8) |
3951 pic->mc_canvas_y);
3952 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
3953 hevc_print_cont(hevc, 0,
3954 "refid %x mc_canvas_u_v %x",
3955 i, pic->mc_canvas_u_v);
3956 hevc_print_cont(hevc, 0,
3957 " mc_canvas_y %x\n",
3958 pic->mc_canvas_y);
3959 }
3960 } else
3961 cur_pic->error_mark = 1;
3962
3963 if (pic == NULL || pic->error_mark) {
3964 hevc_print(hevc, 0,
3965 "Error %s, %dth poc (%d) %s",
3966 __func__, i,
3967 cur_pic->m_aiRefPOCList0[cur_pic->
3968 slice_idx][i],
3969 pic ? "has error" :
3970 "not in list0");
3971 }
3972 }
3973 }
3974 if (cur_pic->slice_type == 0) { /* B pic */
3975 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
3976 hevc_print(hevc, 0,
3977 "config_mc_buffer RefNum_L1\n");
3978 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
3979 (16 << 8) | (0 << 1) | 1);
3980
3981 for (i = 0; i < cur_pic->RefNum_L1; i++) {
3982 pic =
3983 get_ref_pic_by_POC(hevc,
3984 cur_pic->
3985 m_aiRefPOCList1[cur_pic->
3986 slice_idx][i]);
3987 if (pic) {
3988 if ((pic->width != hevc->pic_w) ||
3989 (pic->height != hevc->pic_h)) {
3990 hevc_print(hevc, 0,
3991 "%s: Wrong reference pic (poc %d) width/height %d/%d\n",
3992 __func__, pic->POC,
3993 pic->width, pic->height);
3994 cur_pic->error_mark = 1;
3995 }
3996
3997 if (pic->error_mark && (ref_frame_mark_flag[hevc->index]))
3998 cur_pic->error_mark = 1;
3999 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR,
4000 (pic->mc_canvas_u_v << 16)
4001 | (pic->mc_canvas_u_v
4002 << 8) |
4003 pic->mc_canvas_y);
4004 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4005 hevc_print_cont(hevc, 0,
4006 "refid %x mc_canvas_u_v %x",
4007 i, pic->mc_canvas_u_v);
4008 hevc_print_cont(hevc, 0,
4009 " mc_canvas_y %x\n",
4010 pic->mc_canvas_y);
4011 }
4012 } else
4013 cur_pic->error_mark = 1;
4014
4015 if (pic == NULL || pic->error_mark) {
4016 hevc_print(hevc, 0,
4017 "Error %s, %dth poc (%d) %s",
4018 __func__, i,
4019 cur_pic->m_aiRefPOCList1[cur_pic->
4020 slice_idx][i],
4021 pic ? "has error" :
4022 "not in list1");
4023 }
4024 }
4025 }
4026 return 0;
4027 }
4028
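/*
 * Walk the picture list and clear the 'referenced' flag (returning the MV
 * buffer) for every picture whose POC is no longer present in the current
 * short-term RPS.
 */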
4029 static void apply_ref_pic_set(struct hevc_state_s *hevc, int cur_poc,
4030 union param_u *params)
4031 {
4032 int ii, i;
4033 int poc_tmp;
4034 struct PIC_s *pic;
4035 unsigned char is_referenced;
4036 /* hevc_print(hevc, 0,
4037 "%s cur_poc %d\n", __func__, cur_poc); */
4038 if (pic_list_debug & 0x2) {
4039 pr_err("cur poc %d\n", cur_poc);
4040 }
4041 for (ii = 0; ii < MAX_REF_PIC_NUM; ii++) {
4042 pic = hevc->m_PIC[ii];
4043 if (pic == NULL ||
4044 pic->index == -1 ||
4045 pic->BUF_index == -1
4046 )
4047 continue;
4048
4049 if ((pic->referenced == 0 || pic->POC == cur_poc))
4050 continue;
4051 is_referenced = 0;
4052 for (i = 0; i < 16; i++) {
4053 int delt;
4054
4055 if (params->p.CUR_RPS[i] & 0x8000)
4056 break;
4057 delt =
4058 params->p.CUR_RPS[i] &
4059 ((1 << (RPS_USED_BIT - 1)) - 1);
4060 if (params->p.CUR_RPS[i] & (1 << (RPS_USED_BIT - 1))) {
4061 poc_tmp =
4062 cur_poc - ((1 << (RPS_USED_BIT - 1)) -
4063 delt);
4064 } else
4065 poc_tmp = cur_poc + delt;
4066 if (poc_tmp == pic->POC) {
4067 is_referenced = 1;
4068 /* hevc_print(hevc, 0, "i is %d\n", i); */
4069 break;
4070 }
4071 }
4072 if (is_referenced == 0) {
4073 pic->referenced = 0;
4074 put_mv_buf(hevc, pic);
4075 /* hevc_print(hevc, 0,
4076 "set poc %d reference to 0\n", pic->POC); */
4077 if (pic_list_debug & 0x2) {
4078 pr_err("set poc %d reference to 0\n", pic->POC);
4079 }
4080 }
4081 }
4082
4083 }
4084
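/*
 * Build the slice's L0/L1 reference POC lists from the RPS words parsed by
 * the ucode, honouring ref_pic_list_modification when signalled and
 * clamping the active reference counts to MAX_REF_ACTIVE.  From the
 * decoding below, each CUR_RPS[i] word appears to be laid out as:
 *   bit 15                       end-of-list marker
 *   bit RPS_USED_BIT             used-by-current-picture flag
 *   bit (RPS_USED_BIT - 1)       set for a negative (past) POC delta
 *   low (RPS_USED_BIT - 1) bits  delta, negative deltas being stored as
 *                                (1 << (RPS_USED_BIT - 1)) - delta
 * Negative deltas fill RefPicSetStCurr0, positive ones RefPicSetStCurr1.
 */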
4085 static void set_ref_pic_list(struct hevc_state_s *hevc, union param_u *params)
4086 {
4087 struct PIC_s *pic = hevc->cur_pic;
4088 int i, rIdx;
4089 int num_neg = 0;
4090 int num_pos = 0;
4091 int total_num;
4092 int num_ref_idx_l0_active =
4093 (params->p.num_ref_idx_l0_active >
4094 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4095 params->p.num_ref_idx_l0_active;
4096 int num_ref_idx_l1_active =
4097 (params->p.num_ref_idx_l1_active >
4098 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE :
4099 params->p.num_ref_idx_l1_active;
4100
4101 int RefPicSetStCurr0[16];
4102 int RefPicSetStCurr1[16];
4103
4104 for (i = 0; i < 16; i++) {
4105 RefPicSetStCurr0[i] = 0;
4106 RefPicSetStCurr1[i] = 0;
4107 pic->m_aiRefPOCList0[pic->slice_idx][i] = 0;
4108 pic->m_aiRefPOCList1[pic->slice_idx][i] = 0;
4109 }
4110 for (i = 0; i < 16; i++) {
4111 if (params->p.CUR_RPS[i] & 0x8000)
4112 break;
4113 if ((params->p.CUR_RPS[i] >> RPS_USED_BIT) & 1) {
4114 int delt =
4115 params->p.CUR_RPS[i] &
4116 ((1 << (RPS_USED_BIT - 1)) - 1);
4117
4118 if ((params->p.CUR_RPS[i] >> (RPS_USED_BIT - 1)) & 1) {
4119 RefPicSetStCurr0[num_neg] =
4120 pic->POC - ((1 << (RPS_USED_BIT - 1)) -
4121 delt);
4122 /* hevc_print(hevc, 0,
4123 * "RefPicSetStCurr0 %x %x %x\n",
4124 * RefPicSetStCurr0[num_neg], pic->POC,
4125 * (0x800-(params[i]&0x7ff)));
4126 */
4127 num_neg++;
4128 } else {
4129 RefPicSetStCurr1[num_pos] = pic->POC + delt;
4130 /* hevc_print(hevc, 0,
4131 * "RefPicSetStCurr1 %d\n",
4132 * RefPicSetStCurr1[num_pos]);
4133 */
4134 num_pos++;
4135 }
4136 }
4137 }
4138 total_num = num_neg + num_pos;
4139 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4140 hevc_print(hevc, 0,
4141 "%s: curpoc %d slice_type %d, total %d ",
4142 __func__, pic->POC, params->p.slice_type, total_num);
4143 hevc_print_cont(hevc, 0,
4144 "num_neg %d num_list0 %d num_list1 %d\n",
4145 num_neg, num_ref_idx_l0_active, num_ref_idx_l1_active);
4146 }
4147
4148 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4149 hevc_print(hevc, 0,
4150 "HEVC Stream buf start ");
4151 hevc_print_cont(hevc, 0,
4152 "%x end %x wr %x rd %x lev %x ctl %x intctl %x\n",
4153 READ_VREG(HEVC_STREAM_START_ADDR),
4154 READ_VREG(HEVC_STREAM_END_ADDR),
4155 READ_VREG(HEVC_STREAM_WR_PTR),
4156 READ_VREG(HEVC_STREAM_RD_PTR),
4157 READ_VREG(HEVC_STREAM_LEVEL),
4158 READ_VREG(HEVC_STREAM_FIFO_CTL),
4159 READ_VREG(HEVC_PARSER_INT_CONTROL));
4160 }
4161
4162 if (total_num > 0) {
4163 if (params->p.modification_flag & 0x1) {
4164 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4165 hevc_print(hevc, 0, "ref0 POC (modification):");
4166 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4167 int cIdx = params->p.modification_list[rIdx];
4168
4169 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4170 cIdx >=
4171 num_neg ? RefPicSetStCurr1[cIdx -
4172 num_neg] :
4173 RefPicSetStCurr0[cIdx];
4174 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4175 hevc_print_cont(hevc, 0, "%d ",
4176 pic->m_aiRefPOCList0[pic->
4177 slice_idx]
4178 [rIdx]);
4179 }
4180 }
4181 } else {
4182 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4183 hevc_print(hevc, 0, "ref0 POC:");
4184 for (rIdx = 0; rIdx < num_ref_idx_l0_active; rIdx++) {
4185 int cIdx = rIdx % total_num;
4186
4187 pic->m_aiRefPOCList0[pic->slice_idx][rIdx] =
4188 cIdx >=
4189 num_neg ? RefPicSetStCurr1[cIdx -
4190 num_neg] :
4191 RefPicSetStCurr0[cIdx];
4192 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4193 hevc_print_cont(hevc, 0, "%d ",
4194 pic->m_aiRefPOCList0[pic->
4195 slice_idx]
4196 [rIdx]);
4197 }
4198 }
4199 }
4200 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4201 hevc_print_cont(hevc, 0, "\n");
4202 if (params->p.slice_type == B_SLICE) {
4203 if (params->p.modification_flag & 0x2) {
4204 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4205 hevc_print(hevc, 0,
4206 "ref1 POC (modification):");
4207 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4208 rIdx++) {
4209 int cIdx;
4210
4211 if (params->p.modification_flag & 0x1) {
4212 cIdx =
4213 params->p.
4214 modification_list
4215 [num_ref_idx_l0_active +
4216 rIdx];
4217 } else {
4218 cIdx =
4219 params->p.
4220 modification_list[rIdx];
4221 }
4222 pic->m_aiRefPOCList1[pic->
4223 slice_idx][rIdx] =
4224 cIdx >=
4225 num_pos ?
4226 RefPicSetStCurr0[cIdx - num_pos]
4227 : RefPicSetStCurr1[cIdx];
4228 if (get_dbg_flag(hevc) &
4229 H265_DEBUG_BUFMGR) {
4230 hevc_print_cont(hevc, 0, "%d ",
4231 pic->
4232 m_aiRefPOCList1[pic->
4233 slice_idx]
4234 [rIdx]);
4235 }
4236 }
4237 } else {
4238 if (get_dbg_flag(hevc) &
4239 H265_DEBUG_BUFMGR)
4240 hevc_print(hevc, 0, "ref1 POC:");
4241 for (rIdx = 0; rIdx < num_ref_idx_l1_active;
4242 rIdx++) {
4243 int cIdx = rIdx % total_num;
4244
4245 pic->m_aiRefPOCList1[pic->
4246 slice_idx][rIdx] =
4247 cIdx >=
4248 num_pos ?
4249 RefPicSetStCurr0[cIdx -
4250 num_pos]
4251 : RefPicSetStCurr1[cIdx];
4252 if (get_dbg_flag(hevc) &
4253 H265_DEBUG_BUFMGR) {
4254 hevc_print_cont(hevc, 0, "%d ",
4255 pic->
4256 m_aiRefPOCList1[pic->
4257 slice_idx]
4258 [rIdx]);
4259 }
4260 }
4261 }
4262 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4263 hevc_print_cont(hevc, 0, "\n");
4264 }
4265 }
4266 /*set m_PIC */
4267 pic->slice_type = (params->p.slice_type == I_SLICE) ? 2 :
4268 (params->p.slice_type == P_SLICE) ? 1 :
4269 (params->p.slice_type == B_SLICE) ? 0 : 3;
4270 pic->RefNum_L0 = num_ref_idx_l0_active;
4271 pic->RefNum_L1 = num_ref_idx_l1_active;
4272 }
4273
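/*
 * Recompute the tile map for the current picture: per-tile start CU,
 * width/height (uniform or explicitly signalled spacing) and the SAO
 * above/vertical work-buffer addresses, falling back to a single tile
 * when tiles are disabled.  Out-of-range tile counts are clamped to 1.
 * On GXM and earlier with is_swap set it also decides whether the
 * delrefill workaround (DETREFILL_ENABLE) applies.
 */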
4274 static void update_tile_info(struct hevc_state_s *hevc, int pic_width_cu,
4275 int pic_height_cu, int sao_mem_unit,
4276 union param_u *params)
4277 {
4278 int i, j;
4279 int start_cu_x, start_cu_y;
4280 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
4281 int sao_abv_size = sao_mem_unit * pic_width_cu;
4282 #ifdef DETREFILL_ENABLE
4283 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
4284 int tmpRefillLcuSize = 1 <<
4285 (params->p.log2_min_coding_block_size_minus3 +
4286 3 + params->p.log2_diff_max_min_coding_block_size);
4287 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4288 "%x, %x, %x, %x\n",
4289 params->p.slice_segment_address,
4290 params->p.bit_depth,
4291 params->p.tiles_enabled_flag,
4292 tmpRefillLcuSize);
4293 if (params->p.slice_segment_address == 0 &&
4294 params->p.bit_depth != 0 &&
4295 (params->p.tiles_enabled_flag & 1) &&
4296 tmpRefillLcuSize == 64)
4297 hevc->delrefill_check = 1;
4298 else
4299 hevc->delrefill_check = 0;
4300 }
4301 #endif
4302
4303 hevc->tile_enabled = params->p.tiles_enabled_flag & 1;
4304 if (params->p.tiles_enabled_flag & 1) {
4305 hevc->num_tile_col = params->p.num_tile_columns_minus1 + 1;
4306 hevc->num_tile_row = params->p.num_tile_rows_minus1 + 1;
4307
4308 if (hevc->num_tile_row > MAX_TILE_ROW_NUM
4309 || hevc->num_tile_row <= 0) {
4310 hevc->num_tile_row = 1;
4311 hevc_print(hevc, 0,
4312 "%s: num_tile_rows_minus1 (%d) error!!\n",
4313 __func__, params->p.num_tile_rows_minus1);
4314 }
4315 if (hevc->num_tile_col > MAX_TILE_COL_NUM
4316 || hevc->num_tile_col <= 0) {
4317 hevc->num_tile_col = 1;
4318 hevc_print(hevc, 0,
4319 "%s: num_tile_columns_minus1 (%d) error!!\n",
4320 __func__, params->p.num_tile_columns_minus1);
4321 }
4322 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
4323 hevc_print(hevc, 0,
4324 "%s pic_w_cu %d pic_h_cu %d tile_enabled ",
4325 __func__, pic_width_cu, pic_height_cu);
4326 hevc_print_cont(hevc, 0,
4327 "num_tile_col %d num_tile_row %d:\n",
4328 hevc->num_tile_col, hevc->num_tile_row);
4329 }
4330
4331 if (params->p.tiles_enabled_flag & 2) { /* uniform flag */
4332 int w = pic_width_cu / hevc->num_tile_col;
4333 int h = pic_height_cu / hevc->num_tile_row;
4334
4335 start_cu_y = 0;
4336 for (i = 0; i < hevc->num_tile_row; i++) {
4337 start_cu_x = 0;
4338 for (j = 0; j < hevc->num_tile_col; j++) {
4339 if (j == (hevc->num_tile_col - 1)) {
4340 hevc->m_tile[i][j].width =
4341 pic_width_cu -
4342 start_cu_x;
4343 } else
4344 hevc->m_tile[i][j].width = w;
4345 if (i == (hevc->num_tile_row - 1)) {
4346 hevc->m_tile[i][j].height =
4347 pic_height_cu -
4348 start_cu_y;
4349 } else
4350 hevc->m_tile[i][j].height = h;
4351 hevc->m_tile[i][j].start_cu_x
4352 = start_cu_x;
4353 hevc->m_tile[i][j].start_cu_y
4354 = start_cu_y;
4355 hevc->m_tile[i][j].sao_vb_start_addr =
4356 hevc->work_space_buf->sao_vb.
4357 buf_start + j * sao_vb_size;
4358 hevc->m_tile[i][j].sao_abv_start_addr =
4359 hevc->work_space_buf->sao_abv.
4360 buf_start + i * sao_abv_size;
4361 if (get_dbg_flag(hevc) &
4362 H265_DEBUG_BUFMGR) {
4363 hevc_print_cont(hevc, 0,
4364 "{y=%d, x=%d w %d h %d ",
4365 i, j, hevc->m_tile[i][j].width,
4366 hevc->m_tile[i][j].height);
4367 hevc_print_cont(hevc, 0,
4368 "start_x %d start_y %d ",
4369 hevc->m_tile[i][j].start_cu_x,
4370 hevc->m_tile[i][j].start_cu_y);
4371 hevc_print_cont(hevc, 0,
4372 "sao_vb_start 0x%x ",
4373 hevc->m_tile[i][j].
4374 sao_vb_start_addr);
4375 hevc_print_cont(hevc, 0,
4376 "sao_abv_start 0x%x}\n",
4377 hevc->m_tile[i][j].
4378 sao_abv_start_addr);
4379 }
4380 start_cu_x += hevc->m_tile[i][j].width;
4381
4382 }
4383 start_cu_y += hevc->m_tile[i][0].height;
4384 }
4385 } else {
4386 start_cu_y = 0;
4387 for (i = 0; i < hevc->num_tile_row; i++) {
4388 start_cu_x = 0;
4389 for (j = 0; j < hevc->num_tile_col; j++) {
4390 if (j == (hevc->num_tile_col - 1)) {
4391 hevc->m_tile[i][j].width =
4392 pic_width_cu -
4393 start_cu_x;
4394 } else {
4395 hevc->m_tile[i][j].width =
4396 params->p.tile_width[j];
4397 }
4398 if (i == (hevc->num_tile_row - 1)) {
4399 hevc->m_tile[i][j].height =
4400 pic_height_cu -
4401 start_cu_y;
4402 } else {
4403 hevc->m_tile[i][j].height =
4404 params->
4405 p.tile_height[i];
4406 }
4407 hevc->m_tile[i][j].start_cu_x
4408 = start_cu_x;
4409 hevc->m_tile[i][j].start_cu_y
4410 = start_cu_y;
4411 hevc->m_tile[i][j].sao_vb_start_addr =
4412 hevc->work_space_buf->sao_vb.
4413 buf_start + j * sao_vb_size;
4414 hevc->m_tile[i][j].sao_abv_start_addr =
4415 hevc->work_space_buf->sao_abv.
4416 buf_start + i * sao_abv_size;
4417 if (get_dbg_flag(hevc) &
4418 H265_DEBUG_BUFMGR) {
4419 hevc_print_cont(hevc, 0,
4420 "{y=%d, x=%d w %d h %d ",
4421 i, j, hevc->m_tile[i][j].width,
4422 hevc->m_tile[i][j].height);
4423 hevc_print_cont(hevc, 0,
4424 "start_x %d start_y %d ",
4425 hevc->m_tile[i][j].start_cu_x,
4426 hevc->m_tile[i][j].start_cu_y);
4427 hevc_print_cont(hevc, 0,
4428 "sao_vb_start 0x%x ",
4429 hevc->m_tile[i][j].
4430 sao_vb_start_addr);
4431 hevc_print_cont(hevc, 0,
4432 "sao_abv_start 0x%x}\n",
4433 hevc->m_tile[i][j].
4434 sao_abv_start_addr);
4435
4436 }
4437 start_cu_x += hevc->m_tile[i][j].width;
4438 }
4439 start_cu_y += hevc->m_tile[i][0].height;
4440 }
4441 }
4442 } else {
4443 hevc->num_tile_col = 1;
4444 hevc->num_tile_row = 1;
4445 hevc->m_tile[0][0].width = pic_width_cu;
4446 hevc->m_tile[0][0].height = pic_height_cu;
4447 hevc->m_tile[0][0].start_cu_x = 0;
4448 hevc->m_tile[0][0].start_cu_y = 0;
4449 hevc->m_tile[0][0].sao_vb_start_addr =
4450 hevc->work_space_buf->sao_vb.buf_start;
4451 hevc->m_tile[0][0].sao_abv_start_addr =
4452 hevc->work_space_buf->sao_abv.buf_start;
4453 }
4454 }
4455
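/*
 * Map a CTU (LCU) address to its tile coordinates, returned packed as
 * tile_x | (tile_y << 8); returns -1 if pic_width_lcu is 0.
 */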
4456 static int get_tile_index(struct hevc_state_s *hevc, int cu_adr,
4457 int pic_width_lcu)
4458 {
4459 int cu_x;
4460 int cu_y;
4461 int tile_x = 0;
4462 int tile_y = 0;
4463 int i;
4464
4465 if (pic_width_lcu == 0) {
4466 if (get_dbg_flag(hevc)) {
4467 hevc_print(hevc, 0,
4468 "%s Error, pic_width_lcu is 0, pic_w %d, pic_h %d\n",
4469 __func__, hevc->pic_w, hevc->pic_h);
4470 }
4471 return -1;
4472 }
4473 cu_x = cu_adr % pic_width_lcu;
4474 cu_y = cu_adr / pic_width_lcu;
4475 if (hevc->tile_enabled) {
4476 for (i = 0; i < hevc->num_tile_col; i++) {
4477 if (cu_x >= hevc->m_tile[0][i].start_cu_x)
4478 tile_x = i;
4479 else
4480 break;
4481 }
4482 for (i = 0; i < hevc->num_tile_row; i++) {
4483 if (cu_y >= hevc->m_tile[i][0].start_cu_y)
4484 tile_y = i;
4485 else
4486 break;
4487 }
4488 }
4489 return (tile_x) | (tile_y << 8);
4490 }
4491
4492 static void print_scratch_error(int error_num)
4493 {
4494 #if 0
4495 if (get_dbg_flag(hevc)) {
4496 hevc_print(hevc, 0,
4497 " ERROR : HEVC_ASSIST_SCRATCH_TEST Error : %d\n",
4498 error_num);
4499 }
4500 #endif
4501 }
4502
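/*
 * Point the HEVC core at the carved-out workspace buffers: IPP line
 * buffer, RPM, short-term RPS, VPS/SPS/PPS, SAO-up, scale LUT, deblock
 * parameter/data buffers, LMEM dump area and (in MMU mode) the frame MMU
 * map.  On SM1 and later it also programs HEVC_DBLK_CFG3, which per the
 * comment below doubles the loop-filter offset for 8K streams.
 */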
4503 static void hevc_config_work_space_hw(struct hevc_state_s *hevc)
4504 {
4505 struct BuffInfo_s *buf_spec = hevc->work_space_buf;
4506
4507 if (get_dbg_flag(hevc))
4508 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4509 "%s %x %x %x %x %x %x %x %x %x %x %x %x %x\n",
4510 __func__,
4511 buf_spec->ipp.buf_start,
4512 buf_spec->start_adr,
4513 buf_spec->short_term_rps.buf_start,
4514 buf_spec->vps.buf_start,
4515 buf_spec->sps.buf_start,
4516 buf_spec->pps.buf_start,
4517 buf_spec->sao_up.buf_start,
4518 buf_spec->swap_buf.buf_start,
4519 buf_spec->swap_buf2.buf_start,
4520 buf_spec->scalelut.buf_start,
4521 buf_spec->dblk_para.buf_start,
4522 buf_spec->dblk_data.buf_start,
4523 buf_spec->dblk_data2.buf_start);
4524 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE, buf_spec->ipp.buf_start);
4525 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0)
4526 WRITE_VREG(HEVC_RPM_BUFFER, (u32)hevc->rpm_phy_addr);
4527 WRITE_VREG(HEVC_SHORT_TERM_RPS, buf_spec->short_term_rps.buf_start);
4528 WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);
4529 WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);
4530 WRITE_VREG(HEVC_PPS_BUFFER, buf_spec->pps.buf_start);
4531 WRITE_VREG(HEVC_SAO_UP, buf_spec->sao_up.buf_start);
4532 if (hevc->mmu_enable) {
4533 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
4534 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR, hevc->frame_mmu_map_phy_addr);
4535 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4536 "write HEVC_ASSIST_MMU_MAP_ADDR\n");
4537 } else
4538 WRITE_VREG(H265_MMU_MAP_BUFFER, hevc->frame_mmu_map_phy_addr);
4539 } /*else
4540 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER,
4541 buf_spec->swap_buf.buf_start);
4542 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, buf_spec->swap_buf2.buf_start);*/
4543 WRITE_VREG(HEVC_SCALELUT, buf_spec->scalelut.buf_start);
4544 #ifdef HEVC_8K_LFTOFFSET_FIX
4545 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
4546 WRITE_VREG(HEVC_DBLK_CFG3, 0x808020); /*offset should x2 if 8k*/
4547 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
4548 "write HEVC_DBLK_CFG3\n");
4549 }
4550 #endif
4551 /* cfg_p_addr */
4552 WRITE_VREG(HEVC_DBLK_CFG4, buf_spec->dblk_para.buf_start);
4553 /* cfg_d_addr */
4554 WRITE_VREG(HEVC_DBLK_CFG5, buf_spec->dblk_data.buf_start);
4555
4556 WRITE_VREG(HEVC_DBLK_CFGE, buf_spec->dblk_data2.buf_start);
4557
4558 WRITE_VREG(LMEM_DUMP_ADR, (u32)hevc->lmem_phy_addr);
4559 }
4560
4561 static void parser_cmd_write(void)
4562 {
4563 u32 i;
4564 const unsigned short parser_cmd[PARSER_CMD_NUMBER] = {
4565 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
4566 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
4567 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
4568 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
4569 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
4570 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
4571 0x7C00
4572 };
4573 for (i = 0; i < PARSER_CMD_NUMBER; i++)
4574 WRITE_VREG(HEVC_PARSER_CMD_WRITE, parser_cmd[i]);
4575 }
4576
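/*
 * Bring up the decoder front end: sanity-check the parser scratch
 * registers, reset the IQIT block, enable stream fetch (non
 * multi-instance only), program the parser/shift interrupt and control
 * bits, clear the IQIT scale LUT, download the parser command list and
 * reset the IPP/MPP pipeline.  decode_pic_begin/decode_pic_num are
 * currently unused here; the corresponding register writes are
 * commented out.
 */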
4577 static void hevc_init_decoder_hw(struct hevc_state_s *hevc,
4578 int decode_pic_begin, int decode_pic_num)
4579 {
4580 unsigned int data32;
4581 int i;
4582 #if 0
4583 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A) {
4584 /* Set MCR fetch priorities*/
4585 data32 = 0x1 | (0x1 << 2) | (0x1 <<3) |
4586 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
4587 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL, data32);
4588 }
4589 #endif
4590 #if 1
4591 /* m8baby test1902 */
4592 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
4593 hevc_print(hevc, 0,
4594 "%s\n", __func__);
4595 data32 = READ_VREG(HEVC_PARSER_VERSION);
4596 if (data32 != 0x00010001) {
4597 print_scratch_error(25);
4598 return;
4599 }
4600 WRITE_VREG(HEVC_PARSER_VERSION, 0x5a5a55aa);
4601 data32 = READ_VREG(HEVC_PARSER_VERSION);
4602 if (data32 != 0x5a5a55aa) {
4603 print_scratch_error(26);
4604 return;
4605 }
4606 #if 0
4607 /* test Parser Reset */
4608 /* reset iqit to start mem init again */
4609 WRITE_VREG(DOS_SW_RESET3, (1 << 14) |
4610 (1 << 3) /* reset_whole parser */
4611 );
4612 WRITE_VREG(DOS_SW_RESET3, 0); /* clear reset_whole parser */
4613 data32 = READ_VREG(HEVC_PARSER_VERSION);
4614 if (data32 != 0x00010001)
4615 hevc_print(hevc, 0,
4616 "Test Parser Fatal Error\n");
4617 #endif
4618 /* reset iqit to start mem init again */
4619 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4620 );
4621 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4622 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4623
4624 #endif
4625 if (!hevc->m_ins_flag) {
4626 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4627 data32 = data32 | (1 << 0); /* stream_fetch_enable */
4628 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A)
4629 data32 |= (0xf << 25); /*arwlen_axi_max*/
4630 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4631 }
4632 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4633 if (data32 != 0x00000100) {
4634 print_scratch_error(29);
4635 return;
4636 }
4637 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4638 if (data32 != 0x00000300) {
4639 print_scratch_error(30);
4640 return;
4641 }
4642 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4643 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4644 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4645 if (data32 != 0x12345678) {
4646 print_scratch_error(31);
4647 return;
4648 }
4649 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4650 if (data32 != 0x9abcdef0) {
4651 print_scratch_error(32);
4652 return;
4653 }
4654 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4655 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4656
4657 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4658 data32 &= 0x03ffffff;
4659 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4660 | /* stream_buffer_empty_int_amrisc_enable */
4661 (1 << 22) | /* stream_fifo_empty_int_amrisc_enable*/
4662 (1 << 7) | /* dec_done_int_cpu_enable */
4663 (1 << 4) | /* startcode_found_int_cpu_enable */
4664 (0 << 3) | /* startcode_found_int_amrisc_enable */
4665 (1 << 0) /* parser_int_enable */
4666 ;
4667 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4668
4669 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4670 data32 = data32 | (1 << 1) | /* emulation_check_on */
4671 (1 << 0) /* startcode_check_on */
4672 ;
4673 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4674
4675 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4676 (2 << 4) | /* emulate_code_length_sub_1 */
4677 (2 << 1) | /* start_code_length_sub_1 */
4678 (1 << 0) /* stream_shift_enable */
4679 );
4680
4681 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4682 );
4683 /* hevc_parser_core_clk_en */
4684 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4685 );
4686
4687 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
4688
4689 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4690 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4691 for (i = 0; i < 1024; i++)
4692 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4693
4694 #ifdef ENABLE_SWAP_TEST
4695 WRITE_VREG(HEVC_STREAM_SWAP_TEST, 100);
4696 #endif
4697
4698 /*WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG, 0);*/
4699 /*WRITE_VREG(HEVC_DECODE_PIC_NUM_REG, 0xffffffff);*/
4700 WRITE_VREG(HEVC_DECODE_SIZE, 0);
4701 /*WRITE_VREG(HEVC_DECODE_COUNT, 0);*/
4702 /* Send parser_cmd */
4703 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4704
4705 parser_cmd_write();
4706
4707 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4708 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4709 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4710
4711 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4712 /* (1 << 8) | // sao_sw_pred_enable */
4713 (1 << 5) | /* parser_sao_if_en */
4714 (1 << 2) | /* parser_mpred_if_en */
4715 (1 << 0) /* parser_scaler_if_en */
4716 );
4717
4718 /* Changed to Start MPRED in microcode */
4719 /*
4720 * hevc_print(hevc, 0, "[test.c] Start MPRED\n");
4721 * WRITE_VREG(HEVC_MPRED_INT_STATUS,
4722 * (1<<31)
4723 * );
4724 */
4725
4726 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4727 (1 << 0) /* software reset ipp and mpp */
4728 );
4729 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4730 (0 << 0) /* software reset ipp and mpp */
4731 );
4732
4733 if (get_double_write_mode(hevc) & 0x10)
4734 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
4735 0x1 << 31 /*/Enable NV21 reference read mode for MC*/
4736 );
4737
4738 }
4739
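/*
 * Re-run the parser/IPP portion of hevc_init_decoder_hw() after a
 * hardware reset; unlike the full init path it always enables stream
 * fetch and skips the decode-size/status and parser-version setup.
 */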
4740 static void decoder_hw_reset(void)
4741 {
4742 int i;
4743 unsigned int data32;
4744 /* reset iqit to start mem init again */
4745 WRITE_VREG(DOS_SW_RESET3, (1 << 14)
4746 );
4747 CLEAR_VREG_MASK(HEVC_CABAC_CONTROL, 1);
4748 CLEAR_VREG_MASK(HEVC_PARSER_CORE_CONTROL, 1);
4749
4750 data32 = READ_VREG(HEVC_STREAM_CONTROL);
4751 data32 = data32 | (1 << 0) /* stream_fetch_enable */
4752 ;
4753 WRITE_VREG(HEVC_STREAM_CONTROL, data32);
4754
4755 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4756 if (data32 != 0x00000100) {
4757 print_scratch_error(29);
4758 return;
4759 }
4760 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4761 if (data32 != 0x00000300) {
4762 print_scratch_error(30);
4763 return;
4764 }
4765 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x12345678);
4766 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x9abcdef0);
4767 data32 = READ_VREG(HEVC_SHIFT_STARTCODE);
4768 if (data32 != 0x12345678) {
4769 print_scratch_error(31);
4770 return;
4771 }
4772 data32 = READ_VREG(HEVC_SHIFT_EMULATECODE);
4773 if (data32 != 0x9abcdef0) {
4774 print_scratch_error(32);
4775 return;
4776 }
4777 WRITE_VREG(HEVC_SHIFT_STARTCODE, 0x00000100);
4778 WRITE_VREG(HEVC_SHIFT_EMULATECODE, 0x00000300);
4779
4780 data32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4781 data32 &= 0x03ffffff;
4782 data32 = data32 | (3 << 29) | (2 << 26) | (1 << 24)
4783 | /* stream_buffer_empty_int_amrisc_enable */
4784 (1 << 22) | /*stream_fifo_empty_int_amrisc_enable */
4785 (1 << 7) | /* dec_done_int_cpu_enable */
4786 (1 << 4) | /* startcode_found_int_cpu_enable */
4787 (0 << 3) | /* startcode_found_int_amrisc_enable */
4788 (1 << 0) /* parser_int_enable */
4789 ;
4790 WRITE_VREG(HEVC_PARSER_INT_CONTROL, data32);
4791
4792 data32 = READ_VREG(HEVC_SHIFT_STATUS);
4793 data32 = data32 | (1 << 1) | /* emulation_check_on */
4794 (1 << 0) /* startcode_check_on */
4795 ;
4796 WRITE_VREG(HEVC_SHIFT_STATUS, data32);
4797
4798 WRITE_VREG(HEVC_SHIFT_CONTROL, (3 << 6) |/* sft_valid_wr_position */
4799 (2 << 4) | /* emulate_code_length_sub_1 */
4800 (2 << 1) | /* start_code_length_sub_1 */
4801 (1 << 0) /* stream_shift_enable */
4802 );
4803
4804 WRITE_VREG(HEVC_CABAC_CONTROL, (1 << 0) /* cabac_enable */
4805 );
4806 /* hevc_parser_core_clk_en */
4807 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, (1 << 0)
4808 );
4809
4810 /* Initialize IQIT_SCALELUT memory -- just to avoid X in simulation */
4811 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR, 0); /* cfg_p_addr */
4812 for (i = 0; i < 1024; i++)
4813 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA, 0);
4814
4815 /* Send parser_cmd */
4816 WRITE_VREG(HEVC_PARSER_CMD_WRITE, (1 << 16) | (0 << 0));
4817
4818 parser_cmd_write();
4819
4820 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0, PARSER_CMD_SKIP_CFG_0);
4821 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1, PARSER_CMD_SKIP_CFG_1);
4822 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2, PARSER_CMD_SKIP_CFG_2);
4823
4824 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4825 /* (1 << 8) | // sao_sw_pred_enable */
4826 (1 << 5) | /* parser_sao_if_en */
4827 (1 << 2) | /* parser_mpred_if_en */
4828 (1 << 0) /* parser_scaler_if_en */
4829 );
4830
4831 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (0 << 1) | /* enable ipp */
4832 (1 << 0) /* software reset ipp and mpp */
4833 );
4834 WRITE_VREG(HEVCD_IPP_TOP_CNTL, (1 << 1) | /* enable ipp */
4835 (0 << 0) /* software reset ipp and mpp */
4836 );
4837 }
4838
4839 #ifdef CONFIG_HEVC_CLK_FORCED_ON
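/*
 * Debug aid: force the per-block dynamic clock gates on (IQIT, DBLK, SAO,
 * MPRED, the parser path, IPP and MCRCC).
 */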
4840 static void config_hevc_clk_forced_on(void)
4841 {
4842 unsigned int rdata32;
4843 /* IQIT */
4844 rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
4845 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));
4846
4847 /* DBLK */
4848 rdata32 = READ_VREG(HEVC_DBLK_CFG0);
4849 WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));
4850
4851 /* SAO */
4852 rdata32 = READ_VREG(HEVC_SAO_CTRL1);
4853 WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));
4854
4855 /* MPRED */
4856 rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
4857 WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));
4858
4859 /* PARSER */
4860 rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
4861 WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
4862 rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
4863 WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
4864 rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
4865 WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
4866 rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
4867 WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
4868 rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
4869 WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
4870 rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
4871 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
4872 rdata32 | (0x3 << 5) | (0x3 << 2) | (0x3 << 0));
4873
4874 /* IPP */
4875 rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
4876 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);
4877
4878 /* MCRCC */
4879 rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
4880 WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
4881 }
4882 #endif
4883
4884 #ifdef MCRCC_ENABLE
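/*
 * Configure the motion-compensation reference cache (MCRCC) for the slice
 * type: held in reset (clock gated) for I slices, canvas0/canvas1
 * programmed from the reference canvas table for P and B slices, then
 * switched to progressive mode.
 */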
4885 static void config_mcrcc_axi_hw(struct hevc_state_s *hevc, int slice_type)
4886 {
4887 unsigned int rdata32;
4888 unsigned int rdata32_2;
4889 int l0_cnt = 0;
4890 int l1_cnt = 0x7fff;
4891
4892 if (get_double_write_mode(hevc) & 0x10) {
4893 l0_cnt = hevc->cur_pic->RefNum_L0;
4894 l1_cnt = hevc->cur_pic->RefNum_L1;
4895 }
4896
4897 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */
4898
4899 if (slice_type == 2) { /* I-PIC */
4900 /* remove reset -- disables clock */
4901 WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
4902 return;
4903 }
4904
4905 if (slice_type == 0) { /* B-PIC */
4906 /* Programme canvas0 */
4907 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4908 (0 << 8) | (0 << 1) | 0);
4909 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4910 rdata32 = rdata32 & 0xffff;
4911 rdata32 = rdata32 | (rdata32 << 16);
4912 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4913
4914 /* Programme canvas1 */
4915 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4916 (16 << 8) | (1 << 1) | 0);
4917 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4918 rdata32_2 = rdata32_2 & 0xffff;
4919 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4920 if (rdata32 == rdata32_2 && l1_cnt > 1) {
4921 rdata32_2 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4922 rdata32_2 = rdata32_2 & 0xffff;
4923 rdata32_2 = rdata32_2 | (rdata32_2 << 16);
4924 }
4925 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32_2);
4926 } else { /* P-PIC */
4927 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
4928 (0 << 8) | (1 << 1) | 0);
4929 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4930 rdata32 = rdata32 & 0xffff;
4931 rdata32 = rdata32 | (rdata32 << 16);
4932 WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
4933
4934 if (l0_cnt == 1) {
4935 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4936 } else {
4937 /* Programme canvas1 */
4938 rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
4939 rdata32 = rdata32 & 0xffff;
4940 rdata32 = rdata32 | (rdata32 << 16);
4941 WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
4942 }
4943 }
4944 /* enable mcrcc progressive-mode */
4945 WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
4946 }
4947 #endif
4948
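/*
 * Program the SAO above/vertical work-buffer addresses and unit sizes
 * ("title" here is presumably a historical typo for "tile").
 */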
4949 static void config_title_hw(struct hevc_state_s *hevc, int sao_vb_size,
4950 int sao_mem_unit)
4951 {
4952 WRITE_VREG(HEVC_sao_mem_unit, sao_mem_unit);
4953 WRITE_VREG(HEVC_SAO_ABV, hevc->work_space_buf->sao_abv.buf_start);
4954 WRITE_VREG(HEVC_sao_vb_size, sao_vb_size);
4955 WRITE_VREG(HEVC_SAO_VB, hevc->work_space_buf->sao_vb.buf_start);
4956 }
4957
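/*
 * Aux (prefix/suffix) data handling: config_aux_buf() programs the aux
 * buffer address and the packed prefix/suffix sizes into
 * HEVC_AUX_DATA_SIZE; aux_data_is_avaible() then treats any non-zero
 * readback that differs from the programmed init_aux_size as an
 * indication that the ucode has written new aux data.
 */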
4958 static u32 init_aux_size;
4959 static int aux_data_is_avaible(struct hevc_state_s *hevc)
4960 {
4961 u32 reg_val;
4962
4963 reg_val = READ_VREG(HEVC_AUX_DATA_SIZE);
4964 if (reg_val != 0 && reg_val != init_aux_size)
4965 return 1;
4966 else
4967 return 0;
4968 }
4969
4970 static void config_aux_buf(struct hevc_state_s *hevc)
4971 {
4972 WRITE_VREG(HEVC_AUX_ADR, hevc->aux_phy_addr);
4973 init_aux_size = ((hevc->prefix_aux_size >> 4) << 16) |
4974 (hevc->suffix_aux_size >> 4);
4975 WRITE_VREG(HEVC_AUX_DATA_SIZE, init_aux_size);
4976 }
4977
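/*
 * Program the motion-prediction block for the current slice: MV write/read
 * buffer addresses (current and co-located picture), LCU and tile
 * geometry, merge/AMVP candidate limits, reference counts and enables,
 * and the L0/L1 reference POC register pairs (which, per the note below,
 * must be written in L0 -> L1 order).
 */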
4978 static void config_mpred_hw(struct hevc_state_s *hevc)
4979 {
4980 int i;
4981 unsigned int data32;
4982 struct PIC_s *cur_pic = hevc->cur_pic;
4983 struct PIC_s *col_pic = hevc->col_pic;
4984 int AMVP_MAX_NUM_CANDS_MEM = 3;
4985 int AMVP_MAX_NUM_CANDS = 2;
4986 int NUM_CHROMA_MODE = 5;
4987 int DM_CHROMA_IDX = 36;
4988 int above_ptr_ctrl = 0;
4989 int buffer_linear = 1;
4990 int cu_size_log2 = 3;
4991
4992 int mpred_mv_rd_start_addr;
4993 int mpred_curr_lcu_x;
4994 int mpred_curr_lcu_y;
4995 int mpred_above_buf_start;
4996 int mpred_mv_rd_ptr;
4997 int mpred_mv_rd_ptr_p1;
4998 int mpred_mv_rd_end_addr;
4999 int MV_MEM_UNIT;
5000 int mpred_mv_wr_ptr;
5001 int *ref_poc_L0, *ref_poc_L1;
5002
5003 int above_en;
5004 int mv_wr_en;
5005 int mv_rd_en;
5006 int col_isIntra;
5007
5008 if (hevc->slice_type != 2) {
5009 above_en = 1;
5010 mv_wr_en = 1;
5011 mv_rd_en = 1;
5012 col_isIntra = 0;
5013 } else {
5014 above_en = 1;
5015 mv_wr_en = 1;
5016 mv_rd_en = 0;
5017 col_isIntra = 0;
5018 }
5019
5020 mpred_mv_rd_start_addr = col_pic->mpred_mv_wr_start_addr;
5021 data32 = READ_VREG(HEVC_MPRED_CURR_LCU);
5022 mpred_curr_lcu_x = data32 & 0xffff;
5023 mpred_curr_lcu_y = (data32 >> 16) & 0xffff;
5024
5025 MV_MEM_UNIT =
5026 hevc->lcu_size_log2 == 6 ? 0x200 : hevc->lcu_size_log2 ==
5027 5 ? 0x80 : 0x20;
5028 mpred_mv_rd_ptr =
5029 mpred_mv_rd_start_addr + (hevc->slice_addr * MV_MEM_UNIT);
5030
5031 mpred_mv_rd_ptr_p1 = mpred_mv_rd_ptr + MV_MEM_UNIT;
5032 mpred_mv_rd_end_addr =
5033 mpred_mv_rd_start_addr +
5034 ((hevc->lcu_x_num * hevc->lcu_y_num) * MV_MEM_UNIT);
5035
5036 mpred_above_buf_start = hevc->work_space_buf->mpred_above.buf_start;
5037
5038 mpred_mv_wr_ptr =
5039 cur_pic->mpred_mv_wr_start_addr +
5040 (hevc->slice_addr * MV_MEM_UNIT);
5041
5042 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5043 hevc_print(hevc, 0,
5044 "cur pic index %d col pic index %d\n", cur_pic->index,
5045 col_pic->index);
5046 }
5047
5048 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR,
5049 cur_pic->mpred_mv_wr_start_addr);
5050 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR, mpred_mv_rd_start_addr);
5051
5052 data32 = ((hevc->lcu_x_num - hevc->tile_width_lcu) * MV_MEM_UNIT);
5053 WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP, data32);
5054 WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP, data32);
5055
5056 data32 = READ_VREG(HEVC_MPRED_CTRL0);
5057 data32 = (hevc->slice_type |
5058 hevc->new_pic << 2 |
5059 hevc->new_tile << 3 |
5060 hevc->isNextSliceSegment << 4 |
5061 hevc->TMVPFlag << 5 |
5062 hevc->LDCFlag << 6 |
5063 hevc->ColFromL0Flag << 7 |
5064 above_ptr_ctrl << 8 |
5065 above_en << 9 |
5066 mv_wr_en << 10 |
5067 mv_rd_en << 11 |
5068 col_isIntra << 12 |
5069 buffer_linear << 13 |
5070 hevc->LongTerm_Curr << 14 |
5071 hevc->LongTerm_Col << 15 |
5072 hevc->lcu_size_log2 << 16 |
5073 cu_size_log2 << 20 | hevc->plevel << 24);
5074 WRITE_VREG(HEVC_MPRED_CTRL0, data32);
5075
5076 data32 = READ_VREG(HEVC_MPRED_CTRL1);
5077 data32 = (
5078 #if 0
5079 /* no set in m8baby test1902 */
5080 /* Don't override clk_forced_on , */
5081 (data32 & (0x1 << 24)) |
5082 #endif
5083 hevc->MaxNumMergeCand |
5084 AMVP_MAX_NUM_CANDS << 4 |
5085 AMVP_MAX_NUM_CANDS_MEM << 8 |
5086 NUM_CHROMA_MODE << 12 | DM_CHROMA_IDX << 16);
5087 WRITE_VREG(HEVC_MPRED_CTRL1, data32);
5088
5089 data32 = (hevc->pic_w | hevc->pic_h << 16);
5090 WRITE_VREG(HEVC_MPRED_PIC_SIZE, data32);
5091
5092 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5093 WRITE_VREG(HEVC_MPRED_PIC_SIZE_LCU, data32);
5094
5095 data32 = (hevc->tile_start_lcu_x | hevc->tile_start_lcu_y << 16);
5096 WRITE_VREG(HEVC_MPRED_TILE_START, data32);
5097
5098 data32 = (hevc->tile_width_lcu | hevc->tile_height_lcu << 16);
5099 WRITE_VREG(HEVC_MPRED_TILE_SIZE_LCU, data32);
5100
5101 data32 = (hevc->RefNum_L0 | hevc->RefNum_L1 << 8 | 0
5102 /* col_RefNum_L0<<16| */
5103 /* col_RefNum_L1<<24 */
5104 );
5105 WRITE_VREG(HEVC_MPRED_REF_NUM, data32);
5106
5107 data32 = (hevc->LongTerm_Ref);
5108 WRITE_VREG(HEVC_MPRED_LT_REF, data32);
5109
5110 data32 = 0;
5111 for (i = 0; i < hevc->RefNum_L0; i++)
5112 data32 = data32 | (1 << i);
5113 WRITE_VREG(HEVC_MPRED_REF_EN_L0, data32);
5114
5115 data32 = 0;
5116 for (i = 0; i < hevc->RefNum_L1; i++)
5117 data32 = data32 | (1 << i);
5118 WRITE_VREG(HEVC_MPRED_REF_EN_L1, data32);
5119
5120 WRITE_VREG(HEVC_MPRED_CUR_POC, hevc->curr_POC);
5121 WRITE_VREG(HEVC_MPRED_COL_POC, hevc->Col_POC);
5122
5123 /* below MPRED Ref_POC_xx_Lx registers must follow Ref_POC_xx_L0 ->
5124 * Ref_POC_xx_L1 in pair write order!!!
5125 */
5126 ref_poc_L0 = &(cur_pic->m_aiRefPOCList0[cur_pic->slice_idx][0]);
5127 ref_poc_L1 = &(cur_pic->m_aiRefPOCList1[cur_pic->slice_idx][0]);
5128
5129 WRITE_VREG(HEVC_MPRED_L0_REF00_POC, ref_poc_L0[0]);
5130 WRITE_VREG(HEVC_MPRED_L1_REF00_POC, ref_poc_L1[0]);
5131
5132 WRITE_VREG(HEVC_MPRED_L0_REF01_POC, ref_poc_L0[1]);
5133 WRITE_VREG(HEVC_MPRED_L1_REF01_POC, ref_poc_L1[1]);
5134
5135 WRITE_VREG(HEVC_MPRED_L0_REF02_POC, ref_poc_L0[2]);
5136 WRITE_VREG(HEVC_MPRED_L1_REF02_POC, ref_poc_L1[2]);
5137
5138 WRITE_VREG(HEVC_MPRED_L0_REF03_POC, ref_poc_L0[3]);
5139 WRITE_VREG(HEVC_MPRED_L1_REF03_POC, ref_poc_L1[3]);
5140
5141 WRITE_VREG(HEVC_MPRED_L0_REF04_POC, ref_poc_L0[4]);
5142 WRITE_VREG(HEVC_MPRED_L1_REF04_POC, ref_poc_L1[4]);
5143
5144 WRITE_VREG(HEVC_MPRED_L0_REF05_POC, ref_poc_L0[5]);
5145 WRITE_VREG(HEVC_MPRED_L1_REF05_POC, ref_poc_L1[5]);
5146
5147 WRITE_VREG(HEVC_MPRED_L0_REF06_POC, ref_poc_L0[6]);
5148 WRITE_VREG(HEVC_MPRED_L1_REF06_POC, ref_poc_L1[6]);
5149
5150 WRITE_VREG(HEVC_MPRED_L0_REF07_POC, ref_poc_L0[7]);
5151 WRITE_VREG(HEVC_MPRED_L1_REF07_POC, ref_poc_L1[7]);
5152
5153 WRITE_VREG(HEVC_MPRED_L0_REF08_POC, ref_poc_L0[8]);
5154 WRITE_VREG(HEVC_MPRED_L1_REF08_POC, ref_poc_L1[8]);
5155
5156 WRITE_VREG(HEVC_MPRED_L0_REF09_POC, ref_poc_L0[9]);
5157 WRITE_VREG(HEVC_MPRED_L1_REF09_POC, ref_poc_L1[9]);
5158
5159 WRITE_VREG(HEVC_MPRED_L0_REF10_POC, ref_poc_L0[10]);
5160 WRITE_VREG(HEVC_MPRED_L1_REF10_POC, ref_poc_L1[10]);
5161
5162 WRITE_VREG(HEVC_MPRED_L0_REF11_POC, ref_poc_L0[11]);
5163 WRITE_VREG(HEVC_MPRED_L1_REF11_POC, ref_poc_L1[11]);
5164
5165 WRITE_VREG(HEVC_MPRED_L0_REF12_POC, ref_poc_L0[12]);
5166 WRITE_VREG(HEVC_MPRED_L1_REF12_POC, ref_poc_L1[12]);
5167
5168 WRITE_VREG(HEVC_MPRED_L0_REF13_POC, ref_poc_L0[13]);
5169 WRITE_VREG(HEVC_MPRED_L1_REF13_POC, ref_poc_L1[13]);
5170
5171 WRITE_VREG(HEVC_MPRED_L0_REF14_POC, ref_poc_L0[14]);
5172 WRITE_VREG(HEVC_MPRED_L1_REF14_POC, ref_poc_L1[14]);
5173
5174 WRITE_VREG(HEVC_MPRED_L0_REF15_POC, ref_poc_L0[15]);
5175 WRITE_VREG(HEVC_MPRED_L1_REF15_POC, ref_poc_L1[15]);
5176
5177 if (hevc->new_pic) {
5178 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR, mpred_above_buf_start);
5179 WRITE_VREG(HEVC_MPRED_MV_WPTR, mpred_mv_wr_ptr);
5180 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr); */
5181 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_start_addr);
5182 } else if (!hevc->isNextSliceSegment) {
5183 /* WRITE_VREG(HEVC_MPRED_MV_RPTR,mpred_mv_rd_ptr_p1); */
5184 WRITE_VREG(HEVC_MPRED_MV_RPTR, mpred_mv_rd_ptr);
5185 }
5186
5187 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR, mpred_mv_rd_end_addr);
5188 }
5189
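/*
 * Program the SAO output stage and deblocking filter for the current
 * picture: picture/LCU geometry, compressed body/header and/or
 * double-write Y/C addresses, endianness and NV12/NV21 selection (v4l
 * mode), and the beta/tc offsets and cross-slice/tile loop-filter flags
 * taken from the PPS or the slice-header override.
 */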
5190 static void config_sao_hw(struct hevc_state_s *hevc, union param_u *params)
5191 {
5192 unsigned int data32, data32_2;
5193 int misc_flag0 = hevc->misc_flag0;
5194 int slice_deblocking_filter_disabled_flag = 0;
5195
5196 int mc_buffer_size_u_v =
5197 hevc->lcu_total * hevc->lcu_size * hevc->lcu_size / 2;
5198 int mc_buffer_size_u_v_h = (mc_buffer_size_u_v + 0xffff) >> 16;
5199 struct PIC_s *cur_pic = hevc->cur_pic;
5200 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
5201
5202 data32 = READ_VREG(HEVC_SAO_CTRL0);
5203 data32 &= (~0xf);
5204 data32 |= hevc->lcu_size_log2;
5205 WRITE_VREG(HEVC_SAO_CTRL0, data32);
5206
5207 data32 = (hevc->pic_w | hevc->pic_h << 16);
5208 WRITE_VREG(HEVC_SAO_PIC_SIZE, data32);
5209
5210 data32 = ((hevc->lcu_x_num - 1) | (hevc->lcu_y_num - 1) << 16);
5211 WRITE_VREG(HEVC_SAO_PIC_SIZE_LCU, data32);
5212
5213 if (hevc->new_pic)
5214 WRITE_VREG(HEVC_SAO_Y_START_ADDR, 0xffffffff);
5215 #ifdef LOSLESS_COMPRESS_MODE
5216 /*SUPPORT_10BIT*/
5217 if ((get_double_write_mode(hevc) & 0x10) == 0) {
5218 data32 = READ_VREG(HEVC_SAO_CTRL5);
5219 data32 &= (~(0xff << 16));
5220
5221 if (get_double_write_mode(hevc) == 2 ||
5222 get_double_write_mode(hevc) == 3)
5223 data32 |= (0xff<<16);
5224 else if (get_double_write_mode(hevc) == 4)
5225 data32 |= (0x33<<16);
5226
5227 if (hevc->mem_saving_mode == 1)
5228 data32 |= (1 << 9);
5229 else
5230 data32 &= ~(1 << 9);
5231 if (workaround_enable & 1)
5232 data32 |= (1 << 7);
5233 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5234 }
5235 data32 = cur_pic->mc_y_adr;
5236 if (get_double_write_mode(hevc))
5237 WRITE_VREG(HEVC_SAO_Y_START_ADDR, cur_pic->dw_y_adr);
5238
5239 if ((get_double_write_mode(hevc) & 0x10) == 0)
5240 WRITE_VREG(HEVC_CM_BODY_START_ADDR, data32);
5241
5242 if (hevc->mmu_enable)
5243 WRITE_VREG(HEVC_CM_HEADER_START_ADDR, cur_pic->header_adr);
5244 #else
5245 data32 = cur_pic->mc_y_adr;
5246 WRITE_VREG(HEVC_SAO_Y_START_ADDR, data32);
5247 #endif
5248 data32 = (mc_buffer_size_u_v_h << 16) << 1;
5249 WRITE_VREG(HEVC_SAO_Y_LENGTH, data32);
5250
5251 #ifdef LOSLESS_COMPRESS_MODE
5252 /*SUPPORT_10BIT*/
5253 if (get_double_write_mode(hevc))
5254 WRITE_VREG(HEVC_SAO_C_START_ADDR, cur_pic->dw_u_v_adr);
5255 #else
5256 data32 = cur_pic->mc_u_v_adr;
5257 WRITE_VREG(HEVC_SAO_C_START_ADDR, data32);
5258 #endif
5259 data32 = (mc_buffer_size_u_v_h << 16);
5260 WRITE_VREG(HEVC_SAO_C_LENGTH, data32);
5261
5262 #ifdef LOSLESS_COMPRESS_MODE
5263 /*SUPPORT_10BIT*/
5264 if (get_double_write_mode(hevc)) {
5265 WRITE_VREG(HEVC_SAO_Y_WPTR, cur_pic->dw_y_adr);
5266 WRITE_VREG(HEVC_SAO_C_WPTR, cur_pic->dw_u_v_adr);
5267 }
5268 #else
5269 /* multi tile to do... */
5270 data32 = cur_pic->mc_y_adr;
5271 WRITE_VREG(HEVC_SAO_Y_WPTR, data32);
5272
5273 data32 = cur_pic->mc_u_v_adr;
5274 WRITE_VREG(HEVC_SAO_C_WPTR, data32);
5275 #endif
5276 /* DBLK CONFIG HERE */
5277 if (hevc->new_pic) {
5278 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5279 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
5280 data32 = (0xff << 8) | (0x0 << 0);
5281 else
5282 data32 = (0x57 << 8) | /* 1st/2nd write both enable*/
5283 (0x0 << 0); /* h265 video format*/
5284
5285 if (hevc->pic_w >= 1280)
5286 data32 |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5287 data32 &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5288 if (get_double_write_mode(hevc) == 0)
5289 data32 |= (0x1 << 8); /*enable first write*/
5290 else if (get_double_write_mode(hevc) == 0x10)
5291 data32 |= (0x1 << 9); /*double write only*/
5292 else
5293 data32 |= ((0x1 << 8) |(0x1 << 9));
5294
5295 WRITE_VREG(HEVC_DBLK_CFGB, data32);
5296 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5297 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data32);
5298 }
5299 data32 = (hevc->pic_w | hevc->pic_h << 16);
5300 WRITE_VREG(HEVC_DBLK_CFG2, data32);
5301
5302 if ((misc_flag0 >> PCM_ENABLE_FLAG_BIT) & 0x1) {
5303 data32 =
5304 ((misc_flag0 >>
5305 PCM_LOOP_FILTER_DISABLED_FLAG_BIT) &
5306 0x1) << 3;
5307 } else
5308 data32 = 0;
5309 data32 |=
5310 (((params->p.pps_cb_qp_offset & 0x1f) << 4) |
5311 ((params->p.pps_cr_qp_offset
5312 & 0x1f) <<
5313 9));
5314 data32 |=
5315 (hevc->lcu_size ==
5316 64) ? 0 : ((hevc->lcu_size == 32) ? 1 : 2);
5317
5318 WRITE_VREG(HEVC_DBLK_CFG1, data32);
5319
5320 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5321 /*if (debug & 0x80) {*/
5322 data32 = 1 << 28; /* Debug only: sts1 chooses dblk_main*/
5323 WRITE_VREG(HEVC_DBLK_STS1 + 4, data32); /* 0x3510 */
5324 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5325 "[DBLK DEBUG] HEVC1 STS1 : 0x%x\n",
5326 data32);
5327 /*}*/
5328 }
5329 }
5330 #if 0
5331 data32 = READ_VREG(HEVC_SAO_CTRL1);
5332 data32 &= (~0x3000);
5333 data32 |= (hevc->mem_map_mode <<
5334 12);
5335
5336 /* [13:12] axi_aformat,
5337 * 0-Linear, 1-32x32, 2-64x32
5338 */
5339 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5340
5341 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5342 data32 &= (~0x30);
5343 data32 |= (hevc->mem_map_mode <<
5344 4);
5345
5346 /* [5:4] -- address_format
5347 * 00:linear 01:32x32 10:64x32
5348 */
5349 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5350 #else
5351 /* m8baby test1902 */
5352 data32 = READ_VREG(HEVC_SAO_CTRL1);
5353 data32 &= (~0x3000);
5354 data32 |= (hevc->mem_map_mode <<
5355 12);
5356
5357 /* [13:12] axi_aformat, 0-Linear,
5358 * 1-32x32, 2-64x32
5359 */
5360 data32 &= (~0xff0);
5361 /* data32 |= 0x670; // Big-Endian per 64-bit */
5362 data32 |= endian; /* Big-Endian per 64-bit */
5363 data32 &= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5364 if (get_double_write_mode(hevc) == 0)
5365 data32 |= 0x2; /*disable double write*/
5366 else if (get_double_write_mode(hevc) & 0x10)
5367 data32 |= 0x1; /*disable cm*/
5368 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A) {
5369 unsigned int data;
5370 data = (0x57 << 8) | /* 1st/2nd write both enable*/
5371 (0x0 << 0); /* h265 video format*/
5372 if (hevc->pic_w >= 1280)
5373 data |= (0x1 << 4); /*dblk pipeline mode=1 for performance*/
5374 data &= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5375 if (get_double_write_mode(hevc) == 0)
5376 data |= (0x1 << 8); /*enable first write*/
5377 else if (get_double_write_mode(hevc) & 0x10)
5378 data |= (0x1 << 9); /*double write only*/
5379 else
5380 data |= ((0x1 << 8) |(0x1 << 9));
5381 WRITE_VREG(HEVC_DBLK_CFGB, data);
5382 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5383 "[DBLK DEBUG] HEVC1 CFGB : 0x%x\n", data);
5384 }
5385
5386 /* swap uv */
5387 if (hevc->is_used_v4l) {
5388 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21) ||
5389 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21M))
5390 data32 &= ~(1 << 8); /* NV21 */
5391 else
5392 data32 |= (1 << 8); /* NV12 */
5393 }
5394
5395 /*
5396 * [31:24] ar_fifo1_axi_thred
5397 * [23:16] ar_fifo0_axi_thred
5398 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5399 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5400 * [11:08] axi_lendian_C
5401 * [07:04] axi_lendian_Y
5402 * [3] reserved
5403 * [2] clk_forceon
5404 * [1] dw_disable:disable double write output
5405 * [0] cm_disable:disable compress output
5406 */
5407
5408 WRITE_VREG(HEVC_SAO_CTRL1, data32);
5409 if (get_double_write_mode(hevc) & 0x10) {
5410 /* [23:22] dw_v1_ctrl
5411 *[21:20] dw_v0_ctrl
5412 *[19:18] dw_h1_ctrl
5413 *[17:16] dw_h0_ctrl
5414 */
5415 data32 = READ_VREG(HEVC_SAO_CTRL5);
5416 /*set them all 0 for H265_NV21 (no down-scale)*/
5417 data32 &= ~(0xff << 16);
5418 WRITE_VREG(HEVC_SAO_CTRL5, data32);
5419 }
5420
5421 data32 = READ_VREG(HEVCD_IPP_AXIIF_CONFIG);
5422 data32 &= (~0x30);
5423 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5424 data32 |= (hevc->mem_map_mode <<
5425 4);
5426 data32 &= (~0xF);
5427 data32 |= 0xf; /* valid only when double write only */
5428 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5429
5430 /* swap uv */
5431 if (hevc->is_used_v4l) {
5432 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21) ||
5433 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV21M))
5434 data32 |= (1 << 12); /* NV21 */
5435 else
5436 data32 &= ~(1 << 12); /* NV12 */
5437 }
5438
5439 /*
5440 * [3:0] little_endian
5441 * [5:4] address_format 00:linear 01:32x32 10:64x32
5442 * [7:6] reserved
5443 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5444 * [11:10] reserved
5445 * [12] CbCr_byte_swap
5446 * [31:13] reserved
5447 */
5448 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG, data32);
5449 #endif
5450 data32 = 0;
5451 data32_2 = READ_VREG(HEVC_SAO_CTRL0);
5452 data32_2 &= (~0x300);
5453 /* slice_deblocking_filter_disabled_flag = 0;
5454 * ucode has handled it, so read it from ucode directly
5455 */
5456 if (hevc->tile_enabled) {
5457 data32 |=
5458 ((misc_flag0 >>
5459 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5460 0x1) << 0;
5461 data32_2 |=
5462 ((misc_flag0 >>
5463 LOOP_FILER_ACROSS_TILES_ENABLED_FLAG_BIT) &
5464 0x1) << 8;
5465 }
5466 slice_deblocking_filter_disabled_flag = (misc_flag0 >>
5467 SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5468 0x1; /* ucode has handled it, so read it from ucode directly */
5469 if ((misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_BIT))
5470 && (misc_flag0 & (1 << DEBLOCKING_FILTER_OVERRIDE_FLAG_BIT))) {
5471 /* slice_deblocking_filter_disabled_flag =
5472 * (misc_flag0>>SLICE_DEBLOCKING_FILTER_DISABLED_FLAG_BIT)&0x1;
5473 * //ucode has handled it, so read it from ucode directly
5474 */
5475 data32 |= slice_deblocking_filter_disabled_flag << 2;
5476 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5477 hevc_print_cont(hevc, 0,
5478 "(1,%x)", data32);
5479 if (!slice_deblocking_filter_disabled_flag) {
5480 data32 |= (params->p.slice_beta_offset_div2 & 0xf) << 3;
5481 data32 |= (params->p.slice_tc_offset_div2 & 0xf) << 7;
5482 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5483 hevc_print_cont(hevc, 0,
5484 "(2,%x)", data32);
5485 }
5486 } else {
5487 data32 |=
5488 ((misc_flag0 >>
5489 PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5490 0x1) << 2;
5491 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5492 hevc_print_cont(hevc, 0,
5493 "(3,%x)", data32);
5494 if (((misc_flag0 >> PPS_DEBLOCKING_FILTER_DISABLED_FLAG_BIT) &
5495 0x1) == 0) {
5496 data32 |= (params->p.pps_beta_offset_div2 & 0xf) << 3;
5497 data32 |= (params->p.pps_tc_offset_div2 & 0xf) << 7;
5498 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5499 hevc_print_cont(hevc, 0,
5500 "(4,%x)", data32);
5501 }
5502 }
5503 if ((misc_flag0 & (1 << PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT))
5504 && ((misc_flag0 & (1 << SLICE_SAO_LUMA_FLAG_BIT))
5505 || (misc_flag0 & (1 << SLICE_SAO_CHROMA_FLAG_BIT))
5506 || (!slice_deblocking_filter_disabled_flag))) {
5507 data32 |=
5508 ((misc_flag0 >>
5509 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5510 & 0x1) << 1;
5511 data32_2 |=
5512 ((misc_flag0 >>
5513 SLICE_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5514 & 0x1) << 9;
5515 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5516 hevc_print_cont(hevc, 0,
5517 "(5,%x)\n", data32);
5518 } else {
5519 data32 |=
5520 ((misc_flag0 >>
5521 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5522 & 0x1) << 1;
5523 data32_2 |=
5524 ((misc_flag0 >>
5525 PPS_LOOP_FILTER_ACROSS_SLICES_ENABLED_FLAG_BIT)
5526 & 0x1) << 9;
5527 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
5528 hevc_print_cont(hevc, 0,
5529 "(6,%x)\n", data32);
5530 }
5531 WRITE_VREG(HEVC_DBLK_CFG9, data32);
5532 WRITE_VREG(HEVC_SAO_CTRL0, data32_2);
5533 }
5534
5535 #ifdef TEST_NO_BUF
5536 static unsigned char test_flag = 1;
5537 #endif
5538
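/*
 * Rescan the picture pool: for idle pictures (not referenced and not queued
 * for output) whose size no longer matches pic_w/pic_h, release their buffer
 * and either retire them (when more than work_pic_num entries are allocated)
 * or resize them; re-activate spare entries if fewer than work_pic_num
 * remain, then free any buffers left unused.
 */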
5539 static void pic_list_process(struct hevc_state_s *hevc)
5540 {
5541 int work_pic_num = get_work_pic_num(hevc);
5542 int alloc_pic_count = 0;
5543 int i;
5544 struct PIC_s *pic;
5545 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5546 pic = hevc->m_PIC[i];
5547 if (pic == NULL || pic->index == -1)
5548 continue;
5549 alloc_pic_count++;
5550 if (pic->output_mark == 0 && pic->referenced == 0
5551 && pic->output_ready == 0
5552 && (pic->width != hevc->pic_w ||
5553 pic->height != hevc->pic_h)
5554 ) {
5555 set_buf_unused(hevc, pic->BUF_index);
5556 pic->BUF_index = -1;
5557 if (alloc_pic_count > work_pic_num) {
5558 pic->width = 0;
5559 pic->height = 0;
5560 pic->index = -1;
5561 } else {
5562 pic->width = hevc->pic_w;
5563 pic->height = hevc->pic_h;
5564 }
5565 }
5566 }
5567 if (alloc_pic_count < work_pic_num) {
5568 int new_count = alloc_pic_count;
5569 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5570 pic = hevc->m_PIC[i];
5571 if (pic && pic->index == -1) {
5572 pic->index = i;
5573 pic->BUF_index = -1;
5574 pic->width = hevc->pic_w;
5575 pic->height = hevc->pic_h;
5576 new_count++;
5577 if (new_count >=
5578 work_pic_num)
5579 break;
5580 }
5581 }
5582
5583 }
5584 dealloc_unused_buf(hevc);
5585 if (get_alloc_pic_count(hevc)
5586 != alloc_pic_count) {
5587 hevc_print_cont(hevc, 0,
5588 "%s: work_pic_num is %d, Change alloc_pic_count from %d to %d\n",
5589 __func__,
5590 work_pic_num,
5591 alloc_pic_count,
5592 get_alloc_pic_count(hevc));
5593 }
5594 }
5595
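/*
 * Release the MMU (scatter) page allocations of idle pictures: entries that
 * are not referenced, not marked for output and not waiting to be displayed.
 */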
5596 static void recycle_mmu_bufs(struct hevc_state_s *hevc)
5597 {
5598 int i;
5599 struct PIC_s *pic;
5600 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5601 pic = hevc->m_PIC[i];
5602 if (pic == NULL || pic->index == -1)
5603 continue;
5604
5605 if (pic->output_mark == 0 && pic->referenced == 0
5606 && pic->output_ready == 0
5607 && pic->scatter_alloc
5608 )
5609 release_pic_mmu_buf(hevc, pic);
5610 }
5611
5612 }
5613
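/*
 * Pick a free picture for the frame being decoded: choose the idle entry
 * with the smallest POC that matches the current width/height, allocate its
 * data/MV/MMU buffers on demand, and initialize its per-frame state (POC,
 * pic_struct, bit depth, conformance window, ...) from rpm_param.
 */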
5614 static struct PIC_s *get_new_pic(struct hevc_state_s *hevc,
5615 union param_u *rpm_param)
5616 {
5617 struct vdec_s *vdec = hw_to_vdec(hevc);
5618 struct PIC_s *new_pic = NULL;
5619 struct PIC_s *pic;
5620 int i;
5621 int ret;
5622
5623 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5624 pic = hevc->m_PIC[i];
5625 if (pic == NULL || pic->index == -1)
5626 continue;
5627
5628 if (pic->output_mark == 0 && pic->referenced == 0
5629 && pic->output_ready == 0
5630 && pic->width == hevc->pic_w
5631 && pic->height == hevc->pic_h
5632 && pic->vf_ref == 0
5633 ) {
5634 if (new_pic) {
5635 if (new_pic->POC != INVALID_POC) {
5636 if (pic->POC == INVALID_POC ||
5637 pic->POC < new_pic->POC)
5638 new_pic = pic;
5639 }
5640 } else
5641 new_pic = pic;
5642 }
5643 }
5644
5645 if (new_pic == NULL)
5646 return NULL;
5647
5648 if (new_pic->BUF_index < 0) {
5649 if (alloc_buf(hevc) < 0)
5650 return NULL;
5651 else {
5652 if (config_pic(hevc, new_pic) < 0) {
5653 dealloc_pic_buf(hevc, new_pic);
5654 return NULL;
5655 }
5656 }
5657 new_pic->width = hevc->pic_w;
5658 new_pic->height = hevc->pic_h;
5659 set_canvas(hevc, new_pic);
5660
5661 init_pic_list_hw(hevc);
5662 }
5663
5664 if (new_pic) {
5665 new_pic->double_write_mode =
5666 get_double_write_mode(hevc);
5667 if (new_pic->double_write_mode)
5668 set_canvas(hevc, new_pic);
5669
5670 #ifdef TEST_NO_BUF
5671 if (test_flag) {
5672 test_flag = 0;
5673 return NULL;
5674 } else
5675 test_flag = 1;
5676 #endif
5677 if (get_mv_buf(hevc, new_pic) < 0)
5678 return NULL;
5679
5680 if (hevc->mmu_enable) {
5681 ret = H265_alloc_mmu(hevc, new_pic,
5682 rpm_param->p.bit_depth,
5683 hevc->frame_mmu_map_addr);
5684 if (ret != 0) {
5685 put_mv_buf(hevc, new_pic);
5686 hevc_print(hevc, 0,
5687 "can't alloc need mmu1,idx %d ret =%d\n",
5688 new_pic->decode_idx,
5689 ret);
5690 return NULL;
5691 }
5692 }
5693 new_pic->referenced = 1;
5694 new_pic->decode_idx = hevc->decode_idx;
5695 new_pic->slice_idx = 0;
5696 new_pic->referenced = 1;
5697 new_pic->output_mark = 0;
5698 new_pic->recon_mark = 0;
5699 new_pic->error_mark = 0;
5700 new_pic->dis_mark = 0;
5701 /* new_pic->output_ready = 0; */
5702 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5703 new_pic->ip_mode = (!new_pic->num_reorder_pic &&
5704 !(vdec->slave || vdec->master) &&
5705 !disable_ip_mode) ? true : false;
5706 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5707 new_pic->POC = hevc->curr_POC;
5708 new_pic->pic_struct = hevc->curr_pic_struct;
5709 if (new_pic->aux_data_buf)
5710 release_aux_data(hevc, new_pic);
5711 new_pic->mem_saving_mode =
5712 hevc->mem_saving_mode;
5713 new_pic->bit_depth_luma =
5714 hevc->bit_depth_luma;
5715 new_pic->bit_depth_chroma =
5716 hevc->bit_depth_chroma;
5717 new_pic->video_signal_type =
5718 hevc->video_signal_type;
5719
5720 new_pic->conformance_window_flag =
5721 hevc->param.p.conformance_window_flag;
5722 new_pic->conf_win_left_offset =
5723 hevc->param.p.conf_win_left_offset;
5724 new_pic->conf_win_right_offset =
5725 hevc->param.p.conf_win_right_offset;
5726 new_pic->conf_win_top_offset =
5727 hevc->param.p.conf_win_top_offset;
5728 new_pic->conf_win_bottom_offset =
5729 hevc->param.p.conf_win_bottom_offset;
5730 new_pic->chroma_format_idc =
5731 hevc->param.p.chroma_format_idc;
5732
5733 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5734 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5735 __func__, new_pic->index,
5736 new_pic->BUF_index, new_pic->decode_idx,
5737 new_pic->POC);
5738
5739 }
5740 if (pic_list_debug & 0x1) {
5741 dump_pic_list(hevc);
5742 pr_err("\n*******************************************\n");
5743 }
5744
5745 return new_pic;
5746 }
5747
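/*
 * V4L2 variant of get_new_pic(): walk the capture buffer pool and either
 * reuse an idle buffer already owned by the decoder or take a buffer queued
 * from m2m (allocating and configuring it first), then initialize the
 * picture state the same way as get_new_pic().
 */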
5748 static struct PIC_s *v4l_get_new_pic(struct hevc_state_s *hevc,
5749 union param_u *rpm_param)
5750 {
5751 struct vdec_s *vdec = hw_to_vdec(hevc);
5752 int ret;
5753 struct aml_vcodec_ctx * v4l = hevc->v4l2_ctx;
5754 struct v4l_buff_pool *pool = &v4l->cap_pool;
5755 struct PIC_s *new_pic = NULL;
5756 struct PIC_s *pic = NULL;
5757 int i;
5758
5759 for (i = 0; i < pool->in; ++i) {
5760 u32 state = (pool->seq[i] >> 16);
5761 u32 index = (pool->seq[i] & 0xffff);
5762
5763 switch (state) {
5764 case V4L_CAP_BUFF_IN_DEC:
5765 pic = hevc->m_PIC[i];
5766 if (pic && (pic->index != -1) &&
5767 (pic->output_mark == 0) &&
5768 (pic->referenced == 0) &&
5769 (pic->output_ready == 0) &&
5770 (pic->width == hevc->pic_w) &&
5771 (pic->height == hevc->pic_h) &&
5772 (pic->vf_ref == 0) &&
5773 pic->cma_alloc_addr) {
5774 new_pic = pic;
5775 }
5776 break;
5777 case V4L_CAP_BUFF_IN_M2M:
5778 pic = hevc->m_PIC[index];
5779 pic->width = hevc->pic_w;
5780 pic->height = hevc->pic_h;
5781 if ((pic->index != -1) &&
5782 !v4l_alloc_buf(hevc, pic)) {
5783 v4l_config_pic(hevc, pic);
5784 init_pic_list_hw(hevc);
5785 new_pic = pic;
5786 }
5787 break;
5788 default:
5789 pr_err("v4l buffer state err %d.\n", state);
5790 break;
5791 }
5792
5793 if (new_pic)
5794 break;
5795 }
5796
5797 if (new_pic == NULL)
5798 return NULL;
5799
5800 new_pic->double_write_mode = get_double_write_mode(hevc);
5801 if (new_pic->double_write_mode)
5802 set_canvas(hevc, new_pic);
5803
5804 if (get_mv_buf(hevc, new_pic) < 0)
5805 return NULL;
5806
5807 if (hevc->mmu_enable) {
5808 ret = H265_alloc_mmu(hevc, new_pic,
5809 rpm_param->p.bit_depth,
5810 hevc->frame_mmu_map_addr);
5811 if (ret != 0) {
5812 put_mv_buf(hevc, new_pic);
5813 hevc_print(hevc, 0,
5814 "can't alloc need mmu1,idx %d ret =%d\n",
5815 new_pic->decode_idx, ret);
5816 return NULL;
5817 }
5818 }
5819
5820 new_pic->referenced = 1;
5821 new_pic->decode_idx = hevc->decode_idx;
5822 new_pic->slice_idx = 0;
5823 new_pic->referenced = 1;
5824 new_pic->output_mark = 0;
5825 new_pic->recon_mark = 0;
5826 new_pic->error_mark = 0;
5827 new_pic->dis_mark = 0;
5828 /* new_pic->output_ready = 0; */
5829 new_pic->num_reorder_pic = rpm_param->p.sps_num_reorder_pics_0;
5830 new_pic->ip_mode = (!new_pic->num_reorder_pic &&
5831 !(vdec->slave || vdec->master) &&
5832 !disable_ip_mode) ? true : false;
5833 new_pic->losless_comp_body_size = hevc->losless_comp_body_size;
5834 new_pic->POC = hevc->curr_POC;
5835 new_pic->pic_struct = hevc->curr_pic_struct;
5836
5837 if (new_pic->aux_data_buf)
5838 release_aux_data(hevc, new_pic);
5839 new_pic->mem_saving_mode =
5840 hevc->mem_saving_mode;
5841 new_pic->bit_depth_luma =
5842 hevc->bit_depth_luma;
5843 new_pic->bit_depth_chroma =
5844 hevc->bit_depth_chroma;
5845 new_pic->video_signal_type =
5846 hevc->video_signal_type;
5847
5848 new_pic->conformance_window_flag =
5849 hevc->param.p.conformance_window_flag;
5850 new_pic->conf_win_left_offset =
5851 hevc->param.p.conf_win_left_offset;
5852 new_pic->conf_win_right_offset =
5853 hevc->param.p.conf_win_right_offset;
5854 new_pic->conf_win_top_offset =
5855 hevc->param.p.conf_win_top_offset;
5856 new_pic->conf_win_bottom_offset =
5857 hevc->param.p.conf_win_bottom_offset;
5858 new_pic->chroma_format_idc =
5859 hevc->param.p.chroma_format_idc;
5860
5861 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
5862 "%s: index %d, buf_idx %d, decode_idx %d, POC %d\n",
5863 __func__, new_pic->index,
5864 new_pic->BUF_index, new_pic->decode_idx,
5865 new_pic->POC);
5866
5867 return new_pic;
5868 }
5869
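/* Return the number of pictures currently marked output_ready. */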
5870 static int get_display_pic_num(struct hevc_state_s *hevc)
5871 {
5872 int i;
5873 struct PIC_s *pic;
5874 int num = 0;
5875
5876 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
5877 pic = hevc->m_PIC[i];
5878 if (pic == NULL ||
5879 pic->index == -1)
5880 continue;
5881
5882 if (pic->output_ready == 1)
5883 num++;
5884 }
5885 return num;
5886 }
5887
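/*
 * Flush the DPB: mark 'pic' for output (applying the PB-skip policy), then
 * repeatedly take displayable pictures from output_pic(), recycling frames
 * with error_mark or in debug/no-display modes and sending the rest to the
 * display queue, and finally clear the referenced flag of all buffers.
 */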
5888 static void flush_output(struct hevc_state_s *hevc, struct PIC_s *pic)
5889 {
5890 struct PIC_s *pic_display;
5891
5892 if (pic) {
5893 /*PB skip control */
5894 if (pic->error_mark == 0 && hevc->PB_skip_mode == 1) {
5895 /* start decoding after first I */
5896 hevc->ignore_bufmgr_error |= 0x1;
5897 }
5898 if (hevc->ignore_bufmgr_error & 1) {
5899 if (hevc->PB_skip_count_after_decoding > 0)
5900 hevc->PB_skip_count_after_decoding--;
5901 else {
5902 /* start displaying */
5903 hevc->ignore_bufmgr_error |= 0x2;
5904 }
5905 }
5906 if (pic->POC != INVALID_POC && !pic->ip_mode)
5907 pic->output_mark = 1;
5908 pic->recon_mark = 1;
5909 }
5910 do {
5911 pic_display = output_pic(hevc, 1);
5912
5913 if (pic_display) {
5914 pic_display->referenced = 0;
5915 put_mv_buf(hevc, pic_display);
5916 if ((pic_display->error_mark
5917 && ((hevc->ignore_bufmgr_error & 0x2) == 0))
5918 || (get_dbg_flag(hevc) &
5919 H265_DEBUG_DISPLAY_CUR_FRAME)
5920 || (get_dbg_flag(hevc) &
5921 H265_DEBUG_NO_DISPLAY)) {
5922 pic_display->output_ready = 0;
5923 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
5924 hevc_print(hevc, 0,
5925 "[BM] Display: POC %d, ",
5926 pic_display->POC);
5927 hevc_print_cont(hevc, 0,
5928 "decoding index %d ==> ",
5929 pic_display->decode_idx);
5930 hevc_print_cont(hevc, 0,
5931 "Debug mode or error, recycle it\n");
5932 }
5933 /*
5934 * Here the pic/frame error_mark is 1,
5935 * and it won't be displayed, so increase
5936 * the drop count
5937 */
5938 hevc->gvs->drop_frame_count++;
5939 /* error frame count also need increase */
5940 hevc->gvs->error_frame_count++;
5941 } else {
5942 if (hevc->i_only & 0x1
5943 && pic_display->slice_type != 2) {
5944 pic_display->output_ready = 0;
5945 } else {
5946 prepare_display_buf(hevc, pic_display);
5947 if (get_dbg_flag(hevc)
5948 & H265_DEBUG_BUFMGR) {
5949 hevc_print(hevc, 0,
5950 "[BM] flush Display: POC %d, ",
5951 pic_display->POC);
5952 hevc_print_cont(hevc, 0,
5953 "decoding index %d\n",
5954 pic_display->decode_idx);
5955 }
5956 }
5957 }
5958 }
5959 } while (pic_display);
5960 clear_referenced_flag(hevc);
5961 }
5962
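/*
 * Append the prefix/suffix aux data (SEI / Dolby Vision metadata) reported
 * through HEVC_AUX_DATA_SIZE and hevc->aux_addr to pic->aux_data_buf,
 * prepending an 8-byte header (payload length, tag, padding) to each
 * payload and filtering payloads according to dv_meta_flag below.
 */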
5963 /*
5964 * dv_meta_flag: 0, all aux data; 1, dolby meta only; 2, not include dolby meta
5965 */
5966 static void set_aux_data(struct hevc_state_s *hevc,
5967 struct PIC_s *pic, unsigned char suffix_flag,
5968 unsigned char dv_meta_flag)
5969 {
5970 int i;
5971 unsigned short *aux_adr;
5972 unsigned int size_reg_val =
5973 READ_VREG(HEVC_AUX_DATA_SIZE);
5974 unsigned int aux_count = 0;
5975 int aux_size = 0;
5976 if (pic == NULL || 0 == aux_data_is_avaible(hevc))
5977 return;
5978
5979 if (hevc->aux_data_dirty ||
5980 hevc->m_ins_flag == 0) {
5981
5982 hevc->aux_data_dirty = 0;
5983 }
5984
5985 if (suffix_flag) {
5986 aux_adr = (unsigned short *)
5987 (hevc->aux_addr +
5988 hevc->prefix_aux_size);
5989 aux_count =
5990 ((size_reg_val & 0xffff) << 4)
5991 >> 1;
5992 aux_size =
5993 hevc->suffix_aux_size;
5994 } else {
5995 aux_adr =
5996 (unsigned short *)hevc->aux_addr;
5997 aux_count =
5998 ((size_reg_val >> 16) << 4)
5999 >> 1;
6000 aux_size =
6001 hevc->prefix_aux_size;
6002 }
6003 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6004 hevc_print(hevc, 0,
6005 "%s:pic 0x%p old size %d count %d,suf %d dv_flag %d\r\n",
6006 __func__, pic, pic->aux_data_size,
6007 aux_count, suffix_flag, dv_meta_flag);
6008 }
6009 if (aux_size > 0 && aux_count > 0) {
6010 int heads_size = 0;
6011 int new_size;
6012 char *new_buf;
6013
6014 for (i = 0; i < aux_count; i++) {
6015 unsigned char tag = aux_adr[i] >> 8;
6016 if (tag != 0 && tag != 0xff) {
6017 if (dv_meta_flag == 0)
6018 heads_size += 8;
6019 else if (dv_meta_flag == 1 && tag == 0x1)
6020 heads_size += 8;
6021 else if (dv_meta_flag == 2 && tag != 0x1)
6022 heads_size += 8;
6023 }
6024 }
6025 new_size = pic->aux_data_size + aux_count + heads_size;
6026 new_buf = vzalloc(new_size);
6027 if (new_buf) {
6028 unsigned char valid_tag = 0;
6029 unsigned char *h =
6030 new_buf +
6031 pic->aux_data_size;
6032 unsigned char *p = h + 8;
6033 int len = 0;
6034 int padding_len = 0;
6035
6036 if (pic->aux_data_buf) {
6037 memcpy(new_buf, pic->aux_data_buf, pic->aux_data_size);
6038 vfree(pic->aux_data_buf);
6039 }
6040 pic->aux_data_buf = new_buf;
6041
6042 for (i = 0; i < aux_count; i += 4) {
6043 int ii;
6044 unsigned char tag = aux_adr[i + 3] >> 8;
6045 if (tag != 0 && tag != 0xff) {
6046 if (dv_meta_flag == 0)
6047 valid_tag = 1;
6048 else if (dv_meta_flag == 1
6049 && tag == 0x1)
6050 valid_tag = 1;
6051 else if (dv_meta_flag == 2
6052 && tag != 0x1)
6053 valid_tag = 1;
6054 else
6055 valid_tag = 0;
6056 if (valid_tag && len > 0) {
6057 pic->aux_data_size +=
6058 (len + 8);
6059 h[0] = (len >> 24)
6060 & 0xff;
6061 h[1] = (len >> 16)
6062 & 0xff;
6063 h[2] = (len >> 8)
6064 & 0xff;
6065 h[3] = (len >> 0)
6066 & 0xff;
6067 h[6] =
6068 (padding_len >> 8)
6069 & 0xff;
6070 h[7] = (padding_len)
6071 & 0xff;
6072 h += (len + 8);
6073 p += 8;
6074 len = 0;
6075 padding_len = 0;
6076 }
6077 if (valid_tag) {
6078 h[4] = tag;
6079 h[5] = 0;
6080 h[6] = 0;
6081 h[7] = 0;
6082 }
6083 }
6084 if (valid_tag) {
6085 for (ii = 0; ii < 4; ii++) {
6086 unsigned short aa =
6087 aux_adr[i + 3
6088 - ii];
6089 *p = aa & 0xff;
6090 p++;
6091 len++;
6092 /*if ((aa >> 8) == 0xff)
6093 padding_len++;*/
6094 }
6095 }
6096 }
6097 if (len > 0) {
6098 pic->aux_data_size += (len + 8);
6099 h[0] = (len >> 24) & 0xff;
6100 h[1] = (len >> 16) & 0xff;
6101 h[2] = (len >> 8) & 0xff;
6102 h[3] = (len >> 0) & 0xff;
6103 h[6] = (padding_len >> 8) & 0xff;
6104 h[7] = (padding_len) & 0xff;
6105 }
6106 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
6107 hevc_print(hevc, 0,
6108 "aux: (size %d) suffix_flag %d\n",
6109 pic->aux_data_size, suffix_flag);
6110 for (i = 0; i < pic->aux_data_size; i++) {
6111 hevc_print_cont(hevc, 0,
6112 "%02x ", pic->aux_data_buf[i]);
6113 if (((i + 1) & 0xf) == 0)
6114 hevc_print_cont(hevc, 0, "\n");
6115 }
6116 hevc_print_cont(hevc, 0, "\n");
6117 }
6118
6119 } else {
6120 hevc_print(hevc, 0, "new buf alloc failed\n");
6121 if (pic->aux_data_buf)
6122 vfree(pic->aux_data_buf);
6123 pic->aux_data_buf = NULL;
6124 pic->aux_data_size = 0;
6125 }
6126 }
6127
6128 }
6129
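/* Free pic's aux data buffer and reset its size. */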
6130 static void release_aux_data(struct hevc_state_s *hevc,
6131 struct PIC_s *pic)
6132 {
6133 if (pic->aux_data_buf)
6134 vfree(pic->aux_data_buf);
6135 pic->aux_data_buf = NULL;
6136 pic->aux_data_size = 0;
6137 }
6138
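/*
 * Finish the previously decoded picture (iPrevPOC, or decoded_poc in
 * multi-instance mode): apply the PB-skip accounting, trim cur_pic's MMU
 * allocation to the 4k pages actually used (single-instance only), mark the
 * picture for output and drain displayable pictures; for IDR pictures flush
 * the whole DPB via flush_output() instead.
 */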
6139 static inline void hevc_pre_pic(struct hevc_state_s *hevc,
6140 struct PIC_s *pic)
6141 {
6142
6143 /* prev pic */
6144 /*if (hevc->curr_POC != 0) {*/
6145 int decoded_poc = hevc->iPrevPOC;
6146 #ifdef MULTI_INSTANCE_SUPPORT
6147 if (hevc->m_ins_flag) {
6148 decoded_poc = hevc->decoded_poc;
6149 hevc->decoded_poc = INVALID_POC;
6150 }
6151 #endif
6152 if (hevc->m_nalUnitType != NAL_UNIT_CODED_SLICE_IDR
6153 && hevc->m_nalUnitType !=
6154 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6155 struct PIC_s *pic_display;
6156
6157 pic = get_pic_by_POC(hevc, decoded_poc);
6158 if (pic && (pic->POC != INVALID_POC)) {
6159 struct vdec_s *vdec = hw_to_vdec(hevc);
6160
6161 /*PB skip control */
6162 if (pic->error_mark == 0
6163 && hevc->PB_skip_mode == 1) {
6164 /* start decoding after
6165 * first I
6166 */
6167 hevc->ignore_bufmgr_error |= 0x1;
6168 }
6169 if (hevc->ignore_bufmgr_error & 1) {
6170 if (hevc->PB_skip_count_after_decoding > 0) {
6171 hevc->PB_skip_count_after_decoding--;
6172 } else {
6173 /* start displaying */
6174 hevc->ignore_bufmgr_error |= 0x2;
6175 }
6176 }
6177 if (hevc->mmu_enable
6178 && ((hevc->double_write_mode & 0x10) == 0)) {
6179 if (!hevc->m_ins_flag) {
6180 hevc->used_4k_num =
6181 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
6182
6183 if ((!is_skip_decoding(hevc, pic)) &&
6184 (hevc->used_4k_num >= 0) &&
6185 (hevc->cur_pic->scatter_alloc
6186 == 1)) {
6187 hevc_print(hevc,
6188 H265_DEBUG_BUFMGR_MORE,
6189 "%s pic index %d scatter_alloc %d page_start %d\n",
6190 "decoder_mmu_box_free_idx_tail",
6191 hevc->cur_pic->index,
6192 hevc->cur_pic->scatter_alloc,
6193 hevc->used_4k_num);
6194 hevc_mmu_dma_check(hw_to_vdec(hevc));
6195 decoder_mmu_box_free_idx_tail(
6196 hevc->mmu_box,
6197 hevc->cur_pic->index,
6198 hevc->used_4k_num);
6199 hevc->cur_pic->scatter_alloc
6200 = 2;
6201 }
6202 hevc->used_4k_num = -1;
6203 }
6204 }
6205 if (!pic->ip_mode)
6206 pic->output_mark = 1;
6207 pic->recon_mark = 1;
6208 pic->dis_mark = 1;
6209 if (vdec->mvfrm) {
6210 pic->frame_size = vdec->mvfrm->frame_size;
6211 pic->hw_decode_time = (u32)vdec->mvfrm->hw_decode_time;
6212 }
6213 }
6214 do {
6215 pic_display = output_pic(hevc, 0);
6216
6217 if (pic_display) {
6218 if ((pic_display->error_mark &&
6219 ((hevc->ignore_bufmgr_error &
6220 0x2) == 0))
6221 || (get_dbg_flag(hevc) &
6222 H265_DEBUG_DISPLAY_CUR_FRAME)
6223 || (get_dbg_flag(hevc) &
6224 H265_DEBUG_NO_DISPLAY)) {
6225 pic_display->output_ready = 0;
6226 if (get_dbg_flag(hevc) &
6227 H265_DEBUG_BUFMGR) {
6228 hevc_print(hevc, 0,
6229 "[BM] Display: POC %d, ",
6230 pic_display->POC);
6231 hevc_print_cont(hevc, 0,
6232 "decoding index %d ==> ",
6233 pic_display->
6234 decode_idx);
6235 hevc_print_cont(hevc, 0,
6236 "Debug or err,recycle it\n");
6237 }
6238 /*
6239 * Here the pic/frame error_mark is 1,
6240 * and it won't be displayed, so increase
6241 * the drop count
6242 */
6243 hevc->gvs->drop_frame_count++;
6244 /* error frame count also need increase */
6245 hevc->gvs->error_frame_count++;
6246 } else {
6247 if (hevc->i_only & 0x1
6248 && pic_display->
6249 slice_type != 2) {
6250 pic_display->output_ready = 0;
6251 } else {
6252 prepare_display_buf
6253 (hevc,
6254 pic_display);
6255 if (get_dbg_flag(hevc) &
6256 H265_DEBUG_BUFMGR) {
6257 hevc_print(hevc, 0,
6258 "[BM] Display: POC %d, ",
6259 pic_display->POC);
6260 hevc_print_cont(hevc, 0,
6261 "decoding index %d\n",
6262 pic_display->
6263 decode_idx);
6264 }
6265 }
6266 }
6267 }
6268 } while (pic_display);
6269 } else {
6270 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6271 hevc_print(hevc, 0,
6272 "[BM] current pic is IDR, ");
6273 hevc_print(hevc, 0,
6274 "clear referenced flag of all buffers\n");
6275 }
6276 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
6277 dump_pic_list(hevc);
6278 if (hevc->vf_pre_count == 1 &&
6279 hevc->first_pic_flag == 1) {
6280 hevc->first_pic_flag = 2;
6281 pic = NULL;
6282 }
6283 else
6284 pic = get_pic_by_POC(hevc, decoded_poc);
6285
6286 flush_output(hevc, pic);
6287 }
6288
6289 }
6290
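/*
 * Called before switching to a new picture: compare the number of LCUs the
 * parser reports as decoded against the LCU count of the previous picture
 * (lcu_x_num_pre * lcu_y_num_pre) and set error_mark on cur_pic if it is
 * incomplete; also propagate header-error and over-decode conditions.
 */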
6291 static void check_pic_decoded_error_pre(struct hevc_state_s *hevc,
6292 int decoded_lcu)
6293 {
6294 int current_lcu_idx = decoded_lcu;
6295 if (decoded_lcu < 0)
6296 return;
6297
6298 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6299 hevc_print(hevc, 0,
6300 "cur lcu idx = %d, (total %d)\n",
6301 current_lcu_idx, hevc->lcu_total);
6302 }
6303 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6304 if (hevc->first_pic_after_recover) {
6305 if (current_lcu_idx !=
6306 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6307 hevc->cur_pic->error_mark = 1;
6308 } else {
6309 if (hevc->lcu_x_num_pre != 0
6310 && hevc->lcu_y_num_pre != 0
6311 && current_lcu_idx != 0
6312 && current_lcu_idx <
6313 ((hevc->lcu_x_num_pre*hevc->lcu_y_num_pre) - 1))
6314 hevc->cur_pic->error_mark = 1;
6315 }
6316 if (hevc->cur_pic->error_mark) {
6317 hevc_print(hevc, 0,
6318 "cur lcu idx = %d, (total %d), set error_mark\n",
6319 current_lcu_idx,
6320 hevc->lcu_x_num_pre*hevc->lcu_y_num_pre);
6321 if (is_log_enable(hevc))
6322 add_log(hevc,
6323 "cur lcu idx = %d, (total %d), set error_mark",
6324 current_lcu_idx,
6325 hevc->lcu_x_num_pre *
6326 hevc->lcu_y_num_pre);
6327
6328 }
6329
6330 }
6331 if (hevc->cur_pic && hevc->head_error_flag) {
6332 hevc->cur_pic->error_mark = 1;
6333 hevc_print(hevc, 0,
6334 "head has error, set error_mark\n");
6335 }
6336
6337 if ((error_handle_policy & 0x80) == 0) {
6338 if (hevc->over_decode && hevc->cur_pic) {
6339 hevc_print(hevc, 0,
6340 "over decode, set error_mark\n");
6341 hevc->cur_pic->error_mark = 1;
6342 }
6343 }
6344
6345 hevc->lcu_x_num_pre = hevc->lcu_x_num;
6346 hevc->lcu_y_num_pre = hevc->lcu_y_num;
6347 }
6348
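/*
 * Same check as check_pic_decoded_error_pre(), but against the current
 * picture's LCU count (lcu_x_num * lcu_y_num); on error it may also flush
 * the DPB when cur_pic's POC has fallen too far behind iPrevPOC.
 */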
6349 static void check_pic_decoded_error(struct hevc_state_s *hevc,
6350 int decoded_lcu)
6351 {
6352 int current_lcu_idx = decoded_lcu;
6353 if (decoded_lcu < 0)
6354 return;
6355
6356 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
6357 hevc_print(hevc, 0,
6358 "cur lcu idx = %d, (total %d)\n",
6359 current_lcu_idx, hevc->lcu_total);
6360 }
6361 if ((error_handle_policy & 0x20) == 0 && hevc->cur_pic != NULL) {
6362 if (hevc->lcu_x_num != 0
6363 && hevc->lcu_y_num != 0
6364 && current_lcu_idx != 0
6365 && current_lcu_idx <
6366 ((hevc->lcu_x_num*hevc->lcu_y_num) - 1))
6367 hevc->cur_pic->error_mark = 1;
6368 if (hevc->cur_pic->error_mark) {
6369 hevc_print(hevc, 0,
6370 "cur lcu idx = %d, (total %d), set error_mark\n",
6371 current_lcu_idx,
6372 hevc->lcu_x_num*hevc->lcu_y_num);
6373 if (((hevc->i_only & 0x4) == 0) && hevc->cur_pic->POC && (hevc->cur_pic->slice_type == 0)
6374 && ((hevc->cur_pic->POC + MAX_BUF_NUM) < hevc->iPrevPOC)) {
6375 hevc_print(hevc, 0,
6376 "Flush.. num_reorder_pic %d pic->POC %d hevc->iPrevPOC %d\n",
6377 hevc->sps_num_reorder_pics_0, hevc->cur_pic->POC, hevc->iPrevPOC);
6378 flush_output(hevc, get_pic_by_POC(hevc, hevc->cur_pic->POC));
6379 }
6380 if (is_log_enable(hevc))
6381 add_log(hevc,
6382 "cur lcu idx = %d, (total %d), set error_mark",
6383 current_lcu_idx,
6384 hevc->lcu_x_num *
6385 hevc->lcu_y_num);
6386
6387 }
6388
6389 }
6390 if (hevc->cur_pic && hevc->head_error_flag) {
6391 hevc->cur_pic->error_mark = 1;
6392 hevc_print(hevc, 0,
6393 "head has error, set error_mark\n");
6394 }
6395
6396 if ((error_handle_policy & 0x80) == 0) {
6397 if (hevc->over_decode && hevc->cur_pic) {
6398 hevc_print(hevc, 0,
6399 "over decode, set error_mark\n");
6400 hevc->cur_pic->error_mark = 1;
6401 }
6402 }
6403 }
6404
6405 /* Only after one field or one frame has been decoded
6406 * can this function be called to collect its QoS info. */
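/*
 * The statistics (min/avg/max QP, skip ratio and motion vectors) are read
 * from the HEVC_MV_INFO/HEVC_QP_INFO/HEVC_SKIP_INFO registers on chips
 * before G12A, and from the HEVC_PIC_QUALITY_CTRL/DATA counters on G12A
 * and later, then stored in hevc->cur_pic.
 */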
6407 static void get_picture_qos_info(struct hevc_state_s *hevc)
6408 {
6409 struct PIC_s *picture = hevc->cur_pic;
6410
6411 /*
6412 #define DEBUG_QOS
6413 */
6414
6415 if (!hevc->cur_pic)
6416 return;
6417
6418 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A) {
6419 unsigned char a[3];
6420 unsigned char i, j, t;
6421 unsigned long data;
6422
6423 data = READ_VREG(HEVC_MV_INFO);
6424 if (picture->slice_type == I_SLICE)
6425 data = 0;
6426 a[0] = data & 0xff;
6427 a[1] = (data >> 8) & 0xff;
6428 a[2] = (data >> 16) & 0xff;
6429
6430 for (i = 0; i < 3; i++)
6431 for (j = i+1; j < 3; j++) {
6432 if (a[j] < a[i]) {
6433 t = a[j];
6434 a[j] = a[i];
6435 a[i] = t;
6436 } else if (a[j] == a[i]) {
6437 a[i]++;
6438 t = a[j];
6439 a[j] = a[i];
6440 a[i] = t;
6441 }
6442 }
6443 picture->max_mv = a[2];
6444 picture->avg_mv = a[1];
6445 picture->min_mv = a[0];
6446 #ifdef DEBUG_QOS
6447 hevc_print(hevc, 0, "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
6448 data, a[0], a[1], a[2]);
6449 #endif
6450
6451 data = READ_VREG(HEVC_QP_INFO);
6452 a[0] = data & 0x1f;
6453 a[1] = (data >> 8) & 0x3f;
6454 a[2] = (data >> 16) & 0x7f;
6455
6456 for (i = 0; i < 3; i++)
6457 for (j = i+1; j < 3; j++) {
6458 if (a[j] < a[i]) {
6459 t = a[j];
6460 a[j] = a[i];
6461 a[i] = t;
6462 } else if (a[j] == a[i]) {
6463 a[i]++;
6464 t = a[j];
6465 a[j] = a[i];
6466 a[i] = t;
6467 }
6468 }
6469 picture->max_qp = a[2];
6470 picture->avg_qp = a[1];
6471 picture->min_qp = a[0];
6472 #ifdef DEBUG_QOS
6473 hevc_print(hevc, 0, "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
6474 data, a[0], a[1], a[2]);
6475 #endif
6476
6477 data = READ_VREG(HEVC_SKIP_INFO);
6478 a[0] = data & 0x1f;
6479 a[1] = (data >> 8) & 0x3f;
6480 a[2] = (data >> 16) & 0x7f;
6481
6482 for (i = 0; i < 3; i++)
6483 for (j = i+1; j < 3; j++) {
6484 if (a[j] < a[i]) {
6485 t = a[j];
6486 a[j] = a[i];
6487 a[i] = t;
6488 } else if (a[j] == a[i]) {
6489 a[i]++;
6490 t = a[j];
6491 a[j] = a[i];
6492 a[i] = t;
6493 }
6494 }
6495 picture->max_skip = a[2];
6496 picture->avg_skip = a[1];
6497 picture->min_skip = a[0];
6498
6499 #ifdef DEBUG_QOS
6500 hevc_print(hevc, 0,
6501 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
6502 data, a[0], a[1], a[2]);
6503 #endif
6504 } else {
6505 uint32_t blk88_y_count;
6506 uint32_t blk88_c_count;
6507 uint32_t blk22_mv_count;
6508 uint32_t rdata32;
6509 int32_t mv_hi;
6510 int32_t mv_lo;
6511 uint32_t rdata32_l;
6512 uint32_t mvx_L0_hi;
6513 uint32_t mvy_L0_hi;
6514 uint32_t mvx_L1_hi;
6515 uint32_t mvy_L1_hi;
6516 int64_t value;
6517 uint64_t temp_value;
6518 #ifdef DEBUG_QOS
6519 int pic_number = picture->POC;
6520 #endif
6521
6522 picture->max_mv = 0;
6523 picture->avg_mv = 0;
6524 picture->min_mv = 0;
6525
6526 picture->max_skip = 0;
6527 picture->avg_skip = 0;
6528 picture->min_skip = 0;
6529
6530 picture->max_qp = 0;
6531 picture->avg_qp = 0;
6532 picture->min_qp = 0;
6533
6534
6535
6536 #ifdef DEBUG_QOS
6537 hevc_print(hevc, 0, "slice_type:%d, poc:%d\n",
6538 picture->slice_type,
6539 picture->POC);
6540 #endif
6541 /* set rd_idx to 0 */
6542 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, 0);
6543
6544 blk88_y_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6545 if (blk88_y_count == 0) {
6546 #ifdef DEBUG_QOS
6547 hevc_print(hevc, 0,
6548 "[Picture %d Quality] NO Data yet.\n",
6549 pic_number);
6550 #endif
6551 /* reset all counts */
6552 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6553 return;
6554 }
6555 /* qp_y_sum */
6556 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6557 #ifdef DEBUG_QOS
6558 hevc_print(hevc, 0,
6559 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
6560 pic_number, rdata32/blk88_y_count,
6561 rdata32, blk88_y_count);
6562 #endif
6563 picture->avg_qp = rdata32/blk88_y_count;
6564 /* intra_y_count */
6565 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6566 #ifdef DEBUG_QOS
6567 hevc_print(hevc, 0,
6568 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
6569 pic_number, rdata32*100/blk88_y_count,
6570 '%', rdata32);
6571 #endif
6572 /* skipped_y_count */
6573 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6574 #ifdef DEBUG_QOS
6575 hevc_print(hevc, 0,
6576 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
6577 pic_number, rdata32*100/blk88_y_count,
6578 '%', rdata32);
6579 #endif
6580 picture->avg_skip = rdata32*100/blk88_y_count;
6581 /* coeff_non_zero_y_count */
6582 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6583 #ifdef DEBUG_QOS
6584 hevc_print(hevc, 0,
6585 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
6586 pic_number, (100 - rdata32*100/(blk88_y_count*1)),
6587 '%', rdata32);
6588 #endif
6589 /* blk66_c_count */
6590 blk88_c_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6591 if (blk88_c_count == 0) {
6592 #ifdef DEBUG_QOS
6593 hevc_print(hevc, 0,
6594 "[Picture %d Quality] NO Data yet.\n",
6595 pic_number);
6596 #endif
6597 /* reset all counts */
6598 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6599 return;
6600 }
6601 /* qp_c_sum */
6602 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6603 #ifdef DEBUG_QOS
6604 hevc_print(hevc, 0,
6605 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
6606 pic_number, rdata32/blk88_c_count,
6607 rdata32, blk88_c_count);
6608 #endif
6609 /* intra_c_count */
6610 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6611 #ifdef DEBUG_QOS
6612 hevc_print(hevc, 0,
6613 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
6614 pic_number, rdata32*100/blk88_c_count,
6615 '%', rdata32);
6616 #endif
6617 /* skipped_cu_c_count */
6618 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6619 #ifdef DEBUG_QOS
6620 hevc_print(hevc, 0,
6621 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
6622 pic_number, rdata32*100/blk88_c_count,
6623 '%', rdata32);
6624 #endif
6625 /* coeff_non_zero_c_count */
6626 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6627 #ifdef DEBUG_QOS
6628 hevc_print(hevc, 0,
6629 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
6630 pic_number, (100 - rdata32*100/(blk88_c_count*1)),
6631 '%', rdata32);
6632 #endif
6633
6634 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
6635 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
6636 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6637 #ifdef DEBUG_QOS
6638 hevc_print(hevc, 0, "[Picture %d Quality] Y QP min : %d\n",
6639 pic_number, (rdata32>>0)&0xff);
6640 #endif
6641 picture->min_qp = (rdata32>>0)&0xff;
6642
6643 #ifdef DEBUG_QOS
6644 hevc_print(hevc, 0, "[Picture %d Quality] Y QP max : %d\n",
6645 pic_number, (rdata32>>8)&0xff);
6646 #endif
6647 picture->max_qp = (rdata32>>8)&0xff;
6648
6649 #ifdef DEBUG_QOS
6650 hevc_print(hevc, 0, "[Picture %d Quality] C QP min : %d\n",
6651 pic_number, (rdata32>>16)&0xff);
6652 hevc_print(hevc, 0, "[Picture %d Quality] C QP max : %d\n",
6653 pic_number, (rdata32>>24)&0xff);
6654 #endif
6655
6656 /* blk22_mv_count */
6657 blk22_mv_count = READ_VREG(HEVC_PIC_QUALITY_DATA);
6658 if (blk22_mv_count == 0) {
6659 #ifdef DEBUG_QOS
6660 hevc_print(hevc, 0,
6661 "[Picture %d Quality] NO MV Data yet.\n",
6662 pic_number);
6663 #endif
6664 /* reset all counts */
6665 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6666 return;
6667 }
6668 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
6669 mvy_L0_count[39:32], mvx_L0_count[39:32] */
6670 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6671 /* should all be 0x00 or 0xff */
6672 #ifdef DEBUG_QOS
6673 hevc_print(hevc, 0,
6674 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
6675 pic_number, rdata32);
6676 #endif
6677 mvx_L0_hi = ((rdata32>>0)&0xff);
6678 mvy_L0_hi = ((rdata32>>8)&0xff);
6679 mvx_L1_hi = ((rdata32>>16)&0xff);
6680 mvy_L1_hi = ((rdata32>>24)&0xff);
6681
6682 /* mvx_L0_count[31:0] */
6683 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6684 temp_value = mvx_L0_hi;
6685 temp_value = (temp_value << 32) | rdata32_l;
6686
6687 if (mvx_L0_hi & 0x80)
6688 value = 0xFFFFFFF000000000 | temp_value;
6689 else
6690 value = temp_value;
6691 value = div_s64(value, blk22_mv_count);
6692 #ifdef DEBUG_QOS
6693 hevc_print(hevc, 0,
6694 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
6695 pic_number, (int)value,
6696 value, blk22_mv_count);
6697 #endif
6698 picture->avg_mv = value;
6699
6700 /* mvy_L0_count[31:0] */
6701 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6702 temp_value = mvy_L0_hi;
6703 temp_value = (temp_value << 32) | rdata32_l;
6704
6705 if (mvy_L0_hi & 0x80)
6706 value = 0xFFFFFFF000000000 | temp_value;
6707 else
6708 value = temp_value;
6709 #ifdef DEBUG_QOS
6710 hevc_print(hevc, 0,
6711 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
6712 pic_number, rdata32_l/blk22_mv_count,
6713 value, blk22_mv_count);
6714 #endif
6715
6716 /* mvx_L1_count[31:0] */
6717 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6718 temp_value = mvx_L1_hi;
6719 temp_value = (temp_value << 32) | rdata32_l;
6720 if (mvx_L1_hi & 0x80)
6721 value = 0xFFFFFFF000000000 | temp_value;
6722 else
6723 value = temp_value;
6724 #ifdef DEBUG_QOS
6725 hevc_print(hevc, 0,
6726 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
6727 pic_number, rdata32_l/blk22_mv_count,
6728 value, blk22_mv_count);
6729 #endif
6730
6731 /* mvy_L1_count[31:0] */
6732 rdata32_l = READ_VREG(HEVC_PIC_QUALITY_DATA);
6733 temp_value = mvy_L1_hi;
6734 temp_value = (temp_value << 32) | rdata32_l;
6735 if (mvy_L1_hi & 0x80)
6736 value = 0xFFFFFFF000000000 | temp_value;
6737 else
6738 value = temp_value;
6739 #ifdef DEBUG_QOS
6740 hevc_print(hevc, 0,
6741 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
6742 pic_number, rdata32_l/blk22_mv_count,
6743 value, blk22_mv_count);
6744 #endif
6745
6746 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
6747 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6748 mv_hi = (rdata32>>16)&0xffff;
6749 if (mv_hi & 0x8000)
6750 mv_hi = 0x8000 - mv_hi;
6751 #ifdef DEBUG_QOS
6752 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MAX : %d\n",
6753 pic_number, mv_hi);
6754 #endif
6755 picture->max_mv = mv_hi;
6756
6757 mv_lo = (rdata32>>0)&0xffff;
6758 if (mv_lo & 0x8000)
6759 mv_lo = 0x8000 - mv_lo;
6760 #ifdef DEBUG_QOS
6761 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L0 MIN : %d\n",
6762 pic_number, mv_lo);
6763 #endif
6764 picture->min_mv = mv_lo;
6765
6766 #ifdef DEBUG_QOS
6767 /* {mvy_L0_max, mvy_L0_min} */
6768 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6769 mv_hi = (rdata32>>16)&0xffff;
6770 if (mv_hi & 0x8000)
6771 mv_hi = 0x8000 - mv_hi;
6772 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MAX : %d\n",
6773 pic_number, mv_hi);
6774
6775
6776 mv_lo = (rdata32>>0)&0xffff;
6777 if (mv_lo & 0x8000)
6778 mv_lo = 0x8000 - mv_lo;
6779
6780 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L0 MIN : %d\n",
6781 pic_number, mv_lo);
6782
6783
6784 /* {mvx_L1_max, mvx_L1_min} */
6785 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6786 mv_hi = (rdata32>>16)&0xffff;
6787 if (mv_hi & 0x8000)
6788 mv_hi = 0x8000 - mv_hi;
6789
6790 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MAX : %d\n",
6791 pic_number, mv_hi);
6792
6793
6794 mv_lo = (rdata32>>0)&0xffff;
6795 if (mv_lo & 0x8000)
6796 mv_lo = 0x8000 - mv_lo;
6797
6798 hevc_print(hevc, 0, "[Picture %d Quality] MVX_L1 MIN : %d\n",
6799 pic_number, mv_lo);
6800
6801
6802 /* {mvy_L1_max, mvy_L1_min} */
6803 rdata32 = READ_VREG(HEVC_PIC_QUALITY_DATA);
6804 mv_hi = (rdata32>>16)&0xffff;
6805 if (mv_hi & 0x8000)
6806 mv_hi = 0x8000 - mv_hi;
6807
6808 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MAX : %d\n",
6809 pic_number, mv_hi);
6810
6811 mv_lo = (rdata32>>0)&0xffff;
6812 if (mv_lo & 0x8000)
6813 mv_lo = 0x8000 - mv_lo;
6814
6815 hevc_print(hevc, 0, "[Picture %d Quality] MVY_L1 MIN : %d\n",
6816 pic_number, mv_lo);
6817 #endif
6818
6819 rdata32 = READ_VREG(HEVC_PIC_QUALITY_CTRL);
6820 #ifdef DEBUG_QOS
6821 hevc_print(hevc, 0,
6822 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
6823 pic_number, rdata32);
6824 #endif
6825 /* reset all counts */
6826 WRITE_VREG(HEVC_PIC_QUALITY_CTRL, (1<<8));
6827 }
6828 }
6829
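/*
 * Main slice segment header handler: latch the sequence/slice parameters
 * from the RPM block, compute the POC, pick or allocate the current picture
 * on the first slice segment of a frame, then set up tiles, reference lists,
 * the collocated picture and the MC/mpred/SAO hardware. Returns 0 to decode
 * the slice, 1/2 to skip it, 3/4 on fatal parameter errors, 0xf to run
 * buffer management only, and -1 while waiting for a free buffer.
 */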
6830 static int hevc_slice_segment_header_process(struct hevc_state_s *hevc,
6831 union param_u *rpm_param,
6832 int decode_pic_begin)
6833 {
6834 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6835 struct vdec_s *vdec = hw_to_vdec(hevc);
6836 #endif
6837 int i;
6838 int lcu_x_num_div;
6839 int lcu_y_num_div;
6840 int Col_ref;
6841 int dbg_skip_flag = 0;
6842 struct aml_vcodec_ctx *ctx =
6843 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
6844
6845 if (hevc->is_used_v4l && ctx->param_sets_from_ucode)
6846 hevc->res_ch_flag = 0;
6847
6848 if (hevc->wait_buf == 0) {
6849 hevc->sps_num_reorder_pics_0 =
6850 rpm_param->p.sps_num_reorder_pics_0;
6851 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
6852 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 &&
6853 !(vdec->slave || vdec->master) &&
6854 !disable_ip_mode) ? true : false;
6855 #else
6856 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 &&
6857 !disable_ip_mode) ? true : false;
6858 #endif
6859 hevc->m_temporalId = rpm_param->p.m_temporalId;
6860 hevc->m_nalUnitType = rpm_param->p.m_nalUnitType;
6861 hevc->interlace_flag =
6862 (rpm_param->p.profile_etc >> 2) & 0x1;
6863 hevc->curr_pic_struct =
6864 (rpm_param->p.sei_frame_field_info >> 3) & 0xf;
6865 if (parser_sei_enable & 0x4) {
6866 hevc->frame_field_info_present_flag =
6867 (rpm_param->p.sei_frame_field_info >> 8) & 0x1;
6868 }
6869
6870 /* if (interlace_enable == 0 || hevc->m_ins_flag) */
6871 if (interlace_enable == 0)
6872 hevc->interlace_flag = 0;
6873 if (interlace_enable & 0x100)
6874 hevc->interlace_flag = interlace_enable & 0x1;
6875 if (hevc->interlace_flag == 0)
6876 hevc->curr_pic_struct = 0;
6877 /* if(hevc->m_nalUnitType == NAL_UNIT_EOS){ */
6878 /*
6879 *hevc->m_pocRandomAccess = MAX_INT;
6880 * //add to fix RAP_B_Bossen_1
6881 */
6882 /* } */
6883 hevc->misc_flag0 = rpm_param->p.misc_flag0;
6884 if (rpm_param->p.first_slice_segment_in_pic_flag == 0) {
6885 hevc->slice_segment_addr =
6886 rpm_param->p.slice_segment_address;
6887 if (!rpm_param->p.dependent_slice_segment_flag)
6888 hevc->slice_addr = hevc->slice_segment_addr;
6889 } else {
6890 hevc->slice_segment_addr = 0;
6891 hevc->slice_addr = 0;
6892 }
6893
6894 hevc->iPrevPOC = hevc->curr_POC;
6895 hevc->slice_type = (rpm_param->p.slice_type == I_SLICE) ? 2 :
6896 (rpm_param->p.slice_type == P_SLICE) ? 1 :
6897 (rpm_param->p.slice_type == B_SLICE) ? 0 : 3;
6898 /* hevc->curr_predFlag_L0=(hevc->slice_type==2) ? 0:1; */
6899 /* hevc->curr_predFlag_L1=(hevc->slice_type==0) ? 1:0; */
6900 hevc->TMVPFlag = rpm_param->p.slice_temporal_mvp_enable_flag;
6901 hevc->isNextSliceSegment =
6902 rpm_param->p.dependent_slice_segment_flag ? 1 : 0;
6903 if (hevc->pic_w != rpm_param->p.pic_width_in_luma_samples
6904 || hevc->pic_h !=
6905 rpm_param->p.pic_height_in_luma_samples) {
6906 hevc_print(hevc, 0,
6907 "Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
6908 hevc->pic_w, hevc->pic_h,
6909 rpm_param->p.pic_width_in_luma_samples,
6910 rpm_param->p.pic_height_in_luma_samples,
6911 hevc->interlace_flag);
6912
6913 hevc->pic_w = rpm_param->p.pic_width_in_luma_samples;
6914 hevc->pic_h = rpm_param->p.pic_height_in_luma_samples;
6915 hevc->frame_width = hevc->pic_w;
6916 hevc->frame_height = hevc->pic_h;
6917 #ifdef LOSLESS_COMPRESS_MODE
6918 if (/*re_config_pic_flag == 0 &&*/
6919 (get_double_write_mode(hevc) & 0x10) == 0)
6920 init_decode_head_hw(hevc);
6921 #endif
6922 }
6923
6924 if (is_oversize(hevc->pic_w, hevc->pic_h)) {
6925 hevc_print(hevc, 0, "over size : %u x %u.\n",
6926 hevc->pic_w, hevc->pic_h);
6927 if ((!hevc->m_ins_flag) &&
6928 ((debug &
6929 H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0))
6930 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6931 H265_DEBUG_DIS_SYS_ERROR_PROC);
6932 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6933 return 3;
6934 }
6935 if (hevc->bit_depth_chroma > 10 ||
6936 hevc->bit_depth_luma > 10) {
6937 hevc_print(hevc, 0, "unsupport bitdepth : %u,%u\n",
6938 hevc->bit_depth_chroma,
6939 hevc->bit_depth_luma);
6940 if (!hevc->m_ins_flag)
6941 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
6942 H265_DEBUG_DIS_SYS_ERROR_PROC);
6943 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
6944 return 4;
6945 }
6946
6947 /* it will cause divide 0 error */
6948 if (hevc->pic_w == 0 || hevc->pic_h == 0) {
6949 if (get_dbg_flag(hevc)) {
6950 hevc_print(hevc, 0,
6951 "Fatal Error, pic_w = %d, pic_h = %d\n",
6952 hevc->pic_w, hevc->pic_h);
6953 }
6954 return 3;
6955 }
6956 pic_list_process(hevc);
6957
6958 hevc->lcu_size =
6959 1 << (rpm_param->p.log2_min_coding_block_size_minus3 +
6960 3 + rpm_param->
6961 p.log2_diff_max_min_coding_block_size);
6962 if (hevc->lcu_size == 0) {
6963 hevc_print(hevc, 0,
6964 "Error, lcu_size = 0 (%d,%d)\n",
6965 rpm_param->p.
6966 log2_min_coding_block_size_minus3,
6967 rpm_param->p.
6968 log2_diff_max_min_coding_block_size);
6969 return 3;
6970 }
6971 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
6972 lcu_x_num_div = (hevc->pic_w / hevc->lcu_size);
6973 lcu_y_num_div = (hevc->pic_h / hevc->lcu_size);
6974 hevc->lcu_x_num =
6975 ((hevc->pic_w % hevc->lcu_size) ==
6976 0) ? lcu_x_num_div : lcu_x_num_div + 1;
6977 hevc->lcu_y_num =
6978 ((hevc->pic_h % hevc->lcu_size) ==
6979 0) ? lcu_y_num_div : lcu_y_num_div + 1;
6980 hevc->lcu_total = hevc->lcu_x_num * hevc->lcu_y_num;
6981
6982 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR
6983 || hevc->m_nalUnitType ==
6984 NAL_UNIT_CODED_SLICE_IDR_N_LP) {
6985 hevc->curr_POC = 0;
6986 if ((hevc->m_temporalId - 1) == 0)
6987 hevc->iPrevTid0POC = hevc->curr_POC;
6988 } else {
6989 int iMaxPOClsb =
6990 1 << (rpm_param->p.
6991 log2_max_pic_order_cnt_lsb_minus4 + 4);
6992 int iPrevPOClsb;
6993 int iPrevPOCmsb;
6994 int iPOCmsb;
6995 int iPOClsb = rpm_param->p.POClsb;
6996
6997 if (iMaxPOClsb == 0) {
6998 hevc_print(hevc, 0,
6999 "error iMaxPOClsb is 0\n");
7000 return 3;
7001 }
7002
7003 iPrevPOClsb = hevc->iPrevTid0POC % iMaxPOClsb;
7004 iPrevPOCmsb = hevc->iPrevTid0POC - iPrevPOClsb;
7005
7006 if ((iPOClsb < iPrevPOClsb)
7007 && ((iPrevPOClsb - iPOClsb) >=
7008 (iMaxPOClsb / 2)))
7009 iPOCmsb = iPrevPOCmsb + iMaxPOClsb;
7010 else if ((iPOClsb > iPrevPOClsb)
7011 && ((iPOClsb - iPrevPOClsb) >
7012 (iMaxPOClsb / 2)))
7013 iPOCmsb = iPrevPOCmsb - iMaxPOClsb;
7014 else
7015 iPOCmsb = iPrevPOCmsb;
7016 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7017 hevc_print(hevc, 0,
7018 "iPrePOC%d iMaxPOClsb%d iPOCmsb%d iPOClsb%d\n",
7019 hevc->iPrevTid0POC, iMaxPOClsb, iPOCmsb,
7020 iPOClsb);
7021 }
7022 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7023 || hevc->m_nalUnitType ==
7024 NAL_UNIT_CODED_SLICE_BLANT
7025 || hevc->m_nalUnitType ==
7026 NAL_UNIT_CODED_SLICE_BLA_N_LP) {
7027 /* For BLA picture types, POCmsb is set to 0. */
7028 iPOCmsb = 0;
7029 }
7030 hevc->curr_POC = (iPOCmsb + iPOClsb);
7031 if ((hevc->m_temporalId - 1) == 0)
7032 hevc->iPrevTid0POC = hevc->curr_POC;
7033 else {
7034 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7035 hevc_print(hevc, 0,
7036 "m_temporalID is %d\n",
7037 hevc->m_temporalId);
7038 }
7039 }
7040 }
7041 hevc->RefNum_L0 =
7042 (rpm_param->p.num_ref_idx_l0_active >
7043 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
7044 num_ref_idx_l0_active;
7045 hevc->RefNum_L1 =
7046 (rpm_param->p.num_ref_idx_l1_active >
7047 MAX_REF_ACTIVE) ? MAX_REF_ACTIVE : rpm_param->p.
7048 num_ref_idx_l1_active;
7049
7050 /* if(curr_POC==0x10) dump_lmem(); */
7051
7052 /* skip RASL pictures after CRA/BLA pictures */
7053 if (hevc->m_pocRandomAccess == MAX_INT) {/* first picture */
7054 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_CRA ||
7055 hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7056 || hevc->m_nalUnitType ==
7057 NAL_UNIT_CODED_SLICE_BLANT
7058 || hevc->m_nalUnitType ==
7059 NAL_UNIT_CODED_SLICE_BLA_N_LP)
7060 hevc->m_pocRandomAccess = hevc->curr_POC;
7061 else
7062 hevc->m_pocRandomAccess = -MAX_INT;
7063 } else if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_BLA
7064 || hevc->m_nalUnitType ==
7065 NAL_UNIT_CODED_SLICE_BLANT
7066 || hevc->m_nalUnitType ==
7067 NAL_UNIT_CODED_SLICE_BLA_N_LP)
7068 hevc->m_pocRandomAccess = hevc->curr_POC;
7069 else if ((hevc->curr_POC < hevc->m_pocRandomAccess) &&
7070 (nal_skip_policy >= 3) &&
7071 (hevc->m_nalUnitType ==
7072 NAL_UNIT_CODED_SLICE_RASL_N ||
7073 hevc->m_nalUnitType ==
7074 NAL_UNIT_CODED_SLICE_TFD)) { /* skip */
7075 if (get_dbg_flag(hevc)) {
7076 hevc_print(hevc, 0,
7077 "RASL picture with POC %d < %d ",
7078 hevc->curr_POC, hevc->m_pocRandomAccess);
7079 hevc_print(hevc, 0,
7080 "RandomAccess point POC), skip it\n");
7081 }
7082 return 1;
7083 }
7084
7085 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) | 0x2);
7086 hevc->skip_flag = 0;
7087 /**/
7088 /* if((iPrevPOC != curr_POC)){ */
7089 if (rpm_param->p.slice_segment_address == 0) {
7090 struct PIC_s *pic;
7091
7092 hevc->new_pic = 1;
7093 #ifdef MULTI_INSTANCE_SUPPORT
7094 if (!hevc->m_ins_flag)
7095 #endif
7096 check_pic_decoded_error_pre(hevc,
7097 READ_VREG(HEVC_PARSER_LCU_START)
7098 & 0xffffff);
7099 /**/ if (use_cma == 0) {
7100 if (hevc->pic_list_init_flag == 0) {
7101 init_pic_list(hevc);
7102 init_pic_list_hw(hevc);
7103 init_buf_spec(hevc);
7104 hevc->pic_list_init_flag = 3;
7105 }
7106 }
7107 if (!hevc->m_ins_flag) {
7108 if (hevc->cur_pic)
7109 get_picture_qos_info(hevc);
7110 }
7111 hevc->first_pic_after_recover = 0;
7112 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7113 dump_pic_list(hevc);
7114 /* prev pic */
7115 hevc_pre_pic(hevc, pic);
7116 /*
7117 *update referenced of old pictures
7118 *(cur_pic->referenced is 1 and not updated)
7119 */
7120 apply_ref_pic_set(hevc, hevc->curr_POC,
7121 rpm_param);
7122
7123 if (hevc->mmu_enable)
7124 recycle_mmu_bufs(hevc);
7125
7126 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7127 if (vdec->master) {
7128 struct hevc_state_s *hevc_ba =
7129 (struct hevc_state_s *)
7130 vdec->master->private;
7131 if (hevc_ba->cur_pic != NULL) {
7132 hevc_ba->cur_pic->dv_enhance_exist = 1;
7133 hevc_print(hevc, H265_DEBUG_DV,
7134 "To decode el (poc %d) => set bl (poc %d) dv_enhance_exist flag\n",
7135 hevc->curr_POC, hevc_ba->cur_pic->POC);
7136 }
7137 }
7138 if (vdec->master == NULL &&
7139 vdec->slave == NULL)
7140 set_aux_data(hevc,
7141 hevc->cur_pic, 1, 0); /*suffix*/
7142 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7143 set_aux_data(hevc,
7144 hevc->cur_pic, 0, 1); /*dv meta only*/
7145 #else
7146 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7147 #endif
7148 /* new pic */
7149 hevc->cur_pic = hevc->is_used_v4l ?
7150 v4l_get_new_pic(hevc, rpm_param) :
7151 get_new_pic(hevc, rpm_param);
7152 if (hevc->cur_pic == NULL) {
7153 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
7154 dump_pic_list(hevc);
7155 hevc->wait_buf = 1;
7156 return -1;
7157 }
7158 #ifdef MULTI_INSTANCE_SUPPORT
7159 hevc->decoding_pic = hevc->cur_pic;
7160 if (!hevc->m_ins_flag)
7161 hevc->over_decode = 0;
7162 #endif
7163 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7164 hevc->cur_pic->dv_enhance_exist = 0;
7165 if (vdec->slave)
7166 hevc_print(hevc, H265_DEBUG_DV,
7167 "Clear bl (poc %d) dv_enhance_exist flag\n",
7168 hevc->curr_POC);
7169 if (vdec->master == NULL &&
7170 vdec->slave == NULL)
7171 set_aux_data(hevc,
7172 hevc->cur_pic, 0, 0); /*prefix*/
7173
7174 if (hevc->bypass_dvenl && !dolby_meta_with_el)
7175 set_aux_data(hevc,
7176 hevc->cur_pic, 0, 2); /*pre sei only*/
7177 #else
7178 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7179 #endif
7180 if (get_dbg_flag(hevc) & H265_DEBUG_DISPLAY_CUR_FRAME) {
7181 hevc->cur_pic->output_ready = 1;
7182 hevc->cur_pic->stream_offset =
7183 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
7184 prepare_display_buf(hevc, hevc->cur_pic);
7185 hevc->wait_buf = 2;
7186 return -1;
7187 }
7188 } else {
7189 if (get_dbg_flag(hevc) & H265_DEBUG_HAS_AUX_IN_SLICE) {
7190 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7191 if (vdec->master == NULL &&
7192 vdec->slave == NULL) {
7193 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7194 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7195 }
7196 #else
7197 set_aux_data(hevc, hevc->cur_pic, 1, 0);
7198 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7199 #endif
7200 }
7201 if (hevc->pic_list_init_flag != 3
7202 || hevc->cur_pic == NULL) {
7203 /* make it dec from the first slice segment */
7204 return 3;
7205 }
7206 hevc->cur_pic->slice_idx++;
7207 hevc->new_pic = 0;
7208 }
7209 } else {
7210 if (hevc->wait_buf == 1) {
7211 pic_list_process(hevc);
7212 hevc->cur_pic = hevc->is_used_v4l ?
7213 v4l_get_new_pic(hevc, rpm_param) :
7214 get_new_pic(hevc, rpm_param);
7215 if (hevc->cur_pic == NULL)
7216 return -1;
7217
7218 if (!hevc->m_ins_flag)
7219 hevc->over_decode = 0;
7220
7221 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
7222 hevc->cur_pic->dv_enhance_exist = 0;
7223 if (vdec->master == NULL &&
7224 vdec->slave == NULL)
7225 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7226 #else
7227 set_aux_data(hevc, hevc->cur_pic, 0, 0);
7228 #endif
7229 hevc->wait_buf = 0;
7230 } else if (hevc->wait_buf ==
7231 2) {
7232 if (get_display_pic_num(hevc) >
7233 1)
7234 return -1;
7235 hevc->wait_buf = 0;
7236 }
7237 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE)
7238 dump_pic_list(hevc);
7239 }
7240
7241 if (hevc->new_pic) {
7242 #if 1
7243 /*SUPPORT_10BIT*/
7244 int sao_mem_unit =
7245 (hevc->lcu_size == 16 ? 9 :
7246 hevc->lcu_size ==
7247 32 ? 14 : 24) << 4;
7248 #else
7249 int sao_mem_unit = ((hevc->lcu_size / 8) * 2 + 4) << 4;
7250 #endif
7251 int pic_height_cu =
7252 (hevc->pic_h + hevc->lcu_size - 1) / hevc->lcu_size;
7253 int pic_width_cu =
7254 (hevc->pic_w + hevc->lcu_size - 1) / hevc->lcu_size;
7255 int sao_vb_size = (sao_mem_unit + (2 << 4)) * pic_height_cu;
7256
7257 /* int sao_abv_size = sao_mem_unit*pic_width_cu; */
7258 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7259 hevc_print(hevc, 0,
7260 "==>%s dec idx %d, struct %d interlace %d pic idx %d\n",
7261 __func__,
7262 hevc->decode_idx,
7263 hevc->curr_pic_struct,
7264 hevc->interlace_flag,
7265 hevc->cur_pic->index);
7266 }
7267 if (dbg_skip_decode_index != 0 &&
7268 hevc->decode_idx == dbg_skip_decode_index)
7269 dbg_skip_flag = 1;
7270
7271 hevc->decode_idx++;
7272 update_tile_info(hevc, pic_width_cu, pic_height_cu,
7273 sao_mem_unit, rpm_param);
7274
7275 config_title_hw(hevc, sao_vb_size, sao_mem_unit);
7276 }
7277
7278 if (hevc->iPrevPOC != hevc->curr_POC) {
7279 hevc->new_tile = 1;
7280 hevc->tile_x = 0;
7281 hevc->tile_y = 0;
7282 hevc->tile_y_x = 0;
7283 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7284 hevc_print(hevc, 0,
7285 "new_tile (new_pic) tile_x=%d, tile_y=%d\n",
7286 hevc->tile_x, hevc->tile_y);
7287 }
7288 } else if (hevc->tile_enabled) {
7289 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7290 hevc_print(hevc, 0,
7291 "slice_segment_address is %d\n",
7292 rpm_param->p.slice_segment_address);
7293 }
7294 hevc->tile_y_x =
7295 get_tile_index(hevc, rpm_param->p.slice_segment_address,
7296 (hevc->pic_w +
7297 hevc->lcu_size -
7298 1) / hevc->lcu_size);
7299 if ((hevc->tile_y_x != (hevc->tile_x | (hevc->tile_y << 8)))
7300 && (hevc->tile_y_x != -1)) {
7301 hevc->new_tile = 1;
7302 hevc->tile_x = hevc->tile_y_x & 0xff;
7303 hevc->tile_y = (hevc->tile_y_x >> 8) & 0xff;
7304 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
7305 hevc_print(hevc, 0,
7306 "new_tile seg adr %d tile_x=%d, tile_y=%d\n",
7307 rpm_param->p.slice_segment_address,
7308 hevc->tile_x, hevc->tile_y);
7309 }
7310 } else
7311 hevc->new_tile = 0;
7312 } else
7313 hevc->new_tile = 0;
7314
7315 if ((hevc->tile_x > (MAX_TILE_COL_NUM - 1))
7316 || (hevc->tile_y > (MAX_TILE_ROW_NUM - 1)))
7317 hevc->new_tile = 0;
7318
7319 if (hevc->new_tile) {
7320 hevc->tile_start_lcu_x =
7321 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_x;
7322 hevc->tile_start_lcu_y =
7323 hevc->m_tile[hevc->tile_y][hevc->tile_x].start_cu_y;
7324 hevc->tile_width_lcu =
7325 hevc->m_tile[hevc->tile_y][hevc->tile_x].width;
7326 hevc->tile_height_lcu =
7327 hevc->m_tile[hevc->tile_y][hevc->tile_x].height;
7328 }
7329
7330 set_ref_pic_list(hevc, rpm_param);
7331
7332 Col_ref = rpm_param->p.collocated_ref_idx;
7333
7334 hevc->LDCFlag = 0;
7335 if (rpm_param->p.slice_type != I_SLICE) {
7336 hevc->LDCFlag = 1;
7337 for (i = 0; (i < hevc->RefNum_L0) && hevc->LDCFlag; i++) {
7338 if (hevc->cur_pic->
7339 m_aiRefPOCList0[hevc->cur_pic->slice_idx][i] >
7340 hevc->curr_POC)
7341 hevc->LDCFlag = 0;
7342 }
7343 if (rpm_param->p.slice_type == B_SLICE) {
7344 for (i = 0; (i < hevc->RefNum_L1)
7345 && hevc->LDCFlag; i++) {
7346 if (hevc->cur_pic->
7347 m_aiRefPOCList1[hevc->cur_pic->
7348 slice_idx][i] >
7349 hevc->curr_POC)
7350 hevc->LDCFlag = 0;
7351 }
7352 }
7353 }
7354
7355 hevc->ColFromL0Flag = rpm_param->p.collocated_from_l0_flag;
7356
7357 hevc->plevel =
7358 rpm_param->p.log2_parallel_merge_level;
7359 hevc->MaxNumMergeCand = 5 - rpm_param->p.five_minus_max_num_merge_cand;
7360
7361 hevc->LongTerm_Curr = 0; /* to do ... */
7362 hevc->LongTerm_Col = 0; /* to do ... */
7363
7364 hevc->list_no = 0;
7365 if (rpm_param->p.slice_type == B_SLICE)
7366 hevc->list_no = 1 - hevc->ColFromL0Flag;
7367 if (hevc->list_no == 0) {
7368 if (Col_ref < hevc->RefNum_L0) {
7369 hevc->Col_POC =
7370 hevc->cur_pic->m_aiRefPOCList0[hevc->cur_pic->
7371 slice_idx][Col_ref];
7372 } else
7373 hevc->Col_POC = INVALID_POC;
7374 } else {
7375 if (Col_ref < hevc->RefNum_L1) {
7376 hevc->Col_POC =
7377 hevc->cur_pic->m_aiRefPOCList1[hevc->cur_pic->
7378 slice_idx][Col_ref];
7379 } else
7380 hevc->Col_POC = INVALID_POC;
7381 }
7382
7383 hevc->LongTerm_Ref = 0; /* to do ... */
7384
7385 if (hevc->slice_type != 2) {
7386 /* if(hevc->i_only==1){ */
7387 /* return 0xf; */
7388 /* } */
7389
7390 if (hevc->Col_POC != INVALID_POC) {
7391 hevc->col_pic = get_ref_pic_by_POC(hevc, hevc->Col_POC);
7392 if (hevc->col_pic == NULL) {
7393 hevc->cur_pic->error_mark = 1;
7394 if (get_dbg_flag(hevc)) {
7395 hevc_print(hevc, 0,
7396 "WRONG,fail to get the pic Col_POC\n");
7397 }
7398 if (is_log_enable(hevc))
7399 add_log(hevc,
7400 "WRONG,fail to get the pic Col_POC");
7401 } else if (hevc->col_pic->error_mark || hevc->col_pic->dis_mark == 0) {
7402 hevc->cur_pic->error_mark = 1;
7403 if (get_dbg_flag(hevc)) {
7404 hevc_print(hevc, 0,
7405 "WRONG, Col_POC error_mark is 1\n");
7406 }
7407 if (is_log_enable(hevc))
7408 add_log(hevc,
7409 "WRONG, Col_POC error_mark is 1");
7410 } else {
7411 if ((hevc->col_pic->width
7412 != hevc->pic_w) ||
7413 (hevc->col_pic->height
7414 != hevc->pic_h)) {
7415 hevc_print(hevc, 0,
7416 "Wrong reference pic (poc %d) width/height %d/%d\n",
7417 hevc->col_pic->POC,
7418 hevc->col_pic->width,
7419 hevc->col_pic->height);
7420 hevc->cur_pic->error_mark = 1;
7421 }
7422
7423 }
7424
7425 if (hevc->cur_pic->error_mark
7426 && ((hevc->ignore_bufmgr_error & 0x1) == 0)) {
7427 /*count info*/
7428 vdec_count_info(hevc->gvs, hevc->cur_pic->error_mark,
7429 hevc->cur_pic->stream_offset);
7430 if (hevc->PB_skip_mode == 2)
7431 hevc->gvs->drop_frame_count++;
7432 }
7433
7434 if (is_skip_decoding(hevc,
7435 hevc->cur_pic)) {
7436 return 2;
7437 }
7438 } else
7439 hevc->col_pic = hevc->cur_pic;
7440 } /* */
7441 if (hevc->col_pic == NULL)
7442 hevc->col_pic = hevc->cur_pic;
7443 #ifdef BUFFER_MGR_ONLY
7444 return 0xf;
7445 #else
7446 if ((decode_pic_begin > 0 && hevc->decode_idx <= decode_pic_begin)
7447 || (dbg_skip_flag))
7448 return 0xf;
7449 #endif
7450
7451 config_mc_buffer(hevc, hevc->cur_pic);
7452
7453 if (is_skip_decoding(hevc,
7454 hevc->cur_pic)) {
7455 if (get_dbg_flag(hevc))
7456 hevc_print(hevc, 0,
7457 "Discard this picture index %d\n",
7458 hevc->cur_pic->index);
7459 /*count info*/
7460 vdec_count_info(hevc->gvs, hevc->cur_pic->error_mark,
7461 hevc->cur_pic->stream_offset);
7462 if (hevc->PB_skip_mode == 2)
7463 hevc->gvs->drop_frame_count++;
7464 return 2;
7465 }
7466 #ifdef MCRCC_ENABLE
7467 config_mcrcc_axi_hw(hevc, hevc->cur_pic->slice_type);
7468 #endif
7469 config_mpred_hw(hevc);
7470
7471 config_sao_hw(hevc, rpm_param);
7472
7473 if ((hevc->slice_type != 2) && (hevc->i_only & 0x2))
7474 return 0xf;
7475
7476 return 0;
7477 }
7478
7479
7480
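/*
 * Allocate MMU page entries for one compressed reference picture.
 * The number of 4K pages is derived from the lossless compression
 * body size; double write mode 0x10 (double write only) needs no
 * MMU pages. Any previous scatter allocation for this index is
 * released first. Returns -1 if the page count exceeds the per-chip
 * limit (MAX_FRAME_4K_NUM / MAX_FRAME_8K_NUM), otherwise the result
 * of decoder_mmu_box_alloc_idx().
 */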
7481 static int H265_alloc_mmu(struct hevc_state_s *hevc, struct PIC_s *new_pic,
7482 unsigned short bit_depth, unsigned int *mmu_index_adr) {
7483 int cur_buf_idx = new_pic->index;
7484 int bit_depth_10 = (bit_depth != 0x00);
7485 int picture_size;
7486 int cur_mmu_4k_number;
7487 int ret, max_frame_num;
7488 picture_size = compute_losless_comp_body_size(hevc, new_pic->width,
7489 new_pic->height, !bit_depth_10);
7490 cur_mmu_4k_number = ((picture_size+(1<<12)-1) >> 12);
7491 if (get_double_write_mode(hevc) == 0x10)
7492 return 0;
7493 /*hevc_print(hevc, 0,
7494 "alloc_mmu cur_idx : %d picture_size : %d mmu_4k_number : %d\r\n",
7495 cur_buf_idx, picture_size, cur_mmu_4k_number);*/
7496 if (new_pic->scatter_alloc) {
7497 decoder_mmu_box_free_idx(hevc->mmu_box, new_pic->index);
7498 new_pic->scatter_alloc = 0;
7499 }
7500 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7501 max_frame_num = MAX_FRAME_8K_NUM;
7502 else
7503 max_frame_num = MAX_FRAME_4K_NUM;
7504 if (cur_mmu_4k_number > max_frame_num) {
7505 hevc_print(hevc, 0, "over max !! 0x%x width %d height %d\n",
7506 cur_mmu_4k_number,
7507 new_pic->width,
7508 new_pic->height);
7509 return -1;
7510 }
7511 ret = decoder_mmu_box_alloc_idx(
7512 hevc->mmu_box,
7513 cur_buf_idx,
7514 cur_mmu_4k_number,
7515 mmu_index_adr);
7516 if (ret == 0)
7517 new_pic->scatter_alloc = 1;
7518
7519 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7520 "%s pic index %d page count(%d) ret =%d\n",
7521 __func__, cur_buf_idx,
7522 cur_mmu_4k_number, ret);
7523 return ret;
7524 }
7525
7526
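/*
 * Release the scatter (MMU) pages of a picture, provided the decoder
 * runs in MMU mode without "double write only" and the pages were
 * actually allocated for this index.
 */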
7527 static void release_pic_mmu_buf(struct hevc_state_s *hevc,
7528 struct PIC_s *pic)
7529 {
7530 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
7531 "%s pic index %d scatter_alloc %d\n",
7532 __func__, pic->index,
7533 pic->scatter_alloc);
7534
7535 if (hevc->mmu_enable
7536 && ((hevc->double_write_mode & 0x10) == 0)
7537 && pic->scatter_alloc)
7538 decoder_mmu_box_free_idx(hevc->mmu_box, pic->index);
7539 pic->scatter_alloc = 0;
7540 }
7541
7542 /*
7543 *************************************************
7544 *
7545 *h265 buffer management end
7546 *
7547 **************************************************
7548 */
7549 static struct hevc_state_s *gHevc;
7550
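/*
 * Free the per-instance DMA buffers (ucode swap, detrefill, aux,
 * RPM, LMEM and frame MMU map) allocated by hevc_local_init().
 */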
7551 static void hevc_local_uninit(struct hevc_state_s *hevc)
7552 {
7553 hevc->rpm_ptr = NULL;
7554 hevc->lmem_ptr = NULL;
7555
7556 #ifdef SWAP_HEVC_UCODE
7557 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
7558 if (hevc->mc_cpu_addr != NULL) {
7559 dma_free_coherent(amports_get_dma_device(),
7560 hevc->swap_size, hevc->mc_cpu_addr,
7561 hevc->mc_dma_handle);
7562 hevc->mc_cpu_addr = NULL;
7563 }
7564
7565 }
7566 #endif
7567 #ifdef DETREFILL_ENABLE
7568 if (hevc->is_swap && get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
7569 uninit_detrefill_buf(hevc);
7570 #endif
7571 if (hevc->aux_addr) {
7572 dma_free_coherent(amports_get_dma_device(),
7573 hevc->prefix_aux_size + hevc->suffix_aux_size, hevc->aux_addr,
7574 hevc->aux_phy_addr);
7575 hevc->aux_addr = NULL;
7576 }
7577 if (hevc->rpm_addr) {
7578 dma_free_coherent(amports_get_dma_device(),
7579 RPM_BUF_SIZE, hevc->rpm_addr,
7580 hevc->rpm_phy_addr);
7581 hevc->rpm_addr = NULL;
7582 }
7583 if (hevc->lmem_addr) {
7584 dma_free_coherent(amports_get_dma_device(),
7585 			LMEM_BUF_SIZE, hevc->lmem_addr,
7586 hevc->lmem_phy_addr);
7587 hevc->lmem_addr = NULL;
7588 }
7589
7590 if (hevc->mmu_enable && hevc->frame_mmu_map_addr) {
7591 if (hevc->frame_mmu_map_phy_addr)
7592 dma_free_coherent(amports_get_dma_device(),
7593 get_frame_mmu_map_size(), hevc->frame_mmu_map_addr,
7594 hevc->frame_mmu_map_phy_addr);
7595
7596 hevc->frame_mmu_map_addr = NULL;
7597 }
7598
7599 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
7600 }
7601
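/*
 * Allocate the per-instance work buffers: pick a workspace spec
 * (1080p or 4K capable, depending on chip) and initialize the buffer
 * layout, then allocate the RPM, aux (prefix/suffix SEI), LMEM and
 * frame MMU map DMA buffers. Returns 0 on success, -1 on any
 * allocation failure.
 */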
7602 static int hevc_local_init(struct hevc_state_s *hevc)
7603 {
7604 int ret = -1;
7605 struct BuffInfo_s *cur_buf_info = NULL;
7606
7607 memset(&hevc->param, 0, sizeof(union param_u));
7608
7609 cur_buf_info = &hevc->work_space_buf_store;
7610
7611 if (vdec_is_support_4k()) {
7612 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
7613 memcpy(cur_buf_info, &amvh265_workbuff_spec[2], /* 4k */
7614 sizeof(struct BuffInfo_s));
7615 else
7616 memcpy(cur_buf_info, &amvh265_workbuff_spec[1], /* 4k */
7617 sizeof(struct BuffInfo_s));
7618 } else
7619 memcpy(cur_buf_info, &amvh265_workbuff_spec[0], /* 1080p */
7620 sizeof(struct BuffInfo_s));
7621
7622 cur_buf_info->start_adr = hevc->buf_start;
7623 init_buff_spec(hevc, cur_buf_info);
7624
7625 hevc_init_stru(hevc, cur_buf_info);
7626
7627 hevc->bit_depth_luma = 8;
7628 hevc->bit_depth_chroma = 8;
7629 hevc->video_signal_type = 0;
7630 hevc->video_signal_type_debug = 0;
7631 bit_depth_luma = hevc->bit_depth_luma;
7632 bit_depth_chroma = hevc->bit_depth_chroma;
7633 video_signal_type = hevc->video_signal_type;
7634
7635 if ((get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG) == 0) {
7636 hevc->rpm_addr = dma_alloc_coherent(amports_get_dma_device(),
7637 RPM_BUF_SIZE, &hevc->rpm_phy_addr, GFP_KERNEL);
7638 if (hevc->rpm_addr == NULL) {
7639 pr_err("%s: failed to alloc rpm buffer\n", __func__);
7640 return -1;
7641 }
7642 hevc->rpm_ptr = hevc->rpm_addr;
7643 }
7644
7645 if (prefix_aux_buf_size > 0 ||
7646 suffix_aux_buf_size > 0) {
7647 u32 aux_buf_size;
7648
7649 hevc->prefix_aux_size = AUX_BUF_ALIGN(prefix_aux_buf_size);
7650 hevc->suffix_aux_size = AUX_BUF_ALIGN(suffix_aux_buf_size);
7651 aux_buf_size = hevc->prefix_aux_size + hevc->suffix_aux_size;
7652 		hevc->aux_addr = dma_alloc_coherent(amports_get_dma_device(),
7653 			aux_buf_size, &hevc->aux_phy_addr, GFP_KERNEL);
7654 		if (hevc->aux_addr == NULL) {
7655 			pr_err("%s: failed to alloc aux buffer\n", __func__);
7656 return -1;
7657 }
7658 }
7659
7660 hevc->lmem_addr = dma_alloc_coherent(amports_get_dma_device(),
7661 LMEM_BUF_SIZE, &hevc->lmem_phy_addr, GFP_KERNEL);
7662 if (hevc->lmem_addr == NULL) {
7663 pr_err("%s: failed to alloc lmem buffer\n", __func__);
7664 return -1;
7665 }
7666 hevc->lmem_ptr = hevc->lmem_addr;
7667
7668 if (hevc->mmu_enable) {
7669 hevc->frame_mmu_map_addr =
7670 dma_alloc_coherent(amports_get_dma_device(),
7671 get_frame_mmu_map_size(),
7672 &hevc->frame_mmu_map_phy_addr, GFP_KERNEL);
7673 if (hevc->frame_mmu_map_addr == NULL) {
7674 			pr_err("%s: failed to alloc frame mmu map buffer\n", __func__);
7675 return -1;
7676 }
7677 memset(hevc->frame_mmu_map_addr, 0, get_frame_mmu_map_size());
7678 }
7679 ret = 0;
7680 return ret;
7681 }
7682
7683 /*
7684 *******************************************
7685 * Mailbox command
7686 *******************************************
7687 */
7688 #define CMD_FINISHED 0
7689 #define CMD_ALLOC_VIEW 1
7690 #define CMD_FRAME_DISPLAY 3
7691 #define CMD_DEBUG 10
7692
7693
7694 #define DECODE_BUFFER_NUM_MAX 32
7695 #define DISPLAY_BUFFER_NUM 6
7696
7697 #define video_domain_addr(adr) (adr&0x7fffffff)
7698 #define DECODER_WORK_SPACE_SIZE 0x800000
7699
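/*
 * Pack the Y/UV canvas indexes of a picture into the 24-bit canvas
 * word carried by vframe: bits[7:0] Y, bits[15:8] UV, bits[23:16] UV
 * (the second UV slot is reused since there is no separate V plane).
 * Example: y_canvas_index 128, uv_canvas_index 129 => 0x818180.
 */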
7700 #define spec2canvas(x) \
7701 (((x)->uv_canvas_index << 16) | \
7702 ((x)->uv_canvas_index << 8) | \
7703 ((x)->y_canvas_index << 0))
7704
7705
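/*
 * Bind the picture's double-write (or reference) buffers to display
 * canvases. Canvas width/height are aligned according to
 * mem_map_mode, canvas indexes come from the vdec core in parallel
 * decoding mode or from a fixed 128+ range otherwise, and the
 * resulting configuration is mirrored into pic->canvas_config[] for
 * the multi-instance path.
 */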
7706 static void set_canvas(struct hevc_state_s *hevc, struct PIC_s *pic)
7707 {
7708 struct vdec_s *vdec = hw_to_vdec(hevc);
7709 int canvas_w = ALIGN(pic->width, 64)/4;
7710 int canvas_h = ALIGN(pic->height, 32)/4;
7711 int blkmode = hevc->mem_map_mode;
7712
7713 /*CANVAS_BLKMODE_64X32*/
7714 #ifdef SUPPORT_10BIT
7715 if (pic->double_write_mode) {
7716 canvas_w = pic->width /
7717 get_double_write_ratio(hevc, pic->double_write_mode);
7718 canvas_h = pic->height /
7719 get_double_write_ratio(hevc, pic->double_write_mode);
7720
7721 if (hevc->mem_map_mode == 0)
7722 canvas_w = ALIGN(canvas_w, 32);
7723 else
7724 canvas_w = ALIGN(canvas_w, 64);
7725 canvas_h = ALIGN(canvas_h, 32);
7726
7727 if (vdec->parallel_dec == 1) {
7728 if (pic->y_canvas_index == -1)
7729 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7730 if (pic->uv_canvas_index == -1)
7731 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7732 } else {
7733 pic->y_canvas_index = 128 + pic->index * 2;
7734 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7735 }
7736
7737 canvas_config_ex(pic->y_canvas_index,
7738 pic->dw_y_adr, canvas_w, canvas_h,
7739 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7740 canvas_config_ex(pic->uv_canvas_index, pic->dw_u_v_adr,
7741 canvas_w, canvas_h,
7742 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7743 #ifdef MULTI_INSTANCE_SUPPORT
7744 pic->canvas_config[0].phy_addr =
7745 pic->dw_y_adr;
7746 pic->canvas_config[0].width =
7747 canvas_w;
7748 pic->canvas_config[0].height =
7749 canvas_h;
7750 pic->canvas_config[0].block_mode =
7751 blkmode;
7752 pic->canvas_config[0].endian = hevc->is_used_v4l ? 0 : 7;
7753
7754 pic->canvas_config[1].phy_addr =
7755 pic->dw_u_v_adr;
7756 pic->canvas_config[1].width =
7757 canvas_w;
7758 pic->canvas_config[1].height =
7759 canvas_h;
7760 pic->canvas_config[1].block_mode =
7761 blkmode;
7762 pic->canvas_config[1].endian = hevc->is_used_v4l ? 0 : 7;
7763 #endif
7764 } else {
7765 if (!hevc->mmu_enable) {
7766 /* to change after 10bit VPU is ready ... */
7767 if (vdec->parallel_dec == 1) {
7768 if (pic->y_canvas_index == -1)
7769 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7770 pic->uv_canvas_index = pic->y_canvas_index;
7771 } else {
7772 pic->y_canvas_index = 128 + pic->index;
7773 pic->uv_canvas_index = 128 + pic->index;
7774 }
7775
7776 canvas_config_ex(pic->y_canvas_index,
7777 pic->mc_y_adr, canvas_w, canvas_h,
7778 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7779 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7780 canvas_w, canvas_h,
7781 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7782 }
7783 }
7784 #else
7785 if (vdec->parallel_dec == 1) {
7786 if (pic->y_canvas_index == -1)
7787 pic->y_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7788 if (pic->uv_canvas_index == -1)
7789 pic->uv_canvas_index = vdec->get_canvas_ex(CORE_MASK_HEVC, vdec->id);
7790 } else {
7791 pic->y_canvas_index = 128 + pic->index * 2;
7792 pic->uv_canvas_index = 128 + pic->index * 2 + 1;
7793 }
7794
7795
7796 canvas_config_ex(pic->y_canvas_index, pic->mc_y_adr, canvas_w, canvas_h,
7797 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7798 canvas_config_ex(pic->uv_canvas_index, pic->mc_u_v_adr,
7799 canvas_w, canvas_h,
7800 CANVAS_ADDR_NOWRAP, blkmode, hevc->is_used_v4l ? 0 : 7);
7801 #endif
7802 }
7803
7804 static int init_buf_spec(struct hevc_state_s *hevc)
7805 {
7806 int pic_width = hevc->pic_w;
7807 int pic_height = hevc->pic_h;
7808
7809 /* hevc_print(hevc, 0,
7810 *"%s1: %d %d\n", __func__, hevc->pic_w, hevc->pic_h);
7811 */
7812 hevc_print(hevc, 0,
7813 "%s2 %d %d\n", __func__, pic_width, pic_height);
7814 /* pic_width = hevc->pic_w; */
7815 /* pic_height = hevc->pic_h; */
7816
7817 if (hevc->frame_width == 0 || hevc->frame_height == 0) {
7818 hevc->frame_width = pic_width;
7819 hevc->frame_height = pic_height;
7820
7821 }
7822
7823 return 0;
7824 }
7825
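/*
 * Minimal SEI parser for the per-picture aux data: picks up
 * pic_timing (pic_struct), the HDR10+ ITU-T T.35 user-data marker,
 * mastering display colour volume and content light level messages,
 * and stores them in the hevc state for set_frame_info(). Other
 * payloads are skipped. Note that payload_type/payload_size are read
 * as single bytes, so 0xff extension bytes are not handled here.
 */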
7826 static int parse_sei(struct hevc_state_s *hevc,
7827 struct PIC_s *pic, char *sei_buf, uint32_t size)
7828 {
7829 char *p = sei_buf;
7830 char *p_sei;
7831 uint16_t header;
7832 uint8_t nal_unit_type;
7833 uint8_t payload_type, payload_size;
7834 int i, j;
7835
7836 if (size < 2)
7837 return 0;
7838 header = *p++;
7839 header <<= 8;
7840 header += *p++;
7841 nal_unit_type = header >> 9;
7842 if ((nal_unit_type != NAL_UNIT_SEI)
7843 && (nal_unit_type != NAL_UNIT_SEI_SUFFIX))
7844 return 0;
7845 while (p+2 <= sei_buf+size) {
7846 payload_type = *p++;
7847 payload_size = *p++;
7848 if (p+payload_size <= sei_buf+size) {
7849 switch (payload_type) {
7850 case SEI_PicTiming:
7851 if ((parser_sei_enable & 0x4) &&
7852 hevc->frame_field_info_present_flag) {
7853 p_sei = p;
7854 hevc->curr_pic_struct = (*p_sei >> 4)&0x0f;
7855 pic->pic_struct = hevc->curr_pic_struct;
7856 if (get_dbg_flag(hevc) &
7857 H265_DEBUG_PIC_STRUCT) {
7858 hevc_print(hevc, 0,
7859 "parse result pic_struct = %d\n",
7860 hevc->curr_pic_struct);
7861 }
7862 }
7863 break;
7864 case SEI_UserDataITU_T_T35:
7865 p_sei = p;
7866 if (p_sei[0] == 0xB5
7867 && p_sei[1] == 0x00
7868 && p_sei[2] == 0x3C
7869 && p_sei[3] == 0x00
7870 && p_sei[4] == 0x01
7871 && p_sei[5] == 0x04)
7872 hevc->sei_present_flag |= SEI_HDR10PLUS_MASK;
7873
7874 break;
7875 case SEI_MasteringDisplayColorVolume:
7876 /*hevc_print(hevc, 0,
7877 "sei type: primary display color volume %d, size %d\n",
7878 payload_type,
7879 payload_size);*/
7880 /* master_display_colour */
7881 p_sei = p;
7882 for (i = 0; i < 3; i++) {
7883 for (j = 0; j < 2; j++) {
7884 hevc->primaries[i][j]
7885 = (*p_sei<<8)
7886 | *(p_sei+1);
7887 p_sei += 2;
7888 }
7889 }
7890 for (i = 0; i < 2; i++) {
7891 hevc->white_point[i]
7892 = (*p_sei<<8)
7893 | *(p_sei+1);
7894 p_sei += 2;
7895 }
7896 for (i = 0; i < 2; i++) {
7897 hevc->luminance[i]
7898 = (*p_sei<<24)
7899 | (*(p_sei+1)<<16)
7900 | (*(p_sei+2)<<8)
7901 | *(p_sei+3);
7902 p_sei += 4;
7903 }
7904 hevc->sei_present_flag |=
7905 SEI_MASTER_DISPLAY_COLOR_MASK;
7906 /*for (i = 0; i < 3; i++)
7907 for (j = 0; j < 2; j++)
7908 hevc_print(hevc, 0,
7909 "\tprimaries[%1d][%1d] = %04x\n",
7910 i, j,
7911 hevc->primaries[i][j]);
7912 hevc_print(hevc, 0,
7913 "\twhite_point = (%04x, %04x)\n",
7914 hevc->white_point[0],
7915 hevc->white_point[1]);
7916 hevc_print(hevc, 0,
7917 "\tmax,min luminance = %08x, %08x\n",
7918 hevc->luminance[0],
7919 hevc->luminance[1]);*/
7920 break;
7921 case SEI_ContentLightLevel:
7922 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7923 hevc_print(hevc, 0,
7924 "sei type: max content light level %d, size %d\n",
7925 payload_type, payload_size);
7926 /* content_light_level */
7927 p_sei = p;
7928 hevc->content_light_level[0]
7929 = (*p_sei<<8) | *(p_sei+1);
7930 p_sei += 2;
7931 hevc->content_light_level[1]
7932 = (*p_sei<<8) | *(p_sei+1);
7933 p_sei += 2;
7934 hevc->sei_present_flag |=
7935 SEI_CONTENT_LIGHT_LEVEL_MASK;
7936 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
7937 hevc_print(hevc, 0,
7938 "\tmax cll = %04x, max_pa_cll = %04x\n",
7939 hevc->content_light_level[0],
7940 hevc->content_light_level[1]);
7941 break;
7942 default:
7943 break;
7944 }
7945 }
7946 p += payload_size;
7947 }
7948 return 0;
7949 }
7950
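/*
 * Convert the VUI aspect_ratio_idc (HEVC Table E.1) plus the coded
 * width/height into the 8.8 fixed-point height/width ratio used for
 * ratio_control; idc 255 uses the explicit sar_width/sar_height pair.
 * Example: idc 1 (square samples) with 1920x1080 gives
 * 0x100 * 1080 / 1920 = 0x90, i.e. 9/16.
 */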
7951 static unsigned calc_ar(unsigned idc, unsigned sar_w, unsigned sar_h,
7952 unsigned w, unsigned h)
7953 {
7954 unsigned ar;
7955
7956 if (idc == 255) {
7957 ar = div_u64(256ULL * sar_h * h,
7958 sar_w * w);
7959 } else {
7960 switch (idc) {
7961 case 1:
7962 ar = 0x100 * h / w;
7963 break;
7964 case 2:
7965 ar = 0x100 * h * 11 / (w * 12);
7966 break;
7967 case 3:
7968 ar = 0x100 * h * 11 / (w * 10);
7969 break;
7970 case 4:
7971 ar = 0x100 * h * 11 / (w * 16);
7972 break;
7973 case 5:
7974 ar = 0x100 * h * 33 / (w * 40);
7975 break;
7976 case 6:
7977 ar = 0x100 * h * 11 / (w * 24);
7978 break;
7979 case 7:
7980 ar = 0x100 * h * 11 / (w * 20);
7981 break;
7982 case 8:
7983 ar = 0x100 * h * 11 / (w * 32);
7984 break;
7985 case 9:
7986 ar = 0x100 * h * 33 / (w * 80);
7987 break;
7988 case 10:
7989 ar = 0x100 * h * 11 / (w * 18);
7990 break;
7991 case 11:
7992 ar = 0x100 * h * 11 / (w * 15);
7993 break;
7994 case 12:
7995 ar = 0x100 * h * 33 / (w * 64);
7996 break;
7997 case 13:
7998 ar = 0x100 * h * 99 / (w * 160);
7999 break;
8000 case 14:
8001 ar = 0x100 * h * 3 / (w * 4);
8002 break;
8003 case 15:
8004 ar = 0x100 * h * 2 / (w * 3);
8005 break;
8006 case 16:
8007 ar = 0x100 * h * 1 / (w * 2);
8008 break;
8009 default:
8010 ar = h * 0x100 / w;
8011 break;
8012 }
8013 }
8014
8015 return ar;
8016 }
8017
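/*
 * Fill the display-related vframe fields from the decoded picture:
 * size, duration, aspect ratio (via calc_ar), signal type, and the
 * HDR metadata (mastering display colour, content light level,
 * HDR10+ flag) recovered by parse_sei() from the aux data. In V4L2
 * mode the HDR info is also pushed to the v4l context.
 */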
8018 static void set_frame_info(struct hevc_state_s *hevc, struct vframe_s *vf,
8019 struct PIC_s *pic)
8020 {
8021 unsigned int ar;
8022 int i, j;
8023 char *p;
8024 unsigned size = 0;
8025 unsigned type = 0;
8026 struct vframe_master_display_colour_s *vf_dp
8027 = &vf->prop.master_display_colour;
8028
8029 vf->width = pic->width /
8030 get_double_write_ratio(hevc, pic->double_write_mode);
8031 vf->height = pic->height /
8032 get_double_write_ratio(hevc, pic->double_write_mode);
8033
8034 vf->duration = hevc->frame_dur;
8035 vf->duration_pulldown = 0;
8036 vf->flag = 0;
8037
8038 ar = min_t(u32, hevc->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
8039 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
8040
8041
8042 if (((pic->aspect_ratio_idc == 255) &&
8043 pic->sar_width &&
8044 pic->sar_height) ||
8045 ((pic->aspect_ratio_idc != 255) &&
8046 (pic->width))) {
8047 ar = min_t(u32,
8048 calc_ar(pic->aspect_ratio_idc,
8049 pic->sar_width,
8050 pic->sar_height,
8051 pic->width,
8052 pic->height),
8053 DISP_RATIO_ASPECT_RATIO_MAX);
8054 vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);
8055 vf->ratio_control <<= hevc->interlace_flag;
8056 }
8057 hevc->ratio_control = vf->ratio_control;
8058 if (pic->aux_data_buf
8059 && pic->aux_data_size) {
8060 /* parser sei */
8061 p = pic->aux_data_buf;
8062 while (p < pic->aux_data_buf
8063 + pic->aux_data_size - 8) {
8064 size = *p++;
8065 size = (size << 8) | *p++;
8066 size = (size << 8) | *p++;
8067 size = (size << 8) | *p++;
8068 type = *p++;
8069 type = (type << 8) | *p++;
8070 type = (type << 8) | *p++;
8071 type = (type << 8) | *p++;
8072 if (type == 0x02000000) {
8073 /* hevc_print(hevc, 0,
8074 "sei(%d)\n", size); */
8075 parse_sei(hevc, pic, p, size);
8076 }
8077 p += size;
8078 }
8079 }
8080 if (hevc->video_signal_type & VIDEO_SIGNAL_TYPE_AVAILABLE_MASK) {
8081 vf->signal_type = pic->video_signal_type;
8082 if (hevc->sei_present_flag & SEI_HDR10PLUS_MASK) {
8083 u32 data;
8084 data = vf->signal_type;
8085 data = data & 0xFFFF00FF;
8086 data = data | (0x30<<8);
8087 vf->signal_type = data;
8088 }
8089 }
8090 else
8091 vf->signal_type = 0;
8092 hevc->video_signal_type_debug = vf->signal_type;
8093
8094 /* master_display_colour */
8095 if (hevc->sei_present_flag & SEI_MASTER_DISPLAY_COLOR_MASK) {
8096 for (i = 0; i < 3; i++)
8097 for (j = 0; j < 2; j++)
8098 vf_dp->primaries[i][j] = hevc->primaries[i][j];
8099 for (i = 0; i < 2; i++) {
8100 vf_dp->white_point[i] = hevc->white_point[i];
8101 vf_dp->luminance[i]
8102 = hevc->luminance[i];
8103 }
8104 vf_dp->present_flag = 1;
8105 } else
8106 vf_dp->present_flag = 0;
8107
8108 /* content_light_level */
8109 if (hevc->sei_present_flag & SEI_CONTENT_LIGHT_LEVEL_MASK) {
8110 vf_dp->content_light_level.max_content
8111 = hevc->content_light_level[0];
8112 vf_dp->content_light_level.max_pic_average
8113 = hevc->content_light_level[1];
8114 vf_dp->content_light_level.present_flag = 1;
8115 } else
8116 vf_dp->content_light_level.present_flag = 0;
8117
8118 if (hevc->is_used_v4l &&
8119 ((hevc->sei_present_flag & SEI_HDR10PLUS_MASK) ||
8120 (vf_dp->present_flag) ||
8121 (vf_dp->content_light_level.present_flag))) {
8122 struct aml_vdec_hdr_infos hdr;
8123 struct aml_vcodec_ctx *ctx =
8124 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
8125
8126 memset(&hdr, 0, sizeof(hdr));
8127 hdr.signal_type = vf->signal_type;
8128 hdr.color_parms = *vf_dp;
8129 vdec_v4l_set_hdr_infos(ctx, &hdr);
8130 }
8131
8132 vf->sidebind_type = hevc->sidebind_type;
8133 vf->sidebind_channel_id = hevc->sidebind_channel_id;
8134 }
8135
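/*
 * vframe provider .vf_states op: report the pool size and the
 * current fill levels of the newframe (free) and display (ready)
 * queues.
 */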
8136 static int vh265_vf_states(struct vframe_states *states, void *op_arg)
8137 {
8138 unsigned long flags;
8139 #ifdef MULTI_INSTANCE_SUPPORT
8140 struct vdec_s *vdec = op_arg;
8141 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8142 #else
8143 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8144 #endif
8145
8146 spin_lock_irqsave(&lock, flags);
8147
8148 states->vf_pool_size = VF_POOL_SIZE;
8149 states->buf_free_num = kfifo_len(&hevc->newframe_q);
8150 states->buf_avail_num = kfifo_len(&hevc->display_q);
8151
8152 if (step == 2)
8153 states->buf_avail_num = 0;
8154 spin_unlock_irqrestore(&lock, flags);
8155 return 0;
8156 }
8157
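/*
 * vframe provider .peek op: return the next ready frame without
 * removing it from display_q, and pre-fill next_vf_pts from the
 * frame after it when one is available.
 */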
8158 static struct vframe_s *vh265_vf_peek(void *op_arg)
8159 {
8160 struct vframe_s *vf[2] = {0, 0};
8161 #ifdef MULTI_INSTANCE_SUPPORT
8162 struct vdec_s *vdec = op_arg;
8163 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8164 #else
8165 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8166 #endif
8167
8168 if (step == 2)
8169 return NULL;
8170
8171 if (force_disp_pic_index & 0x100) {
8172 if (force_disp_pic_index & 0x200)
8173 return NULL;
8174 return &hevc->vframe_dummy;
8175 }
8176
8177
8178 if (kfifo_out_peek(&hevc->display_q, (void *)&vf, 2)) {
8179 if (vf[1]) {
8180 vf[0]->next_vf_pts_valid = true;
8181 vf[0]->next_vf_pts = vf[1]->pts;
8182 } else
8183 vf[0]->next_vf_pts_valid = false;
8184 return vf[0];
8185 }
8186
8187 return NULL;
8188 }
8189
8190 static struct vframe_s *vh265_vf_get(void *op_arg)
8191 {
8192 struct vframe_s *vf;
8193 #ifdef MULTI_INSTANCE_SUPPORT
8194 struct vdec_s *vdec = op_arg;
8195 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8196 #else
8197 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8198 #endif
8199
8200 if (step == 2)
8201 return NULL;
8202 else if (step == 1)
8203 step = 2;
8204
8205 #if 0
8206 if (force_disp_pic_index & 0x100) {
8207 int buffer_index = force_disp_pic_index & 0xff;
8208 struct PIC_s *pic = NULL;
8209 if (buffer_index >= 0
8210 && buffer_index < MAX_REF_PIC_NUM)
8211 pic = hevc->m_PIC[buffer_index];
8212 if (pic == NULL)
8213 return NULL;
8214 if (force_disp_pic_index & 0x200)
8215 return NULL;
8216
8217 vf = &hevc->vframe_dummy;
8218 if (get_double_write_mode(hevc)) {
8219 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD |
8220 VIDTYPE_VIU_NV21;
8221 if (hevc->m_ins_flag) {
8222 vf->canvas0Addr = vf->canvas1Addr = -1;
8223 vf->plane_num = 2;
8224 vf->canvas0_config[0] =
8225 pic->canvas_config[0];
8226 vf->canvas0_config[1] =
8227 pic->canvas_config[1];
8228
8229 vf->canvas1_config[0] =
8230 pic->canvas_config[0];
8231 vf->canvas1_config[1] =
8232 pic->canvas_config[1];
8233 } else {
8234 vf->canvas0Addr = vf->canvas1Addr
8235 = spec2canvas(pic);
8236 }
8237 } else {
8238 vf->canvas0Addr = vf->canvas1Addr = 0;
8239 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8240 if (hevc->mmu_enable)
8241 vf->type |= VIDTYPE_SCATTER;
8242 }
8243 vf->compWidth = pic->width;
8244 vf->compHeight = pic->height;
8245 update_vf_memhandle(hevc, vf, pic);
8246 switch (hevc->bit_depth_luma) {
8247 case 9:
8248 vf->bitdepth = BITDEPTH_Y9 | BITDEPTH_U9 | BITDEPTH_V9;
8249 break;
8250 case 10:
8251 vf->bitdepth = BITDEPTH_Y10 | BITDEPTH_U10
8252 | BITDEPTH_V10;
8253 break;
8254 default:
8255 vf->bitdepth = BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8256 break;
8257 }
8258 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8259 vf->bitdepth =
8260 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8261 if (hevc->mem_saving_mode == 1)
8262 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8263 vf->duration_pulldown = 0;
8264 vf->pts = 0;
8265 vf->pts_us64 = 0;
8266 set_frame_info(hevc, vf);
8267
8268 vf->width = pic->width /
8269 get_double_write_ratio(hevc, pic->double_write_mode);
8270 vf->height = pic->height /
8271 get_double_write_ratio(hevc, pic->double_write_mode);
8272
8273 force_disp_pic_index |= 0x200;
8274 return vf;
8275 }
8276 #endif
8277
8278 if (kfifo_get(&hevc->display_q, &vf)) {
8279 struct vframe_s *next_vf;
8280 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8281 hevc_print(hevc, 0,
8282 "%s(vf 0x%p type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
8283 __func__, vf, vf->type, vf->index,
8284 get_pic_poc(hevc, vf->index & 0xff),
8285 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
8286 vf->pts, vf->pts_us64,
8287 vf->duration);
8288 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8289 if (get_dbg_flag(hevc) & H265_DEBUG_DV) {
8290 struct PIC_s *pic = hevc->m_PIC[vf->index & 0xff];
8291 if (pic->aux_data_buf && pic->aux_data_size > 0) {
8292 int i;
8293 struct PIC_s *pic =
8294 hevc->m_PIC[vf->index & 0xff];
8295 hevc_print(hevc, 0,
8296 "pic 0x%p aux size %d:\n",
8297 pic, pic->aux_data_size);
8298 for (i = 0; i < pic->aux_data_size; i++) {
8299 hevc_print_cont(hevc, 0,
8300 "%02x ", pic->aux_data_buf[i]);
8301 if (((i + 1) & 0xf) == 0)
8302 hevc_print_cont(hevc, 0, "\n");
8303 }
8304 hevc_print_cont(hevc, 0, "\n");
8305 }
8306 }
8307 #endif
8308 hevc->show_frame_num++;
8309 vf->index_disp = hevc->vf_get_count;
8310 hevc->vf_get_count++;
8311
8312 if (kfifo_peek(&hevc->display_q, &next_vf)) {
8313 vf->next_vf_pts_valid = true;
8314 vf->next_vf_pts = next_vf->pts;
8315 } else
8316 vf->next_vf_pts_valid = false;
8317
8318 return vf;
8319 }
8320
8321 return NULL;
8322 }
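/*
 * Sanity check that a vframe handed back by the receiver really
 * belongs to this instance's vfpool before it is recycled.
 */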
8323 static bool vf_valid_check(struct vframe_s *vf, struct hevc_state_s *hevc) {
8324 int i;
8325 for (i = 0; i < VF_POOL_SIZE; i++) {
8326 if (vf == &hevc->vfpool[i])
8327 return true;
8328 }
8329 	pr_info("h265: invalid vf %p has been put back\n", vf);
8330 	for (i = 0; i < VF_POOL_SIZE; i++) {
8331 		pr_info("valid vf[%d] = %p\n", i, &hevc->vfpool[i]);
8332 }
8333 return false;
8334 }
8335
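/*
 * vframe provider .put op: validate the frame, return it to
 * newframe_q and drop the vf_ref of the top/bottom pictures packed
 * in vf->index; when a picture's last reference goes away its buffer
 * is marked free and the ucode is kicked if it was waiting for one.
 */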
8336 static void vh265_vf_put(struct vframe_s *vf, void *op_arg)
8337 {
8338 unsigned long flags;
8339 #ifdef MULTI_INSTANCE_SUPPORT
8340 struct vdec_s *vdec = op_arg;
8341 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8342 #else
8343 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8344 #endif
8345 unsigned char index_top;
8346 unsigned char index_bot;
8347
8348 if (vf && (vf_valid_check(vf, hevc) == false))
8349 return;
8350 if (vf == (&hevc->vframe_dummy))
8351 return;
8352 if (!vf)
8353 return;
8354 index_top = vf->index & 0xff;
8355 index_bot = (vf->index >> 8) & 0xff;
8356 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8357 hevc_print(hevc, 0,
8358 "%s(type %d index 0x%x)\n",
8359 __func__, vf->type, vf->index);
8360 hevc->vf_put_count++;
8361 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8362 spin_lock_irqsave(&lock, flags);
8363
8364 if (index_top != 0xff
8365 && index_top < MAX_REF_PIC_NUM
8366 && hevc->m_PIC[index_top]) {
8367 if (hevc->is_used_v4l)
8368 hevc->m_PIC[index_top]->vframe_bound = true;
8369 if (hevc->m_PIC[index_top]->vf_ref > 0) {
8370 hevc->m_PIC[index_top]->vf_ref--;
8371
8372 if (hevc->m_PIC[index_top]->vf_ref == 0) {
8373 hevc->m_PIC[index_top]->output_ready = 0;
8374
8375 if (hevc->wait_buf != 0)
8376 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8377 0x1);
8378 }
8379 }
8380 }
8381
8382 if (index_bot != 0xff
8383 && index_bot < MAX_REF_PIC_NUM
8384 && hevc->m_PIC[index_bot]) {
8385 if (hevc->is_used_v4l)
8386 hevc->m_PIC[index_bot]->vframe_bound = true;
8387 if (hevc->m_PIC[index_bot]->vf_ref > 0) {
8388 hevc->m_PIC[index_bot]->vf_ref--;
8389
8390 if (hevc->m_PIC[index_bot]->vf_ref == 0) {
8391 hevc->m_PIC[index_bot]->output_ready = 0;
8392 if (hevc->wait_buf != 0)
8393 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8394 0x1);
8395 }
8396 }
8397 }
8398 spin_unlock_irqrestore(&lock, flags);
8399 }
8400
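/*
 * vframe provider event callback: serves receiver requests for the
 * per-picture aux data (SEI / Dolby Vision metadata) and the Dolby
 * Vision "bypass EL" notification; the receiver reset path is
 * currently compiled out.
 */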
8401 static int vh265_event_cb(int type, void *data, void *op_arg)
8402 {
8403 unsigned long flags;
8404 #ifdef MULTI_INSTANCE_SUPPORT
8405 struct vdec_s *vdec = op_arg;
8406 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
8407 #else
8408 struct hevc_state_s *hevc = (struct hevc_state_s *)op_arg;
8409 #endif
8410 if (type & VFRAME_EVENT_RECEIVER_RESET) {
8411 #if 0
8412 amhevc_stop();
8413 #ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8414 vf_light_unreg_provider(&vh265_vf_prov);
8415 #endif
8416 spin_lock_irqsave(&hevc->lock, flags);
8417 vh265_local_init();
8418 vh265_prot_init();
8419 spin_unlock_irqrestore(&hevc->lock, flags);
8420 #ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
8421 vf_reg_provider(&vh265_vf_prov);
8422 #endif
8423 amhevc_start();
8424 #endif
8425 } else if (type & VFRAME_EVENT_RECEIVER_GET_AUX_DATA) {
8426 struct provider_aux_req_s *req =
8427 (struct provider_aux_req_s *)data;
8428 unsigned char index;
8429
8430 if (!req->vf) {
8431 req->aux_size = hevc->vf_put_count;
8432 return 0;
8433 }
8434 spin_lock_irqsave(&lock, flags);
8435 index = req->vf->index & 0xff;
8436 req->aux_buf = NULL;
8437 req->aux_size = 0;
8438 if (req->bot_flag)
8439 index = (req->vf->index >> 8) & 0xff;
8440 if (index != 0xff
8441 && index < MAX_REF_PIC_NUM
8442 && hevc->m_PIC[index]) {
8443 req->aux_buf = hevc->m_PIC[index]->aux_data_buf;
8444 req->aux_size = hevc->m_PIC[index]->aux_data_size;
8445 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8446 if (hevc->bypass_dvenl && !dolby_meta_with_el)
8447 req->dv_enhance_exist = false;
8448 else
8449 req->dv_enhance_exist =
8450 hevc->m_PIC[index]->dv_enhance_exist;
8451 hevc_print(hevc, H265_DEBUG_DV,
8452 				"query dv_enhance_exist for pic (vf 0x%p, poc %d index %d) flag => %d, aux size 0x%x\n",
8453 req->vf,
8454 hevc->m_PIC[index]->POC, index,
8455 req->dv_enhance_exist, req->aux_size);
8456 #else
8457 req->dv_enhance_exist = 0;
8458 #endif
8459 }
8460 spin_unlock_irqrestore(&lock, flags);
8461
8462 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8463 hevc_print(hevc, 0,
8464 "%s(type 0x%x vf index 0x%x)=>size 0x%x\n",
8465 __func__, type, index, req->aux_size);
8466 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8467 } else if (type & VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL) {
8468 if ((force_bypass_dvenl & 0x80000000) == 0) {
8469 hevc_print(hevc, 0,
8470 "%s: VFRAME_EVENT_RECEIVER_DOLBY_BYPASS_EL\n",
8471 __func__);
8472 hevc->bypass_dvenl_enable = 1;
8473 }
8474
8475 #endif
8476 }
8477 return 0;
8478 }
8479
8480 #ifdef HEVC_PIC_STRUCT_SUPPORT
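/*
 * Field pairing helper for interlaced pic_struct values: flush or
 * pair the single frame kept in pending_q with the newly decoded
 * top/bottom picture, attaching the pair's canvas to the pending vf
 * before it is moved to display_q. At most one frame is kept pending.
 */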
8481 static int process_pending_vframe(struct hevc_state_s *hevc,
8482 struct PIC_s *pair_pic, unsigned char pair_frame_top_flag)
8483 {
8484 struct vframe_s *vf;
8485
8486 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8487 hevc_print(hevc, 0,
8488 "%s: pair_pic index 0x%x %s\n",
8489 __func__, pair_pic->index,
8490 pair_frame_top_flag ?
8491 "top" : "bot");
8492
8493 if (kfifo_len(&hevc->pending_q) > 1) {
8494 unsigned long flags;
8495 /* do not pending more than 1 frame */
8496 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8497 hevc_print(hevc, 0,
8498 "fatal error, no available buffer slot.");
8499 return -1;
8500 }
8501 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8502 hevc_print(hevc, 0,
8503 "%s warning(1), vf=>display_q: (index 0x%x)\n",
8504 __func__, vf->index);
8505 if ((hevc->double_write_mode == 3) &&
8506 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8507 vf->type |= VIDTYPE_COMPRESS;
8508 if (hevc->mmu_enable)
8509 vf->type |= VIDTYPE_SCATTER;
8510 }
8511 hevc->vf_pre_count++;
8512 kfifo_put(&hevc->newframe_q, (const struct vframe_s *)vf);
8513 spin_lock_irqsave(&lock, flags);
8514 vf->index &= 0xff;
8515 hevc->m_PIC[vf->index]->vf_ref = 0;
8516 hevc->m_PIC[vf->index]->output_ready = 0;
8517 if (hevc->wait_buf != 0)
8518 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
8519 0x1);
8520 spin_unlock_irqrestore(&lock, flags);
8521
8522 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8523 }
8524
8525 if (kfifo_peek(&hevc->pending_q, &vf)) {
8526 if (pair_pic == NULL || pair_pic->vf_ref <= 0) {
8527 /*
8528 *if pair_pic is recycled (pair_pic->vf_ref <= 0),
8529 *do not use it
8530 */
8531 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8532 hevc_print(hevc, 0,
8533 "fatal error, no available buffer slot.");
8534 return -1;
8535 }
8536 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8537 hevc_print(hevc, 0,
8538 "%s warning(2), vf=>display_q: (index 0x%x)\n",
8539 __func__, vf->index);
8540 if (vf) {
8541 if ((hevc->double_write_mode == 3) &&
8542 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8543 vf->type |= VIDTYPE_COMPRESS;
8544 if (hevc->mmu_enable)
8545 vf->type |= VIDTYPE_SCATTER;
8546 }
8547 hevc->vf_pre_count++;
8548 kfifo_put(&hevc->display_q,
8549 (const struct vframe_s *)vf);
8550 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8551 }
8552 } else if ((!pair_frame_top_flag) &&
8553 (((vf->index >> 8) & 0xff) == 0xff)) {
8554 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8555 hevc_print(hevc, 0,
8556 "fatal error, no available buffer slot.");
8557 return -1;
8558 }
8559 if (vf) {
8560 if ((hevc->double_write_mode == 3) &&
8561 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8562 vf->type |= VIDTYPE_COMPRESS;
8563 if (hevc->mmu_enable)
8564 vf->type |= VIDTYPE_SCATTER;
8565 }
8566 vf->index &= 0xff;
8567 vf->index |= (pair_pic->index << 8);
8568 vf->canvas1Addr = spec2canvas(pair_pic);
8569 pair_pic->vf_ref++;
8570 kfifo_put(&hevc->display_q,
8571 (const struct vframe_s *)vf);
8572 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8573 hevc->vf_pre_count++;
8574 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8575 hevc_print(hevc, 0,
8576 "%s vf => display_q: (index 0x%x)\n",
8577 __func__, vf->index);
8578 }
8579 } else if (pair_frame_top_flag &&
8580 ((vf->index & 0xff) == 0xff)) {
8581 if (kfifo_get(&hevc->pending_q, &vf) == 0) {
8582 hevc_print(hevc, 0,
8583 "fatal error, no available buffer slot.");
8584 return -1;
8585 }
8586 if (vf) {
8587 if ((hevc->double_write_mode == 3) &&
8588 (!(IS_8K_SIZE(vf->width, vf->height)))) {
8589 vf->type |= VIDTYPE_COMPRESS;
8590 if (hevc->mmu_enable)
8591 vf->type |= VIDTYPE_SCATTER;
8592 }
8593 vf->index &= 0xff00;
8594 vf->index |= pair_pic->index;
8595 vf->canvas0Addr = spec2canvas(pair_pic);
8596 pair_pic->vf_ref++;
8597 kfifo_put(&hevc->display_q,
8598 (const struct vframe_s *)vf);
8599 ATRACE_COUNTER(MODULE_NAME, vf->pts);
8600 hevc->vf_pre_count++;
8601 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
8602 hevc_print(hevc, 0,
8603 "%s vf => display_q: (index 0x%x)\n",
8604 __func__, vf->index);
8605 }
8606 }
8607 }
8608 return 0;
8609 }
8610 #endif
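/*
 * Attach codec_mm memory handles to the vframe so the consumer can
 * hold the underlying buffers: scatter (MMU) pages plus the header
 * buffer in MMU mode, or the linear bmmu buffer otherwise.
 */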
8611 static void update_vf_memhandle(struct hevc_state_s *hevc,
8612 struct vframe_s *vf, struct PIC_s *pic)
8613 {
8614 if (pic->index < 0) {
8615 vf->mem_handle = NULL;
8616 vf->mem_head_handle = NULL;
8617 } else if (vf->type & VIDTYPE_SCATTER) {
8618 vf->mem_handle =
8619 decoder_mmu_box_get_mem_handle(
8620 hevc->mmu_box, pic->index);
8621 vf->mem_head_handle =
8622 decoder_bmmu_box_get_mem_handle(
8623 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8624 } else {
8625 vf->mem_handle =
8626 decoder_bmmu_box_get_mem_handle(
8627 hevc->bmmu_box, VF_BUFFER_IDX(pic->BUF_index));
8628 vf->mem_head_handle = NULL;
8629 /*vf->mem_head_handle =
8630 decoder_bmmu_box_get_mem_handle(
8631 hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));*/
8632 }
8633 return;
8634 }
8635
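/*
 * Collect per-frame QoS statistics (frame type, size, pts and the
 * min/avg/max of motion vectors, QP and skip ratio reported for the
 * picture) into hevc->vframe_qos for vdec_fill_vdec_frame().
 */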
8636 static void fill_frame_info(struct hevc_state_s *hevc,
8637 struct PIC_s *pic, unsigned int framesize, unsigned int pts)
8638 {
8639 struct vframe_qos_s *vframe_qos = &hevc->vframe_qos;
8640 if (hevc->m_nalUnitType == NAL_UNIT_CODED_SLICE_IDR)
8641 vframe_qos->type = 4;
8642 else if (pic->slice_type == I_SLICE)
8643 vframe_qos->type = 1;
8644 else if (pic->slice_type == P_SLICE)
8645 vframe_qos->type = 2;
8646 else if (pic->slice_type == B_SLICE)
8647 vframe_qos->type = 3;
8648 /*
8649 #define SHOW_QOS_INFO
8650 */
8651 if (input_frame_based(hw_to_vdec(hevc)))
8652 vframe_qos->size = pic->frame_size;
8653 else
8654 vframe_qos->size = framesize;
8655 vframe_qos->pts = pts;
8656 #ifdef SHOW_QOS_INFO
8657 hevc_print(hevc, 0, "slice:%d, poc:%d\n", pic->slice_type, pic->POC);
8658 #endif
8659
8660
8661 vframe_qos->max_mv = pic->max_mv;
8662 vframe_qos->avg_mv = pic->avg_mv;
8663 vframe_qos->min_mv = pic->min_mv;
8664 #ifdef SHOW_QOS_INFO
8665 hevc_print(hevc, 0, "mv: max:%d, avg:%d, min:%d\n",
8666 vframe_qos->max_mv,
8667 vframe_qos->avg_mv,
8668 vframe_qos->min_mv);
8669 #endif
8670
8671 vframe_qos->max_qp = pic->max_qp;
8672 vframe_qos->avg_qp = pic->avg_qp;
8673 vframe_qos->min_qp = pic->min_qp;
8674 #ifdef SHOW_QOS_INFO
8675 hevc_print(hevc, 0, "qp: max:%d, avg:%d, min:%d\n",
8676 vframe_qos->max_qp,
8677 vframe_qos->avg_qp,
8678 vframe_qos->min_qp);
8679 #endif
8680
8681 vframe_qos->max_skip = pic->max_skip;
8682 vframe_qos->avg_skip = pic->avg_skip;
8683 vframe_qos->min_skip = pic->min_skip;
8684 #ifdef SHOW_QOS_INFO
8685 hevc_print(hevc, 0, "skip: max:%d, avg:%d, min:%d\n",
8686 vframe_qos->max_skip,
8687 vframe_qos->avg_skip,
8688 vframe_qos->min_skip);
8689 #endif
8690
8691 vframe_qos->num++;
8692
8693 }
8694
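/*
 * Refresh the decoder status (gvs) fields that may have changed
 * since the last frame: resolution, frame duration/rate, error
 * count, status flags and ratio control.
 */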
8695 static inline void hevc_update_gvs(struct hevc_state_s *hevc)
8696 {
8697 if (hevc->gvs->frame_height != hevc->frame_height) {
8698 hevc->gvs->frame_width = hevc->frame_width;
8699 hevc->gvs->frame_height = hevc->frame_height;
8700 }
8701 if (hevc->gvs->frame_dur != hevc->frame_dur) {
8702 hevc->gvs->frame_dur = hevc->frame_dur;
8703 if (hevc->frame_dur != 0)
8704 hevc->gvs->frame_rate = 96000 / hevc->frame_dur;
8705 else
8706 hevc->gvs->frame_rate = -1;
8707 }
8708 hevc->gvs->error_count = hevc->gvs->error_frame_count;
8709 hevc->gvs->status = hevc->stat | hevc->fatal_error;
8710 if (hevc->gvs->ratio_control != hevc->ratio_control)
8711 hevc->gvs->ratio_control = hevc->ratio_control;
8712 }
8713
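/*
 * Turn a decoded picture into one or more vframes and queue them for
 * display: look up or synthesize the pts, fill the compressed and
 * canvas addresses according to the double write mode, apply the
 * conformance window crop, split interlaced pic_struct values into
 * field vframes, then publish to display_q and notify the receiver.
 */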
8714 static int prepare_display_buf(struct hevc_state_s *hevc, struct PIC_s *pic)
8715 {
8716 struct vdec_s *vdec = hw_to_vdec(hevc);
8717 struct vframe_s *vf = NULL;
8718 int stream_offset = pic->stream_offset;
8719 unsigned short slice_type = pic->slice_type;
8720 ulong nv_order = VIDTYPE_VIU_NV21;
8721 u32 frame_size = 0;
8722 struct vdec_info tmp4x;
8723 struct aml_vcodec_ctx * v4l2_ctx = hevc->v4l2_ctx;
8724
8725 /* swap uv */
8726 if (hevc->is_used_v4l) {
8727 if ((v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12) ||
8728 (v4l2_ctx->cap_pix_fmt == V4L2_PIX_FMT_NV12M))
8729 nv_order = VIDTYPE_VIU_NV12;
8730 }
8731
8732 if (force_disp_pic_index & 0x100) {
8733 /*recycle directly*/
8734 pic->output_ready = 0;
8735 return -1;
8736 }
8737 if (kfifo_get(&hevc->newframe_q, &vf) == 0) {
8738 hevc_print(hevc, 0,
8739 "fatal error, no available buffer slot.");
8740 return -1;
8741 }
8742 display_frame_count[hevc->index]++;
8743 if (vf) {
8744 /*hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
8745 "%s: pic index 0x%x\n",
8746 __func__, pic->index);*/
8747
8748 if (hevc->is_used_v4l) {
8749 vf->v4l_mem_handle
8750 = hevc->m_BUF[pic->BUF_index].v4l_ref_buf_addr;
8751 if (hevc->mmu_enable) {
8752 vf->mm_box.bmmu_box = hevc->bmmu_box;
8753 vf->mm_box.bmmu_idx = VF_BUFFER_IDX(pic->BUF_index);
8754 vf->mm_box.mmu_box = hevc->mmu_box;
8755 vf->mm_box.mmu_idx = pic->index;
8756 }
8757 }
8758
8759 #ifdef MULTI_INSTANCE_SUPPORT
8760 if (vdec_frame_based(vdec)) {
8761 vf->pts = pic->pts;
8762 vf->pts_us64 = pic->pts64;
8763 vf->timestamp = pic->timestamp;
8764 }
8765 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
8766 stream_offset, &vf->pts, 0) != 0) { */
8767 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8768 else if (vdec->master == NULL) {
8769 #else
8770 else {
8771 #endif
8772 #endif
8773 if (!vdec_dual(vdec) && pic->stream_frame_size > 50 &&
8774 (hevc->min_pic_size > pic->stream_frame_size ||
8775 (hevc->min_pic_size == 0))) {
8776 hevc->min_pic_size = pic->stream_frame_size;
8777
8778 if (hevc->min_pic_size < 1024 &&
8779 ((hevc->pts_lookup_margin > hevc->min_pic_size)
8780 || (hevc->pts_lookup_margin == 0)))
8781 hevc->pts_lookup_margin = hevc->min_pic_size;
8782 }
8783
8784 hevc_print(hevc, H265_DEBUG_OUT_PTS,
8785 "call pts_lookup_offset_us64(0x%x)\n",
8786 stream_offset);
8787 if (pts_lookup_offset_us64
8788 (PTS_TYPE_VIDEO, stream_offset, &vf->pts,
8789 &frame_size, hevc->pts_lookup_margin,
8790 &vf->pts_us64) != 0) {
8791 #ifdef DEBUG_PTS
8792 hevc->pts_missed++;
8793 #endif
8794 vf->pts = 0;
8795 vf->pts_us64 = 0;
8796 hevc->pts_continue_miss++;
8797 } else {
8798 hevc->pts_continue_miss = 0;
8799 #ifdef DEBUG_PTS
8800 hevc->pts_hit++;
8801 #endif
8802 }
8803 #ifdef MULTI_INSTANCE_SUPPORT
8804 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
8805 } else {
8806 vf->pts = 0;
8807 vf->pts_us64 = 0;
8808 }
8809 #else
8810 }
8811 #endif
8812 #endif
8813 if (!vdec_dual(vdec) &&
8814 vdec_stream_based(vdec) && (vf->duration > 0)) {
8815 if ((vf->pts != 0) && (hevc->last_pts != 0)) {
8816 int diff = vf->pts - hevc->last_pts;
8817 if (diff > ((hevc->pts_continue_miss + 2)
8818 * DUR2PTS(vf->duration))) {
8819 vf->pts = 0;
8820 vf->pts_us64 = 0;
8821 }
8822 }
8823 }
8824
8825 if (pts_unstable && (hevc->frame_dur > 0))
8826 hevc->pts_mode = PTS_NONE_REF_USE_DURATION;
8827
8828 fill_frame_info(hevc, pic, frame_size, vf->pts);
8829
8830 if ((hevc->pts_mode == PTS_NORMAL) && (vf->pts != 0)
8831 && hevc->get_frame_dur) {
8832 int pts_diff = (int)vf->pts - hevc->last_lookup_pts;
8833
8834 if (pts_diff < 0) {
8835 hevc->pts_mode_switching_count++;
8836 hevc->pts_mode_recovery_count = 0;
8837
8838 if (hevc->pts_mode_switching_count >=
8839 PTS_MODE_SWITCHING_THRESHOLD) {
8840 hevc->pts_mode =
8841 PTS_NONE_REF_USE_DURATION;
8842 hevc_print(hevc, 0,
8843 "HEVC: switch to n_d mode.\n");
8844 }
8845
8846 } else {
8847 int p = PTS_MODE_SWITCHING_RECOVERY_THREASHOLD;
8848
8849 hevc->pts_mode_recovery_count++;
8850 if (hevc->pts_mode_recovery_count > p) {
8851 hevc->pts_mode_switching_count = 0;
8852 hevc->pts_mode_recovery_count = 0;
8853 }
8854 }
8855 }
8856
8857 if (vf->pts != 0)
8858 hevc->last_lookup_pts = vf->pts;
8859
8860 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8861 && (slice_type != 2))
8862 vf->pts = hevc->last_pts + DUR2PTS(hevc->frame_dur);
8863 hevc->last_pts = vf->pts;
8864
8865 if (vf->pts_us64 != 0)
8866 hevc->last_lookup_pts_us64 = vf->pts_us64;
8867
8868 if ((hevc->pts_mode == PTS_NONE_REF_USE_DURATION)
8869 && (slice_type != 2)) {
8870 vf->pts_us64 =
8871 hevc->last_pts_us64 +
8872 (DUR2PTS(hevc->frame_dur) * 100 / 9);
8873 }
8874 hevc->last_pts_us64 = vf->pts_us64;
8875 if ((get_dbg_flag(hevc) & H265_DEBUG_OUT_PTS) != 0) {
8876 hevc_print(hevc, 0,
8877 "H265 dec out pts: vf->pts=%d, vf->pts_us64 = %lld\n",
8878 vf->pts, vf->pts_us64);
8879 }
8880
8881 /*
8882 *vf->index:
8883 *(1) vf->type is VIDTYPE_PROGRESSIVE
8884 * and vf->canvas0Addr != vf->canvas1Addr,
8885 * vf->index[7:0] is the index of top pic
8886 * vf->index[15:8] is the index of bot pic
8887 *(2) other cases,
8888 * only vf->index[7:0] is used
8889 * vf->index[15:8] == 0xff
8890 */
8891 vf->index = 0xff00 | pic->index;
8892 #if 1
8893 /*SUPPORT_10BIT*/
8894 if (pic->double_write_mode & 0x10) {
8895 /* double write only */
8896 vf->compBodyAddr = 0;
8897 vf->compHeadAddr = 0;
8898 } else {
8899
8900 if (hevc->mmu_enable) {
8901 vf->compBodyAddr = 0;
8902 vf->compHeadAddr = pic->header_adr;
8903 } else {
8904 vf->compBodyAddr = pic->mc_y_adr; /*body adr*/
8905 vf->compHeadAddr = pic->mc_y_adr +
8906 pic->losless_comp_body_size;
8907 vf->mem_head_handle = NULL;
8908 }
8909
8910 /*head adr*/
8911 vf->canvas0Addr = vf->canvas1Addr = 0;
8912 }
8913 if (pic->double_write_mode) {
8914 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8915 vf->type |= nv_order;
8916
8917 if ((pic->double_write_mode == 3) &&
8918 (!(IS_8K_SIZE(pic->width, pic->height)))) {
8919 vf->type |= VIDTYPE_COMPRESS;
8920 if (hevc->mmu_enable)
8921 vf->type |= VIDTYPE_SCATTER;
8922 }
8923 #ifdef MULTI_INSTANCE_SUPPORT
8924 if (hevc->m_ins_flag &&
8925 (get_dbg_flag(hevc)
8926 & H265_CFG_CANVAS_IN_DECODE) == 0) {
8927 vf->canvas0Addr = vf->canvas1Addr = -1;
8928 vf->plane_num = 2;
8929 vf->canvas0_config[0] =
8930 pic->canvas_config[0];
8931 vf->canvas0_config[1] =
8932 pic->canvas_config[1];
8933
8934 vf->canvas1_config[0] =
8935 pic->canvas_config[0];
8936 vf->canvas1_config[1] =
8937 pic->canvas_config[1];
8938
8939 } else
8940 #endif
8941 vf->canvas0Addr = vf->canvas1Addr
8942 = spec2canvas(pic);
8943 } else {
8944 vf->canvas0Addr = vf->canvas1Addr = 0;
8945 vf->type = VIDTYPE_COMPRESS | VIDTYPE_VIU_FIELD;
8946 if (hevc->mmu_enable)
8947 vf->type |= VIDTYPE_SCATTER;
8948 }
8949 vf->compWidth = pic->width;
8950 vf->compHeight = pic->height;
8951 update_vf_memhandle(hevc, vf, pic);
8952 switch (pic->bit_depth_luma) {
8953 case 9:
8954 vf->bitdepth = BITDEPTH_Y9;
8955 break;
8956 case 10:
8957 vf->bitdepth = BITDEPTH_Y10;
8958 break;
8959 default:
8960 vf->bitdepth = BITDEPTH_Y8;
8961 break;
8962 }
8963 switch (pic->bit_depth_chroma) {
8964 case 9:
8965 vf->bitdepth |= (BITDEPTH_U9 | BITDEPTH_V9);
8966 break;
8967 case 10:
8968 vf->bitdepth |= (BITDEPTH_U10 | BITDEPTH_V10);
8969 break;
8970 default:
8971 vf->bitdepth |= (BITDEPTH_U8 | BITDEPTH_V8);
8972 break;
8973 }
8974 if ((vf->type & VIDTYPE_COMPRESS) == 0)
8975 vf->bitdepth =
8976 BITDEPTH_Y8 | BITDEPTH_U8 | BITDEPTH_V8;
8977 if (pic->mem_saving_mode == 1)
8978 vf->bitdepth |= BITDEPTH_SAVING_MODE;
8979 #else
8980 vf->type = VIDTYPE_PROGRESSIVE | VIDTYPE_VIU_FIELD;
8981 vf->type |= nv_order;
8982 vf->canvas0Addr = vf->canvas1Addr = spec2canvas(pic);
8983 #endif
8984 set_frame_info(hevc, vf, pic);
8985 /* if((vf->width!=pic->width)||(vf->height!=pic->height)) */
8986 /* hevc_print(hevc, 0,
8987 "aaa: %d/%d, %d/%d\n",
8988 vf->width,vf->height, pic->width, pic->height); */
8989 vf->width = pic->width;
8990 vf->height = pic->height;
8991
8992 if (force_w_h != 0) {
8993 vf->width = (force_w_h >> 16) & 0xffff;
8994 vf->height = force_w_h & 0xffff;
8995 }
8996 if (force_fps & 0x100) {
8997 u32 rate = force_fps & 0xff;
8998
8999 if (rate)
9000 vf->duration = 96000/rate;
9001 else
9002 vf->duration = 0;
9003 }
9004 if (force_fps & 0x200) {
9005 vf->pts = 0;
9006 vf->pts_us64 = 0;
9007 }
9008 /*
9009 * !!! to do ...
9010 * need move below code to get_new_pic(),
9011 * hevc->xxx can only be used by current decoded pic
9012 */
9013 if (pic->conformance_window_flag &&
9014 (get_dbg_flag(hevc) &
9015 H265_DEBUG_IGNORE_CONFORMANCE_WINDOW) == 0) {
9016 unsigned int SubWidthC, SubHeightC;
9017
9018 switch (pic->chroma_format_idc) {
9019 case 1:
9020 SubWidthC = 2;
9021 SubHeightC = 2;
9022 break;
9023 case 2:
9024 SubWidthC = 2;
9025 SubHeightC = 1;
9026 break;
9027 default:
9028 SubWidthC = 1;
9029 SubHeightC = 1;
9030 break;
9031 }
9032 vf->width -= SubWidthC *
9033 (pic->conf_win_left_offset +
9034 pic->conf_win_right_offset);
9035 vf->height -= SubHeightC *
9036 (pic->conf_win_top_offset +
9037 pic->conf_win_bottom_offset);
9038
9039 vf->compWidth -= SubWidthC *
9040 (pic->conf_win_left_offset +
9041 pic->conf_win_right_offset);
9042 vf->compHeight -= SubHeightC *
9043 (pic->conf_win_top_offset +
9044 pic->conf_win_bottom_offset);
9045
9046 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
9047 hevc_print(hevc, 0,
9048 "conformance_window %d, %d, %d, %d, %d => cropped width %d, height %d com_w %d com_h %d\n",
9049 pic->chroma_format_idc,
9050 pic->conf_win_left_offset,
9051 pic->conf_win_right_offset,
9052 pic->conf_win_top_offset,
9053 pic->conf_win_bottom_offset,
9054 vf->width, vf->height, vf->compWidth, vf->compHeight);
9055 }
9056
9057 vf->width = vf->width /
9058 get_double_write_ratio(hevc, pic->double_write_mode);
9059 vf->height = vf->height /
9060 get_double_write_ratio(hevc, pic->double_write_mode);
9061 #ifdef HEVC_PIC_STRUCT_SUPPORT
9062 if (pic->pic_struct == 3 || pic->pic_struct == 4) {
9063 struct vframe_s *vf2;
9064
9065 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9066 hevc_print(hevc, 0,
9067 "pic_struct = %d index 0x%x\n",
9068 pic->pic_struct,
9069 pic->index);
9070
9071 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
9072 hevc_print(hevc, 0,
9073 "fatal error, no available buffer slot.");
9074 return -1;
9075 }
9076 pic->vf_ref = 2;
9077 vf->duration = vf->duration>>1;
9078 memcpy(vf2, vf, sizeof(struct vframe_s));
9079
9080 if (pic->pic_struct == 3) {
9081 vf->type = VIDTYPE_INTERLACE_TOP
9082 | nv_order;
9083 vf2->type = VIDTYPE_INTERLACE_BOTTOM
9084 | nv_order;
9085 } else {
9086 vf->type = VIDTYPE_INTERLACE_BOTTOM
9087 | nv_order;
9088 vf2->type = VIDTYPE_INTERLACE_TOP
9089 | nv_order;
9090 }
9091 hevc->vf_pre_count++;
9092 decoder_do_frame_check(vdec, vf);
9093 kfifo_put(&hevc->display_q,
9094 (const struct vframe_s *)vf);
9095 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9096 hevc->vf_pre_count++;
9097 kfifo_put(&hevc->display_q,
9098 (const struct vframe_s *)vf2);
9099 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
9100 } else if (pic->pic_struct == 5
9101 || pic->pic_struct == 6) {
9102 struct vframe_s *vf2, *vf3;
9103
9104 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9105 hevc_print(hevc, 0,
9106 "pic_struct = %d index 0x%x\n",
9107 pic->pic_struct,
9108 pic->index);
9109
9110 if (kfifo_get(&hevc->newframe_q, &vf2) == 0) {
9111 hevc_print(hevc, 0,
9112 "fatal error, no available buffer slot.");
9113 return -1;
9114 }
9115 if (kfifo_get(&hevc->newframe_q, &vf3) == 0) {
9116 hevc_print(hevc, 0,
9117 "fatal error, no available buffer slot.");
9118 return -1;
9119 }
9120 pic->vf_ref = 3;
9121 vf->duration = vf->duration/3;
9122 memcpy(vf2, vf, sizeof(struct vframe_s));
9123 memcpy(vf3, vf, sizeof(struct vframe_s));
9124
9125 if (pic->pic_struct == 5) {
9126 vf->type = VIDTYPE_INTERLACE_TOP
9127 | nv_order;
9128 vf2->type = VIDTYPE_INTERLACE_BOTTOM
9129 | nv_order;
9130 vf3->type = VIDTYPE_INTERLACE_TOP
9131 | nv_order;
9132 } else {
9133 vf->type = VIDTYPE_INTERLACE_BOTTOM
9134 | nv_order;
9135 vf2->type = VIDTYPE_INTERLACE_TOP
9136 | nv_order;
9137 vf3->type = VIDTYPE_INTERLACE_BOTTOM
9138 | nv_order;
9139 }
9140 hevc->vf_pre_count++;
9141 decoder_do_frame_check(vdec, vf);
9142 kfifo_put(&hevc->display_q,
9143 (const struct vframe_s *)vf);
9144 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9145 hevc->vf_pre_count++;
9146 kfifo_put(&hevc->display_q,
9147 (const struct vframe_s *)vf2);
9148 ATRACE_COUNTER(MODULE_NAME, vf2->pts);
9149 hevc->vf_pre_count++;
9150 kfifo_put(&hevc->display_q,
9151 (const struct vframe_s *)vf3);
9152 ATRACE_COUNTER(MODULE_NAME, vf3->pts);
9153
9154 } else if (pic->pic_struct == 9
9155 || pic->pic_struct == 10) {
9156 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9157 hevc_print(hevc, 0,
9158 "pic_struct = %d index 0x%x\n",
9159 pic->pic_struct,
9160 pic->index);
9161
9162 pic->vf_ref = 1;
9163 /* process previous pending vf*/
9164 process_pending_vframe(hevc,
9165 pic, (pic->pic_struct == 9));
9166
9167 decoder_do_frame_check(vdec, vf);
9168 /* process current vf */
9169 kfifo_put(&hevc->pending_q,
9170 (const struct vframe_s *)vf);
9171 vf->height <<= 1;
9172 if (pic->pic_struct == 9) {
9173 vf->type = VIDTYPE_INTERLACE_TOP
9174 | nv_order | VIDTYPE_VIU_FIELD;
9175 process_pending_vframe(hevc,
9176 hevc->pre_bot_pic, 0);
9177 } else {
9178 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9179 nv_order | VIDTYPE_VIU_FIELD;
9180 vf->index = (pic->index << 8) | 0xff;
9181 process_pending_vframe(hevc,
9182 hevc->pre_top_pic, 1);
9183 }
9184
9185 if (hevc->vf_pre_count == 0)
9186 hevc->vf_pre_count++;
9187
9188 /**/
9189 if (pic->pic_struct == 9)
9190 hevc->pre_top_pic = pic;
9191 else
9192 hevc->pre_bot_pic = pic;
9193
9194 } else if (pic->pic_struct == 11
9195 || pic->pic_struct == 12) {
9196 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9197 hevc_print(hevc, 0,
9198 "pic_struct = %d index 0x%x\n",
9199 pic->pic_struct,
9200 pic->index);
9201 pic->vf_ref = 1;
9202 /* process previous pending vf*/
9203 process_pending_vframe(hevc, pic,
9204 (pic->pic_struct == 11));
9205
9206 /* put current into pending q */
9207 vf->height <<= 1;
9208 if (pic->pic_struct == 11)
9209 vf->type = VIDTYPE_INTERLACE_TOP |
9210 nv_order | VIDTYPE_VIU_FIELD;
9211 else {
9212 vf->type = VIDTYPE_INTERLACE_BOTTOM |
9213 nv_order | VIDTYPE_VIU_FIELD;
9214 vf->index = (pic->index << 8) | 0xff;
9215 }
9216 decoder_do_frame_check(vdec, vf);
9217 kfifo_put(&hevc->pending_q,
9218 (const struct vframe_s *)vf);
9219 if (hevc->vf_pre_count == 0)
9220 hevc->vf_pre_count++;
9221
9222 /**/
9223 if (pic->pic_struct == 11)
9224 hevc->pre_top_pic = pic;
9225 else
9226 hevc->pre_bot_pic = pic;
9227
9228 } else {
9229 pic->vf_ref = 1;
9230
9231 if (get_dbg_flag(hevc) & H265_DEBUG_PIC_STRUCT)
9232 hevc_print(hevc, 0,
9233 "pic_struct = %d index 0x%x\n",
9234 pic->pic_struct,
9235 pic->index);
9236
9237 switch (pic->pic_struct) {
9238 case 7:
9239 vf->duration <<= 1;
9240 break;
9241 case 8:
9242 vf->duration = vf->duration * 3;
9243 break;
9244 case 1:
9245 vf->height <<= 1;
9246 vf->type = VIDTYPE_INTERLACE_TOP |
9247 nv_order | VIDTYPE_VIU_FIELD;
9248 process_pending_vframe(hevc, pic, 1);
9249 hevc->pre_top_pic = pic;
9250 break;
9251 case 2:
9252 vf->height <<= 1;
9253 vf->type = VIDTYPE_INTERLACE_BOTTOM
9254 | nv_order
9255 | VIDTYPE_VIU_FIELD;
9256 process_pending_vframe(hevc, pic, 0);
9257 hevc->pre_bot_pic = pic;
9258 break;
9259 }
9260 hevc->vf_pre_count++;
9261 decoder_do_frame_check(vdec, vf);
9262 kfifo_put(&hevc->display_q,
9263 (const struct vframe_s *)vf);
9264 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9265 }
9266 #else
9267 vf->type_original = vf->type;
9268 pic->vf_ref = 1;
9269 hevc->vf_pre_count++;
9270 decoder_do_frame_check(vdec, vf);
9271 kfifo_put(&hevc->display_q, (const struct vframe_s *)vf);
9272 ATRACE_COUNTER(MODULE_NAME, vf->pts);
9273 #endif
9274 /*count info*/
9275 vdec_count_info(hevc->gvs, 0, stream_offset);
9276 hevc_update_gvs(hevc);
9277 memcpy(&tmp4x, hevc->gvs, sizeof(struct vdec_info));
9278 tmp4x.bit_depth_luma = hevc->bit_depth_luma;
9279 tmp4x.bit_depth_chroma = hevc->bit_depth_chroma;
9280 tmp4x.double_write_mode = get_double_write_mode(hevc);
9281 vdec_fill_vdec_frame(vdec, &hevc->vframe_qos, &tmp4x, vf, pic->hw_decode_time);
9282 vdec->vdec_fps_detec(vdec->id);
9283 hevc_print(hevc, H265_DEBUG_BUFMGR,
9284 "%s(type %d index 0x%x poc %d/%d) pts(%d,%d) dur %d\n",
9285 __func__, vf->type, vf->index,
9286 get_pic_poc(hevc, vf->index & 0xff),
9287 get_pic_poc(hevc, (vf->index >> 8) & 0xff),
9288 vf->pts, vf->pts_us64,
9289 vf->duration);
9290
9291 /*if (pic->vf_ref == hevc->vf_pre_count) {*/
9292 if (hevc->kpi_first_i_decoded == 0) {
9293 hevc->kpi_first_i_decoded = 1;
9294 pr_debug("[vdec_kpi][%s] First I frame decoded.\n",
9295 __func__);
9296 }
9297
9298 if (without_display_mode == 0) {
9299 vf_notify_receiver(hevc->provider_name,
9300 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9301 }
9302 else
9303 vh265_vf_put(vh265_vf_get(vdec), vdec);
9304 }
9305
9306 return 0;
9307 }
9308
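/*
 * V4L2 path: on EOS, queue a dummy vframe flagged as an empty EOS
 * frame (bound to a free capture buffer if one can be obtained
 * within the timeout) so the receiver can drain and signal end of
 * stream.
 */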
9309 static int notify_v4l_eos(struct vdec_s *vdec)
9310 {
9311 struct hevc_state_s *hw = (struct hevc_state_s *)vdec->private;
9312 struct aml_vcodec_ctx *ctx = (struct aml_vcodec_ctx *)(hw->v4l2_ctx);
9313 struct vframe_s *vf = &hw->vframe_dummy;
9314 struct vdec_v4l2_buffer *fb = NULL;
9315 int index = INVALID_IDX;
9316 ulong expires;
9317
9318 if (hw->is_used_v4l && hw->eos) {
9319 expires = jiffies + msecs_to_jiffies(2000);
9320 while (INVALID_IDX == (index = get_free_buf_idx(hw))) {
9321 if (time_after(jiffies, expires) ||
9322 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx))
9323 break;
9324 }
9325
9326 if (index == INVALID_IDX) {
9327 if (vdec_v4l_get_buffer(hw->v4l2_ctx, &fb) < 0) {
9328 pr_err("[%d] EOS get free buff fail.\n", ctx->id);
9329 return -1;
9330 }
9331 }
9332
9333 vf->type |= VIDTYPE_V4L_EOS;
9334 vf->timestamp = ULONG_MAX;
9335 vf->flag = VFRAME_FLAG_EMPTY_FRAME_V4L;
9336 vf->v4l_mem_handle = (index == INVALID_IDX) ? (ulong)fb :
9337 hw->m_BUF[index].v4l_ref_buf_addr;
9338 kfifo_put(&hw->display_q, (const struct vframe_s *)vf);
9339 vf_notify_receiver(vdec->vf_provider_name,
9340 VFRAME_EVENT_PROVIDER_VFRAME_READY, NULL);
9341
9342 pr_info("[%d] H265 EOS notify.\n", ctx->id);
9343 }
9344
9345 return 0;
9346 }
9347
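/*
 * Parse an SEI message via the HEVC shift-data register interface.
 * Only payload type 137 (mastering_display_colour_volume) is handled:
 * the three display primaries, the white point and the max/min
 * luminance are read 16 bits at a time, and any remaining payload
 * bytes are skipped.
 */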
9348 static void process_nal_sei(struct hevc_state_s *hevc,
9349 int payload_type, int payload_size)
9350 {
9351 unsigned short data;
9352
9353 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9354 hevc_print(hevc, 0,
9355 "\tsei message: payload_type = 0x%02x, payload_size = 0x%02x\n",
9356 payload_type, payload_size);
9357
9358 if (payload_type == 137) {
9359 int i, j;
9360 /* MASTERING_DISPLAY_COLOUR_VOLUME */
9361 if (payload_size >= 24) {
9362 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9363 hevc_print(hevc, 0,
9364 "\tsei MASTERING_DISPLAY_COLOUR_VOLUME available\n");
9365 for (i = 0; i < 3; i++) {
9366 for (j = 0; j < 2; j++) {
9367 data =
9368 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9369 hevc->primaries[i][j] = data;
9370 WRITE_HREG(HEVC_SHIFT_COMMAND,
9371 (1<<7)|16);
9372 if (get_dbg_flag(hevc) &
9373 H265_DEBUG_PRINT_SEI)
9374 hevc_print(hevc, 0,
9375 "\t\tprimaries[%1d][%1d] = %04x\n",
9376 i, j, hevc->primaries[i][j]);
9377 }
9378 }
9379 for (i = 0; i < 2; i++) {
9380 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9381 hevc->white_point[i] = data;
9382 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|16);
9383 if (get_dbg_flag(hevc) & H265_DEBUG_PRINT_SEI)
9384 hevc_print(hevc, 0,
9385 "\t\twhite_point[%1d] = %04x\n",
9386 i, hevc->white_point[i]);
9387 }
9388 for (i = 0; i < 2; i++) {
9389 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9390 hevc->luminance[i] = data << 16;
9391 WRITE_HREG(HEVC_SHIFT_COMMAND,
9392 (1<<7)|16);
9393 data =
9394 (READ_HREG(HEVC_SHIFTED_DATA) >> 16);
9395 hevc->luminance[i] |= data;
9396 WRITE_HREG(HEVC_SHIFT_COMMAND,
9397 (1<<7)|16);
9398 if (get_dbg_flag(hevc) &
9399 H265_DEBUG_PRINT_SEI)
9400 hevc_print(hevc, 0,
9401 "\t\tluminance[%1d] = %08x\n",
9402 i, hevc->luminance[i]);
9403 }
9404 hevc->sei_present_flag |= SEI_MASTER_DISPLAY_COLOR_MASK;
9405 }
9406 payload_size -= 24;
9407 while (payload_size > 0) {
9408 data = (READ_HREG(HEVC_SHIFTED_DATA) >> 24);
9409 payload_size--;
9410 WRITE_HREG(HEVC_SHIFT_COMMAND, (1<<7)|8);
9411 hevc_print(hevc, 0, "\t\tskip byte %02x\n", data);
9412 }
9413 }
9414 }
9415
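/*
 * Recover the decoder after a stream error (non multi-instance paths):
 * stop the HEVC core, rewind the stream read pointer, rebuild the
 * 64-bit shift byte count, reset and reprogram the stream front end,
 * then restart the ucode so it searches for the next start code.
 */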
9416 static int hevc_recover(struct hevc_state_s *hevc)
9417 {
9418 int ret = -1;
9419 u32 rem;
9420 u64 shift_byte_count64;
9421 unsigned int hevc_shift_byte_count;
9422 unsigned int hevc_stream_start_addr;
9423 unsigned int hevc_stream_end_addr;
9424 unsigned int hevc_stream_rd_ptr;
9425 unsigned int hevc_stream_wr_ptr;
9426 unsigned int hevc_stream_control;
9427 unsigned int hevc_stream_fifo_ctl;
9428 unsigned int hevc_stream_buf_size;
9429 struct vdec_s *vdec = hw_to_vdec(hevc);
9430
9431 mutex_lock(&vh265_mutex);
9432 #if 0
9433 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9434 int ii;
9435
9436 for (ii = 0; ii < 4; ii++)
9437 hevc_print(hevc, 0,
9438 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9439 if (((i + ii) & 0xf) == 0)
9440 hevc_print(hevc, 0, "\n");
9441 }
9442 #endif
9443 #define ES_VID_MAN_RD_PTR (1<<0)
9444 if (!hevc->init_flag) {
9445 hevc_print(hevc, 0, "h265 has stopped, recover return!\n");
9446 mutex_unlock(&vh265_mutex);
9447 return ret;
9448 }
9449 amhevc_stop();
9450 msleep(20);
9451 ret = 0;
9452 /* reset */
9453 if (vdec_stream_based(vdec)) {
9454 STBUF_WRITE(&vdec->vbuf, set_rp,
9455 READ_VREG(HEVC_STREAM_RD_PTR));
9456
9457 if (!vdec->vbuf.no_parser)
9458 SET_PARSER_REG_MASK(PARSER_ES_CONTROL,
9459 ES_VID_MAN_RD_PTR);
9460 }
9461
9462 hevc_stream_start_addr = READ_VREG(HEVC_STREAM_START_ADDR);
9463 hevc_stream_end_addr = READ_VREG(HEVC_STREAM_END_ADDR);
9464 hevc_stream_rd_ptr = READ_VREG(HEVC_STREAM_RD_PTR);
9465 hevc_stream_wr_ptr = READ_VREG(HEVC_STREAM_WR_PTR);
9466 hevc_stream_control = READ_VREG(HEVC_STREAM_CONTROL);
9467 hevc_stream_fifo_ctl = READ_VREG(HEVC_STREAM_FIFO_CTL);
9468 hevc_stream_buf_size = hevc_stream_end_addr - hevc_stream_start_addr;
9469
9470 /* HEVC streaming buffer will reset and restart
9471 * from current hevc_stream_rd_ptr position
9472 */
9473 /* calculate HEVC_SHIFT_BYTE_COUNT value with the new position. */
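/*
 * HEVC_SHIFT_BYTE_COUNT is a 32-bit register; the driver keeps a
 * software hi word and bumps it whenever the hardware count wraps
 * (MSB goes from 1 to 0).  The 64-bit count is then aligned down to
 * a multiple of the stream buffer size and the current read-pointer
 * offset is added back, so the count matches the restart position.
 */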
9474 hevc_shift_byte_count = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9475 if ((hevc->shift_byte_count_lo & (1 << 31))
9476 && ((hevc_shift_byte_count & (1 << 31)) == 0))
9477 hevc->shift_byte_count_hi++;
9478
9479 hevc->shift_byte_count_lo = hevc_shift_byte_count;
9480 shift_byte_count64 = ((u64)(hevc->shift_byte_count_hi) << 32) |
9481 hevc->shift_byte_count_lo;
9482 div_u64_rem(shift_byte_count64, hevc_stream_buf_size, &rem);
9483 shift_byte_count64 -= rem;
9484 shift_byte_count64 += hevc_stream_rd_ptr - hevc_stream_start_addr;
9485
9486 if (rem > (hevc_stream_rd_ptr - hevc_stream_start_addr))
9487 shift_byte_count64 += hevc_stream_buf_size;
9488
9489 hevc->shift_byte_count_lo = (u32)shift_byte_count64;
9490 hevc->shift_byte_count_hi = (u32)(shift_byte_count64 >> 32);
9491
9492 WRITE_VREG(DOS_SW_RESET3,
9493 /* (1<<2)| */
9494 (1 << 3) | (1 << 4) | (1 << 8) |
9495 (1 << 11) | (1 << 12) | (1 << 14)
9496 | (1 << 15) | (1 << 17) | (1 << 18) | (1 << 19));
9497 WRITE_VREG(DOS_SW_RESET3, 0);
9498
9499 WRITE_VREG(HEVC_STREAM_START_ADDR, hevc_stream_start_addr);
9500 WRITE_VREG(HEVC_STREAM_END_ADDR, hevc_stream_end_addr);
9501 WRITE_VREG(HEVC_STREAM_RD_PTR, hevc_stream_rd_ptr);
9502 WRITE_VREG(HEVC_STREAM_WR_PTR, hevc_stream_wr_ptr);
9503 WRITE_VREG(HEVC_STREAM_CONTROL, hevc_stream_control);
9504 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, hevc->shift_byte_count_lo);
9505 WRITE_VREG(HEVC_STREAM_FIFO_CTL, hevc_stream_fifo_ctl);
9506
9507 hevc_config_work_space_hw(hevc);
9508 decoder_hw_reset();
9509
9510 hevc->have_vps = 0;
9511 hevc->have_sps = 0;
9512 hevc->have_pps = 0;
9513
9514 hevc->have_valid_start_slice = 0;
9515
9516 if (get_double_write_mode(hevc) & 0x10)
9517 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1,
9518 0x1 << 31 /* Enable NV21 reference read mode for MC */
9519 );
9520
9521 WRITE_VREG(HEVC_WAIT_FLAG, 1);
9522 /* clear mailbox interrupt */
9523 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
9524 /* enable mailbox interrupt */
9525 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
9526 /* disable PSCALE for hardware sharing */
9527 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
9528
9529 CLEAR_PARSER_REG_MASK(PARSER_ES_CONTROL, ES_VID_MAN_RD_PTR);
9530
9531 WRITE_VREG(DEBUG_REG1, 0x0);
9532
9533 if ((error_handle_policy & 1) == 0) {
9534 if ((error_handle_policy & 4) == 0) {
9535 /* ucode auto mode, and do not check vps/sps/pps/idr */
9536 WRITE_VREG(NAL_SEARCH_CTL,
9537 0xc);
9538 } else {
9539 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9540 }
9541 } else {
9542 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9543 }
9544
9545 if (get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9546 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9547 WRITE_VREG(NAL_SEARCH_CTL,
9548 READ_VREG(NAL_SEARCH_CTL)
9549 | ((parser_sei_enable & 0x7) << 17));
9550 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9551 WRITE_VREG(NAL_SEARCH_CTL,
9552 READ_VREG(NAL_SEARCH_CTL) |
9553 ((parser_dolby_vision_enable & 0x1) << 20));
9554 #endif
9555 config_decode_mode(hevc);
9556 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
9557
9558 /* if (amhevc_loadmc(vh265_mc) < 0) { */
9559 /* amhevc_disable(); */
9560 /* return -EBUSY; */
9561 /* } */
9562 #if 0
9563 for (i = 0; i < (hevc->debug_ptr_size / 2); i += 4) {
9564 int ii;
9565
9566 for (ii = 0; ii < 4; ii++) {
9567 /* hevc->debug_ptr[i+3-ii]=ttt++; */
9568 hevc_print(hevc, 0,
9569 "%04x ", hevc->debug_ptr[i + 3 - ii]);
9570 }
9571 if (((i + ii) & 0xf) == 0)
9572 hevc_print(hevc, 0, "\n");
9573 }
9574 #endif
9575 init_pic_list_hw(hevc);
9576
9577 hevc_print(hevc, 0, "%s HEVC_SHIFT_BYTE_COUNT=0x%x\n", __func__,
9578 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9579
9580 #ifdef SWAP_HEVC_UCODE
9581 if (!tee_enabled() && hevc->is_swap &&
9582 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9583 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
9584 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
9585 }
9586 #endif
9587 amhevc_start();
9588
9589 /* skip, search next start code */
9590 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
9591 hevc->skip_flag = 1;
9592 #ifdef ERROR_HANDLE_DEBUG
9593 if (dbg_nal_skip_count & 0x20000) {
9594 dbg_nal_skip_count &= ~0x20000;
9595 mutex_unlock(&vh265_mutex);
9596 return ret;
9597 }
9598 #endif
9599 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9600 /* Interrupt Amrisc to execute */
9601 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9602 #ifdef MULTI_INSTANCE_SUPPORT
9603 if (!hevc->m_ins_flag)
9604 #endif
9605 hevc->first_pic_after_recover = 1;
9606 mutex_unlock(&vh265_mutex);
9607 return ret;
9608 }
9609
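/*
 * Dump the prefix/suffix SEI aux buffers for debugging.  The
 * HEVC_AUX_DATA_SIZE register packs the prefix size in its high
 * 16 bits and the suffix size in its low 16 bits, both in 16-byte
 * units (hence the "<< 4" below); each buffer is dumped as 16-bit
 * words.
 */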
9610 static void dump_aux_buf(struct hevc_state_s *hevc)
9611 {
9612 int i;
9613 unsigned short *aux_adr =
9614 (unsigned short *)
9615 hevc->aux_addr;
9616 unsigned int aux_size =
9617 (READ_VREG(HEVC_AUX_DATA_SIZE)
9618 >> 16) << 4;
9619
9620 if (hevc->prefix_aux_size > 0) {
9621 hevc_print(hevc, 0,
9622 "prefix aux: (size %d)\n",
9623 aux_size);
9624 for (i = 0; i <
9625 (aux_size >> 1); i++) {
9626 hevc_print_cont(hevc, 0,
9627 "%04x ",
9628 *(aux_adr + i));
9629 if (((i + 1) & 0xf)
9630 == 0)
9631 hevc_print_cont(hevc,
9632 0, "\n");
9633 }
9634 }
9635 if (hevc->suffix_aux_size > 0) {
9636 aux_adr = (unsigned short *)
9637 (hevc->aux_addr +
9638 hevc->prefix_aux_size);
9639 aux_size =
9640 (READ_VREG(HEVC_AUX_DATA_SIZE) & 0xffff)
9641 << 4;
9642 hevc_print(hevc, 0,
9643 "suffix aux: (size %d)\n",
9644 aux_size);
9645 for (i = 0; i <
9646 (aux_size >> 1); i++) {
9647 hevc_print_cont(hevc, 0,
9648 "%04x ", *(aux_adr + i));
9649 if (((i + 1) & 0xf) == 0)
9650 hevc_print_cont(hevc, 0, "\n");
9651 }
9652 }
9653 }
9654
9655 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9656 static void dolby_get_meta(struct hevc_state_s *hevc)
9657 {
9658 struct vdec_s *vdec = hw_to_vdec(hevc);
9659
9660 if (get_dbg_flag(hevc) &
9661 H265_DEBUG_BUFMGR_MORE)
9662 dump_aux_buf(hevc);
9663 if (vdec->dolby_meta_with_el || vdec->slave) {
9664 set_aux_data(hevc,
9665 hevc->cur_pic, 0, 0);
9666 } else if (vdec->master) {
9667 struct hevc_state_s *hevc_ba =
9668 (struct hevc_state_s *)
9669 vdec->master->private;
9670 /*do not use hevc_ba*/
9671 set_aux_data(hevc,
9672 hevc_ba->cur_pic,
9673 0, 1);
9674 set_aux_data(hevc,
9675 hevc->cur_pic, 0, 2);
9676 }
9677 }
9678 #endif
9679
9680 static void read_decode_info(struct hevc_state_s *hevc)
9681 {
9682 uint32_t decode_info =
9683 READ_HREG(HEVC_DECODE_INFO);
9684 hevc->start_decoding_flag |=
9685 (decode_info & 0xff);
9686 hevc->rps_set_id = (decode_info >> 8) & 0xff;
9687 }
9688
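/*
 * Fill the v4l2 picture-setting info from the parsed dimensions,
 * scaled by the double-write ratio; the coded size is aligned to 32
 * and the DPB size is taken from the working picture count.
 */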
9689 static int vh265_get_ps_info(struct hevc_state_s *hevc, int width, int height, struct aml_vdec_ps_infos *ps)
9690 {
9691 int dw_mode = v4l_parser_get_double_write_mode(hevc, width, height);
9692
9693 ps->visible_width = width / get_double_write_ratio(hevc, dw_mode);
9694 ps->visible_height = height / get_double_write_ratio(hevc, dw_mode);
9695 ps->coded_width = ALIGN(width, 32) / get_double_write_ratio(hevc, dw_mode);
9696 ps->coded_height = ALIGN(height, 32) / get_double_write_ratio(hevc, dw_mode);
9697 ps->dpb_size = v4l_parser_work_pic_num(hevc);
9698
9699 return 0;
9700 }
9701
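/*
 * Detect a mid-stream resolution change when parameter sets come from
 * the ucode.  On a width/height mismatch the new PS info is sent to
 * the v4l2 layer, the current output is flushed and an EOS event is
 * queued; returns 1 when a change was handled, 0 otherwise.
 */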
9702 static int v4l_res_change(struct hevc_state_s *hevc, union param_u *rpm_param)
9703 {
9704 struct aml_vcodec_ctx *ctx =
9705 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
9706 int ret = 0;
9707
9708 if (ctx->param_sets_from_ucode &&
9709 hevc->res_ch_flag == 0) {
9710 struct aml_vdec_ps_infos ps;
9711 int width = rpm_param->p.pic_width_in_luma_samples;
9712 int height = rpm_param->p.pic_height_in_luma_samples;
9713 if ((hevc->pic_w != 0 &&
9714 hevc->pic_h != 0) &&
9715 (hevc->pic_w != width ||
9716 hevc->pic_h != height)) {
9717 hevc_print(hevc, 0,
9718 "v4l_res_change Pic Width/Height Change (%d,%d)=>(%d,%d), interlace %d\n",
9719 hevc->pic_w, hevc->pic_h,
9720 width,
9721 height,
9722 hevc->interlace_flag);
9723
9724 vh265_get_ps_info(hevc, width, height, &ps);
9725 vdec_v4l_set_ps_infos(ctx, &ps);
9726 vdec_v4l_res_ch_event(ctx);
9727 hevc->v4l_params_parsed = false;
9728 hevc->res_ch_flag = 1;
9729 hevc->eos = 1;
9730 flush_output(hevc, NULL);
9731 //del_timer_sync(&hevc->timer);
9732 notify_v4l_eos(hw_to_vdec(hevc));
9733
9734 ret = 1;
9735 }
9736 }
9737
9738 return ret;
9739 }
9740
9741
9742 static irqreturn_t vh265_isr_thread_fn(int irq, void *data)
9743 {
9744 struct hevc_state_s *hevc = (struct hevc_state_s *) data;
9745 unsigned int dec_status = hevc->dec_status;
9746 int i, ret;
9747
9748 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9749 struct vdec_s *vdec = hw_to_vdec(hevc);
9750 #endif
9751
9752 if (hevc->eos)
9753 return IRQ_HANDLED;
9754 if (
9755 #ifdef MULTI_INSTANCE_SUPPORT
9756 (!hevc->m_ins_flag) &&
9757 #endif
9758 hevc->error_flag == 1) {
9759 if ((error_handle_policy & 0x10) == 0) {
9760 if (hevc->cur_pic) {
9761 int current_lcu_idx =
9762 READ_VREG(HEVC_PARSER_LCU_START)
9763 & 0xffffff;
9764 if (current_lcu_idx <
9765 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9766 hevc->cur_pic->error_mark = 1;
9767
9768 }
9769 }
9770 if ((error_handle_policy & 1) == 0) {
9771 hevc->error_skip_nal_count = 1;
9772 /* manually search NALs, skip error_skip_nal_count
9773 * NALs and trigger the HEVC_NAL_SEARCH_DONE irq
9774 */
9775 WRITE_VREG(NAL_SEARCH_CTL,
9776 (error_skip_nal_count << 4) | 0x1);
9777 } else {
9778 hevc->error_skip_nal_count = error_skip_nal_count;
9779 WRITE_VREG(NAL_SEARCH_CTL, 0x1);/* manual parser NAL */
9780 }
9781 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
9782 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9783 || vdec->master
9784 || vdec->slave
9785 #endif
9786 ) {
9787 WRITE_VREG(NAL_SEARCH_CTL,
9788 READ_VREG(NAL_SEARCH_CTL) | 0x10000);
9789 }
9790 WRITE_VREG(NAL_SEARCH_CTL,
9791 READ_VREG(NAL_SEARCH_CTL)
9792 | ((parser_sei_enable & 0x7) << 17));
9793 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9794 WRITE_VREG(NAL_SEARCH_CTL,
9795 READ_VREG(NAL_SEARCH_CTL) |
9796 ((parser_dolby_vision_enable & 0x1) << 20));
9797 #endif
9798 config_decode_mode(hevc);
9799 /* search new nal */
9800 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9801 /* Interrupt Amrisc to execute */
9802 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
9803
9804 /* hevc_print(hevc, 0,
9805 *"%s: error handle\n", __func__);
9806 */
9807 hevc->error_flag = 2;
9808 return IRQ_HANDLED;
9809 } else if (
9810 #ifdef MULTI_INSTANCE_SUPPORT
9811 (!hevc->m_ins_flag) &&
9812 #endif
9813 hevc->error_flag == 3) {
9814 hevc_print(hevc, 0, "error_flag=3, hevc_recover\n");
9815 hevc_recover(hevc);
9816 hevc->error_flag = 0;
9817
9818 if ((error_handle_policy & 0x10) == 0) {
9819 if (hevc->cur_pic) {
9820 int current_lcu_idx =
9821 READ_VREG(HEVC_PARSER_LCU_START)
9822 & 0xffffff;
9823 if (current_lcu_idx <
9824 ((hevc->lcu_x_num*hevc->lcu_y_num)-1))
9825 hevc->cur_pic->error_mark = 1;
9826
9827 }
9828 }
9829 if ((error_handle_policy & 1) == 0) {
9830 /* need to skip some data when
9831 * error_flag of 3 is triggered,
9832 * to avoid hevc_recover() being
9833 * called many times at the same
9834 * bitstream position
9835 */
9836 hevc->error_skip_nal_count = 1;
9837 /* manually search NALs, skip error_skip_nal_count
9838 * NALs and trigger the HEVC_NAL_SEARCH_DONE irq
9839 */
9840 WRITE_VREG(NAL_SEARCH_CTL,
9841 (error_skip_nal_count << 4) | 0x1);
9842 }
9843
9844 if ((error_handle_policy & 0x2) == 0) {
9845 hevc->have_vps = 1;
9846 hevc->have_sps = 1;
9847 hevc->have_pps = 1;
9848 }
9849 return IRQ_HANDLED;
9850 }
9851 if (!hevc->m_ins_flag) {
9852 i = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
9853 if ((hevc->shift_byte_count_lo & (1 << 31))
9854 && ((i & (1 << 31)) == 0))
9855 hevc->shift_byte_count_hi++;
9856 hevc->shift_byte_count_lo = i;
9857 }
9858 #ifdef MULTI_INSTANCE_SUPPORT
9859 mutex_lock(&hevc->chunks_mutex);
9860 if ((dec_status == HEVC_DECPIC_DATA_DONE ||
9861 dec_status == HEVC_FIND_NEXT_PIC_NAL ||
9862 dec_status == HEVC_FIND_NEXT_DVEL_NAL)
9863 && (hevc->chunk)) {
9864 hevc->cur_pic->pts = hevc->chunk->pts;
9865 hevc->cur_pic->pts64 = hevc->chunk->pts64;
9866 hevc->cur_pic->timestamp = hevc->chunk->timestamp;
9867 }
9868 mutex_unlock(&hevc->chunks_mutex);
9869
9870 if (dec_status == HEVC_DECODE_BUFEMPTY ||
9871 dec_status == HEVC_DECODE_BUFEMPTY2) {
9872 if (hevc->m_ins_flag) {
9873 read_decode_info(hevc);
9874 if (vdec_frame_based(hw_to_vdec(hevc))) {
9875 hevc->empty_flag = 1;
9876 goto pic_done;
9877 } else {
9878 if (
9879 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9880 vdec->master ||
9881 vdec->slave ||
9882 #endif
9883 (data_resend_policy & 0x1)) {
9884 hevc->dec_result = DEC_RESULT_AGAIN;
9885 amhevc_stop();
9886 restore_decode_state(hevc);
9887 } else
9888 hevc->dec_result = DEC_RESULT_GET_DATA;
9889 }
9890 reset_process_time(hevc);
9891 vdec_schedule_work(&hevc->work);
9892 }
9893 return IRQ_HANDLED;
9894 } else if ((dec_status == HEVC_SEARCH_BUFEMPTY) ||
9895 (dec_status == HEVC_NAL_DECODE_DONE)
9896 ) {
9897 if (hevc->m_ins_flag) {
9898 read_decode_info(hevc);
9899 if (vdec_frame_based(hw_to_vdec(hevc))) {
9900 /*hevc->dec_result = DEC_RESULT_GET_DATA;*/
9901 hevc->empty_flag = 1;
9902 goto pic_done;
9903 } else {
9904 hevc->dec_result = DEC_RESULT_AGAIN;
9905 amhevc_stop();
9906 restore_decode_state(hevc);
9907 }
9908
9909 reset_process_time(hevc);
9910 vdec_schedule_work(&hevc->work);
9911 }
9912
9913 return IRQ_HANDLED;
9914 } else if (dec_status == HEVC_DECPIC_DATA_DONE) {
9915 if (hevc->m_ins_flag) {
9916 struct PIC_s *pic;
9917 struct PIC_s *pic_display;
9918 int decoded_poc;
9919 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9920 if (vdec->mvfrm)
9921 vdec->mvfrm->hw_decode_time =
9922 local_clock() - vdec->mvfrm->hw_decode_start;
9923 #endif
9924 #ifdef DETREFILL_ENABLE
9925 if (hevc->is_swap &&
9926 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
9927 if (hevc->detbuf_adr_virt && hevc->delrefill_check
9928 && READ_VREG(HEVC_SAO_DBG_MODE0))
9929 hevc->delrefill_check = 2;
9930 }
9931 #endif
9932 hevc->empty_flag = 0;
9933 pic_done:
9934 if (input_frame_based(hw_to_vdec(hevc)) &&
9935 frmbase_cont_bitlevel != 0 &&
9936 (hevc->decode_size > READ_VREG(HEVC_SHIFT_BYTE_COUNT)) &&
9937 (hevc->decode_size - (READ_VREG(HEVC_SHIFT_BYTE_COUNT))
9938 > frmbase_cont_bitlevel)) {
9939 /*handle the case: multi pictures in one packet*/
9940 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
9941 "%s has more data index= %d, size=0x%x shiftcnt=0x%x)\n",
9942 __func__,
9943 hevc->decode_idx, hevc->decode_size,
9944 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
9945 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
9946 start_process_time(hevc);
9947 return IRQ_HANDLED;
9948 }
9949
9950 read_decode_info(hevc);
9951 get_picture_qos_info(hevc);
9952 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
9953 hevc->start_parser_type = 0;
9954 hevc->switch_dvlayer_flag = 0;
9955 #endif
9956 hevc->decoded_poc = hevc->curr_POC;
9957 hevc->decoding_pic = NULL;
9958 hevc->dec_result = DEC_RESULT_DONE;
9959 #ifdef DETREFILL_ENABLE
9960 if (hevc->is_swap &&
9961 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
9962 if (hevc->delrefill_check != 2)
9963 #endif
9964
9965 amhevc_stop();
9966
9967 reset_process_time(hevc);
9968
9969 if (hevc->vf_pre_count == 0 || hevc->ip_mode) {
9970 decoded_poc = hevc->curr_POC;
9971 pic = get_pic_by_POC(hevc, decoded_poc);
9972 if (pic && (pic->POC != INVALID_POC)) {
9973 /*PB skip control */
9974 if (pic->error_mark == 0
9975 && hevc->PB_skip_mode == 1) {
9976 /* start decoding after
9977 * first I
9978 */
9979 hevc->ignore_bufmgr_error |= 0x1;
9980 }
9981 if (hevc->ignore_bufmgr_error & 1) {
9982 if (hevc->PB_skip_count_after_decoding > 0) {
9983 hevc->PB_skip_count_after_decoding--;
9984 } else {
9985 /* start displaying */
9986 hevc->ignore_bufmgr_error |= 0x2;
9987 }
9988 }
9989 if (hevc->mmu_enable
9990 && ((hevc->double_write_mode & 0x10) == 0)) {
9991 if (!hevc->m_ins_flag) {
9992 hevc->used_4k_num =
9993 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
9994
9995 if ((!is_skip_decoding(hevc, pic)) &&
9996 (hevc->used_4k_num >= 0) &&
9997 (hevc->cur_pic->scatter_alloc
9998 == 1)) {
9999 hevc_print(hevc,
10000 H265_DEBUG_BUFMGR_MORE,
10001 "%s pic index %d scatter_alloc %d page_start %d\n",
10002 "decoder_mmu_box_free_idx_tail",
10003 hevc->cur_pic->index,
10004 hevc->cur_pic->scatter_alloc,
10005 hevc->used_4k_num);
10006 decoder_mmu_box_free_idx_tail(
10007 hevc->mmu_box,
10008 hevc->cur_pic->index,
10009 hevc->used_4k_num);
10010 hevc->cur_pic->scatter_alloc
10011 = 2;
10012 }
10013 hevc->used_4k_num = -1;
10014 }
10015 }
10016
10017 pic->output_mark = 1;
10018 pic->recon_mark = 1;
10019 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10020 if (vdec->mvfrm) {
10021 pic->frame_size =
10022 vdec->mvfrm->frame_size;
10023 pic->hw_decode_time =
10024 (u32)vdec->mvfrm->hw_decode_time;
10025 }
10026 #endif
10027 }
10028 check_pic_decoded_error(hevc,
10029 READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff);
10030 if (hevc->cur_pic != NULL &&
10031 (READ_VREG(HEVC_PARSER_LCU_START) & 0xffffff) == 0
10032 && (hevc->lcu_x_num * hevc->lcu_y_num != 1))
10033 hevc->cur_pic->error_mark = 1;
10034 force_output:
10035 pic_display = output_pic(hevc, 1);
10036 if (pic_display) {
10037 if ((pic_display->error_mark &&
10038 ((hevc->ignore_bufmgr_error &
10039 0x2) == 0))
10040 || (get_dbg_flag(hevc) &
10041 H265_DEBUG_DISPLAY_CUR_FRAME)
10042 || (get_dbg_flag(hevc) &
10043 H265_DEBUG_NO_DISPLAY)) {
10044 pic_display->output_ready = 0;
10045 if (get_dbg_flag(hevc) &
10046 H265_DEBUG_BUFMGR) {
10047 hevc_print(hevc, 0,
10048 "[BM] Display: POC %d, ",
10049 pic_display->POC);
10050 hevc_print_cont(hevc, 0,
10051 "decoding index %d ==> ",
10052 pic_display->
10053 decode_idx);
10054 hevc_print_cont(hevc, 0,
10055 "Debug or err,recycle it\n");
10056 }
10057 } else {
10058 if ((pic_display->
10059 slice_type != 2) && !pic_display->ip_mode) {
10060 pic_display->output_ready = 0;
10061 } else {
10062 prepare_display_buf
10063 (hevc,
10064 pic_display);
10065 hevc->first_pic_flag = 1;
10066 }
10067 }
10068 }
10069 }
10070
10071 vdec_schedule_work(&hevc->work);
10072 }
10073
10074 return IRQ_HANDLED;
10075 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10076 } else if (dec_status == HEVC_FIND_NEXT_PIC_NAL ||
10077 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
10078 if (hevc->m_ins_flag) {
10079 unsigned char next_parser_type =
10080 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xff;
10081 read_decode_info(hevc);
10082
10083 if (vdec->slave &&
10084 dec_status == HEVC_FIND_NEXT_DVEL_NAL) {
10085 /*cur is base, found enhance*/
10086 struct hevc_state_s *hevc_el =
10087 (struct hevc_state_s *)
10088 vdec->slave->private;
10089 hevc->switch_dvlayer_flag = 1;
10090 hevc->no_switch_dvlayer_count = 0;
10091 hevc_el->start_parser_type =
10092 next_parser_type;
10093 hevc_print(hevc, H265_DEBUG_DV,
10094 "switch (poc %d) to el\n",
10095 hevc->cur_pic ?
10096 hevc->cur_pic->POC :
10097 INVALID_POC);
10098 } else if (vdec->master &&
10099 dec_status == HEVC_FIND_NEXT_PIC_NAL) {
10100 /*cur is enhance, found base*/
10101 struct hevc_state_s *hevc_ba =
10102 (struct hevc_state_s *)
10103 vdec->master->private;
10104 hevc->switch_dvlayer_flag = 1;
10105 hevc->no_switch_dvlayer_count = 0;
10106 hevc_ba->start_parser_type =
10107 next_parser_type;
10108 hevc_print(hevc, H265_DEBUG_DV,
10109 "switch (poc %d) to bl\n",
10110 hevc->cur_pic ?
10111 hevc->cur_pic->POC :
10112 INVALID_POC);
10113 } else {
10114 hevc->switch_dvlayer_flag = 0;
10115 hevc->start_parser_type =
10116 next_parser_type;
10117 hevc->no_switch_dvlayer_count++;
10118 hevc_print(hevc, H265_DEBUG_DV,
10119 "%s: no_switch_dvlayer_count = %d\n",
10120 vdec->master ? "el" : "bl",
10121 hevc->no_switch_dvlayer_count);
10122 if (vdec->slave &&
10123 dolby_el_flush_th != 0 &&
10124 hevc->no_switch_dvlayer_count >
10125 dolby_el_flush_th) {
10126 struct hevc_state_s *hevc_el =
10127 (struct hevc_state_s *)
10128 vdec->slave->private;
10129 struct PIC_s *el_pic;
10130 check_pic_decoded_error(hevc_el,
10131 hevc_el->pic_decoded_lcu_idx);
10132 el_pic = get_pic_by_POC(hevc_el,
10133 hevc_el->curr_POC);
10134 hevc_el->curr_POC = INVALID_POC;
10135 hevc_el->m_pocRandomAccess = MAX_INT;
10136 flush_output(hevc_el, el_pic);
10137 hevc_el->decoded_poc = INVALID_POC;
10138 /* flush_output already called */
10139 hevc_el->decoding_pic = NULL;
10140 hevc->no_switch_dvlayer_count = 0;
10141 if (get_dbg_flag(hevc) & H265_DEBUG_DV)
10142 hevc_print(hevc, 0,
10143 "no el anymore, flush_output el\n");
10144 }
10145 }
10146 hevc->decoded_poc = hevc->curr_POC;
10147 hevc->decoding_pic = NULL;
10148 hevc->dec_result = DEC_RESULT_DONE;
10149 amhevc_stop();
10150 reset_process_time(hevc);
10151 if (aux_data_is_avaible(hevc))
10152 dolby_get_meta(hevc);
10153 if (hevc->cur_pic->slice_type == 2 &&
10154 hevc->vf_pre_count == 0) {
10155 hevc_print(hevc, 0,
10156 "first slice_type %x no_switch_dvlayer_count %x\n",
10157 hevc->cur_pic->slice_type,
10158 hevc->no_switch_dvlayer_count);
10159 goto force_output;
10160 }
10161 vdec_schedule_work(&hevc->work);
10162 }
10163
10164 return IRQ_HANDLED;
10165 #endif
10166 }
10167
10168 #endif
10169
10170 if (dec_status == HEVC_SEI_DAT) {
10171 if (!hevc->m_ins_flag) {
10172 int payload_type =
10173 READ_HREG(CUR_NAL_UNIT_TYPE) & 0xffff;
10174 int payload_size =
10175 (READ_HREG(CUR_NAL_UNIT_TYPE) >> 16) & 0xffff;
10176 process_nal_sei(hevc,
10177 payload_type, payload_size);
10178 }
10179 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_SEI_DAT_DONE);
10180 } else if (dec_status == HEVC_NAL_SEARCH_DONE) {
10181 int naltype = READ_HREG(CUR_NAL_UNIT_TYPE);
10182 int parse_type = HEVC_DISCARD_NAL;
10183
10184 hevc->error_watchdog_count = 0;
10185 hevc->error_skip_nal_wt_cnt = 0;
10186 #ifdef MULTI_INSTANCE_SUPPORT
10187 if (hevc->m_ins_flag)
10188 reset_process_time(hevc);
10189 #endif
10190 if (slice_parse_begin > 0 &&
10191 get_dbg_flag(hevc) & H265_DEBUG_DISCARD_NAL) {
10192 hevc_print(hevc, 0,
10193 "nal type %d, discard %d\n", naltype,
10194 slice_parse_begin);
10195 if (naltype <= NAL_UNIT_CODED_SLICE_CRA)
10196 slice_parse_begin--;
10197 }
10198 if (naltype == NAL_UNIT_EOS) {
10199 struct PIC_s *pic;
10200
10201 hevc_print(hevc, 0, "get NAL_UNIT_EOS, flush output\n");
10202 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10203 if ((vdec->master || vdec->slave) &&
10204 aux_data_is_avaible(hevc)) {
10205 if (hevc->decoding_pic)
10206 dolby_get_meta(hevc);
10207 }
10208 #endif
10209 check_pic_decoded_error(hevc,
10210 hevc->pic_decoded_lcu_idx);
10211 pic = get_pic_by_POC(hevc, hevc->curr_POC);
10212 hevc->curr_POC = INVALID_POC;
10213 /* add to fix RAP_B_Bossen_1 */
10214 hevc->m_pocRandomAccess = MAX_INT;
10215 flush_output(hevc, pic);
10216 clear_poc_flag(hevc);
10217 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_DISCARD_NAL);
10218 /* Interrupt Amrisc to execute */
10219 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10220 #ifdef MULTI_INSTANCE_SUPPORT
10221 if (hevc->m_ins_flag) {
10222 hevc->decoded_poc = INVALID_POC;
10223 /* flush_output already called */
10224 hevc->decoding_pic = NULL;
10225 hevc->dec_result = DEC_RESULT_DONE;
10226 amhevc_stop();
10227
10228 vdec_schedule_work(&hevc->work);
10229 }
10230 #endif
10231 return IRQ_HANDLED;
10232 }
10233
10234 if (
10235 #ifdef MULTI_INSTANCE_SUPPORT
10236 (!hevc->m_ins_flag) &&
10237 #endif
10238 hevc->error_skip_nal_count > 0) {
10239 hevc_print(hevc, 0,
10240 "nal type %d, discard %d\n", naltype,
10241 hevc->error_skip_nal_count);
10242 hevc->error_skip_nal_count--;
10243 if (hevc->error_skip_nal_count == 0) {
10244 hevc_recover(hevc);
10245 hevc->error_flag = 0;
10246 if ((error_handle_policy & 0x2) == 0) {
10247 hevc->have_vps = 1;
10248 hevc->have_sps = 1;
10249 hevc->have_pps = 1;
10250 }
10251 return IRQ_HANDLED;
10252 }
10253 } else if (naltype == NAL_UNIT_VPS) {
10254 parse_type = HEVC_NAL_UNIT_VPS;
10255 hevc->have_vps = 1;
10256 #ifdef ERROR_HANDLE_DEBUG
10257 if (dbg_nal_skip_flag & 1)
10258 parse_type = HEVC_DISCARD_NAL;
10259 #endif
10260 } else if (hevc->have_vps) {
10261 if (naltype == NAL_UNIT_SPS) {
10262 parse_type = HEVC_NAL_UNIT_SPS;
10263 hevc->have_sps = 1;
10264 #ifdef ERROR_HANDLE_DEBUG
10265 if (dbg_nal_skip_flag & 2)
10266 parse_type = HEVC_DISCARD_NAL;
10267 #endif
10268 } else if (naltype == NAL_UNIT_PPS) {
10269 parse_type = HEVC_NAL_UNIT_PPS;
10270 hevc->have_pps = 1;
10271 #ifdef ERROR_HANDLE_DEBUG
10272 if (dbg_nal_skip_flag & 4)
10273 parse_type = HEVC_DISCARD_NAL;
10274 #endif
10275 } else if (hevc->have_sps && hevc->have_pps) {
10276 int seg = HEVC_NAL_UNIT_CODED_SLICE_SEGMENT;
10277
10278 if ((naltype == NAL_UNIT_CODED_SLICE_IDR) ||
10279 (naltype ==
10280 NAL_UNIT_CODED_SLICE_IDR_N_LP)
10281 || (naltype ==
10282 NAL_UNIT_CODED_SLICE_CRA)
10283 || (naltype ==
10284 NAL_UNIT_CODED_SLICE_BLA)
10285 || (naltype ==
10286 NAL_UNIT_CODED_SLICE_BLANT)
10287 || (naltype ==
10288 NAL_UNIT_CODED_SLICE_BLA_N_LP)
10289 ) {
10290 if (slice_parse_begin > 0) {
10291 hevc_print(hevc, 0,
10292 "discard %d, for debugging\n",
10293 slice_parse_begin);
10294 slice_parse_begin--;
10295 } else {
10296 parse_type = seg;
10297 }
10298 hevc->have_valid_start_slice = 1;
10299 } else if (naltype <=
10300 NAL_UNIT_CODED_SLICE_CRA
10301 && (hevc->have_valid_start_slice
10302 || (hevc->PB_skip_mode != 3))) {
10303 if (slice_parse_begin > 0) {
10304 hevc_print(hevc, 0,
10305 "discard %d, dd\n",
10306 slice_parse_begin);
10307 slice_parse_begin--;
10308 } else
10309 parse_type = seg;
10310
10311 }
10312 }
10313 }
10314 if (hevc->have_vps && hevc->have_sps && hevc->have_pps
10315 && hevc->have_valid_start_slice &&
10316 hevc->error_flag == 0) {
10317 if ((get_dbg_flag(hevc) &
10318 H265_DEBUG_MAN_SEARCH_NAL) == 0
10319 /* && (!hevc->m_ins_flag)*/) {
10320 /* auto parser NAL; do not check
10321 * vps/sps/pps/idr
10322 */
10323 WRITE_VREG(NAL_SEARCH_CTL, 0x2);
10324 }
10325
10326 if ((get_dbg_flag(hevc) &
10327 H265_DEBUG_NO_EOS_SEARCH_DONE)
10328 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10329 || vdec->master
10330 || vdec->slave
10331 #endif
10332 ) {
10333 WRITE_VREG(NAL_SEARCH_CTL,
10334 READ_VREG(NAL_SEARCH_CTL) |
10335 0x10000);
10336 }
10337 WRITE_VREG(NAL_SEARCH_CTL,
10338 READ_VREG(NAL_SEARCH_CTL)
10339 | ((parser_sei_enable & 0x7) << 17));
10340 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10341 WRITE_VREG(NAL_SEARCH_CTL,
10342 READ_VREG(NAL_SEARCH_CTL) |
10343 ((parser_dolby_vision_enable & 0x1) << 20));
10344 #endif
10345 config_decode_mode(hevc);
10346 }
10347
10348 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR) {
10349 hevc_print(hevc, 0,
10350 "naltype = %d parse_type %d\n %d %d %d %d\n",
10351 naltype, parse_type, hevc->have_vps,
10352 hevc->have_sps, hevc->have_pps,
10353 hevc->have_valid_start_slice);
10354 }
10355
10356 WRITE_VREG(HEVC_DEC_STATUS_REG, parse_type);
10357 /* Interrupt Amrisc to execute */
10358 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10359 #ifdef MULTI_INSTANCE_SUPPORT
10360 if (hevc->m_ins_flag)
10361 start_process_time(hevc);
10362 #endif
10363 } else if (dec_status == HEVC_SLICE_SEGMENT_DONE) {
10364 #ifdef MULTI_INSTANCE_SUPPORT
10365 if (hevc->m_ins_flag) {
10366 reset_process_time(hevc);
10367 read_decode_info(hevc);
10368
10369 }
10370 #endif
10371 if (hevc->start_decoding_time > 0) {
10372 u32 process_time = 1000*
10373 (jiffies - hevc->start_decoding_time)/HZ;
10374 if (process_time > max_decoding_time)
10375 max_decoding_time = process_time;
10376 }
10377
10378 hevc->error_watchdog_count = 0;
10379 if (hevc->pic_list_init_flag == 2) {
10380 hevc->pic_list_init_flag = 3;
10381 hevc_print(hevc, 0, "set pic_list_init_flag to 3\n");
10382 if (hevc->kpi_first_i_comming == 0) {
10383 hevc->kpi_first_i_comming = 1;
10384 pr_debug("[vdec_kpi][%s] First I frame coming.\n",
10385 __func__);
10386 }
10387 } else if (hevc->wait_buf == 0) {
10388 u32 vui_time_scale;
10389 u32 vui_num_units_in_tick;
10390 unsigned char reconfig_flag = 0;
10391
10392 if (get_dbg_flag(hevc) & H265_DEBUG_SEND_PARAM_WITH_REG)
10393 get_rpm_param(&hevc->param);
10394 else {
10395
10396 for (i = 0; i < (RPM_END - RPM_BEGIN); i += 4) {
10397 int ii;
10398
10399 for (ii = 0; ii < 4; ii++) {
10400 hevc->param.l.data[i + ii] =
10401 hevc->rpm_ptr[i + 3
10402 - ii];
10403 }
10404 }
10405 #ifdef SEND_LMEM_WITH_RPM
10406 check_head_error(hevc);
10407 #endif
10408 }
10409 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR_MORE) {
10410 hevc_print(hevc, 0,
10411 "rpm_param: (%d)\n", hevc->slice_idx);
10412 hevc->slice_idx++;
10413 for (i = 0; i < (RPM_END - RPM_BEGIN); i++) {
10414 hevc_print_cont(hevc, 0,
10415 "%04x ", hevc->param.l.data[i]);
10416 if (((i + 1) & 0xf) == 0)
10417 hevc_print_cont(hevc, 0, "\n");
10418 }
10419
10420 hevc_print(hevc, 0,
10421 "vui_timing_info: %x, %x, %x, %x\n",
10422 hevc->param.p.vui_num_units_in_tick_hi,
10423 hevc->param.p.vui_num_units_in_tick_lo,
10424 hevc->param.p.vui_time_scale_hi,
10425 hevc->param.p.vui_time_scale_lo);
10426 }
10427
10428 if (hevc->is_used_v4l) {
10429 struct aml_vcodec_ctx *ctx =
10430 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
10431 if (!v4l_res_change(hevc, &hevc->param)) {
10432 if (ctx->param_sets_from_ucode && !hevc->v4l_params_parsed) {
10433 struct aml_vdec_ps_infos ps;
10434 int width = hevc->param.p.pic_width_in_luma_samples;
10435 int height = hevc->param.p.pic_height_in_luma_samples;
10436
10437 pr_debug("set ucode parse\n");
10438 vh265_get_ps_info(hevc, width, height, &ps);
10439 /*notice the v4l2 codec.*/
10440 vdec_v4l_set_ps_infos(ctx, &ps);
10441 hevc->v4l_params_parsed = true;
10442 hevc->dec_result = DEC_RESULT_AGAIN;
10443 amhevc_stop();
10444 restore_decode_state(hevc);
10445 reset_process_time(hevc);
10446 vdec_schedule_work(&hevc->work);
10447 return IRQ_HANDLED;
10448 }
10449 } else {
10450 pr_debug("resolution change\n");
10451 hevc->dec_result = DEC_RESULT_AGAIN;
10452 amhevc_stop();
10453 restore_decode_state(hevc);
10454 reset_process_time(hevc);
10455 vdec_schedule_work(&hevc->work);
10456 return IRQ_HANDLED;
10457
10458 }
10459 }
10460
10461 if (
10462 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10463 vdec->master == NULL &&
10464 vdec->slave == NULL &&
10465 #endif
10466 aux_data_is_avaible(hevc)
10467 ) {
10468
10469 if (get_dbg_flag(hevc) &
10470 H265_DEBUG_BUFMGR_MORE)
10471 dump_aux_buf(hevc);
10472 }
10473
10474 vui_time_scale =
10475 (u32)(hevc->param.p.vui_time_scale_hi << 16) |
10476 hevc->param.p.vui_time_scale_lo;
10477 vui_num_units_in_tick =
10478 (u32)(hevc->param.
10479 p.vui_num_units_in_tick_hi << 16) |
10480 hevc->param.
10481 p.vui_num_units_in_tick_lo;
10482 if (hevc->bit_depth_luma !=
10483 ((hevc->param.p.bit_depth & 0xf) + 8)) {
10484 reconfig_flag = 1;
10485 hevc_print(hevc, 0, "Bit depth luma = %d\n",
10486 (hevc->param.p.bit_depth & 0xf) + 8);
10487 }
10488 if (hevc->bit_depth_chroma !=
10489 (((hevc->param.p.bit_depth >> 4) & 0xf) + 8)) {
10490 reconfig_flag = 1;
10491 hevc_print(hevc, 0, "Bit depth chroma = %d\n",
10492 ((hevc->param.p.bit_depth >> 4) &
10493 0xf) + 8);
10494 }
10495 hevc->bit_depth_luma =
10496 (hevc->param.p.bit_depth & 0xf) + 8;
10497 hevc->bit_depth_chroma =
10498 ((hevc->param.p.bit_depth >> 4) & 0xf) + 8;
10499 bit_depth_luma = hevc->bit_depth_luma;
10500 bit_depth_chroma = hevc->bit_depth_chroma;
10501 #ifdef SUPPORT_10BIT
10502 if (hevc->bit_depth_luma == 8 &&
10503 hevc->bit_depth_chroma == 8 &&
10504 enable_mem_saving)
10505 hevc->mem_saving_mode = 1;
10506 else
10507 hevc->mem_saving_mode = 0;
10508 #endif
10509 if (reconfig_flag &&
10510 (get_double_write_mode(hevc) & 0x10) == 0)
10511 init_decode_head_hw(hevc);
10512
10513 if ((vui_time_scale != 0)
10514 && (vui_num_units_in_tick != 0)) {
10515 hevc->frame_dur =
10516 div_u64(96000ULL *
10517 vui_num_units_in_tick,
10518 vui_time_scale);
10519 if (hevc->get_frame_dur != true)
10520 vdec_schedule_work(
10521 &hevc->notify_work);
10522
10523 hevc->get_frame_dur = true;
10524 //hevc->gvs->frame_dur = hevc->frame_dur;
10525 }
10526
10527 if (hevc->video_signal_type !=
10528 ((hevc->param.p.video_signal_type << 16)
10529 | hevc->param.p.color_description)) {
10530 u32 v = hevc->param.p.video_signal_type;
10531 u32 c = hevc->param.p.color_description;
10532 #if 0
10533 if (v & 0x2000) {
10534 hevc_print(hevc, 0,
10535 "video_signal_type present:\n");
10536 hevc_print(hevc, 0, " %s %s\n",
10537 video_format_names[(v >> 10) & 7],
10538 ((v >> 9) & 1) ?
10539 "full_range" : "limited");
10540 if (v & 0x100) {
10541 hevc_print(hevc, 0,
10542 " color_description present:\n");
10543 hevc_print(hevc, 0,
10544 " color_primarie = %s\n",
10545 color_primaries_names
10546 [v & 0xff]);
10547 hevc_print(hevc, 0,
10548 " transfer_characteristic = %s\n",
10549 transfer_characteristics_names
10550 [(c >> 8) & 0xff]);
10551 hevc_print(hevc, 0,
10552 " matrix_coefficient = %s\n",
10553 matrix_coeffs_names[c & 0xff]);
10554 }
10555 }
10556 #endif
10557 hevc->video_signal_type = (v << 16) | c;
10558 video_signal_type = hevc->video_signal_type;
10559 }
10560
10561 if (use_cma &&
10562 (hevc->param.p.slice_segment_address == 0)
10563 && (hevc->pic_list_init_flag == 0)) {
10564 int log = hevc->param.p.log2_min_coding_block_size_minus3;
10565 int log_s = hevc->param.p.log2_diff_max_min_coding_block_size;
10566
10567 hevc->pic_w = hevc->param.p.pic_width_in_luma_samples;
10568 hevc->pic_h = hevc->param.p.pic_height_in_luma_samples;
10569 hevc->lcu_size = 1 << (log + 3 + log_s);
10570 hevc->lcu_size_log2 = log2i(hevc->lcu_size);
10571 if (performance_profile && ((!is_oversize(hevc->pic_w, hevc->pic_h)) && IS_8K_SIZE(hevc->pic_w, hevc->pic_h)))
10572 hevc->performance_profile = 1;
10573 else
10574 hevc->performance_profile = 0;
10575 hevc_print(hevc, 0, "hevc->performance_profile %d\n", hevc->performance_profile);
10576 if (hevc->pic_w == 0 || hevc->pic_h == 0
10577 || hevc->lcu_size == 0
10578 || is_oversize(hevc->pic_w, hevc->pic_h)
10579 || (!hevc->skip_first_nal &&
10580 (hevc->pic_h == 96) && (hevc->pic_w == 160))) {
10581 /* skip, search next start code */
10582 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG)
10583 & (~0x2));
10584 if (!hevc->skip_first_nal &&
10585 (hevc->pic_h == 96) && (hevc->pic_w == 160))
10586 hevc->skip_first_nal = 1;
10587 hevc->skip_flag = 1;
10588 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10589 /* Interrupt Amrisc to execute */
10590 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10591 #ifdef MULTI_INSTANCE_SUPPORT
10592 if (hevc->m_ins_flag)
10593 start_process_time(hevc);
10594 #endif
10595 } else {
10596 hevc->sps_num_reorder_pics_0 =
10597 hevc->param.p.sps_num_reorder_pics_0;
10598 hevc->ip_mode = (!hevc->sps_num_reorder_pics_0 &&
10599 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
10600 !(vdec->slave || vdec->master) &&
10601 #endif
10602 !disable_ip_mode) ? true : false;
10603 hevc->pic_list_init_flag = 1;
10604 if ((!IS_4K_SIZE(hevc->pic_w, hevc->pic_h)) &&
10605 ((hevc->param.p.profile_etc & 0xc) == 0x4)
10606 && (interlace_enable != 0)) {
10607 hevc->double_write_mode = 1;
10608 hevc->interlace_flag = 1;
10609 hevc->frame_ar = (hevc->pic_h * 0x100 / hevc->pic_w) * 2;
10610 hevc_print(hevc, 0,
10611 "interlace (%d, %d), profile_etc %x, ar 0x%x, dw %d\n",
10612 hevc->pic_w, hevc->pic_h, hevc->param.p.profile_etc, hevc->frame_ar,
10613 get_double_write_mode(hevc));
10614 }
10615 #ifdef MULTI_INSTANCE_SUPPORT
10616 if (hevc->m_ins_flag) {
10617 vdec_schedule_work(&hevc->work);
10618 } else
10619 #endif
10620 up(&h265_sema);
10621 hevc_print(hevc, 0, "set pic_list_init_flag 1\n");
10622 }
10623 return IRQ_HANDLED;
10624 }
10625
10626 }
10627 ret =
10628 hevc_slice_segment_header_process(hevc,
10629 &hevc->param, decode_pic_begin);
10630 if (ret < 0) {
10631 #ifdef MULTI_INSTANCE_SUPPORT
10632 if (hevc->m_ins_flag) {
10633 hevc->wait_buf = 0;
10634 hevc->dec_result = DEC_RESULT_AGAIN;
10635 amhevc_stop();
10636 restore_decode_state(hevc);
10637 reset_process_time(hevc);
10638 vdec_schedule_work(&hevc->work);
10639 return IRQ_HANDLED;
10640 }
10641 #else
10642 ;
10643 #endif
10644 } else if (ret == 0) {
10645 if ((hevc->new_pic) && (hevc->cur_pic)) {
10646 hevc->cur_pic->stream_offset =
10647 READ_VREG(HEVC_SHIFT_BYTE_COUNT);
10648 hevc->cur_pic->stream_frame_size =
10649 hevc->cur_pic->stream_offset - hevc->last_dec_pic_offset;
10650 hevc_print(hevc, H265_DEBUG_OUT_PTS,
10651 "read stream_offset = 0x%x, frame_size = 0x%x\n",
10652 hevc->cur_pic->stream_offset, hevc->cur_pic->stream_frame_size);
10653 hevc->last_dec_pic_offset = hevc->cur_pic->stream_offset;
10654
10655
10656 hevc->cur_pic->aspect_ratio_idc =
10657 hevc->param.p.aspect_ratio_idc;
10658 hevc->cur_pic->sar_width =
10659 hevc->param.p.sar_width;
10660 hevc->cur_pic->sar_height =
10661 hevc->param.p.sar_height;
10662 }
10663
10664 WRITE_VREG(HEVC_DEC_STATUS_REG,
10665 HEVC_CODED_SLICE_SEGMENT_DAT);
10666 /* Interrupt Amrisc to execute */
10667 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10668
10669 hevc->start_decoding_time = jiffies;
10670 #ifdef MULTI_INSTANCE_SUPPORT
10671 if (hevc->m_ins_flag)
10672 start_process_time(hevc);
10673 #endif
10674 #if 1
10675 /*to do..., copy aux data to hevc->cur_pic*/
10676 #endif
10677 #ifdef MULTI_INSTANCE_SUPPORT
10678 } else if (hevc->m_ins_flag) {
10679 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
10680 "%s, bufmgr ret %d skip, DEC_RESULT_DONE\n",
10681 __func__, ret);
10682 hevc->decoded_poc = INVALID_POC;
10683 hevc->decoding_pic = NULL;
10684 hevc->dec_result = DEC_RESULT_DONE;
10685 amhevc_stop();
10686 reset_process_time(hevc);
10687 vdec_schedule_work(&hevc->work);
10688 #endif
10689 } else {
10690 /* skip, search next start code */
10691 hevc->gvs->drop_frame_count++;
10692 WRITE_VREG(HEVC_WAIT_FLAG, READ_VREG(HEVC_WAIT_FLAG) & (~0x2));
10693 hevc->skip_flag = 1;
10694 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
10695 /* Interrupt Amrisc to execute */
10696 WRITE_VREG(HEVC_MCPU_INTR_REQ, AMRISC_MAIN_REQ);
10697 }
10698
10699 } else if (dec_status == HEVC_DECODE_OVER_SIZE) {
10700 hevc_print(hevc, 0, "hevc decode oversize !!\n");
10701 #ifdef MULTI_INSTANCE_SUPPORT
10702 if (!hevc->m_ins_flag)
10703 debug |= (H265_DEBUG_DIS_LOC_ERROR_PROC |
10704 H265_DEBUG_DIS_SYS_ERROR_PROC);
10705 #endif
10706 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
10707 }
10708 return IRQ_HANDLED;
10709 }
10710
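/*
 * Poll HEVC_STREAM_CONTROL until the stream fetch engine goes idle
 * (bit 1 clears), sleeping 20ms per iteration and giving up after
 * roughly two seconds.
 */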
10711 static void wait_hevc_search_done(struct hevc_state_s *hevc)
10712 {
10713 int count = 0;
10714 WRITE_VREG(HEVC_SHIFT_STATUS, 0);
10715 while (READ_VREG(HEVC_STREAM_CONTROL) & 0x2) {
10716 msleep(20);
10717 count++;
10718 if (count > 100) {
10719 hevc_print(hevc, 0, "%s timeout\n", __func__);
10720 break;
10721 }
10722 }
10723 }
10724 static irqreturn_t vh265_isr(int irq, void *data)
10725 {
10726 int i, temp;
10727 unsigned int dec_status;
10728 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
10729 u32 debug_tag;
10730 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10731
10732 if (hevc->init_flag == 0)
10733 return IRQ_HANDLED;
10734 hevc->dec_status = dec_status;
10735 if (is_log_enable(hevc))
10736 add_log(hevc,
10737 "isr: status = 0x%x dec info 0x%x lcu 0x%x shiftbyte 0x%x shiftstatus 0x%x",
10738 dec_status, READ_HREG(HEVC_DECODE_INFO),
10739 READ_VREG(HEVC_MPRED_CURR_LCU),
10740 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10741 READ_VREG(HEVC_SHIFT_STATUS));
10742
10743 if (get_dbg_flag(hevc) & H265_DEBUG_BUFMGR)
10744 hevc_print(hevc, 0,
10745 "265 isr dec status = 0x%x dec info 0x%x shiftbyte 0x%x shiftstatus 0x%x\n",
10746 dec_status, READ_HREG(HEVC_DECODE_INFO),
10747 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
10748 READ_VREG(HEVC_SHIFT_STATUS));
10749
10750 debug_tag = READ_HREG(DEBUG_REG1);
10751 if (debug_tag & 0x10000) {
10752 hevc_print(hevc, 0,
10753 "LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1));
10754
10755 if (hevc->mmu_enable)
10756 temp = 0x500;
10757 else
10758 temp = 0x400;
10759 for (i = 0; i < temp; i += 4) {
10760 int ii;
10761 if ((i & 0xf) == 0)
10762 hevc_print_cont(hevc, 0, "%03x: ", i);
10763 for (ii = 0; ii < 4; ii++) {
10764 hevc_print_cont(hevc, 0, "%04x ",
10765 hevc->lmem_ptr[i + 3 - ii]);
10766 }
10767 if (((i + ii) & 0xf) == 0)
10768 hevc_print_cont(hevc, 0, "\n");
10769 }
10770
10771 if (((udebug_pause_pos & 0xffff)
10772 == (debug_tag & 0xffff)) &&
10773 (udebug_pause_decode_idx == 0 ||
10774 udebug_pause_decode_idx == hevc->decode_idx) &&
10775 (udebug_pause_val == 0 ||
10776 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10777 udebug_pause_pos &= 0xffff;
10778 hevc->ucode_pause_pos = udebug_pause_pos;
10779 }
10780 else if (debug_tag & 0x20000)
10781 hevc->ucode_pause_pos = 0xffffffff;
10782 if (hevc->ucode_pause_pos)
10783 reset_process_time(hevc);
10784 else
10785 WRITE_HREG(DEBUG_REG1, 0);
10786 } else if (debug_tag != 0) {
10787 hevc_print(hevc, 0,
10788 "dbg%x: %x l/w/r %x %x %x\n", READ_HREG(DEBUG_REG1),
10789 READ_HREG(DEBUG_REG2),
10790 READ_VREG(HEVC_STREAM_LEVEL),
10791 READ_VREG(HEVC_STREAM_WR_PTR),
10792 READ_VREG(HEVC_STREAM_RD_PTR));
10793 if (((udebug_pause_pos & 0xffff)
10794 == (debug_tag & 0xffff)) &&
10795 (udebug_pause_decode_idx == 0 ||
10796 udebug_pause_decode_idx == hevc->decode_idx) &&
10797 (udebug_pause_val == 0 ||
10798 udebug_pause_val == READ_HREG(DEBUG_REG2))) {
10799 udebug_pause_pos &= 0xffff;
10800 hevc->ucode_pause_pos = udebug_pause_pos;
10801 }
10802 if (hevc->ucode_pause_pos)
10803 reset_process_time(hevc);
10804 else
10805 WRITE_HREG(DEBUG_REG1, 0);
10806 return IRQ_HANDLED;
10807 }
10808
10809
10810 if (hevc->pic_list_init_flag == 1)
10811 return IRQ_HANDLED;
10812
10813 if (!hevc->m_ins_flag) {
10814 if (dec_status == HEVC_OVER_DECODE) {
10815 hevc->over_decode = 1;
10816 hevc_print(hevc, 0,
10817 "isr: over decode\n"),
10818 WRITE_VREG(HEVC_DEC_STATUS_REG, 0);
10819 return IRQ_HANDLED;
10820 }
10821 }
10822
10823 return IRQ_WAKE_THREAD;
10824
10825 }
10826
10827 static void vh265_set_clk(struct work_struct *work)
10828 {
10829 struct hevc_state_s *hevc = container_of(work,
10830 struct hevc_state_s, set_clk_work);
10831
10832 int fps = 96000 / hevc->frame_dur;
10833
10834 if (hevc_source_changed(VFORMAT_HEVC,
10835 hevc->frame_width, hevc->frame_height, fps) > 0)
10836 hevc->saved_resolution = hevc->frame_width *
10837 hevc->frame_height * fps;
10838 }
10839
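/*
 * Periodic watchdog, re-armed every PUT_INTERVAL.  In multi-instance
 * mode it checks for decode timeouts against decode_timeout_val; in
 * single mode it drives the error_flag state machine when the receiver
 * is inactive while the stream buffer keeps filling, and it also
 * services the debug triggers and clock re-evaluation.
 */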
10840 static void vh265_check_timer_func(struct timer_list *timer)
10841 {
10842 struct hevc_state_s *hevc = container_of(timer, struct hevc_state_s, timer);
10843 unsigned char empty_flag;
10844 unsigned int buf_level;
10845
10846 enum receviver_start_e state = RECEIVER_INACTIVE;
10847
10848 if (hevc->init_flag == 0) {
10849 if (hevc->stat & STAT_TIMER_ARM) {
10850 mod_timer(&hevc->timer, jiffies + PUT_INTERVAL);
10851 }
10852 return;
10853 }
10854 #ifdef MULTI_INSTANCE_SUPPORT
10855 if (hevc->m_ins_flag &&
10856 (get_dbg_flag(hevc) &
10857 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) == 0 &&
10858 hw_to_vdec(hevc)->next_status ==
10859 VDEC_STATUS_DISCONNECTED &&
10860 !hevc->is_used_v4l) {
10861 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
10862 vdec_schedule_work(&hevc->work);
10863 hevc_print(hevc,
10864 0, "vdec requested to be disconnected\n");
10865 return;
10866 }
10867
10868 if (hevc->m_ins_flag) {
10869 if (((get_dbg_flag(hevc) &
10870 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) &&
10871 (decode_timeout_val > 0) &&
10872 (hevc->start_process_time > 0) &&
10873 ((1000 * (jiffies - hevc->start_process_time) / HZ)
10874 > decode_timeout_val)
10875 ) {
10876 u32 dec_status = READ_VREG(HEVC_DEC_STATUS_REG);
10877 int current_lcu_idx =
10878 READ_VREG(HEVC_PARSER_LCU_START)&0xffffff;
10879 if (dec_status == HEVC_CODED_SLICE_SEGMENT_DAT) {
10880 if (hevc->last_lcu_idx == current_lcu_idx) {
10881 if (hevc->decode_timeout_count > 0)
10882 hevc->decode_timeout_count--;
10883 if (hevc->decode_timeout_count == 0)
10884 timeout_process(hevc);
10885 } else
10886 restart_process_time(hevc);
10887 hevc->last_lcu_idx = current_lcu_idx;
10888 } else {
10889 hevc->pic_decoded_lcu_idx = current_lcu_idx;
10890 timeout_process(hevc);
10891 }
10892 }
10893 } else {
10894 #endif
10895 if (hevc->m_ins_flag == 0 &&
10896 vf_get_receiver(hevc->provider_name)) {
10897 state =
10898 vf_notify_receiver(hevc->provider_name,
10899 VFRAME_EVENT_PROVIDER_QUREY_STATE,
10900 NULL);
10901 if ((state == RECEIVER_STATE_NULL)
10902 || (state == RECEIVER_STATE_NONE))
10903 state = RECEIVER_INACTIVE;
10904 } else
10905 state = RECEIVER_INACTIVE;
10906
10907 empty_flag = (READ_VREG(HEVC_PARSER_INT_STATUS) >> 6) & 0x1;
10908 /* error watchdog */
10909 if (hevc->m_ins_flag == 0 &&
10910 (empty_flag == 0)
10911 && (hevc->pic_list_init_flag == 0
10912 || hevc->pic_list_init_flag
10913 == 3)) {
10914 /* decoder has input */
10915 if ((get_dbg_flag(hevc) &
10916 H265_DEBUG_DIS_LOC_ERROR_PROC) == 0) {
10917
10918 buf_level = READ_VREG(HEVC_STREAM_LEVEL);
10919 /* receiver has no buffer to recycle */
10920 if ((state == RECEIVER_INACTIVE) &&
10921 (kfifo_is_empty(&hevc->display_q) &&
10922 buf_level > 0x200)
10923 ) {
10924 if (hevc->error_flag == 0) {
10925 hevc->error_watchdog_count++;
10926 if (hevc->error_watchdog_count ==
10927 error_handle_threshold) {
10928 hevc_print(hevc, 0,
10929 "H265 dec err local reset.\n");
10930 hevc->error_flag = 1;
10931 hevc->error_watchdog_count = 0;
10932 hevc->error_skip_nal_wt_cnt = 0;
10933 hevc->
10934 error_system_watchdog_count++;
10935 WRITE_VREG
10936 (HEVC_ASSIST_MBOX0_IRQ_REG,
10937 0x1);
10938 }
10939 } else if (hevc->error_flag == 2) {
10940 int th =
10941 error_handle_nal_skip_threshold;
10942 hevc->error_skip_nal_wt_cnt++;
10943 if (hevc->error_skip_nal_wt_cnt
10944 == th) {
10945 hevc->error_flag = 3;
10946 hevc->error_watchdog_count = 0;
10947 hevc->
10948 error_skip_nal_wt_cnt = 0;
10949 WRITE_VREG
10950 (HEVC_ASSIST_MBOX0_IRQ_REG,
10951 0x1);
10952 }
10953 }
10954 }
10955 }
10956
10957 if ((get_dbg_flag(hevc)
10958 & H265_DEBUG_DIS_SYS_ERROR_PROC) == 0)
10959 /* receiver has no buffer to recycle */
10960 if ((state == RECEIVER_INACTIVE) &&
10961 (kfifo_is_empty(&hevc->display_q))
10962 ) { /* no buffer to recycle */
10963 if ((get_dbg_flag(hevc) &
10964 H265_DEBUG_DIS_LOC_ERROR_PROC) !=
10965 0)
10966 hevc->error_system_watchdog_count++;
10967 if (hevc->error_system_watchdog_count ==
10968 error_handle_system_threshold) {
10969 /* and it lasts for a while */
10970 hevc_print(hevc, 0,
10971 "H265 dec fatal error watchdog.\n");
10972 hevc->
10973 error_system_watchdog_count = 0;
10974 hevc->fatal_error |= DECODER_FATAL_ERROR_UNKNOWN;
10975 }
10976 }
10977 } else {
10978 hevc->error_watchdog_count = 0;
10979 hevc->error_system_watchdog_count = 0;
10980 }
10981 #ifdef MULTI_INSTANCE_SUPPORT
10982 }
10983 #endif
10984 if ((hevc->ucode_pause_pos != 0) &&
10985 (hevc->ucode_pause_pos != 0xffffffff) &&
10986 udebug_pause_pos != hevc->ucode_pause_pos) {
10987 hevc->ucode_pause_pos = 0;
10988 WRITE_HREG(DEBUG_REG1, 0);
10989 }
10990
10991 if (get_dbg_flag(hevc) & H265_DEBUG_DUMP_PIC_LIST) {
10992 dump_pic_list(hevc);
10993 debug &= ~H265_DEBUG_DUMP_PIC_LIST;
10994 }
10995 if (get_dbg_flag(hevc) & H265_DEBUG_TRIG_SLICE_SEGMENT_PROC) {
10996 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
10997 debug &= ~H265_DEBUG_TRIG_SLICE_SEGMENT_PROC;
10998 }
10999 #ifdef TEST_NO_BUF
11000 if (hevc->wait_buf)
11001 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11002 #endif
11003 if (get_dbg_flag(hevc) & H265_DEBUG_HW_RESET) {
11004 hevc->error_skip_nal_count = error_skip_nal_count;
11005 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11006
11007 debug &= ~H265_DEBUG_HW_RESET;
11008 }
11009
11010 #ifdef ERROR_HANDLE_DEBUG
11011 if ((dbg_nal_skip_count > 0) && ((dbg_nal_skip_count & 0x10000) != 0)) {
11012 hevc->error_skip_nal_count = dbg_nal_skip_count & 0xffff;
11013 dbg_nal_skip_count &= ~0x10000;
11014 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
11015 }
11016 #endif
11017
11018 if (radr != 0) {
11019 if (rval != 0) {
11020 WRITE_VREG(radr, rval);
11021 hevc_print(hevc, 0,
11022 "WRITE_VREG(%x,%x)\n", radr, rval);
11023 } else
11024 hevc_print(hevc, 0,
11025 "READ_VREG(%x)=%x\n", radr, READ_VREG(radr));
11026 rval = 0;
11027 radr = 0;
11028 }
11029 if (dbg_cmd != 0) {
11030 if (dbg_cmd == 1) {
11031 u32 disp_laddr;
11032
11033 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB &&
11034 get_double_write_mode(hevc) == 0) {
11035 disp_laddr =
11036 READ_VCBUS_REG(AFBC_BODY_BADDR) << 4;
11037 } else {
11038 struct canvas_s cur_canvas;
11039
11040 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0)
11041 & 0xff), &cur_canvas);
11042 disp_laddr = cur_canvas.addr;
11043 }
11044 hevc_print(hevc, 0,
11045 "current displayed buffer address %x\r\n",
11046 disp_laddr);
11047 }
11048 dbg_cmd = 0;
11049 }
11050 /* don't change it at start. */
11051 if (hevc->m_ins_flag == 0 &&
11052 hevc->get_frame_dur && hevc->show_frame_num > 60 &&
11053 hevc->frame_dur > 0 && hevc->saved_resolution !=
11054 hevc->frame_width * hevc->frame_height *
11055 (96000 / hevc->frame_dur))
11056 vdec_schedule_work(&hevc->set_clk_work);
11057
11058 mod_timer(timer, jiffies + PUT_INTERVAL);
11059 }
11060
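/*
 * Worker thread woken via h265_sema: allocates and initializes the
 * picture list once the parameter sets have been parsed
 * (pic_list_init_flag == 1), and tears it down when an uninit
 * request is pending.
 */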
11061 static int h265_task_handle(void *data)
11062 {
11063 int ret = 0;
11064 struct hevc_state_s *hevc = (struct hevc_state_s *)data;
11065
11066 set_user_nice(current, -10);
11067 while (1) {
11068 if (use_cma == 0) {
11069 hevc_print(hevc, 0,
11070 "ERROR: use_cma can not be changed dynamically\n");
11071 }
11072 ret = down_interruptible(&h265_sema);
11073 if ((hevc->init_flag != 0) && (hevc->pic_list_init_flag == 1)) {
11074 init_pic_list(hevc);
11075 init_pic_list_hw(hevc);
11076 init_buf_spec(hevc);
11077 hevc->pic_list_init_flag = 2;
11078 hevc_print(hevc, 0, "set pic_list_init_flag to 2\n");
11079
11080 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
11081
11082 }
11083
11084 if (hevc->uninit_list) {
11085 /*USE_BUF_BLOCK*/
11086 uninit_pic_list(hevc);
11087 hevc_print(hevc, 0, "uninit list\n");
11088 hevc->uninit_list = 0;
11089 #ifdef USE_UNINIT_SEMA
11090 if (use_cma) {
11091 up(&hevc->h265_uninit_done_sema);
11092 while (!kthread_should_stop())
11093 msleep(1);
11094 break;
11095 }
11096 #endif
11097 }
11098 }
11099
11100 return 0;
11101 }
11102
11103 void vh265_free_cmabuf(void)
11104 {
11105 struct hevc_state_s *hevc = gHevc;
11106
11107 mutex_lock(&vh265_mutex);
11108
11109 if (hevc->init_flag) {
11110 mutex_unlock(&vh265_mutex);
11111 return;
11112 }
11113
11114 mutex_unlock(&vh265_mutex);
11115 }
11116
11117 #ifdef MULTI_INSTANCE_SUPPORT
11118 int vh265_dec_status(struct vdec_s *vdec, struct vdec_info *vstatus)
11119 #else
11120 int vh265_dec_status(struct vdec_info *vstatus)
11121 #endif
11122 {
11123 #ifdef MULTI_INSTANCE_SUPPORT
11124 struct hevc_state_s *hevc =
11125 (struct hevc_state_s *)vdec->private;
11126 #else
11127 struct hevc_state_s *hevc = gHevc;
11128 #endif
11129 if (!hevc)
11130 return -1;
11131
11132 vstatus->frame_width = hevc->frame_width;
11133 /* for hevc interlace, display height is doubled */
11134 vstatus->frame_height =
11135 (hevc->frame_height << hevc->interlace_flag);
11136 if (hevc->frame_dur != 0)
11137 vstatus->frame_rate = 96000 / hevc->frame_dur;
11138 else
11139 vstatus->frame_rate = -1;
11140 vstatus->error_count = hevc->gvs->error_frame_count;
11141 vstatus->status = hevc->stat | hevc->fatal_error;
11142 vstatus->bit_rate = hevc->gvs->bit_rate;
11143 vstatus->frame_dur = hevc->frame_dur;
11144 if (hevc->gvs) {
11145 vstatus->bit_rate = hevc->gvs->bit_rate;
11146 vstatus->frame_data = hevc->gvs->frame_data;
11147 vstatus->total_data = hevc->gvs->total_data;
11148 vstatus->frame_count = hevc->gvs->frame_count;
11149 vstatus->error_frame_count = hevc->gvs->error_frame_count;
11150 vstatus->drop_frame_count = hevc->gvs->drop_frame_count;
11151 vstatus->samp_cnt = hevc->gvs->samp_cnt;
11152 vstatus->offset = hevc->gvs->offset;
11153 }
11154
11155 snprintf(vstatus->vdec_name, sizeof(vstatus->vdec_name),
11156 "%s", DRIVER_NAME);
11157 vstatus->ratio_control = hevc->ratio_control;
11158 return 0;
11159 }
11160
11161 int vh265_set_isreset(struct vdec_s *vdec, int isreset)
11162 {
11163 is_reset = isreset;
11164 return 0;
11165 }
11166
11167 static int vh265_vdec_info_init(struct hevc_state_s *hevc)
11168 {
11169 hevc->gvs = kzalloc(sizeof(struct vdec_info), GFP_KERNEL);
11170 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
11171 if (NULL == hevc->gvs) {
11172 pr_info("the struct of vdec status malloc failed.\n");
11173 return -ENOMEM;
11174 }
11175 vdec_set_vframe_comm(hw_to_vdec(hevc), DRIVER_NAME);
11176 return 0;
11177 }
11178
11179 #if 0
11180 static void H265_DECODE_INIT(void)
11181 {
11182 /* enable hevc clocks */
11183 WRITE_VREG(DOS_GCLK_EN3, 0xffffffff);
11184 /* *************************************************************** */
11185 /* Power ON HEVC */
11186 /* *************************************************************** */
11187 /* Powerup HEVC */
11188 WRITE_VREG(P_AO_RTI_GEN_PWR_SLEEP0,
11189 READ_VREG(P_AO_RTI_GEN_PWR_SLEEP0) & (~(0x3 << 6)));
11190 WRITE_VREG(DOS_MEM_PD_HEVC, 0x0);
11191 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) | (0x3ffff << 2));
11192 WRITE_VREG(DOS_SW_RESET3, READ_VREG(DOS_SW_RESET3) & (~(0x3ffff << 2)));
11193 /* remove isolations */
11194 WRITE_VREG(AO_RTI_GEN_PWR_ISO0,
11195 READ_VREG(AO_RTI_GEN_PWR_ISO0) & (~(0x3 << 10)));
11196
11197 }
11198 #endif
11199
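/*
 * Trick-mode control: TRICKMODE_I enables I-frame-only decoding,
 * TRICKMODE_NONE restores normal decoding; the extra 0x02/0x03/0x07 values
 * select the finer i_only_flag policies checked elsewhere in the driver.
 */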
11200 int vh265_set_trickmode(struct vdec_s *vdec, unsigned long trickmode)
11201 {
11202 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11203 hevc_print(hevc, 0, "[%s %d] trickmode:%lu\n", __func__, __LINE__, trickmode);
11204
11205 if (trickmode == TRICKMODE_I) {
11206 trickmode_i = 1;
11207 i_only_flag = 0x1;
11208 } else if (trickmode == TRICKMODE_NONE) {
11209 trickmode_i = 0;
11210 i_only_flag = 0x0;
11211 } else if (trickmode == 0x02) {
11212 trickmode_i = 0;
11213 i_only_flag = 0x02;
11214 } else if (trickmode == 0x03) {
11215 trickmode_i = 1;
11216 i_only_flag = 0x03;
11217 } else if (trickmode == 0x07) {
11218 trickmode_i = 1;
11219 i_only_flag = 0x07;
11220 }
11221 //hevc_print(hevc, 0, "i_only_flag: %d trickmode_i:%d\n", i_only_flag, trickmode_i);
11222
11223 return 0;
11224 }
11225
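/*
 * Select the value written to HEVC_DECODE_MODE: single mode for the legacy
 * (non multi-instance) path, frame-based or stream-based multi mode otherwise,
 * with DolbyVision master/slave instances switched to the DVENL/DVBAL modes.
 * start_parser_type (DV modes), start_decoding_flag and a mailbox-0 flag are
 * packed into the upper bits, and rps_set_id goes to HEVC_DECODE_MODE2.
 */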
11226 static void config_decode_mode(struct hevc_state_s *hevc)
11227 {
11228 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11229 struct vdec_s *vdec = hw_to_vdec(hevc);
11230 #endif
11231 unsigned decode_mode;
11232 #ifdef HEVC_8K_LFTOFFSET_FIX
11233 if (hevc->performance_profile)
11234 WRITE_VREG(NAL_SEARCH_CTL,
11235 READ_VREG(NAL_SEARCH_CTL) | (1 << 21));
11236 #endif
11237 if (!hevc->m_ins_flag)
11238 decode_mode = DECODE_MODE_SINGLE;
11239 else if (vdec_frame_based(hw_to_vdec(hevc)))
11240 decode_mode =
11241 DECODE_MODE_MULTI_FRAMEBASE;
11242 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11243 else if (vdec->slave) {
11244 if (force_bypass_dvenl & 0x80000000)
11245 hevc->bypass_dvenl = force_bypass_dvenl & 0x1;
11246 else
11247 hevc->bypass_dvenl = hevc->bypass_dvenl_enable;
11248 if (dolby_meta_with_el && hevc->bypass_dvenl) {
11249 hevc->bypass_dvenl = 0;
11250 hevc_print(hevc, 0,
11251 "NOT support bypass_dvenl when meta_with_el\n");
11252 }
11253 if (hevc->bypass_dvenl)
11254 decode_mode =
11255 (hevc->start_parser_type << 8)
11256 | DECODE_MODE_MULTI_STREAMBASE;
11257 else
11258 decode_mode =
11259 (hevc->start_parser_type << 8)
11260 | DECODE_MODE_MULTI_DVBAL;
11261 } else if (vdec->master)
11262 decode_mode =
11263 (hevc->start_parser_type << 8)
11264 | DECODE_MODE_MULTI_DVENL;
11265 #endif
11266 else
11267 decode_mode =
11268 DECODE_MODE_MULTI_STREAMBASE;
11269
11270 if (hevc->m_ins_flag)
11271 decode_mode |=
11272 (hevc->start_decoding_flag << 16);
11273 /* set MBX0 interrupt flag */
11274 decode_mode |= (0x80 << 24);
11275 WRITE_VREG(HEVC_DECODE_MODE, decode_mode);
11276 WRITE_VREG(HEVC_DECODE_MODE2,
11277 hevc->rps_set_id);
11278 }
11279
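/*
 * Program the decoder-side protocol before the firmware runs: workspace and
 * decoder HW init, mailbox-0 interrupt setup, NAL search control (manual vs.
 * ucode vps/sps/pps/I-slice checking depending on PB_skip_mode and debug
 * flags), SEI/DolbyVision parsing bits, the decode mode and the aux buffers.
 */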
11280 static void vh265_prot_init(struct hevc_state_s *hevc)
11281 {
11282 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11283 struct vdec_s *vdec = hw_to_vdec(hevc);
11284 #endif
11285 /* H265_DECODE_INIT(); */
11286
11287 hevc_config_work_space_hw(hevc);
11288
11289 hevc_init_decoder_hw(hevc, 0, 0xffffffff);
11290
11291 WRITE_VREG(HEVC_WAIT_FLAG, 1);
11292
11293 /* WRITE_VREG(P_HEVC_MPSR, 1); */
11294
11295 /* clear mailbox interrupt */
11296 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG, 1);
11297
11298 /* enable mailbox interrupt */
11299 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 1);
11300
11301 /* disable PSCALE for hardware sharing */
11302 WRITE_VREG(HEVC_PSCALE_CTRL, 0);
11303
11304 WRITE_VREG(DEBUG_REG1, 0x0 | (dump_nal << 8));
11305
11306 if ((get_dbg_flag(hevc) &
11307 (H265_DEBUG_MAN_SKIP_NAL |
11308 H265_DEBUG_MAN_SEARCH_NAL))
11309 /*||hevc->m_ins_flag*/
11310 ) {
11311 WRITE_VREG(NAL_SEARCH_CTL, 0x1); /* manual parser NAL */
11312 } else {
11313 /* check vps/sps/pps/i-slice in ucode */
11314 unsigned ctl_val = 0x8;
11315 if (hevc->PB_skip_mode == 0)
11316 ctl_val = 0x4; /* check vps/sps/pps only in ucode */
11317 else if (hevc->PB_skip_mode == 3)
11318 ctl_val = 0x0; /* check vps/sps/pps/idr in ucode */
11319 WRITE_VREG(NAL_SEARCH_CTL, ctl_val);
11320 }
11321 if ((get_dbg_flag(hevc) & H265_DEBUG_NO_EOS_SEARCH_DONE)
11322 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11323 || vdec->master
11324 || vdec->slave
11325 #endif
11326 )
11327 WRITE_VREG(NAL_SEARCH_CTL, READ_VREG(NAL_SEARCH_CTL) | 0x10000);
11328
11329 WRITE_VREG(NAL_SEARCH_CTL,
11330 READ_VREG(NAL_SEARCH_CTL)
11331 | ((parser_sei_enable & 0x7) << 17));
11332 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
11333 WRITE_VREG(NAL_SEARCH_CTL,
11334 READ_VREG(NAL_SEARCH_CTL) |
11335 ((parser_dolby_vision_enable & 0x1) << 20));
11336 #endif
11337 WRITE_VREG(DECODE_STOP_POS, udebug_flag);
11338
11339 config_decode_mode(hevc);
11340 config_aux_buf(hevc);
11341 #ifdef SWAP_HEVC_UCODE
11342 if (!tee_enabled() && hevc->is_swap &&
11343 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11344 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11345 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11346 }
11347 #endif
11348 #ifdef DETREFILL_ENABLE
11349 if (hevc->is_swap &&
11350 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11351 WRITE_VREG(HEVC_SAO_DBG_MODE0, 0);
11352 WRITE_VREG(HEVC_SAO_DBG_MODE1, 0);
11353 }
11354 #endif
11355 }
11356
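/*
 * Reset the per-stream software state from the amstream dec info (size, rate,
 * i-only and pts-unstable hints), reject oversized streams, and refill the
 * newframe kfifo with the vframe pool before calling hevc_local_init().
 */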
11357 static int vh265_local_init(struct hevc_state_s *hevc)
11358 {
11359 int i;
11360 int ret = -1;
11361
11362 #ifdef DEBUG_PTS
11363 hevc->pts_missed = 0;
11364 hevc->pts_hit = 0;
11365 #endif
11366 hevc->pts_lookup_margin = 0;
11367 hevc->pts_continue_miss = 0;
11368 hevc->min_pic_size = 0;
11369
11370 hevc->saved_resolution = 0;
11371 hevc->get_frame_dur = false;
11372 hevc->frame_width = hevc->vh265_amstream_dec_info.width;
11373 hevc->frame_height = hevc->vh265_amstream_dec_info.height;
11374 if (is_oversize(hevc->frame_width, hevc->frame_height)) {
11375 pr_info("over size : %u x %u.\n",
11376 hevc->frame_width, hevc->frame_height);
11377 hevc->fatal_error |= DECODER_FATAL_ERROR_SIZE_OVERFLOW;
11378 return ret;
11379 }
11380
11381 if (hevc->max_pic_w && hevc->max_pic_h) {
11382 hevc->is_4k = !(hevc->max_pic_w && hevc->max_pic_h) ||
11383 ((hevc->max_pic_w * hevc->max_pic_h) >
11384 1920 * 1088) ? true : false;
11385 } else {
11386 hevc->is_4k = !(hevc->frame_width && hevc->frame_height) ||
11387 ((hevc->frame_width * hevc->frame_height) >
11388 1920 * 1088) ? true : false;
11389 }
11390
11391 hevc->frame_dur =
11392 (hevc->vh265_amstream_dec_info.rate ==
11393 0) ? 3600 : hevc->vh265_amstream_dec_info.rate;
11394 //hevc->gvs->frame_dur = hevc->frame_dur;
11395 if (hevc->frame_width && hevc->frame_height)
11396 hevc->frame_ar = hevc->frame_height * 0x100 / hevc->frame_width;
11397
11398 if (i_only_flag)
11399 hevc->i_only = i_only_flag & 0xff;
11400 else if ((unsigned long) hevc->vh265_amstream_dec_info.param
11401 & 0x08)
11402 hevc->i_only = 0x7;
11403 else
11404 hevc->i_only = 0x0;
11405 hevc->error_watchdog_count = 0;
11406 hevc->sei_present_flag = 0;
11407 pts_unstable = ((unsigned long)hevc->vh265_amstream_dec_info.param
11408 & 0x40) >> 6;
11409 hevc_print(hevc, 0,
11410 "h265:pts_unstable=%d\n", pts_unstable);
11411 /*
11412 *TODO:FOR VERSION
11413 */
11414 hevc_print(hevc, 0,
11415 "h265: ver (%d,%d) decinfo: %dx%d rate=%d\n", h265_version,
11416 0, hevc->frame_width, hevc->frame_height, hevc->frame_dur);
11417
11418 if (hevc->frame_dur == 0)
11419 hevc->frame_dur = 96000 / 24;
11420
11421 INIT_KFIFO(hevc->display_q);
11422 INIT_KFIFO(hevc->newframe_q);
11423 INIT_KFIFO(hevc->pending_q);
11424
11425 for (i = 0; i < VF_POOL_SIZE; i++) {
11426 const struct vframe_s *vf = &hevc->vfpool[i];
11427
11428 hevc->vfpool[i].index = -1;
11429 kfifo_put(&hevc->newframe_q, vf);
11430 }
11431
11432
11433 ret = hevc_local_init(hevc);
11434
11435 return ret;
11436 }
11437 #ifdef MULTI_INSTANCE_SUPPORT
11438 static s32 vh265_init(struct vdec_s *vdec)
11439 {
11440 struct hevc_state_s *hevc = (struct hevc_state_s *)vdec->private;
11441 #else
11442 static s32 vh265_init(struct hevc_state_s *hevc)
11443 {
11444
11445 #endif
11446 int ret, size = -1;
11447 int fw_size = 0x1000 * 16;
11448 struct firmware_s *fw = NULL;
11449
11450 timer_setup(&hevc->timer, NULL, 0);
11451
11452 hevc->stat |= STAT_TIMER_INIT;
11453
11454 if (hevc->m_ins_flag) {
11455 #ifdef USE_UNINIT_SEMA
11456 sema_init(&hevc->h265_uninit_done_sema, 0);
11457 #endif
11458 INIT_WORK(&hevc->work, vh265_work);
11459 INIT_WORK(&hevc->timeout_work, vh265_timeout_work);
11460 }
11461
11462 if (vh265_local_init(hevc) < 0)
11463 return -EBUSY;
11464
11465 mutex_init(&hevc->chunks_mutex);
11466 INIT_WORK(&hevc->notify_work, vh265_notify_work);
11467 INIT_WORK(&hevc->set_clk_work, vh265_set_clk);
11468
11469 fw = vmalloc(sizeof(struct firmware_s) + fw_size);
11470 if (IS_ERR_OR_NULL(fw))
11471 return -ENOMEM;
11472
11473 if (hevc->mmu_enable)
11474 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11475 size = get_firmware_data(VIDEO_DEC_HEVC_MMU, fw->data);
11476 else {
11477 if (!hevc->is_4k) {
11478 /* if an older version of the fw was loaded, */
11479 /* we need to try loading the noswap fw because the */
11480 /* old fw package does not contain the swap fw. */
11481 size = get_firmware_data(
11482 VIDEO_DEC_HEVC_MMU_SWAP, fw->data);
11483 if (size < 0)
11484 size = get_firmware_data(
11485 VIDEO_DEC_HEVC_MMU, fw->data);
11486 else if (size)
11487 hevc->is_swap = true;
11488 } else
11489 size = get_firmware_data(VIDEO_DEC_HEVC_MMU,
11490 fw->data);
11491 }
11492 else
11493 size = get_firmware_data(VIDEO_DEC_HEVC, fw->data);
11494
11495 if (size < 0) {
11496 pr_err("get firmware fail.\n");
11497 vfree(fw);
11498 return -1;
11499 }
11500
11501 fw->len = size;
11502
11503 #ifdef SWAP_HEVC_UCODE
11504 if (!tee_enabled() && hevc->is_swap &&
11505 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11506 if (hevc->mmu_enable) {
11507 hevc->swap_size = (4 * (4 * SZ_1K)); /*max 4 swap code, each 0x400*/
11508 hevc->mc_cpu_addr =
11509 dma_alloc_coherent(amports_get_dma_device(),
11510 hevc->swap_size,
11511 &hevc->mc_dma_handle, GFP_KERNEL);
11512 if (!hevc->mc_cpu_addr) {
11513 amhevc_disable();
11514 pr_info("vh265 mmu swap ucode loaded fail.\n");
11515 return -ENOMEM;
11516 }
11517
11518 memcpy((u8 *) hevc->mc_cpu_addr, fw->data + SWAP_HEVC_OFFSET,
11519 hevc->swap_size);
11520
11521 hevc_print(hevc, 0,
11522 "vh265 mmu ucode swap loaded %x\n",
11523 hevc->mc_dma_handle);
11524 }
11525 }
11526 #endif
11527
11528 #ifdef MULTI_INSTANCE_SUPPORT
11529 if (hevc->m_ins_flag) {
11530 //hevc->timer.data = (ulong) hevc;
11531 hevc->timer.function = vh265_check_timer_func;
11532 hevc->timer.expires = jiffies + PUT_INTERVAL;
11533
11534 hevc->fw = fw;
11535 hevc->init_flag = 1;
11536
11537 return 0;
11538 }
11539 #endif
11540 amhevc_enable();
11541
11542 if (hevc->mmu_enable)
11543 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
11544 ret = amhevc_loadmc_ex(VFORMAT_HEVC, "h265_mmu", fw->data);
11545 else {
11546 if (!hevc->is_4k) {
11547 /* if an older version of the fw was loaded, */
11548 /* we need to try loading the noswap fw because the */
11549 /* old fw package does not contain the swap fw. */
11550 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11551 "hevc_mmu_swap", fw->data);
11552 if (ret < 0)
11553 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11554 "h265_mmu", fw->data);
11555 else
11556 hevc->is_swap = true;
11557 } else
11558 ret = amhevc_loadmc_ex(VFORMAT_HEVC,
11559 "h265_mmu", fw->data);
11560 }
11561 else
11562 ret = amhevc_loadmc_ex(VFORMAT_HEVC, NULL, fw->data);
11563
11564 if (ret < 0) {
11565 amhevc_disable();
11566 vfree(fw);
11567 pr_err("H265: the %s fw loading failed, err: %x\n",
11568 tee_enabled() ? "TEE" : "local", ret);
11569 return -EBUSY;
11570 }
11571
11572 vfree(fw);
11573
11574 hevc->stat |= STAT_MC_LOAD;
11575
11576 #ifdef DETREFILL_ENABLE
11577 if (hevc->is_swap &&
11578 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
11579 init_detrefill_buf(hevc);
11580 #endif
11581 /* enable AMRISC side protocol */
11582 vh265_prot_init(hevc);
11583
11584 if (vdec_request_threaded_irq(VDEC_IRQ_0, vh265_isr,
11585 vh265_isr_thread_fn,
11586 IRQF_ONESHOT,/*run thread on this irq disabled*/
11587 "vh265-irq", (void *)hevc)) {
11588 hevc_print(hevc, 0, "vh265 irq register error.\n");
11589 amhevc_disable();
11590 return -ENOENT;
11591 }
11592
11593 hevc->stat |= STAT_ISR_REG;
11594 hevc->provider_name = PROVIDER_NAME;
11595
11596 #ifdef MULTI_INSTANCE_SUPPORT
11597 vf_provider_init(&vh265_vf_prov, hevc->provider_name,
11598 &vh265_vf_provider, vdec);
11599 vf_reg_provider(&vh265_vf_prov);
11600 vf_notify_receiver(hevc->provider_name, VFRAME_EVENT_PROVIDER_START,
11601 NULL);
11602 if (hevc->frame_dur != 0) {
11603 if (!is_reset) {
11604 vf_notify_receiver(hevc->provider_name,
11605 VFRAME_EVENT_PROVIDER_FR_HINT,
11606 (void *)
11607 ((unsigned long)hevc->frame_dur));
11608 fr_hint_status = VDEC_HINTED;
11609 }
11610 } else
11611 fr_hint_status = VDEC_NEED_HINT;
11612 #else
11613 vf_provider_init(&vh265_vf_prov, PROVIDER_NAME, &vh265_vf_provider,
11614 hevc);
11615 vf_reg_provider(&vh265_vf_prov);
11616 vf_notify_receiver(PROVIDER_NAME, VFRAME_EVENT_PROVIDER_START, NULL);
11617 if (hevc->frame_dur != 0) {
11618 vf_notify_receiver(PROVIDER_NAME,
11619 VFRAME_EVENT_PROVIDER_FR_HINT,
11620 (void *)
11621 ((unsigned long)hevc->frame_dur));
11622 fr_hint_status = VDEC_HINTED;
11623 } else
11624 fr_hint_status = VDEC_NEED_HINT;
11625 #endif
11626 hevc->stat |= STAT_VF_HOOK;
11627
11628 //hevc->timer.data = (ulong) hevc;
11629 hevc->timer.function = vh265_check_timer_func;
11630 hevc->timer.expires = jiffies + PUT_INTERVAL;
11631
11632 add_timer(&hevc->timer);
11633
11634 hevc->stat |= STAT_TIMER_ARM;
11635
11636 if (use_cma) {
11637 #ifdef USE_UNINIT_SEMA
11638 sema_init(&hevc->h265_uninit_done_sema, 0);
11639 #endif
11640 if (h265_task == NULL) {
11641 sema_init(&h265_sema, 1);
11642 h265_task =
11643 kthread_run(h265_task_handle, hevc,
11644 "kthread_h265");
11645 }
11646 }
11647 /* hevc->stat |= STAT_KTHREAD; */
11648 #if 0
11649 if (get_dbg_flag(hevc) & H265_DEBUG_FORCE_CLK) {
11650 hevc_print(hevc, 0, "%s force clk\n", __func__);
11651 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL,
11652 READ_VREG(HEVC_IQIT_CLK_RST_CTRL) |
11653 ((1 << 2) | (1 << 1)));
11654 WRITE_VREG(HEVC_DBLK_CFG0,
11655 READ_VREG(HEVC_DBLK_CFG0) | ((1 << 2) |
11656 (1 << 1) | 0x3fff0000));/* 2,29:16 */
11657 WRITE_VREG(HEVC_SAO_CTRL1, READ_VREG(HEVC_SAO_CTRL1) |
11658 (1 << 2)); /* 2 */
11659 WRITE_VREG(HEVC_MPRED_CTRL1, READ_VREG(HEVC_MPRED_CTRL1) |
11660 (1 << 24)); /* 24 */
11661 WRITE_VREG(HEVC_STREAM_CONTROL,
11662 READ_VREG(HEVC_STREAM_CONTROL) |
11663 (1 << 15)); /* 15 */
11664 WRITE_VREG(HEVC_CABAC_CONTROL, READ_VREG(HEVC_CABAC_CONTROL) |
11665 (1 << 13)); /* 13 */
11666 WRITE_VREG(HEVC_PARSER_CORE_CONTROL,
11667 READ_VREG(HEVC_PARSER_CORE_CONTROL) |
11668 (1 << 15)); /* 15 */
11669 WRITE_VREG(HEVC_PARSER_INT_CONTROL,
11670 READ_VREG(HEVC_PARSER_INT_CONTROL) |
11671 (1 << 15)); /* 15 */
11672 WRITE_VREG(HEVC_PARSER_IF_CONTROL,
11673 READ_VREG(HEVC_PARSER_IF_CONTROL) | ((1 << 6) |
11674 (1 << 3) | (1 << 1))); /* 6, 3, 1 */
11675 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, 0xffffffff); /* 31:0 */
11676 WRITE_VREG(HEVCD_MCRCC_CTL1, READ_VREG(HEVCD_MCRCC_CTL1) |
11677 (1 << 3)); /* 3 */
11678 }
11679 #endif
11680 #ifdef SWAP_HEVC_UCODE
11681 if (!tee_enabled() && hevc->is_swap &&
11682 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
11683 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->mc_dma_handle);
11684 /*pr_info("write swap buffer %x\n", (u32)(hevc->mc_dma_handle));*/
11685 }
11686 #endif
11687
11688 #ifndef MULTI_INSTANCE_SUPPORT
11689 set_vdec_func(&vh265_dec_status);
11690 #endif
11691 amhevc_start();
11692 hevc->stat |= STAT_VDEC_RUN;
11693 hevc->init_flag = 1;
11694 error_handle_threshold = 30;
11695 /* pr_info("%d, vh265_init, RP=0x%x\n",
11696 * __LINE__, READ_VREG(HEVC_STREAM_RD_PTR));
11697 */
11698
11699 return 0;
11700 }
11701
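/*
 * Single-instance teardown: optionally wait for the current slice to finish,
 * stop the AMRISC core, free the IRQ, delete the timer, unregister the vframe
 * provider, then let the h265 task free the picture list (use_cma) before
 * releasing the MMU buffers and the status structure.
 */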
11702 static int vh265_stop(struct hevc_state_s *hevc)
11703 {
11704 if (get_dbg_flag(hevc) &
11705 H265_DEBUG_WAIT_DECODE_DONE_WHEN_STOP) {
11706 int wait_timeout_count = 0;
11707
11708 while (READ_VREG(HEVC_DEC_STATUS_REG) ==
11709 HEVC_CODED_SLICE_SEGMENT_DAT &&
11710 wait_timeout_count < 10){
11711 wait_timeout_count++;
11712 msleep(20);
11713 }
11714 }
11715 if (hevc->stat & STAT_VDEC_RUN) {
11716 amhevc_stop();
11717 hevc->stat &= ~STAT_VDEC_RUN;
11718 }
11719
11720 if (hevc->stat & STAT_ISR_REG) {
11721 #ifdef MULTI_INSTANCE_SUPPORT
11722 if (!hevc->m_ins_flag)
11723 #endif
11724 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
11725 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11726 hevc->stat &= ~STAT_ISR_REG;
11727 }
11728
11729 hevc->stat &= ~STAT_TIMER_INIT;
11730 if (hevc->stat & STAT_TIMER_ARM) {
11731 del_timer_sync(&hevc->timer);
11732 hevc->stat &= ~STAT_TIMER_ARM;
11733 }
11734
11735 if (hevc->stat & STAT_VF_HOOK) {
11736 if (fr_hint_status == VDEC_HINTED) {
11737 vf_notify_receiver(hevc->provider_name,
11738 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11739 NULL);
11740 }
11741 fr_hint_status = VDEC_NO_NEED_HINT;
11742 vf_unreg_provider(&vh265_vf_prov);
11743 hevc->stat &= ~STAT_VF_HOOK;
11744 }
11745
11746 hevc_local_uninit(hevc);
11747
11748 if (use_cma) {
11749 hevc->uninit_list = 1;
11750 up(&h265_sema);
11751 #ifdef USE_UNINIT_SEMA
11752 down(&hevc->h265_uninit_done_sema);
11753 if (!IS_ERR(h265_task)) {
11754 kthread_stop(h265_task);
11755 h265_task = NULL;
11756 }
11757 #else
11758 while (hevc->uninit_list) /* wait uninit complete */
11759 msleep(20);
11760 #endif
11761
11762 }
11763 hevc->init_flag = 0;
11764 hevc->first_sc_checked = 0;
11765 cancel_work_sync(&hevc->notify_work);
11766 cancel_work_sync(&hevc->set_clk_work);
11767 uninit_mmu_buffers(hevc);
11768 amhevc_disable();
11769
11770 //pr_err("[%s line %d] hevc->gvs=0x%p operation\n",__func__, __LINE__, hevc->gvs);
11771 if (hevc->gvs)
11772 kfree(hevc->gvs);
11773 hevc->gvs = NULL;
11774
11775 return 0;
11776 }
11777
11778 #ifdef MULTI_INSTANCE_SUPPORT
11779 static void reset_process_time(struct hevc_state_s *hevc)
11780 {
11781 if (hevc->start_process_time) {
11782 unsigned int process_time =
11783 1000 * (jiffies - hevc->start_process_time) / HZ;
11784 hevc->start_process_time = 0;
11785 if (process_time > max_process_time[hevc->index])
11786 max_process_time[hevc->index] = process_time;
11787 }
11788 }
11789
11790 static void start_process_time(struct hevc_state_s *hevc)
11791 {
11792 hevc->start_process_time = jiffies;
11793 hevc->decode_timeout_count = 2;
11794 hevc->last_lcu_idx = 0;
11795 }
11796
11797 static void restart_process_time(struct hevc_state_s *hevc)
11798 {
11799 hevc->start_process_time = jiffies;
11800 hevc->decode_timeout_count = 2;
11801 }
11802
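/*
 * Decode timeout handling for multi-instance mode: stop the core, mark the
 * current picture as done and hand the remaining cleanup to timeout_work,
 * unless vh265_work is already pending and can handle it instead.
 */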
11803 static void timeout_process(struct hevc_state_s *hevc)
11804 {
11805 /*
11806 * If vh265_work arrives right at this timeout point,
11807 * let it handle the scenario instead.
11808 */
11809 if (work_pending(&hevc->work))
11810 return;
11811
11812 hevc->timeout_num++;
11813 amhevc_stop();
11814 read_decode_info(hevc);
11815
11816 hevc_print(hevc,
11817 0, "%s decoder timeout\n", __func__);
11818 check_pic_decoded_error(hevc,
11819 hevc->pic_decoded_lcu_idx);
11820 hevc->decoded_poc = hevc->curr_POC;
11821 hevc->decoding_pic = NULL;
11822 hevc->dec_result = DEC_RESULT_DONE;
11823 reset_process_time(hevc);
11824
11825 if (work_pending(&hevc->work))
11826 return;
11827 vdec_schedule_work(&hevc->timeout_work);
11828 }
11829
11830 #ifdef CONSTRAIN_MAX_BUF_NUM
11831 static int get_vf_ref_only_buf_count(struct hevc_state_s *hevc)
11832 {
11833 struct PIC_s *pic;
11834 int i;
11835 int count = 0;
11836 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11837 pic = hevc->m_PIC[i];
11838 if (pic == NULL || pic->index == -1)
11839 continue;
11840 if (pic->output_mark == 0 && pic->referenced == 0
11841 && pic->output_ready == 1)
11842 count++;
11843 }
11844
11845 return count;
11846 }
11847
11848 static int get_used_buf_count(struct hevc_state_s *hevc)
11849 {
11850 struct PIC_s *pic;
11851 int i;
11852 int count = 0;
11853 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11854 pic = hevc->m_PIC[i];
11855 if (pic == NULL || pic->index == -1)
11856 continue;
11857 if (pic->output_mark != 0 || pic->referenced != 0
11858 || pic->output_ready != 0)
11859 count++;
11860 }
11861
11862 return count;
11863 }
11864 #endif
11865
11866
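/*
 * Return 1 when a free DPB slot exists (or the pic list is not built yet).
 * If nothing is free and the vframe receiver is inactive, force-release the
 * oldest un-referenced error picture, or flush the DPB when only referenced
 * error pictures remain, to avoid stalling on buffers.
 */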
11867 static unsigned char is_new_pic_available(struct hevc_state_s *hevc)
11868 {
11869 struct PIC_s *new_pic = NULL;
11870 struct PIC_s *pic;
11871 /* recycle un-used pic */
11872 int i;
11873 int ref_pic = 0;
11874 struct vdec_s *vdec = hw_to_vdec(hevc);
11875 /*return 1 if pic_list is not initialized yet*/
11876 if (hevc->pic_list_init_flag != 3)
11877 return 1;
11878
11879 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11880 pic = hevc->m_PIC[i];
11881 if (pic == NULL || pic->index == -1)
11882 continue;
11883 if (pic->referenced == 1)
11884 ref_pic++;
11885 if (pic->output_mark == 0 && pic->referenced == 0
11886 && pic->output_ready == 0
11887 && pic->vf_ref == 0
11888 ) {
11889 if (new_pic) {
11890 if (pic->POC < new_pic->POC)
11891 new_pic = pic;
11892 } else
11893 new_pic = pic;
11894 }
11895 }
11896 if (new_pic == NULL) {
11897 enum receviver_start_e state = RECEIVER_INACTIVE;
11898 if (vf_get_receiver(vdec->vf_provider_name)) {
11899 state =
11900 vf_notify_receiver(vdec->vf_provider_name,
11901 VFRAME_EVENT_PROVIDER_QUREY_STATE,
11902 NULL);
11903 if ((state == RECEIVER_STATE_NULL)
11904 || (state == RECEIVER_STATE_NONE))
11905 state = RECEIVER_INACTIVE;
11906 }
11907 if (state == RECEIVER_INACTIVE) {
11908 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11909 int poc = INVALID_POC;
11910 pic = hevc->m_PIC[i];
11911 if (pic == NULL || pic->index == -1)
11912 continue;
11913 if ((pic->referenced == 0) &&
11914 (pic->error_mark == 1) &&
11915 (pic->output_mark == 1)) {
11916 if (poc == INVALID_POC || (pic->POC < poc)) {
11917 new_pic = pic;
11918 poc = pic->POC;
11919 }
11920 }
11921 }
11922 if (new_pic) {
11923 new_pic->referenced = 0;
11924 new_pic->output_mark = 0;
11925 put_mv_buf(hevc, new_pic);
11926 hevc_print(hevc, 0, "force release error pic %d recieve_state %d \n", new_pic->POC, state);
11927 } else {
11928 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
11929 pic = hevc->m_PIC[i];
11930 if (pic == NULL || pic->index == -1)
11931 continue;
11932 if ((pic->referenced == 1) && (pic->error_mark == 1)) {
11933 flush_output(hevc, pic);
11934 hevc_print(hevc, 0, "DPB error, neeed fornce flush recieve_state %d \n", state);
11935 break;
11936 }
11937 }
11938 }
11939 }
11940 }
11941 return (new_pic != NULL) ? 1 : 0;
11942 }
11943
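/*
 * Multi-instance teardown counterpart of vh265_stop(): the picture list is
 * freed through a DEC_RESULT_FREE_CANVAS work item instead of the h265 task.
 */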
11944 static int vmh265_stop(struct hevc_state_s *hevc)
11945 {
11946 if (hevc->stat & STAT_TIMER_ARM) {
11947 del_timer_sync(&hevc->timer);
11948 hevc->stat &= ~STAT_TIMER_ARM;
11949 }
11950 if (hevc->stat & STAT_VDEC_RUN) {
11951 amhevc_stop();
11952 hevc->stat &= ~STAT_VDEC_RUN;
11953 }
11954 if (hevc->stat & STAT_ISR_REG) {
11955 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
11956 hevc->stat &= ~STAT_ISR_REG;
11957 }
11958
11959 if (hevc->stat & STAT_VF_HOOK) {
11960 if (fr_hint_status == VDEC_HINTED)
11961 vf_notify_receiver(hevc->provider_name,
11962 VFRAME_EVENT_PROVIDER_FR_END_HINT,
11963 NULL);
11964 fr_hint_status = VDEC_NO_NEED_HINT;
11965 vf_unreg_provider(&vh265_vf_prov);
11966 hevc->stat &= ~STAT_VF_HOOK;
11967 }
11968
11969 hevc_local_uninit(hevc);
11970
11971 if (hevc->gvs)
11972 kfree(hevc->gvs);
11973 hevc->gvs = NULL;
11974
11975 if (use_cma) {
11976 hevc->uninit_list = 1;
11977 reset_process_time(hevc);
11978 hevc->dec_result = DEC_RESULT_FREE_CANVAS;
11979 vdec_schedule_work(&hevc->work);
11980 flush_work(&hevc->work);
11981 #ifdef USE_UNINIT_SEMA
11982 if (hevc->init_flag) {
11983 down(&hevc->h265_uninit_done_sema);
11984 }
11985 #else
11986 while (hevc->uninit_list) /* wait uninit complete */
11987 msleep(20);
11988 #endif
11989 }
11990 hevc->init_flag = 0;
11991 hevc->first_sc_checked = 0;
11992 cancel_work_sync(&hevc->notify_work);
11993 cancel_work_sync(&hevc->set_clk_work);
11994 cancel_work_sync(&hevc->timeout_work);
11995 cancel_work_sync(&hevc->work);
11996 uninit_mmu_buffers(hevc);
11997
11998 vfree(hevc->fw);
11999 hevc->fw = NULL;
12000
12001 dump_log(hevc);
12002 return 0;
12003 }
12004
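/*
 * Debug helper: byte-wise sum of the current input chunk (mapped temporarily
 * when the block is not already virtually mapped); only the low 8 bits are
 * returned, which is enough for the log cross-checks it is used in.
 */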
12005 static unsigned char get_data_check_sum
12006 (struct hevc_state_s *hevc, int size)
12007 {
12008 int jj;
12009 int sum = 0;
12010 u8 *data = NULL;
12011
12012 if (!hevc->chunk->block->is_mapped)
12013 data = codec_mm_vmap(hevc->chunk->block->start +
12014 hevc->chunk->offset, size);
12015 else
12016 data = ((u8 *)hevc->chunk->block->start_virt) +
12017 hevc->chunk->offset;
12018
12019 for (jj = 0; jj < size; jj++)
12020 sum += data[jj];
12021
12022 if (!hevc->chunk->block->is_mapped)
12023 codec_mm_unmap_phyaddr(data);
12024 return sum;
12025 }
12026
12027 static void vh265_notify_work(struct work_struct *work)
12028 {
12029 struct hevc_state_s *hevc =
12030 container_of(work,
12031 struct hevc_state_s,
12032 notify_work);
12033 struct vdec_s *vdec = hw_to_vdec(hevc);
12034 #ifdef MULTI_INSTANCE_SUPPORT
12035 if (vdec->fr_hint_state == VDEC_NEED_HINT) {
12036 vf_notify_receiver(hevc->provider_name,
12037 VFRAME_EVENT_PROVIDER_FR_HINT,
12038 (void *)
12039 ((unsigned long)hevc->frame_dur));
12040 vdec->fr_hint_state = VDEC_HINTED;
12041 } else if (fr_hint_status == VDEC_NEED_HINT) {
12042 vf_notify_receiver(hevc->provider_name,
12043 VFRAME_EVENT_PROVIDER_FR_HINT,
12044 (void *)
12045 ((unsigned long)hevc->frame_dur));
12046 fr_hint_status = VDEC_HINTED;
12047 }
12048 #else
12049 if (fr_hint_status == VDEC_NEED_HINT) {
12050 vf_notify_receiver(PROVIDER_NAME,
12051 VFRAME_EVENT_PROVIDER_FR_HINT,
12052 (void *)
12053 ((unsigned long)hevc->frame_dur));
12054 fr_hint_status = VDEC_HINTED;
12055 }
12056 #endif
12057
12058 return;
12059 }
12060
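/*
 * Bottom half of the decoder state machine, driven by hevc->dec_result:
 * finish picture-list init, pull more data (GET_DATA/GET_DATA_RETRY), close a
 * decoded frame (DONE), retry on AGAIN, flush everything on EOS, or bail out
 * on FORCE_EXIT; finally release the HW cores and notify the vdec scheduler.
 */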
12061 static void vh265_work_implement(struct hevc_state_s *hevc,
12062 struct vdec_s *vdec, int from)
12063 {
12064 if (hevc->dec_result == DEC_RESULT_FREE_CANVAS) {
12065 /*USE_BUF_BLOCK*/
12066 uninit_pic_list(hevc);
12067 hevc->uninit_list = 0;
12068 #ifdef USE_UNINIT_SEMA
12069 up(&hevc->h265_uninit_done_sema);
12070 #endif
12071 return;
12072 }
12073
12074 /* finished decoding one frame or error,
12075 * notify vdec core to switch context
12076 */
12077 if (hevc->pic_list_init_flag == 1
12078 && (hevc->dec_result != DEC_RESULT_FORCE_EXIT)) {
12079 hevc->pic_list_init_flag = 2;
12080 init_pic_list(hevc);
12081 init_pic_list_hw(hevc);
12082 init_buf_spec(hevc);
12083 hevc_print(hevc, 0,
12084 "set pic_list_init_flag to 2\n");
12085
12086 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG, 0x1);
12087 return;
12088 }
12089
12090 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12091 "%s dec_result %d %x %x %x\n",
12092 __func__,
12093 hevc->dec_result,
12094 READ_VREG(HEVC_STREAM_LEVEL),
12095 READ_VREG(HEVC_STREAM_WR_PTR),
12096 READ_VREG(HEVC_STREAM_RD_PTR));
12097
12098 if (((hevc->dec_result == DEC_RESULT_GET_DATA) ||
12099 (hevc->dec_result == DEC_RESULT_GET_DATA_RETRY))
12100 && (hw_to_vdec(hevc)->next_status !=
12101 VDEC_STATUS_DISCONNECTED)) {
12102 if (!vdec_has_more_input(vdec)) {
12103 hevc->dec_result = DEC_RESULT_EOS;
12104 vdec_schedule_work(&hevc->work);
12105 return;
12106 }
12107 if (!input_frame_based(vdec)) {
12108 int r = vdec_sync_input(vdec);
12109 if (r >= 0x200) {
12110 WRITE_VREG(HEVC_DECODE_SIZE,
12111 READ_VREG(HEVC_DECODE_SIZE) + r);
12112
12113 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12114 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x size 0x%x\n",
12115 __func__,
12116 READ_VREG(HEVC_STREAM_LEVEL),
12117 READ_VREG(HEVC_STREAM_WR_PTR),
12118 READ_VREG(HEVC_STREAM_RD_PTR),
12119 READ_VREG(HEVC_MPC_E), r);
12120
12121 start_process_time(hevc);
12122 if (READ_VREG(HEVC_DEC_STATUS_REG)
12123 == HEVC_DECODE_BUFEMPTY2)
12124 WRITE_VREG(HEVC_DEC_STATUS_REG,
12125 HEVC_ACTION_DONE);
12126 else
12127 WRITE_VREG(HEVC_DEC_STATUS_REG,
12128 HEVC_ACTION_DEC_CONT);
12129 } else {
12130 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12131 vdec_schedule_work(&hevc->work);
12132 }
12133 return;
12134 }
12135
12136 /*below for frame_base*/
12137 if (hevc->dec_result == DEC_RESULT_GET_DATA) {
12138 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12139 "%s DEC_RESULT_GET_DATA %x %x %x mpc %x\n",
12140 __func__,
12141 READ_VREG(HEVC_STREAM_LEVEL),
12142 READ_VREG(HEVC_STREAM_WR_PTR),
12143 READ_VREG(HEVC_STREAM_RD_PTR),
12144 READ_VREG(HEVC_MPC_E));
12145 mutex_lock(&hevc->chunks_mutex);
12146 vdec_vframe_dirty(vdec, hevc->chunk);
12147 hevc->chunk = NULL;
12148 mutex_unlock(&hevc->chunks_mutex);
12149 vdec_clean_input(vdec);
12150 }
12151
12152 /*if (is_new_pic_available(hevc)) {*/
12153 if (run_ready(vdec, VDEC_HEVC)) {
12154 int r;
12155 int decode_size;
12156 r = vdec_prepare_input(vdec, &hevc->chunk);
12157 if (r < 0) {
12158 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12159
12160 hevc_print(hevc,
12161 PRINT_FLAG_VDEC_DETAIL,
12162 "amvdec_vh265: Insufficient data\n");
12163
12164 vdec_schedule_work(&hevc->work);
12165 return;
12166 }
12167 hevc->dec_result = DEC_RESULT_NONE;
12168 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12169 "%s: chunk size 0x%x sum 0x%x mpc %x\n",
12170 __func__, r,
12171 (get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS) ?
12172 get_data_check_sum(hevc, r) : 0,
12173 READ_VREG(HEVC_MPC_E));
12174
12175 if (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) {
12176 int jj;
12177 u8 *data = NULL;
12178
12179 if (!hevc->chunk->block->is_mapped)
12180 data = codec_mm_vmap(
12181 hevc->chunk->block->start +
12182 hevc->chunk->offset, r);
12183 else
12184 data = ((u8 *)
12185 hevc->chunk->block->start_virt)
12186 + hevc->chunk->offset;
12187
12188 for (jj = 0; jj < r; jj++) {
12189 if ((jj & 0xf) == 0)
12190 hevc_print(hevc,
12191 PRINT_FRAMEBASE_DATA,
12192 "%06x:", jj);
12193 hevc_print_cont(hevc,
12194 PRINT_FRAMEBASE_DATA,
12195 "%02x ", data[jj]);
12196 if (((jj + 1) & 0xf) == 0)
12197 hevc_print_cont(hevc,
12198 PRINT_FRAMEBASE_DATA,
12199 "\n");
12200 }
12201
12202 if (!hevc->chunk->block->is_mapped)
12203 codec_mm_unmap_phyaddr(data);
12204 }
12205
12206 decode_size = hevc->chunk->size +
12207 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12208 WRITE_VREG(HEVC_DECODE_SIZE,
12209 READ_VREG(HEVC_DECODE_SIZE) + decode_size);
12210
12211 vdec_enable_input(vdec);
12212
12213 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12214 "%s: mpc %x\n",
12215 __func__, READ_VREG(HEVC_MPC_E));
12216
12217 start_process_time(hevc);
12218 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12219 } else{
12220 hevc->dec_result = DEC_RESULT_GET_DATA_RETRY;
12221
12222 /*hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12223 * "amvdec_vh265: Insufficient data\n");
12224 */
12225
12226 vdec_schedule_work(&hevc->work);
12227 }
12228 return;
12229 } else if (hevc->dec_result == DEC_RESULT_DONE) {
12230 /* if (!hevc->ctx_valid)
12231 hevc->ctx_valid = 1; */
12232 decode_frame_count[hevc->index]++;
12233 #ifdef DETREFILL_ENABLE
12234 if (hevc->is_swap &&
12235 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM) {
12236 if (hevc->delrefill_check == 2) {
12237 delrefill(hevc);
12238 amhevc_stop();
12239 }
12240 }
12241 #endif
12242 if (hevc->mmu_enable && ((hevc->double_write_mode & 0x10) == 0)) {
12243 hevc->used_4k_num =
12244 READ_VREG(HEVC_SAO_MMU_STATUS) >> 16;
12245 if (hevc->used_4k_num >= 0 &&
12246 hevc->cur_pic &&
12247 hevc->cur_pic->scatter_alloc
12248 == 1) {
12249 hevc_print(hevc, H265_DEBUG_BUFMGR_MORE,
12250 "%s pic index %d scatter_alloc %d page_start %d\n",
12251 "decoder_mmu_box_free_idx_tail",
12252 hevc->cur_pic->index,
12253 hevc->cur_pic->scatter_alloc,
12254 hevc->used_4k_num);
12255 if (hevc->m_ins_flag)
12256 hevc_mmu_dma_check(hw_to_vdec(hevc));
12257 decoder_mmu_box_free_idx_tail(
12258 hevc->mmu_box,
12259 hevc->cur_pic->index,
12260 hevc->used_4k_num);
12261 hevc->cur_pic->scatter_alloc = 2;
12262 }
12263 }
12264 hevc->pic_decoded_lcu_idx =
12265 READ_VREG(HEVC_PARSER_LCU_START)
12266 & 0xffffff;
12267
12268 if (vdec->master == NULL && vdec->slave == NULL &&
12269 hevc->empty_flag == 0) {
12270 hevc->over_decode =
12271 (READ_VREG(HEVC_SHIFT_STATUS) >> 15) & 0x1;
12272 if (hevc->over_decode)
12273 hevc_print(hevc, 0,
12274 "!!!Over decode\n");
12275 }
12276
12277 if (is_log_enable(hevc))
12278 add_log(hevc,
12279 "%s dec_result %d lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x",
12280 __func__,
12281 hevc->dec_result,
12282 hevc->pic_decoded_lcu_idx,
12283 hevc->used_4k_num,
12284 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
12285 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
12286 hevc->start_shift_bytes
12287 );
12288
12289 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12290 "%s dec_result %d (%x %x %x) lcu %d used_mmu %d shiftbyte 0x%x decbytes 0x%x\n",
12291 __func__,
12292 hevc->dec_result,
12293 READ_VREG(HEVC_STREAM_LEVEL),
12294 READ_VREG(HEVC_STREAM_WR_PTR),
12295 READ_VREG(HEVC_STREAM_RD_PTR),
12296 hevc->pic_decoded_lcu_idx,
12297 hevc->used_4k_num,
12298 READ_VREG(HEVC_SHIFT_BYTE_COUNT),
12299 READ_VREG(HEVC_SHIFT_BYTE_COUNT) -
12300 hevc->start_shift_bytes
12301 );
12302
12303 hevc->used_4k_num = -1;
12304
12305 check_pic_decoded_error(hevc,
12306 hevc->pic_decoded_lcu_idx);
12307 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12308 #if 1
12309 if (vdec->slave) {
12310 if (dv_debug & 0x1)
12311 vdec_set_flag(vdec->slave,
12312 VDEC_FLAG_SELF_INPUT_CONTEXT);
12313 else
12314 vdec_set_flag(vdec->slave,
12315 VDEC_FLAG_OTHER_INPUT_CONTEXT);
12316 }
12317 #else
12318 if (vdec->slave) {
12319 if (no_interleaved_el_slice)
12320 vdec_set_flag(vdec->slave,
12321 VDEC_FLAG_INPUT_KEEP_CONTEXT);
12322 /* this will move real HW pointer for input */
12323 else
12324 vdec_set_flag(vdec->slave, 0);
12325 /* this will not move real HW pointer
12326 *and SL layer decoding
12327 *will start from same stream position
12328 *as current BL decoder
12329 */
12330 }
12331 #endif
12332 #endif
12333 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12334 hevc->shift_byte_count_lo
12335 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12336 if (vdec->slave) {
12337 /*cur is base, found enhance*/
12338 struct hevc_state_s *hevc_el =
12339 (struct hevc_state_s *)
12340 vdec->slave->private;
12341 if (hevc_el)
12342 hevc_el->shift_byte_count_lo =
12343 hevc->shift_byte_count_lo;
12344 } else if (vdec->master) {
12345 /*cur is enhance, found base*/
12346 struct hevc_state_s *hevc_ba =
12347 (struct hevc_state_s *)
12348 vdec->master->private;
12349 if (hevc_ba)
12350 hevc_ba->shift_byte_count_lo =
12351 hevc->shift_byte_count_lo;
12352 }
12353 #endif
12354 mutex_lock(&hevc->chunks_mutex);
12355 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12356 hevc->chunk = NULL;
12357 mutex_unlock(&hevc->chunks_mutex);
12358 } else if (hevc->dec_result == DEC_RESULT_AGAIN) {
12359 /*
12360 stream base: stream buf empty or timeout
12361 frame base: vdec_prepare_input fail
12362 */
12363 if (!vdec_has_more_input(vdec)) {
12364 hevc->dec_result = DEC_RESULT_EOS;
12365 vdec_schedule_work(&hevc->work);
12366 return;
12367 }
12368 #ifdef AGAIN_HAS_THRESHOLD
12369 hevc->next_again_flag = 1;
12370 #endif
12371 } else if (hevc->dec_result == DEC_RESULT_EOS) {
12372 struct PIC_s *pic;
12373 hevc->eos = 1;
12374 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12375 if ((vdec->master || vdec->slave) &&
12376 aux_data_is_avaible(hevc))
12377 dolby_get_meta(hevc);
12378 #endif
12379 check_pic_decoded_error(hevc,
12380 hevc->pic_decoded_lcu_idx);
12381 pic = get_pic_by_POC(hevc, hevc->curr_POC);
12382 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12383 "%s: end of stream, last dec poc %d => 0x%pf\n",
12384 __func__, hevc->curr_POC, pic);
12385 flush_output(hevc, pic);
12386
12387 if (hevc->is_used_v4l)
12388 notify_v4l_eos(hw_to_vdec(hevc));
12389 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12390 hevc->shift_byte_count_lo
12391 = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12392 if (vdec->slave) {
12393 /*cur is base, found enhance*/
12394 struct hevc_state_s *hevc_el =
12395 (struct hevc_state_s *)
12396 vdec->slave->private;
12397 if (hevc_el)
12398 hevc_el->shift_byte_count_lo =
12399 hevc->shift_byte_count_lo;
12400 } else if (vdec->master) {
12401 /*cur is enhance, found base*/
12402 struct hevc_state_s *hevc_ba =
12403 (struct hevc_state_s *)
12404 vdec->master->private;
12405 if (hevc_ba)
12406 hevc_ba->shift_byte_count_lo =
12407 hevc->shift_byte_count_lo;
12408 }
12409 #endif
12410 mutex_lock(&hevc->chunks_mutex);
12411 vdec_vframe_dirty(hw_to_vdec(hevc), hevc->chunk);
12412 hevc->chunk = NULL;
12413 mutex_unlock(&hevc->chunks_mutex);
12414 } else if (hevc->dec_result == DEC_RESULT_FORCE_EXIT) {
12415 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12416 "%s: force exit\n",
12417 __func__);
12418 if (hevc->stat & STAT_VDEC_RUN) {
12419 amhevc_stop();
12420 hevc->stat &= ~STAT_VDEC_RUN;
12421 }
12422 if (hevc->stat & STAT_ISR_REG) {
12423 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK, 0);
12424 vdec_free_irq(VDEC_IRQ_0, (void *)hevc);
12425 hevc->stat &= ~STAT_ISR_REG;
12426 }
12427 hevc_print(hevc, 0, "%s: force exit end\n",
12428 __func__);
12429 }
12430
12431 if (hevc->stat & STAT_VDEC_RUN) {
12432 amhevc_stop();
12433 hevc->stat &= ~STAT_VDEC_RUN;
12434 }
12435
12436 if (hevc->stat & STAT_TIMER_ARM) {
12437 del_timer_sync(&hevc->timer);
12438 hevc->stat &= ~STAT_TIMER_ARM;
12439 }
12440
12441 wait_hevc_search_done(hevc);
12442 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12443 if (hevc->switch_dvlayer_flag) {
12444 if (vdec->slave)
12445 vdec_set_next_sched(vdec, vdec->slave);
12446 else if (vdec->master)
12447 vdec_set_next_sched(vdec, vdec->master);
12448 } else if (vdec->slave || vdec->master)
12449 vdec_set_next_sched(vdec, vdec);
12450 #endif
12451
12452 if (from == 1) {
12453 /* This is a timeout work */
12454 if (work_pending(&hevc->work)) {
12455 /*
12456 * The vh265_work arrives at the last second,
12457 * give it a chance to handle the scenario.
12458 */
12459 return;
12460 //cancel_work_sync(&hevc->work); //reserved for future consideration
12461 }
12462 }
12463
12464 /* mark itself has all HW resource released and input released */
12465 if (vdec->parallel_dec == 1)
12466 vdec_core_finish_run(vdec, CORE_MASK_HEVC);
12467 else
12468 vdec_core_finish_run(vdec, CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
12469
12470 if (hevc->is_used_v4l) {
12471 struct aml_vcodec_ctx *ctx =
12472 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12473
12474 if (ctx->param_sets_from_ucode &&
12475 !hevc->v4l_params_parsed)
12476 vdec_v4l_write_frame_sync(ctx);
12477 }
12478
12479 if (hevc->vdec_cb)
12480 hevc->vdec_cb(hw_to_vdec(hevc), hevc->vdec_cb_arg);
12481 }
12482
12483 static void vh265_work(struct work_struct *work)
12484 {
12485 struct hevc_state_s *hevc = container_of(work,
12486 struct hevc_state_s, work);
12487 struct vdec_s *vdec = hw_to_vdec(hevc);
12488
12489 vh265_work_implement(hevc, vdec, 0);
12490 }
12491
12492 static void vh265_timeout_work(struct work_struct *work)
12493 {
12494 struct hevc_state_s *hevc = container_of(work,
12495 struct hevc_state_s, timeout_work);
12496 struct vdec_s *vdec = hw_to_vdec(hevc);
12497
12498 if (work_pending(&hevc->work))
12499 return;
12500 vh265_work_implement(hevc, vdec, 1);
12501 }
12502
12503
12504 static int vh265_hw_ctx_restore(struct hevc_state_s *hevc)
12505 {
12506 /* new to do ... */
12507 vh265_prot_init(hevc);
12508 return 0;
12509 }
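/*
 * Scheduler gate: return the core mask this instance may run on, or 0 when it
 * must not be scheduled yet (stream level below pre_decode_buf_level, AGAIN
 * threshold not reached, display queue too deep, no free DPB slot, or the
 * v4l2 capture queue not ready).
 */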
12510 static unsigned long run_ready(struct vdec_s *vdec, unsigned long mask)
12511 {
12512 struct hevc_state_s *hevc =
12513 (struct hevc_state_s *)vdec->private;
12514 int tvp = vdec_secure(hw_to_vdec(hevc)) ?
12515 CODEC_MM_FLAGS_TVP : 0;
12516 bool ret = 0;
12517 if (step == 0x12)
12518 return 0;
12519 else if (step == 0x11)
12520 step = 0x12;
12521
12522 if (hevc->eos)
12523 return 0;
12524 if (!hevc->first_sc_checked && hevc->mmu_enable) {
12525 int size = decoder_mmu_box_sc_check(hevc->mmu_box, tvp);
12526 hevc->first_sc_checked =1;
12527 hevc_print(hevc, 0,
12528 "vh265 cached=%d need_size=%d speed= %d ms\n",
12529 size, (hevc->need_cache_size >> PAGE_SHIFT),
12530 (int)(get_jiffies_64() - hevc->sc_start_time) * 1000/HZ);
12531 }
12532 if (vdec_stream_based(vdec) && (hevc->init_flag == 0)
12533 && pre_decode_buf_level != 0) {
12534 u32 rp, wp, level;
12535
12536 rp = STBUF_READ(&vdec->vbuf, get_rp);
12537 wp = STBUF_READ(&vdec->vbuf, get_wp);
12538 if (wp < rp)
12539 level = vdec->input.size + wp - rp;
12540 else
12541 level = wp - rp;
12542
12543 if (level < pre_decode_buf_level)
12544 return 0;
12545 }
12546
12547 #ifdef AGAIN_HAS_THRESHOLD
12548 if (hevc->next_again_flag &&
12549 (!vdec_frame_based(vdec))) {
12550 u32 parser_wr_ptr =
12551 STBUF_READ(&vdec->vbuf, get_wp);
12552 if (parser_wr_ptr >= hevc->pre_parser_wr_ptr &&
12553 (parser_wr_ptr - hevc->pre_parser_wr_ptr) <
12554 again_threshold) {
12555 int r = vdec_sync_input(vdec);
12556 hevc_print(hevc,
12557 PRINT_FLAG_VDEC_DETAIL, "%s buf lelvel:%x\n", __func__, r);
12558 return 0;
12559 }
12560 }
12561 #endif
12562
12563 if (disp_vframe_valve_level &&
12564 kfifo_len(&hevc->display_q) >=
12565 disp_vframe_valve_level) {
12566 hevc->valve_count--;
12567 if (hevc->valve_count <= 0)
12568 hevc->valve_count = 2;
12569 else
12570 return 0;
12571 }
12572
12573 ret = is_new_pic_available(hevc);
12574 if (!ret) {
12575 hevc_print(hevc,
12576 PRINT_FLAG_VDEC_DETAIL, "%s=>%d\r\n",
12577 __func__, ret);
12578 }
12579
12580 #ifdef CONSTRAIN_MAX_BUF_NUM
12581 if (hevc->pic_list_init_flag == 3) {
12582 if (run_ready_max_vf_only_num > 0 &&
12583 get_vf_ref_only_buf_count(hevc) >=
12584 run_ready_max_vf_only_num
12585 )
12586 ret = 0;
12587 if (run_ready_display_q_num > 0 &&
12588 kfifo_len(&hevc->display_q) >=
12589 run_ready_display_q_num)
12590 ret = 0;
12591
12592 /* avoid consuming more buffers when
12593 * switching resolution */
12594 if (run_ready_max_buf_num == 0xff &&
12595 get_used_buf_count(hevc) >=
12596 get_work_pic_num(hevc))
12597 ret = 0;
12598 else if (run_ready_max_buf_num &&
12599 get_used_buf_count(hevc) >=
12600 run_ready_max_buf_num)
12601 ret = 0;
12602 }
12603 #endif
12604
12605 if (hevc->is_used_v4l) {
12606 struct aml_vcodec_ctx *ctx =
12607 (struct aml_vcodec_ctx *)(hevc->v4l2_ctx);
12608
12609 if (ctx->param_sets_from_ucode) {
12610 if (hevc->v4l_params_parsed) {
12611 if (!ctx->v4l_codec_dpb_ready &&
12612 v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12613 run_ready_min_buf_num)
12614 ret = 0;
12615 } else {
12616 if ((hevc->res_ch_flag == 1) &&
12617 ((ctx->state <= AML_STATE_INIT) ||
12618 (ctx->state >= AML_STATE_FLUSHING)))
12619 ret = 0;
12620 }
12621 } else if (!ctx->v4l_codec_dpb_ready) {
12622 if (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) <
12623 run_ready_min_buf_num)
12624 ret = 0;
12625 }
12626 }
12627
12628 if (ret)
12629 not_run_ready[hevc->index] = 0;
12630 else
12631 not_run_ready[hevc->index]++;
12632 if (vdec->parallel_dec == 1)
12633 return ret ? (CORE_MASK_HEVC) : 0;
12634 else
12635 return ret ? (CORE_MASK_VDEC_1 | CORE_MASK_HEVC) : 0;
12636 }
12637
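/*
 * Entry point called by the vdec scheduler once run_ready() granted the
 * cores: prepare the input chunk, (re)load the HEVC firmware if needed,
 * restore the protocol registers, program the decode size and start the
 * AMRISC core with the watchdog timer armed.
 */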
12638 static void run(struct vdec_s *vdec, unsigned long mask,
12639 void (*callback)(struct vdec_s *, void *), void *arg)
12640 {
12641 struct hevc_state_s *hevc =
12642 (struct hevc_state_s *)vdec->private;
12643 int r, loadr = 0;
12644 unsigned char check_sum = 0;
12645
12646 run_count[hevc->index]++;
12647 hevc->vdec_cb_arg = arg;
12648 hevc->vdec_cb = callback;
12649 hevc->aux_data_dirty = 1;
12650 hevc_reset_core(vdec);
12651
12652 #ifdef AGAIN_HAS_THRESHOLD
12653 if (vdec_stream_based(vdec)) {
12654 hevc->pre_parser_wr_ptr =
12655 STBUF_READ(&vdec->vbuf, get_wp);
12656 hevc->next_again_flag = 0;
12657 }
12658 #endif
12659 r = vdec_prepare_input(vdec, &hevc->chunk);
12660 if (r < 0) {
12661 input_empty[hevc->index]++;
12662 hevc->dec_result = DEC_RESULT_AGAIN;
12663 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL,
12664 "ammvdec_vh265: Insufficient data\n");
12665
12666 vdec_schedule_work(&hevc->work);
12667 return;
12668 }
12669 input_empty[hevc->index] = 0;
12670 hevc->dec_result = DEC_RESULT_NONE;
12671 if (vdec_frame_based(vdec) &&
12672 ((get_dbg_flag(hevc) & PRINT_FLAG_VDEC_STATUS)
12673 || is_log_enable(hevc)))
12674 check_sum = get_data_check_sum(hevc, r);
12675
12676 if (is_log_enable(hevc))
12677 add_log(hevc,
12678 "%s: size 0x%x sum 0x%x shiftbyte 0x%x",
12679 __func__, r,
12680 check_sum,
12681 READ_VREG(HEVC_SHIFT_BYTE_COUNT)
12682 );
12683 hevc->start_shift_bytes = READ_VREG(HEVC_SHIFT_BYTE_COUNT);
12684 hevc_print(hevc, PRINT_FLAG_VDEC_STATUS,
12685 "%s: size 0x%x sum 0x%x (%x %x %x %x %x) byte count %x\n",
12686 __func__, r,
12687 check_sum,
12688 READ_VREG(HEVC_STREAM_LEVEL),
12689 READ_VREG(HEVC_STREAM_WR_PTR),
12690 READ_VREG(HEVC_STREAM_RD_PTR),
12691 STBUF_READ(&vdec->vbuf, get_rp),
12692 STBUF_READ(&vdec->vbuf, get_wp),
12693 hevc->start_shift_bytes
12694 );
12695 if ((get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA) &&
12696 input_frame_based(vdec)) {
12697 int jj;
12698 u8 *data = NULL;
12699
12700 if (!hevc->chunk->block->is_mapped)
12701 data = codec_mm_vmap(hevc->chunk->block->start +
12702 hevc->chunk->offset, r);
12703 else
12704 data = ((u8 *)hevc->chunk->block->start_virt)
12705 + hevc->chunk->offset;
12706
12707 for (jj = 0; jj < r; jj++) {
12708 if ((jj & 0xf) == 0)
12709 hevc_print(hevc, PRINT_FRAMEBASE_DATA,
12710 "%06x:", jj);
12711 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12712 "%02x ", data[jj]);
12713 if (((jj + 1) & 0xf) == 0)
12714 hevc_print_cont(hevc, PRINT_FRAMEBASE_DATA,
12715 "\n");
12716 }
12717
12718 if (!hevc->chunk->block->is_mapped)
12719 codec_mm_unmap_phyaddr(data);
12720 }
12721 if (vdec->mc_loaded) {
12722 /* firmware has been loaded before and has not
12723 * changed to another type, so skip reloading.
12724 */
12726 if (tee_enabled() && hevc->is_swap &&
12727 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12728 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2, hevc->swap_addr);
12729 } else {
12730 if (hevc->mmu_enable)
12731 if (get_cpu_major_id() > AM_MESON_CPU_MAJOR_ID_GXM)
12732 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12733 "h265_mmu", hevc->fw->data);
12734 else {
12735 if (!hevc->is_4k) {
12736 /* if an older version of the fw was loaded, */
12737 /* we need to try loading the noswap fw because the */
12738 /* old fw package does not contain the swap fw. */
12739 loadr = amhevc_vdec_loadmc_ex(
12740 VFORMAT_HEVC, vdec,
12741 "hevc_mmu_swap",
12742 hevc->fw->data);
12743 if (loadr < 0)
12744 loadr = amhevc_vdec_loadmc_ex(
12745 VFORMAT_HEVC, vdec,
12746 "h265_mmu",
12747 hevc->fw->data);
12748 else
12749 hevc->is_swap = true;
12750 } else
12751 loadr = amhevc_vdec_loadmc_ex(
12752 VFORMAT_HEVC, vdec,
12753 "h265_mmu", hevc->fw->data);
12754 }
12755 else
12756 loadr = amhevc_vdec_loadmc_ex(VFORMAT_HEVC, vdec,
12757 NULL, hevc->fw->data);
12758 if (loadr < 0) {
12759 amhevc_disable();
12760 hevc_print(hevc, 0, "H265: the %s fw loading failed, err: %x\n",
12761 tee_enabled() ? "TEE" : "local", loadr);
12762 hevc->dec_result = DEC_RESULT_FORCE_EXIT;
12763 vdec_schedule_work(&hevc->work);
12764 return;
12765 }
12766
12767 if (tee_enabled() && hevc->is_swap &&
12768 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12769 hevc->swap_addr = READ_VREG(HEVC_STREAM_SWAP_BUFFER2);
12770 #ifdef DETREFILL_ENABLE
12771 if (hevc->is_swap &&
12772 get_cpu_major_id() <= AM_MESON_CPU_MAJOR_ID_GXM)
12773 init_detrefill_buf(hevc);
12774 #endif
12775 vdec->mc_loaded = 1;
12776 vdec->mc_type = VFORMAT_HEVC;
12777 }
12778 if (vh265_hw_ctx_restore(hevc) < 0) {
12779 vdec_schedule_work(&hevc->work);
12780 return;
12781 }
12782 vdec_enable_input(vdec);
12783
12784 WRITE_VREG(HEVC_DEC_STATUS_REG, HEVC_ACTION_DONE);
12785
12786 if (vdec_frame_based(vdec)) {
12787 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT, 0);
12788 r = hevc->chunk->size +
12789 (hevc->chunk->offset & (VDEC_FIFO_ALIGN - 1));
12790 hevc->decode_size = r;
12791 if (vdec->mvfrm)
12792 vdec->mvfrm->frame_size = hevc->chunk->size;
12793 }
12794 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
12795 else {
12796 if (vdec->master || vdec->slave)
12797 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT,
12798 hevc->shift_byte_count_lo);
12799 }
12800 #endif
12801 WRITE_VREG(HEVC_DECODE_SIZE, r);
12802 /*WRITE_VREG(HEVC_DECODE_COUNT, hevc->decode_idx);*/
12803 hevc->init_flag = 1;
12804
12805 if (hevc->pic_list_init_flag == 3)
12806 init_pic_list_hw(hevc);
12807
12808 backup_decode_state(hevc);
12809
12810 start_process_time(hevc);
12811 mod_timer(&hevc->timer, jiffies);
12812 hevc->stat |= STAT_TIMER_ARM;
12813 hevc->stat |= STAT_ISR_REG;
12814 if (vdec->mvfrm)
12815 vdec->mvfrm->hw_decode_start = local_clock();
12816 amhevc_start();
12817 hevc->stat |= STAT_VDEC_RUN;
12818 }
12819
12820 static void aml_free_canvas(struct vdec_s *vdec)
12821 {
12822 int i;
12823 struct hevc_state_s *hevc =
12824 (struct hevc_state_s *)vdec->private;
12825
12826 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
12827 struct PIC_s *pic = hevc->m_PIC[i];
12828
12829 if (pic) {
12830 if (vdec->parallel_dec == 1) {
12831 vdec->free_canvas_ex(pic->y_canvas_index, vdec->id);
12832 vdec->free_canvas_ex(pic->uv_canvas_index, vdec->id);
12833 }
12834 }
12835 }
12836 }
12837
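/*
 * Reset callback: cancel pending work, stop the core and timer, drop MV
 * buffers and canvases, and re-run the local init so the instance restarts
 * from a clean picture list.
 */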
12838 static void reset(struct vdec_s *vdec)
12839 {
12840 struct hevc_state_s *hevc =
12841 (struct hevc_state_s *)vdec->private;
12842 int i;
12843
12844 cancel_work_sync(&hevc->work);
12845 cancel_work_sync(&hevc->notify_work);
12846 if (hevc->stat & STAT_VDEC_RUN) {
12847 amhevc_stop();
12848 hevc->stat &= ~STAT_VDEC_RUN;
12849 }
12850
12851 if (hevc->stat & STAT_TIMER_ARM) {
12852 del_timer_sync(&hevc->timer);
12853 hevc->stat &= ~STAT_TIMER_ARM;
12854 }
12855 hevc->dec_result = DEC_RESULT_NONE;
12856 reset_process_time(hevc);
12857 hevc->pic_list_init_flag = 0;
12858 dealloc_mv_bufs(hevc);
12859 aml_free_canvas(vdec);
12860 hevc_local_uninit(hevc);
12861 if (vh265_local_init(hevc) < 0)
12862 pr_debug(" %s local init fail\n", __func__);
12863 for (i = 0; i < BUF_POOL_SIZE; i++) {
12864 hevc->m_BUF[i].start_adr = 0;
12865 }
12866
12867 hevc_print(hevc, PRINT_FLAG_VDEC_DETAIL, "%s\r\n", __func__);
12868 }
12869
12870 static irqreturn_t vh265_irq_cb(struct vdec_s *vdec, int irq)
12871 {
12872 struct hevc_state_s *hevc =
12873 (struct hevc_state_s *)vdec->private;
12874
12875 return vh265_isr(0, hevc);
12876 }
12877
12878 static irqreturn_t vh265_threaded_irq_cb(struct vdec_s *vdec, int irq)
12879 {
12880 struct hevc_state_s *hevc =
12881 (struct hevc_state_s *)vdec->private;
12882
12883 return vh265_isr_thread_fn(0, hevc);
12884 }
12885 #endif
12886
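/*
 * Legacy (single-instance) probe: allocate the global hevc state, set up MMU
 * and workspace buffers, clear the workspace for non-secure streams, copy the
 * amstream sys info and register the dec_status/trickmode/isreset hooks
 * before calling vh265_init().
 */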
12887 static int amvdec_h265_probe(struct platform_device *pdev)
12888 {
12889 #ifdef MULTI_INSTANCE_SUPPORT
12890 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
12891 #else
12892 struct vdec_dev_reg_s *pdata =
12893 (struct vdec_dev_reg_s *)pdev->dev.platform_data;
12894 #endif
12895 char *tmpbuf;
12896 int ret;
12897 struct hevc_state_s *hevc;
12898
12899 hevc = vmalloc(sizeof(struct hevc_state_s));
12900 if (hevc == NULL) {
12901 hevc_print(hevc, 0, "%s vmalloc hevc failed\r\n", __func__);
12902 return -ENOMEM;
12903 }
12904 gHevc = hevc;
12905 if ((debug & H265_NO_CHANG_DEBUG_FLAG_IN_CODE) == 0)
12906 debug &= (~(H265_DEBUG_DIS_LOC_ERROR_PROC |
12907 H265_DEBUG_DIS_SYS_ERROR_PROC));
12908 memset(hevc, 0, sizeof(struct hevc_state_s));
12909 if (get_dbg_flag(hevc))
12910 hevc_print(hevc, 0, "%s\r\n", __func__);
12911 mutex_lock(&vh265_mutex);
12912
12913 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
12914 (parser_sei_enable & 0x100) == 0)
12915 parser_sei_enable = 7; /*old 1*/
12916 hevc->m_ins_flag = 0;
12917 hevc->init_flag = 0;
12918 hevc->first_sc_checked = 0;
12919 hevc->uninit_list = 0;
12920 hevc->fatal_error = 0;
12921 hevc->show_frame_num = 0;
12922 hevc->frameinfo_enable = 1;
12923 #ifdef MULTI_INSTANCE_SUPPORT
12924 hevc->platform_dev = pdev;
12925 platform_set_drvdata(pdev, pdata);
12926 #endif
12927
12928 if (pdata == NULL) {
12929 hevc_print(hevc, 0,
12930 "\namvdec_h265 memory resource undefined.\n");
12931 vfree(hevc);
12932 mutex_unlock(&vh265_mutex);
12933 return -EFAULT;
12934 }
12935 if (mmu_enable_force == 0) {
12936 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL
12937 || double_write_mode == 0x10)
12938 hevc->mmu_enable = 0;
12939 else
12940 hevc->mmu_enable = 1;
12941 }
12942 if (init_mmu_buffers(hevc)) {
12943 hevc_print(hevc, 0,
12944 "\n 265 mmu init failed!\n");
12945 vfree(hevc);
12946 mutex_unlock(&vh265_mutex);
12947 return -EFAULT;
12948 }
12949
12950 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box, BMMU_WORKSPACE_ID,
12951 work_buf_size, DRIVER_NAME, &hevc->buf_start);
12952 if (ret < 0) {
12953 uninit_mmu_buffers(hevc);
12954 vfree(hevc);
12955 mutex_unlock(&vh265_mutex);
12956 return ret;
12957 }
12958 hevc->buf_size = work_buf_size;
12959
12960
12961 if (!vdec_secure(pdata)) {
12962 tmpbuf = (char *)codec_mm_phys_to_virt(hevc->buf_start);
12963 if (tmpbuf) {
12964 memset(tmpbuf, 0, work_buf_size);
12965 dma_sync_single_for_device(amports_get_dma_device(),
12966 hevc->buf_start,
12967 work_buf_size, DMA_TO_DEVICE);
12968 } else {
12969 tmpbuf = codec_mm_vmap(hevc->buf_start,
12970 work_buf_size);
12971 if (tmpbuf) {
12972 memset(tmpbuf, 0, work_buf_size);
12973 dma_sync_single_for_device(
12974 amports_get_dma_device(),
12975 hevc->buf_start,
12976 work_buf_size,
12977 DMA_TO_DEVICE);
12978 codec_mm_unmap_phyaddr(tmpbuf);
12979 }
12980 }
12981 }
12982
12983 if (get_dbg_flag(hevc)) {
12984 hevc_print(hevc, 0,
12985 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
12986 hevc->buf_start, hevc->buf_size);
12987 }
12988
12989 if (pdata->sys_info)
12990 hevc->vh265_amstream_dec_info = *pdata->sys_info;
12991 else {
12992 hevc->vh265_amstream_dec_info.width = 0;
12993 hevc->vh265_amstream_dec_info.height = 0;
12994 hevc->vh265_amstream_dec_info.rate = 30;
12995 }
12996 #ifndef MULTI_INSTANCE_SUPPORT
12997 if (pdata->flag & DEC_FLAG_HEVC_WORKAROUND) {
12998 workaround_enable |= 3;
12999 hevc_print(hevc, 0,
13000 "amvdec_h265 HEVC_WORKAROUND flag set.\n");
13001 } else
13002 workaround_enable &= ~3;
13003 #endif
13004 hevc->cma_dev = pdata->cma_dev;
13005 vh265_vdec_info_init(hevc);
13006
13007 #ifdef MULTI_INSTANCE_SUPPORT
13008 pdata->private = hevc;
13009 pdata->dec_status = vh265_dec_status;
13010 pdata->set_trickmode = vh265_set_trickmode;
13011 pdata->set_isreset = vh265_set_isreset;
13012 is_reset = 0;
13013 if (vh265_init(pdata) < 0) {
13014 #else
13015 if (vh265_init(hevc) < 0) {
13016 #endif
13017 hevc_print(hevc, 0,
13018 "\namvdec_h265 init failed.\n");
13019 hevc_local_uninit(hevc);
13020 if (hevc->gvs)
13021 kfree(hevc->gvs);
13022 hevc->gvs = NULL;
13023 uninit_mmu_buffers(hevc);
13024 vfree(hevc);
13025 pdata->dec_status = NULL;
13026 mutex_unlock(&vh265_mutex);
13027 return -ENODEV;
13028 }
13029 /*set the max clk for smooth playing...*/
13030 hevc_source_changed(VFORMAT_HEVC,
13031 3840, 2160, 60);
13032 mutex_unlock(&vh265_mutex);
13033
13034 return 0;
13035 }
13036
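/*
 * Single-instance remove: stop the decoder, drop the source clock
 * request, print PTS statistics when DEBUG_PTS is set and release the
 * driver state, all under vh265_mutex.
 */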
13037 static int amvdec_h265_remove(struct platform_device *pdev)
13038 {
13039 struct hevc_state_s *hevc = gHevc;
13040
13041 if (get_dbg_flag(hevc))
13042 hevc_print(hevc, 0, "%s\r\n", __func__);
13043
13044 mutex_lock(&vh265_mutex);
13045
13046 vh265_stop(hevc);
13047
13048 hevc_source_changed(VFORMAT_HEVC, 0, 0, 0);
13049
13051 #ifdef DEBUG_PTS
13052 hevc_print(hevc, 0,
13053 "pts missed %ld, pts hit %ld, duration %d\n",
13054 hevc->pts_missed, hevc->pts_hit, hevc->frame_dur);
13055 #endif
13056
13057 vfree(hevc);
13058 hevc = NULL;
13059 gHevc = NULL;
13060
13061 mutex_unlock(&vh265_mutex);
13062
13063 return 0;
13064 }
13065 /****************************************/
13066 #ifdef CONFIG_PM
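/* System sleep hooks: forward suspend/resume to the shared amhevc helpers. */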
13067 static int h265_suspend(struct device *dev)
13068 {
13069 amhevc_suspend(to_platform_device(dev), dev->power.power_state);
13070 return 0;
13071 }
13072
13073 static int h265_resume(struct device *dev)
13074 {
13075 amhevc_resume(to_platform_device(dev));
13076 return 0;
13077 }
13078
13079 static const struct dev_pm_ops h265_pm_ops = {
13080 SET_SYSTEM_SLEEP_PM_OPS(h265_suspend, h265_resume)
13081 };
13082 #endif
13083
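/* Single-instance platform driver. */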
13084 static struct platform_driver amvdec_h265_driver = {
13085 .probe = amvdec_h265_probe,
13086 .remove = amvdec_h265_remove,
13087 .driver = {
13088 .name = DRIVER_NAME,
13089 #ifdef CONFIG_PM
13090 .pm = &h265_pm_ops,
13091 #endif
13092 }
13093 };
13094
13095 #ifdef MULTI_INSTANCE_SUPPORT
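/*
 * Debug hook wired to vdec_s->dump_state: prints stream geometry,
 * per-instance counters, the working/MV buffer pools and key HEVC
 * registers; for frame-based input with PRINT_FRAMEBASE_DATA set it
 * also hex-dumps the current input chunk.
 */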
13096 static void vh265_dump_state(struct vdec_s *vdec)
13097 {
13098 int i;
13099 struct hevc_state_s *hevc =
13100 (struct hevc_state_s *)vdec->private;
13101 hevc_print(hevc, 0,
13102 "====== %s\n", __func__);
13103
13104 hevc_print(hevc, 0,
13105 "width/height (%d/%d), reorder_pic_num %d ip_mode %d buf count(bufspec size) %d, video_signal_type 0x%x, is_swap %d\n",
13106 hevc->frame_width,
13107 hevc->frame_height,
13108 hevc->sps_num_reorder_pics_0,
13109 hevc->ip_mode,
13110 get_work_pic_num(hevc),
13111 hevc->video_signal_type_debug,
13112 hevc->is_swap
13113 );
13114
13115 hevc_print(hevc, 0,
13116 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d\n",
13117 input_frame_based(vdec),
13118 hevc->eos,
13119 hevc->dec_result,
13120 decode_frame_count[hevc->index],
13121 display_frame_count[hevc->index],
13122 run_count[hevc->index],
13123 not_run_ready[hevc->index],
13124 input_empty[hevc->index]
13125 );
13126
13127 if (vf_get_receiver(vdec->vf_provider_name)) {
13128 enum receviver_start_e state =
13129 vf_notify_receiver(vdec->vf_provider_name,
13130 VFRAME_EVENT_PROVIDER_QUREY_STATE,
13131 NULL);
13132 hevc_print(hevc, 0,
13133 "\nreceiver(%s) state %d\n",
13134 vdec->vf_provider_name,
13135 state);
13136 }
13137
13138 hevc_print(hevc, 0,
13139 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), pic_list_init_flag(%d), is_new_pic_available(%d)\n",
13140 __func__,
13141 kfifo_len(&hevc->newframe_q),
13142 VF_POOL_SIZE,
13143 kfifo_len(&hevc->display_q),
13144 VF_POOL_SIZE,
13145 hevc->vf_pre_count,
13146 hevc->vf_get_count,
13147 hevc->vf_put_count,
13148 hevc->pic_list_init_flag,
13149 is_new_pic_available(hevc)
13150 );
13151
13152 dump_pic_list(hevc);
13153
13154 for (i = 0; i < BUF_POOL_SIZE; i++) {
13155 hevc_print(hevc, 0,
13156 "Buf(%d) start_adr 0x%x size 0x%x used %d\n",
13157 i,
13158 hevc->m_BUF[i].start_adr,
13159 hevc->m_BUF[i].size,
13160 hevc->m_BUF[i].used_flag);
13161 }
13162
13163 for (i = 0; i < MAX_REF_PIC_NUM; i++) {
13164 hevc_print(hevc, 0,
13165 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
13166 i,
13167 hevc->m_mv_BUF[i].start_adr,
13168 hevc->m_mv_BUF[i].size,
13169 hevc->m_mv_BUF[i].used_flag);
13170 }
13171
13172 hevc_print(hevc, 0,
13173 "HEVC_DEC_STATUS_REG=0x%x\n",
13174 READ_VREG(HEVC_DEC_STATUS_REG));
13175 hevc_print(hevc, 0,
13176 "HEVC_MPC_E=0x%x\n",
13177 READ_VREG(HEVC_MPC_E));
13178 hevc_print(hevc, 0,
13179 "HEVC_DECODE_MODE=0x%x\n",
13180 READ_VREG(HEVC_DECODE_MODE));
13181 hevc_print(hevc, 0,
13182 "HEVC_DECODE_MODE2=0x%x\n",
13183 READ_VREG(HEVC_DECODE_MODE2));
13184 hevc_print(hevc, 0,
13185 "NAL_SEARCH_CTL=0x%x\n",
13186 READ_VREG(NAL_SEARCH_CTL));
13187 hevc_print(hevc, 0,
13188 "HEVC_PARSER_LCU_START=0x%x\n",
13189 READ_VREG(HEVC_PARSER_LCU_START));
13190 hevc_print(hevc, 0,
13191 "HEVC_DECODE_SIZE=0x%x\n",
13192 READ_VREG(HEVC_DECODE_SIZE));
13193 hevc_print(hevc, 0,
13194 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
13195 READ_VREG(HEVC_SHIFT_BYTE_COUNT));
13196 hevc_print(hevc, 0,
13197 "HEVC_STREAM_START_ADDR=0x%x\n",
13198 READ_VREG(HEVC_STREAM_START_ADDR));
13199 hevc_print(hevc, 0,
13200 "HEVC_STREAM_END_ADDR=0x%x\n",
13201 READ_VREG(HEVC_STREAM_END_ADDR));
13202 hevc_print(hevc, 0,
13203 "HEVC_STREAM_LEVEL=0x%x\n",
13204 READ_VREG(HEVC_STREAM_LEVEL));
13205 hevc_print(hevc, 0,
13206 "HEVC_STREAM_WR_PTR=0x%x\n",
13207 READ_VREG(HEVC_STREAM_WR_PTR));
13208 hevc_print(hevc, 0,
13209 "HEVC_STREAM_RD_PTR=0x%x\n",
13210 READ_VREG(HEVC_STREAM_RD_PTR));
13211 hevc_print(hevc, 0,
13212 "PARSER_VIDEO_RP=0x%x\n",
13213 STBUF_READ(&vdec->vbuf, get_rp));
13214 hevc_print(hevc, 0,
13215 "PARSER_VIDEO_WP=0x%x\n",
13216 STBUF_READ(&vdec->vbuf, get_wp));
13217
13218 if (input_frame_based(vdec) &&
13219 (get_dbg_flag(hevc) & PRINT_FRAMEBASE_DATA)
13220 ) {
13221 int jj;
13222 if (hevc->chunk && hevc->chunk->block &&
13223 hevc->chunk->size > 0) {
13224 u8 *data = NULL;
13225 if (!hevc->chunk->block->is_mapped)
13226 data = codec_mm_vmap(hevc->chunk->block->start +
13227 hevc->chunk->offset, hevc->chunk->size);
13228 else
13229 data = ((u8 *)hevc->chunk->block->start_virt)
13230 + hevc->chunk->offset;
13231 hevc_print(hevc, 0,
13232 "frame data size 0x%x\n",
13233 hevc->chunk->size);
13234 for (jj = 0; jj < hevc->chunk->size; jj++) {
13235 if ((jj & 0xf) == 0)
13236 hevc_print(hevc,
13237 PRINT_FRAMEBASE_DATA,
13238 "%06x:", jj);
13239 hevc_print_cont(hevc,
13240 PRINT_FRAMEBASE_DATA,
13241 "%02x ", data[jj]);
13242 if (((jj + 1) & 0xf) == 0)
13243 hevc_print_cont(hevc,
13244 PRINT_FRAMEBASE_DATA,
13245 "\n");
13246 }
13247
13248 if (!hevc->chunk->block->is_mapped)
13249 codec_mm_unmap_phyaddr(data);
13250 }
13251 }
13252
13253 }
13254
13255
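/*
 * Multi-instance probe: allocate the per-instance hevc state, hook the
 * vdec callbacks (run/reset/irq/dump_state), choose the vframe provider
 * name (VFM path, DolbyVision dual layer or per-instance name), apply
 * the optional ptr-config overrides, set up the MMU/workspace buffers,
 * initialize the decoder and request the decoder core(s).
 */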
13256 static int ammvdec_h265_probe(struct platform_device *pdev)
13257 {
13258
13259 struct vdec_s *pdata = *(struct vdec_s **)pdev->dev.platform_data;
13260 struct hevc_state_s *hevc = NULL;
13261 int ret;
13262 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
13263 int config_val;
13264 #endif
13265 //pr_err("[%s pid=%d tgid=%d] \n",__func__, current->pid, current->tgid);
13266 if (pdata == NULL) {
13267 pr_info("\nammvdec_h265 memory resource undefined.\n");
13268 return -EFAULT;
13269 }
13270
13271 /* hevc = (struct hevc_state_s *)devm_kzalloc(&pdev->dev,
13272 sizeof(struct hevc_state_s), GFP_KERNEL); */
13273 hevc = vmalloc(sizeof(struct hevc_state_s));
13274 if (hevc == NULL) {
13275 pr_info("\nammvdec_h265 device data allocation failed\n");
13276 return -ENOMEM;
13277 }
13278 memset(hevc, 0, sizeof(struct hevc_state_s));
13279
13280 /* the ctx from v4l2 driver. */
13281 hevc->v4l2_ctx = pdata->private;
13282
13283 pdata->private = hevc;
13284 pdata->dec_status = vh265_dec_status;
13285 pdata->set_trickmode = vh265_set_trickmode;
13286 pdata->run_ready = run_ready;
13287 pdata->run = run;
13288 pdata->reset = reset;
13289 pdata->irq_handler = vh265_irq_cb;
13290 pdata->threaded_irq_handler = vh265_threaded_irq_cb;
13291 pdata->dump_state = vh265_dump_state;
13292
13293 hevc->index = pdev->id;
13294 hevc->m_ins_flag = 1;
13295
13296 if (pdata->use_vfm_path) {
13297 snprintf(pdata->vf_provider_name,
13298 VDEC_PROVIDER_NAME_SIZE,
13299 VFM_DEC_PROVIDER_NAME);
13300 hevc->frameinfo_enable = 1;
13301 }
13302 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13303 else if (vdec_dual(pdata)) {
13304 struct hevc_state_s *hevc_pair = NULL;
13305
13306 if (dv_toggle_prov_name) /*debug purpose*/
13307 snprintf(pdata->vf_provider_name,
13308 VDEC_PROVIDER_NAME_SIZE,
13309 (pdata->master) ? VFM_DEC_DVBL_PROVIDER_NAME :
13310 VFM_DEC_DVEL_PROVIDER_NAME);
13311 else
13312 snprintf(pdata->vf_provider_name,
13313 VDEC_PROVIDER_NAME_SIZE,
13314 (pdata->master) ? VFM_DEC_DVEL_PROVIDER_NAME :
13315 VFM_DEC_DVBL_PROVIDER_NAME);
13316 hevc->dolby_enhance_flag = pdata->master ? 1 : 0;
13317 if (pdata->master)
13318 hevc_pair = (struct hevc_state_s *)
13319 pdata->master->private;
13320 else if (pdata->slave)
13321 hevc_pair = (struct hevc_state_s *)
13322 pdata->slave->private;
13323 if (hevc_pair)
13324 hevc->shift_byte_count_lo =
13325 hevc_pair->shift_byte_count_lo;
13326 }
13327 #endif
13328 else
13329 snprintf(pdata->vf_provider_name, VDEC_PROVIDER_NAME_SIZE,
13330 MULTI_INSTANCE_PROVIDER_NAME ".%02x", pdev->id & 0xff);
13331
13332 vf_provider_init(&pdata->vframe_provider, pdata->vf_provider_name,
13333 &vh265_vf_provider, pdata);
13334
13335 hevc->provider_name = pdata->vf_provider_name;
13336 platform_set_drvdata(pdev, pdata);
13337
13338 hevc->platform_dev = pdev;
13339
13340 if (((get_dbg_flag(hevc) & IGNORE_PARAM_FROM_CONFIG) == 0) &&
13341 pdata->config && pdata->config_len) {
13342 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
		/* use ptr config for double_write_mode, etc */
13344 hevc_print(hevc, 0, "pdata->config=%s\n", pdata->config);
13345
13346 if (get_config_int(pdata->config, "hevc_double_write_mode",
13347 &config_val) == 0)
13348 hevc->double_write_mode = config_val;
13349 else
13350 hevc->double_write_mode = double_write_mode;
13351
13352 if (get_config_int(pdata->config, "save_buffer_mode",
13353 &config_val) == 0)
13354 hevc->save_buffer_mode = config_val;
13355 else
13356 hevc->save_buffer_mode = 0;
13357
13358 /*use ptr config for max_pic_w, etc*/
13359 if (get_config_int(pdata->config, "hevc_buf_width",
13360 &config_val) == 0) {
13361 hevc->max_pic_w = config_val;
13362 }
13363 if (get_config_int(pdata->config, "hevc_buf_height",
13364 &config_val) == 0) {
13365 hevc->max_pic_h = config_val;
13366 }
13367
13368 if (get_config_int(pdata->config, "sidebind_type",
13369 &config_val) == 0)
13370 hevc->sidebind_type = config_val;
13371
13372 if (get_config_int(pdata->config, "sidebind_channel_id",
13373 &config_val) == 0)
13374 hevc->sidebind_channel_id = config_val;
13375
13376 if (get_config_int(pdata->config,
13377 "parm_v4l_codec_enable",
13378 &config_val) == 0)
13379 hevc->is_used_v4l = config_val;
13380
13381 if (get_config_int(pdata->config,
13382 "parm_v4l_buffer_margin",
13383 &config_val) == 0)
13384 hevc->dynamic_buf_num_margin = config_val;
13385
13386 if (get_config_int(pdata->config,
13387 "parm_v4l_canvas_mem_mode",
13388 &config_val) == 0)
13389 hevc->mem_map_mode = config_val;
13390 #endif
13391 } else {
13392 if (pdata->sys_info)
13393 hevc->vh265_amstream_dec_info = *pdata->sys_info;
13394 else {
13395 hevc->vh265_amstream_dec_info.width = 0;
13396 hevc->vh265_amstream_dec_info.height = 0;
13397 hevc->vh265_amstream_dec_info.rate = 30;
13398 }
13399 hevc->double_write_mode = double_write_mode;
13400 }
13401 /* get valid double write from configure or node */
13402 hevc->double_write_mode = get_double_write_mode(hevc);
13403
13404 if (!hevc->is_used_v4l) {
13405 if (hevc->save_buffer_mode && dynamic_buf_num_margin > 2)
			hevc->dynamic_buf_num_margin = dynamic_buf_num_margin - 2;
13407 else
13408 hevc->dynamic_buf_num_margin = dynamic_buf_num_margin;
13409
13410 hevc->mem_map_mode = mem_map_mode;
13411 }
13412
13413 if (mmu_enable_force == 0) {
13414 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_GXL)
13415 hevc->mmu_enable = 0;
13416 else
13417 hevc->mmu_enable = 1;
13418 }
13419
13420 if (init_mmu_buffers(hevc) < 0) {
13421 hevc_print(hevc, 0,
13422 "\n 265 mmu init failed!\n");
		/* devm_kfree(&pdev->dev, (void *)hevc);*/
		vfree((void *)hevc);
13427 pdata->dec_status = NULL;
13428 return -EFAULT;
13429 }
13430 #if 0
13431 hevc->buf_start = pdata->mem_start;
13432 hevc->buf_size = pdata->mem_end - pdata->mem_start + 1;
13433 #else
13434
13435 ret = decoder_bmmu_box_alloc_buf_phy(hevc->bmmu_box,
13436 BMMU_WORKSPACE_ID, work_buf_size,
13437 DRIVER_NAME, &hevc->buf_start);
13438 if (ret < 0) {
13439 uninit_mmu_buffers(hevc);
13440 /* devm_kfree(&pdev->dev, (void *)hevc); */
		vfree((void *)hevc);
		pdata->dec_status = NULL;
13445 return ret;
13446 }
13447 hevc->buf_size = work_buf_size;
13448 #endif
13449 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXTVBB) &&
13450 (parser_sei_enable & 0x100) == 0)
13451 parser_sei_enable = 7;
13452 hevc->init_flag = 0;
13453 hevc->first_sc_checked = 0;
13454 hevc->uninit_list = 0;
13455 hevc->fatal_error = 0;
13456 hevc->show_frame_num = 0;
13457
13458 /*
13459 *hevc->mc_buf_spec.buf_end = pdata->mem_end + 1;
13460 *for (i = 0; i < WORK_BUF_SPEC_NUM; i++)
13461 * amvh265_workbuff_spec[i].start_adr = pdata->mem_start;
13462 */
13463 if (get_dbg_flag(hevc)) {
13464 hevc_print(hevc, 0,
13465 "===H.265 decoder mem resource 0x%lx size 0x%x\n",
13466 hevc->buf_start, hevc->buf_size);
13467 }
13468
13469 hevc_print(hevc, 0,
13470 "dynamic_buf_num_margin=%d\n",
13471 hevc->dynamic_buf_num_margin);
13472 hevc_print(hevc, 0,
13473 "double_write_mode=%d\n",
13474 hevc->double_write_mode);
13475
13476 hevc->cma_dev = pdata->cma_dev;
13477 vh265_vdec_info_init(hevc);
13478
13479 if (vh265_init(pdata) < 0) {
13480 hevc_print(hevc, 0,
13481 "\namvdec_h265 init failed.\n");
13482 hevc_local_uninit(hevc);
		kfree(hevc->gvs);
13485 hevc->gvs = NULL;
13486 uninit_mmu_buffers(hevc);
13487 /* devm_kfree(&pdev->dev, (void *)hevc); */
		vfree((void *)hevc);
13490 pdata->dec_status = NULL;
13491 return -ENODEV;
13492 }
13493
13494 vdec_set_prepare_level(pdata, start_decode_buf_level);
13495
13496 /*set the max clk for smooth playing...*/
13497 hevc_source_changed(VFORMAT_HEVC,
13498 3840, 2160, 60);
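	/*
	 * Parallel decode only needs the HEVC core; otherwise VDEC_1 and
	 * HEVC are requested as a combined core set.
	 */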
13499 if (pdata->parallel_dec == 1)
13500 vdec_core_request(pdata, CORE_MASK_HEVC);
13501 else
13502 vdec_core_request(pdata, CORE_MASK_VDEC_1 | CORE_MASK_HEVC
13503 | CORE_MASK_COMBINE);
13504
13505 return 0;
13506 }
13507
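/*
 * Multi-instance remove: stop this instance, release the decoder
 * core(s) requested at probe time, mark the vdec disconnected and free
 * the per-instance state.
 */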
13508 static int ammvdec_h265_remove(struct platform_device *pdev)
13509 {
13510 struct hevc_state_s *hevc =
13511 (struct hevc_state_s *)
13512 (((struct vdec_s *)(platform_get_drvdata(pdev)))->private);
13513 struct vdec_s *vdec;
13514
13515 if (hevc == NULL)
13516 return 0;
13517 vdec = hw_to_vdec(hevc);
13518
13519 //pr_err("%s [pid=%d,tgid=%d]\n", __func__, current->pid, current->tgid);
13520 if (get_dbg_flag(hevc))
13521 hevc_print(hevc, 0, "%s\r\n", __func__);
13522
13523 vmh265_stop(hevc);
13524
13525 /* vdec_source_changed(VFORMAT_H264, 0, 0, 0); */
	/* release the same cores that were requested in probe */
	if (vdec->parallel_dec == 1)
		vdec_core_release(hw_to_vdec(hevc), CORE_MASK_HEVC);
	else
		vdec_core_release(hw_to_vdec(hevc),
			CORE_MASK_VDEC_1 | CORE_MASK_HEVC);
13530
13531 vdec_set_status(hw_to_vdec(hevc), VDEC_STATUS_DISCONNECTED);
13532
13533 vfree((void *)hevc);
13534
13535 return 0;
13536 }
13537
13538 static struct platform_driver ammvdec_h265_driver = {
13539 .probe = ammvdec_h265_probe,
13540 .remove = ammvdec_h265_remove,
13541 .driver = {
13542 .name = MULTI_DRIVER_NAME,
13543 #ifdef CONFIG_PM
13544 .pm = &h265_pm_ops,
13545 #endif
13546 }
13547 };
13548 #endif
13549
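/*
 * Codec capability entries reported through vcodec_profile_register():
 * "hevc" is the legacy name, "h265"/"mh265" are the single- and
 * multi-instance aliases registered from module init.
 */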
13550 static struct codec_profile_t amvdec_h265_profile = {
13551 .name = "hevc",
13552 .profile = ""
13553 };
13554
13555 static struct codec_profile_t amvdec_h265_profile_single,
13556 amvdec_h265_profile_mult;
13557
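/*
 * Module parameters exported to the "media.decoder" config node via
 * INIT_REG_NODE_CONFIGS() in module init.
 */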
13558 static struct mconfig h265_configs[] = {
13559 MC_PU32("use_cma", &use_cma),
13560 MC_PU32("bit_depth_luma", &bit_depth_luma),
13561 MC_PU32("bit_depth_chroma", &bit_depth_chroma),
13562 MC_PU32("video_signal_type", &video_signal_type),
13563 #ifdef ERROR_HANDLE_DEBUG
13564 MC_PU32("dbg_nal_skip_flag", &dbg_nal_skip_flag),
13565 MC_PU32("dbg_nal_skip_count", &dbg_nal_skip_count),
13566 #endif
13567 MC_PU32("radr", &radr),
13568 MC_PU32("rval", &rval),
13569 MC_PU32("dbg_cmd", &dbg_cmd),
13570 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index),
13571 MC_PU32("endian", &endian),
13572 MC_PU32("step", &step),
13573 MC_PU32("udebug_flag", &udebug_flag),
13574 MC_PU32("decode_pic_begin", &decode_pic_begin),
13575 MC_PU32("slice_parse_begin", &slice_parse_begin),
13576 MC_PU32("nal_skip_policy", &nal_skip_policy),
13577 MC_PU32("i_only_flag", &i_only_flag),
13578 MC_PU32("error_handle_policy", &error_handle_policy),
13579 MC_PU32("error_handle_threshold", &error_handle_threshold),
13580 MC_PU32("error_handle_nal_skip_threshold",
13581 &error_handle_nal_skip_threshold),
13582 MC_PU32("error_handle_system_threshold",
13583 &error_handle_system_threshold),
13584 MC_PU32("error_skip_nal_count", &error_skip_nal_count),
13585 MC_PU32("debug", &debug),
13586 MC_PU32("debug_mask", &debug_mask),
13587 MC_PU32("buffer_mode", &buffer_mode),
13588 MC_PU32("double_write_mode", &double_write_mode),
13589 MC_PU32("buf_alloc_width", &buf_alloc_width),
13590 MC_PU32("buf_alloc_height", &buf_alloc_height),
13591 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin),
13592 MC_PU32("max_buf_num", &max_buf_num),
13593 MC_PU32("buf_alloc_size", &buf_alloc_size),
13594 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg),
13595 MC_PU32("mem_map_mode", &mem_map_mode),
13596 MC_PU32("enable_mem_saving", &enable_mem_saving),
13597 MC_PU32("force_w_h", &force_w_h),
13598 MC_PU32("force_fps", &force_fps),
13599 MC_PU32("max_decoding_time", &max_decoding_time),
13600 MC_PU32("prefix_aux_buf_size", &prefix_aux_buf_size),
13601 MC_PU32("suffix_aux_buf_size", &suffix_aux_buf_size),
13602 MC_PU32("interlace_enable", &interlace_enable),
13603 MC_PU32("pts_unstable", &pts_unstable),
13604 MC_PU32("parser_sei_enable", &parser_sei_enable),
13605 MC_PU32("start_decode_buf_level", &start_decode_buf_level),
13606 MC_PU32("decode_timeout_val", &decode_timeout_val),
13607 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13608 MC_PU32("parser_dolby_vision_enable", &parser_dolby_vision_enable),
13609 MC_PU32("dv_toggle_prov_name", &dv_toggle_prov_name),
13610 MC_PU32("dv_debug", &dv_debug),
13611 #endif
13612 };
13613 static struct mconfig_node decoder_265_node;
13614
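/*
 * Module init: pick the workspace buffer spec for the SoC's maximum
 * supported resolution, register the multi-instance and single-instance
 * platform drivers, fill in the per-SoC capability strings and register
 * the codec profiles and config node.
 */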
13615 static int __init amvdec_h265_driver_init_module(void)
13616 {
13617 struct BuffInfo_s *p_buf_info;
13618
13619 if (vdec_is_support_4k()) {
13620 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)
13621 p_buf_info = &amvh265_workbuff_spec[2];
13622 else
13623 p_buf_info = &amvh265_workbuff_spec[1];
13624 } else
13625 p_buf_info = &amvh265_workbuff_spec[0];
13626
13627 init_buff_spec(NULL, p_buf_info);
13628 work_buf_size =
13629 (p_buf_info->end_adr - p_buf_info->start_adr
13630 + 0xffff) & (~0xffff);
13631
13632 pr_debug("amvdec_h265 module init\n");
13633 error_handle_policy = 0;
13634
13635 #ifdef ERROR_HANDLE_DEBUG
13636 dbg_nal_skip_flag = 0;
13637 dbg_nal_skip_count = 0;
13638 #endif
13639 udebug_flag = 0;
13640 decode_pic_begin = 0;
13641 slice_parse_begin = 0;
13642 step = 0;
13643 buf_alloc_size = 0;
13644
13645 #ifdef MULTI_INSTANCE_SUPPORT
13646 if (platform_driver_register(&ammvdec_h265_driver))
13647 pr_err("failed to register ammvdec_h265 driver\n");
13648
13649 #endif
13650 if (platform_driver_register(&amvdec_h265_driver)) {
13651 pr_err("failed to register amvdec_h265 driver\n");
13652 return -ENODEV;
13653 }
#if 1 /* MESON_CPU_TYPE >= MESON_CPU_TYPE_MESON8 */
13655 if (!has_hevc_vdec()) {
13656 /* not support hevc */
13657 amvdec_h265_profile.name = "hevc_unsupport";
13658 }
13659 if (vdec_is_support_4k()) {
13660 if (is_meson_m8m2_cpu()) {
13661 /* m8m2 support 4k */
13662 amvdec_h265_profile.profile = "4k";
13663 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
13664 amvdec_h265_profile.profile =
13665 "8k, 8bit, 10bit, dwrite, compressed";
		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB) {
13667 amvdec_h265_profile.profile =
13668 "4k, 8bit, 10bit, dwrite, compressed";
		} else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_MG9TV) {
			amvdec_h265_profile.profile = "4k";
		}
13671 }
13672 #endif
13673 if (codec_mm_get_total_size() < 80 * SZ_1M) {
13674 pr_info("amvdec_h265 default mmu enabled.\n");
13675 mmu_enable = 1;
13676 }
13677
13678 vcodec_profile_register(&amvdec_h265_profile);
13679 amvdec_h265_profile_single = amvdec_h265_profile;
13680 amvdec_h265_profile_single.name = "h265";
13681 vcodec_profile_register(&amvdec_h265_profile_single);
13682 amvdec_h265_profile_mult = amvdec_h265_profile;
13683 amvdec_h265_profile_mult.name = "mh265";
13684 vcodec_profile_register(&amvdec_h265_profile_mult);
13685 INIT_REG_NODE_CONFIGS("media.decoder", &decoder_265_node,
13686 "h265", h265_configs, CONFIG_FOR_RW);
13687 return 0;
13688 }
13689
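/* Module exit: unregister both platform drivers. */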
13690 static void __exit amvdec_h265_driver_remove_module(void)
13691 {
13692 pr_debug("amvdec_h265 module remove.\n");
13693
13694 #ifdef MULTI_INSTANCE_SUPPORT
13695 platform_driver_unregister(&ammvdec_h265_driver);
13696 #endif
13697 platform_driver_unregister(&amvdec_h265_driver);
13698 }
13699
13700 /****************************************/
13701 /*
13702 *module_param(stat, uint, 0664);
13703 *MODULE_PARM_DESC(stat, "\n amvdec_h265 stat\n");
13704 */
13705 module_param(use_cma, uint, 0664);
13706 MODULE_PARM_DESC(use_cma, "\n amvdec_h265 use_cma\n");
13707
13708 module_param(bit_depth_luma, uint, 0664);
13709 MODULE_PARM_DESC(bit_depth_luma, "\n amvdec_h265 bit_depth_luma\n");
13710
13711 module_param(bit_depth_chroma, uint, 0664);
13712 MODULE_PARM_DESC(bit_depth_chroma, "\n amvdec_h265 bit_depth_chroma\n");
13713
13714 module_param(video_signal_type, uint, 0664);
13715 MODULE_PARM_DESC(video_signal_type, "\n amvdec_h265 video_signal_type\n");
13716
13717 #ifdef ERROR_HANDLE_DEBUG
13718 module_param(dbg_nal_skip_flag, uint, 0664);
13719 MODULE_PARM_DESC(dbg_nal_skip_flag, "\n amvdec_h265 dbg_nal_skip_flag\n");
13720
13721 module_param(dbg_nal_skip_count, uint, 0664);
13722 MODULE_PARM_DESC(dbg_nal_skip_count, "\n amvdec_h265 dbg_nal_skip_count\n");
13723 #endif
13724
13725 module_param(radr, uint, 0664);
13726 MODULE_PARM_DESC(radr, "\n radr\n");
13727
13728 module_param(rval, uint, 0664);
13729 MODULE_PARM_DESC(rval, "\n rval\n");
13730
13731 module_param(dbg_cmd, uint, 0664);
13732 MODULE_PARM_DESC(dbg_cmd, "\n dbg_cmd\n");
13733
13734 module_param(dump_nal, uint, 0664);
13735 MODULE_PARM_DESC(dump_nal, "\n dump_nal\n");
13736
13737 module_param(dbg_skip_decode_index, uint, 0664);
13738 MODULE_PARM_DESC(dbg_skip_decode_index, "\n dbg_skip_decode_index\n");
13739
13740 module_param(endian, uint, 0664);
MODULE_PARM_DESC(endian, "\n endian\n");
13742
13743 module_param(step, uint, 0664);
13744 MODULE_PARM_DESC(step, "\n amvdec_h265 step\n");
13745
13746 module_param(decode_pic_begin, uint, 0664);
13747 MODULE_PARM_DESC(decode_pic_begin, "\n amvdec_h265 decode_pic_begin\n");
13748
13749 module_param(slice_parse_begin, uint, 0664);
13750 MODULE_PARM_DESC(slice_parse_begin, "\n amvdec_h265 slice_parse_begin\n");
13751
13752 module_param(nal_skip_policy, uint, 0664);
13753 MODULE_PARM_DESC(nal_skip_policy, "\n amvdec_h265 nal_skip_policy\n");
13754
13755 module_param(i_only_flag, uint, 0664);
13756 MODULE_PARM_DESC(i_only_flag, "\n amvdec_h265 i_only_flag\n");
13757
13758 module_param(fast_output_enable, uint, 0664);
13759 MODULE_PARM_DESC(fast_output_enable, "\n amvdec_h265 fast_output_enable\n");
13760
13761 module_param(error_handle_policy, uint, 0664);
13762 MODULE_PARM_DESC(error_handle_policy, "\n amvdec_h265 error_handle_policy\n");
13763
13764 module_param(error_handle_threshold, uint, 0664);
13765 MODULE_PARM_DESC(error_handle_threshold,
13766 "\n amvdec_h265 error_handle_threshold\n");
13767
13768 module_param(error_handle_nal_skip_threshold, uint, 0664);
13769 MODULE_PARM_DESC(error_handle_nal_skip_threshold,
13770 "\n amvdec_h265 error_handle_nal_skip_threshold\n");
13771
13772 module_param(error_handle_system_threshold, uint, 0664);
13773 MODULE_PARM_DESC(error_handle_system_threshold,
13774 "\n amvdec_h265 error_handle_system_threshold\n");
13775
13776 module_param(error_skip_nal_count, uint, 0664);
13777 MODULE_PARM_DESC(error_skip_nal_count,
13778 "\n amvdec_h265 error_skip_nal_count\n");
13779
13780 module_param(debug, uint, 0664);
13781 MODULE_PARM_DESC(debug, "\n amvdec_h265 debug\n");
13782
13783 module_param(debug_mask, uint, 0664);
13784 MODULE_PARM_DESC(debug_mask, "\n amvdec_h265 debug mask\n");
13785
13786 module_param(log_mask, uint, 0664);
13787 MODULE_PARM_DESC(log_mask, "\n amvdec_h265 log_mask\n");
13788
13789 module_param(buffer_mode, uint, 0664);
13790 MODULE_PARM_DESC(buffer_mode, "\n buffer_mode\n");
13791
13792 module_param(double_write_mode, uint, 0664);
13793 MODULE_PARM_DESC(double_write_mode, "\n double_write_mode\n");
13794
13795 module_param(buf_alloc_width, uint, 0664);
13796 MODULE_PARM_DESC(buf_alloc_width, "\n buf_alloc_width\n");
13797
13798 module_param(buf_alloc_height, uint, 0664);
13799 MODULE_PARM_DESC(buf_alloc_height, "\n buf_alloc_height\n");
13800
13801 module_param(dynamic_buf_num_margin, uint, 0664);
13802 MODULE_PARM_DESC(dynamic_buf_num_margin, "\n dynamic_buf_num_margin\n");
13803
13804 module_param(max_buf_num, uint, 0664);
13805 MODULE_PARM_DESC(max_buf_num, "\n max_buf_num\n");
13806
13807 module_param(buf_alloc_size, uint, 0664);
13808 MODULE_PARM_DESC(buf_alloc_size, "\n buf_alloc_size\n");
13809
13810 #ifdef CONSTRAIN_MAX_BUF_NUM
13811 module_param(run_ready_max_vf_only_num, uint, 0664);
13812 MODULE_PARM_DESC(run_ready_max_vf_only_num, "\n run_ready_max_vf_only_num\n");
13813
13814 module_param(run_ready_display_q_num, uint, 0664);
13815 MODULE_PARM_DESC(run_ready_display_q_num, "\n run_ready_display_q_num\n");
13816
13817 module_param(run_ready_max_buf_num, uint, 0664);
13818 MODULE_PARM_DESC(run_ready_max_buf_num, "\n run_ready_max_buf_num\n");
13819 #endif
13820
13821 #if 0
13822 module_param(re_config_pic_flag, uint, 0664);
13823 MODULE_PARM_DESC(re_config_pic_flag, "\n re_config_pic_flag\n");
13824 #endif
13825
13826 module_param(buffer_mode_dbg, uint, 0664);
13827 MODULE_PARM_DESC(buffer_mode_dbg, "\n buffer_mode_dbg\n");
13828
13829 module_param(mem_map_mode, uint, 0664);
13830 MODULE_PARM_DESC(mem_map_mode, "\n mem_map_mode\n");
13831
13832 module_param(enable_mem_saving, uint, 0664);
13833 MODULE_PARM_DESC(enable_mem_saving, "\n enable_mem_saving\n");
13834
13835 module_param(force_w_h, uint, 0664);
13836 MODULE_PARM_DESC(force_w_h, "\n force_w_h\n");
13837
13838 module_param(force_fps, uint, 0664);
13839 MODULE_PARM_DESC(force_fps, "\n force_fps\n");
13840
13841 module_param(max_decoding_time, uint, 0664);
13842 MODULE_PARM_DESC(max_decoding_time, "\n max_decoding_time\n");
13843
13844 module_param(prefix_aux_buf_size, uint, 0664);
13845 MODULE_PARM_DESC(prefix_aux_buf_size, "\n prefix_aux_buf_size\n");
13846
13847 module_param(suffix_aux_buf_size, uint, 0664);
13848 MODULE_PARM_DESC(suffix_aux_buf_size, "\n suffix_aux_buf_size\n");
13849
13850 module_param(interlace_enable, uint, 0664);
13851 MODULE_PARM_DESC(interlace_enable, "\n interlace_enable\n");
13852 module_param(pts_unstable, uint, 0664);
13853 MODULE_PARM_DESC(pts_unstable, "\n amvdec_h265 pts_unstable\n");
13854 module_param(parser_sei_enable, uint, 0664);
13855 MODULE_PARM_DESC(parser_sei_enable, "\n parser_sei_enable\n");
13856
13857 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13858 module_param(parser_dolby_vision_enable, uint, 0664);
13859 MODULE_PARM_DESC(parser_dolby_vision_enable,
13860 "\n parser_dolby_vision_enable\n");
13861
13862 module_param(dolby_meta_with_el, uint, 0664);
13863 MODULE_PARM_DESC(dolby_meta_with_el,
13864 "\n dolby_meta_with_el\n");
13865
13866 module_param(dolby_el_flush_th, uint, 0664);
13867 MODULE_PARM_DESC(dolby_el_flush_th,
13868 "\n dolby_el_flush_th\n");
13869 #endif
13870 module_param(mmu_enable, uint, 0664);
13871 MODULE_PARM_DESC(mmu_enable, "\n mmu_enable\n");
13872
13873 module_param(mmu_enable_force, uint, 0664);
13874 MODULE_PARM_DESC(mmu_enable_force, "\n mmu_enable_force\n");
13875
13876 #ifdef MULTI_INSTANCE_SUPPORT
13877 module_param(start_decode_buf_level, int, 0664);
13878 MODULE_PARM_DESC(start_decode_buf_level,
13879 "\n h265 start_decode_buf_level\n");
13880
13881 module_param(decode_timeout_val, uint, 0664);
13882 MODULE_PARM_DESC(decode_timeout_val,
13883 "\n h265 decode_timeout_val\n");
13884
13885 module_param(data_resend_policy, uint, 0664);
13886 MODULE_PARM_DESC(data_resend_policy,
13887 "\n h265 data_resend_policy\n");
13888
13889 module_param_array(decode_frame_count, uint,
13890 &max_decode_instance_num, 0664);
13891
13892 module_param_array(display_frame_count, uint,
13893 &max_decode_instance_num, 0664);
13894
13895 module_param_array(max_process_time, uint,
13896 &max_decode_instance_num, 0664);
13897
13898 module_param_array(max_get_frame_interval,
13899 uint, &max_decode_instance_num, 0664);
13900
13901 module_param_array(run_count, uint,
13902 &max_decode_instance_num, 0664);
13903
13904 module_param_array(input_empty, uint,
13905 &max_decode_instance_num, 0664);
13906
13907 module_param_array(not_run_ready, uint,
13908 &max_decode_instance_num, 0664);
13909
13910 module_param_array(ref_frame_mark_flag, uint,
13911 &max_decode_instance_num, 0664);
13912
13913 #endif
13914 #ifdef CONFIG_AMLOGIC_MEDIA_ENHANCEMENT_DOLBYVISION
13915 module_param(dv_toggle_prov_name, uint, 0664);
13916 MODULE_PARM_DESC(dv_toggle_prov_name, "\n dv_toggle_prov_name\n");
13917
13918 module_param(dv_debug, uint, 0664);
13919 MODULE_PARM_DESC(dv_debug, "\n dv_debug\n");
13920
13921 module_param(force_bypass_dvenl, uint, 0664);
13922 MODULE_PARM_DESC(force_bypass_dvenl, "\n force_bypass_dvenl\n");
13923 #endif
13924
13925 #ifdef AGAIN_HAS_THRESHOLD
13926 module_param(again_threshold, uint, 0664);
13927 MODULE_PARM_DESC(again_threshold, "\n again_threshold\n");
13928 #endif
13929
13930 module_param(force_disp_pic_index, int, 0664);
13931 MODULE_PARM_DESC(force_disp_pic_index,
13932 "\n amvdec_h265 force_disp_pic_index\n");
13933
13934 module_param(frmbase_cont_bitlevel, uint, 0664);
13935 MODULE_PARM_DESC(frmbase_cont_bitlevel, "\n frmbase_cont_bitlevel\n");
13936
13937 module_param(udebug_flag, uint, 0664);
13938 MODULE_PARM_DESC(udebug_flag, "\n amvdec_h265 udebug_flag\n");
13939
13940 module_param(udebug_pause_pos, uint, 0664);
13941 MODULE_PARM_DESC(udebug_pause_pos, "\n udebug_pause_pos\n");
13942
13943 module_param(udebug_pause_val, uint, 0664);
13944 MODULE_PARM_DESC(udebug_pause_val, "\n udebug_pause_val\n");
13945
13946 module_param(pre_decode_buf_level, int, 0664);
MODULE_PARM_DESC(pre_decode_buf_level, "\n ammvdec_h265 pre_decode_buf_level\n");
13948
13949 module_param(udebug_pause_decode_idx, uint, 0664);
13950 MODULE_PARM_DESC(udebug_pause_decode_idx, "\n udebug_pause_decode_idx\n");
13951
13952 module_param(disp_vframe_valve_level, uint, 0664);
13953 MODULE_PARM_DESC(disp_vframe_valve_level, "\n disp_vframe_valve_level\n");
13954
13955 module_param(pic_list_debug, uint, 0664);
13956 MODULE_PARM_DESC(pic_list_debug, "\n pic_list_debug\n");
13957
13958 module_param(without_display_mode, uint, 0664);
13959 MODULE_PARM_DESC(without_display_mode, "\n amvdec_h265 without_display_mode\n");
13960
13961 #ifdef HEVC_8K_LFTOFFSET_FIX
13962 module_param(performance_profile, uint, 0664);
13963 MODULE_PARM_DESC(performance_profile, "\n amvdec_h265 performance_profile\n");
13964 #endif
13965 module_param(disable_ip_mode, uint, 0664);
13966 MODULE_PARM_DESC(disable_ip_mode, "\n amvdec_h265 disable ip_mode\n");
13967
13968 module_init(amvdec_h265_driver_init_module);
13969 module_exit(amvdec_h265_driver_remove_module);
13970
13971 MODULE_DESCRIPTION("AMLOGIC h265 Video Decoder Driver");
13972 MODULE_LICENSE("GPL");
13973 MODULE_AUTHOR("Tim Yao <tim.yao@amlogic.com>");
13974