// SPDX-License-Identifier: (GPL-2.0 OR BSD-3-Clause)
/*
 * Wave5 series multi-standard codec IP - helper functions
 *
 * Copyright (C) 2021-2023 CHIPS&MEDIA INC
 */

#include <linux/bug.h>
#include "wave5-vpuapi.h"
#include "wave5-regdefine.h"
#include "wave5.h"

#define DECODE_ALL_TEMPORAL_LAYERS 0
#define DECODE_ALL_SPATIAL_LAYERS 0

static int wave5_initialize_vpu(struct device *dev, u8 *code, size_t size)
{
	int ret;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (wave5_vpu_is_init(vpu_dev)) {
		wave5_vpu_re_init(dev, (void *)code, size);
		ret = -EBUSY;
		goto err_out;
	}

	ret = wave5_vpu_reset(dev, SW_RESET_ON_BOOT);
	if (ret)
		goto err_out;

	ret = wave5_vpu_init(dev, (void *)code, size);

err_out:
	mutex_unlock(&vpu_dev->hw_lock);
	return ret;
}

int wave5_vpu_init_with_bitcode(struct device *dev, u8 *bitcode, size_t size)
{
	if (!bitcode || size == 0)
		return -EINVAL;

	return wave5_initialize_vpu(dev, bitcode, size);
}

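/*
 * Flush a decoder/encoder instance in the firmware. The FLUSH command is
 * retried while the firmware reports -EBUSY; for a decoder, pending output
 * info is collected between attempts.
 */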
int wave5_vpu_flush_instance(struct vpu_instance *inst)
{
	int ret = 0;
	int retry = 0;

	ret = mutex_lock_interruptible(&inst->dev->hw_lock);
	if (ret)
		return ret;
	do {
		/*
		 * Repeat the FLUSH command until the firmware reports that the
		 * VPU isn't running anymore
		 */
		ret = wave5_vpu_hw_flush_instance(inst);
		if (ret < 0 && ret != -EBUSY) {
			dev_warn(inst->dev->dev, "Flush of %s instance with id: %d failed: %d\n",
				 inst->type == VPU_INST_TYPE_DEC ? "DECODER" : "ENCODER", inst->id,
				 ret);
			mutex_unlock(&inst->dev->hw_lock);
			return ret;
		}
		if (ret == -EBUSY && retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			dev_warn(inst->dev->dev, "Flush of %s instance with id: %d timed out!\n",
				 inst->type == VPU_INST_TYPE_DEC ? "DECODER" : "ENCODER", inst->id);
			mutex_unlock(&inst->dev->hw_lock);
			return -ETIMEDOUT;
		} else if (ret == -EBUSY) {
			struct dec_output_info dec_info;

			mutex_unlock(&inst->dev->hw_lock);
			wave5_vpu_dec_get_output_info(inst, &dec_info);
			ret = mutex_lock_interruptible(&inst->dev->hw_lock);
			if (ret)
				return ret;
			if (dec_info.index_frame_display > 0)
				wave5_vpu_dec_set_disp_flag(inst, dec_info.index_frame_display);
		}
	} while (ret != 0);
	mutex_unlock(&inst->dev->hw_lock);

	return ret;
}

int wave5_vpu_get_version_info(struct device *dev, u32 *revision, unsigned int *product_id)
{
	int ret;
	struct vpu_device *vpu_dev = dev_get_drvdata(dev);

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (!wave5_vpu_is_init(vpu_dev)) {
		ret = -EINVAL;
		goto err_out;
	}

	if (product_id)
		*product_id = vpu_dev->product;
	ret = wave5_vpu_get_version(vpu_dev, revision);

err_out:
	mutex_unlock(&vpu_dev->hw_lock);
	return ret;
}

static int wave5_check_dec_open_param(struct vpu_instance *inst, struct dec_open_param *param)
{
	if (inst->id >= MAX_NUM_INSTANCE) {
		dev_err(inst->dev->dev, "Too many simultaneous instances: %d (max: %u)\n",
			inst->id, MAX_NUM_INSTANCE);
		return -EOPNOTSUPP;
	}

	if (param->bitstream_buffer % 8) {
		dev_err(inst->dev->dev,
			"Bitstream buffer must be aligned to a multiple of 8\n");
		return -EINVAL;
	}

	if (param->bitstream_buffer_size % 1024 ||
	    param->bitstream_buffer_size < MIN_BITSTREAM_BUFFER_SIZE) {
		dev_err(inst->dev->dev,
			"Bitstream buffer size must be aligned to a multiple of 1024 and have a minimum size of %d\n",
			MIN_BITSTREAM_BUFFER_SIZE);
		return -EINVAL;
	}

	return 0;
}

int wave5_vpu_dec_open(struct vpu_instance *inst, struct dec_open_param *open_param)
{
	struct dec_info *p_dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;
	dma_addr_t buffer_addr;
	size_t buffer_size;

	ret = wave5_check_dec_open_param(inst, open_param);
	if (ret)
		return ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (!wave5_vpu_is_init(vpu_dev)) {
		mutex_unlock(&vpu_dev->hw_lock);
		return -ENODEV;
	}

	p_dec_info = &inst->codec_info->dec_info;
	memcpy(&p_dec_info->open_param, open_param, sizeof(struct dec_open_param));

	buffer_addr = open_param->bitstream_buffer;
	buffer_size = open_param->bitstream_buffer_size;
	p_dec_info->stream_wr_ptr = buffer_addr;
	p_dec_info->stream_rd_ptr = buffer_addr;
	p_dec_info->stream_buf_start_addr = buffer_addr;
	p_dec_info->stream_buf_size = buffer_size;
	p_dec_info->stream_buf_end_addr = buffer_addr + buffer_size;
	p_dec_info->reorder_enable = TRUE;
	p_dec_info->temp_id_select_mode = TEMPORAL_ID_MODE_ABSOLUTE;
	p_dec_info->target_temp_id = DECODE_ALL_TEMPORAL_LAYERS;
	p_dec_info->target_spatial_id = DECODE_ALL_SPATIAL_LAYERS;

	ret = wave5_vpu_build_up_dec_param(inst, open_param);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

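/*
 * Free the motion-vector and FBC table buffers of one frame index. Returns a
 * non-zero value when the index is out of range or nothing is allocated,
 * which callers use to stop iterating.
 */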
static int reset_auxiliary_buffers(struct vpu_instance *inst, unsigned int index)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;

	if (index >= MAX_REG_FRAME)
		return 1;

	if (p_dec_info->vb_mv[index].size == 0 && p_dec_info->vb_fbc_y_tbl[index].size == 0 &&
	    p_dec_info->vb_fbc_c_tbl[index].size == 0)
		return 1;

	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_mv[index]);
	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_fbc_y_tbl[index]);
	wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_fbc_c_tbl[index]);

	return 0;
}

int wave5_vpu_dec_close(struct vpu_instance *inst, u32 *fail_res)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	int retry = 0;
	struct vpu_device *vpu_dev = inst->dev;
	int i;

	*fail_res = 0;
	if (!inst->codec_info)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	do {
		ret = wave5_vpu_dec_finish_seq(inst, fail_res);
		if (ret < 0 && *fail_res != WAVE5_SYSERR_VPU_STILL_RUNNING) {
			dev_warn(inst->dev->dev, "dec_finish_seq timed out\n");
			goto unlock_and_return;
		}

		if (*fail_res == WAVE5_SYSERR_VPU_STILL_RUNNING &&
		    retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			ret = -ETIMEDOUT;
			goto unlock_and_return;
		}
	} while (ret != 0);

	dev_dbg(inst->dev->dev, "%s: dec_finish_seq complete\n", __func__);

	wave5_vdi_free_dma_memory(vpu_dev, &p_dec_info->vb_work);

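	/*
	 * Release the per-frame auxiliary buffers; the loop stops at the first
	 * index that has nothing left to free.
	 */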
	for (i = 0; i < MAX_REG_FRAME; i++) {
		ret = reset_auxiliary_buffers(inst, i);
		if (ret) {
			ret = 0;
			break;
		}
	}

	wave5_vdi_free_dma_memory(vpu_dev, &p_dec_info->vb_task);

unlock_and_return:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_issue_seq_init(struct vpu_instance *inst)
{
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_dec_init_seq(inst);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_complete_seq_init(struct vpu_instance *inst, struct dec_initial_info *info)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_dec_get_seq_info(inst, info);
	if (!ret)
		p_dec_info->initial_info_obtained = true;

	info->rd_ptr = wave5_dec_get_rd_ptr(inst);
	info->wr_ptr = p_dec_info->stream_wr_ptr;

	p_dec_info->initial_info = *info;

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_register_frame_buffer_ex(struct vpu_instance *inst, int num_of_decoding_fbs,
					   int num_of_display_fbs, int stride, int height)
{
	struct dec_info *p_dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;
	struct frame_buffer *fb;

	if (num_of_decoding_fbs >= WAVE5_MAX_FBS || num_of_display_fbs >= WAVE5_MAX_FBS)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;
	p_dec_info->num_of_decoding_fbs = num_of_decoding_fbs;
	p_dec_info->num_of_display_fbs = num_of_display_fbs;
	p_dec_info->stride = stride;

	if (!p_dec_info->initial_info_obtained)
		return -EINVAL;

	if (stride < p_dec_info->initial_info.pic_width || (stride % 8 != 0) ||
	    height < p_dec_info->initial_info.pic_height)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

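	/*
	 * inst->frame_buf[] holds the compressed (reference) buffers first,
	 * followed by the linear display buffers, so the linear set starts at
	 * index num_of_decoding_fbs.
	 */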
	fb = inst->frame_buf;
	ret = wave5_vpu_dec_register_framebuffer(inst, &fb[p_dec_info->num_of_decoding_fbs],
						 LINEAR_FRAME_MAP, p_dec_info->num_of_display_fbs);
	if (ret)
		goto err_out;

	ret = wave5_vpu_dec_register_framebuffer(inst, &fb[0], COMPRESSED_FRAME_MAP,
						 p_dec_info->num_of_decoding_fbs);

err_out:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_get_bitstream_buffer(struct vpu_instance *inst, dma_addr_t *prd_ptr,
				       dma_addr_t *pwr_ptr, size_t *size)
{
	struct dec_info *p_dec_info;
	dma_addr_t rd_ptr;
	dma_addr_t wr_ptr;
	int room;
	struct vpu_device *vpu_dev = inst->dev;
	int ret;

	p_dec_info = &inst->codec_info->dec_info;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	rd_ptr = wave5_dec_get_rd_ptr(inst);
	mutex_unlock(&vpu_dev->hw_lock);

	wr_ptr = p_dec_info->stream_wr_ptr;

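	/*
	 * Compute the free space in the circular bitstream buffer; one byte is
	 * held in reserve so that rd_ptr == wr_ptr can only mean "empty".
	 */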
	if (wr_ptr < rd_ptr)
		room = rd_ptr - wr_ptr;
	else
		room = (p_dec_info->stream_buf_end_addr - wr_ptr) +
			(rd_ptr - p_dec_info->stream_buf_start_addr);
	room--;

	if (prd_ptr)
		*prd_ptr = rd_ptr;
	if (pwr_ptr)
		*pwr_ptr = wr_ptr;
	if (size)
		*size = room;

	return 0;
}

int wave5_vpu_dec_update_bitstream_buffer(struct vpu_instance *inst, size_t size)
{
	struct dec_info *p_dec_info;
	dma_addr_t wr_ptr;
	dma_addr_t rd_ptr;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (!inst->codec_info)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;
	wr_ptr = p_dec_info->stream_wr_ptr;
	rd_ptr = p_dec_info->stream_rd_ptr;

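	/*
	 * Advance the write pointer by 'size' bytes, wrapping around at the end
	 * of the ring buffer; updates that would overtake the read pointer are
	 * rejected. A size of zero only updates the bitstream flag below.
	 */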
	if (size > 0) {
		if (wr_ptr < rd_ptr && rd_ptr <= wr_ptr + size)
			return -EINVAL;

		wr_ptr += size;

		if (wr_ptr > p_dec_info->stream_buf_end_addr) {
			u32 room = wr_ptr - p_dec_info->stream_buf_end_addr;

			wr_ptr = p_dec_info->stream_buf_start_addr;
			wr_ptr += room;
		} else if (wr_ptr == p_dec_info->stream_buf_end_addr) {
			wr_ptr = p_dec_info->stream_buf_start_addr;
		}

		p_dec_info->stream_wr_ptr = wr_ptr;
		p_dec_info->stream_rd_ptr = rd_ptr;
	}

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	ret = wave5_vpu_dec_set_bitstream_flag(inst, (size == 0));
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_start_one_frame(struct vpu_instance *inst, u32 *res_fail)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (p_dec_info->stride == 0) /* this means frame buffers have not been registered. */
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_decode(inst, res_fail);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_set_rd_ptr(struct vpu_instance *inst, dma_addr_t addr, int update_wr_ptr)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_dec_set_rd_ptr(inst, addr);

	p_dec_info->stream_rd_ptr = addr;
	if (update_wr_ptr)
		p_dec_info->stream_wr_ptr = addr;

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

dma_addr_t wave5_vpu_dec_get_rd_ptr(struct vpu_instance *inst)
{
	int ret;
	dma_addr_t rd_ptr;

	ret = mutex_lock_interruptible(&inst->dev->hw_lock);
	if (ret)
		return ret;

	rd_ptr = wave5_dec_get_rd_ptr(inst);

	mutex_unlock(&inst->dev->hw_lock);

	return rd_ptr;
}

int wave5_vpu_dec_get_output_info(struct vpu_instance *inst, struct dec_output_info *info)
{
	struct dec_info *p_dec_info;
	int ret;
	struct vpu_rect rect_info;
	u32 val;
	u32 decoded_index;
	u32 disp_idx;
	u32 max_dec_index;
	struct vpu_device *vpu_dev = inst->dev;
	struct dec_output_info *disp_info;

	if (!info)
		return -EINVAL;

	p_dec_info = &inst->codec_info->dec_info;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	memset(info, 0, sizeof(*info));

	ret = wave5_vpu_dec_get_result(inst, info);
	if (ret) {
		info->rd_ptr = p_dec_info->stream_rd_ptr;
		info->wr_ptr = p_dec_info->stream_wr_ptr;
		goto err_out;
	}

	decoded_index = info->index_frame_decoded;

	/* calculate display frame region */
	val = 0;
	rect_info.left = 0;
	rect_info.right = 0;
	rect_info.top = 0;
	rect_info.bottom = 0;

	if (decoded_index < WAVE5_MAX_FBS) {
		if (inst->std == W_HEVC_DEC || inst->std == W_AVC_DEC)
			rect_info = p_dec_info->initial_info.pic_crop_rect;

		if (inst->std == W_HEVC_DEC)
			p_dec_info->dec_out_info[decoded_index].decoded_poc = info->decoded_poc;

		p_dec_info->dec_out_info[decoded_index].rc_decoded = rect_info;
	}
	info->rc_decoded = rect_info;

	disp_idx = info->index_frame_display;
	if (info->index_frame_display >= 0 && info->index_frame_display < WAVE5_MAX_FBS) {
		disp_info = &p_dec_info->dec_out_info[disp_idx];
		if (info->index_frame_display != info->index_frame_decoded) {
			/*
			 * when index_frame_decoded < 0, and index_frame_display >= 0
			 * info->dec_pic_width and info->dec_pic_height are still valid
			 * but those of p_dec_info->dec_out_info[disp_idx] are invalid in VP9
			 */
			info->disp_pic_width = disp_info->dec_pic_width;
			info->disp_pic_height = disp_info->dec_pic_height;
		} else {
			info->disp_pic_width = info->dec_pic_width;
			info->disp_pic_height = info->dec_pic_height;
		}

		info->rc_display = disp_info->rc_decoded;

	} else {
		info->rc_display.left = 0;
		info->rc_display.right = 0;
		info->rc_display.top = 0;
		info->rc_display.bottom = 0;
		info->disp_pic_width = 0;
		info->disp_pic_height = 0;
	}

	p_dec_info->stream_rd_ptr = wave5_dec_get_rd_ptr(inst);
	p_dec_info->frame_display_flag = vpu_read_reg(vpu_dev, W5_RET_DEC_DISP_IDC);

	val = p_dec_info->num_of_decoding_fbs; //fb_offset

	max_dec_index = (p_dec_info->num_of_decoding_fbs > p_dec_info->num_of_display_fbs) ?
		p_dec_info->num_of_decoding_fbs : p_dec_info->num_of_display_fbs;

	if (info->index_frame_display >= 0 &&
	    info->index_frame_display < (int)max_dec_index)
		info->disp_frame = inst->frame_buf[val + info->index_frame_display];

	info->rd_ptr = p_dec_info->stream_rd_ptr;
	info->wr_ptr = p_dec_info->stream_wr_ptr;
	info->frame_display_flag = p_dec_info->frame_display_flag;

	info->sequence_no = p_dec_info->initial_info.sequence_no;
	if (decoded_index < WAVE5_MAX_FBS)
		p_dec_info->dec_out_info[decoded_index] = *info;

	if (disp_idx < WAVE5_MAX_FBS)
		info->disp_frame.sequence_no = info->sequence_no;

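	/*
	 * A mid-stream sequence change replaces the cached initial info with
	 * the newly parsed sequence parameters and bumps the sequence number.
	 */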
	if (info->sequence_changed) {
		memcpy((void *)&p_dec_info->initial_info, (void *)&p_dec_info->new_seq_info,
		       sizeof(struct dec_initial_info));
		p_dec_info->initial_info.sequence_no++;
	}

err_out:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_clr_disp_flag(struct vpu_instance *inst, int index)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (index >= p_dec_info->num_of_display_fbs)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	ret = wave5_dec_clr_disp_flag(inst, index);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_set_disp_flag(struct vpu_instance *inst, int index)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret = 0;
	struct vpu_device *vpu_dev = inst->dev;

	if (index >= p_dec_info->num_of_display_fbs)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;
	ret = wave5_dec_set_disp_flag(inst, index);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_dec_reset_framebuffer(struct vpu_instance *inst, unsigned int index)
{
	if (index >= MAX_REG_FRAME)
		return -EINVAL;

	if (inst->frame_vbuf[index].size == 0)
		return -EINVAL;

	wave5_vdi_free_dma_memory(inst->dev, &inst->frame_vbuf[index]);

	return 0;
}

int wave5_vpu_dec_give_command(struct vpu_instance *inst, enum codec_command cmd, void *parameter)
{
	struct dec_info *p_dec_info = &inst->codec_info->dec_info;
	int ret = 0;

	switch (cmd) {
	case DEC_GET_QUEUE_STATUS: {
		struct queue_status_info *queue_info = parameter;

		queue_info->instance_queue_count = p_dec_info->instance_queue_count;
		queue_info->report_queue_count = p_dec_info->report_queue_count;
		break;
	}
	case DEC_RESET_FRAMEBUF_INFO: {
		int i;

		for (i = 0; i < MAX_REG_FRAME; i++) {
			ret = wave5_vpu_dec_reset_framebuffer(inst, i);
			if (ret)
				break;
		}

		for (i = 0; i < MAX_REG_FRAME; i++) {
			ret = reset_auxiliary_buffers(inst, i);
			if (ret)
				break;
		}

		wave5_vdi_free_dma_memory(inst->dev, &p_dec_info->vb_task);
		break;
	}
	case DEC_GET_SEQ_INFO: {
		struct dec_initial_info *seq_info = parameter;

		*seq_info = p_dec_info->initial_info;
		break;
	}

	default:
		return -EINVAL;
	}

	return ret;
}

int wave5_vpu_enc_open(struct vpu_instance *inst, struct enc_open_param *open_param)
{
	struct enc_info *p_enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = wave5_vpu_enc_check_open_param(inst, open_param);
	if (ret)
		return ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	if (!wave5_vpu_is_init(vpu_dev)) {
		mutex_unlock(&vpu_dev->hw_lock);
		return -ENODEV;
	}

	p_enc_info = &inst->codec_info->enc_info;
	p_enc_info->open_param = *open_param;

	ret = wave5_vpu_build_up_enc_param(vpu_dev->dev, inst, open_param);
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_enc_close(struct vpu_instance *inst, u32 *fail_res)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	int retry = 0;
	struct vpu_device *vpu_dev = inst->dev;

	*fail_res = 0;
	if (!inst->codec_info)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	do {
		ret = wave5_vpu_enc_finish_seq(inst, fail_res);
		if (ret < 0 && *fail_res != WAVE5_SYSERR_VPU_STILL_RUNNING) {
			dev_warn(inst->dev->dev, "enc_finish_seq timed out\n");
			mutex_unlock(&vpu_dev->hw_lock);
			return ret;
		}

		if (*fail_res == WAVE5_SYSERR_VPU_STILL_RUNNING &&
		    retry++ >= MAX_FIRMWARE_CALL_RETRY) {
			mutex_unlock(&vpu_dev->hw_lock);
			return -ETIMEDOUT;
		}
	} while (ret != 0);

	dev_dbg(inst->dev->dev, "%s: enc_finish_seq complete\n", __func__);

	wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_work);

	if (inst->std == W_HEVC_ENC || inst->std == W_AVC_ENC) {
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_sub_sam_buf);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_mv);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_fbc_y_tbl);
		wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_fbc_c_tbl);
	}

	wave5_vdi_free_dma_memory(vpu_dev, &p_enc_info->vb_task);

	mutex_unlock(&vpu_dev->hw_lock);

	return 0;
}

int wave5_vpu_enc_register_frame_buffer(struct vpu_instance *inst, unsigned int num,
					unsigned int stride, int height,
					enum tiled_map_type map_type)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;
	unsigned int size_luma, size_chroma;
	int i;

	if (p_enc_info->stride)
		return -EINVAL;

	if (!p_enc_info->initial_info_obtained)
		return -EINVAL;

	if (num < p_enc_info->initial_info.min_frame_buffer_count)
		return -EINVAL;

	if (stride == 0 || stride % 8 != 0)
		return -EINVAL;

	if (height <= 0)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	p_enc_info->num_frame_buffers = num;
	p_enc_info->stride = stride;

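	/*
	 * Per-plane sizes used to fill in the reconstructed frame buffer
	 * entries below; the chroma size uses half the luma stride, aligned
	 * to 16.
	 */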
	size_luma = stride * height;
	size_chroma = ALIGN(stride / 2, 16) * height;

	for (i = 0; i < num; i++) {
		if (!inst->frame_buf[i].update_fb_info)
			continue;

		inst->frame_buf[i].update_fb_info = false;
		inst->frame_buf[i].stride = stride;
		inst->frame_buf[i].height = height;
		inst->frame_buf[i].map_type = COMPRESSED_FRAME_MAP;
		inst->frame_buf[i].buf_y_size = size_luma;
		inst->frame_buf[i].buf_cb = inst->frame_buf[i].buf_y + size_luma;
		inst->frame_buf[i].buf_cb_size = size_chroma;
		inst->frame_buf[i].buf_cr_size = 0;
	}

	ret = wave5_vpu_enc_register_framebuffer(inst->dev->dev, inst, &inst->frame_buf[0],
						 COMPRESSED_FRAME_MAP,
						 p_enc_info->num_frame_buffers);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

static int wave5_check_enc_param(struct vpu_instance *inst, struct enc_param *param)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;

	if (!param)
		return -EINVAL;

	if (!param->source_frame)
		return -EINVAL;

	if (p_enc_info->open_param.bit_rate == 0 && inst->std == W_HEVC_ENC) {
		if (param->pic_stream_buffer_addr % 16 || param->pic_stream_buffer_size == 0)
			return -EINVAL;
	}
	if (param->pic_stream_buffer_addr % 8 || param->pic_stream_buffer_size == 0)
		return -EINVAL;

	return 0;
}

int wave5_vpu_enc_start_one_frame(struct vpu_instance *inst, struct enc_param *param, u32 *fail_res)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	*fail_res = 0;

	if (p_enc_info->stride == 0) /* this means frame buffers have not been registered. */
		return -EINVAL;

	ret = wave5_check_enc_param(inst, param);
	if (ret)
		return ret;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

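	/*
	 * Remember the timestamp of this source frame so it can be matched
	 * with the encode result in wave5_vpu_enc_get_output_info().
	 */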
	p_enc_info->pts_map[param->src_idx] = param->pts;

	ret = wave5_vpu_encode(inst, param, fail_res);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_enc_get_output_info(struct vpu_instance *inst, struct enc_output_info *info)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_enc_get_result(inst, info);
	if (ret) {
		info->pts = 0;
		goto unlock;
	}

	if (info->recon_frame_index >= 0)
		info->pts = p_enc_info->pts_map[info->enc_src_idx];

unlock:
	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_enc_give_command(struct vpu_instance *inst, enum codec_command cmd, void *parameter)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;

	switch (cmd) {
	case ENABLE_ROTATION:
		p_enc_info->rotation_enable = true;
		break;
	case ENABLE_MIRRORING:
		p_enc_info->mirror_enable = true;
		break;
	case SET_MIRROR_DIRECTION: {
		enum mirror_direction mir_dir;

		mir_dir = *(enum mirror_direction *)parameter;
		if (mir_dir != MIRDIR_NONE && mir_dir != MIRDIR_HOR &&
		    mir_dir != MIRDIR_VER && mir_dir != MIRDIR_HOR_VER)
			return -EINVAL;
		p_enc_info->mirror_direction = mir_dir;
		break;
	}
	case SET_ROTATION_ANGLE: {
		int angle;

		angle = *(int *)parameter;
		if (angle && angle != 90 && angle != 180 && angle != 270)
			return -EINVAL;
		if (p_enc_info->initial_info_obtained && (angle == 90 || angle == 270))
			return -EINVAL;
		p_enc_info->rotation_angle = angle;
		break;
	}
	case ENC_GET_QUEUE_STATUS: {
		struct queue_status_info *queue_info = parameter;

		queue_info->instance_queue_count = p_enc_info->instance_queue_count;
		queue_info->report_queue_count = p_enc_info->report_queue_count;
		break;
	}
	default:
		return -EINVAL;
	}
	return 0;
}

int wave5_vpu_enc_issue_seq_init(struct vpu_instance *inst)
{
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_enc_init_seq(inst);

	mutex_unlock(&vpu_dev->hw_lock);

	return ret;
}

int wave5_vpu_enc_complete_seq_init(struct vpu_instance *inst, struct enc_initial_info *info)
{
	struct enc_info *p_enc_info = &inst->codec_info->enc_info;
	int ret;
	struct vpu_device *vpu_dev = inst->dev;

	if (!info)
		return -EINVAL;

	ret = mutex_lock_interruptible(&vpu_dev->hw_lock);
	if (ret)
		return ret;

	ret = wave5_vpu_enc_get_seq_info(inst, info);
	if (ret) {
		p_enc_info->initial_info_obtained = false;
		mutex_unlock(&vpu_dev->hw_lock);
		return ret;
	}

	p_enc_info->initial_info_obtained = true;
	p_enc_info->initial_info = *info;

	mutex_unlock(&vpu_dev->hw_lock);

	return 0;
}