1 /**************************************************************************
2 *
3 * Copyright 2017 Advanced Micro Devices, Inc.
4 *
5 * SPDX-License-Identifier: MIT
6 *
7 **************************************************************************/
8
9 #include "radeon_vcn_enc.h"
10 #include "ac_vcn_enc_av1_default_cdf.h"
11 #include "ac_debug.h"
12
13 #include "pipe/p_video_codec.h"
14 #include "radeon_video.h"
15 #include "radeonsi/si_pipe.h"
16 #include "util/u_memory.h"
17 #include "util/u_video.h"
18 #include "vl/vl_video_buffer.h"
19
20 /* set quality modes from the input */
radeon_vcn_enc_quality_modes(struct radeon_encoder * enc,struct pipe_enc_quality_modes * in)21 static void radeon_vcn_enc_quality_modes(struct radeon_encoder *enc,
22 struct pipe_enc_quality_modes *in)
23 {
24 rvcn_enc_quality_modes_t *p = &enc->enc_pic.quality_modes;
25 struct si_screen *sscreen = (struct si_screen *)enc->screen;
26
27 p->preset_mode = in->preset_mode > RENCODE_PRESET_MODE_HIGH_QUALITY
28 ? RENCODE_PRESET_MODE_HIGH_QUALITY
29 : in->preset_mode;
30
31 if (u_reduce_video_profile(enc->base.profile) != PIPE_VIDEO_FORMAT_AV1 &&
32 p->preset_mode == RENCODE_PRESET_MODE_HIGH_QUALITY)
33 p->preset_mode = RENCODE_PRESET_MODE_QUALITY;
34
35 p->pre_encode_mode = in->pre_encode_mode ? RENCODE_PREENCODE_MODE_4X
36 : RENCODE_PREENCODE_MODE_NONE;
37
38 if (enc->enc_pic.rc_session_init.rate_control_method == RENCODE_RATE_CONTROL_METHOD_QUALITY_VBR)
39 p->pre_encode_mode = RENCODE_PREENCODE_MODE_4X;
40
41 /* Disabling 2pass encoding for VCN 5.0
42 * This is a temporary limitation only for VCN 5.0 due to HW,
43 * once verified in future VCN 5.X versions, it will be enabled again.
44 */
45 if (sscreen->info.vcn_ip_version >= VCN_5_0_0)
46 p->pre_encode_mode = RENCODE_PREENCODE_MODE_NONE;
47
48 p->vbaq_mode = in->vbaq_mode ? RENCODE_VBAQ_AUTO : RENCODE_VBAQ_NONE;
49
50 if (enc->enc_pic.rc_session_init.rate_control_method == RENCODE_RATE_CONTROL_METHOD_NONE)
51 p->vbaq_mode = RENCODE_VBAQ_NONE;
52
53 enc->enc_pic.quality_params.vbaq_mode = p->vbaq_mode;
54 enc->enc_pic.quality_params.scene_change_sensitivity = 0;
55 enc->enc_pic.quality_params.scene_change_min_idr_interval = 0;
56 enc->enc_pic.quality_params.two_pass_search_center_map_mode =
57 (enc->enc_pic.quality_modes.pre_encode_mode &&
58 !enc->enc_pic.spec_misc.b_picture_enabled) ? 1 : 0;
59 enc->enc_pic.quality_params.vbaq_strength = 0;
60 }
61
62 /* to process invalid frame rate */
radeon_vcn_enc_invalid_frame_rate(uint32_t * den,uint32_t * num)63 static void radeon_vcn_enc_invalid_frame_rate(uint32_t *den, uint32_t *num)
64 {
65 if (*den == 0 || *num == 0) {
66 *den = 1;
67 *num = 30;
68 }
69 }
70
radeon_vcn_per_frame_integer(uint32_t bitrate,uint32_t den,uint32_t num)71 static uint32_t radeon_vcn_per_frame_integer(uint32_t bitrate, uint32_t den, uint32_t num)
72 {
73 uint64_t rate_den = (uint64_t)bitrate * (uint64_t)den;
74
75 return (uint32_t)(rate_den/num);
76 }
77
radeon_vcn_per_frame_frac(uint32_t bitrate,uint32_t den,uint32_t num)78 static uint32_t radeon_vcn_per_frame_frac(uint32_t bitrate, uint32_t den, uint32_t num)
79 {
80 uint64_t rate_den = (uint64_t)bitrate * (uint64_t)den;
81 uint64_t remainder = rate_den % num;
82
83 return (uint32_t)((remainder << 32) / num);
84 }
85
86 /* block length for av1 and hevc is the same, 64, for avc 16 */
radeon_vcn_enc_blocks_in_frame(struct radeon_encoder * enc,uint32_t * width_in_block,uint32_t * height_in_block)87 static uint32_t radeon_vcn_enc_blocks_in_frame(struct radeon_encoder *enc,
88 uint32_t *width_in_block,
89 uint32_t *height_in_block)
90 {
91 bool is_h264 = u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC;
92 uint32_t block_length = is_h264 ? PIPE_H264_MB_SIZE : PIPE_H265_ENC_CTB_SIZE;
93
94 *width_in_block = PIPE_ALIGN_IN_BLOCK_SIZE(enc->base.width, block_length);
95 *height_in_block = PIPE_ALIGN_IN_BLOCK_SIZE(enc->base.height, block_length);
96
97 return block_length;
98 }
99
radeon_vcn_enc_get_intra_refresh_param(struct radeon_encoder * enc,bool need_filter_overlap,struct pipe_enc_intra_refresh * intra_refresh)100 static void radeon_vcn_enc_get_intra_refresh_param(struct radeon_encoder *enc,
101 bool need_filter_overlap,
102 struct pipe_enc_intra_refresh *intra_refresh)
103 {
104 uint32_t width_in_block, height_in_block;
105
106 enc->enc_pic.intra_refresh.intra_refresh_mode = RENCODE_INTRA_REFRESH_MODE_NONE;
107 /* some exceptions where intra-refresh is disabled:
108 * 1. if B frame is enabled
109 * 2. if SVC (number of temproal layers is larger than 1) is enabled
110 */
111 if (enc->enc_pic.spec_misc.b_picture_enabled || enc->enc_pic.num_temporal_layers > 1) {
112 enc->enc_pic.intra_refresh.region_size = 0;
113 enc->enc_pic.intra_refresh.offset = 0;
114 return;
115 }
116
117 radeon_vcn_enc_blocks_in_frame(enc, &width_in_block, &height_in_block);
118
119 switch(intra_refresh->mode) {
120 case INTRA_REFRESH_MODE_UNIT_ROWS:
121 if (intra_refresh->offset < height_in_block)
122 enc->enc_pic.intra_refresh.intra_refresh_mode
123 = RENCODE_INTRA_REFRESH_MODE_CTB_MB_ROWS;
124 break;
125 case INTRA_REFRESH_MODE_UNIT_COLUMNS:
126 if (intra_refresh->offset < width_in_block)
127 enc->enc_pic.intra_refresh.intra_refresh_mode
128 = RENCODE_INTRA_REFRESH_MODE_CTB_MB_COLUMNS;
129 break;
130 case INTRA_REFRESH_MODE_NONE:
131 default:
132 break;
133 };
134
135 /* with loop filters (avc/hevc/av1) enabled the region_size has to increase 1 to
136 * get overlapped (av1 is enabling it all the time). The region_size and offset
137 * require to be in unit of MB or CTB or SB according to different codecs.
138 */
139 if (enc->enc_pic.intra_refresh.intra_refresh_mode != RENCODE_INTRA_REFRESH_MODE_NONE) {
140 enc->enc_pic.intra_refresh.region_size = (need_filter_overlap) ?
141 intra_refresh->region_size + 1 :
142 intra_refresh->region_size;
143 enc->enc_pic.intra_refresh.offset = intra_refresh->offset;
144 } else {
145 enc->enc_pic.intra_refresh.region_size = 0;
146 enc->enc_pic.intra_refresh.offset = 0;
147 }
148 }
149
radeon_vcn_enc_get_roi_param(struct radeon_encoder * enc,struct pipe_enc_roi * roi)150 static void radeon_vcn_enc_get_roi_param(struct radeon_encoder *enc,
151 struct pipe_enc_roi *roi)
152 {
153 struct si_screen *sscreen = (struct si_screen *)enc->screen;
154 bool is_av1 = u_reduce_video_profile(enc->base.profile)
155 == PIPE_VIDEO_FORMAT_AV1;
156 rvcn_enc_qp_map_t *qp_map = &enc->enc_pic.enc_qp_map;
157
158 if (!roi->num)
159 enc->enc_pic.enc_qp_map.qp_map_type = RENCODE_QP_MAP_TYPE_NONE;
160 else {
161 uint32_t width_in_block, height_in_block;
162 uint32_t block_length;
163 int32_t i, j, pa_format = 0;
164
165 qp_map->version = sscreen->info.vcn_ip_version >= VCN_5_0_0
166 ? RENCODE_QP_MAP_VCN5 : RENCODE_QP_MAP_LEGACY;
167
168 /* rate control is using a different qp map type, in case of below
169 * vcn_5_0_0 */
170 if (enc->enc_pic.rc_session_init.rate_control_method &&
171 (qp_map->version == RENCODE_QP_MAP_LEGACY)) {
172 enc->enc_pic.enc_qp_map.qp_map_type = RENCODE_QP_MAP_TYPE_MAP_PA;
173 pa_format = 1;
174 }
175 else
176 enc->enc_pic.enc_qp_map.qp_map_type = RENCODE_QP_MAP_TYPE_DELTA;
177
178 block_length = radeon_vcn_enc_blocks_in_frame(enc, &width_in_block, &height_in_block);
179
180 qp_map->width_in_block = width_in_block;
181 qp_map->height_in_block = height_in_block;
182
183 for (i = RENCODE_QP_MAP_MAX_REGIONS - 1; i >= roi->num; i--)
184 enc->enc_pic.enc_qp_map.map[i].is_valid = false;
185
186 /* reverse the map sequence */
187 for (j = 0; i >= 0; i--, j++) {
188 struct rvcn_enc_qp_map_region *map = &enc->enc_pic.enc_qp_map.map[j];
189 struct pipe_enc_region_in_roi *region = &roi->region[i];
190
191 map->is_valid = region->valid;
192 if (region->valid) {
193 int32_t av1_qi_value;
194 /* mapped av1 qi into the legacy qp range by dividing by 5 and
195 * rounding up in any rate control mode.
196 */
197 if (is_av1 && (pa_format || (qp_map->version == RENCODE_QP_MAP_VCN5))) {
198 if (region->qp_value > 0)
199 av1_qi_value = (region->qp_value + 2) / 5;
200 else if (region->qp_value < 0)
201 av1_qi_value = (region->qp_value - 2) / 5;
202 else
203 av1_qi_value = region->qp_value;
204 map->qp_delta = av1_qi_value;
205 } else
206 map->qp_delta = region->qp_value;
207
208 map->x_in_unit = CLAMP((region->x / block_length), 0, width_in_block - 1);
209 map->y_in_unit = CLAMP((region->y / block_length), 0, height_in_block - 1);
210 map->width_in_unit = CLAMP((region->width / block_length), 0, width_in_block);
211 map->height_in_unit = CLAMP((region->height / block_length), 0, width_in_block);
212 }
213 }
214 }
215 }
216
radeon_vcn_enc_get_latency_param(struct radeon_encoder * enc)217 static void radeon_vcn_enc_get_latency_param(struct radeon_encoder *enc)
218 {
219 struct si_screen *sscreen = (struct si_screen *)enc->screen;
220
221 enc->enc_pic.enc_latency.encode_latency =
222 sscreen->debug_flags & DBG(LOW_LATENCY_ENCODE) ? 1000 : 0;
223 }
224
radeon_vcn_enc_h264_get_session_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)225 static void radeon_vcn_enc_h264_get_session_param(struct radeon_encoder *enc,
226 struct pipe_h264_enc_picture_desc *pic)
227 {
228 if (enc->enc_pic.session_init.aligned_picture_width)
229 return;
230
231 uint32_t align_width = PIPE_H264_MB_SIZE;
232 uint32_t align_height = PIPE_H264_MB_SIZE;
233
234 enc->enc_pic.session_init.encode_standard = RENCODE_ENCODE_STANDARD_H264;
235 enc->enc_pic.session_init.aligned_picture_width = align(enc->base.width, align_width);
236 enc->enc_pic.session_init.aligned_picture_height = align(enc->base.height, align_height);
237
238 uint32_t padding_width = 0;
239 uint32_t padding_height = 0;
240 uint32_t max_padding_width = align_width - 2;
241 uint32_t max_padding_height = align_height - 2;
242
243 if (enc->enc_pic.session_init.aligned_picture_width > enc->source->width)
244 padding_width = enc->enc_pic.session_init.aligned_picture_width - enc->source->width;
245 if (enc->enc_pic.session_init.aligned_picture_height > enc->source->height)
246 padding_height = enc->enc_pic.session_init.aligned_picture_height - enc->source->height;
247
248 /* Input surface can be smaller if the difference is within padding bounds. */
249 if (padding_width > max_padding_width || padding_height > max_padding_height)
250 RADEON_ENC_ERR("Input surface size doesn't match aligned size\n");
251
252 if (pic->seq.enc_frame_cropping_flag) {
253 uint32_t pad_w =
254 (pic->seq.enc_frame_crop_left_offset + pic->seq.enc_frame_crop_right_offset) * 2;
255 uint32_t pad_h =
256 (pic->seq.enc_frame_crop_top_offset + pic->seq.enc_frame_crop_bottom_offset) * 2;
257 padding_width = CLAMP(pad_w, padding_width, max_padding_width);
258 padding_height = CLAMP(pad_h, padding_height, max_padding_height);
259 }
260
261 enc->enc_pic.session_init.padding_width = padding_width;
262 enc->enc_pic.session_init.padding_height = padding_height;
263 }
264
radeon_vcn_enc_h264_get_dbk_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)265 static void radeon_vcn_enc_h264_get_dbk_param(struct radeon_encoder *enc,
266 struct pipe_h264_enc_picture_desc *pic)
267 {
268 enc->enc_pic.h264_deblock.disable_deblocking_filter_idc =
269 CLAMP(pic->dbk.disable_deblocking_filter_idc, 0, 2);
270 enc->enc_pic.h264_deblock.alpha_c0_offset_div2 = pic->dbk.alpha_c0_offset_div2;
271 enc->enc_pic.h264_deblock.beta_offset_div2 = pic->dbk.beta_offset_div2;
272 enc->enc_pic.h264_deblock.cb_qp_offset = pic->pic_ctrl.chroma_qp_index_offset;
273 enc->enc_pic.h264_deblock.cr_qp_offset = pic->pic_ctrl.second_chroma_qp_index_offset;
274 }
275
radeon_vcn_enc_h264_get_spec_misc_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)276 static void radeon_vcn_enc_h264_get_spec_misc_param(struct radeon_encoder *enc,
277 struct pipe_h264_enc_picture_desc *pic)
278 {
279 struct si_screen *sscreen = (struct si_screen *)enc->screen;
280
281 enc->enc_pic.spec_misc.profile_idc = u_get_h264_profile_idc(enc->base.profile);
282 if (enc->enc_pic.spec_misc.profile_idc >= PIPE_VIDEO_PROFILE_MPEG4_AVC_MAIN &&
283 enc->enc_pic.spec_misc.profile_idc != PIPE_VIDEO_PROFILE_MPEG4_AVC_EXTENDED)
284 enc->enc_pic.spec_misc.cabac_enable = pic->pic_ctrl.enc_cabac_enable;
285 else
286 enc->enc_pic.spec_misc.cabac_enable = false;
287
288 enc->enc_pic.spec_misc.cabac_init_idc = enc->enc_pic.spec_misc.cabac_enable ?
289 pic->pic_ctrl.enc_cabac_init_idc : 0;
290 enc->enc_pic.spec_misc.deblocking_filter_control_present_flag =
291 pic->pic_ctrl.deblocking_filter_control_present_flag;
292 enc->enc_pic.spec_misc.redundant_pic_cnt_present_flag =
293 pic->pic_ctrl.redundant_pic_cnt_present_flag;
294 enc->enc_pic.spec_misc.b_picture_enabled = !!pic->seq.max_num_reorder_frames;
295 enc->enc_pic.spec_misc.constrained_intra_pred_flag =
296 pic->pic_ctrl.constrained_intra_pred_flag;
297 enc->enc_pic.spec_misc.half_pel_enabled = 1;
298 enc->enc_pic.spec_misc.quarter_pel_enabled = 1;
299 enc->enc_pic.spec_misc.weighted_bipred_idc = 0;
300 enc->enc_pic.spec_misc.transform_8x8_mode =
301 sscreen->info.vcn_ip_version >= VCN_5_0_0 &&
302 pic->pic_ctrl.transform_8x8_mode_flag;
303 enc->enc_pic.spec_misc.level_idc = pic->seq.level_idc;
304 }
305
radeon_vcn_enc_h264_get_rc_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)306 static void radeon_vcn_enc_h264_get_rc_param(struct radeon_encoder *enc,
307 struct pipe_h264_enc_picture_desc *pic)
308 {
309 uint32_t frame_rate_den, frame_rate_num, max_qp;
310
311 enc->enc_pic.num_temporal_layers = pic->seq.num_temporal_layers ? pic->seq.num_temporal_layers : 1;
312 enc->enc_pic.temporal_id = MIN2(pic->pic_ctrl.temporal_id, enc->enc_pic.num_temporal_layers - 1);
313
314 for (int i = 0; i < enc->enc_pic.num_temporal_layers; i++) {
315 enc->enc_pic.rc_layer_init[i].target_bit_rate = pic->rate_ctrl[i].target_bitrate;
316 enc->enc_pic.rc_layer_init[i].peak_bit_rate = pic->rate_ctrl[i].peak_bitrate;
317 frame_rate_den = pic->rate_ctrl[i].frame_rate_den;
318 frame_rate_num = pic->rate_ctrl[i].frame_rate_num;
319 radeon_vcn_enc_invalid_frame_rate(&frame_rate_den, &frame_rate_num);
320 enc->enc_pic.rc_layer_init[i].frame_rate_den = frame_rate_den;
321 enc->enc_pic.rc_layer_init[i].frame_rate_num = frame_rate_num;
322 enc->enc_pic.rc_layer_init[i].vbv_buffer_size = pic->rate_ctrl[i].vbv_buffer_size;
323 enc->enc_pic.rc_layer_init[i].avg_target_bits_per_picture =
324 radeon_vcn_per_frame_integer(pic->rate_ctrl[i].target_bitrate,
325 frame_rate_den,
326 frame_rate_num);
327 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_integer =
328 radeon_vcn_per_frame_integer(pic->rate_ctrl[i].peak_bitrate,
329 frame_rate_den,
330 frame_rate_num);
331 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_fractional =
332 radeon_vcn_per_frame_frac(pic->rate_ctrl[i].peak_bitrate,
333 frame_rate_den,
334 frame_rate_num);
335 }
336 enc->enc_pic.rc_session_init.vbv_buffer_level = pic->rate_ctrl[0].vbv_buf_lv;
337 enc->enc_pic.rc_per_pic.qp_obs = pic->quant_i_frames;
338 enc->enc_pic.rc_per_pic.min_qp_app_obs = pic->rate_ctrl[0].min_qp;
339 enc->enc_pic.rc_per_pic.max_qp_app_obs = pic->rate_ctrl[0].max_qp ?
340 pic->rate_ctrl[0].max_qp : 51;
341 enc->enc_pic.rc_per_pic.qp_i = pic->quant_i_frames;
342 enc->enc_pic.rc_per_pic.qp_p = pic->quant_p_frames;
343 enc->enc_pic.rc_per_pic.qp_b = pic->quant_b_frames;
344 enc->enc_pic.rc_per_pic.min_qp_i = pic->rate_ctrl[0].min_qp;
345 enc->enc_pic.rc_per_pic.min_qp_p = pic->rate_ctrl[0].min_qp;
346 enc->enc_pic.rc_per_pic.min_qp_b = pic->rate_ctrl[0].min_qp;
347 max_qp = pic->rate_ctrl[0].max_qp ? pic->rate_ctrl[0].max_qp : 51;
348 enc->enc_pic.rc_per_pic.max_qp_i = max_qp;
349 enc->enc_pic.rc_per_pic.max_qp_p = max_qp;
350 enc->enc_pic.rc_per_pic.max_qp_b = max_qp;
351 enc->enc_pic.rc_per_pic.enabled_filler_data = 0;
352 enc->enc_pic.rc_per_pic.skip_frame_enable = pic->rate_ctrl[0].skip_frame_enable;
353 enc->enc_pic.rc_per_pic.enforce_hrd = pic->rate_ctrl[0].enforce_hrd;
354 enc->enc_pic.rc_per_pic.qvbr_quality_level = pic->rate_ctrl[0].vbr_quality_factor;
355
356 switch (pic->rate_ctrl[0].rate_ctrl_method) {
357 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE:
358 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
359 break;
360 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP:
361 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT:
362 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_CBR;
363 enc->enc_pic.rc_per_pic.enabled_filler_data = pic->rate_ctrl[0].fill_data_enable;
364 break;
365 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP:
366 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE:
367 enc->enc_pic.rc_session_init.rate_control_method =
368 RENCODE_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR;
369 break;
370 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_QUALITY_VARIABLE:
371 enc->enc_pic.rc_session_init.rate_control_method =
372 RENCODE_RATE_CONTROL_METHOD_QUALITY_VBR;
373 break;
374 default:
375 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
376 }
377 enc->enc_pic.rc_per_pic.max_au_size_obs = pic->rate_ctrl[0].max_au_size;
378 enc->enc_pic.rc_per_pic.max_au_size_i = pic->rate_ctrl[0].max_au_size;
379 enc->enc_pic.rc_per_pic.max_au_size_p = pic->rate_ctrl[0].max_au_size;
380 enc->enc_pic.rc_per_pic.max_au_size_b = pic->rate_ctrl[0].max_au_size;
381 }
382
radeon_vcn_enc_h264_get_slice_ctrl_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)383 static void radeon_vcn_enc_h264_get_slice_ctrl_param(struct radeon_encoder *enc,
384 struct pipe_h264_enc_picture_desc *pic)
385 {
386 uint32_t num_mbs_total, num_mbs_in_slice;
387
388 num_mbs_total =
389 PIPE_ALIGN_IN_BLOCK_SIZE(enc->base.width, PIPE_H264_MB_SIZE) *
390 PIPE_ALIGN_IN_BLOCK_SIZE(enc->base.height, PIPE_H264_MB_SIZE);
391
392 if (pic->num_slice_descriptors <= 1) {
393 num_mbs_in_slice = num_mbs_total;
394 } else {
395 bool use_app_config = true;
396 num_mbs_in_slice = pic->slices_descriptors[0].num_macroblocks;
397
398 /* All slices must have equal size */
399 for (unsigned i = 1; i < pic->num_slice_descriptors - 1; i++) {
400 if (num_mbs_in_slice != pic->slices_descriptors[i].num_macroblocks)
401 use_app_config = false;
402 }
403 /* Except last one can be smaller */
404 if (pic->slices_descriptors[pic->num_slice_descriptors - 1].num_macroblocks > num_mbs_in_slice)
405 use_app_config = false;
406
407 if (!use_app_config) {
408 assert(num_mbs_total >= pic->num_slice_descriptors);
409 num_mbs_in_slice =
410 (num_mbs_total + pic->num_slice_descriptors - 1) / pic->num_slice_descriptors;
411 }
412 }
413
414 num_mbs_in_slice = MAX2(4, num_mbs_in_slice);
415
416 enc->enc_pic.slice_ctrl.slice_control_mode = RENCODE_H264_SLICE_CONTROL_MODE_FIXED_MBS;
417 enc->enc_pic.slice_ctrl.num_mbs_per_slice = num_mbs_in_slice;
418 }
419
radeon_vcn_enc_get_output_format_param(struct radeon_encoder * enc,bool full_range)420 static void radeon_vcn_enc_get_output_format_param(struct radeon_encoder *enc, bool full_range)
421 {
422 switch (enc->enc_pic.bit_depth_luma_minus8) {
423 case 2: /* 10 bits */
424 enc->enc_pic.enc_output_format.output_color_volume = RENCODE_COLOR_VOLUME_G22_BT709;
425 enc->enc_pic.enc_output_format.output_color_range = full_range ?
426 RENCODE_COLOR_RANGE_FULL : RENCODE_COLOR_RANGE_STUDIO;
427 enc->enc_pic.enc_output_format.output_chroma_location = RENCODE_CHROMA_LOCATION_INTERSTITIAL;
428 enc->enc_pic.enc_output_format.output_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_10_BIT;
429 break;
430 default: /* 8 bits */
431 enc->enc_pic.enc_output_format.output_color_volume = RENCODE_COLOR_VOLUME_G22_BT709;
432 enc->enc_pic.enc_output_format.output_color_range = full_range ?
433 RENCODE_COLOR_RANGE_FULL : RENCODE_COLOR_RANGE_STUDIO;
434 enc->enc_pic.enc_output_format.output_chroma_location = RENCODE_CHROMA_LOCATION_INTERSTITIAL;
435 enc->enc_pic.enc_output_format.output_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_8_BIT;
436 break;
437 }
438 }
439
radeon_vcn_enc_get_input_format_param(struct radeon_encoder * enc,struct pipe_picture_desc * pic_base)440 static void radeon_vcn_enc_get_input_format_param(struct radeon_encoder *enc,
441 struct pipe_picture_desc *pic_base)
442 {
443 switch (pic_base->input_format) {
444 case PIPE_FORMAT_P010:
445 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_10_BIT;
446 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_P010;
447 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_2_0;
448 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_YUV;
449 break;
450 case PIPE_FORMAT_B8G8R8A8_UNORM:
451 case PIPE_FORMAT_B8G8R8X8_UNORM:
452 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_8_BIT;
453 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_4_4;
454 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_A8R8G8B8;
455 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_RGB;
456 break;
457 case PIPE_FORMAT_R8G8B8A8_UNORM:
458 case PIPE_FORMAT_R8G8B8X8_UNORM:
459 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_8_BIT;
460 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_4_4;
461 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_A8B8G8R8;
462 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_RGB;
463 break;
464 case PIPE_FORMAT_B10G10R10A2_UNORM:
465 case PIPE_FORMAT_B10G10R10X2_UNORM:
466 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_10_BIT;
467 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_4_4;
468 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_A2R10G10B10;
469 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_RGB;
470 break;
471 case PIPE_FORMAT_R10G10B10A2_UNORM:
472 case PIPE_FORMAT_R10G10B10X2_UNORM:
473 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_10_BIT;
474 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_4_4;
475 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_A2B10G10R10;
476 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_RGB;
477 break;
478 case PIPE_FORMAT_NV12: /* FALL THROUGH */
479 default:
480 enc->enc_pic.enc_input_format.input_color_bit_depth = RENCODE_COLOR_BIT_DEPTH_8_BIT;
481 enc->enc_pic.enc_input_format.input_color_packing_format = RENCODE_COLOR_PACKING_FORMAT_NV12;
482 enc->enc_pic.enc_input_format.input_chroma_subsampling = RENCODE_CHROMA_SUBSAMPLING_4_2_0;
483 enc->enc_pic.enc_input_format.input_color_space = RENCODE_COLOR_SPACE_YUV;
484 break;
485 }
486
487 enc->enc_pic.enc_input_format.input_color_volume = RENCODE_COLOR_VOLUME_G22_BT709;
488 enc->enc_pic.enc_input_format.input_color_range = pic_base->input_full_range ?
489 RENCODE_COLOR_RANGE_FULL : RENCODE_COLOR_RANGE_STUDIO;
490 enc->enc_pic.enc_input_format.input_chroma_location = RENCODE_CHROMA_LOCATION_INTERSTITIAL;
491 }
492
radeon_vcn_enc_h264_get_param(struct radeon_encoder * enc,struct pipe_h264_enc_picture_desc * pic)493 static void radeon_vcn_enc_h264_get_param(struct radeon_encoder *enc,
494 struct pipe_h264_enc_picture_desc *pic)
495 {
496 bool use_filter;
497
498 enc->enc_pic.h264.desc = pic;
499 enc->enc_pic.picture_type = pic->picture_type;
500 enc->enc_pic.bit_depth_luma_minus8 = 0;
501 enc->enc_pic.bit_depth_chroma_minus8 = 0;
502 enc->enc_pic.h264_enc_params.input_pic_order_cnt = pic->pic_order_cnt;
503 enc->enc_pic.h264_enc_params.input_picture_structure = RENCODE_H264_PICTURE_STRUCTURE_FRAME;
504 enc->enc_pic.h264_enc_params.interlaced_mode = RENCODE_H264_INTERLACING_MODE_PROGRESSIVE;
505 enc->enc_pic.h264_enc_params.l0_reference_picture1_index = 0xffffffff;
506 enc->enc_pic.enc_params.reconstructed_picture_index = pic->dpb_curr_pic;
507 enc->enc_pic.h264_enc_params.is_reference = !pic->not_referenced;
508 enc->enc_pic.h264_enc_params.is_long_term = pic->is_ltr;
509 enc->enc_pic.not_referenced = pic->not_referenced;
510
511 if (pic->ref_list0[0] != PIPE_H2645_LIST_REF_INVALID_ENTRY) {
512 uint8_t ref_l0 = pic->ref_list0[0];
513
514 enc->enc_pic.enc_params.reference_picture_index = ref_l0;
515 enc->enc_pic.h264_enc_params.picture_info_l0_reference_picture0.pic_type =
516 radeon_enc_h2645_picture_type(pic->dpb[ref_l0].picture_type);
517 enc->enc_pic.h264_enc_params.picture_info_l0_reference_picture0.pic_order_cnt =
518 pic->dpb[ref_l0].pic_order_cnt;
519 enc->enc_pic.h264_enc_params.picture_info_l0_reference_picture0.is_long_term =
520 pic->dpb[ref_l0].is_ltr;
521 } else {
522 enc->enc_pic.enc_params.reference_picture_index = 0xffffffff;
523 }
524
525 if (pic->ref_list1[0] != PIPE_H2645_LIST_REF_INVALID_ENTRY) {
526 uint8_t ref_l1 = pic->ref_list1[0];
527
528 enc->enc_pic.h264_enc_params.l1_reference_picture0_index = ref_l1;
529 enc->enc_pic.h264_enc_params.picture_info_l1_reference_picture0.pic_type =
530 radeon_enc_h2645_picture_type(pic->dpb[ref_l1].picture_type);
531 enc->enc_pic.h264_enc_params.picture_info_l1_reference_picture0.pic_order_cnt =
532 pic->dpb[ref_l1].pic_order_cnt;
533 enc->enc_pic.h264_enc_params.picture_info_l1_reference_picture0.is_long_term =
534 pic->dpb[ref_l1].is_ltr;
535 } else {
536 enc->enc_pic.h264_enc_params.l1_reference_picture0_index = 0xffffffff;
537 }
538
539 if ((pic->ref_list0[0] != PIPE_H2645_LIST_REF_INVALID_ENTRY &&
540 pic->dpb[pic->ref_list0[0]].picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B) ||
541 (pic->ref_list1[0] != PIPE_H2645_LIST_REF_INVALID_ENTRY &&
542 pic->dpb[pic->ref_list1[0]].picture_type == PIPE_H2645_ENC_PICTURE_TYPE_B))
543 RADEON_ENC_ERR("B-frame references not supported\n");
544
545 if (enc->dpb_type == DPB_TIER_2) {
546 for (uint32_t i = 0; i < ARRAY_SIZE(pic->dpb); i++) {
547 struct pipe_video_buffer *buf = pic->dpb[i].buffer;
548 enc->enc_pic.dpb_bufs[i] =
549 buf ? vl_video_buffer_get_associated_data(buf, &enc->base) : NULL;
550 assert(!buf || enc->enc_pic.dpb_bufs[i]);
551 }
552 }
553
554 radeon_vcn_enc_h264_get_session_param(enc, pic);
555 radeon_vcn_enc_h264_get_dbk_param(enc, pic);
556 radeon_vcn_enc_h264_get_rc_param(enc, pic);
557 radeon_vcn_enc_h264_get_spec_misc_param(enc, pic);
558 radeon_vcn_enc_h264_get_slice_ctrl_param(enc, pic);
559 radeon_vcn_enc_get_input_format_param(enc, &pic->base);
560 radeon_vcn_enc_get_output_format_param(enc, pic->seq.video_full_range_flag);
561
562 use_filter = enc->enc_pic.h264_deblock.disable_deblocking_filter_idc != 1;
563 radeon_vcn_enc_get_intra_refresh_param(enc, use_filter, &pic->intra_refresh);
564 radeon_vcn_enc_get_roi_param(enc, &pic->roi);
565 radeon_vcn_enc_get_latency_param(enc);
566 radeon_vcn_enc_quality_modes(enc, &pic->quality_modes);
567 }
568
radeon_vcn_enc_hevc_get_session_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)569 static void radeon_vcn_enc_hevc_get_session_param(struct radeon_encoder *enc,
570 struct pipe_h265_enc_picture_desc *pic)
571 {
572 if (enc->enc_pic.session_init.aligned_picture_width)
573 return;
574
575 uint32_t align_width = PIPE_H265_ENC_CTB_SIZE;
576 uint32_t align_height = 16;
577
578 enc->enc_pic.session_init.encode_standard = RENCODE_ENCODE_STANDARD_HEVC;
579 enc->enc_pic.session_init.aligned_picture_width = align(enc->base.width, align_width);
580 enc->enc_pic.session_init.aligned_picture_height = align(enc->base.height, align_height);
581
582 uint32_t padding_width = 0;
583 uint32_t padding_height = 0;
584 uint32_t max_padding_width = align_width - 2;
585 uint32_t max_padding_height = align_height - 2;
586
587 if (enc->enc_pic.session_init.aligned_picture_width > enc->source->width)
588 padding_width = enc->enc_pic.session_init.aligned_picture_width - enc->source->width;
589 if (enc->enc_pic.session_init.aligned_picture_height > enc->source->height)
590 padding_height = enc->enc_pic.session_init.aligned_picture_height - enc->source->height;
591
592 /* Input surface can be smaller if the difference is within padding bounds. */
593 if (padding_width > max_padding_width || padding_height > max_padding_height)
594 RADEON_ENC_ERR("Input surface size doesn't match aligned size\n");
595
596 if (pic->seq.conformance_window_flag) {
597 uint32_t pad_w =
598 (pic->seq.conf_win_left_offset + pic->seq.conf_win_right_offset) * 2;
599 uint32_t pad_h =
600 (pic->seq.conf_win_top_offset + pic->seq.conf_win_bottom_offset) * 2;
601 padding_width = CLAMP(pad_w, padding_width, max_padding_width);
602 padding_height = CLAMP(pad_h, padding_height, max_padding_height);
603 }
604
605 enc->enc_pic.session_init.padding_width = padding_width;
606 enc->enc_pic.session_init.padding_height = padding_height;
607 }
608
radeon_vcn_enc_hevc_get_dbk_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)609 static void radeon_vcn_enc_hevc_get_dbk_param(struct radeon_encoder *enc,
610 struct pipe_h265_enc_picture_desc *pic)
611 {
612 struct si_screen *sscreen = (struct si_screen *)enc->screen;
613
614 enc->enc_pic.hevc_deblock.loop_filter_across_slices_enabled =
615 pic->pic.pps_loop_filter_across_slices_enabled_flag;
616 enc->enc_pic.hevc_deblock.deblocking_filter_disabled =
617 pic->slice.slice_deblocking_filter_disabled_flag;
618 enc->enc_pic.hevc_deblock.beta_offset_div2 = pic->slice.slice_beta_offset_div2;
619 enc->enc_pic.hevc_deblock.tc_offset_div2 = pic->slice.slice_tc_offset_div2;
620 enc->enc_pic.hevc_deblock.cb_qp_offset = pic->slice.slice_cb_qp_offset;
621 enc->enc_pic.hevc_deblock.cr_qp_offset = pic->slice.slice_cr_qp_offset;
622 enc->enc_pic.hevc_deblock.disable_sao =
623 sscreen->info.vcn_ip_version < VCN_2_0_0 ||
624 !pic->seq.sample_adaptive_offset_enabled_flag;
625 }
626
radeon_vcn_enc_hevc_get_spec_misc_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)627 static void radeon_vcn_enc_hevc_get_spec_misc_param(struct radeon_encoder *enc,
628 struct pipe_h265_enc_picture_desc *pic)
629 {
630 struct si_screen *sscreen = (struct si_screen *)enc->screen;
631
632 enc->enc_pic.hevc_spec_misc.log2_min_luma_coding_block_size_minus3 =
633 pic->seq.log2_min_luma_coding_block_size_minus3;
634 enc->enc_pic.hevc_spec_misc.amp_disabled = !pic->seq.amp_enabled_flag;
635 enc->enc_pic.hevc_spec_misc.strong_intra_smoothing_enabled =
636 pic->seq.strong_intra_smoothing_enabled_flag;
637 enc->enc_pic.hevc_spec_misc.constrained_intra_pred_flag =
638 pic->pic.constrained_intra_pred_flag;
639 enc->enc_pic.hevc_spec_misc.cabac_init_flag = pic->slice.cabac_init_flag;
640 enc->enc_pic.hevc_spec_misc.half_pel_enabled = 1;
641 enc->enc_pic.hevc_spec_misc.quarter_pel_enabled = 1;
642 enc->enc_pic.hevc_spec_misc.transform_skip_disabled =
643 sscreen->info.vcn_ip_version < VCN_3_0_0 ||
644 !pic->pic.transform_skip_enabled_flag;
645 enc->enc_pic.hevc_spec_misc.cu_qp_delta_enabled_flag =
646 (sscreen->info.vcn_ip_version >= VCN_2_0_0 &&
647 pic->pic.cu_qp_delta_enabled_flag) ||
648 enc->enc_pic.enc_qp_map.qp_map_type ||
649 enc->enc_pic.rc_session_init.rate_control_method;
650 }
651
radeon_vcn_enc_hevc_get_rc_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)652 static void radeon_vcn_enc_hevc_get_rc_param(struct radeon_encoder *enc,
653 struct pipe_h265_enc_picture_desc *pic)
654 {
655 uint32_t frame_rate_den, frame_rate_num, max_qp;
656
657 enc->enc_pic.num_temporal_layers = pic->seq.num_temporal_layers ? pic->seq.num_temporal_layers : 1;
658 enc->enc_pic.temporal_id = MIN2(pic->pic.temporal_id, enc->enc_pic.num_temporal_layers - 1);
659
660 for (int i = 0; i < enc->enc_pic.num_temporal_layers; i++) {
661 enc->enc_pic.rc_layer_init[i].target_bit_rate = pic->rc[i].target_bitrate;
662 enc->enc_pic.rc_layer_init[i].peak_bit_rate = pic->rc[i].peak_bitrate;
663 frame_rate_den = pic->rc[i].frame_rate_den;
664 frame_rate_num = pic->rc[i].frame_rate_num;
665 radeon_vcn_enc_invalid_frame_rate(&frame_rate_den, &frame_rate_num);
666 enc->enc_pic.rc_layer_init[i].frame_rate_den = frame_rate_den;
667 enc->enc_pic.rc_layer_init[i].frame_rate_num = frame_rate_num;
668 enc->enc_pic.rc_layer_init[i].vbv_buffer_size = pic->rc[i].vbv_buffer_size;
669 enc->enc_pic.rc_layer_init[i].avg_target_bits_per_picture =
670 radeon_vcn_per_frame_integer(pic->rc[i].target_bitrate,
671 frame_rate_den,
672 frame_rate_num);
673 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_integer =
674 radeon_vcn_per_frame_integer(pic->rc[i].peak_bitrate,
675 frame_rate_den,
676 frame_rate_num);
677 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_fractional =
678 radeon_vcn_per_frame_frac(pic->rc[i].peak_bitrate,
679 frame_rate_den,
680 frame_rate_num);
681 }
682 enc->enc_pic.rc_session_init.vbv_buffer_level = pic->rc[0].vbv_buf_lv;
683 enc->enc_pic.rc_per_pic.qp_obs = pic->rc[0].quant_i_frames;
684 enc->enc_pic.rc_per_pic.min_qp_app_obs = pic->rc[0].min_qp;
685 enc->enc_pic.rc_per_pic.max_qp_app_obs = pic->rc[0].max_qp ? pic->rc[0].max_qp : 51;
686 enc->enc_pic.rc_per_pic.qp_i = pic->rc[0].quant_i_frames;
687 enc->enc_pic.rc_per_pic.qp_p = pic->rc[0].quant_p_frames;
688 enc->enc_pic.rc_per_pic.min_qp_i = pic->rc[0].min_qp;
689 enc->enc_pic.rc_per_pic.min_qp_p = pic->rc[0].min_qp;
690 max_qp = pic->rc[0].max_qp ? pic->rc[0].max_qp : 51;
691 enc->enc_pic.rc_per_pic.max_qp_i = max_qp;
692 enc->enc_pic.rc_per_pic.max_qp_p = max_qp;
693 enc->enc_pic.rc_per_pic.enabled_filler_data = 0;
694 enc->enc_pic.rc_per_pic.skip_frame_enable = pic->rc[0].skip_frame_enable;
695 enc->enc_pic.rc_per_pic.enforce_hrd = pic->rc[0].enforce_hrd;
696 enc->enc_pic.rc_per_pic.qvbr_quality_level = pic->rc[0].vbr_quality_factor;
697 switch (pic->rc[0].rate_ctrl_method) {
698 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE:
699 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
700 break;
701 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP:
702 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT:
703 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_CBR;
704 enc->enc_pic.rc_per_pic.enabled_filler_data = pic->rc[0].fill_data_enable;
705 break;
706 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP:
707 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE:
708 enc->enc_pic.rc_session_init.rate_control_method =
709 RENCODE_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR;
710 break;
711 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_QUALITY_VARIABLE:
712 enc->enc_pic.rc_session_init.rate_control_method =
713 RENCODE_RATE_CONTROL_METHOD_QUALITY_VBR;
714 break;
715 default:
716 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
717 }
718 enc->enc_pic.rc_per_pic.max_au_size_obs = pic->rc[0].max_au_size;
719 enc->enc_pic.rc_per_pic.max_au_size_i = pic->rc[0].max_au_size;
720 enc->enc_pic.rc_per_pic.max_au_size_p = pic->rc[0].max_au_size;
721 }
722
radeon_vcn_enc_hevc_get_slice_ctrl_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)723 static void radeon_vcn_enc_hevc_get_slice_ctrl_param(struct radeon_encoder *enc,
724 struct pipe_h265_enc_picture_desc *pic)
725 {
726 uint32_t num_ctbs_total, num_ctbs_in_slice;
727
728 num_ctbs_total =
729 PIPE_ALIGN_IN_BLOCK_SIZE(pic->seq.pic_width_in_luma_samples, PIPE_H265_ENC_CTB_SIZE) *
730 PIPE_ALIGN_IN_BLOCK_SIZE(pic->seq.pic_height_in_luma_samples, PIPE_H265_ENC_CTB_SIZE);
731
732 if (pic->num_slice_descriptors <= 1) {
733 num_ctbs_in_slice = num_ctbs_total;
734 } else {
735 bool use_app_config = true;
736 num_ctbs_in_slice = pic->slices_descriptors[0].num_ctu_in_slice;
737
738 /* All slices must have equal size */
739 for (unsigned i = 1; i < pic->num_slice_descriptors - 1; i++) {
740 if (num_ctbs_in_slice != pic->slices_descriptors[i].num_ctu_in_slice)
741 use_app_config = false;
742 }
743 /* Except last one can be smaller */
744 if (pic->slices_descriptors[pic->num_slice_descriptors - 1].num_ctu_in_slice > num_ctbs_in_slice)
745 use_app_config = false;
746
747 if (!use_app_config) {
748 assert(num_ctbs_total >= pic->num_slice_descriptors);
749 num_ctbs_in_slice =
750 (num_ctbs_total + pic->num_slice_descriptors - 1) / pic->num_slice_descriptors;
751 }
752 }
753
754 num_ctbs_in_slice = MAX2(4, num_ctbs_in_slice);
755
756 enc->enc_pic.hevc_slice_ctrl.slice_control_mode = RENCODE_HEVC_SLICE_CONTROL_MODE_FIXED_CTBS;
757 enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice =
758 num_ctbs_in_slice;
759 enc->enc_pic.hevc_slice_ctrl.fixed_ctbs_per_slice.num_ctbs_per_slice_segment =
760 num_ctbs_in_slice;
761 }
762
radeon_vcn_enc_hevc_get_param(struct radeon_encoder * enc,struct pipe_h265_enc_picture_desc * pic)763 static void radeon_vcn_enc_hevc_get_param(struct radeon_encoder *enc,
764 struct pipe_h265_enc_picture_desc *pic)
765 {
766 enc->enc_pic.hevc.desc = pic;
767 enc->enc_pic.picture_type = pic->picture_type;
768 enc->enc_pic.enc_params.reference_picture_index =
769 pic->ref_list0[0] == PIPE_H2645_LIST_REF_INVALID_ENTRY ? 0xffffffff : pic->ref_list0[0];
770 enc->enc_pic.enc_params.reconstructed_picture_index = pic->dpb_curr_pic;
771 enc->enc_pic.bit_depth_luma_minus8 = pic->seq.bit_depth_luma_minus8;
772 enc->enc_pic.bit_depth_chroma_minus8 = pic->seq.bit_depth_chroma_minus8;
773 enc->enc_pic.nal_unit_type = pic->pic.nal_unit_type;
774
775 if (enc->dpb_type == DPB_TIER_2) {
776 for (uint32_t i = 0; i < ARRAY_SIZE(pic->dpb); i++) {
777 struct pipe_video_buffer *buf = pic->dpb[i].buffer;
778 enc->enc_pic.dpb_bufs[i] =
779 buf ? vl_video_buffer_get_associated_data(buf, &enc->base) : NULL;
780 assert(!buf || enc->enc_pic.dpb_bufs[i]);
781 }
782 }
783
784 radeon_vcn_enc_hevc_get_session_param(enc, pic);
785 radeon_vcn_enc_hevc_get_dbk_param(enc, pic);
786 radeon_vcn_enc_hevc_get_rc_param(enc, pic);
787 radeon_vcn_enc_hevc_get_slice_ctrl_param(enc, pic);
788 radeon_vcn_enc_get_input_format_param(enc, &pic->base);
789 radeon_vcn_enc_get_output_format_param(enc, pic->seq.video_full_range_flag);
790 radeon_vcn_enc_get_intra_refresh_param(enc,
791 !(enc->enc_pic.hevc_deblock.deblocking_filter_disabled),
792 &pic->intra_refresh);
793 radeon_vcn_enc_get_roi_param(enc, &pic->roi);
794 radeon_vcn_enc_hevc_get_spec_misc_param(enc, pic);
795 radeon_vcn_enc_get_latency_param(enc);
796 radeon_vcn_enc_quality_modes(enc, &pic->quality_modes);
797 }
798
radeon_vcn_enc_av1_get_session_param(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)799 static void radeon_vcn_enc_av1_get_session_param(struct radeon_encoder *enc,
800 struct pipe_av1_enc_picture_desc *pic)
801 {
802 struct si_screen *sscreen = (struct si_screen *)enc->screen;
803
804 if (enc->enc_pic.session_init.aligned_picture_width)
805 return;
806
807 enc->enc_pic.session_init.encode_standard = RENCODE_ENCODE_STANDARD_AV1;
808
809 uint32_t width = enc->enc_pic.pic_width_in_luma_samples;
810 uint32_t height = enc->enc_pic.pic_height_in_luma_samples;
811 uint32_t align_width, align_height;
812
813 if (sscreen->info.vcn_ip_version < VCN_5_0_0) {
814 align_width = PIPE_AV1_ENC_SB_SIZE;
815 align_height = 16;
816 enc->enc_pic.session_init.aligned_picture_width = align(width, align_width);
817 enc->enc_pic.session_init.aligned_picture_height = align(height, align_height);
818 if (!(height % 8) && (height % 16))
819 enc->enc_pic.session_init.aligned_picture_height = height + 2;
820 if (sscreen->info.vcn_ip_version == VCN_4_0_2 ||
821 sscreen->info.vcn_ip_version == VCN_4_0_5 ||
822 sscreen->info.vcn_ip_version == VCN_4_0_6)
823 enc->enc_pic.session_init.WA_flags = 1;
824 } else {
825 align_width = 8;
826 align_height = 2;
827 enc->enc_pic.session_init.aligned_picture_width = align(width, align_width);
828 enc->enc_pic.session_init.aligned_picture_height = align(height, align_height);
829 }
830 enc->enc_pic.av1.coded_width = enc->enc_pic.session_init.aligned_picture_width;
831 enc->enc_pic.av1.coded_height = enc->enc_pic.session_init.aligned_picture_height;
832
833 uint32_t padding_width = 0;
834 uint32_t padding_height = 0;
835 uint32_t max_padding_width = align_width - 2;
836 uint32_t max_padding_height = align_height - 2;
837
838 if (enc->enc_pic.session_init.aligned_picture_width > enc->source->width)
839 padding_width = enc->enc_pic.session_init.aligned_picture_width - enc->source->width;
840 if (enc->enc_pic.session_init.aligned_picture_height > enc->source->height)
841 padding_height = enc->enc_pic.session_init.aligned_picture_height - enc->source->height;
842
843 /* Input surface can be smaller if the difference is within padding bounds. */
844 if (padding_width > max_padding_width || padding_height > max_padding_height)
845 RADEON_ENC_ERR("Input surface size doesn't match aligned size\n");
846
847 padding_width = MAX2(padding_width, enc->enc_pic.session_init.aligned_picture_width - width);
848 padding_height = MAX2(padding_height, enc->enc_pic.session_init.aligned_picture_height - height);
849
850 enc->enc_pic.session_init.padding_width = padding_width;
851 enc->enc_pic.session_init.padding_height = padding_height;
852 }
853
radeon_vcn_enc_av1_get_spec_misc_param(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)854 static void radeon_vcn_enc_av1_get_spec_misc_param(struct radeon_encoder *enc,
855 struct pipe_av1_enc_picture_desc *pic)
856 {
857 enc->enc_pic.av1_spec_misc.cdef_mode = pic->seq.seq_bits.enable_cdef;
858 enc->enc_pic.av1_spec_misc.disable_cdf_update = pic->disable_cdf_update;
859 enc->enc_pic.av1_spec_misc.disable_frame_end_update_cdf = pic->disable_frame_end_update_cdf;
860 enc->enc_pic.av1_spec_misc.palette_mode_enable = pic->palette_mode_enable;
861 enc->enc_pic.av1_spec_misc.cdef_bits = pic->cdef.cdef_bits;
862 enc->enc_pic.av1_spec_misc.cdef_damping_minus3 = pic->cdef.cdef_damping_minus_3;
863 for (int i = 0; i < (pic->cdef.cdef_bits << 1); i++ ){
864 enc->enc_pic.av1_spec_misc.cdef_y_pri_strength[i] = (pic->cdef.cdef_y_strengths[i] >> 2);
865 enc->enc_pic.av1_spec_misc.cdef_y_sec_strength[i] = (pic->cdef.cdef_y_strengths[i] & 0x3);
866 enc->enc_pic.av1_spec_misc.cdef_uv_pri_strength[i] = (pic->cdef.cdef_uv_strengths[i] >> 2);
867 enc->enc_pic.av1_spec_misc.cdef_uv_sec_strength[i] = (pic->cdef.cdef_uv_strengths[i] & 0x3);
868 }
869
870 enc->enc_pic.av1_spec_misc.delta_q_y_dc = pic->quantization.y_dc_delta_q;
871 enc->enc_pic.av1_spec_misc.delta_q_u_dc = pic->quantization.u_dc_delta_q;
872 enc->enc_pic.av1_spec_misc.delta_q_u_ac = pic->quantization.u_ac_delta_q;
873 enc->enc_pic.av1_spec_misc.delta_q_v_dc = pic->quantization.v_dc_delta_q;
874 enc->enc_pic.av1_spec_misc.delta_q_v_ac = pic->quantization.v_ac_delta_q;
875
876 if (enc->enc_pic.frame_type == PIPE_AV1_ENC_FRAME_TYPE_KEY)
877 enc->enc_pic.av1_spec_misc.separate_delta_q =
878 (pic->quantization.u_dc_delta_q != pic->quantization.v_dc_delta_q) ||
879 (pic->quantization.u_ac_delta_q != pic->quantization.v_ac_delta_q);
880
881 if (enc->enc_pic.disable_screen_content_tools) {
882 enc->enc_pic.force_integer_mv = 0;
883 enc->enc_pic.av1_spec_misc.palette_mode_enable = 0;
884 }
885
886 if (enc->enc_pic.force_integer_mv)
887 enc->enc_pic.av1_spec_misc.mv_precision = RENCODE_AV1_MV_PRECISION_FORCE_INTEGER_MV;
888 else
889 enc->enc_pic.av1_spec_misc.mv_precision = RENCODE_AV1_MV_PRECISION_ALLOW_HIGH_PRECISION;
890 }
891
radeon_vcn_enc_av1_get_rc_param(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)892 static void radeon_vcn_enc_av1_get_rc_param(struct radeon_encoder *enc,
893 struct pipe_av1_enc_picture_desc *pic)
894 {
895 uint32_t frame_rate_den, frame_rate_num, min_qp, max_qp;
896
897 enc->enc_pic.num_temporal_layers = pic->seq.num_temporal_layers ? pic->seq.num_temporal_layers : 1;
898 enc->enc_pic.temporal_id = MIN2(pic->temporal_id, enc->enc_pic.num_temporal_layers - 1);
899
900 for (int i = 0; i < ARRAY_SIZE(enc->enc_pic.rc_layer_init); i++) {
901 enc->enc_pic.rc_layer_init[i].target_bit_rate = pic->rc[i].target_bitrate;
902 enc->enc_pic.rc_layer_init[i].peak_bit_rate = pic->rc[i].peak_bitrate;
903 frame_rate_den = pic->rc[i].frame_rate_den;
904 frame_rate_num = pic->rc[i].frame_rate_num;
905 radeon_vcn_enc_invalid_frame_rate(&frame_rate_den, &frame_rate_num);
906 enc->enc_pic.rc_layer_init[i].frame_rate_den = frame_rate_den;
907 enc->enc_pic.rc_layer_init[i].frame_rate_num = frame_rate_num;
908 enc->enc_pic.rc_layer_init[i].vbv_buffer_size = pic->rc[i].vbv_buffer_size;
909 enc->enc_pic.rc_layer_init[i].avg_target_bits_per_picture =
910 radeon_vcn_per_frame_integer(pic->rc[i].target_bitrate,
911 frame_rate_den,
912 frame_rate_num);
913 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_integer =
914 radeon_vcn_per_frame_integer(pic->rc[i].peak_bitrate,
915 frame_rate_den,
916 frame_rate_num);
917 enc->enc_pic.rc_layer_init[i].peak_bits_per_picture_fractional =
918 radeon_vcn_per_frame_frac(pic->rc[i].peak_bitrate,
919 frame_rate_den,
920 frame_rate_num);
921 }
922 enc->enc_pic.rc_session_init.vbv_buffer_level = pic->rc[0].vbv_buf_lv;
923 enc->enc_pic.rc_per_pic.qp_obs = pic->rc[0].qp;
924 enc->enc_pic.rc_per_pic.min_qp_app_obs = pic->rc[0].min_qp ? pic->rc[0].min_qp : 1;
925 enc->enc_pic.rc_per_pic.max_qp_app_obs = pic->rc[0].max_qp ? pic->rc[0].max_qp : 255;
926 enc->enc_pic.rc_per_pic.qp_i = pic->rc[0].qp;
927 enc->enc_pic.rc_per_pic.qp_p = pic->rc[0].qp_inter;
928 enc->enc_pic.rc_per_pic.qp_b = pic->rc[0].qp_inter;
929 min_qp = pic->rc[0].min_qp ? pic->rc[0].min_qp : 1;
930 enc->enc_pic.rc_per_pic.min_qp_i = min_qp;
931 enc->enc_pic.rc_per_pic.min_qp_p = min_qp;
932 enc->enc_pic.rc_per_pic.min_qp_b = min_qp;
933 max_qp = pic->rc[0].max_qp ? pic->rc[0].max_qp : 255;
934 enc->enc_pic.rc_per_pic.max_qp_i = max_qp;
935 enc->enc_pic.rc_per_pic.max_qp_p = max_qp;
936 enc->enc_pic.rc_per_pic.max_qp_b = max_qp;
937 enc->enc_pic.rc_per_pic.enabled_filler_data = 0;
938 enc->enc_pic.rc_per_pic.skip_frame_enable = pic->rc[0].skip_frame_enable;
939 enc->enc_pic.rc_per_pic.enforce_hrd = pic->rc[0].enforce_hrd;
940 enc->enc_pic.rc_per_pic.qvbr_quality_level = (pic->rc[0].vbr_quality_factor + 2) / 5;
941 switch (pic->rc[0].rate_ctrl_method) {
942 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE:
943 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
944 break;
945 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP:
946 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT:
947 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_CBR;
948 enc->enc_pic.rc_per_pic.enabled_filler_data = pic->rc[0].fill_data_enable;
949 break;
950 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE_SKIP:
951 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_VARIABLE:
952 enc->enc_pic.rc_session_init.rate_control_method =
953 RENCODE_RATE_CONTROL_METHOD_PEAK_CONSTRAINED_VBR;
954 break;
955 case PIPE_H2645_ENC_RATE_CONTROL_METHOD_QUALITY_VARIABLE:
956 enc->enc_pic.rc_session_init.rate_control_method =
957 RENCODE_RATE_CONTROL_METHOD_QUALITY_VBR;
958 break;
959 default:
960 enc->enc_pic.rc_session_init.rate_control_method = RENCODE_RATE_CONTROL_METHOD_NONE;
961 }
962 enc->enc_pic.rc_per_pic.max_au_size_obs = pic->rc[0].max_au_size;
963 enc->enc_pic.rc_per_pic.max_au_size_i = pic->rc[0].max_au_size;
964 enc->enc_pic.rc_per_pic.max_au_size_p = pic->rc[0].max_au_size;
965 enc->enc_pic.rc_per_pic.max_au_size_b = pic->rc[0].max_au_size;
966 }
967
radeon_vcn_enc_av1_get_tile_config(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)968 static void radeon_vcn_enc_av1_get_tile_config(struct radeon_encoder *enc,
969 struct pipe_av1_enc_picture_desc *pic)
970 {
971 uint32_t num_tile_cols, num_tile_rows;
972
973 num_tile_cols = MIN2(RENCODE_AV1_TILE_CONFIG_MAX_NUM_COLS, pic->tile_cols);
974 num_tile_rows = MIN2(RENCODE_AV1_TILE_CONFIG_MAX_NUM_ROWS, pic->tile_rows);
975
976 enc->enc_pic.av1_tile_config.uniform_tile_spacing = !!(pic->uniform_tile_spacing);
977 enc->enc_pic.av1_tile_config.num_tile_cols = pic->tile_cols;
978 enc->enc_pic.av1_tile_config.num_tile_rows = pic->tile_rows;
979 enc->enc_pic.av1_tile_config.num_tile_groups = pic->num_tile_groups;
980 for (int i = 0; i < num_tile_cols; i++ )
981 enc->enc_pic.av1_tile_config.tile_widths[i] = pic->width_in_sbs_minus_1[i] + 1;
982 for (int i = 0; i < num_tile_rows; i++ )
983 enc->enc_pic.av1_tile_config.tile_height[i] = pic->height_in_sbs_minus_1[i] + 1;
984 for (int i = 0; i < num_tile_cols * num_tile_rows; i++ ) {
985 enc->enc_pic.av1_tile_config.tile_groups[i].start =
986 (uint32_t)pic->tile_groups[i].tile_group_start;
987 enc->enc_pic.av1_tile_config.tile_groups[i].end =
988 (uint32_t)pic->tile_groups[i].tile_group_end;
989 }
990 enc->enc_pic.av1_tile_config.context_update_tile_id = pic->context_update_tile_id;
991 }
992
radeon_vcn_enc_av1_get_param(struct radeon_encoder * enc,struct pipe_av1_enc_picture_desc * pic)993 static void radeon_vcn_enc_av1_get_param(struct radeon_encoder *enc,
994 struct pipe_av1_enc_picture_desc *pic)
995 {
996 struct si_screen *sscreen = (struct si_screen *)enc->screen;
997 struct radeon_enc_pic *enc_pic = &enc->enc_pic;
998
999 enc_pic->av1.desc = pic;
1000 enc_pic->frame_type = pic->frame_type;
1001 enc_pic->bit_depth_luma_minus8 = enc_pic->bit_depth_chroma_minus8 =
1002 pic->seq.bit_depth_minus8;
1003 enc_pic->pic_width_in_luma_samples = pic->seq.pic_width_in_luma_samples;
1004 enc_pic->pic_height_in_luma_samples = pic->seq.pic_height_in_luma_samples;
1005 enc_pic->enable_error_resilient_mode = pic->error_resilient_mode;
1006 enc_pic->force_integer_mv = pic->force_integer_mv;
1007 enc_pic->disable_screen_content_tools = !pic->allow_screen_content_tools;
1008 enc_pic->is_obu_frame = pic->enable_frame_obu;
1009
1010 enc_pic->enc_params.reference_picture_index =
1011 pic->ref_list0[0] == PIPE_H2645_LIST_REF_INVALID_ENTRY ?
1012 0xffffffff : pic->dpb_ref_frame_idx[pic->ref_list0[0]];
1013 enc_pic->enc_params.reconstructed_picture_index = pic->dpb_curr_pic;
1014
1015 if (sscreen->info.vcn_ip_version >= VCN_5_0_0) {
1016 for (uint32_t i = 0; i < RENCODE_AV1_REFS_PER_FRAME; i++)
1017 enc_pic->av1_enc_params.ref_frames[i] = pic->dpb_ref_frame_idx[i];
1018
1019 enc_pic->av1_enc_params.lsm_reference_frame_index[0] =
1020 pic->ref_list0[0] == PIPE_H2645_LIST_REF_INVALID_ENTRY ? 0xffffffff : pic->ref_list0[0];
1021 enc_pic->av1_enc_params.lsm_reference_frame_index[1] = 0xffffffff;
1022 enc_pic->av1.compound = false;
1023
1024 if (pic->ref_list1[0] != PIPE_H2645_LIST_REF_INVALID_ENTRY) {
1025 enc_pic->av1.compound = true; /* BIDIR_COMP */
1026 enc_pic->av1_enc_params.lsm_reference_frame_index[1] = pic->ref_list1[0];
1027 } else if (pic->ref_list0[1] != PIPE_H2645_LIST_REF_INVALID_ENTRY) {
1028 enc_pic->av1.compound = true; /* UNIDIR_COMP */
1029 enc_pic->av1_enc_params.lsm_reference_frame_index[1] = pic->ref_list0[1];
1030 }
1031
1032 uint32_t skip_frames[2];
1033 enc_pic->av1.skip_mode_allowed = radeon_enc_av1_skip_mode_allowed(enc, skip_frames);
1034
1035 if (enc_pic->av1.compound) {
1036 bool disallow_skip_mode = enc_pic->av1_spec_misc.disallow_skip_mode;
1037 enc_pic->av1_spec_misc.disallow_skip_mode = !enc_pic->av1.skip_mode_allowed;
1038 /* Skip mode frames must match reference frames */
1039 if (enc_pic->av1.skip_mode_allowed) {
1040 enc_pic->av1_spec_misc.disallow_skip_mode =
1041 skip_frames[0] != enc_pic->av1_enc_params.lsm_reference_frame_index[0] ||
1042 skip_frames[1] != enc_pic->av1_enc_params.lsm_reference_frame_index[1];
1043 }
1044 enc->need_spec_misc = disallow_skip_mode != enc_pic->av1_spec_misc.disallow_skip_mode;
1045 } else {
1046 enc->need_spec_misc = false;
1047 }
1048 }
1049
1050 if (enc->dpb_type == DPB_TIER_2) {
1051 for (uint32_t i = 0; i < ARRAY_SIZE(pic->dpb); i++) {
1052 struct pipe_video_buffer *buf = pic->dpb[i].buffer;
1053 enc->enc_pic.dpb_bufs[i] =
1054 buf ? vl_video_buffer_get_associated_data(buf, &enc->base) : NULL;
1055 assert(!buf || enc->enc_pic.dpb_bufs[i]);
1056 }
1057 }
1058
1059 radeon_vcn_enc_av1_get_session_param(enc, pic);
1060 radeon_vcn_enc_av1_get_spec_misc_param(enc, pic);
1061 radeon_vcn_enc_av1_get_rc_param(enc, pic);
1062 radeon_vcn_enc_av1_get_tile_config(enc, pic);
1063 radeon_vcn_enc_get_input_format_param(enc, &pic->base);
1064 radeon_vcn_enc_get_output_format_param(enc, pic->seq.color_config.color_range);
1065 /* loop filter enabled all the time */
1066 radeon_vcn_enc_get_intra_refresh_param(enc,
1067 true,
1068 &pic->intra_refresh);
1069 radeon_vcn_enc_get_roi_param(enc, &pic->roi);
1070 radeon_vcn_enc_get_latency_param(enc);
1071 radeon_vcn_enc_quality_modes(enc, &pic->quality_modes);
1072 }
1073
radeon_vcn_enc_get_param(struct radeon_encoder * enc,struct pipe_picture_desc * picture)1074 static void radeon_vcn_enc_get_param(struct radeon_encoder *enc, struct pipe_picture_desc *picture)
1075 {
1076 enc->enc_pic.enc_params.allowed_max_bitstream_size = enc->bs_size - enc->bs_offset;
1077
1078 if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC)
1079 radeon_vcn_enc_h264_get_param(enc, (struct pipe_h264_enc_picture_desc *)picture);
1080 else if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_HEVC)
1081 radeon_vcn_enc_hevc_get_param(enc, (struct pipe_h265_enc_picture_desc *)picture);
1082 else if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_AV1)
1083 radeon_vcn_enc_av1_get_param(enc, (struct pipe_av1_enc_picture_desc *)picture);
1084 }
1085
flush(struct radeon_encoder * enc,unsigned flags,struct pipe_fence_handle ** fence)1086 static int flush(struct radeon_encoder *enc, unsigned flags, struct pipe_fence_handle **fence)
1087 {
1088 struct si_screen *sscreen = (struct si_screen *)enc->screen;
1089
1090 if (sscreen->debug_flags & DBG(IB)) {
1091 struct ac_ib_parser ib_parser = {
1092 .f = stderr,
1093 .ib = enc->cs.current.buf,
1094 .num_dw = enc->cs.current.cdw,
1095 .gfx_level = sscreen->info.gfx_level,
1096 .vcn_version = sscreen->info.vcn_ip_version,
1097 .family = sscreen->info.family,
1098 .ip_type = AMD_IP_VCN_ENC,
1099 };
1100 ac_parse_ib(&ib_parser, "IB");
1101 }
1102
1103 return enc->ws->cs_flush(&enc->cs, flags, fence);
1104 }
1105
radeon_enc_flush(struct pipe_video_codec * encoder)1106 static void radeon_enc_flush(struct pipe_video_codec *encoder)
1107 {
1108 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1109 flush(enc, PIPE_FLUSH_ASYNC, NULL);
1110 }
1111
radeon_enc_cs_flush(void * ctx,unsigned flags,struct pipe_fence_handle ** fence)1112 static void radeon_enc_cs_flush(void *ctx, unsigned flags, struct pipe_fence_handle **fence)
1113 {
1114 // just ignored
1115 }
1116
1117 /* configure reconstructed picture offset */
radeon_enc_rec_offset(rvcn_enc_reconstructed_picture_t * recon,uint32_t * offset,uint32_t luma_size,uint32_t chroma_size,bool is_av1)1118 static void radeon_enc_rec_offset(rvcn_enc_reconstructed_picture_t *recon,
1119 uint32_t *offset,
1120 uint32_t luma_size,
1121 uint32_t chroma_size,
1122 bool is_av1)
1123 {
1124 if (offset) {
1125 recon->luma_offset = *offset;
1126 *offset += luma_size;
1127 recon->chroma_offset = *offset;
1128 *offset += chroma_size;
1129 if (is_av1) {
1130 recon->av1.av1_cdf_frame_context_offset = *offset;
1131 *offset += RENCODE_AV1_FRAME_CONTEXT_CDF_TABLE_SIZE;
1132 recon->av1.av1_cdef_algorithm_context_offset = *offset;
1133 *offset += RENCODE_AV1_CDEF_ALGORITHM_FRAME_CONTEXT_SIZE;
1134 }
1135 } else {
1136 recon->luma_offset = 0;
1137 recon->chroma_offset = 0;
1138 recon->av1.av1_cdf_frame_context_offset = 0;
1139 recon->av1.av1_cdef_algorithm_context_offset = 0;
1140 }
1141 recon->chroma_v_offset = 0;
1142 }
1143
1144 /* configure reconstructed picture offset */
radeon_enc_rec_meta_offset(rvcn_enc_reconstructed_picture_t * recon,uint32_t * offset,uint32_t total_coloc_size,uint32_t alignment,bool has_b,bool is_h264,bool is_av1)1145 static void radeon_enc_rec_meta_offset(rvcn_enc_reconstructed_picture_t *recon,
1146 uint32_t *offset,
1147 uint32_t total_coloc_size,
1148 uint32_t alignment,
1149 bool has_b,
1150 bool is_h264,
1151 bool is_av1)
1152 {
1153 uint32_t context_offset = 0;
1154
1155 if (offset) {
1156 recon->frame_context_buffer_offset = *offset;
1157 recon->encode_metadata_offset = context_offset;
1158 context_offset += RENCODE_MAX_METADATA_BUFFER_SIZE_PER_FRAME;
1159 if (is_h264) {
1160 if (has_b) {
1161 recon->h264.colloc_buffer_offset = context_offset;
1162 context_offset += total_coloc_size;
1163 } else
1164 recon->h264.colloc_buffer_offset = RENCODE_INVALID_COLOC_OFFSET;
1165 }
1166
1167 if (is_av1) {
1168 recon->av1.av1_cdf_frame_context_offset = context_offset;
1169 context_offset += RENCODE_AV1_FRAME_CONTEXT_CDF_TABLE_SIZE;
1170 recon->av1.av1_cdef_algorithm_context_offset = context_offset;
1171 context_offset += RENCODE_AV1_CDEF_ALGORITHM_FRAME_CONTEXT_SIZE;
1172 }
1173 context_offset = align(context_offset, alignment);
1174 *offset += context_offset;
1175 } else {
1176 recon->frame_context_buffer_offset = 0;
1177 recon->encode_metadata_offset = 0;
1178 recon->av1.av1_cdf_frame_context_offset = 0;
1179 recon->av1.av1_cdef_algorithm_context_offset = 0;
1180 }
1181 }
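/* Per-slot layout of the frame-context/metadata area (used for the separate
 * VCN 5.x metadata buffer):
 *
 *   encode_metadata_offset         -> RENCODE_MAX_METADATA_BUFFER_SIZE_PER_FRAME
 *   h264.colloc_buffer_offset      -> total_coloc_size   (H.264 with B-frames only)
 *   av1 CDF / CDEF context offsets -> fixed table sizes  (AV1 only)
 *
 * The per-slot block is aligned before advancing *offset, and
 * frame_context_buffer_offset records where the slot starts in the buffer. */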
1182
1183 static int setup_cdf(struct radeon_encoder *enc)
1184 {
1185 unsigned char *p_cdf = NULL;
1186
1187 if (!enc->cdf ||
1188 !si_vid_create_buffer(enc->screen,
1189 enc->cdf,
1190 VCN_ENC_AV1_DEFAULT_CDF_SIZE,
1191 PIPE_USAGE_DYNAMIC)) {
1192 RADEON_ENC_ERR("Can't create CDF buffer.\n");
1193 goto error;
1194 }
1195
1196 p_cdf = enc->ws->buffer_map(enc->ws,
1197 enc->cdf->res->buf,
1198 &enc->cs,
1199 PIPE_MAP_READ_WRITE | RADEON_MAP_TEMPORARY);
1200 if (!p_cdf)
1201 goto error;
1202
1203 memcpy(p_cdf, rvcn_av1_cdf_default_table, VCN_ENC_AV1_DEFAULT_CDF_SIZE);
1204 enc->ws->buffer_unmap(enc->ws, enc->cdf->res->buf);
1205
1206 return 0;
1207
1208 error:
1209 return -1;
1210 }
1211
1212 static void pre_encode_size(struct radeon_encoder *enc,
1213 uint32_t *offset)
1214 {
1215 struct si_screen *sscreen = (struct si_screen *)enc->screen;
1216 bool is_h264 = u_reduce_video_profile(enc->base.profile)
1217 == PIPE_VIDEO_FORMAT_MPEG4_AVC;
1218 uint32_t rec_alignment = is_h264 ? 16 : 64;
1219 uint32_t aligned_width = align(enc->base.width, rec_alignment);
1220 uint32_t aligned_height = align(enc->base.height, rec_alignment);
1221 struct radeon_enc_pic *enc_pic = &enc->enc_pic;
1222 bool has_b = enc_pic->spec_misc.b_picture_enabled; /* for h264 only */
1223 uint32_t pre_size = DIV_ROUND_UP((aligned_width >> 2), rec_alignment) *
1224 DIV_ROUND_UP((aligned_height >> 2), rec_alignment);
1225 uint32_t full_size = DIV_ROUND_UP(aligned_width, rec_alignment) *
1226 DIV_ROUND_UP(aligned_height, rec_alignment);
1227
1228 enc_pic->ctx_buf.two_pass_search_center_map_offset = *offset;
1229
1230 if (sscreen->info.vcn_ip_version < VCN_5_0_0) {
1231 if (is_h264 && !has_b)
1232 *offset += align((pre_size * 4 + full_size) * sizeof(uint32_t), enc->alignment);
1233 else if (!is_h264)
1234 *offset += align((pre_size * 52 + full_size) * sizeof(uint32_t), enc->alignment);
1235 } else { /* all VCN 5.x, not just VCN_5_0_0 */
1236 if (is_h264 && !has_b)
1237 *offset += align(full_size * 8, enc->alignment);
1238 else if (!is_h264)
1239 *offset += align(full_size * 24, enc->alignment);
1240 }
1241 }
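/* Rough worked example (H.264, no B-frames, 1920x1080, pre-VCN5,
 * enc->alignment == 256):
 *   aligned 1920x1088 -> pre_size  = 30 * 17  = 510   (16x16 blocks of the quarter-res image)
 *                        full_size = 120 * 68 = 8160  (16x16 blocks of the full-res image)
 *   *offset += align((510 * 4 + 8160) * 4, 256) = 40960 bytes
 * reserved for the two-pass search center map. */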
1242
1243 static int setup_dpb(struct radeon_encoder *enc, uint32_t num_reconstructed_pictures)
1244 {
1245 struct si_screen *sscreen = (struct si_screen *)enc->screen;
1246 bool is_h264 = u_reduce_video_profile(enc->base.profile)
1247 == PIPE_VIDEO_FORMAT_MPEG4_AVC;
1248 bool is_av1 = u_reduce_video_profile(enc->base.profile)
1249 == PIPE_VIDEO_FORMAT_AV1;
1250 uint32_t rec_alignment = is_h264 ? 16 : 64;
1251 uint32_t aligned_width = align(enc->base.width, rec_alignment);
1252 uint32_t aligned_height = align(enc->base.height, rec_alignment);
1253 uint32_t pitch = align(aligned_width, enc->alignment);
1254 uint32_t luma_size, chroma_size, offset;
1255 struct radeon_enc_pic *enc_pic = &enc->enc_pic;
1256 int i;
1257 bool has_b = enc_pic->spec_misc.b_picture_enabled; /* for h264 only */
1258 uint32_t aligned_dpb_height = MAX2(256, aligned_height);
1259 uint32_t total_coloc_bytes = (align((aligned_width / 16), 64) / 2)
1260 * (aligned_height / 16);
1261
1262 luma_size = align(pitch * aligned_dpb_height, enc->alignment);
1263 chroma_size = align(luma_size / 2, enc->alignment);
1264 if (enc_pic->bit_depth_luma_minus8 || enc_pic->bit_depth_chroma_minus8) {
1265 luma_size *= 2;
1266 chroma_size *= 2;
1267 }
1268
1269 assert(num_reconstructed_pictures <= RENCODE_MAX_NUM_RECONSTRUCTED_PICTURES);
1270
1271 enc_pic->ctx_buf.rec_luma_pitch = pitch;
1272 enc_pic->ctx_buf.pre_encode_picture_luma_pitch = pitch;
1273 enc_pic->ctx_buf.num_reconstructed_pictures = num_reconstructed_pictures;
1274 enc_pic->total_coloc_bytes = total_coloc_bytes;
1275
1276 offset = 0;
1277 enc->metadata_size = 0;
1278 if (sscreen->info.vcn_ip_version < VCN_5_0_0) {
1279 enc_pic->ctx_buf.rec_chroma_pitch = pitch;
1280 enc_pic->ctx_buf.pre_encode_picture_chroma_pitch = pitch;
1281 if (has_b) {
1282 enc_pic->ctx_buf.colloc_buffer_offset = offset;
1283 offset += total_coloc_bytes;
1284 } else
1285 enc_pic->ctx_buf.colloc_buffer_offset = 0;
1286
1287 if (enc_pic->quality_modes.pre_encode_mode)
1288 pre_encode_size(enc, &offset);
1289 else
1290 enc_pic->ctx_buf.two_pass_search_center_map_offset = 0;
1291
1292 if (enc_pic->quality_modes.pre_encode_mode) {
1293 enc_pic->ctx_buf.pre_encode_input_picture.rgb.red_offset = offset;
1294 offset += luma_size;
1295 enc_pic->ctx_buf.pre_encode_input_picture.rgb.green_offset = offset;
1296 offset += luma_size;
1297 enc_pic->ctx_buf.pre_encode_input_picture.rgb.blue_offset = offset;
1298 offset += luma_size;
1299 }
1300
1301 if (is_av1) {
1302 enc_pic->ctx_buf.av1.av1_sdb_intermediate_context_offset = offset;
1303 offset += RENCODE_AV1_SDB_FRAME_CONTEXT_SIZE;
1304 }
1305
1306 for (i = 0; i < num_reconstructed_pictures; i++) {
1307 radeon_enc_rec_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1308 &offset, luma_size, chroma_size, is_av1);
1309
1310 if (enc_pic->quality_modes.pre_encode_mode)
1311 radeon_enc_rec_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1312 &offset, luma_size, chroma_size, is_av1);
1313 }
1314
1315 for (; i < RENCODE_MAX_NUM_RECONSTRUCTED_PICTURES; i++) {
1316 radeon_enc_rec_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1317 NULL, 0, 0, false);
1318 if (enc_pic->quality_modes.pre_encode_mode)
1319 radeon_enc_rec_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1320 NULL, 0, 0, false);
1321 }
1322
1323 enc->dpb_size = offset;
1324 } else { /* VCN 5.0 and later */
1325 enc_pic->ctx_buf.rec_chroma_pitch = pitch / 2;
1326 enc_pic->ctx_buf.pre_encode_picture_chroma_pitch = pitch / 2;
1327 /* dpb buffer */
1328 if (is_av1) {
1329 enc_pic->ctx_buf.av1.av1_sdb_intermediate_context_offset = offset;
1330 offset += RENCODE_AV1_SDB_FRAME_CONTEXT_SIZE;
1331 } else
1332 enc_pic->ctx_buf.av1.av1_sdb_intermediate_context_offset = 0;
1333
1334 if (enc_pic->quality_modes.pre_encode_mode) {
1335 enc_pic->ctx_buf.pre_encode_input_picture.rgb.red_offset = offset;
1336 offset += luma_size;
1337 enc_pic->ctx_buf.pre_encode_input_picture.rgb.green_offset = offset;
1338 offset += luma_size;
1339 enc_pic->ctx_buf.pre_encode_input_picture.rgb.blue_offset = offset;
1340 offset += luma_size;
1341 }
1342
1343 for (i = 0; i < num_reconstructed_pictures; i++) {
1344 radeon_enc_rec_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1345 &offset, luma_size, chroma_size, false);
1346
1347 if (enc_pic->quality_modes.pre_encode_mode)
1348 radeon_enc_rec_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1349 &offset, luma_size, chroma_size, false);
1350 }
1351
1352 for (; i < RENCODE_MAX_NUM_RECONSTRUCTED_PICTURES; i++) {
1353 radeon_enc_rec_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1354 NULL, 0, 0, false);
1355 if (enc_pic->quality_modes.pre_encode_mode)
1356 radeon_enc_rec_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1357 NULL, 0, 0, false);
1358 }
1359
1360 enc->dpb_size = offset;
1361
1362 /* meta buffer */
1363 offset = 0;
1364 if (enc_pic->quality_modes.pre_encode_mode)
1365 pre_encode_size(enc, &offset);
1366 else
1367 enc_pic->ctx_buf.two_pass_search_center_map_offset = 0;
1368
1369 for (i = 0; i < num_reconstructed_pictures; i++) {
1370 radeon_enc_rec_meta_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1371 &offset, total_coloc_bytes, enc->alignment, has_b, is_h264, is_av1);
1372 if (enc_pic->quality_modes.pre_encode_mode)
1373 radeon_enc_rec_meta_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1374 &offset, total_coloc_bytes, enc->alignment, has_b, is_h264, is_av1);
1375 }
1376 for (; i < RENCODE_MAX_NUM_RECONSTRUCTED_PICTURES; i++) {
1377 radeon_enc_rec_meta_offset(&enc_pic->ctx_buf.reconstructed_pictures[i],
1378 NULL, 0, 0, false, false, false);
1379 if (enc_pic->quality_modes.pre_encode_mode)
1380 radeon_enc_rec_meta_offset(&enc_pic->ctx_buf.pre_encode_reconstructed_pictures[i],
1381 NULL, 0, 0, false, false, false);
1382 }
1383 enc->metadata_size = offset;
1384 }
1385
1386 enc->dpb_slots = num_reconstructed_pictures;
1387
1388 return enc->dpb_size;
1389 }
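/* DPB sizing summary:
 *   - Before VCN 5.0 every per-slot allocation (recon surfaces, colocated
 *     motion data, pre-encode surfaces, AV1 contexts) lives in the single
 *     DPB buffer; enc->metadata_size stays 0.
 *   - From VCN 5.0 the recon surfaces stay in the DPB buffer, while the
 *     per-slot frame-context/metadata blocks are sized separately into
 *     enc->metadata_size and backed by enc->meta in begin_frame(). */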
1390
1391 /* each block (MB/CTB/SB) has one QP/QI value */
1392 static uint32_t roi_buffer_size(struct radeon_encoder *enc)
1393 {
1394 uint32_t pitch_size_in_dword = 0;
1395 rvcn_enc_qp_map_t *qp_map = &enc->enc_pic.enc_qp_map;
1396
1397 if (qp_map->version == RENCODE_QP_MAP_LEGACY) {
1398 pitch_size_in_dword = qp_map->width_in_block;
1399 qp_map->qp_map_pitch = qp_map->width_in_block;
1400 } else {
1401 /* two units merge into 1 dword */
1402 pitch_size_in_dword = DIV_ROUND_UP(qp_map->width_in_block, 2);
1403 qp_map->qp_map_pitch = pitch_size_in_dword * 2;
1404 }
1405
1406 return pitch_size_in_dword * qp_map->height_in_block * sizeof(uint32_t);
1407 }
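/* Example for a 1920x1080 AVC stream (120 x 68 blocks of 16x16):
 *   legacy map : one dword per block   -> 120 * 68 * 4 = 32640 bytes
 *   unified map: two int16 per dword   -> 60  * 68 * 4 = 16320 bytes,
 *                with qp_map_pitch rounded up to an even block count. */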
1408
1409 static void arrange_qp_map(void *start,
1410 struct rvcn_enc_qp_map_region *region,
1411 rvcn_enc_qp_map_t *map)
1412 {
1413 uint32_t i, j;
1414 uint32_t offset;
1415 uint32_t num_in_x = MIN2(region->x_in_unit + region->width_in_unit, map->width_in_block)
1416 - region->x_in_unit;
1417 uint32_t num_in_y = MIN2(region->y_in_unit + region->height_in_unit, map->height_in_block)
1418 - region->y_in_unit;
1419
1420 for (j = 0; j < num_in_y; j++) {
1421 for (i = 0; i < num_in_x; i++) {
1422 offset = region->x_in_unit + i + (region->y_in_unit + j) * map->qp_map_pitch;
1423 if (map->version == RENCODE_QP_MAP_LEGACY)
1424 *((uint32_t *)start + offset) = (int32_t)region->qp_delta;
1425 else
1426 *((int16_t *)start + offset) =
1427 (int16_t)(region->qp_delta << RENCODE_QP_MAP_UNIFIED_QP_BITS_SHIFT);
1428 }
1429 }
1430 }
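/* Entry formats written above:
 *   legacy : one int32 per block holding the raw qp_delta.
 *   unified: one int16 per block, with qp_delta pre-shifted by
 *            RENCODE_QP_MAP_UNIFIED_QP_BITS_SHIFT (a fixed-point QP representation). */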
1431
1432 /* Arrange ROI map values according to the input regions.
1433 * A region earlier in the sequence has higher priority and
1434 * may overlap a region that comes later in the sequence. */
1436 static int generate_roi_map(struct radeon_encoder *enc)
1437 {
1438 uint32_t width_in_block, height_in_block;
1439 uint32_t i;
1440 void *p_roi = NULL;
1441
1442 radeon_vcn_enc_blocks_in_frame(enc, &width_in_block, &height_in_block);
1443
1444 p_roi = enc->ws->buffer_map(enc->ws,
1445 enc->roi->res->buf,
1446 &enc->cs,
1447 PIPE_MAP_READ_WRITE | RADEON_MAP_TEMPORARY);
1448 if (!p_roi)
1449 goto error;
1450
1451 memset(p_roi, 0, enc->roi_size);
1452
1453 for (i = 0; i < ARRAY_SIZE(enc->enc_pic.enc_qp_map.map); i++) {
1454 struct rvcn_enc_qp_map_region *region = &enc->enc_pic.enc_qp_map.map[i];
1455 if (region->is_valid)
1456 arrange_qp_map(p_roi, region, &enc->enc_pic.enc_qp_map);
1457 }
1458
1459 enc->ws->buffer_unmap(enc->ws, enc->roi->res->buf);
1460 return 0;
1461 error:
1462 return -1;
1463 }
1464
1465 static void radeon_enc_begin_frame(struct pipe_video_codec *encoder,
1466 struct pipe_video_buffer *source,
1467 struct pipe_picture_desc *picture)
1468 {
1469 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1470 struct si_screen *sscreen = (struct si_screen *)enc->screen;
1471 struct vl_video_buffer *vid_buf = (struct vl_video_buffer *)source;
1472 unsigned dpb_slots = 0;
1473
1474 enc->source = source;
1475 enc->need_rate_control = false;
1476 enc->need_rc_per_pic = false;
1477
1478 if (u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC) {
1479 struct pipe_h264_enc_picture_desc *pic = (struct pipe_h264_enc_picture_desc *)picture;
1480 dpb_slots = MAX2(pic->seq.max_num_ref_frames + 1, pic->dpb_size);
1481 enc->need_rate_control =
1482 (enc->enc_pic.rc_layer_init[0].target_bit_rate != pic->rate_ctrl[0].target_bitrate) ||
1483 (enc->enc_pic.rc_layer_init[0].frame_rate_num != pic->rate_ctrl[0].frame_rate_num) ||
1484 (enc->enc_pic.rc_layer_init[0].frame_rate_den != pic->rate_ctrl[0].frame_rate_den);
1485
1486 enc->need_rc_per_pic =
1487 (enc->enc_pic.rc_per_pic.qp_i != pic->quant_i_frames) ||
1488 (enc->enc_pic.rc_per_pic.qp_p != pic->quant_p_frames) ||
1489 (enc->enc_pic.rc_per_pic.qp_b != pic->quant_b_frames) ||
1490 (enc->enc_pic.rc_per_pic.max_au_size_i != pic->rate_ctrl[0].max_au_size) ||
1491 (enc->enc_pic.rc_per_pic.qvbr_quality_level != pic->rate_ctrl[0].vbr_quality_factor);
1492 } else if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_HEVC) {
1493 struct pipe_h265_enc_picture_desc *pic = (struct pipe_h265_enc_picture_desc *)picture;
1494 dpb_slots = MAX2(pic->seq.sps_max_dec_pic_buffering_minus1[0] + 1, pic->dpb_size);
1495 enc->need_rate_control =
1496 (enc->enc_pic.rc_layer_init[0].target_bit_rate != pic->rc[0].target_bitrate) ||
1497 (enc->enc_pic.rc_layer_init[0].frame_rate_num != pic->rc[0].frame_rate_num) ||
1498 (enc->enc_pic.rc_layer_init[0].frame_rate_den != pic->rc[0].frame_rate_den);
1499
1500 enc->need_rc_per_pic =
1501 (enc->enc_pic.rc_per_pic.qp_i != pic->rc[0].quant_i_frames) ||
1502 (enc->enc_pic.rc_per_pic.qp_p != pic->rc[0].quant_p_frames) ||
1503 (enc->enc_pic.rc_per_pic.max_au_size_i != pic->rc[0].max_au_size) ||
1504 (enc->enc_pic.rc_per_pic.qvbr_quality_level != pic->rc[0].vbr_quality_factor);
1505 } else if (u_reduce_video_profile(picture->profile) == PIPE_VIDEO_FORMAT_AV1) {
1506 struct pipe_av1_enc_picture_desc *pic = (struct pipe_av1_enc_picture_desc *)picture;
1507 dpb_slots = pic->dpb_size;
1508 enc->need_rate_control =
1509 (enc->enc_pic.rc_layer_init[0].target_bit_rate != pic->rc[0].target_bitrate) ||
1510 (enc->enc_pic.rc_layer_init[0].frame_rate_num != pic->rc[0].frame_rate_num) ||
1511 (enc->enc_pic.rc_layer_init[0].frame_rate_den != pic->rc[0].frame_rate_den);
1512
1513 enc->need_rc_per_pic =
1514 (enc->enc_pic.rc_per_pic.qp_i != pic->rc[0].qp) ||
1515 (enc->enc_pic.rc_per_pic.qp_p != pic->rc[0].qp_inter) ||
1516 (enc->enc_pic.rc_per_pic.qp_b != pic->rc[0].qp_inter) ||
1517 (enc->enc_pic.rc_per_pic.max_au_size_i != pic->rc[0].max_au_size) ||
1518 (enc->enc_pic.rc_per_pic.qvbr_quality_level != pic->rc[0].vbr_quality_factor);
1519
1520 if (!enc->cdf) {
1521 enc->cdf = CALLOC_STRUCT(rvid_buffer);
1522 if (setup_cdf(enc)) {
1523 RADEON_ENC_ERR("Can't create cdf buffer.\n");
1524 goto error;
1525 }
1526 }
1527 }
1528
1529 if (enc->dpb_type == DPB_TIER_2)
1530 dpb_slots = 0;
1531
1532 radeon_vcn_enc_get_param(enc, picture);
1533 if (!enc->dpb) {
1534 enc->dpb = CALLOC_STRUCT(rvid_buffer);
1535 if (setup_dpb(enc, dpb_slots)) {
1536 if (!enc->dpb ||
1537 !si_vid_create_buffer(enc->screen, enc->dpb, enc->dpb_size, PIPE_USAGE_DEFAULT)) {
1538 RADEON_ENC_ERR("Can't create DPB buffer.\n");
1539 goto error;
1540 }
1541 }
1542 }
1543
1544 if ((sscreen->info.vcn_ip_version >= VCN_5_0_0) && enc->metadata_size && !enc->meta) {
1545 enc->meta = CALLOC_STRUCT(rvid_buffer);
1546 if (!enc->meta ||
1547 !si_vid_create_buffer(enc->screen, enc->meta, enc->metadata_size, PIPE_USAGE_DEFAULT)) {
1548 RADEON_ENC_ERR("Can't create meta buffer.\n");
1549 goto error;
1550 }
1551 }
1552
1553 if (dpb_slots > enc->dpb_slots) {
1554 setup_dpb(enc, dpb_slots);
1555 if (!si_vid_resize_buffer(enc->base.context, &enc->cs, enc->dpb, enc->dpb_size, NULL)) {
1556 RADEON_ENC_ERR("Can't resize DPB buffer.\n");
1557 goto error;
1558 }
1559 if (sscreen->info.vcn_ip_version >= VCN_5_0_0 && enc->metadata_size &&
1560 !si_vid_resize_buffer(enc->base.context, &enc->cs, enc->meta, enc->metadata_size, NULL)) {
1561 RADEON_ENC_ERR("Can't resize meta buffer.\n");
1562 goto error;
1563 }
1564 }
1565
1566 /* the QP map buffer may be created here and is released at the end */
1567 if (enc->enc_pic.enc_qp_map.qp_map_type != RENCODE_QP_MAP_TYPE_NONE) {
1568 if (!enc->roi) {
1569 enc->roi = CALLOC_STRUCT(rvid_buffer);
1570 enc->roi_size = roi_buffer_size(enc);
1571 if (!enc->roi || !enc->roi_size ||
1572 !si_vid_create_buffer(enc->screen, enc->roi, enc->roi_size, PIPE_USAGE_DYNAMIC)) {
1573 RADEON_ENC_ERR("Can't create ROI buffer.\n");
1574 goto error;
1575 }
1576 }
1577 if (generate_roi_map(enc)) {
1578 RADEON_ENC_ERR("Can't form roi map.\n");
1579 goto error;
1580 }
1581 }
1582
1583 if (source->buffer_format == PIPE_FORMAT_NV12 ||
1584 source->buffer_format == PIPE_FORMAT_P010 ||
1585 source->buffer_format == PIPE_FORMAT_P016) {
1586 enc->get_buffer(vid_buf->resources[0], &enc->handle, &enc->luma);
1587 enc->get_buffer(vid_buf->resources[1], NULL, &enc->chroma);
1588 } else {
1590 enc->get_buffer(vid_buf->resources[0], &enc->handle, &enc->luma);
1591 enc->chroma = NULL;
1592 }
1593
1594 enc->need_feedback = false;
1595
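   /* First frame on this encoder: allocate the session buffer, then submit an
    * initial IB (with a throwaway feedback buffer) so the firmware session is
    * created before the first real encode. The rate-control state programmed
    * by begin() does not need to be resent with the next frame. */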
1596 if (!enc->stream_handle) {
1597 struct rvid_buffer fb;
1598 enc->stream_handle = si_vid_alloc_stream_handle();
1599 enc->si = CALLOC_STRUCT(rvid_buffer);
1600 if (!enc->si ||
1601 !enc->stream_handle ||
1602 !si_vid_create_buffer(enc->screen, enc->si, 128 * 1024, PIPE_USAGE_DEFAULT)) {
1603 RADEON_ENC_ERR("Can't create session buffer.\n");
1604 goto error;
1605 }
1606 si_vid_create_buffer(enc->screen, &fb, 4096, PIPE_USAGE_STAGING);
1607 enc->fb = &fb;
1608 enc->begin(enc);
1609 flush(enc, PIPE_FLUSH_ASYNC, NULL);
1610 si_vid_destroy_buffer(&fb);
1611 enc->need_rate_control = false;
1612 enc->need_rc_per_pic = false;
1613 }
1614
1615 return;
1616
1617 error:
1618 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->dpb);
1619 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->si);
1620 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->cdf);
1621 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->roi);
1622 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->meta);
1623 }
1624
1625 static uint32_t radeon_vcn_enc_encode_h264_header(struct radeon_encoder *enc,
1626 struct pipe_enc_raw_header *header,
1627 uint8_t *out)
1628 {
1629 /* Startcode may be 3 or 4 bytes. */
1630 const uint8_t nal_byte = header->buffer[header->buffer[2] == 0x1 ? 3 : 4];
1631
1632 switch (header->type) {
1633 case PIPE_H264_NAL_SPS:
1634 return radeon_enc_write_sps(enc, nal_byte, out);
1635 case PIPE_H264_NAL_PPS:
1636 return radeon_enc_write_pps(enc, nal_byte, out);
1637 default:
1638 assert(header->buffer);
1639 memcpy(out, header->buffer, header->size);
1640 return header->size;
1641 }
1642 }
1643
1644 static uint32_t radeon_vcn_enc_encode_hevc_header(struct radeon_encoder *enc,
1645 struct pipe_enc_raw_header *header,
1646 uint8_t *out)
1647 {
1648 switch (header->type) {
1649 case PIPE_H265_NAL_VPS:
1650 return radeon_enc_write_vps(enc, out);
1651 case PIPE_H265_NAL_SPS:
1652 return radeon_enc_write_sps_hevc(enc, out);
1653 case PIPE_H265_NAL_PPS:
1654 return radeon_enc_write_pps_hevc(enc, out);
1655 default:
1656 assert(header->buffer);
1657 memcpy(out, header->buffer, header->size);
1658 return header->size;
1659 }
1660 }
1661
1662 static uint32_t radeon_vcn_enc_encode_av1_header(struct radeon_encoder *enc,
1663 struct pipe_enc_raw_header *header,
1664 uint8_t *out)
1665 {
1666 switch (header->type) {
1667 case 1: /* SEQUENCE_HEADER */
1668 return radeon_enc_write_sequence_header(enc, header->buffer, out);
1669 default:
1670 assert(header->buffer);
1671 memcpy(out, header->buffer, header->size);
1672 return header->size;
1673 }
1674 }
1675
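/* Write any application-supplied raw headers (SPS/PPS/VPS/sequence header,
 * etc.) directly into the bitstream buffer ahead of the slice data and record
 * their offsets/sizes as feedback segments; the single slice segment is
 * completed later in get_feedback() once the firmware reports the coded size.
 * Returns the rvcn_enc_feedback_data blob stored in fb->user_data, or NULL
 * when there is nothing to pre-write. */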
1676 static void *radeon_vcn_enc_encode_headers(struct radeon_encoder *enc)
1677 {
1678 const bool is_h264 = u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_MPEG4_AVC;
1679 const bool is_hevc = u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_HEVC;
1680 const bool is_av1 = u_reduce_video_profile(enc->base.profile) == PIPE_VIDEO_FORMAT_AV1;
1681 struct util_dynarray *headers;
1682 unsigned num_slices = 0, num_headers = 0;
1683
1684 if (is_h264)
1685 headers = &enc->enc_pic.h264.desc->raw_headers;
1686 else if (is_hevc)
1687 headers = &enc->enc_pic.hevc.desc->raw_headers;
1688 else if (is_av1)
1689 headers = &enc->enc_pic.av1.desc->raw_headers;
1690 else
1691 return NULL;
1692
1693 util_dynarray_foreach(headers, struct pipe_enc_raw_header, header) {
1694 if (header->is_slice)
1695 num_slices++;
1696 num_headers++;
1697 }
1698
1699 if (!num_headers || !num_slices || num_headers == num_slices)
1700 return NULL;
1701
1702 size_t segments_size =
1703 sizeof(struct rvcn_enc_output_unit_segment) * (num_headers - num_slices + 1);
1704 struct rvcn_enc_feedback_data *data =
1705 CALLOC_VARIANT_LENGTH_STRUCT(rvcn_enc_feedback_data, segments_size);
1706 if (!data)
1707 return NULL;
1708
1709 uint8_t *ptr = enc->ws->buffer_map(enc->ws, enc->bs_handle, &enc->cs,
1710 PIPE_MAP_WRITE | RADEON_MAP_TEMPORARY);
1711 if (!ptr) {
1712 RADEON_ENC_ERR("Can't map bs buffer.\n");
1713 FREE(data);
1714 return NULL;
1715 }
1716
1717 unsigned offset = 0;
1718 struct rvcn_enc_output_unit_segment *slice_segment = NULL;
1719
1720 util_dynarray_foreach(headers, struct pipe_enc_raw_header, header) {
1721 if (header->is_slice) {
1722 if (slice_segment)
1723 continue;
1724 slice_segment = &data->segments[data->num_segments];
1725 slice_segment->is_slice = true;
1726 } else {
1727 unsigned size = 0;
1728 if (is_h264)
1729 size = radeon_vcn_enc_encode_h264_header(enc, header, ptr + offset);
1730 else if (is_hevc)
1731 size = radeon_vcn_enc_encode_hevc_header(enc, header, ptr + offset);
1732 else if (is_av1)
1733 size = radeon_vcn_enc_encode_av1_header(enc, header, ptr + offset);
1734 data->segments[data->num_segments].size = size;
1735 data->segments[data->num_segments].offset = offset;
1736 offset += size;
1737 }
1738 data->num_segments++;
1739 }
1740
1741 enc->bs_offset = align(offset, 16);
1742 assert(enc->bs_offset < enc->bs_size);
1743
1744 assert(slice_segment);
1745 slice_segment->offset = enc->bs_offset;
1746
1747 enc->ws->buffer_unmap(enc->ws, enc->bs_handle);
1748
1749 return data;
1750 }
1751
1752 static void radeon_enc_encode_bitstream(struct pipe_video_codec *encoder,
1753 struct pipe_video_buffer *source,
1754 struct pipe_resource *destination, void **fb)
1755 {
1756 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1757 struct vl_video_buffer *vid_buf = (struct vl_video_buffer *)source;
1758
1759 if (enc->error)
1760 return;
1761
1762 enc->get_buffer(destination, &enc->bs_handle, NULL);
1763 enc->bs_size = destination->width0;
1764 enc->bs_offset = 0;
1765
1766 *fb = enc->fb = CALLOC_STRUCT(rvid_buffer);
1767
1768 if (!si_vid_create_buffer(enc->screen, enc->fb, 4096, PIPE_USAGE_STAGING)) {
1769 RADEON_ENC_ERR("Can't create feedback buffer.\n");
1770 return;
1771 }
1772
1773 enc->fb->user_data = radeon_vcn_enc_encode_headers(enc);
1774
1775 if (vid_buf->base.statistics_data) {
1776 enc->get_buffer(vid_buf->base.statistics_data, &enc->stats, NULL);
1777 if (enc->stats->size < sizeof(rvcn_encode_stats_type_0_t)) {
1778 RADEON_ENC_ERR("Encoder statistics output buffer is too small.\n");
1779 enc->stats = NULL;
1780 }
1781 vid_buf->base.statistics_data = NULL;
1782 }
1783 else
1784 enc->stats = NULL;
1785
1786 enc->need_feedback = true;
1787 enc->encode(enc);
1788 }
1789
1790 static int radeon_enc_end_frame(struct pipe_video_codec *encoder, struct pipe_video_buffer *source,
1791 struct pipe_picture_desc *picture)
1792 {
1793 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1794
1795 if (enc->error)
1796 return -1;
1797
1798 return flush(enc, picture->flush_flags, picture->fence);
1799 }
1800
1801 static void radeon_enc_destroy(struct pipe_video_codec *encoder)
1802 {
1803 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1804
1805 if (enc->stream_handle) {
1806 struct rvid_buffer fb;
1807 enc->need_feedback = false;
1808 si_vid_create_buffer(enc->screen, &fb, 512, PIPE_USAGE_STAGING);
1809 enc->fb = &fb;
1810 enc->destroy(enc);
1811 flush(enc, PIPE_FLUSH_ASYNC, NULL);
1812 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->si);
1813 si_vid_destroy_buffer(&fb);
1814 }
1815
1816 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->dpb);
1817 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->cdf);
1818 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->roi);
1819 RADEON_ENC_DESTROY_VIDEO_BUFFER(enc->meta);
1820 enc->ws->cs_destroy(&enc->cs);
1821 if (enc->ectx)
1822 enc->ectx->destroy(enc->ectx);
1823
1824 FREE(enc);
1825 }
1826
1827 static void radeon_enc_get_feedback(struct pipe_video_codec *encoder, void *feedback,
1828 unsigned *size, struct pipe_enc_feedback_metadata *metadata)
1829 {
1830 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1831 struct rvid_buffer *fb = feedback;
1832
1833 uint32_t *ptr = enc->ws->buffer_map(enc->ws, fb->res->buf, &enc->cs,
1834 PIPE_MAP_READ_WRITE | RADEON_MAP_TEMPORARY);
1835 if (ptr[1])
1836 *size = ptr[6] - ptr[8];
1837 else
1838 *size = 0;
1839 enc->ws->buffer_unmap(enc->ws, fb->res->buf);
1840
1841 metadata->present_metadata = PIPE_VIDEO_FEEDBACK_METADATA_TYPE_CODEC_UNIT_LOCATION;
1842
1843 if (fb->user_data) {
1844 struct rvcn_enc_feedback_data *data = fb->user_data;
1845 metadata->codec_unit_metadata_count = data->num_segments;
1846 for (unsigned i = 0; i < data->num_segments; i++) {
1847 metadata->codec_unit_metadata[i].offset = data->segments[i].offset;
1848 if (data->segments[i].is_slice) {
1849 metadata->codec_unit_metadata[i].size = *size;
1850 metadata->codec_unit_metadata[i].flags = 0;
1851 } else {
1852 metadata->codec_unit_metadata[i].size = data->segments[i].size;
1853 metadata->codec_unit_metadata[i].flags = PIPE_VIDEO_CODEC_UNIT_LOCATION_FLAG_SINGLE_NALU;
1854 }
1855 }
1856 FREE(fb->user_data);
1857 fb->user_data = NULL;
1858 } else {
1859 metadata->codec_unit_metadata_count = 1;
1860 metadata->codec_unit_metadata[0].offset = 0;
1861 metadata->codec_unit_metadata[0].size = *size;
1862 metadata->codec_unit_metadata[0].flags = 0;
1863 }
1864
1865 RADEON_ENC_DESTROY_VIDEO_BUFFER(fb);
1866 }
1867
1868 static int radeon_enc_fence_wait(struct pipe_video_codec *encoder,
1869 struct pipe_fence_handle *fence,
1870 uint64_t timeout)
1871 {
1872 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1873
1874 return enc->ws->fence_wait(enc->ws, fence, timeout);
1875 }
1876
1877 static void radeon_enc_destroy_fence(struct pipe_video_codec *encoder,
1878 struct pipe_fence_handle *fence)
1879 {
1880 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1881
1882 enc->ws->fence_reference(enc->ws, &fence, NULL);
1883 }
1884
1885 static unsigned int radeon_enc_frame_context_buffer_size(struct radeon_encoder *enc)
1886 {
1887 unsigned int size = 0;
1888 bool is_h264 = u_reduce_video_profile(enc->base.profile)
1889 == PIPE_VIDEO_FORMAT_MPEG4_AVC;
1890 bool is_av1 = u_reduce_video_profile(enc->base.profile)
1891 == PIPE_VIDEO_FORMAT_AV1;
1892 bool has_b = enc->enc_pic.spec_misc.b_picture_enabled; /* for h264 only */
1893
1894 size = RENCODE_MAX_METADATA_BUFFER_SIZE_PER_FRAME;
1895 if (is_h264) {
1896 if (has_b) {
1897 enc->enc_pic.fcb_offset.h264.colloc_buffer_offset = size;
1898 size += enc->enc_pic.total_coloc_bytes;
1899 } else
1900 enc->enc_pic.fcb_offset.h264.colloc_buffer_offset =
1901 RENCODE_INVALID_COLOC_OFFSET;
1902 }
1903
1904 if (is_av1) {
1905 enc->enc_pic.fcb_offset.av1.av1_cdf_frame_context_offset = size;
1906 size += RENCODE_AV1_FRAME_CONTEXT_CDF_TABLE_SIZE;
1907 enc->enc_pic.fcb_offset.av1.av1_cdef_algorithm_context_offset = size;
1908 size += RENCODE_AV1_CDEF_ALGORITHM_FRAME_CONTEXT_SIZE;
1909 }
1910
1911 size = align(size, enc->alignment);
1912 return size;
1913 }
1914
1915 void radeon_enc_create_dpb_aux_buffers(struct radeon_encoder *enc, struct radeon_enc_dpb_buffer *buf)
1916 {
1917 if (buf->fcb)
1918 return;
1919
1920 uint32_t fcb_size = radeon_enc_frame_context_buffer_size(enc);
1921
1922 buf->fcb = CALLOC_STRUCT(rvid_buffer);
1923 if (!buf->fcb || !si_vid_create_buffer(enc->screen, buf->fcb, fcb_size, PIPE_USAGE_DEFAULT)) {
1924 RADEON_ENC_ERR("Can't create fcb buffer!\n");
1925 return;
1926 }
1927
1928 if (enc->enc_pic.quality_modes.pre_encode_mode) {
1929 buf->pre = enc->base.context->create_video_buffer(enc->base.context, &buf->templ);
1930 if (!buf->pre) {
1931 RADEON_ENC_ERR("Can't create preenc buffer!\n");
1932 return;
1933 }
1934 buf->pre_luma = (struct si_texture *)((struct vl_video_buffer *)buf->pre)->resources[0];
1935 buf->pre_chroma = (struct si_texture *)((struct vl_video_buffer *)buf->pre)->resources[1];
1936
1937 buf->pre_fcb = CALLOC_STRUCT(rvid_buffer);
1938 if (!buf->pre_fcb || !si_vid_create_buffer(enc->screen, buf->pre_fcb, fcb_size, PIPE_USAGE_DEFAULT)) {
1939 RADEON_ENC_ERR("Can't create preenc fcb buffer!\n");
1940 return;
1941 }
1942 }
1943 }
1944
1945 static void radeon_enc_destroy_dpb_buffer(void *data)
1946 {
1947 struct radeon_enc_dpb_buffer *dpb = data;
1948
1949 if (dpb->pre)
1950 dpb->pre->destroy(dpb->pre);
1951
1952 RADEON_ENC_DESTROY_VIDEO_BUFFER(dpb->fcb);
1953 RADEON_ENC_DESTROY_VIDEO_BUFFER(dpb->pre_fcb);
1954 FREE(dpb);
1955 }
1956
1957 static struct pipe_video_buffer *radeon_enc_create_dpb_buffer(struct pipe_video_codec *encoder,
1958 struct pipe_picture_desc *picture,
1959 const struct pipe_video_buffer *templat)
1960 {
1961 struct radeon_encoder *enc = (struct radeon_encoder *)encoder;
1962
1963 struct pipe_video_buffer templ = *templat;
1964 templ.bind |= PIPE_BIND_VIDEO_ENCODE_DPB;
1965 struct pipe_video_buffer *buf = enc->base.context->create_video_buffer(enc->base.context, &templ);
1966 if (!buf) {
1967 RADEON_ENC_ERR("Can't create dpb buffer!\n");
1968 return NULL;
1969 }
1970
1971 struct radeon_enc_dpb_buffer *dpb = CALLOC_STRUCT(radeon_enc_dpb_buffer);
1972 dpb->templ = templ;
1973 dpb->luma = (struct si_texture *)((struct vl_video_buffer *)buf)->resources[0];
1974 dpb->chroma = (struct si_texture *)((struct vl_video_buffer *)buf)->resources[1];
1975
1976 vl_video_buffer_set_associated_data(buf, &enc->base, dpb, &radeon_enc_destroy_dpb_buffer);
1977
1978 return buf;
1979 }
1980
1981 struct pipe_video_codec *radeon_create_encoder(struct pipe_context *context,
1982 const struct pipe_video_codec *templ,
1983 struct radeon_winsys *ws,
1984 radeon_enc_get_buffer get_buffer)
1985 {
1986 struct si_screen *sscreen = (struct si_screen *)context->screen;
1987 struct si_context *sctx = (struct si_context *)context;
1988 struct radeon_encoder *enc;
1989
1990 enc = CALLOC_STRUCT(radeon_encoder);
1991
1992 if (!enc)
1993 return NULL;
1994
1995 if (sctx->vcn_has_ctx) {
1996 enc->ectx = context->screen->context_create(context->screen, NULL, PIPE_CONTEXT_COMPUTE_ONLY);
1997 if (!enc->ectx)
1998 sctx->vcn_has_ctx = false;
1999 }
2000
2001 enc->alignment = 256;
2002 enc->base = *templ;
2003 enc->base.context = (sctx->vcn_has_ctx) ? enc->ectx : context;
2004 enc->base.destroy = radeon_enc_destroy;
2005 enc->base.begin_frame = radeon_enc_begin_frame;
2006 enc->base.encode_bitstream = radeon_enc_encode_bitstream;
2007 enc->base.end_frame = radeon_enc_end_frame;
2008 enc->base.flush = radeon_enc_flush;
2009 enc->base.get_feedback = radeon_enc_get_feedback;
2010 enc->base.fence_wait = radeon_enc_fence_wait;
2011 enc->base.destroy_fence = radeon_enc_destroy_fence;
2012 enc->get_buffer = get_buffer;
2013 enc->screen = context->screen;
2014 enc->ws = ws;
2015
2016 if (!ws->cs_create(&enc->cs,
2017 (sctx->vcn_has_ctx) ? ((struct si_context *)enc->ectx)->ctx : sctx->ctx,
2018 AMD_IP_VCN_ENC, radeon_enc_cs_flush, enc)) {
2019 RADEON_ENC_ERR("Can't get command submission context.\n");
2020 goto error;
2021 }
2022
2023 enc->enc_pic.use_rc_per_pic_ex = false;
2024
2025 ac_vcn_enc_init_cmds(&enc->cmd, sscreen->info.vcn_ip_version);
2026
2027 if (sscreen->info.vcn_ip_version >= VCN_5_0_0)
2028 enc->dpb_type = DPB_TIER_2;
2029
2030 if (enc->dpb_type == DPB_TIER_2)
2031 enc->base.create_dpb_buffer = radeon_enc_create_dpb_buffer;
2032
2033 if (sscreen->info.vcn_ip_version >= VCN_5_0_0) {
2034 radeon_enc_5_0_init(enc);
2035 if (sscreen->info.vcn_ip_version == VCN_5_0_0) {
2036 /* this limits tile splitting scheme to use legacy method */
2037 enc->enc_pic.av1_tile_splitting_legacy_flag = true;
2038 }
2039 }
2040 else if (sscreen->info.vcn_ip_version >= VCN_4_0_0) {
2041 if (sscreen->info.vcn_enc_minor_version >= 1)
2042 enc->enc_pic.use_rc_per_pic_ex = true;
2043 radeon_enc_4_0_init(enc);
2044 }
2045 else if (sscreen->info.vcn_ip_version >= VCN_3_0_0) {
2046 if (sscreen->info.vcn_enc_minor_version >= 29)
2047 enc->enc_pic.use_rc_per_pic_ex = true;
2048 radeon_enc_3_0_init(enc);
2049 }
2050 else if (sscreen->info.vcn_ip_version >= VCN_2_0_0) {
2051 if (sscreen->info.vcn_enc_minor_version >= 18)
2052 enc->enc_pic.use_rc_per_pic_ex = true;
2053 radeon_enc_2_0_init(enc);
2054 }
2055 else {
2056 if (sscreen->info.vcn_enc_minor_version >= 15)
2057 enc->enc_pic.use_rc_per_pic_ex = true;
2058 radeon_enc_1_2_init(enc);
2059 }
2060
2061 return &enc->base;
2062
2063 error:
2064 enc->ws->cs_destroy(&enc->cs);
2065 FREE(enc);
2066 return NULL;
2067 }
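/* Typical per-frame call sequence from the state tracker, via the
 * pipe_video_codec vtable set up above (a sketch, not driver-internal code;
 * src, bitstream_res, pic, feedback and metadata are placeholder names):
 *
 *   codec->begin_frame(codec, src, &pic.base);
 *   codec->encode_bitstream(codec, src, bitstream_res, &feedback);
 *   codec->end_frame(codec, src, &pic.base);
 *   codec->flush(codec);
 *   codec->get_feedback(codec, feedback, &coded_size, &metadata);
 *
 * where src is a pipe_video_buffer, bitstream_res the destination
 * pipe_resource, and pic the codec-specific *_enc_picture_desc. */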
2068
2069 void radeon_enc_add_buffer(struct radeon_encoder *enc, struct pb_buffer_lean *buf,
2070 unsigned usage, enum radeon_bo_domain domain, signed offset)
2071 {
2072 enc->ws->cs_add_buffer(&enc->cs, buf, usage | RADEON_USAGE_SYNCHRONIZED, domain);
2073 uint64_t addr;
2074 addr = enc->ws->buffer_get_virtual_address(buf);
2075 addr = addr + offset;
2076 RADEON_ENC_CS(addr >> 32);
2077 RADEON_ENC_CS(addr);
2078 }
2079
2080 void radeon_enc_code_leb128(uint8_t *buf, uint32_t value,
2081 uint32_t num_bytes)
2082 {
2083 uint8_t leb128_byte = 0;
2084 uint32_t i = 0;
2085
2086 do {
2087 leb128_byte = (value & 0x7f);
2088 value >>= 7;
2089 if (num_bytes > 1)
2090 leb128_byte |= 0x80;
2091
2092 *(buf + i) = leb128_byte;
2093 num_bytes--;
2094 i++;
2095 } while (leb128_byte & 0x80);
2096 }
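/* Fixed-length LEB128, typically used for AV1 OBU size fields: continuation
 * bits are forced while num_bytes > 1 so the field always occupies num_bytes
 * bytes. Worked examples:
 *   value = 300, num_bytes = 2 -> buf[] = { 0xAC, 0x02 }
 *   value = 5,   num_bytes = 2 -> buf[] = { 0x85, 0x00 } */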
2097
2098 unsigned int radeon_enc_av1_tile_log2(unsigned int blk_size, unsigned int max)
2099 {
2100 unsigned int k;
2101
2102 assert(blk_size);
2103 for (k = 0; (blk_size << k) < max; k++) {}
2104
2105 return k;
2106 }
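/* Returns the smallest k such that (blk_size << k) >= max, mirroring the
 * AV1 spec's tile_log2() helper, e.g.
 *   radeon_enc_av1_tile_log2(1, 5) == 3 and radeon_enc_av1_tile_log2(2, 2) == 0. */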
2107
2108 unsigned int radeon_enc_h2645_picture_type(enum pipe_h2645_enc_picture_type type)
2109 {
2110 switch (type) {
2111 case PIPE_H2645_ENC_PICTURE_TYPE_I:
2112 case PIPE_H2645_ENC_PICTURE_TYPE_IDR:
2113 return RENCODE_PICTURE_TYPE_I;
2114 case PIPE_H2645_ENC_PICTURE_TYPE_P:
2115 return RENCODE_PICTURE_TYPE_P;
2116 case PIPE_H2645_ENC_PICTURE_TYPE_SKIP:
2117 return RENCODE_PICTURE_TYPE_P_SKIP;
2118 case PIPE_H2645_ENC_PICTURE_TYPE_B:
2119 return RENCODE_PICTURE_TYPE_B;
2120 default:
2121 assert(false);
2122 return 0;
2123 }
2124 }
2125
2126 /* dummy function for re-using the same pipeline */
2127 void radeon_enc_dummy(struct radeon_encoder *enc) {}
2128
2129 /* this function has to be in pair with AV1 header copy instruction type at the end */
2130 static void radeon_enc_av1_bs_copy_end(struct radeon_encoder *enc, uint32_t bits)
2131 {
2132 assert(bits > 0);
2133 /* it must be dword aligned at the end */
2134 *enc->enc_pic.copy_start = DIV_ROUND_UP(bits, 32) * 4 + 12;
2135 *(enc->enc_pic.copy_start + 2) = bits;
2136 }
2137
2138 /* av1 bitstream instruction type */
2139 void radeon_enc_av1_bs_instruction_type(struct radeon_encoder *enc,
2140 struct radeon_bitstream *bs,
2141 uint32_t inst,
2142 uint32_t obu_type)
2143 {
2144 radeon_bs_flush_headers(bs);
2145
2146 if (bs->bits_output)
2147 radeon_enc_av1_bs_copy_end(enc, bs->bits_output);
2148
2149 enc->enc_pic.copy_start = &enc->cs.current.buf[enc->cs.current.cdw++];
2150 RADEON_ENC_CS(inst);
2151
2152 if (inst != RENCODE_HEADER_INSTRUCTION_COPY) {
2153 *enc->enc_pic.copy_start = 8;
2154 if (inst == RENCODE_AV1_BITSTREAM_INSTRUCTION_OBU_START) {
2155 *enc->enc_pic.copy_start += 4;
2156 RADEON_ENC_CS(obu_type);
2157 }
2158 } else
2159 RADEON_ENC_CS(0); /* allocate a dword for number of bits */
2160
2161 radeon_bs_reset(bs, NULL, &enc->cs);
2162 }
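/* Each bitstream instruction is emitted as a small packet:
 *   dw0: total packet size in bytes (8 or 12 for non-COPY; for COPY it is
 *        patched later by radeon_enc_av1_bs_copy_end() to 12-byte header
 *        plus the dword-aligned payload)
 *   dw1: instruction code
 *   dw2: OBU type for OBU_START, or the bit count for COPY
 * followed by the copied header payload for COPY instructions. */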
2163
2164 uint32_t radeon_enc_value_bits(uint32_t value)
2165 {
2166 uint32_t i = 1;
2167
2168 while (value > 1) {
2169 i++;
2170 value >>= 1;
2171 }
2172
2173 return i;
2174 }
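/* Number of bits needed to represent the value, with a minimum of 1:
 *   radeon_enc_value_bits(0) == 1, radeon_enc_value_bits(1) == 1,
 *   radeon_enc_value_bits(5) == 3, radeon_enc_value_bits(255) == 8. */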
2175