Home
last modified time | relevance | path

Searched refs:nb_refs (Results 1 – 15 of 15) sorted by relevance

/third_party/ffmpeg/libavcodec/
Dhevc_refs.c303 if (!(s->rps[ST_CURR_BEF].nb_refs + s->rps[ST_CURR_AFT].nb_refs + in ff_hevc_slice_rpl()
304 s->rps[LT_CURR].nb_refs)) { in ff_hevc_slice_rpl()
321 while (rpl_tmp.nb_refs < sh->nb_refs[list_idx]) { in ff_hevc_slice_rpl()
324 for (j = 0; j < rps->nb_refs && rpl_tmp.nb_refs < HEVC_MAX_REFS; j++) { in ff_hevc_slice_rpl()
325 rpl_tmp.list[rpl_tmp.nb_refs] = rps->list[j]; in ff_hevc_slice_rpl()
326 rpl_tmp.ref[rpl_tmp.nb_refs] = rps->ref[j]; in ff_hevc_slice_rpl()
327 rpl_tmp.isLongTerm[rpl_tmp.nb_refs] = i == 2; in ff_hevc_slice_rpl()
328 rpl_tmp.nb_refs++; in ff_hevc_slice_rpl()
335 for (i = 0; i < sh->nb_refs[list_idx]; i++) { in ff_hevc_slice_rpl()
338 if (idx >= rpl_tmp.nb_refs) { in ff_hevc_slice_rpl()
[all …]
Dnvdec_hevc.c189 .NumPocStCurrBefore = s->rps[ST_CURR_BEF].nb_refs, in nvdec_hevc_start_frame()
190 .NumPocStCurrAfter = s->rps[ST_CURR_AFT].nb_refs, in nvdec_hevc_start_frame()
191 .NumPocLtCurr = s->rps[LT_CURR].nb_refs, in nvdec_hevc_start_frame()
218 if (s->rps[LT_CURR].nb_refs > FF_ARRAY_ELEMS(ppc->RefPicSetLtCurr) || in nvdec_hevc_start_frame()
219 s->rps[ST_CURR_BEF].nb_refs > FF_ARRAY_ELEMS(ppc->RefPicSetStCurrBefore) || in nvdec_hevc_start_frame()
220 s->rps[ST_CURR_AFT].nb_refs > FF_ARRAY_ELEMS(ppc->RefPicSetStCurrAfter)) { in nvdec_hevc_start_frame()
240 for (i = 0; i < s->rps[ST_CURR_BEF].nb_refs; i++) { in nvdec_hevc_start_frame()
248 for (i = 0; i < s->rps[ST_CURR_AFT].nb_refs; i++) { in nvdec_hevc_start_frame()
256 for (i = 0; i < s->rps[LT_CURR].nb_refs; i++) { in nvdec_hevc_start_frame()
Dvaapi_hevc.c76 for (i = 0; i < h->rps[ST_CURR_BEF].nb_refs; i++) { in find_frame_rps_type()
81 for (i = 0; i < h->rps[ST_CURR_AFT].nb_refs; i++) { in find_frame_rps_type()
86 for (i = 0; i < h->rps[LT_CURR].nb_refs; i++) { in find_frame_rps_type()
354 for (i = 0; i < 15 && i < sh->nb_refs[L0]; i++) { in fill_pred_weight_table()
364 for (i = 0; i < 15 && i < sh->nb_refs[L1]; i++) { in fill_pred_weight_table()
438 .num_ref_idx_l0_active_minus1 = sh->nb_refs[L0] ? sh->nb_refs[L0] - 1 : 0, in vaapi_hevc_decode_slice()
439 .num_ref_idx_l1_active_minus1 = sh->nb_refs[L1] ? sh->nb_refs[L1] - 1 : 0, in vaapi_hevc_decode_slice()
461 for (i = 0; i < rpl->nb_refs; i++) in vaapi_hevc_decode_slice()
Dvaapi_encode_vp9.c91 av_assert0(pic->nb_refs == 0); in vaapi_encode_vp9_init_picture_params()
97 av_assert0(pic->nb_refs == 1); in vaapi_encode_vp9_init_picture_params()
115 av_assert0(pic->nb_refs == 2); in vaapi_encode_vp9_init_picture_params()
152 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_vp9_init_picture_params()
Dvdpau_hevc.c268 info->NumPocStCurrBefore = h->rps[ST_CURR_BEF].nb_refs; in vdpau_hevc_start_frame()
277 info->NumPocStCurrAfter = h->rps[ST_CURR_AFT].nb_refs; in vdpau_hevc_start_frame()
286 info->NumPocLtCurr = h->rps[LT_CURR].nb_refs; in vdpau_hevc_start_frame()
295 for (ssize_t i = 0, j = 0; i < h->rps[ST_CURR_BEF].nb_refs; i++) { in vdpau_hevc_start_frame()
318 for (ssize_t i = 0, j = 0; i < h->rps[ST_CURR_AFT].nb_refs; i++) { in vdpau_hevc_start_frame()
341 for (ssize_t i = 0, j = 0; i < h->rps[LT_CURR].nb_refs; i++) { in vdpau_hevc_start_frame()
Dhevc_mvs.c180 for (i = 0; i < refPicList[j].nb_refs; i++) { in derive_temporal_colocated_mvs()
319 const int nb_refs = (s->sh.slice_type == HEVC_SLICE_P) ? in derive_spatial_merge_candidates() local
320 s->sh.nb_refs[0] : FFMIN(s->sh.nb_refs[0], s->sh.nb_refs[1]); in derive_spatial_merge_candidates()
466 mergecandlist[nb_merge_cand].ref_idx[0] = zero_idx < nb_refs ? zero_idx : 0; in derive_spatial_merge_candidates()
467 mergecandlist[nb_merge_cand].ref_idx[1] = zero_idx < nb_refs ? zero_idx : 0; in derive_spatial_merge_candidates()
Dhevcdec.c169 for (i = 0; i < s->sh.nb_refs[L0]; i++) { in pred_weight_table()
177 for (i = 0; i < s->sh.nb_refs[L0]; i++) in pred_weight_table()
180 for (i = 0; i < s->sh.nb_refs[L0]; i++) in pred_weight_table()
183 for (i = 0; i < s->sh.nb_refs[L0]; i++) { in pred_weight_table()
213 for (i = 0; i < s->sh.nb_refs[L1]; i++) { in pred_weight_table()
221 for (i = 0; i < s->sh.nb_refs[L1]; i++) in pred_weight_table()
224 for (i = 0; i < s->sh.nb_refs[L1]; i++) in pred_weight_table()
227 for (i = 0; i < s->sh.nb_refs[L1]; i++) { in pred_weight_table()
268 rps->nb_refs = 0; in decode_lt_rps()
281 rps->nb_refs = nb_sh + nb_sps; in decode_lt_rps()
[all …]
Dvaapi_encode_h264.c691 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_h264_init_picture_params()
909 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_h264_init_slice_params()
918 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_h264_init_slice_params()
938 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_h264_init_slice_params()
956 for (i = j = 0; i < pic->nb_refs; i++) { in vaapi_encode_h264_init_slice_params()
980 for (i = j = 0; i < pic->nb_refs; i++) { in vaapi_encode_h264_init_slice_params()
1023 av_assert0(pic->nb_refs <= 2); in vaapi_encode_h264_init_slice_params()
1024 if (pic->nb_refs >= 1) { in vaapi_encode_h264_init_slice_params()
1030 if (pic->nb_refs >= 2) { in vaapi_encode_h264_init_slice_params()
Dvaapi_encode_vp8.c89 av_assert0(pic->nb_refs == 0); in vaapi_encode_vp8_init_picture_params()
97 av_assert0(pic->nb_refs == 1); in vaapi_encode_vp8_init_picture_params()
Dhevcdec.h236 uint8_t nb_refs; member
243 int nb_refs; member
280 unsigned int nb_refs[2]; member
Dvaapi_encode_h265.c830 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_h265_init_picture_params()
927 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_h265_init_slice_params()
936 for (j = 0; j < pic->nb_refs; j++) { in vaapi_encode_h265_init_slice_params()
940 if (j < pic->nb_refs) in vaapi_encode_h265_init_slice_params()
1063 av_assert0(pic->nb_refs <= 2); in vaapi_encode_h265_init_slice_params()
1064 if (pic->nb_refs >= 1) { in vaapi_encode_h265_init_slice_params()
1070 if (pic->nb_refs >= 2) { in vaapi_encode_h265_init_slice_params()
Dvaapi_encode.c263 if (pic->nb_refs == 0) { in vaapi_encode_issue()
267 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_issue()
275 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_issue()
776 av_assert0(pic->nb_refs < MAX_PICTURE_REFERENCES); in vaapi_encode_add_ref()
777 pic->refs[pic->nb_refs++] = target; in vaapi_encode_add_ref()
806 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_remove_refs()
906 for (i = 0; i < pic->nb_refs; i++) { in vaapi_encode_pick_next()
910 if (i == pic->nb_refs) in vaapi_encode_pick_next()
Dvaapi_encode.h115 int nb_refs; member
Ddxva2_hevc.c183 while (!frame && j < rpl->nb_refs) \ in fill_picture_parameters()
/third_party/ffmpeg/libavfilter/
Dvf_palettegen.c74 int nb_refs; // number of color references (or number of different colors) member
265 static struct color_ref **load_color_refs(const struct hist_node *hist, int nb_refs) in load_color_refs() argument
268 struct color_ref **refs = av_malloc_array(nb_refs, sizeof(*refs)); in load_color_refs()
306 s->refs = load_color_refs(s->histogram, s->nb_refs); in get_palette_frame()
308 … av_log(ctx, AV_LOG_ERROR, "Unable to allocate references for %d different colors\n", s->nb_refs); in get_palette_frame()
320 box->len = s->nb_refs; in get_palette_frame()
381 ratio = set_colorquant_ratio_meta(out, s->nb_boxes, s->nb_refs); in get_palette_frame()
383 s->nb_boxes, s->reserve_transparent ? "(+1)" : "", s->nb_refs, ratio); in get_palette_frame()
487 s->nb_refs += ret; in filter_frame()
503 s->nb_refs = 0; in filter_frame()
[all …]