/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <math.h>
#include <string.h>

#include "av1/common/pred_common.h"
#include "av1/common/seg_common.h"
#include "av1/encoder/aq_cyclicrefresh.h"
#include "av1/encoder/ratectrl.h"
#include "av1/encoder/segmentation.h"
#include "av1/encoder/tokenize.h"
#include "aom_dsp/aom_dsp_common.h"

CYCLIC_REFRESH *av1_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
  size_t last_coded_q_map_size;
  CYCLIC_REFRESH *const cr = aom_calloc(1, sizeof(*cr));
  if (cr == NULL) return NULL;

  cr->map = aom_calloc(mi_rows * mi_cols, sizeof(*cr->map));
  if (cr->map == NULL) {
    av1_cyclic_refresh_free(cr);
    return NULL;
  }
  last_coded_q_map_size = mi_rows * mi_cols * sizeof(*cr->last_coded_q_map);
  cr->last_coded_q_map = aom_malloc(last_coded_q_map_size);
  if (cr->last_coded_q_map == NULL) {
    av1_cyclic_refresh_free(cr);
    return NULL;
  }
  assert(MAXQ <= 255);
  memset(cr->last_coded_q_map, MAXQ, last_coded_q_map_size);
  return cr;
}

void av1_cyclic_refresh_free(CYCLIC_REFRESH *cr) {
  if (cr != NULL) {
    aom_free(cr->map);
    aom_free(cr->last_coded_q_map);
    aom_free(cr);
  }
}

// Check if this coding block, of size bsize, should be considered for refresh
// (lower-qp coding). The decision can be based on various factors, such as
// the size of the coding block (blocks below the minimum size are rejected),
// the coding mode, and the rate/distortion of the block.
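// Returns one of the three cyclic refresh segments: CR_SEGMENT_ID_BASE
// (no refresh), CR_SEGMENT_ID_BOOST1 (refresh), or CR_SEGMENT_ID_BOOST2
// (refresh with a more aggressive delta-q).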
static int candidate_refresh_aq(const CYCLIC_REFRESH *cr,
                                const MB_MODE_INFO *mbmi, int64_t rate,
                                int64_t dist, int bsize) {
  MV mv = mbmi->mv[0].as_mv;
  int is_compound = has_second_ref(mbmi);
  // Reject the block for lower-qp coding for non-compound mode if the
  // projected distortion is above the threshold, and any of the following
  // is true:
  // 1) the mode uses a large motion vector
  // 2) the mode is an intra mode
  // Otherwise accept for refresh.
  if (!is_compound && dist > cr->thresh_dist_sb &&
      (mv.row > cr->motion_thresh || mv.row < -cr->motion_thresh ||
       mv.col > cr->motion_thresh || mv.col < -cr->motion_thresh ||
       !is_inter_block(mbmi)))
    return CR_SEGMENT_ID_BASE;
  else if (is_compound || (bsize >= BLOCK_16X16 && rate < cr->thresh_rate_sb &&
                           is_inter_block(mbmi) && mbmi->mv[0].as_int == 0 &&
                           cr->rate_boost_fac > 10))
    // More aggressive delta-q for bigger blocks with zero motion.
    return CR_SEGMENT_ID_BOOST2;
  else
    return CR_SEGMENT_ID_BOOST1;
}

// Compute delta-q for the segment.
static int compute_deltaq(const AV1_COMP *cpi, int q, double rate_factor) {
  const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const RATE_CONTROL *const rc = &cpi->rc;
  int deltaq = av1_compute_qdelta_by_rate(
      rc, cpi->common.current_frame.frame_type, q, rate_factor,
      cpi->is_screen_content_type, cpi->common.seq_params->bit_depth);
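  // Clamp the (negative) delta so its magnitude does not exceed
  // max_qdelta_perc percent of the base q. For illustration (example values
  // only): with q = 100 and max_qdelta_perc = 60, a deltaq of -80 would be
  // raised to -60.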
  if ((-deltaq) > cr->max_qdelta_perc * q / 100) {
    deltaq = -cr->max_qdelta_perc * q / 100;
  }
  return deltaq;
}

int av1_cyclic_refresh_estimate_bits_at_q(const AV1_COMP *cpi,
                                          double correction_factor) {
  const AV1_COMMON *const cm = &cpi->common;
  const FRAME_TYPE frame_type = cm->current_frame.frame_type;
  const int base_qindex = cm->quant_params.base_qindex;
  const int bit_depth = cm->seq_params->bit_depth;
  const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int mbs = cm->mi_params.MBs;
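  // Each 16x16 macroblock contains 16 4x4 blocks, so the number of 4x4
  // blocks in the frame is the macroblock count shifted left by 4.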
  const int num4x4bl = mbs << 4;
  // Weight for non-base segments: use actual number of blocks refreshed in
  // previous/just encoded frame. Note number of blocks here is in 4x4 units.
  const double weight_segment1 = (double)cr->actual_num_seg1_blocks / num4x4bl;
  const double weight_segment2 = (double)cr->actual_num_seg2_blocks / num4x4bl;
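  // The estimate below is a weighted average over the three segments:
  //   bits = (1 - w1 - w2) * B(q) + w1 * B(q + dq1) + w2 * B(q + dq2),
  // where B() is av1_estimate_bits_at_q(), w1/w2 are the segment weights
  // above, and dq1/dq2 are cr->qindex_delta[1] and cr->qindex_delta[2].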
  // Take segment weighted average for estimated bits.
  const int estimated_bits =
      (int)((1.0 - weight_segment1 - weight_segment2) *
                av1_estimate_bits_at_q(frame_type, base_qindex, mbs,
                                       correction_factor, bit_depth,
                                       cpi->is_screen_content_type) +
            weight_segment1 * av1_estimate_bits_at_q(
                                  frame_type, base_qindex + cr->qindex_delta[1],
                                  mbs, correction_factor, bit_depth,
                                  cpi->is_screen_content_type) +
            weight_segment2 * av1_estimate_bits_at_q(
                                  frame_type, base_qindex + cr->qindex_delta[2],
                                  mbs, correction_factor, bit_depth,
                                  cpi->is_screen_content_type));
  return estimated_bits;
}

int av1_cyclic_refresh_rc_bits_per_mb(const AV1_COMP *cpi, int i,
                                      double correction_factor) {
  const AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int bits_per_mb;
  int num4x4bl = cm->mi_params.MBs << 4;
  // Weight for segment prior to encoding: take the average of the target
  // number for the frame to be encoded and the actual from the previous frame.
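  // (The >> 1 below halves the sum of the target count and the two actual
  // counts, implementing that average.)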
  double weight_segment =
      (double)((cr->target_num_seg_blocks + cr->actual_num_seg1_blocks +
                cr->actual_num_seg2_blocks) >>
               1) /
      num4x4bl;
  // Compute delta-q corresponding to qindex i.
  int deltaq = compute_deltaq(cpi, i, cr->rate_ratio_qdelta);
  // Take segment weighted average for bits per mb.
  bits_per_mb =
      (int)((1.0 - weight_segment) *
                av1_rc_bits_per_mb(cm->current_frame.frame_type, i,
                                   correction_factor, cm->seq_params->bit_depth,
                                   cpi->is_screen_content_type) +
            weight_segment * av1_rc_bits_per_mb(cm->current_frame.frame_type,
                                                i + deltaq, correction_factor,
                                                cm->seq_params->bit_depth,
                                                cpi->is_screen_content_type));
  return bits_per_mb;
}

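// Reset the segment id of a skipped block to the spatially predicted segment
// id, and keep the cyclic refresh map, the encoder/frame segmentation maps,
// and the per-segment block counters consistent with that change.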
void av1_cyclic_reset_segment_skip(const AV1_COMP *cpi, MACROBLOCK *const x,
                                   int mi_row, int mi_col, BLOCK_SIZE bsize) {
  int cdf_num;
  const AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *const mbmi = xd->mi[0];
  const int prev_segment_id = mbmi->segment_id;
  mbmi->segment_id = av1_get_spatial_seg_pred(cm, xd, &cdf_num);
  if (prev_segment_id != mbmi->segment_id) {
    CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
    const int bw = mi_size_wide[bsize];
    const int bh = mi_size_high[bsize];
    const int xmis = AOMMIN(cm->mi_params.mi_cols - mi_col, bw);
    const int ymis = AOMMIN(cm->mi_params.mi_rows - mi_row, bh);
    const int block_index = mi_row * cm->mi_params.mi_cols + mi_col;
    for (int mi_y = 0; mi_y < ymis; mi_y++) {
      for (int mi_x = 0; mi_x < xmis; mi_x++) {
        const int map_offset =
            block_index + mi_y * cm->mi_params.mi_cols + mi_x;
        cr->map[map_offset] = 0;
        cpi->enc_seg.map[map_offset] = mbmi->segment_id;
        cm->cur_frame->seg_map[map_offset] = mbmi->segment_id;
      }
    }
    if (cyclic_refresh_segment_id(prev_segment_id) == CR_SEGMENT_ID_BOOST1)
      x->actual_num_seg1_blocks -= xmis * ymis;
    else if (cyclic_refresh_segment_id(prev_segment_id) == CR_SEGMENT_ID_BOOST2)
      x->actual_num_seg2_blocks -= xmis * ymis;
    if (cyclic_refresh_segment_id(mbmi->segment_id) == CR_SEGMENT_ID_BOOST1)
      x->actual_num_seg1_blocks += xmis * ymis;
    else if (cyclic_refresh_segment_id(mbmi->segment_id) ==
             CR_SEGMENT_ID_BOOST2)
      x->actual_num_seg2_blocks += xmis * ymis;
  }
}

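// Update the cyclic refresh state for a coded block: possibly adjust its
// segment id based on the refresh decision and skip flag, write the new value
// into the cyclic refresh map and the segmentation maps, and (for real
// encodes) accumulate the per-segment block counters.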
void av1_cyclic_refresh_update_segment(const AV1_COMP *cpi, MACROBLOCK *const x,
                                       int mi_row, int mi_col, BLOCK_SIZE bsize,
                                       int64_t rate, int64_t dist, int skip,
                                       RUN_TYPE dry_run) {
  const AV1_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *const mbmi = xd->mi[0];
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int bw = mi_size_wide[bsize];
  const int bh = mi_size_high[bsize];
  const int xmis = AOMMIN(cm->mi_params.mi_cols - mi_col, bw);
  const int ymis = AOMMIN(cm->mi_params.mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_params.mi_cols + mi_col;
  const int refresh_this_block =
      candidate_refresh_aq(cr, mbmi, rate, dist, bsize);
  // Default is to not update the refresh map.
  int new_map_value = cr->map[block_index];

  // If this block is labeled for refresh, check if we should reset the
  // segment_id.
  if (cyclic_refresh_segment_id_boosted(mbmi->segment_id)) {
    mbmi->segment_id = refresh_this_block;
    // Reset segment_id if the block will be skipped.
    if (skip) mbmi->segment_id = CR_SEGMENT_ID_BASE;
  }

  // Update the cyclic refresh map, to be used for setting the segmentation
  // map for the next frame. If the block will be refreshed this frame, mark
  // it as clean. The magnitude of the negative value influences how long
  // before we consider it for refresh again.
  if (cyclic_refresh_segment_id_boosted(mbmi->segment_id)) {
    new_map_value = -cr->time_for_refresh;
  } else if (refresh_this_block) {
    // Else, if it is accepted as a candidate for refresh and has not already
    // been refreshed (marked as 1), mark it as a candidate for cleanup at a
    // future time (marked as 0); otherwise don't update it.
    if (cr->map[block_index] == 1) new_map_value = 0;
  } else {
    // Leave it marked as a block that is not a candidate for refresh.
    new_map_value = 1;
  }

  // Update entries in the cyclic refresh map with new_map_value, and
  // copy mbmi->segment_id into the global segmentation map.
  for (int mi_y = 0; mi_y < ymis; mi_y++) {
    for (int mi_x = 0; mi_x < xmis; mi_x++) {
      const int map_offset = block_index + mi_y * cm->mi_params.mi_cols + mi_x;
      cr->map[map_offset] = new_map_value;
      cpi->enc_seg.map[map_offset] = mbmi->segment_id;
      cm->cur_frame->seg_map[map_offset] = mbmi->segment_id;
    }
  }
  // Accumulate cyclic refresh update counters.
  if (!dry_run) {
    if (cyclic_refresh_segment_id(mbmi->segment_id) == CR_SEGMENT_ID_BOOST1)
      x->actual_num_seg1_blocks += xmis * ymis;
    else if (cyclic_refresh_segment_id(mbmi->segment_id) ==
             CR_SEGMENT_ID_BOOST2)
      x->actual_num_seg2_blocks += xmis * ymis;
  }
}

// Initializes counters used for cyclic refresh.
void av1_init_cyclic_refresh_counters(MACROBLOCK *const x) {
  x->actual_num_seg1_blocks = 0;
  x->actual_num_seg2_blocks = 0;
  x->cnt_zeromv = 0;
}

// Accumulate cyclic refresh counters.
void av1_accumulate_cyclic_refresh_counters(
    CYCLIC_REFRESH *const cyclic_refresh, const MACROBLOCK *const x) {
  cyclic_refresh->actual_num_seg1_blocks += x->actual_num_seg1_blocks;
  cyclic_refresh->actual_num_seg2_blocks += x->actual_num_seg2_blocks;
  cyclic_refresh->cnt_zeromv += x->cnt_zeromv;
}

void av1_cyclic_refresh_postencode(AV1_COMP *const cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const CommonModeInfoParams *const mi_params = &cm->mi_params;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  RATE_CONTROL *const rc = &cpi->rc;
  SVC *const svc = &cpi->svc;
  const int avg_cnt_zeromv =
      100 * cr->cnt_zeromv / (mi_params->mi_rows * mi_params->mi_cols);
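  // avg_cnt_zeromv is (roughly) the percentage of the frame, in 4x4 units,
  // coded with zero motion. Below it feeds an exponential moving average
  // (3/4 weight on the history) that tracks how static the content is.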

  if (!cpi->ppi->use_svc ||
      (cpi->ppi->use_svc &&
       !cpi->svc.layer_context[cpi->svc.temporal_layer_id].is_key_frame &&
       cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1)) {
    rc->avg_frame_low_motion =
        (3 * rc->avg_frame_low_motion + avg_cnt_zeromv) / 4;
    // For SVC: set avg_frame_low_motion (only computed on top spatial layer)
    // to all lower spatial layers.
    if (cpi->ppi->use_svc &&
        svc->spatial_layer_id == svc->number_spatial_layers - 1) {
      for (int i = 0; i < svc->number_spatial_layers - 1; ++i) {
        const int layer = LAYER_IDS_TO_IDX(i, svc->temporal_layer_id,
                                           svc->number_temporal_layers);
        LAYER_CONTEXT *const lc = &svc->layer_context[layer];
        RATE_CONTROL *const lrc = &lc->rc;
        lrc->avg_frame_low_motion = rc->avg_frame_low_motion;
      }
    }
  }
}

void av1_cyclic_refresh_set_golden_update(AV1_COMP *const cpi) {
  RATE_CONTROL *const rc = &cpi->rc;
  PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // Set minimum gf_interval for GF update to a multiple of the refresh
  // period, with some max limit. Depending on past encoding stats, GF flag
  // may be reset and update may not occur until next baseline_gf_interval.
  if (cr->percent_refresh > 0)
    p_rc->baseline_gf_interval = AOMMIN(2 * (100 / cr->percent_refresh), 40);
  else
    p_rc->baseline_gf_interval = 20;
  if (rc->avg_frame_low_motion < 40) p_rc->baseline_gf_interval = 8;
}

// Update the segmentation map, and related quantities: cyclic refresh map,
// refresh sb_index, and target number of blocks to be refreshed.
// The map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or to
// 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock.
// Blocks labeled as BOOST1 may later get set to BOOST2 (during the
// encoding of the superblock).
static void cyclic_refresh_update_map(AV1_COMP *const cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const CommonModeInfoParams *const mi_params = &cm->mi_params;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  unsigned char *const seg_map = cpi->enc_seg.map;
  int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame;
  int xmis, ymis, x, y;
  memset(seg_map, CR_SEGMENT_ID_BASE, mi_params->mi_rows * mi_params->mi_cols);
  sb_cols = (mi_params->mi_cols + cm->seq_params->mib_size - 1) /
            cm->seq_params->mib_size;
  sb_rows = (mi_params->mi_rows + cm->seq_params->mib_size - 1) /
            cm->seq_params->mib_size;
  sbs_in_frame = sb_cols * sb_rows;
  // Number of target blocks to get the q delta (segment 1).
  block_count =
      cr->percent_refresh * mi_params->mi_rows * mi_params->mi_cols / 100;
  // Set the segmentation map: cycle through the superblocks, starting at
  // cr->sb_index, and stopping when either block_count blocks have been found
  // to be refreshed, or we have passed through the whole frame.
  if (cr->sb_index >= sbs_in_frame) cr->sb_index = 0;
  assert(cr->sb_index < sbs_in_frame);
  i = cr->sb_index;
  cr->target_num_seg_blocks = 0;
  do {
    int sum_map = 0;
    // Get the mi_row/mi_col corresponding to superblock index i.
    int sb_row_index = (i / sb_cols);
    int sb_col_index = i - sb_row_index * sb_cols;
    int mi_row = sb_row_index * cm->seq_params->mib_size;
    int mi_col = sb_col_index * cm->seq_params->mib_size;
    // TODO(any): Ensure the population of
    // cpi->common.features.allow_screen_content_tools and use the same instead
    // of cpi->oxcf.tune_cfg.content == AOM_CONTENT_SCREEN.
    int qindex_thresh = cpi->oxcf.tune_cfg.content == AOM_CONTENT_SCREEN
                            ? av1_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST2,
                                             cm->quant_params.base_qindex)
                            : 0;
    assert(mi_row >= 0 && mi_row < mi_params->mi_rows);
    assert(mi_col >= 0 && mi_col < mi_params->mi_cols);
    bl_index = mi_row * mi_params->mi_cols + mi_col;
    // Loop through all MI blocks in the superblock and update the map.
    xmis = AOMMIN(mi_params->mi_cols - mi_col, cm->seq_params->mib_size);
    ymis = AOMMIN(mi_params->mi_rows - mi_row, cm->seq_params->mib_size);
    // cr->map is only needed at 8x8 granularity.
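    // For each 8x8 block (2x2 MI units) that qualifies, sum_map is
    // incremented by 4, i.e., by its 4x4-block count, so it can be compared
    // against xmis * ymis (the superblock area in 4x4 units) below.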
    for (y = 0; y < ymis; y += 2) {
      for (x = 0; x < xmis; x += 2) {
        const int bl_index2 = bl_index + y * mi_params->mi_cols + x;
        // If the block is a candidate for clean-up, then mark it for
        // possible boost/refresh (segment 1). The segment id may get reset
        // to 0 later if the block gets coded as anything other than GLOBALMV.
        if (cr->map[bl_index2] == 0) {
          if (cr->last_coded_q_map[bl_index2] > qindex_thresh) sum_map += 4;
        } else if (cr->map[bl_index2] < 0) {
          cr->map[bl_index2]++;
        }
      }
    }
    // Enforce a constant segment over the superblock.
    // If the segment covers at least half of the superblock, set it to 1.
    if (sum_map >= (xmis * ymis) >> 1) {
      for (y = 0; y < ymis; y++)
        for (x = 0; x < xmis; x++) {
          seg_map[bl_index + y * mi_params->mi_cols + x] = CR_SEGMENT_ID_BOOST1;
        }
      cr->target_num_seg_blocks += xmis * ymis;
    }
    i++;
    if (i == sbs_in_frame) {
      i = 0;
    }
  } while (cr->target_num_seg_blocks < block_count && i != cr->sb_index);
  cr->sb_index = i;
}

// Set cyclic refresh parameters.
void av1_cyclic_refresh_update_parameters(AV1_COMP *const cpi) {
  // TODO(marpan): Parameters need to be tuned.
  const RATE_CONTROL *const rc = &cpi->rc;
  const PRIMARY_RATE_CONTROL *const p_rc = &cpi->ppi->p_rc;
  const AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int num4x4bl = cm->mi_params.MBs << 4;
  int target_refresh = 0;
  double weight_segment_target = 0;
  double weight_segment = 0;
  int qp_thresh = AOMMIN(20, rc->best_quality << 1);
  int qp_max_thresh = 118 * MAXQ >> 7;
  cr->apply_cyclic_refresh = 1;
  if (frame_is_intra_only(cm) || is_lossless_requested(&cpi->oxcf.rc_cfg) ||
      cpi->svc.temporal_layer_id > 0 ||
      p_rc->avg_frame_qindex[INTER_FRAME] < qp_thresh ||
      (rc->frames_since_key > 20 &&
       p_rc->avg_frame_qindex[INTER_FRAME] > qp_max_thresh) ||
      (rc->avg_frame_low_motion < 45 && rc->frames_since_key > 40)) {
    cr->apply_cyclic_refresh = 0;
    return;
  }
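  // Refresh roughly percent_refresh percent of the frame per frame, so a
  // full refresh cycle takes about 100 / percent_refresh frames (10 frames
  // with the value below).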
  cr->percent_refresh = 10;
  cr->max_qdelta_perc = 60;
  cr->time_for_refresh = 0;
  cr->motion_thresh = 32;
  cr->rate_boost_fac = 15;
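  // rate_boost_fac is applied in units of 0.1 when forming the BOOST2 rate
  // ratio in av1_cyclic_refresh_setup(), so 15 corresponds to a 1.5x
  // multiplier on rate_ratio_qdelta.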
  // Use larger delta-qp (increase rate_ratio_qdelta) for the first few (~4)
  // periods of the refresh cycle, after a key frame.
  // Account for larger interval on base layer for temporal layers.
  if (cr->percent_refresh > 0 &&
      rc->frames_since_key < 400 / cr->percent_refresh) {
    cr->rate_ratio_qdelta = 3.0;
  } else {
    cr->rate_ratio_qdelta = 2.0;
  }
  // Adjust some parameters for low resolutions.
  if (cm->width * cm->height <= 352 * 288) {
    if (rc->avg_frame_bandwidth < 3000) {
      cr->motion_thresh = 16;
      cr->rate_boost_fac = 13;
    } else {
      cr->max_qdelta_perc = 70;
      cr->rate_ratio_qdelta = AOMMAX(cr->rate_ratio_qdelta, 2.5);
    }
  }
  if (cpi->oxcf.rc_cfg.mode == AOM_VBR) {
    // To be adjusted for VBR mode, e.g., based on gf period and boost.
    // For now use a smaller qp-delta (than CBR), no second boosted segment,
    // and turn off refresh on golden-frame updates (since they are already
    // boosted).
    cr->percent_refresh = 10;
    cr->rate_ratio_qdelta = 1.5;
    cr->rate_boost_fac = 10;
    if (cpi->refresh_frame.golden_frame) {
      cr->percent_refresh = 0;
      cr->rate_ratio_qdelta = 1.0;
    }
  }
  // Weight for segment prior to encoding: take the average of the target
  // number for the frame to be encoded and the actual from the previous
  // frame. Use the target if it is less. This is used for setting the base
  // qp for the frame in av1_rc_regulate_q.
  target_refresh =
      cr->percent_refresh * cm->mi_params.mi_rows * cm->mi_params.mi_cols / 100;
  weight_segment_target = (double)(target_refresh) / num4x4bl;
  weight_segment = (double)((target_refresh + cr->actual_num_seg1_blocks +
                             cr->actual_num_seg2_blocks) >>
                            1) /
                   num4x4bl;
  if (weight_segment_target < 7 * weight_segment / 8)
    weight_segment = weight_segment_target;
  cr->weight_segment = weight_segment;
}

// Set up cyclic background refresh: set delta q and segmentation map.
void av1_cyclic_refresh_setup(AV1_COMP *const cpi) {
  AV1_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  int resolution_change =
      cm->prev_frame && (cm->width != cm->prev_frame->width ||
                         cm->height != cm->prev_frame->height);
  if (resolution_change) av1_cyclic_refresh_reset_resize(cpi);
  if (!cr->apply_cyclic_refresh) {
    // Set segmentation map to 0 and disable.
    unsigned char *const seg_map = cpi->enc_seg.map;
    memset(seg_map, 0, cm->mi_params.mi_rows * cm->mi_params.mi_cols);
    av1_disable_segmentation(&cm->seg);
    if (cm->current_frame.frame_type == KEY_FRAME) {
      memset(cr->last_coded_q_map, MAXQ,
             cm->mi_params.mi_rows * cm->mi_params.mi_cols *
                 sizeof(*cr->last_coded_q_map));
      cr->sb_index = 0;
    }
    return;
  } else {
    const double q = av1_convert_qindex_to_q(cm->quant_params.base_qindex,
                                             cm->seq_params->bit_depth);
    // Set the rate threshold to some multiple (set to 2 for now) of the
    // target rate (target is given by sb64_target_rate and scaled by 256).
    cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    // q will not exceed 457, so (q * q) is within 32bit; see:
    // av1_convert_qindex_to_q(), av1_ac_quant(), ac_qlookup*[].
    cr->thresh_dist_sb = ((int64_t)(q * q)) << 2;

    // Set up segmentation.
    // Clear down the segment map.
    av1_enable_segmentation(&cm->seg);
    av1_clearall_segfeatures(seg);

    // Note: setting temporal_update has no effect, as the seg-map coding
    // method (temporal or spatial) is determined in
    // av1_choose_segmap_coding_method(), based on the coding cost of each
    // method. When error_resilient mode is on, the last_frame_seg_map is set
    // to 0, so if temporal coding is used, it is relative to a zeroed
    // previous map.
    // seg->temporal_update = 0;

    // Segment BASE "Q" feature is disabled so it defaults to the baseline Q.
    av1_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q);
    // Use segment BOOST1 for in-frame Q adjustment.
    av1_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q);
    // Use segment BOOST2 for more aggressive in-frame Q adjustment.
    av1_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q);

    // Set the q delta for segment BOOST1.
    const CommonQuantParams *const quant_params = &cm->quant_params;
    int qindex_delta =
        compute_deltaq(cpi, quant_params->base_qindex, cr->rate_ratio_qdelta);
    cr->qindex_delta[1] = qindex_delta;

    // Compute rd-mult for segment BOOST1.
    const int qindex2 = clamp(
        quant_params->base_qindex + quant_params->y_dc_delta_q + qindex_delta,
        0, MAXQ);
    cr->rdmult = av1_compute_rd_mult(cpi, qindex2);

    av1_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta);

    // Set a more aggressive (higher) q delta for segment BOOST2.
    qindex_delta = compute_deltaq(
        cpi, quant_params->base_qindex,
        AOMMIN(CR_MAX_RATE_TARGET_RATIO,
               0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta));
    cr->qindex_delta[2] = qindex_delta;
    av1_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta);

    // Update the segmentation and refresh map.
    cyclic_refresh_update_map(cpi);
  }
}

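// Return the rd-mult computed for the BOOST1 segment in
// av1_cyclic_refresh_setup().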
int av1_cyclic_refresh_get_rdmult(const CYCLIC_REFRESH *cr) {
  return cr->rdmult;
}

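// Reset the cyclic refresh state after a resolution change: clear the refresh
// map, restart the superblock cycle, request a golden-frame update, and
// disable cyclic refresh for the current frame.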
void av1_cyclic_refresh_reset_resize(AV1_COMP *const cpi) {
  const AV1_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  memset(cr->map, 0, cm->mi_params.mi_rows * cm->mi_params.mi_cols);
  cr->sb_index = 0;
  cpi->refresh_frame.golden_frame = true;
  cr->apply_cyclic_refresh = 0;
}