/*
 * Copyright (c) 2014 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <limits.h>
#include <math.h>

#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_ports/system_state.h"

#include "vp9/encoder/vp9_aq_cyclicrefresh.h"

#include "vp9/common/vp9_seg_common.h"

#include "vp9/encoder/vp9_ratectrl.h"
#include "vp9/encoder/vp9_segmentation.h"

static const uint8_t VP9_VAR_OFFS[64] = {
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128
};

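// Allocate the CYCLIC_REFRESH struct together with its two per-8x8-block
// maps (the refresh map and the last-coded-q map, each mi_rows * mi_cols
// entries). Returns NULL if any allocation fails.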
CYCLIC_REFRESH *vp9_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
  size_t last_coded_q_map_size;
  CYCLIC_REFRESH *const cr = vpx_calloc(1, sizeof(*cr));
  if (cr == NULL) return NULL;

  cr->map = vpx_calloc(mi_rows * mi_cols, sizeof(*cr->map));
  if (cr->map == NULL) {
    vp9_cyclic_refresh_free(cr);
    return NULL;
  }
  last_coded_q_map_size = mi_rows * mi_cols * sizeof(*cr->last_coded_q_map);
  cr->last_coded_q_map = vpx_malloc(last_coded_q_map_size);
  if (cr->last_coded_q_map == NULL) {
    vp9_cyclic_refresh_free(cr);
    return NULL;
  }
  assert(MAXQ <= 255);
  memset(cr->last_coded_q_map, MAXQ, last_coded_q_map_size);
  cr->counter_encode_maxq_scene_change = 0;
  cr->content_mode = 1;
  return cr;
}

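// Free the refresh map, the last-coded-q map, and the CYCLIC_REFRESH struct
// itself. Safe to call with a NULL pointer.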
void vp9_cyclic_refresh_free(CYCLIC_REFRESH *cr) {
  if (cr != NULL) {
    vpx_free(cr->map);
    vpx_free(cr->last_coded_q_map);
    vpx_free(cr);
  }
}

// Check if this coding block, of size bsize, should be considered for refresh
// (lower-qp coding). The decision can be based on various factors, such as
// the size of the coding block (blocks below the minimum size are rejected),
// the coding mode, and the rate/distortion of the block.
static int candidate_refresh_aq(const CYCLIC_REFRESH *cr, const MODE_INFO *mi,
                                int64_t rate, int64_t dist, int bsize) {
  MV mv = mi->mv[0].as_mv;
  // Reject the block for lower-qp coding if projected distortion
  // is above the threshold, and any of the following is true:
  // 1) mode uses large mv
  // 2) mode is an intra-mode
  // Otherwise accept for refresh.
  if (dist > cr->thresh_dist_sb &&
      (mv.row > cr->motion_thresh || mv.row < -cr->motion_thresh ||
       mv.col > cr->motion_thresh || mv.col < -cr->motion_thresh ||
       !is_inter_block(mi)))
    return CR_SEGMENT_ID_BASE;
  else if (bsize >= BLOCK_16X16 && rate < cr->thresh_rate_sb &&
           is_inter_block(mi) && mi->mv[0].as_int == 0 &&
           cr->rate_boost_fac > 10)
    // More aggressive delta-q for bigger blocks with zero motion.
    return CR_SEGMENT_ID_BOOST2;
  else
    return CR_SEGMENT_ID_BOOST1;
}

// Compute delta-q for the segment.
static int compute_deltaq(const VP9_COMP *cpi, int q, double rate_factor) {
  const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const RATE_CONTROL *const rc = &cpi->rc;
  int deltaq = vp9_compute_qdelta_by_rate(rc, cpi->common.frame_type, q,
                                          rate_factor, cpi->common.bit_depth);
  if ((-deltaq) > cr->max_qdelta_perc * q / 100) {
    deltaq = -cr->max_qdelta_perc * q / 100;
  }
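  // For illustration: with q = 100 and max_qdelta_perc = 60, the magnitude of
  // the (negative) delta-q is capped at 60.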
  return deltaq;
}

// For the just-encoded frame, estimate the bits, incorporating the delta-q
// from the non-base segments. For now, ignore the effect of multiple segments
// (with different delta-q). Note this function is called in the postencode
// stage (from rc_update_rate_correction_factors()).
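// For illustration: if 20% of the frame's 8x8 blocks were coded in segment 1
// and 5% in segment 2, the estimate below is roughly
//   0.75 * bits(base_q) + 0.20 * bits(base_q + qindex_delta[1]) +
//   0.05 * bits(base_q + qindex_delta[2]),
// where bits(q) denotes vp9_estimate_bits_at_q() at that q index.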
int vp9_cyclic_refresh_estimate_bits_at_q(const VP9_COMP *cpi,
                                          double correction_factor) {
  const VP9_COMMON *const cm = &cpi->common;
  const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int estimated_bits;
  int mbs = cm->MBs;
  int num8x8bl = mbs << 2;
  // Weight for non-base segments: use actual number of blocks refreshed in
  // previous/just encoded frame. Note number of blocks here is in 8x8 units.
  double weight_segment1 = (double)cr->actual_num_seg1_blocks / num8x8bl;
  double weight_segment2 = (double)cr->actual_num_seg2_blocks / num8x8bl;
  // Take segment weighted average for estimated bits.
  estimated_bits =
      (int)((1.0 - weight_segment1 - weight_segment2) *
                vp9_estimate_bits_at_q(cm->frame_type, cm->base_qindex, mbs,
                                       correction_factor, cm->bit_depth) +
            weight_segment1 *
                vp9_estimate_bits_at_q(cm->frame_type,
                                       cm->base_qindex + cr->qindex_delta[1],
                                       mbs, correction_factor, cm->bit_depth) +
            weight_segment2 *
                vp9_estimate_bits_at_q(cm->frame_type,
                                       cm->base_qindex + cr->qindex_delta[2],
                                       mbs, correction_factor, cm->bit_depth));
  return estimated_bits;
}

// Prior to encoding the frame, estimate the bits per mb, for a given q = i
// and a corresponding delta-q (for segment 1). This function is called from
// vp9_rc_regulate_q() to set the base qp index.
// Note: the segment map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or
// to 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock, prior to encoding.
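// For illustration: with weight_segment = 0.2, the value returned below is
// roughly 0.8 * bits_per_mb(i) + 0.2 * bits_per_mb(i + deltaq).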
int vp9_cyclic_refresh_rc_bits_per_mb(const VP9_COMP *cpi, int i,
                                      double correction_factor) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int bits_per_mb;
  int deltaq = 0;
  if (cpi->oxcf.speed < 8)
    deltaq = compute_deltaq(cpi, i, cr->rate_ratio_qdelta);
  else
    deltaq = -(cr->max_qdelta_perc * i) / 200;
  // Take segment weighted average for bits per mb.
  bits_per_mb =
      (int)((1.0 - cr->weight_segment) *
                vp9_rc_bits_per_mb(cm->frame_type, i, correction_factor,
                                   cm->bit_depth) +
            cr->weight_segment *
                vp9_rc_bits_per_mb(cm->frame_type, i + deltaq,
                                   correction_factor, cm->bit_depth));
  return bits_per_mb;
}

// Prior to coding a given prediction block, of size bsize at (mi_row, mi_col),
// check if we should reset the segment_id, and update the cyclic_refresh map
// and segmentation map.
void vp9_cyclic_refresh_update_segment(VP9_COMP *const cpi, MODE_INFO *const mi,
                                       int mi_row, int mi_col, BLOCK_SIZE bsize,
                                       int64_t rate, int64_t dist, int skip,
                                       struct macroblock_plane *const p) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int refresh_this_block = candidate_refresh_aq(cr, mi, rate, dist, bsize);
  // Default is to not update the refresh map.
  int new_map_value = cr->map[block_index];
  int x = 0;
  int y = 0;

  int is_skin = 0;
  if (refresh_this_block == 0 && bsize <= BLOCK_16X16 &&
      cpi->use_skin_detection) {
    is_skin =
        vp9_compute_skin_block(p[0].src.buf, p[1].src.buf, p[2].src.buf,
                               p[0].src.stride, p[1].src.stride, bsize, 0, 0);
    if (is_skin) refresh_this_block = 1;
  }

  if (cpi->oxcf.rc_mode == VPX_VBR && mi->ref_frame[0] == GOLDEN_FRAME)
    refresh_this_block = 0;

  // If this block is labeled for refresh, check if we should reset the
  // segment_id.
  if (cpi->sf.use_nonrd_pick_mode &&
      cyclic_refresh_segment_id_boosted(mi->segment_id)) {
    mi->segment_id = refresh_this_block;
    // Reset segment_id if the block will be skipped.
    if (skip) mi->segment_id = CR_SEGMENT_ID_BASE;
  }

  // Update the cyclic refresh map, to be used for setting the segmentation map
  // for the next frame. If the block will be refreshed this frame, mark it
  // as clean. The magnitude of the negative value influences how long it will
  // be before we consider the block for refresh again.
  if (cyclic_refresh_segment_id_boosted(mi->segment_id)) {
    new_map_value = -cr->time_for_refresh;
  } else if (refresh_this_block) {
    // Else, if it is accepted as a candidate for refresh and has not already
    // been refreshed (marked as 1), then mark it as a candidate for cleanup
    // at a future time (marked as 0); otherwise don't update it.
    if (cr->map[block_index] == 1) new_map_value = 0;
  } else {
    // Leave it marked as a block that is not a candidate for refresh.
    new_map_value = 1;
  }

  // Update entries in the cyclic refresh map with new_map_value, and
  // copy mi->segment_id into the global segmentation map.
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      cr->map[map_offset] = new_map_value;
      cpi->segmentation_map[map_offset] = mi->segment_id;
    }
}

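// After encoding, record in the last-coded-q map the qindex at which each
// 8x8 block of this prediction block was effectively coded: non-skipped
// blocks take the segment-adjusted qindex directly, while inter skip blocks
// only lower the stored value (they were not re-coded at the current qindex).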
void vp9_cyclic_refresh_update_sb_postencode(VP9_COMP *const cpi,
                                             const MODE_INFO *const mi,
                                             int mi_row, int mi_col,
                                             BLOCK_SIZE bsize) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int x, y;
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      // Inter skip blocks were clearly not coded at the current qindex, so
      // don't update the map for them. For cases where motion is non-zero or
      // the reference frame isn't the previous frame, the previous value in
      // the map for this spatial location is not entirely correct.
      if ((!is_inter_block(mi) || !mi->skip) &&
          mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        cr->last_coded_q_map[map_offset] =
            clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id], 0, MAXQ);
      } else if (is_inter_block(mi) && mi->skip &&
                 mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        cr->last_coded_q_map[map_offset] = VPXMIN(
            clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id], 0, MAXQ),
            cr->last_coded_q_map[map_offset]);
      }
    }
}

// From the just-encoded frame: update the actual number of blocks that the
// segment delta-q was applied to, and the amount of low motion in the frame.
// Also check the conditions for forcing a golden update, or for preventing a
// golden update if the period is up.
void vp9_cyclic_refresh_postencode(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  MODE_INFO **mi = cm->mi_grid_visible;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  RATE_CONTROL *const rc = &cpi->rc;
  unsigned char *const seg_map = cpi->segmentation_map;
  double fraction_low = 0.0;
  int force_gf_refresh = 0;
  int low_content_frame = 0;
  int mi_row, mi_col;
  cr->actual_num_seg1_blocks = 0;
  cr->actual_num_seg2_blocks = 0;
  for (mi_row = 0; mi_row < cm->mi_rows; mi_row++) {
    for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
      MV mv = mi[0]->mv[0].as_mv;
      int map_index = mi_row * cm->mi_cols + mi_col;
      if (cyclic_refresh_segment_id(seg_map[map_index]) == CR_SEGMENT_ID_BOOST1)
        cr->actual_num_seg1_blocks++;
      else if (cyclic_refresh_segment_id(seg_map[map_index]) ==
               CR_SEGMENT_ID_BOOST2)
        cr->actual_num_seg2_blocks++;
      // Accumulate low_content_frame.
      if (is_inter_block(mi[0]) && abs(mv.row) < 16 && abs(mv.col) < 16)
        low_content_frame++;
      mi++;
    }
    mi += 8;
  }
  // Check for golden frame update: only for non-SVC and non-golden boost.
  if (!cpi->use_svc && cpi->ext_refresh_frame_flags_pending == 0 &&
      !cpi->oxcf.gf_cbr_boost_pct) {
    // Force this frame as a golden update frame if this frame changes the
    // resolution (resize_pending != 0).
    if (cpi->resize_pending != 0) {
      vp9_cyclic_refresh_set_golden_update(cpi);
      rc->frames_till_gf_update_due = rc->baseline_gf_interval;
      if (rc->frames_till_gf_update_due > rc->frames_to_key)
        rc->frames_till_gf_update_due = rc->frames_to_key;
      cpi->refresh_golden_frame = 1;
      force_gf_refresh = 1;
    }
    // Update average of low content/motion in the frame.
    fraction_low = (double)low_content_frame / (cm->mi_rows * cm->mi_cols);
    cr->low_content_avg = (fraction_low + 3 * cr->low_content_avg) / 4;
    if (!force_gf_refresh && cpi->refresh_golden_frame == 1 &&
        rc->frames_since_key > rc->frames_since_golden + 1) {
      // Don't update the golden reference if the amount of low_content for the
      // current encoded frame is small, or if the recursive average of the
      // low_content over the update interval window falls below threshold.
      if (fraction_low < 0.65 || cr->low_content_avg < 0.6) {
        cpi->refresh_golden_frame = 0;
      }
      // Reset for the next interval.
      cr->low_content_avg = fraction_low;
    }
  }
}

// Set golden frame update interval, for non-svc 1 pass CBR mode.
void vp9_cyclic_refresh_set_golden_update(VP9_COMP *const cpi) {
  RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // Set minimum gf_interval for GF update to a multiple of the refresh period,
  // with some max limit. Depending on past encoding stats, GF flag may be
  // reset and update may not occur until next baseline_gf_interval.
  if (cr->percent_refresh > 0)
    rc->baseline_gf_interval = VPXMIN(4 * (100 / cr->percent_refresh), 40);
  else
    rc->baseline_gf_interval = 40;
  if (cpi->oxcf.rc_mode == VPX_VBR) rc->baseline_gf_interval = 20;
  if (rc->avg_frame_low_motion < 50 && rc->frames_since_key > 40 &&
      cr->content_mode)
    rc->baseline_gf_interval = 10;
}

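// Returns 1 if the 64x64 superblock at (sb_row_index, sb_col_index) is both
// spatially flat (zero source variance) and static (zero SAD against the last
// source frame), and 0 otherwise.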
static int is_superblock_flat_static(VP9_COMP *const cpi, int sb_row_index,
                                     int sb_col_index) {
  unsigned int source_variance;
  const uint8_t *src_y = cpi->Source->y_buffer;
  const int ystride = cpi->Source->y_stride;
  unsigned int sse;
  const BLOCK_SIZE bsize = BLOCK_64X64;
  src_y += (sb_row_index << 6) * ystride + (sb_col_index << 6);
  source_variance =
      cpi->fn_ptr[bsize].vf(src_y, ystride, VP9_VAR_OFFS, 0, &sse);
  if (source_variance == 0) {
    uint64_t block_sad;
    const uint8_t *last_src_y = cpi->Last_Source->y_buffer;
    const int last_ystride = cpi->Last_Source->y_stride;
    // Offset into the last source with its own stride.
    last_src_y += (sb_row_index << 6) * last_ystride + (sb_col_index << 6);
    block_sad =
        cpi->fn_ptr[bsize].sdf(src_y, ystride, last_src_y, last_ystride);
    if (block_sad == 0) return 1;
  }
  return 0;
}

// Update the segmentation map, and related quantities: cyclic refresh map,
// refresh sb_index, and target number of blocks to be refreshed.
// The map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or to
// 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock.
// Blocks labeled as BOOST1 may later get set to BOOST2 (during the
// encoding of the superblock).
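// Note, for illustration: with percent_refresh = 10, the target below is ~10%
// of the frame's 8x8 blocks per frame, so a full refresh pass over the frame
// takes roughly 100 / percent_refresh = 10 frames (longer if superblocks are
// skipped as flat/static).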
static void cyclic_refresh_update_map(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  unsigned char *const seg_map = cpi->segmentation_map;
  int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame;
  int xmis, ymis, x, y;
  int consec_zero_mv_thresh = 0;
  int qindex_thresh = 0;
  int count_sel = 0;
  int count_tot = 0;
  memset(seg_map, CR_SEGMENT_ID_BASE, cm->mi_rows * cm->mi_cols);
  sb_cols = (cm->mi_cols + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
  sb_rows = (cm->mi_rows + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
  sbs_in_frame = sb_cols * sb_rows;
  // Number of target blocks to get the q delta (segment 1).
  block_count = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
  // Set the segmentation map: cycle through the superblocks, starting at
  // cr->sb_index, and stopping when either block_count blocks have been found
  // to be refreshed, or we have passed through the whole frame.
  assert(cr->sb_index < sbs_in_frame);
  i = cr->sb_index;
  cr->target_num_seg_blocks = 0;
  if (cpi->oxcf.content != VP9E_CONTENT_SCREEN) {
    consec_zero_mv_thresh = 100;
  }
  qindex_thresh =
      cpi->oxcf.content == VP9E_CONTENT_SCREEN
          ? vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST2, cm->base_qindex)
          : vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST1, cm->base_qindex);
  // More aggressive settings for noisy content.
  if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium &&
      cr->content_mode) {
    consec_zero_mv_thresh = 60;
    qindex_thresh =
        VPXMAX(vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST1, cm->base_qindex),
               cm->base_qindex);
  }
  do {
    int sum_map = 0;
    int consec_zero_mv_thresh_block = consec_zero_mv_thresh;
    // Get the mi_row/mi_col corresponding to superblock index i.
    int sb_row_index = (i / sb_cols);
    int sb_col_index = i - sb_row_index * sb_cols;
    int mi_row = sb_row_index * MI_BLOCK_SIZE;
    int mi_col = sb_col_index * MI_BLOCK_SIZE;
    int flat_static_blocks = 0;
    int compute_content = 1;
    assert(mi_row >= 0 && mi_row < cm->mi_rows);
    assert(mi_col >= 0 && mi_col < cm->mi_cols);
#if CONFIG_VP9_HIGHBITDEPTH
    if (cpi->common.use_highbitdepth) compute_content = 0;
#endif
    if (cr->content_mode == 0 || cpi->Last_Source == NULL ||
        cpi->Last_Source->y_width != cpi->Source->y_width ||
        cpi->Last_Source->y_height != cpi->Source->y_height)
      compute_content = 0;
    bl_index = mi_row * cm->mi_cols + mi_col;
    // Loop through all 8x8 blocks in the superblock and update the map.
    xmis =
        VPXMIN(cm->mi_cols - mi_col, num_8x8_blocks_wide_lookup[BLOCK_64X64]);
    ymis =
        VPXMIN(cm->mi_rows - mi_row, num_8x8_blocks_high_lookup[BLOCK_64X64]);
    if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium &&
        (xmis <= 2 || ymis <= 2))
      consec_zero_mv_thresh_block = 4;
    for (y = 0; y < ymis; y++) {
      for (x = 0; x < xmis; x++) {
        const int bl_index2 = bl_index + y * cm->mi_cols + x;
        // If the block is a candidate for clean up, then mark it for possible
        // boost/refresh (segment 1). The segment id may get reset to 0 later
        // depending on the coding mode.
        if (cr->map[bl_index2] == 0) {
          count_tot++;
          if (cr->content_mode == 0 ||
              cr->last_coded_q_map[bl_index2] > qindex_thresh ||
              cpi->consec_zero_mv[bl_index2] < consec_zero_mv_thresh_block) {
            sum_map++;
            count_sel++;
          }
        } else if (cr->map[bl_index2] < 0) {
          cr->map[bl_index2]++;
        }
      }
    }
    // Enforce a constant segment over the superblock.
    // If the segment covers at least half of the superblock, set it to 1.
    if (sum_map >= xmis * ymis / 2) {
      // This superblock is a candidate for refresh:
      // compute the spatial variance and exclude blocks that are spatially
      // flat and stationary. Note: this is currently only done for screen
      // content mode.
      if (compute_content && cr->skip_flat_static_blocks)
        flat_static_blocks =
            is_superblock_flat_static(cpi, sb_row_index, sb_col_index);
      if (!flat_static_blocks) {
        // Label this superblock as segment 1.
        for (y = 0; y < ymis; y++)
          for (x = 0; x < xmis; x++) {
            seg_map[bl_index + y * cm->mi_cols + x] = CR_SEGMENT_ID_BOOST1;
          }
        cr->target_num_seg_blocks += xmis * ymis;
      }
    }
    i++;
    if (i == sbs_in_frame) {
      i = 0;
    }
  } while (cr->target_num_seg_blocks < block_count && i != cr->sb_index);
  cr->sb_index = i;
  cr->reduce_refresh = 0;
  if (cpi->oxcf.content != VP9E_CONTENT_SCREEN)
    if (count_sel < (3 * count_tot) >> 2) cr->reduce_refresh = 1;
}

// Set cyclic refresh parameters.
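// This decides whether cyclic refresh is applied at all for this frame and,
// if so, sets the refresh rate (percent_refresh), the delta-q limits and rate
// ratios for the boosted segments, the motion/rate thresholds, and the
// pre-encode segment weight used by the rate control.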
void vp9_cyclic_refresh_update_parameters(VP9_COMP *const cpi) {
  const RATE_CONTROL *const rc = &cpi->rc;
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int num8x8bl = cm->MBs << 2;
  int target_refresh = 0;
  double weight_segment_target = 0;
  double weight_segment = 0;
  int thresh_low_motion = 20;
  int qp_thresh = VPXMIN((cpi->oxcf.content == VP9E_CONTENT_SCREEN) ? 35 : 20,
                         rc->best_quality << 1);
  int qp_max_thresh = 117 * MAXQ >> 7;
  cr->apply_cyclic_refresh = 1;
  if (frame_is_intra_only(cm) || cpi->svc.temporal_layer_id > 0 ||
      is_lossless_requested(&cpi->oxcf) ||
      rc->avg_frame_qindex[INTER_FRAME] < qp_thresh ||
      (cpi->use_svc &&
       cpi->svc.layer_context[cpi->svc.temporal_layer_id].is_key_frame) ||
      (!cpi->use_svc && cr->content_mode &&
       rc->avg_frame_low_motion < thresh_low_motion &&
       rc->frames_since_key > 40) ||
      (!cpi->use_svc && rc->avg_frame_qindex[INTER_FRAME] > qp_max_thresh &&
       rc->frames_since_key > 20) ||
      (cpi->roi.enabled && cpi->roi.skip[BACKGROUND_SEG_SKIP_ID] &&
       rc->frames_since_key > FRAMES_NO_SKIPPING_AFTER_KEY)) {
    cr->apply_cyclic_refresh = 0;
    return;
  }
  cr->percent_refresh = 10;
  if (cr->reduce_refresh) cr->percent_refresh = 5;
  cr->max_qdelta_perc = 60;
  cr->time_for_refresh = 0;
  cr->motion_thresh = 32;
  cr->rate_boost_fac = 15;
  // Use larger delta-qp (increase rate_ratio_qdelta) for the first few (~4)
  // periods of the refresh cycle, after a key frame.
  // Account for larger interval on base layer for temporal layers.
  if (cr->percent_refresh > 0 &&
      rc->frames_since_key <
          (4 * cpi->svc.number_temporal_layers) * (100 / cr->percent_refresh)) {
    cr->rate_ratio_qdelta = 3.0;
  } else {
    cr->rate_ratio_qdelta = 2.0;
    if (cr->content_mode && cpi->noise_estimate.enabled &&
        cpi->noise_estimate.level >= kMedium) {
      // Reduce the delta-qp if the estimated source noise is above threshold.
      cr->rate_ratio_qdelta = 1.7;
      cr->rate_boost_fac = 13;
    }
  }
  // For screen-content: keep rate_ratio_qdelta at 2.0 (segment#1 boost) and
  // percent_refresh (refresh rate) at 10. But reduce the rate boost for
  // segment#2 (rate_boost_fac = 10 disables segment#2).
  if (cpi->oxcf.content == VP9E_CONTENT_SCREEN) {
    // Only enable the feature of skipping flat_static blocks for the top
    // spatial layer under screen content mode.
    if (cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1)
      cr->skip_flat_static_blocks = 1;
    cr->percent_refresh = (cr->skip_flat_static_blocks) ? 5 : 10;
    // Increase the amount of refresh on scene change that is encoded at max Q,
    // increased for a few cycles of the refresh period (~100 / percent_refresh).
    if (cr->content_mode && cr->counter_encode_maxq_scene_change < 30)
      cr->percent_refresh = (cr->skip_flat_static_blocks) ? 10 : 15;
    cr->rate_ratio_qdelta = 2.0;
    cr->rate_boost_fac = 10;
  }
  // Adjust some parameters for low resolutions.
  if (cm->width * cm->height <= 352 * 288) {
    if (rc->avg_frame_bandwidth < 3000) {
      cr->motion_thresh = 64;
      cr->rate_boost_fac = 13;
    } else {
      cr->max_qdelta_perc = 70;
      cr->rate_ratio_qdelta = VPXMAX(cr->rate_ratio_qdelta, 2.5);
    }
  }
  if (cpi->oxcf.rc_mode == VPX_VBR) {
    // To be adjusted for VBR mode, e.g., based on gf period and boost.
    // For now use a smaller qp-delta (than CBR), no second boosted segment,
    // and turn off refresh on golden-refresh frames (since they are already
    // boosted).
    cr->percent_refresh = 10;
    cr->rate_ratio_qdelta = 1.5;
    cr->rate_boost_fac = 10;
    if (cpi->refresh_golden_frame == 1 && !cpi->use_svc) {
      cr->percent_refresh = 0;
      cr->rate_ratio_qdelta = 1.0;
    }
  }
  // Weight for the segment prior to encoding: take the average of the target
  // number for the frame to be encoded and the actual number from the previous
  // frame. Use the target if it's less. To be used for setting the base qp for
  // the frame in vp9_rc_regulate_q.
  target_refresh = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
  weight_segment_target = (double)(target_refresh) / num8x8bl;
  weight_segment = (double)((target_refresh + cr->actual_num_seg1_blocks +
                             cr->actual_num_seg2_blocks) >>
                            1) /
                   num8x8bl;
  if (weight_segment_target < 7 * weight_segment / 8)
    weight_segment = weight_segment_target;
  // For screen-content: don't include the target in the weight segment,
  // since for all flat areas the segment is reset, so it's more accurate
  // to just use the previous actual number of seg blocks for the weight.
  if (cpi->oxcf.content == VP9E_CONTENT_SCREEN)
    weight_segment =
        (double)(cr->actual_num_seg1_blocks + cr->actual_num_seg2_blocks) /
        num8x8bl;
  cr->weight_segment = weight_segment;
  if (cr->content_mode == 0) {
    cr->actual_num_seg1_blocks =
        cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
    cr->actual_num_seg2_blocks = 0;
    cr->weight_segment = (double)(cr->actual_num_seg1_blocks) / num8x8bl;
  }
}

// Set up cyclic background refresh: set the delta-q and segmentation map.
void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  int scene_change_detected =
      cpi->rc.high_source_sad ||
      (cpi->use_svc && cpi->svc.high_source_sad_superframe);
  if (cm->current_video_frame == 0) cr->low_content_avg = 0.0;
  // Reset if a resolution change has occurred.
  if (cpi->resize_pending != 0) vp9_cyclic_refresh_reset_resize(cpi);
  if (!cr->apply_cyclic_refresh || (cpi->force_update_segmentation) ||
      scene_change_detected) {
    // Set the segmentation map to 0 and disable segmentation.
    unsigned char *const seg_map = cpi->segmentation_map;
    memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    vp9_disable_segmentation(&cm->seg);
    if (cm->frame_type == KEY_FRAME || scene_change_detected) {
      memset(cr->last_coded_q_map, MAXQ,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
      cr->sb_index = 0;
      cr->reduce_refresh = 0;
      cr->counter_encode_maxq_scene_change = 0;
    }
    return;
  } else {
    int qindex_delta = 0;
    int qindex2;
    const double q = vp9_convert_qindex_to_q(cm->base_qindex, cm->bit_depth);
    cr->counter_encode_maxq_scene_change++;
    vpx_clear_system_state();
    // Set the rate threshold to some multiple of the target rate (the target
    // is given by sb64_target_rate and scaled by 256); the current factor,
    // via the << 2, is 4.
    cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    // q will not exceed 457, so (q * q) is within 32bit; see:
    // vp9_convert_qindex_to_q(), vp9_ac_quant(), ac_qlookup*[].
    cr->thresh_dist_sb = ((int64_t)(q * q)) << 2;

    // Set up segmentation.
    // Clear down the segment map.
    vp9_enable_segmentation(&cm->seg);
    vp9_clearall_segfeatures(seg);
    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;

    // Note: setting temporal_update has no effect, as the seg-map coding
    // method (temporal or spatial) is determined in
    // vp9_choose_segmap_coding_method(), based on the coding cost of each
    // method. When error_resilient mode is on, the last_frame_seg_map is set
    // to 0, so if temporal coding is used, it is relative to an all-zero
    // previous map.
    // seg->temporal_update = 0;

    // Segment BASE "Q" feature is disabled so it defaults to the baseline Q.
    vp9_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q);
    // Use segment BOOST1 for in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q);
    // Use segment BOOST2 for more aggressive in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q);

    // Set the q delta for segment BOOST1.
    qindex_delta = compute_deltaq(cpi, cm->base_qindex, cr->rate_ratio_qdelta);
    cr->qindex_delta[1] = qindex_delta;

    // Compute rd-mult for segment BOOST1.
    qindex2 = clamp(cm->base_qindex + cm->y_dc_delta_q + qindex_delta, 0, MAXQ);

    cr->rdmult = vp9_compute_rd_mult(cpi, qindex2);

    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta);

    // Set a more aggressive (higher) q delta for segment BOOST2.
    qindex_delta = compute_deltaq(
        cpi, cm->base_qindex,
        VPXMIN(CR_MAX_RATE_TARGET_RATIO,
               0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta));
    cr->qindex_delta[2] = qindex_delta;
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta);

    // Update the segmentation and refresh map.
    cyclic_refresh_update_map(cpi);
  }
}

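// Return the rd multiplier computed for the boosted (BOOST1) segment in
// vp9_cyclic_refresh_setup().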
int vp9_cyclic_refresh_get_rdmult(const CYCLIC_REFRESH *cr) {
  return cr->rdmult;
}

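// Reset the cyclic refresh state after a resolution change: clear the refresh
// map, reset the last-coded-q map to MAXQ, restart the superblock cycle, and
// force a golden/alt-ref frame refresh.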
void vp9_cyclic_refresh_reset_resize(VP9_COMP *const cpi) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  memset(cr->map, 0, cm->mi_rows * cm->mi_cols);
  memset(cr->last_coded_q_map, MAXQ,
         cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
  cr->sb_index = 0;
  cpi->refresh_golden_frame = 1;
  cpi->refresh_alt_ref_frame = 1;
  cr->counter_encode_maxq_scene_change = 0;
}

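// Limit the frame-level decrease in q: when cyclic refresh is active
// (percent_refresh > 0), the base q index may drop by at most 8 relative to
// the previous frame's q.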
void vp9_cyclic_refresh_limit_q(const VP9_COMP *cpi, int *q) {
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // For now apply hard limit to frame-level decrease in q, if the cyclic
  // refresh is active (percent_refresh > 0).
  if (cr->percent_refresh > 0 && cpi->rc.q_1_frame - *q > 8) {
    *q = cpi->rc.q_1_frame - 8;
  }
}