vp9_aq_cyclicrefresh.c 29 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687
  1. /*
  2. * Copyright (c) 2014 The WebM project authors. All Rights Reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
#include <assert.h>
#include <limits.h>
#include <math.h>
#include <string.h>

#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_ports/system_state.h"

#include "vp9/common/vp9_seg_common.h"

#include "vp9/encoder/vp9_aq_cyclicrefresh.h"
#include "vp9/encoder/vp9_ratectrl.h"
#include "vp9/encoder/vp9_segmentation.h"
// Flat 64-sample reference block (all 128) passed as the "reference" operand
// of the 64x64 variance function (with stride 0) so that vf() yields the
// spatial variance of a source superblock; see is_superblock_flat_static().
static const uint8_t VP9_VAR_OFFS[64] = {
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128,
  128, 128, 128, 128
};
  25. CYCLIC_REFRESH *vp9_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
  26. size_t last_coded_q_map_size;
  27. CYCLIC_REFRESH *const cr = vpx_calloc(1, sizeof(*cr));
  28. if (cr == NULL) return NULL;
  29. cr->map = vpx_calloc(mi_rows * mi_cols, sizeof(*cr->map));
  30. if (cr->map == NULL) {
  31. vp9_cyclic_refresh_free(cr);
  32. return NULL;
  33. }
  34. last_coded_q_map_size = mi_rows * mi_cols * sizeof(*cr->last_coded_q_map);
  35. cr->last_coded_q_map = vpx_malloc(last_coded_q_map_size);
  36. if (cr->last_coded_q_map == NULL) {
  37. vp9_cyclic_refresh_free(cr);
  38. return NULL;
  39. }
  40. assert(MAXQ <= 255);
  41. memset(cr->last_coded_q_map, MAXQ, last_coded_q_map_size);
  42. cr->counter_encode_maxq_scene_change = 0;
  43. return cr;
  44. }
  45. void vp9_cyclic_refresh_free(CYCLIC_REFRESH *cr) {
  46. if (cr != NULL) {
  47. vpx_free(cr->map);
  48. vpx_free(cr->last_coded_q_map);
  49. vpx_free(cr);
  50. }
  51. }
  52. // Check if this coding block, of size bsize, should be considered for refresh
  53. // (lower-qp coding). Decision can be based on various factors, such as
  54. // size of the coding block (i.e., below min_block size rejected), coding
  55. // mode, and rate/distortion.
  56. static int candidate_refresh_aq(const CYCLIC_REFRESH *cr, const MODE_INFO *mi,
  57. int64_t rate, int64_t dist, int bsize) {
  58. MV mv = mi->mv[0].as_mv;
  59. // Reject the block for lower-qp coding if projected distortion
  60. // is above the threshold, and any of the following is true:
  61. // 1) mode uses large mv
  62. // 2) mode is an intra-mode
  63. // Otherwise accept for refresh.
  64. if (dist > cr->thresh_dist_sb &&
  65. (mv.row > cr->motion_thresh || mv.row < -cr->motion_thresh ||
  66. mv.col > cr->motion_thresh || mv.col < -cr->motion_thresh ||
  67. !is_inter_block(mi)))
  68. return CR_SEGMENT_ID_BASE;
  69. else if (bsize >= BLOCK_16X16 && rate < cr->thresh_rate_sb &&
  70. is_inter_block(mi) && mi->mv[0].as_int == 0 &&
  71. cr->rate_boost_fac > 10)
  72. // More aggressive delta-q for bigger blocks with zero motion.
  73. return CR_SEGMENT_ID_BOOST2;
  74. else
  75. return CR_SEGMENT_ID_BOOST1;
  76. }
  77. // Compute delta-q for the segment.
  78. static int compute_deltaq(const VP9_COMP *cpi, int q, double rate_factor) {
  79. const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  80. const RATE_CONTROL *const rc = &cpi->rc;
  81. int deltaq = vp9_compute_qdelta_by_rate(rc, cpi->common.frame_type, q,
  82. rate_factor, cpi->common.bit_depth);
  83. if ((-deltaq) > cr->max_qdelta_perc * q / 100) {
  84. deltaq = -cr->max_qdelta_perc * q / 100;
  85. }
  86. return deltaq;
  87. }
  88. // For the just encoded frame, estimate the bits, incorporating the delta-q
  89. // from non-base segment. For now ignore effect of multiple segments
  90. // (with different delta-q). Note this function is called in the postencode
  91. // (called from rc_update_rate_correction_factors()).
  92. int vp9_cyclic_refresh_estimate_bits_at_q(const VP9_COMP *cpi,
  93. double correction_factor) {
  94. const VP9_COMMON *const cm = &cpi->common;
  95. const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  96. int estimated_bits;
  97. int mbs = cm->MBs;
  98. int num8x8bl = mbs << 2;
  99. // Weight for non-base segments: use actual number of blocks refreshed in
  100. // previous/just encoded frame. Note number of blocks here is in 8x8 units.
  101. double weight_segment1 = (double)cr->actual_num_seg1_blocks / num8x8bl;
  102. double weight_segment2 = (double)cr->actual_num_seg2_blocks / num8x8bl;
  103. // Take segment weighted average for estimated bits.
  104. estimated_bits =
  105. (int)((1.0 - weight_segment1 - weight_segment2) *
  106. vp9_estimate_bits_at_q(cm->frame_type, cm->base_qindex, mbs,
  107. correction_factor, cm->bit_depth) +
  108. weight_segment1 *
  109. vp9_estimate_bits_at_q(cm->frame_type,
  110. cm->base_qindex + cr->qindex_delta[1],
  111. mbs, correction_factor, cm->bit_depth) +
  112. weight_segment2 *
  113. vp9_estimate_bits_at_q(cm->frame_type,
  114. cm->base_qindex + cr->qindex_delta[2],
  115. mbs, correction_factor, cm->bit_depth));
  116. return estimated_bits;
  117. }
  118. // Prior to encoding the frame, estimate the bits per mb, for a given q = i and
  119. // a corresponding delta-q (for segment 1). This function is called in the
  120. // rc_regulate_q() to set the base qp index.
  121. // Note: the segment map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or
  122. // to 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock, prior to encoding.
  123. int vp9_cyclic_refresh_rc_bits_per_mb(const VP9_COMP *cpi, int i,
  124. double correction_factor) {
  125. const VP9_COMMON *const cm = &cpi->common;
  126. CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  127. int bits_per_mb;
  128. int deltaq = 0;
  129. if (cpi->oxcf.speed < 8)
  130. deltaq = compute_deltaq(cpi, i, cr->rate_ratio_qdelta);
  131. else
  132. deltaq = -(cr->max_qdelta_perc * i) / 200;
  133. // Take segment weighted average for bits per mb.
  134. bits_per_mb = (int)((1.0 - cr->weight_segment) *
  135. vp9_rc_bits_per_mb(cm->frame_type, i,
  136. correction_factor, cm->bit_depth) +
  137. cr->weight_segment *
  138. vp9_rc_bits_per_mb(cm->frame_type, i + deltaq,
  139. correction_factor, cm->bit_depth));
  140. return bits_per_mb;
  141. }
// Prior to coding a given prediction block, of size bsize at (mi_row, mi_col),
// check if we should reset the segment_id, and update the cyclic_refresh map
// and segmentation map.
// Side effects: may rewrite mi->segment_id, and writes new_map_value /
// segment_id into every covered 8x8 entry of cr->map and
// cpi->segmentation_map.
void vp9_cyclic_refresh_update_segment(VP9_COMP *const cpi, MODE_INFO *const mi,
                                       int mi_row, int mi_col, BLOCK_SIZE bsize,
                                       int64_t rate, int64_t dist, int skip,
                                       struct macroblock_plane *const p) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  // Clamp the block extent (in mi/8x8 units) to the frame border.
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int refresh_this_block = candidate_refresh_aq(cr, mi, rate, dist, bsize);
  // Default is to not update the refresh map.
  int new_map_value = cr->map[block_index];
  int x = 0;
  int y = 0;
  int is_skin = 0;
  // Small blocks rejected for refresh are reconsidered if skin is detected.
  if (refresh_this_block == 0 && bsize <= BLOCK_16X16 &&
      cpi->use_skin_detection) {
    is_skin =
        vp9_compute_skin_block(p[0].src.buf, p[1].src.buf, p[2].src.buf,
                               p[0].src.stride, p[1].src.stride, bsize, 0, 0);
    if (is_skin) refresh_this_block = 1;
  }
  // In VBR mode, don't refresh blocks predicted from the golden frame
  // (presumably already boosted via the golden-frame path — see
  // vp9_cyclic_refresh_update_parameters()).
  if (cpi->oxcf.rc_mode == VPX_VBR && mi->ref_frame[0] == GOLDEN_FRAME)
    refresh_this_block = 0;
  // If this block is labeled for refresh, check if we should reset the
  // segment_id.
  if (cyclic_refresh_segment_id_boosted(mi->segment_id)) {
    mi->segment_id = refresh_this_block;
    // Reset segment_id if it will be skipped.
    if (skip) mi->segment_id = CR_SEGMENT_ID_BASE;
  }
  // Update the cyclic refresh map, to be used for setting segmentation map
  // for the next frame. If the block will be refreshed this frame, mark it
  // as clean. The magnitude of the -ve influences how long before we consider
  // it for refresh again.
  if (cyclic_refresh_segment_id_boosted(mi->segment_id)) {
    new_map_value = -cr->time_for_refresh;
  } else if (refresh_this_block) {
    // Else if it is accepted as candidate for refresh, and has not already
    // been refreshed (marked as 1) then mark it as a candidate for cleanup
    // for future time (marked as 0), otherwise don't update it.
    if (cr->map[block_index] == 1) new_map_value = 0;
  } else {
    // Leave it marked as block that is not candidate for refresh.
    new_map_value = 1;
  }
  // Update entries in the cyclic refresh map with new_map_value, and
  // copy mbmi->segment_id into global segmentation map.
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      cr->map[map_offset] = new_map_value;
      cpi->segmentation_map[map_offset] = mi->segment_id;
    }
}
// After encoding a block: record, per covered 8x8 unit, the qindex at which
// the block was effectively coded, into cr->last_coded_q_map. This map later
// drives the refresh decision in cyclic_refresh_update_map().
void vp9_cyclic_refresh_update_sb_postencode(VP9_COMP *const cpi,
                                             const MODE_INFO *const mi,
                                             int mi_row, int mi_col,
                                             BLOCK_SIZE bsize) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  // Clamp the block extent (in mi/8x8 units) to the frame border.
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int x, y;
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      // Inter skip blocks were clearly not coded at the current qindex, so
      // don't update the map for them. For cases where motion is non-zero or
      // the reference frame isn't the previous frame, the previous value in
      // the map for this spatial location is not entirely correct.
      if ((!is_inter_block(mi) || !mi->skip) &&
          mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        // Non-skip block: record the qindex actually applied (base + the
        // segment's delta), clamped to the valid range.
        cr->last_coded_q_map[map_offset] =
            clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id], 0, MAXQ);
      } else if (is_inter_block(mi) && mi->skip &&
                 mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        // Inter skip block: keep the minimum of the previous value and the
        // current frame's qindex, i.e. only allow the map to improve.
        cr->last_coded_q_map[map_offset] = VPXMIN(
            clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id], 0, MAXQ),
            cr->last_coded_q_map[map_offset]);
      }
    }
}
// From the just encoded frame: update the actual number of blocks that were
// applied the segment delta q, and the amount of low motion in the frame.
// Also check conditions for forcing golden update, or preventing golden
// update if the period is up.
void vp9_cyclic_refresh_postencode(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  MODE_INFO **mi = cm->mi_grid_visible;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  RATE_CONTROL *const rc = &cpi->rc;
  unsigned char *const seg_map = cpi->segmentation_map;
  double fraction_low = 0.0;
  int force_gf_refresh = 0;
  int low_content_frame = 0;
  int mi_row, mi_col;
  cr->actual_num_seg1_blocks = 0;
  cr->actual_num_seg2_blocks = 0;
  // Walk the final (post-encode) segmentation map and the visible mi grid in
  // lockstep, counting how many 8x8 blocks actually landed in each boosted
  // segment, and how many inter blocks have small motion.
  for (mi_row = 0; mi_row < cm->mi_rows; mi_row++) {
    for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
      MV mv = mi[0]->mv[0].as_mv;
      int map_index = mi_row * cm->mi_cols + mi_col;
      if (cyclic_refresh_segment_id(seg_map[map_index]) == CR_SEGMENT_ID_BOOST1)
        cr->actual_num_seg1_blocks++;
      else if (cyclic_refresh_segment_id(seg_map[map_index]) ==
               CR_SEGMENT_ID_BOOST2)
        cr->actual_num_seg2_blocks++;
      // Accumulate low_content_frame: inter blocks with |mv| below 16
      // (1/8-pel units here) count as low content.
      if (is_inter_block(mi[0]) && abs(mv.row) < 16 && abs(mv.col) < 16)
        low_content_frame++;
      mi++;
    }
    // NOTE(review): skips 8 entries at each row end — assumes the mi grid
    // stride is mi_cols + 8; confirm against cm->mi_stride.
    mi += 8;
  }
  // Check for golden frame update: only for non-SVC and non-golden boost.
  if (!cpi->use_svc && cpi->ext_refresh_frame_flags_pending == 0 &&
      !cpi->oxcf.gf_cbr_boost_pct) {
    // Force this frame as a golden update frame if this frame changes the
    // resolution (resize_pending != 0).
    if (cpi->resize_pending != 0) {
      vp9_cyclic_refresh_set_golden_update(cpi);
      rc->frames_till_gf_update_due = rc->baseline_gf_interval;
      if (rc->frames_till_gf_update_due > rc->frames_to_key)
        rc->frames_till_gf_update_due = rc->frames_to_key;
      cpi->refresh_golden_frame = 1;
      force_gf_refresh = 1;
    }
    // Update average of low content/motion in the frame: recursive average
    // with weight 1/4 on the current frame.
    fraction_low = (double)low_content_frame / (cm->mi_rows * cm->mi_cols);
    cr->low_content_avg = (fraction_low + 3 * cr->low_content_avg) / 4;
    if (!force_gf_refresh && cpi->refresh_golden_frame == 1 &&
        rc->frames_since_key > rc->frames_since_golden + 1) {
      // Don't update golden reference if the amount of low_content for the
      // current encoded frame is small, or if the recursive average of the
      // low_content over the update interval window falls below threshold.
      if (fraction_low < 0.65 || cr->low_content_avg < 0.6) {
        cpi->refresh_golden_frame = 0;
      }
      // Reset for next internal.
      cr->low_content_avg = fraction_low;
    }
  }
}
  294. // Set golden frame update interval, for non-svc 1 pass CBR mode.
  295. void vp9_cyclic_refresh_set_golden_update(VP9_COMP *const cpi) {
  296. RATE_CONTROL *const rc = &cpi->rc;
  297. CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  298. // Set minimum gf_interval for GF update to a multiple of the refresh period,
  299. // with some max limit. Depending on past encoding stats, GF flag may be
  300. // reset and update may not occur until next baseline_gf_interval.
  301. if (cr->percent_refresh > 0)
  302. rc->baseline_gf_interval = VPXMIN(4 * (100 / cr->percent_refresh), 40);
  303. else
  304. rc->baseline_gf_interval = 40;
  305. if (cpi->oxcf.rc_mode == VPX_VBR) rc->baseline_gf_interval = 20;
  306. if (rc->avg_frame_low_motion < 50 && rc->frames_since_key > 40)
  307. rc->baseline_gf_interval = 10;
  308. }
  309. static int is_superblock_flat_static(VP9_COMP *const cpi, int sb_row_index,
  310. int sb_col_index) {
  311. unsigned int source_variance;
  312. const uint8_t *src_y = cpi->Source->y_buffer;
  313. const int ystride = cpi->Source->y_stride;
  314. unsigned int sse;
  315. const BLOCK_SIZE bsize = BLOCK_64X64;
  316. src_y += (sb_row_index << 6) * ystride + (sb_col_index << 6);
  317. source_variance =
  318. cpi->fn_ptr[bsize].vf(src_y, ystride, VP9_VAR_OFFS, 0, &sse);
  319. if (source_variance == 0) {
  320. uint64_t block_sad;
  321. const uint8_t *last_src_y = cpi->Last_Source->y_buffer;
  322. const int last_ystride = cpi->Last_Source->y_stride;
  323. last_src_y += (sb_row_index << 6) * ystride + (sb_col_index << 6);
  324. block_sad =
  325. cpi->fn_ptr[bsize].sdf(src_y, ystride, last_src_y, last_ystride);
  326. if (block_sad == 0) return 1;
  327. }
  328. return 0;
  329. }
// Update the segmentation map, and related quantities: cyclic refresh map,
// refresh sb_index, and target number of blocks to be refreshed.
// The map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or to
// 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock.
// Blocks labeled as BOOST1 may later get set to BOOST2 (during the
// encoding of the superblock).
static void cyclic_refresh_update_map(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  unsigned char *const seg_map = cpi->segmentation_map;
  int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame;
  int xmis, ymis, x, y;
  int consec_zero_mv_thresh = 0;
  int qindex_thresh = 0;
  // count_sel/count_tot track, over all cleanup-candidate blocks, how many
  // actually got selected — used at the end to decide reduce_refresh.
  int count_sel = 0;
  int count_tot = 0;
  memset(seg_map, CR_SEGMENT_ID_BASE, cm->mi_rows * cm->mi_cols);
  sb_cols = (cm->mi_cols + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
  sb_rows = (cm->mi_rows + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
  sbs_in_frame = sb_cols * sb_rows;
  // Number of target blocks to get the q delta (segment 1).
  block_count = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
  // Set the segmentation map: cycle through the superblocks, starting at
  // cr->mb_index, and stopping when either block_count blocks have been found
  // to be refreshed, or we have passed through whole frame.
  assert(cr->sb_index < sbs_in_frame);
  i = cr->sb_index;
  cr->target_num_seg_blocks = 0;
  if (cpi->oxcf.content != VP9E_CONTENT_SCREEN) {
    consec_zero_mv_thresh = 100;
  }
  // Blocks whose last coded qindex exceeds this threshold are considered
  // worth refreshing; screen content uses the more aggressive BOOST2 q.
  qindex_thresh =
      cpi->oxcf.content == VP9E_CONTENT_SCREEN
          ? vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST2, cm->base_qindex)
          : vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST1, cm->base_qindex);
  // More aggressive settings for noisy content.
  if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium) {
    consec_zero_mv_thresh = 60;
    qindex_thresh =
        VPXMAX(vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST1, cm->base_qindex),
               cm->base_qindex);
  }
  // Cycle through superblocks (wrapping at the frame end) until enough
  // blocks are labeled or we return to the starting superblock.
  do {
    int sum_map = 0;
    int consec_zero_mv_thresh_block = consec_zero_mv_thresh;
    // Get the mi_row/mi_col corresponding to superblock index i.
    int sb_row_index = (i / sb_cols);
    int sb_col_index = i - sb_row_index * sb_cols;
    int mi_row = sb_row_index * MI_BLOCK_SIZE;
    int mi_col = sb_col_index * MI_BLOCK_SIZE;
    int flat_static_blocks = 0;
    int compute_content = 1;
    assert(mi_row >= 0 && mi_row < cm->mi_rows);
    assert(mi_col >= 0 && mi_col < cm->mi_cols);
#if CONFIG_VP9_HIGHBITDEPTH
    // The flat/static check below reads 8-bit buffers only.
    if (cpi->common.use_highbitdepth) compute_content = 0;
#endif
    // Content check requires a last source of identical dimensions.
    if (cpi->Last_Source == NULL ||
        cpi->Last_Source->y_width != cpi->Source->y_width ||
        cpi->Last_Source->y_height != cpi->Source->y_height)
      compute_content = 0;
    bl_index = mi_row * cm->mi_cols + mi_col;
    // Loop through all 8x8 blocks in superblock and update map.
    xmis =
        VPXMIN(cm->mi_cols - mi_col, num_8x8_blocks_wide_lookup[BLOCK_64X64]);
    ymis =
        VPXMIN(cm->mi_rows - mi_row, num_8x8_blocks_high_lookup[BLOCK_64X64]);
    // Lower the zero-mv threshold for small border superblocks on noisy
    // content.
    if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium &&
        (xmis <= 2 || ymis <= 2))
      consec_zero_mv_thresh_block = 4;
    for (y = 0; y < ymis; y++) {
      for (x = 0; x < xmis; x++) {
        const int bl_index2 = bl_index + y * cm->mi_cols + x;
        // If the block is as a candidate for clean up then mark it
        // for possible boost/refresh (segment 1). The segment id may get
        // reset to 0 later depending on the coding mode.
        if (cr->map[bl_index2] == 0) {
          count_tot++;
          if (cr->last_coded_q_map[bl_index2] > qindex_thresh ||
              cpi->consec_zero_mv[bl_index2] < consec_zero_mv_thresh_block) {
            sum_map++;
            count_sel++;
          }
        } else if (cr->map[bl_index2] < 0) {
          // Negative map values age toward 0 (eligible for refresh again).
          cr->map[bl_index2]++;
        }
      }
    }
    // Enforce constant segment over superblock.
    // If segment is at least half of superblock, set to 1.
    if (sum_map >= xmis * ymis / 2) {
      // This superblock is a candidate for refresh:
      // compute spatial variance and exclude blocks that are spatially flat
      // and stationary. Note: this is currently only done for screen content
      // mode.
      if (compute_content && cr->skip_flat_static_blocks)
        flat_static_blocks =
            is_superblock_flat_static(cpi, sb_row_index, sb_col_index);
      if (!flat_static_blocks) {
        // Label this superblock as segment 1.
        for (y = 0; y < ymis; y++)
          for (x = 0; x < xmis; x++) {
            seg_map[bl_index + y * cm->mi_cols + x] = CR_SEGMENT_ID_BOOST1;
          }
        cr->target_num_seg_blocks += xmis * ymis;
      }
    }
    i++;
    if (i == sbs_in_frame) {
      i = 0;
    }
  } while (cr->target_num_seg_blocks < block_count && i != cr->sb_index);
  cr->sb_index = i;
  cr->reduce_refresh = 0;
  // For non-screen content: if fewer than 3/4 of the cleanup candidates
  // were selected, reduce the refresh rate next frame. (>> binds tighter
  // than <, so this reads count_sel < ((3 * count_tot) >> 2).)
  if (cpi->oxcf.content != VP9E_CONTENT_SCREEN)
    if (count_sel < (3 * count_tot) >> 2) cr->reduce_refresh = 1;
}
// Set cyclic refresh parameters for the upcoming frame: whether refresh
// applies at all, the refresh rate (percent_refresh), q-delta limits and
// rate ratios, and the segment weight used for base-q regulation.
void vp9_cyclic_refresh_update_parameters(VP9_COMP *const cpi) {
  const RATE_CONTROL *const rc = &cpi->rc;
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // Number of 8x8 blocks in the frame (4 per macroblock).
  int num8x8bl = cm->MBs << 2;
  int target_refresh = 0;
  double weight_segment_target = 0;
  double weight_segment = 0;
  int thresh_low_motion = 20;
  int qp_thresh = VPXMIN((cpi->oxcf.content == VP9E_CONTENT_SCREEN) ? 35 : 20,
                         rc->best_quality << 1);
  int qp_max_thresh = 117 * MAXQ >> 7;
  cr->apply_cyclic_refresh = 1;
  // Disable refresh for: intra-only frames, enhancement temporal layers,
  // lossless coding, very low average qp, SVC key frames, sustained low
  // motion, or a sustained very high qp.
  if (frame_is_intra_only(cm) || cpi->svc.temporal_layer_id > 0 ||
      is_lossless_requested(&cpi->oxcf) ||
      rc->avg_frame_qindex[INTER_FRAME] < qp_thresh ||
      (cpi->use_svc &&
       cpi->svc.layer_context[cpi->svc.temporal_layer_id].is_key_frame) ||
      (!cpi->use_svc && rc->avg_frame_low_motion < thresh_low_motion &&
       rc->frames_since_key > 40) ||
      (!cpi->use_svc && rc->avg_frame_qindex[INTER_FRAME] > qp_max_thresh &&
       rc->frames_since_key > 20)) {
    cr->apply_cyclic_refresh = 0;
    return;
  }
  // Baseline parameters.
  cr->percent_refresh = 10;
  if (cr->reduce_refresh) cr->percent_refresh = 5;
  cr->max_qdelta_perc = 60;
  cr->time_for_refresh = 0;
  cr->motion_thresh = 32;
  cr->rate_boost_fac = 15;
  // Use larger delta-qp (increase rate_ratio_qdelta) for first few (~4)
  // periods of the refresh cycle, after a key frame.
  // Account for larger interval on base layer for temporal layers.
  if (cr->percent_refresh > 0 &&
      rc->frames_since_key <
          (4 * cpi->svc.number_temporal_layers) * (100 / cr->percent_refresh)) {
    cr->rate_ratio_qdelta = 3.0;
  } else {
    cr->rate_ratio_qdelta = 2.0;
    if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium) {
      // Reduce the delta-qp if the estimated source noise is above threshold.
      cr->rate_ratio_qdelta = 1.7;
      cr->rate_boost_fac = 13;
    }
  }
  // For screen-content: keep rate_ratio_qdelta to 2.0 (segment#1 boost) and
  // percent_refresh (refresh rate) to 10. But reduce rate boost for segment#2
  // (rate_boost_fac = 10 disables segment#2).
  if (cpi->oxcf.content == VP9E_CONTENT_SCREEN) {
    // Only enable feature of skipping flat_static blocks for top layer
    // under screen content mode.
    if (cpi->svc.spatial_layer_id == cpi->svc.number_spatial_layers - 1)
      cr->skip_flat_static_blocks = 1;
    cr->percent_refresh = (cr->skip_flat_static_blocks) ? 5 : 10;
    // Increase the amount of refresh on scene change that is encoded at max Q,
    // increase for a few cycles of the refresh period (~100 / percent_refresh).
    if (cr->counter_encode_maxq_scene_change < 30)
      cr->percent_refresh = (cr->skip_flat_static_blocks) ? 10 : 15;
    cr->rate_ratio_qdelta = 2.0;
    cr->rate_boost_fac = 10;
  }
  // Adjust some parameters for low resolutions.
  if (cm->width * cm->height <= 352 * 288) {
    if (rc->avg_frame_bandwidth < 3000) {
      // Very low bitrate: allow more motion, smaller segment#2 boost.
      cr->motion_thresh = 64;
      cr->rate_boost_fac = 13;
    } else {
      cr->max_qdelta_perc = 70;
      cr->rate_ratio_qdelta = VPXMAX(cr->rate_ratio_qdelta, 2.5);
    }
  }
  if (cpi->oxcf.rc_mode == VPX_VBR) {
    // To be adjusted for VBR mode, e.g., based on gf period and boost.
    // For now use smaller qp-delta (than CBR), no second boosted seg, and
    // turn-off (no refresh) on golden refresh (since it's already boosted).
    cr->percent_refresh = 10;
    cr->rate_ratio_qdelta = 1.5;
    cr->rate_boost_fac = 10;
    if (cpi->refresh_golden_frame == 1) {
      cr->percent_refresh = 0;
      cr->rate_ratio_qdelta = 1.0;
    }
  }
  // Weight for segment prior to encoding: take the average of the target
  // number for the frame to be encoded and the actual from the previous frame.
  // Use the target if its less. To be used for setting the base qp for the
  // frame in vp9_rc_regulate_q.
  target_refresh = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
  weight_segment_target = (double)(target_refresh) / num8x8bl;
  weight_segment = (double)((target_refresh + cr->actual_num_seg1_blocks +
                             cr->actual_num_seg2_blocks) >>
                            1) /
                   num8x8bl;
  if (weight_segment_target < 7 * weight_segment / 8)
    weight_segment = weight_segment_target;
  // For screen-content: don't include target for the weight segment,
  // since for all flat areas the segment is reset, so its more accurate
  // to just use the previous actual number of seg blocks for the weight.
  if (cpi->oxcf.content == VP9E_CONTENT_SCREEN)
    weight_segment =
        (double)(cr->actual_num_seg1_blocks + cr->actual_num_seg2_blocks) /
        num8x8bl;
  cr->weight_segment = weight_segment;
}
// Setup cyclic background refresh: set delta q and segmentation map.
// Called once per frame before encoding. Either disables segmentation
// (refresh off / forced update / scene change) or configures the BOOST1 and
// BOOST2 segment q-deltas and refreshes the segmentation map.
void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  int scene_change_detected =
      cpi->rc.high_source_sad ||
      (cpi->use_svc && cpi->svc.high_source_sad_superframe);
  if (cm->current_video_frame == 0) cr->low_content_avg = 0.0;
  // Reset if resolution change has occurred.
  if (cpi->resize_pending != 0) vp9_cyclic_refresh_reset_resize(cpi);
  if (!cr->apply_cyclic_refresh || (cpi->force_update_segmentation) ||
      scene_change_detected) {
    // Set segmentation map to 0 and disable.
    unsigned char *const seg_map = cpi->segmentation_map;
    memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    vp9_disable_segmentation(&cm->seg);
    // On key frame or scene change, restart the refresh cycle from scratch.
    if (cm->frame_type == KEY_FRAME || scene_change_detected) {
      memset(cr->last_coded_q_map, MAXQ,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
      cr->sb_index = 0;
      cr->reduce_refresh = 0;
      cr->counter_encode_maxq_scene_change = 0;
    }
    return;
  } else {
    int qindex_delta = 0;
    int qindex2;
    const double q = vp9_convert_qindex_to_q(cm->base_qindex, cm->bit_depth);
    cr->counter_encode_maxq_scene_change++;
    vpx_clear_system_state();
    // Set rate threshold to some multiple (set to 2 for now) of the target
    // rate (target is given by sb64_target_rate and scaled by 256).
    cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    // q will not exceed 457, so (q * q) is within 32bit; see:
    // vp9_convert_qindex_to_q(), vp9_ac_quant(), ac_qlookup*[].
    cr->thresh_dist_sb = ((int64_t)(q * q)) << 2;
    // Set up segmentation.
    // Clear down the segment map.
    vp9_enable_segmentation(&cm->seg);
    vp9_clearall_segfeatures(seg);
    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;
    // Note: setting temporal_update has no effect, as the seg-map coding method
    // (temporal or spatial) is determined in vp9_choose_segmap_coding_method(),
    // based on the coding cost of each method. For error_resilient mode on the
    // last_frame_seg_map is set to 0, so if temporal coding is used, it is
    // relative to 0 previous map.
    // seg->temporal_update = 0;
    // Segment BASE "Q" feature is disabled so it defaults to the baseline Q.
    vp9_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q);
    // Use segment BOOST1 for in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q);
    // Use segment BOOST2 for more aggressive in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q);
    // Set the q delta for segment BOOST1.
    qindex_delta = compute_deltaq(cpi, cm->base_qindex, cr->rate_ratio_qdelta);
    cr->qindex_delta[1] = qindex_delta;
    // Compute rd-mult for segment BOOST1.
    qindex2 = clamp(cm->base_qindex + cm->y_dc_delta_q + qindex_delta, 0, MAXQ);
    cr->rdmult = vp9_compute_rd_mult(cpi, qindex2);
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta);
    // Set a more aggressive (higher) q delta for segment BOOST2.
    qindex_delta = compute_deltaq(
        cpi, cm->base_qindex,
        VPXMIN(CR_MAX_RATE_TARGET_RATIO,
               0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta));
    cr->qindex_delta[2] = qindex_delta;
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta);
    // Update the segmentation and refresh map.
    cyclic_refresh_update_map(cpi);
  }
}
// Return the rd-mult value computed for the BOOST1 segment qindex in
// vp9_cyclic_refresh_setup().
int vp9_cyclic_refresh_get_rdmult(const CYCLIC_REFRESH *cr) {
  return cr->rdmult;
}
  631. void vp9_cyclic_refresh_reset_resize(VP9_COMP *const cpi) {
  632. const VP9_COMMON *const cm = &cpi->common;
  633. CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  634. memset(cr->map, 0, cm->mi_rows * cm->mi_cols);
  635. memset(cr->last_coded_q_map, MAXQ,
  636. cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
  637. cr->sb_index = 0;
  638. cpi->refresh_golden_frame = 1;
  639. cpi->refresh_alt_ref_frame = 1;
  640. cr->counter_encode_maxq_scene_change = 0;
  641. }
  642. void vp9_cyclic_refresh_limit_q(const VP9_COMP *cpi, int *q) {
  643. CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  644. // For now apply hard limit to frame-level decrease in q, if the cyclic
  645. // refresh is active (percent_refresh > 0).
  646. if (cr->percent_refresh > 0 && cpi->rc.q_1_frame - *q > 8) {
  647. *q = cpi->rc.q_1_frame - 8;
  648. }
  649. }