vp9_aq_cyclicrefresh.c

/*
 * Copyright (c) 2014 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <limits.h>
#include <math.h>

#include "vpx_dsp/vpx_dsp_common.h"
#include "vpx_ports/system_state.h"

#include "vp9/encoder/vp9_aq_cyclicrefresh.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/encoder/vp9_ratectrl.h"
#include "vp9/encoder/vp9_segmentation.h"

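// Allocate the CYCLIC_REFRESH struct together with its per-8x8-block refresh
// map and last-coded-q map (mi_rows * mi_cols entries each); the
// last-coded-q map is initialized to MAXQ.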
CYCLIC_REFRESH *vp9_cyclic_refresh_alloc(int mi_rows, int mi_cols) {
  size_t last_coded_q_map_size;
  CYCLIC_REFRESH *const cr = vpx_calloc(1, sizeof(*cr));
  if (cr == NULL) return NULL;
  cr->map = vpx_calloc(mi_rows * mi_cols, sizeof(*cr->map));
  if (cr->map == NULL) {
    vp9_cyclic_refresh_free(cr);
    return NULL;
  }
  last_coded_q_map_size = mi_rows * mi_cols * sizeof(*cr->last_coded_q_map);
  cr->last_coded_q_map = vpx_malloc(last_coded_q_map_size);
  if (cr->last_coded_q_map == NULL) {
    vp9_cyclic_refresh_free(cr);
    return NULL;
  }
  assert(MAXQ <= 255);
  memset(cr->last_coded_q_map, MAXQ, last_coded_q_map_size);
  return cr;
}

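// Free the CYCLIC_REFRESH struct, including its refresh map and
// last-coded-q map.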
void vp9_cyclic_refresh_free(CYCLIC_REFRESH *cr) {
  vpx_free(cr->map);
  vpx_free(cr->last_coded_q_map);
  vpx_free(cr);
}

// Check if this coding block, of size bsize, should be considered for refresh
// (lower-qp coding). Decision can be based on various factors, such as
// size of the coding block (i.e., below min_block size rejected), coding
// mode, and rate/distortion.
static int candidate_refresh_aq(const CYCLIC_REFRESH *cr, const MODE_INFO *mi,
                                int64_t rate, int64_t dist, int bsize) {
  MV mv = mi->mv[0].as_mv;
  // Reject the block for lower-qp coding if projected distortion
  // is above the threshold, and any of the following is true:
  // 1) mode uses large mv
  // 2) mode is an intra-mode
  // Otherwise accept for refresh.
  if (dist > cr->thresh_dist_sb &&
      (mv.row > cr->motion_thresh || mv.row < -cr->motion_thresh ||
       mv.col > cr->motion_thresh || mv.col < -cr->motion_thresh ||
       !is_inter_block(mi)))
    return CR_SEGMENT_ID_BASE;
  else if (bsize >= BLOCK_16X16 && rate < cr->thresh_rate_sb &&
           is_inter_block(mi) && mi->mv[0].as_int == 0 &&
           cr->rate_boost_fac > 10)
    // More aggressive delta-q for bigger blocks with zero motion.
    return CR_SEGMENT_ID_BOOST2;
  else
    return CR_SEGMENT_ID_BOOST1;
}

// Compute delta-q for the segment.
static int compute_deltaq(const VP9_COMP *cpi, int q, double rate_factor) {
  const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const RATE_CONTROL *const rc = &cpi->rc;
  int deltaq = vp9_compute_qdelta_by_rate(rc, cpi->common.frame_type, q,
                                          rate_factor, cpi->common.bit_depth);
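  // Cap the magnitude of the (negative) delta-q at max_qdelta_perc percent of
  // the base q.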
  if ((-deltaq) > cr->max_qdelta_perc * q / 100) {
    deltaq = -cr->max_qdelta_perc * q / 100;
  }
  return deltaq;
}

// For the just encoded frame, estimate the bits, incorporating the delta-q
// from non-base segment. For now ignore effect of multiple segments
// (with different delta-q). Note this function is called in the postencode
// (called from rc_update_rate_correction_factors()).
int vp9_cyclic_refresh_estimate_bits_at_q(const VP9_COMP *cpi,
                                          double correction_factor) {
  const VP9_COMMON *const cm = &cpi->common;
  const CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int estimated_bits;
  int mbs = cm->MBs;
  int num8x8bl = mbs << 2;
  // Weight for non-base segments: use actual number of blocks refreshed in
  // previous/just encoded frame. Note number of blocks here is in 8x8 units.
  double weight_segment1 = (double)cr->actual_num_seg1_blocks / num8x8bl;
  double weight_segment2 = (double)cr->actual_num_seg2_blocks / num8x8bl;
  // Take segment weighted average for estimated bits.
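  // That is: bits ~= (1 - w1 - w2) * bits(q) + w1 * bits(q + dq1) +
  // w2 * bits(q + dq2), with w1/w2 the fractions of 8x8 blocks in
  // segments 1 and 2.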
  estimated_bits =
      (int)((1.0 - weight_segment1 - weight_segment2) *
                vp9_estimate_bits_at_q(cm->frame_type, cm->base_qindex, mbs,
                                       correction_factor, cm->bit_depth) +
            weight_segment1 *
                vp9_estimate_bits_at_q(cm->frame_type,
                                       cm->base_qindex + cr->qindex_delta[1],
                                       mbs, correction_factor, cm->bit_depth) +
            weight_segment2 *
                vp9_estimate_bits_at_q(cm->frame_type,
                                       cm->base_qindex + cr->qindex_delta[2],
                                       mbs, correction_factor, cm->bit_depth));
  return estimated_bits;
}

// Prior to encoding the frame, estimate the bits per mb, for a given q = i and
// a corresponding delta-q (for segment 1). This function is called in the
// rc_regulate_q() to set the base qp index.
// Note: the segment map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or
// to 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock, prior to encoding.
int vp9_cyclic_refresh_rc_bits_per_mb(const VP9_COMP *cpi, int i,
                                      double correction_factor) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int bits_per_mb;
  int num8x8bl = cm->MBs << 2;
  // Weight for segment prior to encoding: take the average of the target
  // number for the frame to be encoded and the actual from the previous frame.
  int target_refresh = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
  double weight_segment =
      (double)((target_refresh + cr->actual_num_seg1_blocks +
                cr->actual_num_seg2_blocks) >>
               1) /
      num8x8bl;
  // Compute delta-q corresponding to qindex i.
  int deltaq = compute_deltaq(cpi, i, cr->rate_ratio_qdelta);
  // Take segment weighted average for bits per mb.
  bits_per_mb = (int)((1.0 - weight_segment) *
                          vp9_rc_bits_per_mb(cm->frame_type, i,
                                             correction_factor, cm->bit_depth) +
                      weight_segment *
                          vp9_rc_bits_per_mb(cm->frame_type, i + deltaq,
                                             correction_factor, cm->bit_depth));
  return bits_per_mb;
}

// Prior to coding a given prediction block, of size bsize at (mi_row, mi_col),
// check if we should reset the segment_id, and update the cyclic_refresh map
// and segmentation map.
void vp9_cyclic_refresh_update_segment(VP9_COMP *const cpi, MODE_INFO *const mi,
                                       int mi_row, int mi_col, BLOCK_SIZE bsize,
                                       int64_t rate, int64_t dist, int skip,
                                       struct macroblock_plane *const p) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int refresh_this_block = candidate_refresh_aq(cr, mi, rate, dist, bsize);
  // Default is to not update the refresh map.
  int new_map_value = cr->map[block_index];
  int x = 0;
  int y = 0;
  int is_skin = 0;
  if (refresh_this_block == 0 && bsize <= BLOCK_16X16 &&
      cpi->use_skin_detection) {
    is_skin =
        vp9_compute_skin_block(p[0].src.buf, p[1].src.buf, p[2].src.buf,
                               p[0].src.stride, p[1].src.stride, bsize, 0, 0);
    if (is_skin) refresh_this_block = 1;
  }
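  // For VBR, don't mark blocks that are predicted from the golden frame for
  // refresh (the golden frame already gets a boost in VBR mode).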
  if (cpi->oxcf.rc_mode == VPX_VBR && mi->ref_frame[0] == GOLDEN_FRAME)
    refresh_this_block = 0;
  // If this block is labeled for refresh, check if we should reset the
  // segment_id.
  if (cyclic_refresh_segment_id_boosted(mi->segment_id)) {
    mi->segment_id = refresh_this_block;
    // Reset segment_id if it will be skipped.
    if (skip) mi->segment_id = CR_SEGMENT_ID_BASE;
  }
  // Update the cyclic refresh map, to be used for setting the segmentation map
  // for the next frame. If the block will be refreshed this frame, mark it
  // as clean. The magnitude of the negative value influences how long before
  // we consider it for refresh again.
  if (cyclic_refresh_segment_id_boosted(mi->segment_id)) {
    new_map_value = -cr->time_for_refresh;
  } else if (refresh_this_block) {
    // Else if it is accepted as a candidate for refresh, and has not already
    // been refreshed (marked as 1), then mark it as a candidate for cleanup
    // at a future time (marked as 0); otherwise don't update it.
    if (cr->map[block_index] == 1) new_map_value = 0;
  } else {
    // Leave it marked as a block that is not a candidate for refresh.
    new_map_value = 1;
  }
  // Update entries in the cyclic refresh map with new_map_value, and
  // copy mbmi->segment_id into the global segmentation map.
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      cr->map[map_offset] = new_map_value;
      cpi->segmentation_map[map_offset] = mi->segment_id;
    }
}

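// From the just-encoded superblock, record in last_coded_q_map the qindex at
// which each 8x8 block was coded; for skipped inter blocks, keep the minimum
// of the previous and current qindex.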
void vp9_cyclic_refresh_update_sb_postencode(VP9_COMP *const cpi,
                                             const MODE_INFO *const mi,
                                             int mi_row, int mi_col,
                                             BLOCK_SIZE bsize) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int xmis = VPXMIN(cm->mi_cols - mi_col, bw);
  const int ymis = VPXMIN(cm->mi_rows - mi_row, bh);
  const int block_index = mi_row * cm->mi_cols + mi_col;
  int x, y;
  for (y = 0; y < ymis; y++)
    for (x = 0; x < xmis; x++) {
      int map_offset = block_index + y * cm->mi_cols + x;
      // Inter skip blocks were clearly not coded at the current qindex, so
      // don't update the map for them. For cases where motion is non-zero or
      // the reference frame isn't the previous frame, the previous value in
      // the map for this spatial location is not entirely correct.
      if ((!is_inter_block(mi) || !mi->skip) &&
          mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        cr->last_coded_q_map[map_offset] =
            clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id], 0, MAXQ);
      } else if (is_inter_block(mi) && mi->skip &&
                 mi->segment_id <= CR_SEGMENT_ID_BOOST2) {
        cr->last_coded_q_map[map_offset] = VPXMIN(
            clamp(cm->base_qindex + cr->qindex_delta[mi->segment_id], 0, MAXQ),
            cr->last_coded_q_map[map_offset]);
      }
    }
}

// Update the actual number of blocks to which the segment delta-q was applied.
void vp9_cyclic_refresh_postencode(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  unsigned char *const seg_map = cpi->segmentation_map;
  int mi_row, mi_col;
  cr->actual_num_seg1_blocks = 0;
  cr->actual_num_seg2_blocks = 0;
  for (mi_row = 0; mi_row < cm->mi_rows; mi_row++)
    for (mi_col = 0; mi_col < cm->mi_cols; mi_col++) {
      if (cyclic_refresh_segment_id(seg_map[mi_row * cm->mi_cols + mi_col]) ==
          CR_SEGMENT_ID_BOOST1)
        cr->actual_num_seg1_blocks++;
      else if (cyclic_refresh_segment_id(
                   seg_map[mi_row * cm->mi_cols + mi_col]) ==
               CR_SEGMENT_ID_BOOST2)
        cr->actual_num_seg2_blocks++;
    }
}

// Set golden frame update interval, for non-svc 1 pass CBR mode.
void vp9_cyclic_refresh_set_golden_update(VP9_COMP *const cpi) {
  RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  // Set minimum gf_interval for GF update to a multiple of the refresh period,
  // with some max limit. Depending on past encoding stats, GF flag may be
  // reset and update may not occur until next baseline_gf_interval.
  if (cr->percent_refresh > 0)
    rc->baseline_gf_interval = VPXMIN(4 * (100 / cr->percent_refresh), 40);
  else
    rc->baseline_gf_interval = 40;
  if (cpi->oxcf.rc_mode == VPX_VBR) rc->baseline_gf_interval = 20;
}

// Update some encoding stats (from the just encoded frame). If this frame's
// background has high motion, refresh the golden frame. Otherwise, if the
// golden reference is to be updated, check if we should NOT update the golden
// ref.
void vp9_cyclic_refresh_check_golden_update(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  int mi_row, mi_col;
  double fraction_low = 0.0;
  int low_content_frame = 0;
  MODE_INFO **mi = cm->mi_grid_visible;
  RATE_CONTROL *const rc = &cpi->rc;
  const int rows = cm->mi_rows, cols = cm->mi_cols;
  int cnt1 = 0, cnt2 = 0;
  int force_gf_refresh = 0;
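  // Currently hard-coded to 0: the background-motion counts (cnt1/cnt2) below
  // are not accumulated, so only a pending resize forces the golden update.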
  int flag_force_gf_high_motion = 0;
  for (mi_row = 0; mi_row < rows; mi_row++) {
    for (mi_col = 0; mi_col < cols; mi_col++) {
      if (flag_force_gf_high_motion == 1) {
        int16_t abs_mvr = mi[0]->mv[0].as_mv.row >= 0
                              ? mi[0]->mv[0].as_mv.row
                              : -1 * mi[0]->mv[0].as_mv.row;
        int16_t abs_mvc = mi[0]->mv[0].as_mv.col >= 0
                              ? mi[0]->mv[0].as_mv.col
                              : -1 * mi[0]->mv[0].as_mv.col;
        // Calculate the motion of the background.
        if (abs_mvr <= 16 && abs_mvc <= 16) {
          cnt1++;
          if (abs_mvr == 0 && abs_mvc == 0) cnt2++;
        }
      }
      mi++;
      // Accumulate low_content_frame.
      if (cr->map[mi_row * cols + mi_col] < 1) low_content_frame++;
    }
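    // Advance past the extra entries at the end of the mi grid row (the grid
    // stride is wider than mi_cols).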
    mi += 8;
  }
  // For video conference clips, if the background has high motion in the
  // current frame because of camera movement, set this frame as the golden
  // frame. Use 70% and 5% as the thresholds for golden frame refreshing.
  // Also, force this frame as a golden update frame if this frame will change
  // the resolution (resize_pending != 0).
  if (cpi->resize_pending != 0 ||
      (cnt1 * 100 > (70 * rows * cols) && cnt2 * 20 < cnt1)) {
    vp9_cyclic_refresh_set_golden_update(cpi);
    rc->frames_till_gf_update_due = rc->baseline_gf_interval;
    if (rc->frames_till_gf_update_due > rc->frames_to_key)
      rc->frames_till_gf_update_due = rc->frames_to_key;
    cpi->refresh_golden_frame = 1;
    force_gf_refresh = 1;
  }
  fraction_low = (double)low_content_frame / (rows * cols);
  // Update average.
  cr->low_content_avg = (fraction_low + 3 * cr->low_content_avg) / 4;
  if (!force_gf_refresh && cpi->refresh_golden_frame == 1) {
    // Don't update the golden reference if the amount of low_content for the
    // current encoded frame is small, or if the recursive average of the
    // low_content over the update interval window falls below threshold.
    if (fraction_low < 0.8 || cr->low_content_avg < 0.7)
      cpi->refresh_golden_frame = 0;
    // Reset for the next interval.
    cr->low_content_avg = fraction_low;
  }
}

// Update the segmentation map, and related quantities: cyclic refresh map,
// refresh sb_index, and target number of blocks to be refreshed.
// The map is set to either 0/CR_SEGMENT_ID_BASE (no refresh) or to
// 1/CR_SEGMENT_ID_BOOST1 (refresh) for each superblock.
// Blocks labeled as BOOST1 may later get set to BOOST2 (during the
// encoding of the superblock).
static void cyclic_refresh_update_map(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  unsigned char *const seg_map = cpi->segmentation_map;
  int i, block_count, bl_index, sb_rows, sb_cols, sbs_in_frame;
  int xmis, ymis, x, y;
  int consec_zero_mv_thresh = 0;
  int qindex_thresh = 0;
  int count_sel = 0;
  int count_tot = 0;
  memset(seg_map, CR_SEGMENT_ID_BASE, cm->mi_rows * cm->mi_cols);
  sb_cols = (cm->mi_cols + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
  sb_rows = (cm->mi_rows + MI_BLOCK_SIZE - 1) / MI_BLOCK_SIZE;
  sbs_in_frame = sb_cols * sb_rows;
  // Number of target blocks to get the q delta (segment 1).
  block_count = cr->percent_refresh * cm->mi_rows * cm->mi_cols / 100;
  // Set the segmentation map: cycle through the superblocks, starting at
  // cr->sb_index, and stopping when either block_count blocks have been found
  // to be refreshed, or we have passed through the whole frame.
  assert(cr->sb_index < sbs_in_frame);
  i = cr->sb_index;
  cr->target_num_seg_blocks = 0;
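  // Thresholds used below to select candidate (map == 0) blocks for refresh:
  // a block is selected if its last coded qindex is above qindex_thresh, or
  // if it has accumulated fewer than consec_zero_mv_thresh consecutive
  // zero-motion frames.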
  if (cpi->oxcf.content != VP9E_CONTENT_SCREEN) {
    consec_zero_mv_thresh = 100;
  }
  qindex_thresh =
      cpi->oxcf.content == VP9E_CONTENT_SCREEN
          ? vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST2, cm->base_qindex)
          : vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST1, cm->base_qindex);
  // More aggressive settings for noisy content.
  if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium) {
    consec_zero_mv_thresh = 80;
    qindex_thresh =
        VPXMAX(vp9_get_qindex(&cm->seg, CR_SEGMENT_ID_BOOST1, cm->base_qindex),
               7 * cm->base_qindex >> 3);
  }
  do {
    int sum_map = 0;
    // Get the mi_row/mi_col corresponding to superblock index i.
    int sb_row_index = (i / sb_cols);
    int sb_col_index = i - sb_row_index * sb_cols;
    int mi_row = sb_row_index * MI_BLOCK_SIZE;
    int mi_col = sb_col_index * MI_BLOCK_SIZE;
    assert(mi_row >= 0 && mi_row < cm->mi_rows);
    assert(mi_col >= 0 && mi_col < cm->mi_cols);
    bl_index = mi_row * cm->mi_cols + mi_col;
    // Loop through all 8x8 blocks in superblock and update map.
    xmis =
        VPXMIN(cm->mi_cols - mi_col, num_8x8_blocks_wide_lookup[BLOCK_64X64]);
    ymis =
        VPXMIN(cm->mi_rows - mi_row, num_8x8_blocks_high_lookup[BLOCK_64X64]);
    for (y = 0; y < ymis; y++) {
      for (x = 0; x < xmis; x++) {
        const int bl_index2 = bl_index + y * cm->mi_cols + x;
        // If the block is a candidate for cleanup, then mark it for possible
        // boost/refresh (segment 1). The segment id may get reset to 0 later
        // if the block gets coded with anything other than ZEROMV.
        if (cr->map[bl_index2] == 0) {
          count_tot++;
          if (cr->last_coded_q_map[bl_index2] > qindex_thresh ||
              cpi->consec_zero_mv[bl_index2] < consec_zero_mv_thresh) {
            sum_map++;
            count_sel++;
          }
        } else if (cr->map[bl_index2] < 0) {
          cr->map[bl_index2]++;
        }
      }
    }
    // Enforce constant segment over superblock.
    // If segment is at least half of superblock, set to 1.
    if (sum_map >= xmis * ymis / 2) {
      for (y = 0; y < ymis; y++)
        for (x = 0; x < xmis; x++) {
          seg_map[bl_index + y * cm->mi_cols + x] = CR_SEGMENT_ID_BOOST1;
        }
      cr->target_num_seg_blocks += xmis * ymis;
    }
    i++;
    if (i == sbs_in_frame) {
      i = 0;
    }
  } while (cr->target_num_seg_blocks < block_count && i != cr->sb_index);
  cr->sb_index = i;
  cr->reduce_refresh = 0;
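  // If fewer than 3/4 of the candidate (map == 0) blocks were selected above,
  // reduce the refresh percentage for the next frame.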
  if (count_sel < (3 * count_tot) >> 2) cr->reduce_refresh = 1;
}

// Set cyclic refresh parameters.
void vp9_cyclic_refresh_update_parameters(VP9_COMP *const cpi) {
  const RATE_CONTROL *const rc = &cpi->rc;
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  cr->percent_refresh = 10;
  if (cr->reduce_refresh) cr->percent_refresh = 5;
  cr->max_qdelta_perc = 50;
  cr->time_for_refresh = 0;
  cr->motion_thresh = 32;
  cr->rate_boost_fac = 15;
  // Use larger delta-qp (increase rate_ratio_qdelta) for the first few (~4)
  // periods of the refresh cycle, after a key frame.
  // Account for larger interval on base layer for temporal layers.
  if (cr->percent_refresh > 0 &&
      rc->frames_since_key <
          (4 * cpi->svc.number_temporal_layers) * (100 / cr->percent_refresh)) {
    cr->rate_ratio_qdelta = 3.0;
  } else {
    cr->rate_ratio_qdelta = 2.0;
    if (cpi->noise_estimate.enabled && cpi->noise_estimate.level >= kMedium) {
      // Reduce the delta-qp if the estimated source noise is above threshold.
      cr->rate_ratio_qdelta = 1.7;
      cr->rate_boost_fac = 13;
    }
  }
  // Adjust some parameters for low resolutions at low bitrates.
  if (cm->width <= 352 && cm->height <= 288 && rc->avg_frame_bandwidth < 3400) {
    cr->motion_thresh = 4;
    cr->rate_boost_fac = 10;
  }
  if (cpi->svc.spatial_layer_id > 0) {
    cr->motion_thresh = 4;
    cr->rate_boost_fac = 12;
  }
  if (cpi->oxcf.rc_mode == VPX_VBR) {
    // To be adjusted for VBR mode, e.g., based on gf period and boost.
    // For now use a smaller qp-delta (than CBR), no second boosted segment,
    // and turn off refresh on golden-refresh frames (since golden is already
    // boosted).
    cr->percent_refresh = 10;
    cr->rate_ratio_qdelta = 1.5;
    cr->rate_boost_fac = 10;
    if (cpi->refresh_golden_frame == 1) {
      cr->percent_refresh = 0;
      cr->rate_ratio_qdelta = 1.0;
    }
  }
}

// Setup cyclic background refresh: set delta q and segmentation map.
void vp9_cyclic_refresh_setup(VP9_COMP *const cpi) {
  VP9_COMMON *const cm = &cpi->common;
  const RATE_CONTROL *const rc = &cpi->rc;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  struct segmentation *const seg = &cm->seg;
  // TODO(marpan): Look into whether we should reduce the amount/delta-qp
  // instead of completely shutting off at low bitrates. For now keep it on.
  // const int apply_cyclic_refresh = apply_cyclic_refresh_bitrate(cm, rc);
  const int apply_cyclic_refresh = 1;
  if (cm->current_video_frame == 0) cr->low_content_avg = 0.0;
  // Don't apply refresh on key frame or temporal enhancement layer frames.
  if (!apply_cyclic_refresh || (cm->frame_type == KEY_FRAME) ||
      (cpi->force_update_segmentation) || (cpi->svc.temporal_layer_id > 0)) {
    // Set segmentation map to 0 and disable.
    unsigned char *const seg_map = cpi->segmentation_map;
    memset(seg_map, 0, cm->mi_rows * cm->mi_cols);
    vp9_disable_segmentation(&cm->seg);
    if (cm->frame_type == KEY_FRAME) {
      memset(cr->last_coded_q_map, MAXQ,
             cm->mi_rows * cm->mi_cols * sizeof(*cr->last_coded_q_map));
      cr->sb_index = 0;
    }
    return;
  } else {
    int qindex_delta = 0;
    int qindex2;
    const double q = vp9_convert_qindex_to_q(cm->base_qindex, cm->bit_depth);
    vpx_clear_system_state();
    // Set rate threshold to some multiple (set to 2 for now) of the target
    // rate (target is given by sb64_target_rate and scaled by 256).
    cr->thresh_rate_sb = ((int64_t)(rc->sb64_target_rate) << 8) << 2;
    // Distortion threshold, quadratic in Q, scale factor to be adjusted.
    // q will not exceed 457, so (q * q) is within 32bit; see:
    // vp9_convert_qindex_to_q(), vp9_ac_quant(), ac_qlookup*[].
    cr->thresh_dist_sb = ((int64_t)(q * q)) << 2;
    // Set up segmentation.
    // Clear down the segment map.
    vp9_enable_segmentation(&cm->seg);
    vp9_clearall_segfeatures(seg);
    // Select delta coding method.
    seg->abs_delta = SEGMENT_DELTADATA;
    // Note: setting temporal_update has no effect, as the seg-map coding
    // method (temporal or spatial) is determined in
    // vp9_choose_segmap_coding_method(), based on the coding cost of each
    // method. For error_resilient mode, the last_frame_seg_map is set to 0,
    // so if temporal coding is used, it is relative to a zero previous map.
    // seg->temporal_update = 0;
    // Segment BASE "Q" feature is disabled so it defaults to the baseline Q.
    vp9_disable_segfeature(seg, CR_SEGMENT_ID_BASE, SEG_LVL_ALT_Q);
    // Use segment BOOST1 for in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q);
    // Use segment BOOST2 for more aggressive in-frame Q adjustment.
    vp9_enable_segfeature(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q);
    // Set the q delta for segment BOOST1.
    qindex_delta = compute_deltaq(cpi, cm->base_qindex, cr->rate_ratio_qdelta);
    cr->qindex_delta[1] = qindex_delta;
    // Compute rd-mult for segment BOOST1.
    qindex2 = clamp(cm->base_qindex + cm->y_dc_delta_q + qindex_delta, 0, MAXQ);
    cr->rdmult = vp9_compute_rd_mult(cpi, qindex2);
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST1, SEG_LVL_ALT_Q, qindex_delta);
    // Set a more aggressive (higher) q delta for segment BOOST2.
    qindex_delta = compute_deltaq(
        cpi, cm->base_qindex,
        VPXMIN(CR_MAX_RATE_TARGET_RATIO,
               0.1 * cr->rate_boost_fac * cr->rate_ratio_qdelta));
    cr->qindex_delta[2] = qindex_delta;
    vp9_set_segdata(seg, CR_SEGMENT_ID_BOOST2, SEG_LVL_ALT_Q, qindex_delta);
    // Reset if a resolution change has occurred.
    if (cpi->resize_pending != 0) vp9_cyclic_refresh_reset_resize(cpi);
    // Update the segmentation and refresh map.
    cyclic_refresh_update_map(cpi);
  }
}

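// Return the rd multiplier that was computed for the boosted (BOOST1) segment
// in vp9_cyclic_refresh_setup().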
int vp9_cyclic_refresh_get_rdmult(const CYCLIC_REFRESH *cr) {
  return cr->rdmult;
}

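// Reset cyclic refresh state after a resolution change: clear the refresh and
// last-coded-q maps, restart the superblock cycle, and force golden and
// alt-ref frame updates.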
void vp9_cyclic_refresh_reset_resize(VP9_COMP *const cpi) {
  const VP9_COMMON *const cm = &cpi->common;
  CYCLIC_REFRESH *const cr = cpi->cyclic_refresh;
  memset(cr->map, 0, cm->mi_rows * cm->mi_cols);
  memset(cr->last_coded_q_map, MAXQ, cm->mi_rows * cm->mi_cols);
  cr->sb_index = 0;
  cpi->refresh_golden_frame = 1;
  cpi->refresh_alt_ref_frame = 1;
}