pickinter.c 46 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347
  1. /*
  2. * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
  10. #include <assert.h>
  11. #include <limits.h>
  12. #include "vpx_config.h"
  13. #include "./vpx_dsp_rtcd.h"
  14. #include "onyx_int.h"
  15. #include "modecosts.h"
  16. #include "encodeintra.h"
  17. #include "vp8/common/common.h"
  18. #include "vp8/common/entropymode.h"
  19. #include "pickinter.h"
  20. #include "vp8/common/findnearmv.h"
  21. #include "encodemb.h"
  22. #include "vp8/common/reconinter.h"
  23. #include "vp8/common/reconintra.h"
  24. #include "vp8/common/reconintra4x4.h"
  25. #include "vpx_dsp/variance.h"
  26. #include "mcomp.h"
  27. #include "vp8/common/vp8_skin_detection.h"
  28. #include "rdopt.h"
  29. #include "vpx_dsp/vpx_dsp_common.h"
  30. #include "vpx_mem/vpx_mem.h"
  31. #if CONFIG_TEMPORAL_DENOISING
  32. #include "denoising.h"
  33. #endif
  34. #ifdef SPEEDSTATS
  35. extern unsigned int cnt_pm;
  36. #endif
  37. extern const int vp8_ref_frame_order[MAX_MODES];
  38. extern const MB_PREDICTION_MODE vp8_mode_order[MAX_MODES];
  39. static int macroblock_corner_grad(unsigned char *signal, int stride,
  40. int offsetx, int offsety, int sgnx,
  41. int sgny) {
  42. int y1 = signal[offsetx * stride + offsety];
  43. int y2 = signal[offsetx * stride + offsety + sgny];
  44. int y3 = signal[(offsetx + sgnx) * stride + offsety];
  45. int y4 = signal[(offsetx + sgnx) * stride + offsety + sgny];
  46. return VPXMAX(VPXMAX(abs(y1 - y2), abs(y1 - y3)), abs(y1 - y4));
  47. }
  48. static int check_dot_artifact_candidate(VP8_COMP *cpi, MACROBLOCK *x,
  49. unsigned char *target_last, int stride,
  50. unsigned char *last_ref, int mb_row,
  51. int mb_col, int channel) {
  52. int threshold1 = 6;
  53. int threshold2 = 3;
  54. unsigned int max_num = (cpi->common.MBs) / 10;
  55. int grad_last = 0;
  56. int grad_source = 0;
  57. int index = mb_row * cpi->common.mb_cols + mb_col;
  58. // Threshold for #consecutive (base layer) frames using zero_last mode.
  59. int num_frames = 30;
  60. int shift = 15;
  61. if (channel > 0) {
  62. shift = 7;
  63. }
  64. if (cpi->oxcf.number_of_layers > 1) {
  65. num_frames = 20;
  66. }
  67. x->zero_last_dot_suppress = 0;
  68. // Blocks on base layer frames that have been using ZEROMV_LAST repeatedly
  69. // (i.e, at least |x| consecutive frames are candidates for increasing the
  70. // rd adjustment for zero_last mode.
  71. // Only allow this for at most |max_num| blocks per frame.
  72. // Don't allow this for screen content input.
  73. if (cpi->current_layer == 0 &&
  74. cpi->consec_zero_last_mvbias[index] > num_frames &&
  75. x->mbs_zero_last_dot_suppress < max_num &&
  76. !cpi->oxcf.screen_content_mode) {
  77. // If this block is checked here, label it so we don't check it again until
  78. // ~|x| framaes later.
  79. x->zero_last_dot_suppress = 1;
  80. // Dot artifact is noticeable as strong gradient at corners of macroblock,
  81. // for flat areas. As a simple detector for now, we look for a high
  82. // corner gradient on last ref, and a smaller gradient on source.
  83. // Check 4 corners, return if any satisfy condition.
  84. // Top-left:
  85. grad_last = macroblock_corner_grad(last_ref, stride, 0, 0, 1, 1);
  86. grad_source = macroblock_corner_grad(target_last, stride, 0, 0, 1, 1);
  87. if (grad_last >= threshold1 && grad_source <= threshold2) {
  88. x->mbs_zero_last_dot_suppress++;
  89. return 1;
  90. }
  91. // Top-right:
  92. grad_last = macroblock_corner_grad(last_ref, stride, 0, shift, 1, -1);
  93. grad_source = macroblock_corner_grad(target_last, stride, 0, shift, 1, -1);
  94. if (grad_last >= threshold1 && grad_source <= threshold2) {
  95. x->mbs_zero_last_dot_suppress++;
  96. return 1;
  97. }
  98. // Bottom-left:
  99. grad_last = macroblock_corner_grad(last_ref, stride, shift, 0, -1, 1);
  100. grad_source = macroblock_corner_grad(target_last, stride, shift, 0, -1, 1);
  101. if (grad_last >= threshold1 && grad_source <= threshold2) {
  102. x->mbs_zero_last_dot_suppress++;
  103. return 1;
  104. }
  105. // Bottom-right:
  106. grad_last = macroblock_corner_grad(last_ref, stride, shift, shift, -1, -1);
  107. grad_source =
  108. macroblock_corner_grad(target_last, stride, shift, shift, -1, -1);
  109. if (grad_last >= threshold1 && grad_source <= threshold2) {
  110. x->mbs_zero_last_dot_suppress++;
  111. return 1;
  112. }
  113. return 0;
  114. }
  115. return 0;
  116. }
  117. int vp8_skip_fractional_mv_step(MACROBLOCK *mb, BLOCK *b, BLOCKD *d,
  118. int_mv *bestmv, int_mv *ref_mv,
  119. int error_per_bit,
  120. const vp8_variance_fn_ptr_t *vfp,
  121. int *mvcost[2], int *distortion,
  122. unsigned int *sse) {
  123. (void)b;
  124. (void)d;
  125. (void)ref_mv;
  126. (void)error_per_bit;
  127. (void)vfp;
  128. (void)mb;
  129. (void)mvcost;
  130. (void)distortion;
  131. (void)sse;
  132. bestmv->as_mv.row *= 8;
  133. bestmv->as_mv.col *= 8;
  134. return 0;
  135. }
  136. int vp8_get_inter_mbpred_error(MACROBLOCK *mb, const vp8_variance_fn_ptr_t *vfp,
  137. unsigned int *sse, int_mv this_mv) {
  138. BLOCK *b = &mb->block[0];
  139. BLOCKD *d = &mb->e_mbd.block[0];
  140. unsigned char *what = (*(b->base_src) + b->src);
  141. int what_stride = b->src_stride;
  142. int pre_stride = mb->e_mbd.pre.y_stride;
  143. unsigned char *in_what = mb->e_mbd.pre.y_buffer + d->offset;
  144. int in_what_stride = pre_stride;
  145. int xoffset = this_mv.as_mv.col & 7;
  146. int yoffset = this_mv.as_mv.row & 7;
  147. in_what += (this_mv.as_mv.row >> 3) * pre_stride + (this_mv.as_mv.col >> 3);
  148. if (xoffset | yoffset) {
  149. return vfp->svf(in_what, in_what_stride, xoffset, yoffset, what,
  150. what_stride, sse);
  151. } else {
  152. return vfp->vf(what, what_stride, in_what, in_what_stride, sse);
  153. }
  154. }
  155. static int get_prediction_error(BLOCK *be, BLOCKD *b) {
  156. unsigned char *sptr;
  157. unsigned char *dptr;
  158. sptr = (*(be->base_src) + be->src);
  159. dptr = b->predictor;
  160. return vpx_get4x4sse_cs(sptr, be->src_stride, dptr, 16);
  161. }
  162. static int pick_intra4x4block(MACROBLOCK *x, int ib,
  163. B_PREDICTION_MODE *best_mode,
  164. const int *mode_costs, int *bestrate,
  165. int *bestdistortion) {
  166. BLOCKD *b = &x->e_mbd.block[ib];
  167. BLOCK *be = &x->block[ib];
  168. int dst_stride = x->e_mbd.dst.y_stride;
  169. unsigned char *dst = x->e_mbd.dst.y_buffer + b->offset;
  170. B_PREDICTION_MODE mode;
  171. int best_rd = INT_MAX;
  172. int rate;
  173. int distortion;
  174. unsigned char *Above = dst - dst_stride;
  175. unsigned char *yleft = dst - 1;
  176. unsigned char top_left = Above[-1];
  177. for (mode = B_DC_PRED; mode <= B_HE_PRED; ++mode) {
  178. int this_rd;
  179. rate = mode_costs[mode];
  180. vp8_intra4x4_predict(Above, yleft, dst_stride, mode, b->predictor, 16,
  181. top_left);
  182. distortion = get_prediction_error(be, b);
  183. this_rd = RDCOST(x->rdmult, x->rddiv, rate, distortion);
  184. if (this_rd < best_rd) {
  185. *bestrate = rate;
  186. *bestdistortion = distortion;
  187. best_rd = this_rd;
  188. *best_mode = mode;
  189. }
  190. }
  191. b->bmi.as_mode = *best_mode;
  192. vp8_encode_intra4x4block(x, ib);
  193. return best_rd;
  194. }
/* Pick an intra 4x4 mode for each of the 16 luma subblocks and accumulate
 * total rate and distortion. Returns the combined RD cost, or INT_MAX (with
 * |*best_dist| = INT_MAX) if the running distortion exceeded the caller's
 * best-so-far value and the search broke out early.
 */
static int pick_intra4x4mby_modes(MACROBLOCK *mb, int *Rate, int *best_dist) {
  MACROBLOCKD *const xd = &mb->e_mbd;
  int i;
  /* Start with the cost of signalling B_PRED at the macroblock level. */
  int cost = mb->mbmode_cost[xd->frame_type][B_PRED];
  int error;
  int distortion = 0;
  const int *bmode_costs;

  /* Make "above-right" samples available for right-edge subblocks. */
  intra_prediction_down_copy(xd, xd->dst.y_buffer - xd->dst.y_stride + 16);

  bmode_costs = mb->inter_bmode_costs;

  for (i = 0; i < 16; ++i) {
    MODE_INFO *const mic = xd->mode_info_context;
    const int mis = xd->mode_info_stride;

    B_PREDICTION_MODE best_mode = B_MODE_COUNT;
    int r = 0, d = 0;

    /* Key frames condition the 4x4 mode costs on the above/left subblock
     * modes; inter frames use a single shared table. */
    if (mb->e_mbd.frame_type == KEY_FRAME) {
      const B_PREDICTION_MODE A = above_block_mode(mic, i, mis);
      const B_PREDICTION_MODE L = left_block_mode(mic, i);

      bmode_costs = mb->bmode_costs[A][L];
    }

    pick_intra4x4block(mb, i, &best_mode, bmode_costs, &r, &d);

    cost += r;
    distortion += d;
    assert(best_mode != B_MODE_COUNT);
    mic->bmi[i].as_mode = best_mode;

    /* Break out case where we have already exceeded best so far value
     * that was passed in
     */
    if (distortion > *best_dist) break;
  }

  *Rate = cost;

  if (i == 16) {
    /* All 16 subblocks searched: report the full RD cost. */
    *best_dist = distortion;
    error = RDCOST(mb->rdmult, mb->rddiv, cost, distortion);
  } else {
    /* Early break: B_PRED cannot beat the current best. */
    *best_dist = INT_MAX;
    error = INT_MAX;
  }

  return error;
}
/* Choose the chroma (UV) intra prediction mode for the macroblock.
 * Rather than building full predictors, this accumulates the sum of
 * squared errors of the four candidates (DC, V, H, TM) directly against
 * the 8x8 chroma source and stores the cheapest mode in mbmi.uv_mode.
 */
static void pick_intra_mbuv_mode(MACROBLOCK *mb) {
  MACROBLOCKD *x = &mb->e_mbd;
  unsigned char *uabove_row = x->dst.u_buffer - x->dst.uv_stride;
  unsigned char *vabove_row = x->dst.v_buffer - x->dst.uv_stride;
  /* Chroma source: blocks 16/20 are the first U/V source blocks. */
  unsigned char *usrc_ptr = (mb->block[16].src + *mb->block[16].base_src);
  unsigned char *vsrc_ptr = (mb->block[20].src + *mb->block[20].base_src);
  int uvsrc_stride = mb->block[16].src_stride;
  unsigned char uleft_col[8];
  unsigned char vleft_col[8];
  unsigned char utop_left = uabove_row[-1];
  unsigned char vtop_left = vabove_row[-1];
  int i, j;
  int expected_udc;
  int expected_vdc;
  int shift;
  int Uaverage = 0;
  int Vaverage = 0;
  int diff;
  int pred_error[4] = { 0, 0, 0, 0 }, best_error = INT_MAX;
  MB_PREDICTION_MODE best_mode = MB_MODE_COUNT;

  /* Gather the left reconstructed column for both planes. */
  for (i = 0; i < 8; ++i) {
    uleft_col[i] = x->dst.u_buffer[i * x->dst.uv_stride - 1];
    vleft_col[i] = x->dst.v_buffer[i * x->dst.uv_stride - 1];
  }

  /* DC predictor: rounded average of available above/left neighbors,
   * 128 when neither edge is available. */
  if (!x->up_available && !x->left_available) {
    expected_udc = 128;
    expected_vdc = 128;
  } else {
    shift = 2;

    if (x->up_available) {
      for (i = 0; i < 8; ++i) {
        Uaverage += uabove_row[i];
        Vaverage += vabove_row[i];
      }

      shift++;
    }

    if (x->left_available) {
      for (i = 0; i < 8; ++i) {
        Uaverage += uleft_col[i];
        Vaverage += vleft_col[i];
      }

      shift++;
    }

    /* shift ends at 3 with one edge available, 4 with both. */
    expected_udc = (Uaverage + (1 << (shift - 1))) >> shift;
    expected_vdc = (Vaverage + (1 << (shift - 1))) >> shift;
  }

  /* Accumulate per-mode SSE over the 8x8 chroma block. */
  for (i = 0; i < 8; ++i) {
    for (j = 0; j < 8; ++j) {
      /* TM predictor: left + above - top_left, clamped to [0, 255]. */
      int predu = uleft_col[i] + uabove_row[j] - utop_left;
      int predv = vleft_col[i] + vabove_row[j] - vtop_left;
      int u_p, v_p;

      u_p = usrc_ptr[j];
      v_p = vsrc_ptr[j];

      if (predu < 0) predu = 0;
      if (predu > 255) predu = 255;
      if (predv < 0) predv = 0;
      if (predv > 255) predv = 255;

      diff = u_p - expected_udc;
      pred_error[DC_PRED] += diff * diff;
      diff = v_p - expected_vdc;
      pred_error[DC_PRED] += diff * diff;

      diff = u_p - uabove_row[j];
      pred_error[V_PRED] += diff * diff;
      diff = v_p - vabove_row[j];
      pred_error[V_PRED] += diff * diff;

      diff = u_p - uleft_col[i];
      pred_error[H_PRED] += diff * diff;
      diff = v_p - vleft_col[i];
      pred_error[H_PRED] += diff * diff;

      diff = u_p - predu;
      pred_error[TM_PRED] += diff * diff;
      diff = v_p - predv;
      pred_error[TM_PRED] += diff * diff;
    }

    usrc_ptr += uvsrc_stride;
    vsrc_ptr += uvsrc_stride;

    /* After 4 rows, rebase onto the second row of chroma source blocks
     * (blocks 18 and 22). */
    if (i == 3) {
      usrc_ptr = (mb->block[18].src + *mb->block[18].base_src);
      vsrc_ptr = (mb->block[22].src + *mb->block[22].base_src);
    }
  }

  /* Select the mode with the smallest accumulated error. */
  for (i = DC_PRED; i <= TM_PRED; ++i) {
    if (best_error > pred_error[i]) {
      best_error = pred_error[i];
      best_mode = (MB_PREDICTION_MODE)i;
    }
  }

  assert(best_mode != MB_MODE_COUNT);
  mb->e_mbd.mode_info_context->mbmi.uv_mode = best_mode;
}
/* Update the encoder's motion vector statistics for the mode just chosen
 * for this macroblock. */
static void update_mvcount(MACROBLOCK *x, int_mv *best_ref_mv) {
  MACROBLOCKD *xd = &x->e_mbd;
  /* Split MV modes currently not supported when RD is not enabled,
   * therefore, only need to modify MVcount in NEWMV mode. */
  if (xd->mode_info_context->mbmi.mode == NEWMV) {
    /* Bin the row/col mv difference from the reference at half resolution
     * (>> 1), offset by mv_max so negative differences index correctly. */
    x->MVcount[0][mv_max + ((xd->mode_info_context->mbmi.mv.as_mv.row -
                             best_ref_mv->as_mv.row) >>
                            1)]++;
    x->MVcount[1][mv_max + ((xd->mode_info_context->mbmi.mv.as_mv.col -
                             best_ref_mv->as_mv.col) >>
                            1)]++;
  }
}
  337. #if CONFIG_MULTI_RES_ENCODING
/* Multi-resolution encoding: fetch the co-located macroblock's result from
 * the lower-resolution encoder so it can seed this encoder's mode search.
 * Outputs the parent's dissimilarity score, reference frame, prediction
 * mode, and (for inter blocks) its motion vector scaled to this resolution
 * and clamped to the valid range for this macroblock.
 */
static void get_lower_res_motion_info(VP8_COMP *cpi, MACROBLOCKD *xd,
                                      int *dissim, int *parent_ref_frame,
                                      MB_PREDICTION_MODE *parent_mode,
                                      int_mv *parent_ref_mv, int mb_row,
                                      int mb_col) {
  LOWER_RES_MB_INFO *store_mode_info =
      ((LOWER_RES_FRAME_INFO *)cpi->oxcf.mr_low_res_mode_info)->mb_info;
  unsigned int parent_mb_index;

  /* Consider different down_sampling_factor. */
  {
    /* TODO: Removed the loop that supports special down_sampling_factor
     * such as 2, 4, 8. Will revisit it if needed.
     * Should also try using a look-up table to see if it helps
     * performance. */
    int parent_mb_row, parent_mb_col;

    /* Map this MB's position to the co-located MB in the lower resolution
     * (den/num inverts the downsampling ratio). */
    parent_mb_row = mb_row * cpi->oxcf.mr_down_sampling_factor.den /
                    cpi->oxcf.mr_down_sampling_factor.num;
    parent_mb_col = mb_col * cpi->oxcf.mr_down_sampling_factor.den /
                    cpi->oxcf.mr_down_sampling_factor.num;
    parent_mb_index = parent_mb_row * cpi->mr_low_res_mb_cols + parent_mb_col;
  }

  /* Read lower-resolution mode & motion result from memory.*/
  *parent_ref_frame = store_mode_info[parent_mb_index].ref_frame;
  *parent_mode = store_mode_info[parent_mb_index].mode;
  *dissim = store_mode_info[parent_mb_index].dissim;

  /* For highest-resolution encoder, adjust dissim value. Lower its quality
   * for good performance. */
  if (cpi->oxcf.mr_encoder_id == (cpi->oxcf.mr_total_resolutions - 1))
    *dissim >>= 1;

  if (*parent_ref_frame != INTRA_FRAME) {
    /* Consider different down_sampling_factor.
     * The result can be rounded to be more precise, but it takes more time.
     */
    /* Scale the parent's mv up by num/den to this resolution. */
    (*parent_ref_mv).as_mv.row = store_mode_info[parent_mb_index].mv.as_mv.row *
                                 cpi->oxcf.mr_down_sampling_factor.num /
                                 cpi->oxcf.mr_down_sampling_factor.den;
    (*parent_ref_mv).as_mv.col = store_mode_info[parent_mb_index].mv.as_mv.col *
                                 cpi->oxcf.mr_down_sampling_factor.num /
                                 cpi->oxcf.mr_down_sampling_factor.den;

    /* Clamp the scaled mv to the range allowed for this macroblock. */
    vp8_clamp_mv2(parent_ref_mv, xd);
  }
}
  380. #endif
  381. static void check_for_encode_breakout(unsigned int sse, MACROBLOCK *x) {
  382. MACROBLOCKD *xd = &x->e_mbd;
  383. unsigned int threshold =
  384. (xd->block[0].dequant[1] * xd->block[0].dequant[1] >> 4);
  385. if (threshold < x->encode_breakout) threshold = x->encode_breakout;
  386. if (sse < threshold) {
  387. /* Check u and v to make sure skip is ok */
  388. unsigned int sse2 = 0;
  389. sse2 = VP8_UVSSE(x);
  390. if (sse2 * 2 < x->encode_breakout) {
  391. x->skip = 1;
  392. } else {
  393. x->skip = 0;
  394. }
  395. }
  396. }
/* Compute the RD cost of the currently-selected inter mode for this
 * macroblock, applying the zero-motion bias |rd_adj| (a percentage; 100
 * means no change) where applicable, and run the encode-breakout check.
 * Returns the (possibly adjusted) RD cost, or INT_MAX for inactive MBs.
 */
static int evaluate_inter_mode(unsigned int *sse, int rate2, int *distortion2,
                               VP8_COMP *cpi, MACROBLOCK *x, int rd_adj) {
  MB_PREDICTION_MODE this_mode = x->e_mbd.mode_info_context->mbmi.mode;
  int_mv mv = x->e_mbd.mode_info_context->mbmi.mv;
  int this_rd;
  int denoise_aggressive = 0;
  /* Exit early and don't compute the distortion if this macroblock
   * is marked inactive. */
  if (cpi->active_map_enabled && x->active_ptr[0] == 0) {
    *sse = 0;
    *distortion2 = 0;
    x->skip = 1;
    return INT_MAX;
  }

  /* NEWMV with half-pixel search already produced the distortion during
   * the sub-pel search; otherwise (re)compute it here. */
  if ((this_mode != NEWMV) || !(cpi->sf.half_pixel_search) ||
      cpi->common.full_pixel == 1) {
    *distortion2 =
        vp8_get_inter_mbpred_error(x, &cpi->fn_ptr[BLOCK_16X16], sse, mv);
  }

  this_rd = RDCOST(x->rdmult, x->rddiv, rate2, *distortion2);

#if CONFIG_TEMPORAL_DENOISING
  if (cpi->oxcf.noise_sensitivity > 0) {
    denoise_aggressive =
        (cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive) ? 1 : 0;
  }
#endif

  // Adjust rd for ZEROMV and LAST, if LAST is the closest reference frame.
  // TODO: We should also add condition on distance of closest to current.
  if (!cpi->oxcf.screen_content_mode && this_mode == ZEROMV &&
      x->e_mbd.mode_info_context->mbmi.ref_frame == LAST_FRAME &&
      (denoise_aggressive || (cpi->closest_reference_frame == LAST_FRAME))) {
    // No adjustment if block is considered to be skin area.
    if (x->is_skin) rd_adj = 100;

    /* 64-bit intermediate avoids overflow before the /100 rescale. */
    this_rd = (int)(((int64_t)this_rd) * rd_adj / 100);
  }

  check_for_encode_breakout(*sse, x);

  return this_rd;
}
  435. static void calculate_zeromv_rd_adjustment(VP8_COMP *cpi, MACROBLOCK *x,
  436. int *rd_adjustment) {
  437. MODE_INFO *mic = x->e_mbd.mode_info_context;
  438. int_mv mv_l, mv_a, mv_al;
  439. int local_motion_check = 0;
  440. if (cpi->lf_zeromv_pct > 40) {
  441. /* left mb */
  442. mic -= 1;
  443. mv_l = mic->mbmi.mv;
  444. if (mic->mbmi.ref_frame != INTRA_FRAME) {
  445. if (abs(mv_l.as_mv.row) < 8 && abs(mv_l.as_mv.col) < 8) {
  446. local_motion_check++;
  447. }
  448. }
  449. /* above-left mb */
  450. mic -= x->e_mbd.mode_info_stride;
  451. mv_al = mic->mbmi.mv;
  452. if (mic->mbmi.ref_frame != INTRA_FRAME) {
  453. if (abs(mv_al.as_mv.row) < 8 && abs(mv_al.as_mv.col) < 8) {
  454. local_motion_check++;
  455. }
  456. }
  457. /* above mb */
  458. mic += 1;
  459. mv_a = mic->mbmi.mv;
  460. if (mic->mbmi.ref_frame != INTRA_FRAME) {
  461. if (abs(mv_a.as_mv.row) < 8 && abs(mv_a.as_mv.col) < 8) {
  462. local_motion_check++;
  463. }
  464. }
  465. if (((!x->e_mbd.mb_to_top_edge || !x->e_mbd.mb_to_left_edge) &&
  466. local_motion_check > 0) ||
  467. local_motion_check > 2) {
  468. *rd_adjustment = 80;
  469. } else if (local_motion_check > 0) {
  470. *rd_adjustment = 90;
  471. }
  472. }
  473. }
  474. void vp8_pick_inter_mode(VP8_COMP *cpi, MACROBLOCK *x, int recon_yoffset,
  475. int recon_uvoffset, int *returnrate,
  476. int *returndistortion, int *returnintra, int mb_row,
  477. int mb_col) {
  478. BLOCK *b = &x->block[0];
  479. BLOCKD *d = &x->e_mbd.block[0];
  480. MACROBLOCKD *xd = &x->e_mbd;
  481. MB_MODE_INFO best_mbmode;
  482. int_mv best_ref_mv_sb[2] = { { 0 }, { 0 } };
  483. int_mv mode_mv_sb[2][MB_MODE_COUNT];
  484. int_mv best_ref_mv;
  485. int_mv *mode_mv;
  486. MB_PREDICTION_MODE this_mode;
  487. int num00;
  488. int mdcounts[4];
  489. int best_rd = INT_MAX;
  490. int rd_adjustment = 100;
  491. int best_intra_rd = INT_MAX;
  492. int mode_index;
  493. int rate;
  494. int rate2;
  495. int distortion2;
  496. int bestsme = INT_MAX;
  497. int best_mode_index = 0;
  498. unsigned int sse = UINT_MAX, best_rd_sse = UINT_MAX;
  499. #if CONFIG_TEMPORAL_DENOISING
  500. unsigned int zero_mv_sse = UINT_MAX, best_sse = UINT_MAX;
  501. #endif
  502. int sf_improved_mv_pred = cpi->sf.improved_mv_pred;
  503. #if CONFIG_MULTI_RES_ENCODING
  504. int dissim = INT_MAX;
  505. int parent_ref_frame = 0;
  506. int_mv parent_ref_mv;
  507. MB_PREDICTION_MODE parent_mode = 0;
  508. int parent_ref_valid = 0;
  509. #endif
  510. int_mv mvp;
  511. int near_sadidx[8] = { 0, 1, 2, 3, 4, 5, 6, 7 };
  512. int saddone = 0;
  513. /* search range got from mv_pred(). It uses step_param levels. (0-7) */
  514. int sr = 0;
  515. unsigned char *plane[4][3] = { { 0, 0 } };
  516. int ref_frame_map[4];
  517. int sign_bias = 0;
  518. int dot_artifact_candidate = 0;
  519. get_predictor_pointers(cpi, plane, recon_yoffset, recon_uvoffset);
  520. // If the current frame is using LAST as a reference, check for
  521. // biasing the mode selection for dot artifacts.
  522. if (cpi->ref_frame_flags & VP8_LAST_FRAME) {
  523. unsigned char *target_y = x->src.y_buffer;
  524. unsigned char *target_u = x->block[16].src + *x->block[16].base_src;
  525. unsigned char *target_v = x->block[20].src + *x->block[20].base_src;
  526. int stride = x->src.y_stride;
  527. int stride_uv = x->block[16].src_stride;
  528. #if CONFIG_TEMPORAL_DENOISING
  529. if (cpi->oxcf.noise_sensitivity) {
  530. const int uv_denoise = (cpi->oxcf.noise_sensitivity >= 2) ? 1 : 0;
  531. target_y =
  532. cpi->denoiser.yv12_running_avg[LAST_FRAME].y_buffer + recon_yoffset;
  533. stride = cpi->denoiser.yv12_running_avg[LAST_FRAME].y_stride;
  534. if (uv_denoise) {
  535. target_u = cpi->denoiser.yv12_running_avg[LAST_FRAME].u_buffer +
  536. recon_uvoffset;
  537. target_v = cpi->denoiser.yv12_running_avg[LAST_FRAME].v_buffer +
  538. recon_uvoffset;
  539. stride_uv = cpi->denoiser.yv12_running_avg[LAST_FRAME].uv_stride;
  540. }
  541. }
  542. #endif
  543. assert(plane[LAST_FRAME][0] != NULL);
  544. if (plane[LAST_FRAME][0]) dot_artifact_candidate = check_dot_artifact_candidate(
  545. cpi, x, target_y, stride, plane[LAST_FRAME][0], mb_row, mb_col, 0);
  546. // If not found in Y channel, check UV channel.
  547. if (!dot_artifact_candidate) {
  548. assert(plane[LAST_FRAME][1] != NULL);
  549. if (plane[LAST_FRAME][1]) dot_artifact_candidate = check_dot_artifact_candidate(
  550. cpi, x, target_u, stride_uv, plane[LAST_FRAME][1], mb_row, mb_col, 1);
  551. if (!dot_artifact_candidate) {
  552. assert(plane[LAST_FRAME][2] != NULL);
  553. if (plane[LAST_FRAME][2]) dot_artifact_candidate = check_dot_artifact_candidate(
  554. cpi, x, target_v, stride_uv, plane[LAST_FRAME][2], mb_row, mb_col,
  555. 2);
  556. }
  557. }
  558. }
  559. #if CONFIG_MULTI_RES_ENCODING
  560. // |parent_ref_valid| will be set here if potentially we can do mv resue for
  561. // this higher resol (|cpi->oxcf.mr_encoder_id| > 0) frame.
  562. // |parent_ref_valid| may be reset depending on |parent_ref_frame| for
  563. // the current macroblock below.
  564. parent_ref_valid = cpi->oxcf.mr_encoder_id && cpi->mr_low_res_mv_avail;
  565. if (parent_ref_valid) {
  566. int parent_ref_flag;
  567. get_lower_res_motion_info(cpi, xd, &dissim, &parent_ref_frame, &parent_mode,
  568. &parent_ref_mv, mb_row, mb_col);
  569. /* TODO(jkoleszar): The references available (ref_frame_flags) to the
  570. * lower res encoder should match those available to this encoder, but
  571. * there seems to be a situation where this mismatch can happen in the
  572. * case of frame dropping and temporal layers. For example,
  573. * GOLD being disallowed in ref_frame_flags, but being returned as
  574. * parent_ref_frame.
  575. *
  576. * In this event, take the conservative approach of disabling the
  577. * lower res info for this MB.
  578. */
  579. parent_ref_flag = 0;
  580. // Note availability for mv reuse is only based on last and golden.
  581. if (parent_ref_frame == LAST_FRAME)
  582. parent_ref_flag = (cpi->ref_frame_flags & VP8_LAST_FRAME);
  583. else if (parent_ref_frame == GOLDEN_FRAME)
  584. parent_ref_flag = (cpi->ref_frame_flags & VP8_GOLD_FRAME);
  585. // assert(!parent_ref_frame || parent_ref_flag);
  586. // If |parent_ref_frame| did not match either last or golden then
  587. // shut off mv reuse.
  588. if (parent_ref_frame && !parent_ref_flag) parent_ref_valid = 0;
  589. // Don't do mv reuse since we want to allow for another mode besides
  590. // ZEROMV_LAST to remove dot artifact.
  591. if (dot_artifact_candidate) parent_ref_valid = 0;
  592. }
  593. #endif
  594. // Check if current macroblock is in skin area.
  595. x->is_skin = 0;
  596. if (!cpi->oxcf.screen_content_mode) {
  597. int block_index = mb_row * cpi->common.mb_cols + mb_col;
  598. x->is_skin = cpi->skin_map[block_index];
  599. }
  600. #if CONFIG_TEMPORAL_DENOISING
  601. if (cpi->oxcf.noise_sensitivity) {
  602. // Under aggressive denoising mode, should we use skin map to reduce
  603. // denoiser
  604. // and ZEROMV bias? Will need to revisit the accuracy of this detection for
  605. // very noisy input. For now keep this as is (i.e., don't turn it off).
  606. // if (cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive)
  607. // x->is_skin = 0;
  608. }
  609. #endif
  610. mode_mv = mode_mv_sb[sign_bias];
  611. best_ref_mv.as_int = 0;
  612. memset(mode_mv_sb, 0, sizeof(mode_mv_sb));
  613. memset(&best_mbmode, 0, sizeof(best_mbmode));
  614. /* Setup search priorities */
  615. #if CONFIG_MULTI_RES_ENCODING
  616. if (parent_ref_valid && parent_ref_frame && dissim < 8) {
  617. ref_frame_map[0] = -1;
  618. ref_frame_map[1] = parent_ref_frame;
  619. ref_frame_map[2] = -1;
  620. ref_frame_map[3] = -1;
  621. } else
  622. #endif
  623. get_reference_search_order(cpi, ref_frame_map);
  624. /* Check to see if there is at least 1 valid reference frame that we need
  625. * to calculate near_mvs.
  626. */
  627. if (ref_frame_map[1] > 0) {
  628. sign_bias = vp8_find_near_mvs_bias(
  629. &x->e_mbd, x->e_mbd.mode_info_context, mode_mv_sb, best_ref_mv_sb,
  630. mdcounts, ref_frame_map[1], cpi->common.ref_frame_sign_bias);
  631. mode_mv = mode_mv_sb[sign_bias];
  632. best_ref_mv.as_int = best_ref_mv_sb[sign_bias].as_int;
  633. }
  634. /* Count of the number of MBs tested so far this frame */
  635. x->mbs_tested_so_far++;
  636. *returnintra = INT_MAX;
  637. x->skip = 0;
  638. x->e_mbd.mode_info_context->mbmi.ref_frame = INTRA_FRAME;
  639. /* If the frame has big static background and current MB is in low
  640. * motion area, its mode decision is biased to ZEROMV mode.
  641. * No adjustment if cpu_used is <= -12 (i.e., cpi->Speed >= 12).
  642. * At such speed settings, ZEROMV is already heavily favored.
  643. */
  644. if (cpi->Speed < 12) {
  645. calculate_zeromv_rd_adjustment(cpi, x, &rd_adjustment);
  646. }
  647. #if CONFIG_TEMPORAL_DENOISING
  648. if (cpi->oxcf.noise_sensitivity) {
  649. rd_adjustment = (int)(rd_adjustment *
  650. cpi->denoiser.denoise_pars.pickmode_mv_bias / 100);
  651. }
  652. #endif
  653. if (dot_artifact_candidate) {
  654. // Bias against ZEROMV_LAST mode.
  655. rd_adjustment = 150;
  656. }
  657. /* if we encode a new mv this is important
  658. * find the best new motion vector
  659. */
  660. for (mode_index = 0; mode_index < MAX_MODES; ++mode_index) {
  661. int frame_cost;
  662. int this_rd = INT_MAX;
  663. int this_ref_frame = ref_frame_map[vp8_ref_frame_order[mode_index]];
  664. if (best_rd <= x->rd_threshes[mode_index]) continue;
  665. if (this_ref_frame < 0) continue;
  666. x->e_mbd.mode_info_context->mbmi.ref_frame = this_ref_frame;
  667. /* everything but intra */
  668. if (x->e_mbd.mode_info_context->mbmi.ref_frame) {
  669. x->e_mbd.pre.y_buffer = plane[this_ref_frame][0];
  670. x->e_mbd.pre.u_buffer = plane[this_ref_frame][1];
  671. x->e_mbd.pre.v_buffer = plane[this_ref_frame][2];
  672. if (sign_bias != cpi->common.ref_frame_sign_bias[this_ref_frame]) {
  673. sign_bias = cpi->common.ref_frame_sign_bias[this_ref_frame];
  674. mode_mv = mode_mv_sb[sign_bias];
  675. best_ref_mv.as_int = best_ref_mv_sb[sign_bias].as_int;
  676. }
  677. #if CONFIG_MULTI_RES_ENCODING
  678. if (parent_ref_valid) {
  679. if (vp8_mode_order[mode_index] == NEARESTMV &&
  680. mode_mv[NEARESTMV].as_int == 0)
  681. continue;
  682. if (vp8_mode_order[mode_index] == NEARMV && mode_mv[NEARMV].as_int == 0)
  683. continue;
  684. if (vp8_mode_order[mode_index] == NEWMV && parent_mode == ZEROMV &&
  685. best_ref_mv.as_int == 0)
  686. continue;
  687. else if (vp8_mode_order[mode_index] == NEWMV && dissim == 0 &&
  688. best_ref_mv.as_int == parent_ref_mv.as_int)
  689. continue;
  690. }
  691. #endif
  692. }
  693. /* Check to see if the testing frequency for this mode is at its max
  694. * If so then prevent it from being tested and increase the threshold
  695. * for its testing */
  696. if (x->mode_test_hit_counts[mode_index] &&
  697. (cpi->mode_check_freq[mode_index] > 1)) {
  698. if (x->mbs_tested_so_far <= (cpi->mode_check_freq[mode_index] *
  699. x->mode_test_hit_counts[mode_index])) {
  700. /* Increase the threshold for coding this mode to make it less
  701. * likely to be chosen */
  702. x->rd_thresh_mult[mode_index] += 4;
  703. if (x->rd_thresh_mult[mode_index] > MAX_THRESHMULT) {
  704. x->rd_thresh_mult[mode_index] = MAX_THRESHMULT;
  705. }
  706. x->rd_threshes[mode_index] =
  707. (cpi->rd_baseline_thresh[mode_index] >> 7) *
  708. x->rd_thresh_mult[mode_index];
  709. continue;
  710. }
  711. }
  712. /* We have now reached the point where we are going to test the current
  713. * mode so increment the counter for the number of times it has been
  714. * tested */
  715. x->mode_test_hit_counts[mode_index]++;
  716. rate2 = 0;
  717. distortion2 = 0;
  718. this_mode = vp8_mode_order[mode_index];
  719. x->e_mbd.mode_info_context->mbmi.mode = this_mode;
  720. x->e_mbd.mode_info_context->mbmi.uv_mode = DC_PRED;
721. /* Work out the cost associated with selecting the reference frame */
  722. frame_cost = x->ref_frame_cost[x->e_mbd.mode_info_context->mbmi.ref_frame];
  723. rate2 += frame_cost;
  724. /* Only consider ZEROMV/ALTREF_FRAME for alt ref frame,
  725. * unless ARNR filtering is enabled in which case we want
  726. * an unfiltered alternative */
  727. if (cpi->is_src_frame_alt_ref && (cpi->oxcf.arnr_max_frames == 0)) {
  728. if (this_mode != ZEROMV ||
  729. x->e_mbd.mode_info_context->mbmi.ref_frame != ALTREF_FRAME) {
  730. continue;
  731. }
  732. }
  733. switch (this_mode) {
  734. case B_PRED:
  735. /* Pass best so far to pick_intra4x4mby_modes to use as breakout */
  736. distortion2 = best_rd_sse;
  737. pick_intra4x4mby_modes(x, &rate, &distortion2);
  738. if (distortion2 == INT_MAX) {
  739. this_rd = INT_MAX;
  740. } else {
  741. rate2 += rate;
  742. distortion2 = vpx_variance16x16(*(b->base_src), b->src_stride,
  743. x->e_mbd.predictor, 16, &sse);
  744. this_rd = RDCOST(x->rdmult, x->rddiv, rate2, distortion2);
  745. if (this_rd < best_intra_rd) {
  746. best_intra_rd = this_rd;
  747. *returnintra = distortion2;
  748. }
  749. }
  750. break;
  751. case SPLITMV:
  752. /* Split MV modes currently not supported when RD is not enabled. */
  753. break;
  754. case DC_PRED:
  755. case V_PRED:
  756. case H_PRED:
  757. case TM_PRED:
  758. vp8_build_intra_predictors_mby_s(
  759. xd, xd->dst.y_buffer - xd->dst.y_stride, xd->dst.y_buffer - 1,
  760. xd->dst.y_stride, xd->predictor, 16);
  761. distortion2 = vpx_variance16x16(*(b->base_src), b->src_stride,
  762. x->e_mbd.predictor, 16, &sse);
  763. rate2 += x->mbmode_cost[x->e_mbd.frame_type]
  764. [x->e_mbd.mode_info_context->mbmi.mode];
  765. this_rd = RDCOST(x->rdmult, x->rddiv, rate2, distortion2);
  766. if (this_rd < best_intra_rd) {
  767. best_intra_rd = this_rd;
  768. *returnintra = distortion2;
  769. }
  770. break;
  771. case NEWMV: {
  772. int thissme;
  773. int step_param;
  774. int further_steps;
  775. int n = 0;
  776. int sadpb = x->sadperbit16;
  777. int_mv mvp_full;
  778. int col_min = ((best_ref_mv.as_mv.col + 7) >> 3) - MAX_FULL_PEL_VAL;
  779. int row_min = ((best_ref_mv.as_mv.row + 7) >> 3) - MAX_FULL_PEL_VAL;
  780. int col_max = (best_ref_mv.as_mv.col >> 3) + MAX_FULL_PEL_VAL;
  781. int row_max = (best_ref_mv.as_mv.row >> 3) + MAX_FULL_PEL_VAL;
  782. int tmp_col_min = x->mv_col_min;
  783. int tmp_col_max = x->mv_col_max;
  784. int tmp_row_min = x->mv_row_min;
  785. int tmp_row_max = x->mv_row_max;
  786. int speed_adjust = (cpi->Speed > 5) ? ((cpi->Speed >= 8) ? 3 : 2) : 1;
  787. /* Further step/diamond searches as necessary */
  788. step_param = cpi->sf.first_step + speed_adjust;
  789. #if CONFIG_MULTI_RES_ENCODING
  790. /* If lower-res frame is not available for mv reuse (because of
  791. frame dropping or different temporal layer pattern), then higher
  792. resol encoder does motion search without any previous knowledge.
  793. Also, since last frame motion info is not stored, then we can not
  794. use improved_mv_pred. */
  795. if (cpi->oxcf.mr_encoder_id) sf_improved_mv_pred = 0;
  796. // Only use parent MV as predictor if this candidate reference frame
  797. // (|this_ref_frame|) is equal to |parent_ref_frame|.
  798. if (parent_ref_valid && (parent_ref_frame == this_ref_frame)) {
  799. /* Use parent MV as predictor. Adjust search range
  800. * accordingly.
  801. */
  802. mvp.as_int = parent_ref_mv.as_int;
  803. mvp_full.as_mv.col = parent_ref_mv.as_mv.col >> 3;
  804. mvp_full.as_mv.row = parent_ref_mv.as_mv.row >> 3;
  805. if (dissim <= 32)
  806. step_param += 3;
  807. else if (dissim <= 128)
  808. step_param += 2;
  809. else
  810. step_param += 1;
  811. } else
  812. #endif
  813. {
  814. if (sf_improved_mv_pred) {
  815. if (!saddone) {
  816. vp8_cal_sad(cpi, xd, x, recon_yoffset, &near_sadidx[0]);
  817. saddone = 1;
  818. }
  819. vp8_mv_pred(cpi, &x->e_mbd, x->e_mbd.mode_info_context, &mvp,
  820. x->e_mbd.mode_info_context->mbmi.ref_frame,
  821. cpi->common.ref_frame_sign_bias, &sr, &near_sadidx[0]);
  822. sr += speed_adjust;
  823. /* adjust search range according to sr from mv prediction */
  824. if (sr > step_param) step_param = sr;
  825. mvp_full.as_mv.col = mvp.as_mv.col >> 3;
  826. mvp_full.as_mv.row = mvp.as_mv.row >> 3;
  827. } else {
  828. mvp.as_int = best_ref_mv.as_int;
  829. mvp_full.as_mv.col = best_ref_mv.as_mv.col >> 3;
  830. mvp_full.as_mv.row = best_ref_mv.as_mv.row >> 3;
  831. }
  832. }
  833. #if CONFIG_MULTI_RES_ENCODING
  834. if (parent_ref_valid && (parent_ref_frame == this_ref_frame) &&
  835. dissim <= 2 &&
  836. VPXMAX(abs(best_ref_mv.as_mv.row - parent_ref_mv.as_mv.row),
  837. abs(best_ref_mv.as_mv.col - parent_ref_mv.as_mv.col)) <= 4) {
  838. d->bmi.mv.as_int = mvp_full.as_int;
  839. mode_mv[NEWMV].as_int = mvp_full.as_int;
  840. cpi->find_fractional_mv_step(
  841. x, b, d, &d->bmi.mv, &best_ref_mv, x->errorperbit,
  842. &cpi->fn_ptr[BLOCK_16X16], cpi->mb.mvcost, &distortion2, &sse);
  843. } else
  844. #endif
  845. {
  846. /* Get intersection of UMV window and valid MV window to
  847. * reduce # of checks in diamond search. */
  848. if (x->mv_col_min < col_min) x->mv_col_min = col_min;
  849. if (x->mv_col_max > col_max) x->mv_col_max = col_max;
  850. if (x->mv_row_min < row_min) x->mv_row_min = row_min;
  851. if (x->mv_row_max > row_max) x->mv_row_max = row_max;
  852. further_steps =
  853. (cpi->Speed >= 8)
  854. ? 0
  855. : (cpi->sf.max_step_search_steps - 1 - step_param);
  856. if (cpi->sf.search_method == HEX) {
  857. #if CONFIG_MULTI_RES_ENCODING
  858. /* TODO: In higher-res pick_inter_mode, step_param is used to
  859. * modify hex search range. Here, set step_param to 0 not to
  860. * change the behavior in lowest-resolution encoder.
  861. * Will improve it later.
  862. */
  863. /* Set step_param to 0 to ensure large-range motion search
  864. * when mv reuse if not valid (i.e. |parent_ref_valid| = 0),
  865. * or if this candidate reference frame (|this_ref_frame|) is
  866. * not equal to |parent_ref_frame|.
  867. */
  868. if (!parent_ref_valid || (parent_ref_frame != this_ref_frame))
  869. step_param = 0;
  870. #endif
  871. bestsme = vp8_hex_search(x, b, d, &mvp_full, &d->bmi.mv, step_param,
  872. sadpb, &cpi->fn_ptr[BLOCK_16X16],
  873. x->mvsadcost, x->mvcost, &best_ref_mv);
  874. mode_mv[NEWMV].as_int = d->bmi.mv.as_int;
  875. } else {
  876. bestsme = cpi->diamond_search_sad(
  877. x, b, d, &mvp_full, &d->bmi.mv, step_param, sadpb, &num00,
  878. &cpi->fn_ptr[BLOCK_16X16], x->mvcost, &best_ref_mv);
  879. mode_mv[NEWMV].as_int = d->bmi.mv.as_int;
  880. /* Further step/diamond searches as necessary */
  881. n = num00;
  882. num00 = 0;
  883. while (n < further_steps) {
  884. n++;
  885. if (num00) {
  886. num00--;
  887. } else {
  888. thissme = cpi->diamond_search_sad(
  889. x, b, d, &mvp_full, &d->bmi.mv, step_param + n, sadpb,
  890. &num00, &cpi->fn_ptr[BLOCK_16X16], x->mvcost, &best_ref_mv);
  891. if (thissme < bestsme) {
  892. bestsme = thissme;
  893. mode_mv[NEWMV].as_int = d->bmi.mv.as_int;
  894. } else {
  895. d->bmi.mv.as_int = mode_mv[NEWMV].as_int;
  896. }
  897. }
  898. }
  899. }
  900. x->mv_col_min = tmp_col_min;
  901. x->mv_col_max = tmp_col_max;
  902. x->mv_row_min = tmp_row_min;
  903. x->mv_row_max = tmp_row_max;
  904. if (bestsme < INT_MAX) {
  905. cpi->find_fractional_mv_step(
  906. x, b, d, &d->bmi.mv, &best_ref_mv, x->errorperbit,
  907. &cpi->fn_ptr[BLOCK_16X16], cpi->mb.mvcost, &distortion2, &sse);
  908. }
  909. }
  910. mode_mv[NEWMV].as_int = d->bmi.mv.as_int;
  911. // The clamp below is not necessary from the perspective
  912. // of VP8 bitstream, but is added to improve ChromeCast
  913. // mirroring's robustness. Please do not remove.
  914. vp8_clamp_mv2(&mode_mv[this_mode], xd);
  915. /* mv cost; */
  916. rate2 +=
  917. vp8_mv_bit_cost(&mode_mv[NEWMV], &best_ref_mv, cpi->mb.mvcost, 128);
  918. }
  919. // fall through
  920. case NEARESTMV:
  921. case NEARMV:
  922. if (mode_mv[this_mode].as_int == 0) continue;
  923. // fall through
  924. case ZEROMV:
  925. /* Trap vectors that reach beyond the UMV borders
  926. * Note that ALL New MV, Nearest MV Near MV and Zero MV code drops
  927. * through to this point because of the lack of break statements
  928. * in the previous two cases.
  929. */
  930. if (((mode_mv[this_mode].as_mv.row >> 3) < x->mv_row_min) ||
  931. ((mode_mv[this_mode].as_mv.row >> 3) > x->mv_row_max) ||
  932. ((mode_mv[this_mode].as_mv.col >> 3) < x->mv_col_min) ||
  933. ((mode_mv[this_mode].as_mv.col >> 3) > x->mv_col_max)) {
  934. continue;
  935. }
  936. rate2 += vp8_cost_mv_ref(this_mode, mdcounts);
  937. x->e_mbd.mode_info_context->mbmi.mv.as_int = mode_mv[this_mode].as_int;
  938. this_rd = evaluate_inter_mode(&sse, rate2, &distortion2, cpi, x,
  939. rd_adjustment);
  940. break;
  941. default: break;
  942. }
  943. #if CONFIG_TEMPORAL_DENOISING
  944. if (cpi->oxcf.noise_sensitivity) {
  945. /* Store for later use by denoiser. */
946. // Don't denoise with GOLDEN or ALTREF if they are old reference
947. // frames (greater than MAX_GF_ARF_DENOISE_RANGE frames in the past).
  948. int skip_old_reference = ((this_ref_frame != LAST_FRAME) &&
  949. (cpi->common.current_video_frame -
  950. cpi->current_ref_frames[this_ref_frame] >
  951. MAX_GF_ARF_DENOISE_RANGE))
  952. ? 1
  953. : 0;
  954. if (this_mode == ZEROMV && sse < zero_mv_sse && !skip_old_reference) {
  955. zero_mv_sse = sse;
  956. x->best_zeromv_reference_frame =
  957. x->e_mbd.mode_info_context->mbmi.ref_frame;
  958. }
  959. // Store the best NEWMV in x for later use in the denoiser.
  960. if (x->e_mbd.mode_info_context->mbmi.mode == NEWMV && sse < best_sse &&
  961. !skip_old_reference) {
  962. best_sse = sse;
  963. x->best_sse_inter_mode = NEWMV;
  964. x->best_sse_mv = x->e_mbd.mode_info_context->mbmi.mv;
  965. x->need_to_clamp_best_mvs =
  966. x->e_mbd.mode_info_context->mbmi.need_to_clamp_mvs;
  967. x->best_reference_frame = x->e_mbd.mode_info_context->mbmi.ref_frame;
  968. }
  969. }
  970. #endif
  971. if (this_rd < best_rd || x->skip) {
  972. /* Note index of best mode */
  973. best_mode_index = mode_index;
  974. *returnrate = rate2;
  975. *returndistortion = distortion2;
  976. best_rd_sse = sse;
  977. best_rd = this_rd;
  978. memcpy(&best_mbmode, &x->e_mbd.mode_info_context->mbmi,
  979. sizeof(MB_MODE_INFO));
  980. /* Testing this mode gave rise to an improvement in best error
  981. * score. Lower threshold a bit for next time
  982. */
  983. x->rd_thresh_mult[mode_index] =
  984. (x->rd_thresh_mult[mode_index] >= (MIN_THRESHMULT + 2))
  985. ? x->rd_thresh_mult[mode_index] - 2
  986. : MIN_THRESHMULT;
  987. x->rd_threshes[mode_index] = (cpi->rd_baseline_thresh[mode_index] >> 7) *
  988. x->rd_thresh_mult[mode_index];
  989. }
  990. /* If the mode did not help improve the best error case then raise the
  991. * threshold for testing that mode next time around.
  992. */
  993. else {
  994. x->rd_thresh_mult[mode_index] += 4;
  995. if (x->rd_thresh_mult[mode_index] > MAX_THRESHMULT) {
  996. x->rd_thresh_mult[mode_index] = MAX_THRESHMULT;
  997. }
  998. x->rd_threshes[mode_index] = (cpi->rd_baseline_thresh[mode_index] >> 7) *
  999. x->rd_thresh_mult[mode_index];
  1000. }
  1001. if (x->skip) break;
  1002. }
  1003. /* Reduce the activation RD thresholds for the best choice mode */
  1004. if ((cpi->rd_baseline_thresh[best_mode_index] > 0) &&
  1005. (cpi->rd_baseline_thresh[best_mode_index] < (INT_MAX >> 2))) {
  1006. int best_adjustment = (x->rd_thresh_mult[best_mode_index] >> 3);
  1007. x->rd_thresh_mult[best_mode_index] =
  1008. (x->rd_thresh_mult[best_mode_index] >=
  1009. (MIN_THRESHMULT + best_adjustment))
  1010. ? x->rd_thresh_mult[best_mode_index] - best_adjustment
  1011. : MIN_THRESHMULT;
  1012. x->rd_threshes[best_mode_index] =
  1013. (cpi->rd_baseline_thresh[best_mode_index] >> 7) *
  1014. x->rd_thresh_mult[best_mode_index];
  1015. }
  1016. {
  1017. int this_rdbin = (*returndistortion >> 7);
  1018. if (this_rdbin >= 1024) {
  1019. this_rdbin = 1023;
  1020. }
  1021. x->error_bins[this_rdbin]++;
  1022. }
  1023. #if CONFIG_TEMPORAL_DENOISING
  1024. if (cpi->oxcf.noise_sensitivity) {
  1025. int block_index = mb_row * cpi->common.mb_cols + mb_col;
  1026. int reevaluate = 0;
  1027. int is_noisy = 0;
  1028. if (x->best_sse_inter_mode == DC_PRED) {
  1029. /* No best MV found. */
  1030. x->best_sse_inter_mode = best_mbmode.mode;
  1031. x->best_sse_mv = best_mbmode.mv;
  1032. x->need_to_clamp_best_mvs = best_mbmode.need_to_clamp_mvs;
  1033. x->best_reference_frame = best_mbmode.ref_frame;
  1034. best_sse = best_rd_sse;
  1035. }
  1036. // For non-skin blocks that have selected ZEROMV for this current frame,
  1037. // and have been selecting ZEROMV_LAST (on the base layer frame) at
  1038. // least |x~20| consecutive past frames in a row, label the block for
  1039. // possible increase in denoising strength. We also condition this
  1040. // labeling on there being significant denoising in the scene
  1041. if (cpi->oxcf.noise_sensitivity == 4) {
  1042. if (cpi->denoiser.nmse_source_diff >
  1043. 70 * cpi->denoiser.threshold_aggressive_mode / 100) {
  1044. is_noisy = 1;
  1045. }
  1046. } else {
  1047. if (cpi->mse_source_denoised > 1000) is_noisy = 1;
  1048. }
  1049. x->increase_denoising = 0;
  1050. if (!x->is_skin && x->best_sse_inter_mode == ZEROMV &&
  1051. (x->best_reference_frame == LAST_FRAME ||
  1052. x->best_reference_frame == cpi->closest_reference_frame) &&
  1053. cpi->consec_zero_last[block_index] >= 20 && is_noisy) {
  1054. x->increase_denoising = 1;
  1055. }
  1056. x->denoise_zeromv = 0;
  1057. vp8_denoiser_denoise_mb(&cpi->denoiser, x, best_sse, zero_mv_sse,
  1058. recon_yoffset, recon_uvoffset, &cpi->common.lf_info,
  1059. mb_row, mb_col, block_index,
  1060. cpi->consec_zero_last_mvbias[block_index]);
  1061. // Reevaluate ZEROMV after denoising: for large noise content
  1062. // (i.e., cpi->mse_source_denoised is above threshold), do this for all
  1063. // blocks that did not pick ZEROMV as best mode but are using ZEROMV
  1064. // for denoising. Otherwise, always re-evaluate for blocks that picked
  1065. // INTRA mode as best mode.
  1066. // Avoid blocks that have been biased against ZERO_LAST
  1067. // (i.e., dot artifact candidate blocks).
  1068. reevaluate = (best_mbmode.ref_frame == INTRA_FRAME) ||
  1069. (best_mbmode.mode != ZEROMV && x->denoise_zeromv &&
  1070. cpi->mse_source_denoised > 2000);
  1071. if (!dot_artifact_candidate && reevaluate &&
  1072. x->best_zeromv_reference_frame != INTRA_FRAME) {
  1073. int this_rd = 0;
  1074. int this_ref_frame = x->best_zeromv_reference_frame;
  1075. rd_adjustment = 100;
  1076. rate2 =
  1077. x->ref_frame_cost[this_ref_frame] + vp8_cost_mv_ref(ZEROMV, mdcounts);
  1078. distortion2 = 0;
  1079. /* set up the proper prediction buffers for the frame */
  1080. x->e_mbd.mode_info_context->mbmi.ref_frame = this_ref_frame;
  1081. x->e_mbd.pre.y_buffer = plane[this_ref_frame][0];
  1082. x->e_mbd.pre.u_buffer = plane[this_ref_frame][1];
  1083. x->e_mbd.pre.v_buffer = plane[this_ref_frame][2];
  1084. x->e_mbd.mode_info_context->mbmi.mode = ZEROMV;
  1085. x->e_mbd.mode_info_context->mbmi.uv_mode = DC_PRED;
  1086. x->e_mbd.mode_info_context->mbmi.mv.as_int = 0;
  1087. this_rd =
  1088. evaluate_inter_mode(&sse, rate2, &distortion2, cpi, x, rd_adjustment);
  1089. if (this_rd < best_rd) {
  1090. memcpy(&best_mbmode, &x->e_mbd.mode_info_context->mbmi,
  1091. sizeof(MB_MODE_INFO));
  1092. }
  1093. }
  1094. }
  1095. #endif
  1096. if (cpi->is_src_frame_alt_ref &&
  1097. (best_mbmode.mode != ZEROMV || best_mbmode.ref_frame != ALTREF_FRAME)) {
  1098. x->e_mbd.mode_info_context->mbmi.mode = ZEROMV;
  1099. x->e_mbd.mode_info_context->mbmi.ref_frame = ALTREF_FRAME;
  1100. x->e_mbd.mode_info_context->mbmi.mv.as_int = 0;
  1101. x->e_mbd.mode_info_context->mbmi.uv_mode = DC_PRED;
  1102. x->e_mbd.mode_info_context->mbmi.mb_skip_coeff =
  1103. (cpi->common.mb_no_coeff_skip);
  1104. x->e_mbd.mode_info_context->mbmi.partitioning = 0;
  1105. return;
  1106. }
  1107. /* set to the best mb mode, this copy can be skip if x->skip since it
  1108. * already has the right content */
  1109. if (!x->skip) {
  1110. memcpy(&x->e_mbd.mode_info_context->mbmi, &best_mbmode,
  1111. sizeof(MB_MODE_INFO));
  1112. }
  1113. if (best_mbmode.mode <= B_PRED) {
  1114. /* set mode_info_context->mbmi.uv_mode */
  1115. pick_intra_mbuv_mode(x);
  1116. }
  1117. if (sign_bias !=
  1118. cpi->common.ref_frame_sign_bias[xd->mode_info_context->mbmi.ref_frame]) {
  1119. best_ref_mv.as_int = best_ref_mv_sb[!sign_bias].as_int;
  1120. }
  1121. update_mvcount(x, &best_ref_mv);
  1122. }
  1123. void vp8_pick_intra_mode(MACROBLOCK *x, int *rate) {
  1124. int error4x4, error16x16 = INT_MAX;
  1125. int rate_, best_rate = 0, distortion, best_sse;
  1126. MB_PREDICTION_MODE mode, best_mode = DC_PRED;
  1127. int this_rd;
  1128. unsigned int sse;
  1129. BLOCK *b = &x->block[0];
  1130. MACROBLOCKD *xd = &x->e_mbd;
  1131. xd->mode_info_context->mbmi.ref_frame = INTRA_FRAME;
  1132. pick_intra_mbuv_mode(x);
  1133. for (mode = DC_PRED; mode <= TM_PRED; ++mode) {
  1134. xd->mode_info_context->mbmi.mode = mode;
  1135. vp8_build_intra_predictors_mby_s(xd, xd->dst.y_buffer - xd->dst.y_stride,
  1136. xd->dst.y_buffer - 1, xd->dst.y_stride,
  1137. xd->predictor, 16);
  1138. distortion = vpx_variance16x16(*(b->base_src), b->src_stride, xd->predictor,
  1139. 16, &sse);
  1140. rate_ = x->mbmode_cost[xd->frame_type][mode];
  1141. this_rd = RDCOST(x->rdmult, x->rddiv, rate_, distortion);
  1142. if (error16x16 > this_rd) {
  1143. error16x16 = this_rd;
  1144. best_mode = mode;
  1145. best_sse = sse;
  1146. best_rate = rate_;
  1147. }
  1148. }
  1149. xd->mode_info_context->mbmi.mode = best_mode;
  1150. error4x4 = pick_intra4x4mby_modes(x, &rate_, &best_sse);
  1151. if (error4x4 < error16x16) {
  1152. xd->mode_info_context->mbmi.mode = B_PRED;
  1153. best_rate = rate_;
  1154. }
  1155. *rate = best_rate;
  1156. }