vp8_dx_iface.c 22 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732
  1. /*
  2. * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
  10. #include <assert.h>
  11. #include <stdlib.h>
  12. #include <string.h>
  13. #include "./vp8_rtcd.h"
  14. #include "./vpx_dsp_rtcd.h"
  15. #include "./vpx_scale_rtcd.h"
  16. #include "vpx/vpx_decoder.h"
  17. #include "vpx/vp8dx.h"
  18. #include "vpx/internal/vpx_codec_internal.h"
  19. #include "vpx_version.h"
  20. #include "common/alloccommon.h"
  21. #include "common/common.h"
  22. #include "common/onyxd.h"
  23. #include "decoder/onyxd_int.h"
  24. #include "vpx_dsp/vpx_dsp_common.h"
  25. #include "vpx_mem/vpx_mem.h"
  26. #include "vpx_ports/system_state.h"
  27. #if CONFIG_ERROR_CONCEALMENT
  28. #include "decoder/error_concealment.h"
  29. #endif
  30. #include "decoder/decoderthreading.h"
/* Decoder capability bits, resolved at build time from config options. */
#define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
#define VP8_CAP_ERROR_CONCEALMENT \
  (CONFIG_ERROR_CONCEALMENT ? VPX_CODEC_CAP_ERROR_CONCEALMENT : 0)

/* VP8 has no codec-specific stream info; alias the generic structure. */
typedef vpx_codec_stream_info_t vp8_stream_info_t;

/* Structures for handling memory allocations */
typedef enum { VP8_SEG_ALG_PRIV = 256, VP8_SEG_MAX } mem_seg_id_t;
#define NELEMENTS(x) ((int)(sizeof(x) / sizeof((x)[0])))
/* Per-instance decoder state backing the public vpx_codec_ctx_t. */
struct vpx_codec_alg_priv {
  vpx_codec_priv_t base;   /* common codec-private data; must be first */
  vpx_codec_dec_cfg_t cfg; /* internal copy of the caller's decode config */
  vp8_stream_info_t si;    /* last-known stream parameters (w/h/is_kf) */
  int decoder_init;        /* nonzero once the decoder instance exists */
#if CONFIG_MULTITHREAD
  // Restart threads on next frame if set to 1.
  // This is set when error happens in multithreaded decoding and all threads
  // are shut down.
  int restart_threads;
#endif
  int postproc_cfg_set;            /* app provided an explicit postproc cfg */
  vp8_postproc_cfg_t postproc_cfg; /* active post-processing settings */
  vpx_decrypt_cb decrypt_cb;       /* optional decrypt callback (may be NULL) */
  void *decrypt_state;             /* opaque state passed to decrypt_cb */
  vpx_image_t img;                 /* image wrapper returned by get_frame */
  int img_setup;
  struct frame_buffers yv12_frame_buffers; /* owns the VP8D_COMP instance(s) */
  void *user_priv;                 /* caller's per-frame private pointer */
  FRAGMENT_DATA fragments;         /* accumulated input fragments for a frame */
};
  59. static int vp8_init_ctx(vpx_codec_ctx_t *ctx) {
  60. vpx_codec_alg_priv_t *priv =
  61. (vpx_codec_alg_priv_t *)vpx_calloc(1, sizeof(*priv));
  62. if (!priv) return 1;
  63. ctx->priv = (vpx_codec_priv_t *)priv;
  64. ctx->priv->init_flags = ctx->init_flags;
  65. priv->si.sz = sizeof(priv->si);
  66. priv->decrypt_cb = NULL;
  67. priv->decrypt_state = NULL;
  68. if (ctx->config.dec) {
  69. /* Update the reference to the config structure to an internal copy. */
  70. priv->cfg = *ctx->config.dec;
  71. ctx->config.dec = &priv->cfg;
  72. }
  73. return 0;
  74. }
  75. static vpx_codec_err_t vp8_init(vpx_codec_ctx_t *ctx,
  76. vpx_codec_priv_enc_mr_cfg_t *data) {
  77. vpx_codec_err_t res = VPX_CODEC_OK;
  78. (void)data;
  79. vp8_rtcd();
  80. vpx_dsp_rtcd();
  81. vpx_scale_rtcd();
  82. /* This function only allocates space for the vpx_codec_alg_priv_t
  83. * structure. More memory may be required at the time the stream
  84. * information becomes known.
  85. */
  86. if (!ctx->priv) {
  87. vpx_codec_alg_priv_t *priv;
  88. if (vp8_init_ctx(ctx)) return VPX_CODEC_MEM_ERROR;
  89. priv = (vpx_codec_alg_priv_t *)ctx->priv;
  90. /* initialize number of fragments to zero */
  91. priv->fragments.count = 0;
  92. /* is input fragments enabled? */
  93. priv->fragments.enabled =
  94. (priv->base.init_flags & VPX_CODEC_USE_INPUT_FRAGMENTS);
  95. /*post processing level initialized to do nothing */
  96. }
  97. return res;
  98. }
  99. static vpx_codec_err_t vp8_destroy(vpx_codec_alg_priv_t *ctx) {
  100. vp8_remove_decoder_instances(&ctx->yv12_frame_buffers);
  101. vpx_free(ctx);
  102. return VPX_CODEC_OK;
  103. }
  104. #ifdef __clang_analyzer__
  105. #define FUNC_ATTR_NONNULL(...) __attribute__((nonnull(__VA_ARGS__)))
  106. #else
  107. #define FUNC_ATTR_NONNULL(...)
  108. #endif
  109. static vpx_codec_err_t vp8_peek_si_internal(const uint8_t *data,
  110. unsigned int data_sz,
  111. vpx_codec_stream_info_t *si,
  112. vpx_decrypt_cb decrypt_cb,
  113. void *decrypt_state) FUNC_ATTR_NONNULL(1) {
  114. vpx_codec_err_t res = VPX_CODEC_OK;
  115. assert(data != NULL);
  116. if (data + data_sz <= data) {
  117. res = VPX_CODEC_INVALID_PARAM;
  118. } else {
  119. /* Parse uncompresssed part of key frame header.
  120. * 3 bytes:- including version, frame type and an offset
  121. * 3 bytes:- sync code (0x9d, 0x01, 0x2a)
  122. * 4 bytes:- including image width and height in the lowest 14 bits
  123. * of each 2-byte value.
  124. */
  125. uint8_t clear_buffer[10];
  126. const uint8_t *clear = data;
  127. if (decrypt_cb) {
  128. int n = VPXMIN(sizeof(clear_buffer), data_sz);
  129. decrypt_cb(decrypt_state, data, clear_buffer, n);
  130. clear = clear_buffer;
  131. }
  132. si->is_kf = 0;
  133. if (data_sz >= 10 && !(clear[0] & 0x01)) { /* I-Frame */
  134. si->is_kf = 1;
  135. /* vet via sync code */
  136. if (clear[3] != 0x9d || clear[4] != 0x01 || clear[5] != 0x2a) {
  137. return VPX_CODEC_UNSUP_BITSTREAM;
  138. }
  139. si->w = (clear[6] | (clear[7] << 8)) & 0x3fff;
  140. si->h = (clear[8] | (clear[9] << 8)) & 0x3fff;
  141. /*printf("w=%d, h=%d\n", si->w, si->h);*/
  142. if (!(si->h && si->w)) res = VPX_CODEC_CORRUPT_FRAME;
  143. } else {
  144. res = VPX_CODEC_UNSUP_BITSTREAM;
  145. }
  146. }
  147. return res;
  148. }
  149. static vpx_codec_err_t vp8_peek_si(const uint8_t *data, unsigned int data_sz,
  150. vpx_codec_stream_info_t *si) {
  151. return vp8_peek_si_internal(data, data_sz, si, NULL, NULL);
  152. }
  153. static vpx_codec_err_t vp8_get_si(vpx_codec_alg_priv_t *ctx,
  154. vpx_codec_stream_info_t *si) {
  155. unsigned int sz;
  156. if (si->sz >= sizeof(vp8_stream_info_t)) {
  157. sz = sizeof(vp8_stream_info_t);
  158. } else {
  159. sz = sizeof(vpx_codec_stream_info_t);
  160. }
  161. memcpy(si, &ctx->si, sz);
  162. si->sz = sz;
  163. return VPX_CODEC_OK;
  164. }
  165. static vpx_codec_err_t update_error_state(
  166. vpx_codec_alg_priv_t *ctx, const struct vpx_internal_error_info *error) {
  167. vpx_codec_err_t res;
  168. if ((res = error->error_code)) {
  169. ctx->base.err_detail = error->has_detail ? error->detail : NULL;
  170. }
  171. return res;
  172. }
  173. static void yuvconfig2image(vpx_image_t *img, const YV12_BUFFER_CONFIG *yv12,
  174. void *user_priv) {
  175. /** vpx_img_wrap() doesn't allow specifying independent strides for
  176. * the Y, U, and V planes, nor other alignment adjustments that
  177. * might be representable by a YV12_BUFFER_CONFIG, so we just
  178. * initialize all the fields.*/
  179. img->fmt = VPX_IMG_FMT_I420;
  180. img->w = yv12->y_stride;
  181. img->h = (yv12->y_height + 2 * VP8BORDERINPIXELS + 15) & ~15;
  182. img->d_w = img->r_w = yv12->y_width;
  183. img->d_h = img->r_h = yv12->y_height;
  184. img->x_chroma_shift = 1;
  185. img->y_chroma_shift = 1;
  186. img->planes[VPX_PLANE_Y] = yv12->y_buffer;
  187. img->planes[VPX_PLANE_U] = yv12->u_buffer;
  188. img->planes[VPX_PLANE_V] = yv12->v_buffer;
  189. img->planes[VPX_PLANE_ALPHA] = NULL;
  190. img->stride[VPX_PLANE_Y] = yv12->y_stride;
  191. img->stride[VPX_PLANE_U] = yv12->uv_stride;
  192. img->stride[VPX_PLANE_V] = yv12->uv_stride;
  193. img->stride[VPX_PLANE_ALPHA] = yv12->y_stride;
  194. img->bit_depth = 8;
  195. img->bps = 12;
  196. img->user_priv = user_priv;
  197. img->img_data = yv12->buffer_alloc;
  198. img->img_data_owner = 0;
  199. img->self_allocd = 0;
  200. }
  201. static int update_fragments(vpx_codec_alg_priv_t *ctx, const uint8_t *data,
  202. unsigned int data_sz,
  203. volatile vpx_codec_err_t *res) {
  204. *res = VPX_CODEC_OK;
  205. if (ctx->fragments.count == 0) {
  206. /* New frame, reset fragment pointers and sizes */
  207. memset((void *)ctx->fragments.ptrs, 0, sizeof(ctx->fragments.ptrs));
  208. memset(ctx->fragments.sizes, 0, sizeof(ctx->fragments.sizes));
  209. }
  210. if (ctx->fragments.enabled && !(data == NULL && data_sz == 0)) {
  211. /* Store a pointer to this fragment and return. We haven't
  212. * received the complete frame yet, so we will wait with decoding.
  213. */
  214. ctx->fragments.ptrs[ctx->fragments.count] = data;
  215. ctx->fragments.sizes[ctx->fragments.count] = data_sz;
  216. ctx->fragments.count++;
  217. if (ctx->fragments.count > (1 << EIGHT_PARTITION) + 1) {
  218. ctx->fragments.count = 0;
  219. *res = VPX_CODEC_INVALID_PARAM;
  220. return -1;
  221. }
  222. return 0;
  223. }
  224. if (!ctx->fragments.enabled && (data == NULL && data_sz == 0)) {
  225. return 0;
  226. }
  227. if (!ctx->fragments.enabled) {
  228. ctx->fragments.ptrs[0] = data;
  229. ctx->fragments.sizes[0] = data_sz;
  230. ctx->fragments.count = 1;
  231. }
  232. return 1;
  233. }
/* Decode one frame (or accumulate one fragment). Handles lazy decoder
 * creation on the first keyframe, mid-stream resolution changes, thread
 * restart after a multithreaded error, and error recovery via
 * setjmp/longjmp (vpx_internal_error longjmps back here). */
static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t *data, unsigned int data_sz,
                                  void *user_priv, long deadline) {
  /* volatile: these locals must keep their values across a longjmp()
   * back into this frame. */
  volatile vpx_codec_err_t res = VPX_CODEC_INVALID_PARAM;
  volatile unsigned int resolution_change = 0;
  unsigned int w, h;

  /* Whole-frame mode with no input: nothing to do. */
  if (!ctx->fragments.enabled && (data == NULL && data_sz == 0)) {
    return 0;
  }

  /* Update the input fragment data */
  if (update_fragments(ctx, data, data_sz, &res) <= 0) return res;

  /* Determine the stream parameters. Note that we rely on peek_si to
   * validate that we have a buffer that does not wrap around the top
   * of the heap.
   */
  w = ctx->si.w;
  h = ctx->si.h;

  if (ctx->fragments.ptrs[0]) {
    res = vp8_peek_si_internal(ctx->fragments.ptrs[0], ctx->fragments.sizes[0],
                               &ctx->si, ctx->decrypt_cb, ctx->decrypt_state);
  }

  if ((res == VPX_CODEC_UNSUP_BITSTREAM) && !ctx->si.is_kf) {
    /* the peek function returns an error for non keyframes, however for
     * this case, it is not an error */
    res = VPX_CODEC_OK;
  }

  /* The stream must start with a keyframe. */
  if (!ctx->decoder_init && !ctx->si.is_kf) res = VPX_CODEC_UNSUP_BITSTREAM;

  if ((ctx->si.h != h) || (ctx->si.w != w)) resolution_change = 1;

#if CONFIG_MULTITHREAD
  /* After a multithreaded decode error all threads were shut down;
   * recreate them before decoding the next frame. */
  if (!res && ctx->restart_threads) {
    struct frame_buffers *fb = &ctx->yv12_frame_buffers;
    VP8D_COMP *pbi = ctx->yv12_frame_buffers.pbi[0];
    VP8_COMMON *const pc = &pbi->common;
    if (setjmp(pbi->common.error.jmp)) {
      /* Thread restart failed: drop the decoder instance entirely. */
      vp8_remove_decoder_instances(fb);
      vp8_zero(fb->pbi);
      vpx_clear_system_state();
      return VPX_CODEC_ERROR;
    }
    pbi->common.error.setjmp = 1;
    pbi->max_threads = ctx->cfg.threads;
    vp8_decoder_create_threads(pbi);
    if (vpx_atomic_load_acquire(&pbi->b_multithreaded_rd)) {
      vp8mt_alloc_temp_buffers(pbi, pc->Width, pc->mb_rows);
    }
    ctx->restart_threads = 0;
    pbi->common.error.setjmp = 0;
  }
#endif
  /* Initialize the decoder instance on the first frame*/
  if (!res && !ctx->decoder_init) {
    VP8D_CONFIG oxcf;

    oxcf.Width = ctx->si.w;
    oxcf.Height = ctx->si.h;
    oxcf.Version = 9;
    oxcf.postprocess = 0;
    oxcf.max_threads = ctx->cfg.threads;
    oxcf.error_concealment =
        (ctx->base.init_flags & VPX_CODEC_USE_ERROR_CONCEALMENT);

    /* If postprocessing was enabled by the application and a
     * configuration has not been provided, default it.
     */
    if (!ctx->postproc_cfg_set &&
        (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)) {
      ctx->postproc_cfg.post_proc_flag =
          VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE;
      ctx->postproc_cfg.deblocking_level = 4;
      ctx->postproc_cfg.noise_level = 0;
    }

    res = vp8_create_decoder_instances(&ctx->yv12_frame_buffers, &oxcf);
    if (res == VPX_CODEC_OK) ctx->decoder_init = 1;
  }

  /* Set these even if already initialized. The caller may have changed the
   * decrypt config between frames.
   */
  if (ctx->decoder_init) {
    ctx->yv12_frame_buffers.pbi[0]->decrypt_cb = ctx->decrypt_cb;
    ctx->yv12_frame_buffers.pbi[0]->decrypt_state = ctx->decrypt_state;
  }

  if (!res) {
    VP8D_COMP *pbi = ctx->yv12_frame_buffers.pbi[0];
    VP8_COMMON *const pc = &pbi->common;
    if (resolution_change) {
      MACROBLOCKD *const xd = &pbi->mb;
#if CONFIG_MULTITHREAD
      int i;
#endif
      pc->Width = ctx->si.w;
      pc->Height = ctx->si.h;
      {
        int prev_mb_rows = pc->mb_rows;

        if (setjmp(pbi->common.error.jmp)) {
          pbi->common.error.setjmp = 0;
          /* on failure clear the cached resolution to ensure a full
           * reallocation is attempted on resync. */
          ctx->si.w = 0;
          ctx->si.h = 0;
          vpx_clear_system_state();
          /* same return value as used in vp8dx_receive_compressed_data */
          return -1;
        }

        pbi->common.error.setjmp = 1;

        if (pc->Width <= 0) {
          pc->Width = w;
          vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                             "Invalid frame width");
        }

        if (pc->Height <= 0) {
          pc->Height = h;
          vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                             "Invalid frame height");
        }

        if (vp8_alloc_frame_buffers(pc, pc->Width, pc->Height)) {
          vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                             "Failed to allocate frame buffers");
        }

        /* Re-point the macroblock descriptor at the new buffers. */
        xd->pre = pc->yv12_fb[pc->lst_fb_idx];
        xd->dst = pc->yv12_fb[pc->new_fb_idx];

#if CONFIG_MULTITHREAD
        for (i = 0; i < pbi->allocated_decoding_thread_count; ++i) {
          pbi->mb_row_di[i].mbd.dst = pc->yv12_fb[pc->new_fb_idx];
          vp8_build_block_doffsets(&pbi->mb_row_di[i].mbd);
        }
#endif
        vp8_build_block_doffsets(&pbi->mb);

/* allocate memory for last frame MODE_INFO array */
#if CONFIG_ERROR_CONCEALMENT
        if (pbi->ec_enabled) {
          /* old prev_mip was released by vp8_de_alloc_frame_buffers()
           * called in vp8_alloc_frame_buffers() */
          pc->prev_mip = vpx_calloc((pc->mb_cols + 1) * (pc->mb_rows + 1),
                                    sizeof(MODE_INFO));
          if (!pc->prev_mip) {
            vp8_de_alloc_frame_buffers(pc);
            vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate"
                               "last frame MODE_INFO array");
          }

          pc->prev_mi = pc->prev_mip + pc->mode_info_stride + 1;

          if (vp8_alloc_overlap_lists(pbi))
            vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate overlap lists "
                               "for error concealment");
        }
#endif

#if CONFIG_MULTITHREAD
        if (vpx_atomic_load_acquire(&pbi->b_multithreaded_rd)) {
          vp8mt_alloc_temp_buffers(pbi, pc->Width, prev_mb_rows);
        }
#else
        (void)prev_mb_rows;
#endif
      }

      pbi->common.error.setjmp = 0;

      /* required to get past the first get_free_fb() call */
      pbi->common.fb_idx_ref_cnt[0] = 0;
    }

    if (setjmp(pbi->common.error.jmp)) {
      /* We do not know if the missing frame(s) was supposed to update
       * any of the reference buffers, but we act conservative and
       * mark only the last buffer as corrupted.
       */
      pc->yv12_fb[pc->lst_fb_idx].corrupted = 1;

      if (pc->fb_idx_ref_cnt[pc->new_fb_idx] > 0) {
        pc->fb_idx_ref_cnt[pc->new_fb_idx]--;
      }
      pc->error.setjmp = 0;
#if CONFIG_MULTITHREAD
      if (pbi->restart_threads) {
        /* Force full re-init on the next call. */
        ctx->si.w = 0;
        ctx->si.h = 0;
        ctx->restart_threads = 1;
      }
#endif
      res = update_error_state(ctx, &pbi->common.error);
      return res;
    }

    pbi->common.error.setjmp = 1;

    /* update the pbi fragment data */
    pbi->fragments = ctx->fragments;
#if CONFIG_MULTITHREAD
    pbi->restart_threads = 0;
#endif
    ctx->user_priv = user_priv;
    if (vp8dx_receive_compressed_data(pbi, data_sz, data, deadline)) {
      res = update_error_state(ctx, &pbi->common.error);
    }

    /* get ready for the next series of fragments */
    ctx->fragments.count = 0;
  }

  return res;
}
  426. static vpx_image_t *vp8_get_frame(vpx_codec_alg_priv_t *ctx,
  427. vpx_codec_iter_t *iter) {
  428. vpx_image_t *img = NULL;
  429. /* iter acts as a flip flop, so an image is only returned on the first
  430. * call to get_frame.
  431. */
  432. if (!(*iter) && ctx->yv12_frame_buffers.pbi[0]) {
  433. YV12_BUFFER_CONFIG sd;
  434. int64_t time_stamp = 0, time_end_stamp = 0;
  435. vp8_ppflags_t flags;
  436. vp8_zero(flags);
  437. if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC) {
  438. flags.post_proc_flag = ctx->postproc_cfg.post_proc_flag;
  439. flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
  440. flags.noise_level = ctx->postproc_cfg.noise_level;
  441. }
  442. if (0 == vp8dx_get_raw_frame(ctx->yv12_frame_buffers.pbi[0], &sd,
  443. &time_stamp, &time_end_stamp, &flags)) {
  444. yuvconfig2image(&ctx->img, &sd, ctx->user_priv);
  445. img = &ctx->img;
  446. *iter = img;
  447. }
  448. }
  449. return img;
  450. }
  451. static vpx_codec_err_t image2yuvconfig(const vpx_image_t *img,
  452. YV12_BUFFER_CONFIG *yv12) {
  453. const int y_w = img->d_w;
  454. const int y_h = img->d_h;
  455. const int uv_w = (img->d_w + 1) / 2;
  456. const int uv_h = (img->d_h + 1) / 2;
  457. vpx_codec_err_t res = VPX_CODEC_OK;
  458. yv12->y_buffer = img->planes[VPX_PLANE_Y];
  459. yv12->u_buffer = img->planes[VPX_PLANE_U];
  460. yv12->v_buffer = img->planes[VPX_PLANE_V];
  461. yv12->y_crop_width = y_w;
  462. yv12->y_crop_height = y_h;
  463. yv12->y_width = y_w;
  464. yv12->y_height = y_h;
  465. yv12->uv_crop_width = uv_w;
  466. yv12->uv_crop_height = uv_h;
  467. yv12->uv_width = uv_w;
  468. yv12->uv_height = uv_h;
  469. yv12->y_stride = img->stride[VPX_PLANE_Y];
  470. yv12->uv_stride = img->stride[VPX_PLANE_U];
  471. yv12->border = (img->stride[VPX_PLANE_Y] - img->d_w) / 2;
  472. return res;
  473. }
  474. static vpx_codec_err_t vp8_set_reference(vpx_codec_alg_priv_t *ctx,
  475. va_list args) {
  476. vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
  477. if (data) {
  478. vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
  479. YV12_BUFFER_CONFIG sd;
  480. image2yuvconfig(&frame->img, &sd);
  481. return vp8dx_set_reference(ctx->yv12_frame_buffers.pbi[0],
  482. frame->frame_type, &sd);
  483. } else {
  484. return VPX_CODEC_INVALID_PARAM;
  485. }
  486. }
  487. static vpx_codec_err_t vp8_get_reference(vpx_codec_alg_priv_t *ctx,
  488. va_list args) {
  489. vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
  490. if (data) {
  491. vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
  492. YV12_BUFFER_CONFIG sd;
  493. image2yuvconfig(&frame->img, &sd);
  494. return vp8dx_get_reference(ctx->yv12_frame_buffers.pbi[0],
  495. frame->frame_type, &sd);
  496. } else {
  497. return VPX_CODEC_INVALID_PARAM;
  498. }
  499. }
  500. static vpx_codec_err_t vp8_get_quantizer(vpx_codec_alg_priv_t *ctx,
  501. va_list args) {
  502. int *const arg = va_arg(args, int *);
  503. if (arg == NULL) return VPX_CODEC_INVALID_PARAM;
  504. *arg = vp8dx_get_quantizer(ctx->yv12_frame_buffers.pbi[0]);
  505. return VPX_CODEC_OK;
  506. }
  507. static vpx_codec_err_t vp8_set_postproc(vpx_codec_alg_priv_t *ctx,
  508. va_list args) {
  509. #if CONFIG_POSTPROC
  510. vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);
  511. if (data) {
  512. ctx->postproc_cfg_set = 1;
  513. ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
  514. return VPX_CODEC_OK;
  515. } else {
  516. return VPX_CODEC_INVALID_PARAM;
  517. }
  518. #else
  519. (void)ctx;
  520. (void)args;
  521. return VPX_CODEC_INCAPABLE;
  522. #endif
  523. }
  524. static vpx_codec_err_t vp8_get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
  525. va_list args) {
  526. int *update_info = va_arg(args, int *);
  527. if (update_info) {
  528. VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];
  529. *update_info = pbi->common.refresh_alt_ref_frame * (int)VP8_ALTR_FRAME +
  530. pbi->common.refresh_golden_frame * (int)VP8_GOLD_FRAME +
  531. pbi->common.refresh_last_frame * (int)VP8_LAST_FRAME;
  532. return VPX_CODEC_OK;
  533. } else {
  534. return VPX_CODEC_INVALID_PARAM;
  535. }
  536. }
  537. static vpx_codec_err_t vp8_get_last_ref_frame(vpx_codec_alg_priv_t *ctx,
  538. va_list args) {
  539. int *ref_info = va_arg(args, int *);
  540. if (ref_info) {
  541. VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];
  542. VP8_COMMON *oci = &pbi->common;
  543. *ref_info =
  544. (vp8dx_references_buffer(oci, ALTREF_FRAME) ? VP8_ALTR_FRAME : 0) |
  545. (vp8dx_references_buffer(oci, GOLDEN_FRAME) ? VP8_GOLD_FRAME : 0) |
  546. (vp8dx_references_buffer(oci, LAST_FRAME) ? VP8_LAST_FRAME : 0);
  547. return VPX_CODEC_OK;
  548. } else {
  549. return VPX_CODEC_INVALID_PARAM;
  550. }
  551. }
  552. static vpx_codec_err_t vp8_get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
  553. va_list args) {
  554. int *corrupted = va_arg(args, int *);
  555. VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];
  556. if (corrupted && pbi) {
  557. const YV12_BUFFER_CONFIG *const frame = pbi->common.frame_to_show;
  558. if (frame == NULL) return VPX_CODEC_ERROR;
  559. *corrupted = frame->corrupted;
  560. return VPX_CODEC_OK;
  561. } else {
  562. return VPX_CODEC_INVALID_PARAM;
  563. }
  564. }
  565. static vpx_codec_err_t vp8_set_decryptor(vpx_codec_alg_priv_t *ctx,
  566. va_list args) {
  567. vpx_decrypt_init *init = va_arg(args, vpx_decrypt_init *);
  568. if (init) {
  569. ctx->decrypt_cb = init->decrypt_cb;
  570. ctx->decrypt_state = init->decrypt_state;
  571. } else {
  572. ctx->decrypt_cb = NULL;
  573. ctx->decrypt_state = NULL;
  574. }
  575. return VPX_CODEC_OK;
  576. }
/* Control-ID dispatch table for the VP8 decoder; scanned linearly and
 * terminated by the { -1, NULL } sentinel. */
vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] = {
  { VP8_SET_REFERENCE, vp8_set_reference },
  { VP8_COPY_REFERENCE, vp8_get_reference },
  { VP8_SET_POSTPROC, vp8_set_postproc },
  { VP8D_GET_LAST_REF_UPDATES, vp8_get_last_ref_updates },
  { VP8D_GET_FRAME_CORRUPTED, vp8_get_frame_corrupted },
  { VP8D_GET_LAST_REF_USED, vp8_get_last_ref_frame },
  { VPXD_GET_LAST_QUANTIZER, vp8_get_quantizer },
  { VPXD_SET_DECRYPTOR, vp8_set_decryptor },
  { -1, NULL },
};
#ifndef VERSION_STRING
#define VERSION_STRING
#endif
/* Public decoder interface descriptor exported to applications via
 * vpx_codec_vp8_dx(). Decoder-only: the encoder function table is empty. */
CODEC_INTERFACE(vpx_codec_vp8_dx) = {
  "WebM Project VP8 Decoder" VERSION_STRING,
  VPX_CODEC_INTERNAL_ABI_VERSION,
  VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC | VP8_CAP_ERROR_CONCEALMENT |
      VPX_CODEC_CAP_INPUT_FRAGMENTS,
  /* vpx_codec_caps_t caps; */
  vp8_init,     /* vpx_codec_init_fn_t init; */
  vp8_destroy,  /* vpx_codec_destroy_fn_t destroy; */
  vp8_ctf_maps, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
  {
      vp8_peek_si,   /* vpx_codec_peek_si_fn_t peek_si; */
      vp8_get_si,    /* vpx_codec_get_si_fn_t get_si; */
      vp8_decode,    /* vpx_codec_decode_fn_t decode; */
      vp8_get_frame, /* vpx_codec_frame_get_fn_t frame_get; */
      NULL,
  },
  {
      /* encoder functions */
      0, NULL, /* vpx_codec_enc_cfg_map_t */
      NULL,    /* vpx_codec_encode_fn_t */
      NULL,    /* vpx_codec_get_cx_data_fn_t */
      NULL,    /* vpx_codec_enc_config_set_fn_t */
      NULL,    /* vpx_codec_get_global_headers_fn_t */
      NULL,    /* vpx_codec_get_preview_frame_fn_t */
      NULL     /* vpx_codec_enc_mr_get_mem_loc_fn_t */
  }
};