/* vp8_dx_iface.c -- VP8 decoder interface glue for the libvpx codec API. */
/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
  10. #include <assert.h>
  11. #include <stdlib.h>
  12. #include <string.h>
  13. #include "./vp8_rtcd.h"
  14. #include "./vpx_dsp_rtcd.h"
  15. #include "./vpx_scale_rtcd.h"
  16. #include "vpx/vpx_decoder.h"
  17. #include "vpx/vp8dx.h"
  18. #include "vpx/internal/vpx_codec_internal.h"
  19. #include "vpx_version.h"
  20. #include "common/alloccommon.h"
  21. #include "common/common.h"
  22. #include "common/onyxd.h"
  23. #include "decoder/onyxd_int.h"
  24. #include "vpx_dsp/vpx_dsp_common.h"
  25. #include "vpx_mem/vpx_mem.h"
  26. #if CONFIG_ERROR_CONCEALMENT
  27. #include "decoder/error_concealment.h"
  28. #endif
  29. #include "decoder/decoderthreading.h"
/* Advertise optional capabilities only when the feature is compiled in. */
#define VP8_CAP_POSTPROC (CONFIG_POSTPROC ? VPX_CODEC_CAP_POSTPROC : 0)
#define VP8_CAP_ERROR_CONCEALMENT \
  (CONFIG_ERROR_CONCEALMENT ? VPX_CODEC_CAP_ERROR_CONCEALMENT : 0)
/* VP8 adds nothing beyond the generic stream info. */
typedef vpx_codec_stream_info_t vp8_stream_info_t;
/* Structures for handling memory allocations */
typedef enum { VP8_SEG_ALG_PRIV = 256, VP8_SEG_MAX } mem_seg_id_t;
/* Element count of a true array (not valid on decayed pointers). */
#define NELEMENTS(x) ((int)(sizeof(x) / sizeof(x[0])))
/* Per-instance decoder state hanging off a vpx_codec_ctx_t. */
struct vpx_codec_alg_priv {
  vpx_codec_priv_t base;     /* must be first: generic codec-private header */
  vpx_codec_dec_cfg_t cfg;   /* internal copy of the caller's decode config */
  vp8_stream_info_t si;      /* cached stream info from the last peek */
  int decoder_init;          /* nonzero once the decoder instance exists */
  int postproc_cfg_set;      /* app supplied an explicit postproc config */
  vp8_postproc_cfg_t postproc_cfg;
  vpx_decrypt_cb decrypt_cb; /* optional decryptor for encrypted input */
  void *decrypt_state;
  vpx_image_t img;           /* image wrapper handed back by vp8_get_frame */
  int img_setup;
  struct frame_buffers yv12_frame_buffers;
  void *user_priv;           /* opaque pointer echoed on the output image */
  FRAGMENT_DATA fragments;   /* input fragments buffered for this frame */
};
  52. static int vp8_init_ctx(vpx_codec_ctx_t *ctx) {
  53. vpx_codec_alg_priv_t *priv =
  54. (vpx_codec_alg_priv_t *)vpx_calloc(1, sizeof(*priv));
  55. if (!priv) return 1;
  56. ctx->priv = (vpx_codec_priv_t *)priv;
  57. ctx->priv->init_flags = ctx->init_flags;
  58. priv->si.sz = sizeof(priv->si);
  59. priv->decrypt_cb = NULL;
  60. priv->decrypt_state = NULL;
  61. if (ctx->config.dec) {
  62. /* Update the reference to the config structure to an internal copy. */
  63. priv->cfg = *ctx->config.dec;
  64. ctx->config.dec = &priv->cfg;
  65. }
  66. return 0;
  67. }
  68. static vpx_codec_err_t vp8_init(vpx_codec_ctx_t *ctx,
  69. vpx_codec_priv_enc_mr_cfg_t *data) {
  70. vpx_codec_err_t res = VPX_CODEC_OK;
  71. vpx_codec_alg_priv_t *priv = NULL;
  72. (void)data;
  73. vp8_rtcd();
  74. vpx_dsp_rtcd();
  75. vpx_scale_rtcd();
  76. /* This function only allocates space for the vpx_codec_alg_priv_t
  77. * structure. More memory may be required at the time the stream
  78. * information becomes known.
  79. */
  80. if (!ctx->priv) {
  81. if (vp8_init_ctx(ctx)) return VPX_CODEC_MEM_ERROR;
  82. priv = (vpx_codec_alg_priv_t *)ctx->priv;
  83. /* initialize number of fragments to zero */
  84. priv->fragments.count = 0;
  85. /* is input fragments enabled? */
  86. priv->fragments.enabled =
  87. (priv->base.init_flags & VPX_CODEC_USE_INPUT_FRAGMENTS);
  88. /*post processing level initialized to do nothing */
  89. } else {
  90. priv = (vpx_codec_alg_priv_t *)ctx->priv;
  91. }
  92. priv->yv12_frame_buffers.use_frame_threads =
  93. (ctx->priv->init_flags & VPX_CODEC_USE_FRAME_THREADING);
  94. /* for now, disable frame threading */
  95. priv->yv12_frame_buffers.use_frame_threads = 0;
  96. if (priv->yv12_frame_buffers.use_frame_threads &&
  97. ((ctx->priv->init_flags & VPX_CODEC_USE_ERROR_CONCEALMENT) ||
  98. (ctx->priv->init_flags & VPX_CODEC_USE_INPUT_FRAGMENTS))) {
  99. /* row-based threading, error concealment, and input fragments will
  100. * not be supported when using frame-based threading */
  101. res = VPX_CODEC_INVALID_PARAM;
  102. }
  103. return res;
  104. }
  105. static vpx_codec_err_t vp8_destroy(vpx_codec_alg_priv_t *ctx) {
  106. vp8_remove_decoder_instances(&ctx->yv12_frame_buffers);
  107. vpx_free(ctx);
  108. return VPX_CODEC_OK;
  109. }
  110. static vpx_codec_err_t vp8_peek_si_internal(const uint8_t *data,
  111. unsigned int data_sz,
  112. vpx_codec_stream_info_t *si,
  113. vpx_decrypt_cb decrypt_cb,
  114. void *decrypt_state) {
  115. vpx_codec_err_t res = VPX_CODEC_OK;
  116. assert(data != NULL);
  117. if (data + data_sz <= data) {
  118. res = VPX_CODEC_INVALID_PARAM;
  119. } else {
  120. /* Parse uncompresssed part of key frame header.
  121. * 3 bytes:- including version, frame type and an offset
  122. * 3 bytes:- sync code (0x9d, 0x01, 0x2a)
  123. * 4 bytes:- including image width and height in the lowest 14 bits
  124. * of each 2-byte value.
  125. */
  126. uint8_t clear_buffer[10];
  127. const uint8_t *clear = data;
  128. if (decrypt_cb) {
  129. int n = VPXMIN(sizeof(clear_buffer), data_sz);
  130. decrypt_cb(decrypt_state, data, clear_buffer, n);
  131. clear = clear_buffer;
  132. }
  133. si->is_kf = 0;
  134. if (data_sz >= 10 && !(clear[0] & 0x01)) /* I-Frame */
  135. {
  136. si->is_kf = 1;
  137. /* vet via sync code */
  138. if (clear[3] != 0x9d || clear[4] != 0x01 || clear[5] != 0x2a) {
  139. return VPX_CODEC_UNSUP_BITSTREAM;
  140. }
  141. si->w = (clear[6] | (clear[7] << 8)) & 0x3fff;
  142. si->h = (clear[8] | (clear[9] << 8)) & 0x3fff;
  143. /*printf("w=%d, h=%d\n", si->w, si->h);*/
  144. if (!(si->h && si->w)) res = VPX_CODEC_CORRUPT_FRAME;
  145. } else {
  146. res = VPX_CODEC_UNSUP_BITSTREAM;
  147. }
  148. }
  149. return res;
  150. }
  151. static vpx_codec_err_t vp8_peek_si(const uint8_t *data, unsigned int data_sz,
  152. vpx_codec_stream_info_t *si) {
  153. return vp8_peek_si_internal(data, data_sz, si, NULL, NULL);
  154. }
  155. static vpx_codec_err_t vp8_get_si(vpx_codec_alg_priv_t *ctx,
  156. vpx_codec_stream_info_t *si) {
  157. unsigned int sz;
  158. if (si->sz >= sizeof(vp8_stream_info_t)) {
  159. sz = sizeof(vp8_stream_info_t);
  160. } else {
  161. sz = sizeof(vpx_codec_stream_info_t);
  162. }
  163. memcpy(si, &ctx->si, sz);
  164. si->sz = sz;
  165. return VPX_CODEC_OK;
  166. }
  167. static vpx_codec_err_t update_error_state(
  168. vpx_codec_alg_priv_t *ctx, const struct vpx_internal_error_info *error) {
  169. vpx_codec_err_t res;
  170. if ((res = error->error_code)) {
  171. ctx->base.err_detail = error->has_detail ? error->detail : NULL;
  172. }
  173. return res;
  174. }
  175. static void yuvconfig2image(vpx_image_t *img, const YV12_BUFFER_CONFIG *yv12,
  176. void *user_priv) {
  177. /** vpx_img_wrap() doesn't allow specifying independent strides for
  178. * the Y, U, and V planes, nor other alignment adjustments that
  179. * might be representable by a YV12_BUFFER_CONFIG, so we just
  180. * initialize all the fields.*/
  181. img->fmt = VPX_IMG_FMT_I420;
  182. img->w = yv12->y_stride;
  183. img->h = (yv12->y_height + 2 * VP8BORDERINPIXELS + 15) & ~15;
  184. img->d_w = img->r_w = yv12->y_width;
  185. img->d_h = img->r_h = yv12->y_height;
  186. img->x_chroma_shift = 1;
  187. img->y_chroma_shift = 1;
  188. img->planes[VPX_PLANE_Y] = yv12->y_buffer;
  189. img->planes[VPX_PLANE_U] = yv12->u_buffer;
  190. img->planes[VPX_PLANE_V] = yv12->v_buffer;
  191. img->planes[VPX_PLANE_ALPHA] = NULL;
  192. img->stride[VPX_PLANE_Y] = yv12->y_stride;
  193. img->stride[VPX_PLANE_U] = yv12->uv_stride;
  194. img->stride[VPX_PLANE_V] = yv12->uv_stride;
  195. img->stride[VPX_PLANE_ALPHA] = yv12->y_stride;
  196. img->bit_depth = 8;
  197. img->bps = 12;
  198. img->user_priv = user_priv;
  199. img->img_data = yv12->buffer_alloc;
  200. img->img_data_owner = 0;
  201. img->self_allocd = 0;
  202. }
  203. static int update_fragments(vpx_codec_alg_priv_t *ctx, const uint8_t *data,
  204. unsigned int data_sz, vpx_codec_err_t *res) {
  205. *res = VPX_CODEC_OK;
  206. if (ctx->fragments.count == 0) {
  207. /* New frame, reset fragment pointers and sizes */
  208. memset((void *)ctx->fragments.ptrs, 0, sizeof(ctx->fragments.ptrs));
  209. memset(ctx->fragments.sizes, 0, sizeof(ctx->fragments.sizes));
  210. }
  211. if (ctx->fragments.enabled && !(data == NULL && data_sz == 0)) {
  212. /* Store a pointer to this fragment and return. We haven't
  213. * received the complete frame yet, so we will wait with decoding.
  214. */
  215. ctx->fragments.ptrs[ctx->fragments.count] = data;
  216. ctx->fragments.sizes[ctx->fragments.count] = data_sz;
  217. ctx->fragments.count++;
  218. if (ctx->fragments.count > (1 << EIGHT_PARTITION) + 1) {
  219. ctx->fragments.count = 0;
  220. *res = VPX_CODEC_INVALID_PARAM;
  221. return -1;
  222. }
  223. return 0;
  224. }
  225. if (!ctx->fragments.enabled && (data == NULL && data_sz == 0)) {
  226. return 0;
  227. }
  228. if (!ctx->fragments.enabled) {
  229. ctx->fragments.ptrs[0] = data;
  230. ctx->fragments.sizes[0] = data_sz;
  231. ctx->fragments.count = 1;
  232. }
  233. return 1;
  234. }
/* Decode one frame (or buffer one fragment when input fragments are
 * enabled). Lazily creates the decoder instance on the first keyframe
 * and handles mid-stream resolution changes by reallocating the frame
 * buffers. Error recovery inside the reallocation path uses
 * setjmp/longjmp via pbi->common.error, so statement order in that
 * region is load-bearing. */
static vpx_codec_err_t vp8_decode(vpx_codec_alg_priv_t *ctx,
                                  const uint8_t *data, unsigned int data_sz,
                                  void *user_priv, long deadline) {
  vpx_codec_err_t res = VPX_CODEC_OK;
  unsigned int resolution_change = 0;
  unsigned int w, h;

  /* A flush call with no data and no fragment mode is a no-op
   * (0 == VPX_CODEC_OK). */
  if (!ctx->fragments.enabled && (data == NULL && data_sz == 0)) {
    return 0;
  }

  /* Update the input fragment data */
  if (update_fragments(ctx, data, data_sz, &res) <= 0) return res;

  /* Determine the stream parameters. Note that we rely on peek_si to
   * validate that we have a buffer that does not wrap around the top
   * of the heap.
   */
  w = ctx->si.w;
  h = ctx->si.h;

  res = vp8_peek_si_internal(ctx->fragments.ptrs[0], ctx->fragments.sizes[0],
                             &ctx->si, ctx->decrypt_cb, ctx->decrypt_state);

  if ((res == VPX_CODEC_UNSUP_BITSTREAM) && !ctx->si.is_kf) {
    /* the peek function returns an error for non keyframes, however for
     * this case, it is not an error */
    res = VPX_CODEC_OK;
  }

  /* The stream must start with a keyframe. */
  if (!ctx->decoder_init && !ctx->si.is_kf) res = VPX_CODEC_UNSUP_BITSTREAM;

  if ((ctx->si.h != h) || (ctx->si.w != w)) resolution_change = 1;

  /* Initialize the decoder instance on the first frame*/
  if (!res && !ctx->decoder_init) {
    VP8D_CONFIG oxcf;

    oxcf.Width = ctx->si.w;
    oxcf.Height = ctx->si.h;
    oxcf.Version = 9;
    oxcf.postprocess = 0;
    oxcf.max_threads = ctx->cfg.threads;
    oxcf.error_concealment =
        (ctx->base.init_flags & VPX_CODEC_USE_ERROR_CONCEALMENT);

    /* If postprocessing was enabled by the application and a
     * configuration has not been provided, default it.
     */
    if (!ctx->postproc_cfg_set &&
        (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC)) {
      ctx->postproc_cfg.post_proc_flag =
          VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE;
      ctx->postproc_cfg.deblocking_level = 4;
      ctx->postproc_cfg.noise_level = 0;
    }

    res = vp8_create_decoder_instances(&ctx->yv12_frame_buffers, &oxcf);
    if (res == VPX_CODEC_OK) ctx->decoder_init = 1;
  }

  /* Set these even if already initialized. The caller may have changed the
   * decrypt config between frames.
   */
  if (ctx->decoder_init) {
    ctx->yv12_frame_buffers.pbi[0]->decrypt_cb = ctx->decrypt_cb;
    ctx->yv12_frame_buffers.pbi[0]->decrypt_state = ctx->decrypt_state;
  }

  if (!res) {
    VP8D_COMP *pbi = ctx->yv12_frame_buffers.pbi[0];
    if (resolution_change) {
      VP8_COMMON *const pc = &pbi->common;
      MACROBLOCKD *const xd = &pbi->mb;
#if CONFIG_MULTITHREAD
      int i;
#endif
      pc->Width = ctx->si.w;
      pc->Height = ctx->si.h;
      {
        int prev_mb_rows = pc->mb_rows;

        /* Any vpx_internal_error() below longjmps back here. */
        if (setjmp(pbi->common.error.jmp)) {
          pbi->common.error.setjmp = 0;
          /* on failure clear the cached resolution to ensure a full
           * reallocation is attempted on resync. */
          ctx->si.w = 0;
          ctx->si.h = 0;
          vp8_clear_system_state();
          /* same return value as used in vp8dx_receive_compressed_data */
          return -1;
        }

        pbi->common.error.setjmp = 1;

        if (pc->Width <= 0) {
          pc->Width = w;
          vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                             "Invalid frame width");
        }

        if (pc->Height <= 0) {
          pc->Height = h;
          vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                             "Invalid frame height");
        }

        if (vp8_alloc_frame_buffers(pc, pc->Width, pc->Height)) {
          vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                             "Failed to allocate frame buffers");
        }

        /* Re-point the macroblock descriptor at the reallocated
         * reference and destination buffers. */
        xd->pre = pc->yv12_fb[pc->lst_fb_idx];
        xd->dst = pc->yv12_fb[pc->new_fb_idx];

#if CONFIG_MULTITHREAD
        for (i = 0; i < pbi->allocated_decoding_thread_count; ++i) {
          pbi->mb_row_di[i].mbd.dst = pc->yv12_fb[pc->new_fb_idx];
          vp8_build_block_doffsets(&pbi->mb_row_di[i].mbd);
        }
#endif
        vp8_build_block_doffsets(&pbi->mb);

/* allocate memory for last frame MODE_INFO array */
#if CONFIG_ERROR_CONCEALMENT
        if (pbi->ec_enabled) {
          /* old prev_mip was released by vp8_de_alloc_frame_buffers()
           * called in vp8_alloc_frame_buffers() */
          pc->prev_mip = vpx_calloc((pc->mb_cols + 1) * (pc->mb_rows + 1),
                                    sizeof(MODE_INFO));

          if (!pc->prev_mip) {
            vp8_de_alloc_frame_buffers(pc);
            vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate"
                               "last frame MODE_INFO array");
          }

          pc->prev_mi = pc->prev_mip + pc->mode_info_stride + 1;

          if (vp8_alloc_overlap_lists(pbi))
            vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                               "Failed to allocate overlap lists "
                               "for error concealment");
        }
#endif

#if CONFIG_MULTITHREAD
        if (pbi->b_multithreaded_rd) {
          vp8mt_alloc_temp_buffers(pbi, pc->Width, prev_mb_rows);
        }
#else
        (void)prev_mb_rows;
#endif
      }

      pbi->common.error.setjmp = 0;

      /* required to get past the first get_free_fb() call */
      pbi->common.fb_idx_ref_cnt[0] = 0;
    }

    /* update the pbi fragment data */
    pbi->fragments = ctx->fragments;

    ctx->user_priv = user_priv;
    if (vp8dx_receive_compressed_data(pbi, data_sz, data, deadline)) {
      res = update_error_state(ctx, &pbi->common.error);
    }

    /* get ready for the next series of fragments */
    ctx->fragments.count = 0;
  }

  return res;
}
  380. static vpx_image_t *vp8_get_frame(vpx_codec_alg_priv_t *ctx,
  381. vpx_codec_iter_t *iter) {
  382. vpx_image_t *img = NULL;
  383. /* iter acts as a flip flop, so an image is only returned on the first
  384. * call to get_frame.
  385. */
  386. if (!(*iter) && ctx->yv12_frame_buffers.pbi[0]) {
  387. YV12_BUFFER_CONFIG sd;
  388. int64_t time_stamp = 0, time_end_stamp = 0;
  389. vp8_ppflags_t flags;
  390. vp8_zero(flags);
  391. if (ctx->base.init_flags & VPX_CODEC_USE_POSTPROC) {
  392. flags.post_proc_flag = ctx->postproc_cfg.post_proc_flag;
  393. flags.deblocking_level = ctx->postproc_cfg.deblocking_level;
  394. flags.noise_level = ctx->postproc_cfg.noise_level;
  395. }
  396. if (0 == vp8dx_get_raw_frame(ctx->yv12_frame_buffers.pbi[0], &sd,
  397. &time_stamp, &time_end_stamp, &flags)) {
  398. yuvconfig2image(&ctx->img, &sd, ctx->user_priv);
  399. img = &ctx->img;
  400. *iter = img;
  401. }
  402. }
  403. return img;
  404. }
  405. static vpx_codec_err_t image2yuvconfig(const vpx_image_t *img,
  406. YV12_BUFFER_CONFIG *yv12) {
  407. const int y_w = img->d_w;
  408. const int y_h = img->d_h;
  409. const int uv_w = (img->d_w + 1) / 2;
  410. const int uv_h = (img->d_h + 1) / 2;
  411. vpx_codec_err_t res = VPX_CODEC_OK;
  412. yv12->y_buffer = img->planes[VPX_PLANE_Y];
  413. yv12->u_buffer = img->planes[VPX_PLANE_U];
  414. yv12->v_buffer = img->planes[VPX_PLANE_V];
  415. yv12->y_crop_width = y_w;
  416. yv12->y_crop_height = y_h;
  417. yv12->y_width = y_w;
  418. yv12->y_height = y_h;
  419. yv12->uv_crop_width = uv_w;
  420. yv12->uv_crop_height = uv_h;
  421. yv12->uv_width = uv_w;
  422. yv12->uv_height = uv_h;
  423. yv12->y_stride = img->stride[VPX_PLANE_Y];
  424. yv12->uv_stride = img->stride[VPX_PLANE_U];
  425. yv12->border = (img->stride[VPX_PLANE_Y] - img->d_w) / 2;
  426. return res;
  427. }
  428. static vpx_codec_err_t vp8_set_reference(vpx_codec_alg_priv_t *ctx,
  429. va_list args) {
  430. vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
  431. if (data && !ctx->yv12_frame_buffers.use_frame_threads) {
  432. vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
  433. YV12_BUFFER_CONFIG sd;
  434. image2yuvconfig(&frame->img, &sd);
  435. return vp8dx_set_reference(ctx->yv12_frame_buffers.pbi[0],
  436. frame->frame_type, &sd);
  437. } else {
  438. return VPX_CODEC_INVALID_PARAM;
  439. }
  440. }
  441. static vpx_codec_err_t vp8_get_reference(vpx_codec_alg_priv_t *ctx,
  442. va_list args) {
  443. vpx_ref_frame_t *data = va_arg(args, vpx_ref_frame_t *);
  444. if (data && !ctx->yv12_frame_buffers.use_frame_threads) {
  445. vpx_ref_frame_t *frame = (vpx_ref_frame_t *)data;
  446. YV12_BUFFER_CONFIG sd;
  447. image2yuvconfig(&frame->img, &sd);
  448. return vp8dx_get_reference(ctx->yv12_frame_buffers.pbi[0],
  449. frame->frame_type, &sd);
  450. } else {
  451. return VPX_CODEC_INVALID_PARAM;
  452. }
  453. }
  454. static vpx_codec_err_t vp8_set_postproc(vpx_codec_alg_priv_t *ctx,
  455. va_list args) {
  456. #if CONFIG_POSTPROC
  457. vp8_postproc_cfg_t *data = va_arg(args, vp8_postproc_cfg_t *);
  458. if (data) {
  459. ctx->postproc_cfg_set = 1;
  460. ctx->postproc_cfg = *((vp8_postproc_cfg_t *)data);
  461. return VPX_CODEC_OK;
  462. } else {
  463. return VPX_CODEC_INVALID_PARAM;
  464. }
  465. #else
  466. (void)ctx;
  467. (void)args;
  468. return VPX_CODEC_INCAPABLE;
  469. #endif
  470. }
  471. static vpx_codec_err_t vp8_get_last_ref_updates(vpx_codec_alg_priv_t *ctx,
  472. va_list args) {
  473. int *update_info = va_arg(args, int *);
  474. if (update_info && !ctx->yv12_frame_buffers.use_frame_threads) {
  475. VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];
  476. *update_info = pbi->common.refresh_alt_ref_frame * (int)VP8_ALTR_FRAME +
  477. pbi->common.refresh_golden_frame * (int)VP8_GOLD_FRAME +
  478. pbi->common.refresh_last_frame * (int)VP8_LAST_FRAME;
  479. return VPX_CODEC_OK;
  480. } else {
  481. return VPX_CODEC_INVALID_PARAM;
  482. }
  483. }
  484. extern int vp8dx_references_buffer(VP8_COMMON *oci, int ref_frame);
  485. static vpx_codec_err_t vp8_get_last_ref_frame(vpx_codec_alg_priv_t *ctx,
  486. va_list args) {
  487. int *ref_info = va_arg(args, int *);
  488. if (ref_info && !ctx->yv12_frame_buffers.use_frame_threads) {
  489. VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];
  490. VP8_COMMON *oci = &pbi->common;
  491. *ref_info =
  492. (vp8dx_references_buffer(oci, ALTREF_FRAME) ? VP8_ALTR_FRAME : 0) |
  493. (vp8dx_references_buffer(oci, GOLDEN_FRAME) ? VP8_GOLD_FRAME : 0) |
  494. (vp8dx_references_buffer(oci, LAST_FRAME) ? VP8_LAST_FRAME : 0);
  495. return VPX_CODEC_OK;
  496. } else {
  497. return VPX_CODEC_INVALID_PARAM;
  498. }
  499. }
  500. static vpx_codec_err_t vp8_get_frame_corrupted(vpx_codec_alg_priv_t *ctx,
  501. va_list args) {
  502. int *corrupted = va_arg(args, int *);
  503. VP8D_COMP *pbi = (VP8D_COMP *)ctx->yv12_frame_buffers.pbi[0];
  504. if (corrupted && pbi) {
  505. const YV12_BUFFER_CONFIG *const frame = pbi->common.frame_to_show;
  506. if (frame == NULL) return VPX_CODEC_ERROR;
  507. *corrupted = frame->corrupted;
  508. return VPX_CODEC_OK;
  509. } else {
  510. return VPX_CODEC_INVALID_PARAM;
  511. }
  512. }
  513. static vpx_codec_err_t vp8_set_decryptor(vpx_codec_alg_priv_t *ctx,
  514. va_list args) {
  515. vpx_decrypt_init *init = va_arg(args, vpx_decrypt_init *);
  516. if (init) {
  517. ctx->decrypt_cb = init->decrypt_cb;
  518. ctx->decrypt_state = init->decrypt_state;
  519. } else {
  520. ctx->decrypt_cb = NULL;
  521. ctx->decrypt_state = NULL;
  522. }
  523. return VPX_CODEC_OK;
  524. }
/* Control-ID -> handler dispatch table; terminated by the {-1, NULL}
 * sentinel entry. */
vpx_codec_ctrl_fn_map_t vp8_ctf_maps[] = {
  { VP8_SET_REFERENCE, vp8_set_reference },
  { VP8_COPY_REFERENCE, vp8_get_reference },
  { VP8_SET_POSTPROC, vp8_set_postproc },
  { VP8D_GET_LAST_REF_UPDATES, vp8_get_last_ref_updates },
  { VP8D_GET_FRAME_CORRUPTED, vp8_get_frame_corrupted },
  { VP8D_GET_LAST_REF_USED, vp8_get_last_ref_frame },
  { VPXD_SET_DECRYPTOR, vp8_set_decryptor },
  { -1, NULL },
};
#ifndef VERSION_STRING
#define VERSION_STRING
#endif
/* Public codec interface descriptor: decoder-only, so every encoder
 * slot is NULL. */
CODEC_INTERFACE(vpx_codec_vp8_dx) = {
  "WebM Project VP8 Decoder" VERSION_STRING,
  VPX_CODEC_INTERNAL_ABI_VERSION,
  VPX_CODEC_CAP_DECODER | VP8_CAP_POSTPROC | VP8_CAP_ERROR_CONCEALMENT |
      VPX_CODEC_CAP_INPUT_FRAGMENTS,
  /* vpx_codec_caps_t caps; */
  vp8_init,     /* vpx_codec_init_fn_t init; */
  vp8_destroy,  /* vpx_codec_destroy_fn_t destroy; */
  vp8_ctf_maps, /* vpx_codec_ctrl_fn_map_t *ctrl_maps; */
  {
      vp8_peek_si,   /* vpx_codec_peek_si_fn_t peek_si; */
      vp8_get_si,    /* vpx_codec_get_si_fn_t get_si; */
      vp8_decode,    /* vpx_codec_decode_fn_t decode; */
      vp8_get_frame, /* vpx_codec_frame_get_fn_t frame_get; */
      NULL,          /* external frame-buffer hook: not supported */
  },
  {
      /* encoder functions */
      0, NULL, /* vpx_codec_enc_cfg_map_t */
      NULL,    /* vpx_codec_encode_fn_t */
      NULL,    /* vpx_codec_get_cx_data_fn_t */
      NULL,    /* vpx_codec_enc_config_set_fn_t */
      NULL,    /* vpx_codec_get_global_headers_fn_t */
      NULL,    /* vpx_codec_get_preview_frame_fn_t */
      NULL     /* vpx_codec_enc_mr_get_mem_loc_fn_t */
  }
};