/* vf_deinterlace_qsv.c */
  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. /**
  19. * @file
  20. * deinterlace video filter - QSV
  21. */
  22. #include <mfx/mfxvideo.h>
  23. #include <stdio.h>
  24. #include <string.h>
  25. #include "libavutil/avstring.h"
  26. #include "libavutil/common.h"
  27. #include "libavutil/hwcontext.h"
  28. #include "libavutil/hwcontext_qsv.h"
  29. #include "libavutil/internal.h"
  30. #include "libavutil/mathematics.h"
  31. #include "libavutil/opt.h"
  32. #include "libavutil/pixdesc.h"
  33. #include "libavutil/time.h"
  34. #include "libavfilter/qsvvpp.h"
  35. #include "avfilter.h"
  36. #include "formats.h"
  37. #include "internal.h"
  38. #include "video.h"
/* Non-error return codes of process_frame(), used to drive the
 * submit/process loop in qsvdeint_filter_frame():
 * MORE_OUTPUT - call again with the same input, another output frame is
 *               pending; MORE_INPUT - the VPP buffered the input and
 *               produced nothing yet. */
enum {
    QSVDEINT_MORE_OUTPUT = 1,
    QSVDEINT_MORE_INPUT,
};
/* One node in the filter's list of in-flight input frames: keeps the
 * AVFrame alive, together with the mfx surface wrapping it, until the VPP
 * unlocks the surface (see clear_unused_frames()). */
typedef struct QSVFrame {
    AVFrame *frame;           // owned reference; freed once the surface is unlocked
    mfxFrameSurface1 surface; // local copy of the frame's surface descriptor
    int used;                 // nonzero while this slot holds a submitted frame
    struct QSVFrame *next;    // singly-linked list
} QSVFrame;
/* Private context of the deinterlace_qsv filter. */
typedef struct QSVDeintContext {
    const AVClass *class;

    AVBufferRef *hw_frames_ctx;   // ref to the input link's AVHWFramesContext
    /* a clone of the main session, used internally for deinterlacing */
    mfxSession   session;

    mfxMemId *mem_ids;            // MemIds handed out by frame_alloc() (video memory path)
    int     nb_mem_ids;

    mfxFrameSurface1 **surface_ptrs; // surface pointers for opaque allocation
    int             nb_surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtVPPDeinterlacing   deint_conf;
    mfxExtBuffer            *ext_buffers[2]; // deint_conf (+ opaque_alloc when opaque)
    int                      num_ext_buffers;

    QSVFrame *work_frames;        // list of in-flight input frames

    int64_t last_pts;             // last emitted pts, used to keep output pts monotonic

    int eof;                      // NOTE(review): set nowhere in this file — verify use

    /* option for Deinterlacing algorithm to be used */
    int mode;
} QSVDeintContext;
  68. static av_cold void qsvdeint_uninit(AVFilterContext *ctx)
  69. {
  70. QSVDeintContext *s = ctx->priv;
  71. QSVFrame *cur;
  72. if (s->session) {
  73. MFXClose(s->session);
  74. s->session = NULL;
  75. }
  76. av_buffer_unref(&s->hw_frames_ctx);
  77. cur = s->work_frames;
  78. while (cur) {
  79. s->work_frames = cur->next;
  80. av_frame_free(&cur->frame);
  81. av_freep(&cur);
  82. cur = s->work_frames;
  83. }
  84. av_freep(&s->mem_ids);
  85. s->nb_mem_ids = 0;
  86. av_freep(&s->surface_ptrs);
  87. s->nb_surface_ptrs = 0;
  88. }
  89. static int qsvdeint_query_formats(AVFilterContext *ctx)
  90. {
  91. static const enum AVPixelFormat pixel_formats[] = {
  92. AV_PIX_FMT_QSV, AV_PIX_FMT_NONE,
  93. };
  94. AVFilterFormats *pix_fmts = ff_make_format_list(pixel_formats);
  95. int ret;
  96. if ((ret = ff_set_common_formats(ctx, pix_fmts)) < 0)
  97. return ret;
  98. return 0;
  99. }
  100. static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
  101. mfxFrameAllocResponse *resp)
  102. {
  103. AVFilterContext *ctx = pthis;
  104. QSVDeintContext *s = ctx->priv;
  105. if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
  106. !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
  107. !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
  108. return MFX_ERR_UNSUPPORTED;
  109. resp->mids = s->mem_ids;
  110. resp->NumFrameActual = s->nb_mem_ids;
  111. return MFX_ERR_NONE;
  112. }
/* mfxFrameAllocator.Free callback: nothing to do, the MemIds belong to the
 * hw frames context and are freed in qsvdeint_uninit(). */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
/* mfxFrameAllocator.Lock callback: CPU mapping of video memory is not
 * supported by this filter. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* mfxFrameAllocator.Unlock callback: see frame_lock() — never mapped. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* mfxFrameAllocator.GetHDL callback: the MemId itself is the device handle. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
/* Device handle types probed on the master session, in order of preference
 * (VAAPI on Linux, D3D9/D3D11 on Windows). */
static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};
/* Create the "slave" MFX session used for deinterlacing: clone the master
 * session's implementation/version/device handle, install either opaque
 * surface allocation or a pass-through frame allocator (depending on the
 * frame pool type), and initialize the VPP with the deinterlacing filter.
 *
 * Returns 0 on success, a negative AVERROR code on failure.
 *
 * NOTE(review): on failure paths after MFXInit() succeeds, s->session is
 * left open here and is only closed later by qsvdeint_uninit(). */
static int init_out_session(AVFilterContext *ctx)
{
    QSVDeintContext *s = ctx->priv;
    AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)s->hw_frames_ctx->data;
    AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
    AVQSVDeviceContext *device_hwctx = hw_frames_ctx->device_ctx->hwctx;

    /* opaque pools are managed entirely by the MFX runtime, no real MemIds */
    int opaque = !!(hw_frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxHDL handle = NULL;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    mfxVideoParam par;
    mfxStatus err;
    int i;

    /* extract the properties of the "master" session given to us */
    err = MFXQueryIMPL(device_hwctx->session, &impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(device_hwctx->session, &ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    /* probe for whichever device handle type the master session exposes;
     * after the loop, err holds the last failure if none was found */
    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        err = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (err == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error getting the session handle\n");
        return AVERROR_UNKNOWN;
    }

    /* create a "slave" session with those same properties, to be used for
     * actual deinterlacing */
    err = MFXInit(impl, &ver, &s->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a session for deinterlacing\n");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        err = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    /* NOTE(review): joining master and slave sessions is gated on runtime
     * version >= 1.25 here — confirm against the MediaSDK requirements */
    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        err = MFXJoinSession(device_hwctx->session, s->session);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    /* the deinterlacing config ext buffer is always attached */
    s->deint_conf.Header.BufferId = MFX_EXTBUFF_VPP_DEINTERLACING;
    s->deint_conf.Header.BufferSz = sizeof(s->deint_conf);
    s->deint_conf.Mode = s->mode;

    s->ext_buffers[s->num_ext_buffers++] = (mfxExtBuffer *)&s->deint_conf;

    if (opaque) {
        /* opaque memory: describe the shared surface pool to the runtime */
        s->surface_ptrs = av_mallocz_array(hw_frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);
        for (i = 0; i < hw_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = hw_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs = hw_frames_hwctx->nb_surfaces;

        s->opaque_alloc.In.Surfaces   = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs;
        s->opaque_alloc.In.Type       = hw_frames_hwctx->frame_type;
        /* input and output use the same pool */
        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[s->num_ext_buffers++] = (mfxExtBuffer *)&s->opaque_alloc;

        par.IOPattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY | MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
    } else {
        /* video memory: install a pass-through allocator that hands out the
         * MemIds of the frames-context surfaces (see frame_alloc()) */
        mfxFrameAllocator frame_allocator = {
            .pthis  = ctx,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        s->mem_ids = av_mallocz_array(hw_frames_hwctx->nb_surfaces,
                                      sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);
        for (i = 0; i < hw_frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = hw_frames_hwctx->surfaces[i].Data.MemId;
        s->nb_mem_ids = hw_frames_hwctx->nb_surfaces;

        err = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;

        par.IOPattern = MFX_IOPATTERN_IN_VIDEO_MEMORY | MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    }

    par.ExtParam    = s->ext_buffers;
    par.NumExtParam = s->num_ext_buffers;

    par.AsyncDepth = 1;    // TODO async

    /* base the VPP frame info on the surfaces of the incoming pool */
    par.vpp.In = hw_frames_hwctx->surfaces[0].Info;

    par.vpp.In.CropW = ctx->inputs[0]->w;
    par.vpp.In.CropH = ctx->inputs[0]->h;

    if (ctx->inputs[0]->frame_rate.num) {
        par.vpp.In.FrameRateExtN = ctx->inputs[0]->frame_rate.num;
        par.vpp.In.FrameRateExtD = ctx->inputs[0]->frame_rate.den;
    } else {
        /* unknown frame rate: fall back to the time base */
        par.vpp.In.FrameRateExtN = ctx->inputs[0]->time_base.num;
        par.vpp.In.FrameRateExtD = ctx->inputs[0]->time_base.den;
    }

    par.vpp.Out = par.vpp.In;

    if (ctx->outputs[0]->frame_rate.num) {
        par.vpp.Out.FrameRateExtN = ctx->outputs[0]->frame_rate.num;
        par.vpp.Out.FrameRateExtD = ctx->outputs[0]->frame_rate.den;
    } else {
        par.vpp.Out.FrameRateExtN = ctx->outputs[0]->time_base.num;
        par.vpp.Out.FrameRateExtD = ctx->outputs[0]->time_base.den;
    }

    err = MFXVideoVPP_Init(s->session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error opening the VPP for deinterlacing: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    return 0;
}
  255. static int qsvdeint_config_props(AVFilterLink *outlink)
  256. {
  257. AVFilterContext *ctx = outlink->src;
  258. AVFilterLink *inlink = ctx->inputs[0];
  259. QSVDeintContext *s = ctx->priv;
  260. int ret;
  261. qsvdeint_uninit(ctx);
  262. s->last_pts = AV_NOPTS_VALUE;
  263. outlink->frame_rate = av_mul_q(inlink->frame_rate,
  264. (AVRational){ 2, 1 });
  265. outlink->time_base = av_mul_q(inlink->time_base,
  266. (AVRational){ 1, 2 });
  267. /* check that we have a hw context */
  268. if (!inlink->hw_frames_ctx) {
  269. av_log(ctx, AV_LOG_ERROR, "No hw context provided on input\n");
  270. return AVERROR(EINVAL);
  271. }
  272. s->hw_frames_ctx = av_buffer_ref(inlink->hw_frames_ctx);
  273. if (!s->hw_frames_ctx)
  274. return AVERROR(ENOMEM);
  275. av_buffer_unref(&outlink->hw_frames_ctx);
  276. outlink->hw_frames_ctx = av_buffer_ref(inlink->hw_frames_ctx);
  277. if (!outlink->hw_frames_ctx) {
  278. qsvdeint_uninit(ctx);
  279. return AVERROR(ENOMEM);
  280. }
  281. ret = init_out_session(ctx);
  282. if (ret < 0)
  283. return ret;
  284. return 0;
  285. }
  286. static void clear_unused_frames(QSVDeintContext *s)
  287. {
  288. QSVFrame *cur = s->work_frames;
  289. while (cur) {
  290. if (!cur->surface.Data.Locked) {
  291. av_frame_free(&cur->frame);
  292. cur->used = 0;
  293. }
  294. cur = cur->next;
  295. }
  296. }
  297. static int get_free_frame(QSVDeintContext *s, QSVFrame **f)
  298. {
  299. QSVFrame *frame, **last;
  300. clear_unused_frames(s);
  301. frame = s->work_frames;
  302. last = &s->work_frames;
  303. while (frame) {
  304. if (!frame->used) {
  305. *f = frame;
  306. return 0;
  307. }
  308. last = &frame->next;
  309. frame = frame->next;
  310. }
  311. frame = av_mallocz(sizeof(*frame));
  312. if (!frame)
  313. return AVERROR(ENOMEM);
  314. *last = frame;
  315. *f = frame;
  316. return 0;
  317. }
/* Wrap the incoming AVFrame in an mfxFrameSurface1 for the VPP.  Takes
 * ownership of 'frame': it is stored in the work-frame list and freed by
 * clear_unused_frames() once the surface is unlocked.  On success *surface
 * points to the filled surface.  Returns 0 or a negative AVERROR code. */
static int submit_frame(AVFilterContext *ctx, AVFrame *frame,
                        mfxFrameSurface1 **surface)
{
    QSVDeintContext *s = ctx->priv;
    QSVFrame *qf;
    int ret;

    ret = get_free_frame(s, &qf);
    if (ret < 0)
        return ret;

    qf->frame = frame;

    /* for AV_PIX_FMT_QSV frames, data[3] carries the mfxFrameSurface1 */
    qf->surface = *(mfxFrameSurface1*)qf->frame->data[3];

    qf->surface.Data.Locked = 0;
    qf->surface.Info.CropW = qf->frame->width;
    qf->surface.Info.CropH = qf->frame->height;

    /* translate the AVFrame interlacing flags into an MFX PicStruct */
    qf->surface.Info.PicStruct = !qf->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
                                 (qf->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                               MFX_PICSTRUCT_FIELD_BFF);
    if (qf->frame->repeat_pict == 1) {
        /* repeat-first-field material (e.g. soft telecine) */
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
        qf->surface.Info.PicStruct |= qf->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                                   MFX_PICSTRUCT_FIELD_BFF;
    } else if (qf->frame->repeat_pict == 2)
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qf->frame->repeat_pict == 4)
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    if (ctx->inputs[0]->frame_rate.num) {
        qf->surface.Info.FrameRateExtN = ctx->inputs[0]->frame_rate.num;
        qf->surface.Info.FrameRateExtD = ctx->inputs[0]->frame_rate.den;
    } else {
        /* unknown frame rate: fall back to the time base */
        qf->surface.Info.FrameRateExtN = ctx->inputs[0]->time_base.num;
        qf->surface.Info.FrameRateExtD = ctx->inputs[0]->time_base.den;
    }

    /* MFX timestamps are in a fixed 90 kHz base */
    qf->surface.Data.TimeStamp = av_rescale_q(qf->frame->pts,
                                              ctx->inputs[0]->time_base,
                                              (AVRational){1, 90000});

    *surface = &qf->surface;
    qf->used = 1;

    return 0;
}
/* Run one deinterlacing step: allocate an output hw frame, submit input and
 * output surfaces to the VPP, synchronize and push the result downstream.
 *
 * Returns 0 when this input is fully consumed, QSVDEINT_MORE_OUTPUT when
 * the caller should invoke it again with the same input (field-rate output
 * yields two frames per input), QSVDEINT_MORE_INPUT when the VPP buffered
 * the input without producing output, or a negative AVERROR code. */
static int process_frame(AVFilterContext *ctx, const AVFrame *in,
                         mfxFrameSurface1 *surf_in)
{
    QSVDeintContext *s = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out;
    mfxFrameSurface1 *surf_out;
    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret, again = 0;

    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    surf_out = (mfxFrameSurface1*)out->data[3];
    surf_out->Info.CropW = outlink->w;
    surf_out->Info.CropH = outlink->h;
    surf_out->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

    /* retry (with a minimal sleep) while the device is busy */
    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session, surf_in, surf_out,
                                           NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err == MFX_ERR_MORE_DATA) {
        /* the VPP buffered the input without producing output */
        av_frame_free(&out);
        return QSVDEINT_MORE_INPUT;
    }

    /* MORE_SURFACE is not an error: another output is pending */
    if ((err < 0 && err != MFX_ERR_MORE_SURFACE) || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error during deinterlacing: %d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }
    if (err == MFX_ERR_MORE_SURFACE)
        again = 1;

    /* wait for the async operation (1000 ms timeout per attempt) */
    do {
        err = MFXVideoCORE_SyncOperation(s->session, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    ret = av_frame_copy_props(out, in);
    if (ret < 0)
        goto fail;

    out->width            = outlink->w;
    out->height           = outlink->h;
    out->interlaced_frame = 0;

    out->pts = av_rescale_q(out->pts, inlink->time_base, outlink->time_base);
    /* keep output timestamps strictly monotonic */
    if (out->pts == s->last_pts)
        out->pts++;
    s->last_pts = out->pts;

    ret = ff_filter_frame(outlink, out);
    if (ret < 0)
        return ret;

    return again ? QSVDEINT_MORE_OUTPUT : 0;
fail:
    av_frame_free(&out);
    return ret;
}
/* Input pad filter_frame callback: wrap 'in' in an mfx surface and run the
 * VPP until it stops producing output for this input.  Ownership of 'in'
 * passes to the work-frame list on successful submission (it is released
 * later by clear_unused_frames()); on submission failure it is freed here. */
static int qsvdeint_filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;

    mfxFrameSurface1 *surf_in;
    int ret;

    ret = submit_frame(ctx, in, &surf_in);
    if (ret < 0) {
        av_frame_free(&in);
        return ret;
    }

    /* field-rate output: one input may yield multiple output frames */
    do {
        ret = process_frame(ctx, in, surf_in);
        if (ret < 0)
            return ret;
    } while (ret == QSVDEINT_MORE_OUTPUT);

    return 0;
}
  437. static int qsvdeint_request_frame(AVFilterLink *outlink)
  438. {
  439. AVFilterContext *ctx = outlink->src;
  440. return ff_request_frame(ctx->inputs[0]);
  441. }
#define OFFSET(x) offsetof(QSVDeintContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
/* Filter options: the deinterlacing algorithm, stored in
 * QSVDeintContext.mode and passed to the VPP via mfxExtVPPDeinterlacing.
 * Default is the motion-adaptive ("advanced") mode. */
static const AVOption options[] = {
    { "mode", "set deinterlace mode", OFFSET(mode),   AV_OPT_TYPE_INT, {.i64 = MFX_DEINTERLACING_ADVANCED}, MFX_DEINTERLACING_BOB, MFX_DEINTERLACING_ADVANCED, FLAGS, "mode"},
    { "bob",   "bob algorithm",                  0, AV_OPT_TYPE_CONST,      {.i64 = MFX_DEINTERLACING_BOB}, MFX_DEINTERLACING_BOB, MFX_DEINTERLACING_ADVANCED, FLAGS, "mode"},
    { "advanced", "Motion adaptive algorithm",   0, AV_OPT_TYPE_CONST, {.i64 = MFX_DEINTERLACING_ADVANCED}, MFX_DEINTERLACING_BOB, MFX_DEINTERLACING_ADVANCED, FLAGS, "mode"},
    { NULL },
};
/* AVClass for option handling and logging of the filter's private context. */
static const AVClass qsvdeint_class = {
    .class_name = "deinterlace_qsv",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
/* Single video input pad; frames arrive through qsvdeint_filter_frame(). */
static const AVFilterPad qsvdeint_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = qsvdeint_filter_frame,
    },
    { NULL }
};
/* Single video output pad; config_props sets up timing and the session,
 * request_frame forwards demand upstream. */
static const AVFilterPad qsvdeint_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = qsvdeint_config_props,
        .request_frame = qsvdeint_request_frame,
    },
    { NULL }
};
/* Registration of the deinterlace_qsv filter.  FF_FILTER_FLAG_HWFRAME_AWARE
 * tells lavfi the filter manages hw_frames_ctx on its links itself. */
AVFilter ff_vf_deinterlace_qsv = {
    .name           = "deinterlace_qsv",
    .description    = NULL_IF_CONFIG_SMALL("QuickSync video deinterlacing"),
    .uninit         = qsvdeint_uninit,
    .query_formats  = qsvdeint_query_formats,
    .priv_size      = sizeof(QSVDeintContext),
    .priv_class     = &qsvdeint_class,
    .inputs         = qsvdeint_inputs,
    .outputs        = qsvdeint_outputs,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};