vf_overlay_qsv.c 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434
  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. /**
  19. * @file
  20. * A hardware accelerated overlay filter based on Intel Quick Sync Video VPP
  21. */
  22. #include "libavutil/opt.h"
  23. #include "libavutil/common.h"
  24. #include "libavutil/pixdesc.h"
  25. #include "libavutil/eval.h"
  26. #include "libavutil/hwcontext.h"
  27. #include "libavutil/avstring.h"
  28. #include "libavutil/avassert.h"
  29. #include "libavutil/imgutils.h"
  30. #include "libavutil/mathematics.h"
  31. #include "internal.h"
  32. #include "avfilter.h"
  33. #include "formats.h"
  34. #include "video.h"
  35. #include "framesync.h"
  36. #include "qsvvpp.h"
  37. #define MAIN 0
  38. #define OVERLAY 1
  39. #define OFFSET(x) offsetof(QSVOverlayContext, x)
  40. #define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM)
/* Indexes into QSVOverlayContext.var_values[]; must stay in sync with the
 * var_names[] table below.  Where two constants appear on one line they are
 * a long and a short alias for the same quantity: both slots exist and the
 * evaluation code keeps them equal. */
enum var_name {
    VAR_MAIN_iW, VAR_MW,     /* width of the main input  ("main_w"/"W") */
    VAR_MAIN_iH, VAR_MH,     /* height of the main input ("main_h"/"H") */
    VAR_OVERLAY_iW,          /* input width of the overlay layer  */
    VAR_OVERLAY_iH,          /* input height of the overlay layer */
    VAR_OVERLAY_X, VAR_OX,   /* x position of the overlay inside main */
    VAR_OVERLAY_Y, VAR_OY,   /* y position of the overlay inside main */
    VAR_OVERLAY_W, VAR_OW,   /* output width of the overlay layer  */
    VAR_OVERLAY_H, VAR_OH,   /* output height of the overlay layer */
    VAR_VARS_NB              /* number of variables == size of var_values[] */
};
/* Private context of the overlay_qsv filter. */
typedef struct QSVOverlayContext {
    const AVClass *class;

    FFFrameSync      fs;         /* synchronizes the main and overlay inputs */
    QSVVPPContext    *qsv;       /* QSV VPP session, created in config_output() */
    QSVVPPParam      qsv_param;  /* parameters passed to ff_qsvvpp_create() */
    mfxExtVPPComposite comp_conf;        /* VPP composition config (2 streams) */
    double var_values[VAR_VARS_NB];      /* evaluated expression variables */

    /* user-supplied option expressions for overlay x/y/w/h */
    char     *overlay_ox, *overlay_oy, *overlay_ow, *overlay_oh;
    /* global alpha option; overlay_pixel_alpha appears unused in this file —
     * NOTE(review): verify whether it is referenced elsewhere before removing */
    uint16_t  overlay_alpha, overlay_pixel_alpha;
} QSVOverlayContext;
/* Variable names accepted in the x/y/w/h option expressions.
 * Order must match enum var_name above (aliases included). */
static const char *const var_names[] = {
    "main_w",     "W",   /* input width of the main layer */
    "main_h",     "H",   /* input height of the main layer */
    "overlay_iw",        /* input width of the overlay layer */
    "overlay_ih",        /* input height of the overlay layer */
    "overlay_x",  "x",   /* x position of the overlay layer inside of main */
    "overlay_y",  "y",   /* y position of the overlay layer inside of main */
    "overlay_w",  "w",   /* output width of overlay layer */
    "overlay_h",  "h",   /* output height of overlay layer */
    NULL
};
/* Filter options.  For the string-typed options the min/max fields (0, 255)
 * are not meaningful and are ignored by the option system; the strings are
 * parsed as expressions in eval_expr(). */
static const AVOption overlay_qsv_options[] = {
    { "x", "Overlay x position", OFFSET(overlay_ox), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
    { "y", "Overlay y position", OFFSET(overlay_oy), AV_OPT_TYPE_STRING, { .str="0"}, 0, 255, .flags = FLAGS},
    { "w", "Overlay width",      OFFSET(overlay_ow), AV_OPT_TYPE_STRING, { .str="overlay_iw"}, 0, 255, .flags = FLAGS},
    /* default height preserves the overlay's aspect ratio given its width */
    { "h", "Overlay height",     OFFSET(overlay_oh), AV_OPT_TYPE_STRING, { .str="overlay_ih*w/overlay_iw"}, 0, 255, .flags = FLAGS},
    { "alpha", "Overlay global alpha", OFFSET(overlay_alpha), AV_OPT_TYPE_INT, { .i64 = 255}, 0, 255, .flags = FLAGS},
    { "eof_action", "Action to take when encountering EOF from secondary input ",
        OFFSET(fs.opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
        EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
        { "repeat", "Repeat the previous frame.",   0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
        { "endall", "End both streams.",            0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
        { "pass",   "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS },   .flags = FLAGS, "eof_action" },
    { "shortest", "force termination when the shortest input terminates", OFFSET(fs.opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(fs.opt_repeatlast), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
    { NULL }
};
/* Defines overlay_qsv_class and the framesync-aware preinit helper
 * (overlay_qsv_framesync_preinit) used in the AVFilter definition below. */
FRAMESYNC_DEFINE_CLASS(overlay_qsv, QSVOverlayContext, fs);
  90. static int eval_expr(AVFilterContext *ctx)
  91. {
  92. QSVOverlayContext *vpp = ctx->priv;
  93. double *var_values = vpp->var_values;
  94. int ret = 0;
  95. AVExpr *ox_expr = NULL, *oy_expr = NULL;
  96. AVExpr *ow_expr = NULL, *oh_expr = NULL;
  97. #define PASS_EXPR(e, s) {\
  98. ret = av_expr_parse(&e, s, var_names, NULL, NULL, NULL, NULL, 0, ctx); \
  99. if (ret < 0) {\
  100. av_log(ctx, AV_LOG_ERROR, "Error when passing '%s'.\n", s);\
  101. goto release;\
  102. }\
  103. }
  104. PASS_EXPR(ox_expr, vpp->overlay_ox);
  105. PASS_EXPR(oy_expr, vpp->overlay_oy);
  106. PASS_EXPR(ow_expr, vpp->overlay_ow);
  107. PASS_EXPR(oh_expr, vpp->overlay_oh);
  108. #undef PASS_EXPR
  109. var_values[VAR_OVERLAY_W] =
  110. var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
  111. var_values[VAR_OVERLAY_H] =
  112. var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
  113. /* calc again in case ow is relative to oh */
  114. var_values[VAR_OVERLAY_W] =
  115. var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
  116. var_values[VAR_OVERLAY_X] =
  117. var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
  118. var_values[VAR_OVERLAY_Y] =
  119. var_values[VAR_OY] = av_expr_eval(oy_expr, var_values, NULL);
  120. /* calc again in case ox is relative to oy */
  121. var_values[VAR_OVERLAY_X] =
  122. var_values[VAR_OX] = av_expr_eval(ox_expr, var_values, NULL);
  123. /* calc overlay_w and overlay_h again incase relative to ox,oy */
  124. var_values[VAR_OVERLAY_W] =
  125. var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
  126. var_values[VAR_OVERLAY_H] =
  127. var_values[VAR_OH] = av_expr_eval(oh_expr, var_values, NULL);
  128. var_values[VAR_OVERLAY_W] =
  129. var_values[VAR_OW] = av_expr_eval(ow_expr, var_values, NULL);
  130. release:
  131. av_expr_free(ox_expr);
  132. av_expr_free(oy_expr);
  133. av_expr_free(ow_expr);
  134. av_expr_free(oh_expr);
  135. return ret;
  136. }
  137. static int have_alpha_planar(AVFilterLink *link)
  138. {
  139. enum AVPixelFormat pix_fmt = link->format;
  140. const AVPixFmtDescriptor *desc;
  141. AVHWFramesContext *fctx;
  142. if (link->format == AV_PIX_FMT_QSV) {
  143. fctx = (AVHWFramesContext *)link->hw_frames_ctx->data;
  144. pix_fmt = fctx->sw_format;
  145. }
  146. desc = av_pix_fmt_desc_get(pix_fmt);
  147. if (!desc)
  148. return 0;
  149. return !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
  150. }
  151. static int config_main_input(AVFilterLink *inlink)
  152. {
  153. AVFilterContext *ctx = inlink->dst;
  154. QSVOverlayContext *vpp = ctx->priv;
  155. mfxVPPCompInputStream *st = &vpp->comp_conf.InputStream[0];
  156. av_log(ctx, AV_LOG_DEBUG, "Input[%d] is of %s.\n", FF_INLINK_IDX(inlink),
  157. av_get_pix_fmt_name(inlink->format));
  158. vpp->var_values[VAR_MAIN_iW] =
  159. vpp->var_values[VAR_MW] = inlink->w;
  160. vpp->var_values[VAR_MAIN_iH] =
  161. vpp->var_values[VAR_MH] = inlink->h;
  162. st->DstX = 0;
  163. st->DstY = 0;
  164. st->DstW = inlink->w;
  165. st->DstH = inlink->h;
  166. st->GlobalAlphaEnable = 0;
  167. st->PixelAlphaEnable = 0;
  168. return 0;
  169. }
  170. static int config_overlay_input(AVFilterLink *inlink)
  171. {
  172. AVFilterContext *ctx = inlink->dst;
  173. QSVOverlayContext *vpp = ctx->priv;
  174. mfxVPPCompInputStream *st = &vpp->comp_conf.InputStream[1];
  175. int ret = 0;
  176. av_log(ctx, AV_LOG_DEBUG, "Input[%d] is of %s.\n", FF_INLINK_IDX(inlink),
  177. av_get_pix_fmt_name(inlink->format));
  178. vpp->var_values[VAR_OVERLAY_iW] = inlink->w;
  179. vpp->var_values[VAR_OVERLAY_iH] = inlink->h;
  180. ret = eval_expr(ctx);
  181. if (ret < 0)
  182. return ret;
  183. st->DstX = vpp->var_values[VAR_OX];
  184. st->DstY = vpp->var_values[VAR_OY];
  185. st->DstW = vpp->var_values[VAR_OW];
  186. st->DstH = vpp->var_values[VAR_OH];
  187. st->GlobalAlpha = vpp->overlay_alpha;
  188. st->GlobalAlphaEnable = (st->GlobalAlpha < 255);
  189. st->PixelAlphaEnable = have_alpha_planar(inlink);
  190. return 0;
  191. }
/* Framesync event callback: feed the current frame of every input into the
 * QSV VPP.  AVERROR(EAGAIN) from the VPP is not fatal — it means the session
 * needs more input before it can emit output — so the loop continues past it
 * and only breaks on real errors. */
static int process_frame(FFFrameSync *fs)
{
    AVFilterContext  *ctx = fs->parent;
    QSVOverlayContext *s = fs->opaque;
    AVFrame        *frame = NULL;
    int               ret = 0, i;

    for (i = 0; i < ctx->nb_inputs; i++) {
        ret = ff_framesync_get_frame(fs, i, &frame, 0);
        if (ret == 0)
            ret = ff_qsvvpp_filter_frame(s->qsv, ctx->inputs[i], frame);
        if (ret < 0 && ret != AVERROR(EAGAIN))
            break;
    }

    return ret;
}
/* Initialize frame synchronization across all inputs.
 * The main input (i == 0) gets sync priority 2, the overlay 1, making the
 * main input the sync master; EXT_STOP/EXT_INFINITY mean streams do not
 * start until every input has a frame and keep extending after EOF. */
static int init_framesync(AVFilterContext *ctx)
{
    QSVOverlayContext *s = ctx->priv;
    int ret, i;

    s->fs.on_event = process_frame;
    s->fs.opaque   = s;
    ret = ff_framesync_init(&s->fs, ctx, ctx->nb_inputs);
    if (ret < 0)
        return ret;

    for (i = 0; i < ctx->nb_inputs; i++) {
        FFFrameSyncIn *in = &s->fs.in[i];
        in->before    = EXT_STOP;
        in->after     = EXT_INFINITY;
        in->sync      = i ? 1 : 2;   /* main input is the sync master */
        in->time_base = ctx->inputs[i]->time_base;
    }

    return ff_framesync_configure(&s->fs);
}
/* config_props of the output link: validate that the two inputs are
 * compatible (both hardware or both software, and on the same QSV device),
 * size the output like the main input, then set up framesync and the VPP
 * session. */
static int config_output(AVFilterLink *outlink)
{
    AVFilterContext   *ctx = outlink->src;
    QSVOverlayContext *vpp = ctx->priv;
    AVFilterLink      *in0 = ctx->inputs[0];
    AVFilterLink      *in1 = ctx->inputs[1];
    int ret;

    av_log(ctx, AV_LOG_DEBUG, "Output is of %s.\n", av_get_pix_fmt_name(outlink->format));
    if ((in0->format == AV_PIX_FMT_QSV && in1->format != AV_PIX_FMT_QSV) ||
        (in0->format != AV_PIX_FMT_QSV && in1->format == AV_PIX_FMT_QSV)) {
        av_log(ctx, AV_LOG_ERROR, "Mixing hardware and software pixel formats is not supported.\n");
        return AVERROR(EINVAL);
    } else if (in0->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *hw_frame0 = (AVHWFramesContext *)in0->hw_frames_ctx->data;
        AVHWFramesContext *hw_frame1 = (AVHWFramesContext *)in1->hw_frames_ctx->data;

        if (hw_frame0->device_ctx != hw_frame1->device_ctx) {
            av_log(ctx, AV_LOG_ERROR, "Inputs with different underlying QSV devices are forbidden.\n");
            return AVERROR(EINVAL);
        }
    }

    /* output takes the main input's dimensions (stored in config_main_input) */
    outlink->w          = vpp->var_values[VAR_MW];
    outlink->h          = vpp->var_values[VAR_MH];
    outlink->frame_rate = in0->frame_rate;
    /* NOTE(review): assumes a known, constant frame rate; if in0->frame_rate
     * is 0/0 this produces an invalid time base — verify against callers */
    outlink->time_base  = av_inv_q(outlink->frame_rate);

    ret = init_framesync(ctx);
    if (ret < 0)
        return ret;

    return ff_qsvvpp_create(ctx, &vpp->qsv, &vpp->qsv_param);
}
/*
 * Callback for qsvvpp: invoked for every frame the VPP produces.
 * @Note: qsvvpp composition does not generate PTS for the result frame,
 * so we assign the PTS from framesync (rescaled to the output time base)
 * to the output frame before forwarding it downstream.
 */
static int filter_callback(AVFilterLink *outlink, AVFrame *frame)
{
    QSVOverlayContext *s = outlink->src->priv;

    frame->pts = av_rescale_q(s->fs.pts,
                              s->fs.time_base, outlink->time_base);
    return ff_filter_frame(outlink, frame);
}
  266. static int overlay_qsv_init(AVFilterContext *ctx)
  267. {
  268. QSVOverlayContext *vpp = ctx->priv;
  269. /* fill composite config */
  270. vpp->comp_conf.Header.BufferId = MFX_EXTBUFF_VPP_COMPOSITE;
  271. vpp->comp_conf.Header.BufferSz = sizeof(vpp->comp_conf);
  272. vpp->comp_conf.NumInputStream = ctx->nb_inputs;
  273. vpp->comp_conf.InputStream = av_mallocz_array(ctx->nb_inputs,
  274. sizeof(*vpp->comp_conf.InputStream));
  275. if (!vpp->comp_conf.InputStream)
  276. return AVERROR(ENOMEM);
  277. /* initialize QSVVPP params */
  278. vpp->qsv_param.filter_frame = filter_callback;
  279. vpp->qsv_param.ext_buf = av_mallocz(sizeof(*vpp->qsv_param.ext_buf));
  280. if (!vpp->qsv_param.ext_buf)
  281. return AVERROR(ENOMEM);
  282. vpp->qsv_param.ext_buf[0] = (mfxExtBuffer *)&vpp->comp_conf;
  283. vpp->qsv_param.num_ext_buf = 1;
  284. vpp->qsv_param.out_sw_format = AV_PIX_FMT_NV12;
  285. vpp->qsv_param.num_crop = 0;
  286. return 0;
  287. }
/* Filter uninit callback: release the VPP session, the framesync state and
 * the buffers allocated in overlay_qsv_init().  Safe to call even if init
 * failed part-way (av_freep/ff_* helpers tolerate NULL). */
static av_cold void overlay_qsv_uninit(AVFilterContext *ctx)
{
    QSVOverlayContext *vpp = ctx->priv;

    ff_qsvvpp_free(&vpp->qsv);
    ff_framesync_uninit(&vpp->fs);
    av_freep(&vpp->comp_conf.InputStream);
    av_freep(&vpp->qsv_param.ext_buf);
}
  296. static int activate(AVFilterContext *ctx)
  297. {
  298. QSVOverlayContext *s = ctx->priv;
  299. return ff_framesync_activate(&s->fs);
  300. }
  301. static int overlay_qsv_query_formats(AVFilterContext *ctx)
  302. {
  303. int i;
  304. int ret;
  305. static const enum AVPixelFormat main_in_fmts[] = {
  306. AV_PIX_FMT_YUV420P,
  307. AV_PIX_FMT_NV12,
  308. AV_PIX_FMT_YUYV422,
  309. AV_PIX_FMT_RGB32,
  310. AV_PIX_FMT_QSV,
  311. AV_PIX_FMT_NONE
  312. };
  313. static const enum AVPixelFormat out_pix_fmts[] = {
  314. AV_PIX_FMT_NV12,
  315. AV_PIX_FMT_QSV,
  316. AV_PIX_FMT_NONE
  317. };
  318. for (i = 0; i < ctx->nb_inputs; i++) {
  319. ret = ff_formats_ref(ff_make_format_list(main_in_fmts), &ctx->inputs[i]->out_formats);
  320. if (ret < 0)
  321. return ret;
  322. }
  323. ret = ff_formats_ref(ff_make_format_list(out_pix_fmts), &ctx->outputs[0]->in_formats);
  324. if (ret < 0)
  325. return ret;
  326. return 0;
  327. }
/* Input pads: index 0 is the background ("main"), index 1 the overlay.
 * The indexes must match InputStream[0]/[1] in the composition config. */
static const AVFilterPad overlay_qsv_inputs[] = {
    {
        .name          = "main",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_main_input,
        .needs_fifo    = 1,
    },
    {
        .name          = "overlay",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_overlay_input,
        .needs_fifo    = 1,
    },
    { NULL }
};
/* Single video output pad; config_output validates the inputs and creates
 * the VPP session. */
static const AVFilterPad overlay_qsv_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,
    },
    { NULL }
};
/* Filter definition.  preinit comes from FRAMESYNC_DEFINE_CLASS so that
 * framesync options (eof_action etc.) are available before init runs. */
AVFilter ff_vf_overlay_qsv = {
    .name           = "overlay_qsv",
    .description    = NULL_IF_CONFIG_SMALL("Quick Sync Video overlay."),
    .priv_size      = sizeof(QSVOverlayContext),
    .query_formats  = overlay_qsv_query_formats,
    .preinit        = overlay_qsv_framesync_preinit,
    .init           = overlay_qsv_init,
    .uninit         = overlay_qsv_uninit,
    .activate       = activate,
    .inputs         = overlay_qsv_inputs,
    .outputs        = overlay_qsv_outputs,
    .priv_class     = &overlay_qsv_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};