/* vf_fps.c */
  1. /*
  2. * Copyright 2007 Bobby Bingham
  3. * Copyright 2012 Robert Nagy <ronag89 gmail com>
  4. * Copyright 2012 Anton Khirnov <anton khirnov net>
  5. * Copyright 2018 Calvin Walton <calvin.walton@kepstin.ca>
  6. *
  7. * This file is part of FFmpeg.
  8. *
  9. * FFmpeg is free software; you can redistribute it and/or
  10. * modify it under the terms of the GNU Lesser General Public
  11. * License as published by the Free Software Foundation; either
  12. * version 2.1 of the License, or (at your option) any later version.
  13. *
  14. * FFmpeg is distributed in the hope that it will be useful,
  15. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  16. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17. * Lesser General Public License for more details.
  18. *
  19. * You should have received a copy of the GNU Lesser General Public
  20. * License along with FFmpeg; if not, write to the Free Software
  21. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  22. */
  23. /**
  24. * @file
  25. * a filter enforcing given constant framerate
  26. */
  27. #include <float.h>
  28. #include <stdint.h>
  29. #include "libavutil/avassert.h"
  30. #include "libavutil/mathematics.h"
  31. #include "libavutil/opt.h"
  32. #include "avfilter.h"
  33. #include "filters.h"
  34. #include "internal.h"
/**
 * Action to perform for the last buffered frame when input EOF is reached.
 */
enum EOFAction {
    EOF_ACTION_ROUND, ///< round the EOF timestamp like any other frame (default)
    EOF_ACTION_PASS,  ///< round the EOF timestamp up, so the last frame is passed through
    EOF_ACTION_NB     ///< number of actions (not a valid option value)
};
typedef struct FPSContext {
    const AVClass *class;

    double start_time;    ///< pts, in seconds, of the expected first frame
    AVRational framerate; ///< target framerate
    int rounding;         ///< AVRounding method for timestamps
    int eof_action;       ///< action performed for last frame in FIFO (see enum EOFAction)

    /* Set during outlink configuration */
    int64_t in_pts_off;   ///< input frame pts offset for start_time handling
    int64_t out_pts_off;  ///< output frame pts offset for start_time handling

    /* Runtime state */
    int status;           ///< buffered input status (non-zero once EOF/error acknowledged)
    int64_t status_pts;   ///< buffered input status timestamp, in outlink timebase
    AVFrame *frames[2];   ///< buffered frames, frames[0] is the oldest
    int frames_count;     ///< number of buffered frames
    int64_t next_pts;     ///< pts of the next frame to output, in outlink timebase

    /* statistics */
    int cur_frame_out;    ///< number of times current frame has been output
    int frames_in;        ///< number of frames on input
    int frames_out;       ///< number of frames on output
    int dup;              ///< number of frames duplicated
    int drop;             ///< number of frames dropped
} FPSContext;
#define OFFSET(x) offsetof(FPSContext, x)
#define V AV_OPT_FLAG_VIDEO_PARAM
#define F AV_OPT_FLAG_FILTERING_PARAM

/* User-visible options; "round" and "eof_action" use named constant units */
static const AVOption fps_options[] = {
    { "fps", "A string describing desired output framerate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, { .str = "25" }, 0, INT_MAX, V|F },
    { "start_time", "Assume the first PTS should be this value.", OFFSET(start_time), AV_OPT_TYPE_DOUBLE, { .dbl = DBL_MAX}, -DBL_MAX, DBL_MAX, V|F },
    { "round", "set rounding method for timestamps", OFFSET(rounding), AV_OPT_TYPE_INT, { .i64 = AV_ROUND_NEAR_INF }, 0, 5, V|F, "round" },
        { "zero", "round towards 0",      0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_ZERO     }, 0, 0, V|F, "round" },
        { "inf",  "round away from 0",    0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_INF      }, 0, 0, V|F, "round" },
        { "down", "round towards -infty", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_DOWN     }, 0, 0, V|F, "round" },
        { "up",   "round towards +infty", 0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_UP       }, 0, 0, V|F, "round" },
        { "near", "round to nearest",     0, AV_OPT_TYPE_CONST, { .i64 = AV_ROUND_NEAR_INF }, 0, 0, V|F, "round" },
    { "eof_action", "action performed for last frame", OFFSET(eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_ROUND }, 0, EOF_ACTION_NB-1, V|F, "eof_action" },
        { "round", "round similar to other frames", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ROUND }, 0, 0, V|F, "eof_action" },
        { "pass",  "pass through last frame",       0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS  }, 0, 0, V|F, "eof_action" },
    { NULL }
};
  79. AVFILTER_DEFINE_CLASS(fps);
  80. static av_cold int init(AVFilterContext *ctx)
  81. {
  82. FPSContext *s = ctx->priv;
  83. s->status_pts = AV_NOPTS_VALUE;
  84. s->next_pts = AV_NOPTS_VALUE;
  85. av_log(ctx, AV_LOG_VERBOSE, "fps=%d/%d\n", s->framerate.num, s->framerate.den);
  86. return 0;
  87. }
  88. /* Remove the first frame from the buffer, returning it */
  89. static AVFrame *shift_frame(AVFilterContext *ctx, FPSContext *s)
  90. {
  91. AVFrame *frame;
  92. /* Must only be called when there are frames in the buffer */
  93. av_assert1(s->frames_count > 0);
  94. frame = s->frames[0];
  95. s->frames[0] = s->frames[1];
  96. s->frames[1] = NULL;
  97. s->frames_count--;
  98. /* Update statistics counters */
  99. s->frames_out += s->cur_frame_out;
  100. if (s->cur_frame_out > 1) {
  101. av_log(ctx, AV_LOG_DEBUG, "Duplicated frame with pts %"PRId64" %d times\n",
  102. frame->pts, s->cur_frame_out - 1);
  103. s->dup += s->cur_frame_out - 1;
  104. } else if (s->cur_frame_out == 0) {
  105. av_log(ctx, AV_LOG_DEBUG, "Dropping frame with pts %"PRId64"\n",
  106. frame->pts);
  107. s->drop++;
  108. }
  109. s->cur_frame_out = 0;
  110. return frame;
  111. }
  112. static av_cold void uninit(AVFilterContext *ctx)
  113. {
  114. FPSContext *s = ctx->priv;
  115. AVFrame *frame;
  116. while (s->frames_count > 0) {
  117. frame = shift_frame(ctx, s);
  118. av_frame_free(&frame);
  119. }
  120. av_log(ctx, AV_LOG_VERBOSE, "%d frames in, %d frames out; %d frames dropped, "
  121. "%d frames duplicated.\n", s->frames_in, s->frames_out, s->drop, s->dup);
  122. }
/**
 * Configure the output link: a fixed frame rate with a matching timebase,
 * plus the input/output pts offsets implementing the start_time option.
 */
static int config_props(AVFilterLink* outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    FPSContext *s = ctx->priv;

    /* Output timestamps tick once per frame: timebase = 1/framerate */
    outlink->time_base = av_inv_q(s->framerate);
    outlink->frame_rate = s->framerate;

    /* Calculate the input and output pts offsets for start_time.
     * DBL_MAX is the option's "unset" sentinel (see fps_options). */
    if (s->start_time != DBL_MAX && s->start_time != AV_NOPTS_VALUE) {
        double first_pts = s->start_time * AV_TIME_BASE;
        /* NOTE(review): INT64_MAX converted to double rounds up to 2^63, so a
         * first_pts exactly at that boundary slips through this range check —
         * confirm whether that edge case matters here. */
        if (first_pts < INT64_MIN || first_pts > INT64_MAX) {
            av_log(ctx, AV_LOG_ERROR, "Start time %f cannot be represented in internal time base\n",
                   s->start_time);
            return AVERROR(EINVAL);
        }
        /* Rescale the same instant into both timebases so read_frame() can
         * subtract in the input timebase and add back in the output one. */
        s->in_pts_off  = av_rescale_q_rnd(first_pts, AV_TIME_BASE_Q, inlink->time_base,
                                          s->rounding | AV_ROUND_PASS_MINMAX);
        s->out_pts_off = av_rescale_q_rnd(first_pts, AV_TIME_BASE_Q, outlink->time_base,
                                          s->rounding | AV_ROUND_PASS_MINMAX);
        s->next_pts = s->out_pts_off;
        av_log(ctx, AV_LOG_VERBOSE, "Set first pts to (in:%"PRId64" out:%"PRId64") from start time %f\n",
               s->in_pts_off, s->out_pts_off, s->start_time);
    }

    return 0;
}
/* Read a frame from the input and save it in the buffer.
 * Returns 1 on success, or a negative AVERROR code on failure.
 * Must only be called with buffer room available (frames_count < 2) and
 * after ff_inlink_check_available_frame() has confirmed a frame is queued. */
static int read_frame(AVFilterContext *ctx, FPSContext *s, AVFilterLink *inlink, AVFilterLink *outlink)
{
    AVFrame *frame;
    int ret;
    int64_t in_pts;

    /* Must only be called when we have buffer room available */
    av_assert1(s->frames_count < 2);

    ret = ff_inlink_consume_frame(inlink, &frame);
    /* Caller must have run ff_inlink_check_available_frame first */
    av_assert1(ret);
    if (ret < 0)
        return ret;

    /* Convert frame pts to output timebase.
     * The dance with offsets is required to match the rounding behaviour of the
     * previous version of the fps filter when using the start_time option. */
    in_pts = frame->pts;
    frame->pts = s->out_pts_off + av_rescale_q_rnd(in_pts - s->in_pts_off,
                                                   inlink->time_base, outlink->time_base,
                                                   s->rounding | AV_ROUND_PASS_MINMAX);

    av_log(ctx, AV_LOG_DEBUG, "Read frame with in pts %"PRId64", out pts %"PRId64"\n",
           in_pts, frame->pts);

    s->frames[s->frames_count++] = frame;
    s->frames_in++;

    return 1;
}
/* Write a frame to the output.
 * Decides whether the oldest buffered frame should be dropped or emitted at
 * next_pts. Sets *again when the caller should run another step immediately
 * (a frame was dropped without producing output).
 * Returns 0 or the result of ff_filter_frame(); AVERROR(ENOMEM) on clone
 * failure. Must only be called with two buffered frames, or one buffered
 * frame once input status (EOF) has been received. */
static int write_frame(AVFilterContext *ctx, FPSContext *s, AVFilterLink *outlink, int *again)
{
    AVFrame *frame;

    av_assert1(s->frames_count == 2 || (s->status && s->frames_count == 1));

    /* We haven't yet determined the pts of the first frame */
    if (s->next_pts == AV_NOPTS_VALUE) {
        if (s->frames[0]->pts != AV_NOPTS_VALUE) {
            s->next_pts = s->frames[0]->pts;
            av_log(ctx, AV_LOG_VERBOSE, "Set first pts to %"PRId64"\n", s->next_pts);
        } else {
            /* Cannot place a frame without a timestamp on the output grid */
            av_log(ctx, AV_LOG_WARNING, "Discarding initial frame(s) with no "
                   "timestamp.\n");
            frame = shift_frame(ctx, s);
            av_frame_free(&frame);
            *again = 1;
            return 0;
        }
    }

    /* There are two conditions where we want to drop a frame:
     * - If we have two buffered frames and the second frame is acceptable
     *   as the next output frame, then drop the first buffered frame.
     * - If we have status (EOF) set, drop frames when we hit the
     *   status timestamp. */
    if ((s->frames_count == 2 && s->frames[1]->pts <= s->next_pts) ||
        (s->status            && s->status_pts <= s->next_pts)) {
        frame = shift_frame(ctx, s);
        av_frame_free(&frame);
        *again = 1;
        return 0;

    /* Output a copy of the first buffered frame */
    } else {
        frame = av_frame_clone(s->frames[0]);
        if (!frame)
            return AVERROR(ENOMEM);
        // Make sure Closed Captions will not be duplicated if the source
        // frame is emitted more than once
        av_frame_remove_side_data(s->frames[0], AV_FRAME_DATA_A53_CC);
        frame->pts = s->next_pts++;

        av_log(ctx, AV_LOG_DEBUG, "Writing frame with pts %"PRId64" to pts %"PRId64"\n",
               s->frames[0]->pts, frame->pts);
        s->cur_frame_out++;

        return ff_filter_frame(outlink, frame);
    }
}
  218. /* Convert status_pts to outlink timebase */
  219. static void update_eof_pts(AVFilterContext *ctx, FPSContext *s, AVFilterLink *inlink, AVFilterLink *outlink, int64_t status_pts)
  220. {
  221. int eof_rounding = (s->eof_action == EOF_ACTION_PASS) ? AV_ROUND_UP : s->rounding;
  222. s->status_pts = av_rescale_q_rnd(status_pts, inlink->time_base, outlink->time_base,
  223. eof_rounding | AV_ROUND_PASS_MINMAX);
  224. av_log(ctx, AV_LOG_DEBUG, "EOF is at pts %"PRId64"\n", s->status_pts);
  225. }
/* Filter activation callback: pull input frames, produce output frames, and
 * propagate status/EOF, following the ff_inlink/ff_outlink activate protocol. */
static int activate(AVFilterContext *ctx)
{
    FPSContext   *s       = ctx->priv;
    AVFilterLink *inlink  = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];

    int ret;
    int again = 0;
    int64_t status_pts;

    /* Propagate a status set on the output back to the input first */
    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);

    /* No buffered status: normal operation */
    if (!s->status) {

        /* Read available input frames if we have room */
        while (s->frames_count < 2 && ff_inlink_check_available_frame(inlink)) {
            ret = read_frame(ctx, s, inlink, outlink);
            if (ret < 0)
                return ret;
        }

        /* We do not yet have enough frames to produce output */
        if (s->frames_count < 2) {
            /* Check if we've hit EOF (or otherwise that an error status is set) */
            ret = ff_inlink_acknowledge_status(inlink, &s->status, &status_pts);
            if (ret > 0)
                update_eof_pts(ctx, s, inlink, outlink, status_pts);

            if (!ret) {
                /* If someone wants us to output, we'd better ask for more input */
                FF_FILTER_FORWARD_WANTED(outlink, inlink);
                return 0;
            }
        }
    }

    /* Buffered frames are available, so generate an output frame */
    if (s->frames_count > 0) {
        ret = write_frame(ctx, s, outlink, &again);
        /* Couldn't generate a frame, so schedule us to perform another step */
        if (again)
            ff_filter_set_ready(ctx, 100);
        return ret;
    }

    /* No frames left, so forward the status */
    if (s->status && s->frames_count == 0) {
        ff_outlink_set_status(outlink, s->status, s->next_pts);
        return 0;
    }

    return FFERROR_NOT_READY;
}
/* Single video input pad */
static const AVFilterPad avfilter_vf_fps_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};
/* Single video output pad; config_props fixes its frame rate and timebase */
static const AVFilterPad avfilter_vf_fps_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_props,
    },
    { NULL }
};
/* Filter definition registered with libavfilter */
AVFilter ff_vf_fps = {
    .name        = "fps",
    .description = NULL_IF_CONFIG_SMALL("Force constant framerate."),
    .init        = init,
    .uninit      = uninit,
    .priv_size   = sizeof(FPSContext),
    .priv_class  = &fps_class,
    .activate    = activate,
    .inputs      = avfilter_vf_fps_inputs,
    .outputs     = avfilter_vf_fps_outputs,
};