/*
 * Copyright (c) 2013 Nicolas George
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/avassert.h"
#include "libavutil/opt.h"
#include "avfilter.h"
#include "filters.h"
#include "framesync.h"
#include "internal.h"

#define OFFSET(member) offsetof(FFFrameSync, member)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM

static const char *framesync_name(void *ptr)
{
    return "framesync";
}

static const AVOption framesync_options[] = {
    { "eof_action", "Action to take when encountering EOF from secondary input ",
        OFFSET(opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
        EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
        { "repeat", "Repeat the previous frame.",   0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
        { "endall", "End both streams.",            0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
        { "pass",   "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS },   .flags = FLAGS, "eof_action" },
    { "shortest", "force termination when the shortest input terminates", OFFSET(opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { "repeatlast", "extend last frame of secondary streams beyond EOF", OFFSET(opt_repeatlast), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
    { NULL }
};

static const AVClass framesync_class = {
    .version                   = LIBAVUTIL_VERSION_INT,
    .class_name                = "framesync",
    .item_name                 = framesync_name,
    .category                  = AV_CLASS_CATEGORY_FILTER,
    .option                    = framesync_options,
    .parent_log_context_offset = OFFSET(parent),
};
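
/* Per-input state: before the first frame has been seen (BOF), while frames
 * are flowing (RUN), and after EOF has been reached on that input (EOF). */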
enum {
    STATE_BOF,
    STATE_RUN,
    STATE_EOF,
};

static int consume_from_fifos(FFFrameSync *fs);

const AVClass *ff_framesync_get_class(void)
{
    return &framesync_class;
}

void ff_framesync_preinit(FFFrameSync *fs)
{
    if (fs->class)
        return;
    fs->class = &framesync_class;
    av_opt_set_defaults(fs);
}

int ff_framesync_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
{
    /* For filters with several outputs, we will not be able to assume which
       output is relevant for ff_outlink_frame_wanted() and
       ff_outlink_set_status(). To be designed when needed. */
    av_assert0(parent->nb_outputs == 1);

    ff_framesync_preinit(fs);
    fs->parent = parent;
    fs->nb_in  = nb_in;

    fs->in = av_calloc(nb_in, sizeof(*fs->in));
    if (!fs->in)
        return AVERROR(ENOMEM);
    return 0;
}
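
/* Mark the whole sync machinery as finished and propagate EOF to the
 * single output of the owning filter. */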
static void framesync_eof(FFFrameSync *fs)
{
    fs->eof = 1;
    fs->frame_ready = 0;
    ff_outlink_set_status(fs->parent->outputs[0], AVERROR_EOF, AV_NOPTS_VALUE);
}
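
/* Recompute the current sync level: the highest sync value among inputs
 * that have not reached EOF. If it drops to 0, no input can drive the
 * output anymore and the whole sync reaches EOF. */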
static void framesync_sync_level_update(FFFrameSync *fs)
{
    unsigned i, level = 0;

    for (i = 0; i < fs->nb_in; i++)
        if (fs->in[i].state != STATE_EOF)
            level = FFMAX(level, fs->in[i].sync);
    av_assert0(level <= fs->sync_level);
    if (level < fs->sync_level)
        av_log(fs, AV_LOG_VERBOSE, "Sync level %u\n", level);
    if (level)
        fs->sync_level = level;
    else
        framesync_eof(fs);
}
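
/* Translate the shortest / repeatlast / eof_action options into per-input
 * extrapolation and sync settings, then derive a common time base for all
 * synchronized inputs (least common multiple of the denominators, falling
 * back to 1/AV_TIME_BASE if that grows too large), and reset the per-input
 * timestamps. */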
int ff_framesync_configure(FFFrameSync *fs)
{
    unsigned i;
    int64_t gcd, lcm;

    if (!fs->opt_repeatlast || fs->opt_eof_action == EOF_ACTION_PASS) {
        fs->opt_repeatlast = 0;
        fs->opt_eof_action = EOF_ACTION_PASS;
    }
    if (fs->opt_shortest || fs->opt_eof_action == EOF_ACTION_ENDALL) {
        fs->opt_shortest = 1;
        fs->opt_eof_action = EOF_ACTION_ENDALL;
    }
    if (!fs->opt_repeatlast) {
        for (i = 1; i < fs->nb_in; i++) {
            fs->in[i].after = EXT_NULL;
            fs->in[i].sync  = 0;
        }
    }
    if (fs->opt_shortest) {
        for (i = 0; i < fs->nb_in; i++)
            fs->in[i].after = EXT_STOP;
    }

    if (!fs->time_base.num) {
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].sync) {
                if (fs->time_base.num) {
                    gcd = av_gcd(fs->time_base.den, fs->in[i].time_base.den);
                    lcm = (fs->time_base.den / gcd) * fs->in[i].time_base.den;
                    if (lcm < AV_TIME_BASE / 2) {
                        fs->time_base.den = lcm;
                        fs->time_base.num = av_gcd(fs->time_base.num,
                                                   fs->in[i].time_base.num);
                    } else {
                        fs->time_base.num = 1;
                        fs->time_base.den = AV_TIME_BASE;
                        break;
                    }
                } else {
                    fs->time_base = fs->in[i].time_base;
                }
            }
        }
        if (!fs->time_base.num) {
            av_log(fs, AV_LOG_ERROR, "Impossible to set time base\n");
            return AVERROR(EINVAL);
        }
        av_log(fs, AV_LOG_VERBOSE, "Selected %d/%d time base\n",
               fs->time_base.num, fs->time_base.den);
    }

    for (i = 0; i < fs->nb_in; i++)
        fs->in[i].pts = fs->in[i].pts_next = AV_NOPTS_VALUE;
    fs->sync_level = UINT_MAX;
    framesync_sync_level_update(fs);

    return 0;
}
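
/* Advance the sync state until either a set of frames is ready for the
 * current sync level or EOF is reached: pull queued frames from the input
 * FIFOs, pick the smallest pending timestamp, promote every input whose
 * next frame carries that timestamp, and update per-input states. */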
static int framesync_advance(FFFrameSync *fs)
{
    unsigned i;
    int64_t pts;
    int ret;

    while (!(fs->frame_ready || fs->eof)) {
        ret = consume_from_fifos(fs);
        if (ret <= 0)
            return ret;

        pts = INT64_MAX;
        for (i = 0; i < fs->nb_in; i++)
            if (fs->in[i].have_next && fs->in[i].pts_next < pts)
                pts = fs->in[i].pts_next;
        if (pts == INT64_MAX) {
            framesync_eof(fs);
            break;
        }
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].pts_next == pts ||
                (fs->in[i].before == EXT_INFINITY &&
                 fs->in[i].state == STATE_BOF)) {
                av_frame_free(&fs->in[i].frame);
                fs->in[i].frame      = fs->in[i].frame_next;
                fs->in[i].pts        = fs->in[i].pts_next;
                fs->in[i].frame_next = NULL;
                fs->in[i].pts_next   = AV_NOPTS_VALUE;
                fs->in[i].have_next  = 0;
                fs->in[i].state      = fs->in[i].frame ? STATE_RUN : STATE_EOF;
                if (fs->in[i].sync == fs->sync_level && fs->in[i].frame)
                    fs->frame_ready = 1;
                if (fs->in[i].state == STATE_EOF &&
                    fs->in[i].after == EXT_STOP)
                    framesync_eof(fs);
            }
        }
        if (fs->frame_ready)
            for (i = 0; i < fs->nb_in; i++)
                if ((fs->in[i].state == STATE_BOF &&
                     fs->in[i].before == EXT_STOP))
                    fs->frame_ready = 0;
        fs->pts = pts;
    }
    return 0;
}
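
/* Guess a timestamp just past the last known frame of an input; used to
 * decide how long the final frame lasts once that input has ended. */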
static int64_t framesync_pts_extrapolate(FFFrameSync *fs, unsigned in,
                                         int64_t pts)
{
    /* Possible enhancement: use the link's frame rate */
    return pts + 1;
}
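
/* Queue a newly arrived frame as the "next" frame of input `in`, rescaling
 * its timestamp from the input time base to the common sync time base. */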
static void framesync_inject_frame(FFFrameSync *fs, unsigned in, AVFrame *frame)
{
    int64_t pts;

    av_assert0(!fs->in[in].have_next);
    av_assert0(frame);
    pts = av_rescale_q(frame->pts, fs->in[in].time_base, fs->time_base);
    frame->pts = pts;
    fs->in[in].frame_next = frame;
    fs->in[in].pts_next   = pts;
    fs->in[in].have_next  = 1;
}
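
/* Record EOF on input `in`: queue a NULL "next" frame whose timestamp is
 * either extrapolated just past the last frame, or pushed to INT64_MAX when
 * that frame should last forever (EXT_INFINITY) or no frame was ever seen,
 * and drop the input from the sync level computation. */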
static void framesync_inject_status(FFFrameSync *fs, unsigned in, int status, int64_t pts)
{
    av_assert0(!fs->in[in].have_next);
    pts = fs->in[in].state != STATE_RUN || fs->in[in].after == EXT_INFINITY
        ? INT64_MAX : framesync_pts_extrapolate(fs, in, fs->in[in].pts);
    fs->in[in].sync = 0;
    framesync_sync_level_update(fs);
    fs->in[in].frame_next = NULL;
    fs->in[in].pts_next   = pts;
    fs->in[in].have_next  = 1;
}
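
/* Hand the current frame of input `in` to the caller. With get != 0 the
 * caller takes ownership: if another synchronized input may still need this
 * frame for a later event, a writable clone is returned instead of the
 * original. With get == 0 the frame stays owned by the framesync. */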
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
                           unsigned get)
{
    AVFrame *frame;
    unsigned need_copy = 0, i;
    int64_t pts_next;
    int ret;

    if (!fs->in[in].frame) {
        *rframe = NULL;
        return 0;
    }
    frame = fs->in[in].frame;
    if (get) {
        /* Find out if we need to copy the frame: is there another sync
           stream, and do we know if its current frame will outlast this one? */
        pts_next = fs->in[in].have_next ? fs->in[in].pts_next : INT64_MAX;
        for (i = 0; i < fs->nb_in && !need_copy; i++)
            if (i != in && fs->in[i].sync &&
                (!fs->in[i].have_next || fs->in[i].pts_next < pts_next))
                need_copy = 1;
        if (need_copy) {
            if (!(frame = av_frame_clone(frame)))
                return AVERROR(ENOMEM);
            if ((ret = av_frame_make_writable(frame)) < 0) {
                av_frame_free(&frame);
                return ret;
            }
        } else {
            fs->in[in].frame = NULL;
        }
        fs->frame_ready = 0;
    }
    *rframe = frame;
    return 0;
}

void ff_framesync_uninit(FFFrameSync *fs)
{
    unsigned i;

    for (i = 0; i < fs->nb_in; i++) {
        av_frame_free(&fs->in[i].frame);
        av_frame_free(&fs->in[i].frame_next);
    }

    av_freep(&fs->in);
}
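
/* Pull one frame (or an EOF status) from every input that does not yet have
 * a queued "next" frame. Returns 1 when all inputs are ready or at EOF,
 * 0 after requesting more frames from the inputs that are still missing,
 * FFERROR_NOT_READY when nothing was obtained and no output frame is
 * wanted, or a negative error code. */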
static int consume_from_fifos(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *frame = NULL;
    int64_t pts;
    unsigned i, nb_active, nb_miss;
    int ret, status;

    nb_active = nb_miss = 0;
    for (i = 0; i < fs->nb_in; i++) {
        if (fs->in[i].have_next || fs->in[i].state == STATE_EOF)
            continue;
        nb_active++;
        ret = ff_inlink_consume_frame(ctx->inputs[i], &frame);
        if (ret < 0)
            return ret;
        if (ret) {
            av_assert0(frame);
            framesync_inject_frame(fs, i, frame);
        } else {
            ret = ff_inlink_acknowledge_status(ctx->inputs[i], &status, &pts);
            if (ret > 0) {
                framesync_inject_status(fs, i, status, pts);
            } else if (!ret) {
                nb_miss++;
            }
        }
    }
    if (nb_miss) {
        if (nb_miss == nb_active && !ff_outlink_frame_wanted(ctx->outputs[0]))
            return FFERROR_NOT_READY;
        for (i = 0; i < fs->nb_in; i++)
            if (!fs->in[i].have_next && fs->in[i].state != STATE_EOF)
                ff_inlink_request_frame(ctx->inputs[i]);
        return 0;
    }
    return 1;
}
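
/* Drive the sync from the owning filter's activate callback: advance the
 * state and, when a synchronized set of frames is ready, invoke the
 * user-supplied on_event callback.
 *
 * Illustrative sketch (not taken from this file; do_blend and YourContext
 * are hypothetical names) of how a dual-input filter typically plugs in:
 *
 *     static int do_blend(FFFrameSync *fs)
 *     {
 *         AVFrame *mainpic, *secondpic;
 *         int ret = ff_framesync_dualinput_get(fs, &mainpic, &secondpic);
 *         if (ret < 0)
 *             return ret;
 *         // ... process mainpic, optionally using secondpic ...
 *         return ff_filter_frame(fs->parent->outputs[0], mainpic);
 *     }
 *
 *     static int activate(AVFilterContext *ctx)
 *     {
 *         YourContext *s = ctx->priv;
 *         return ff_framesync_activate(&s->fs);
 *     }
 */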
int ff_framesync_activate(FFFrameSync *fs)
{
    int ret;

    ret = framesync_advance(fs);
    if (ret < 0)
        return ret;
    if (fs->eof || !fs->frame_ready)
        return 0;
    ret = fs->on_event(fs);
    if (ret < 0)
        return ret;
    fs->frame_ready = 0;

    return 0;
}
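
/* Set up the common "main + secondary input" case: the main input (0) has
 * the higher sync priority and bounds the start of the output, while the
 * secondary input (1) is extended as needed around its own start and end. */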
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
{
    int ret;

    ret = ff_framesync_init(fs, parent, 2);
    if (ret < 0)
        return ret;
    fs->in[0].time_base = parent->inputs[0]->time_base;
    fs->in[1].time_base = parent->inputs[1]->time_base;
    fs->in[0].sync   = 2;
    fs->in[0].before = EXT_STOP;
    fs->in[0].after  = EXT_INFINITY;
    fs->in[1].sync   = 1;
    fs->in[1].before = EXT_NULL;
    fs->in[1].after  = EXT_INFINITY;
    return 0;
}
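
/* Fetch the current synchronized pair: the main frame is handed to the
 * caller (with its pts rescaled to the output time base), the secondary
 * frame stays owned by the framesync and is returned as NULL when the
 * filter is disabled. */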
int ff_framesync_dualinput_get(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *mainpic = NULL, *secondpic = NULL;
    int ret;

    if ((ret = ff_framesync_get_frame(fs, 0, &mainpic,   1)) < 0 ||
        (ret = ff_framesync_get_frame(fs, 1, &secondpic, 0)) < 0) {
        av_frame_free(&mainpic);
        return ret;
    }
    av_assert0(mainpic);
    mainpic->pts = av_rescale_q(fs->pts, fs->time_base, ctx->outputs[0]->time_base);
    if (ctx->is_disabled)
        secondpic = NULL;
    *f0 = mainpic;
    *f1 = secondpic;
    return 0;
}
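
/* Same as ff_framesync_dualinput_get(), but additionally ensures that the
 * returned main frame is writable. */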
int ff_framesync_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
{
    int ret;

    ret = ff_framesync_dualinput_get(fs, f0, f1);
    if (ret < 0)
        return ret;
    ret = ff_inlink_make_frame_writable(fs->parent->inputs[0], f0);
    if (ret < 0) {
        av_frame_free(f0);
        *f1 = NULL;
        return ret;
    }
    return 0;
}