/*
 * Copyright (c) 2013 Nicolas George
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/avassert.h"
#include "libavutil/opt.h"
#include "avfilter.h"
#include "filters.h"
#include "framesync.h"
#include "internal.h"

#define OFFSET(member) offsetof(FFFrameSync, member)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM

static const char *framesync_name(void *ptr)
{
    return "framesync";
}

static const AVOption framesync_options[] = {
    { "eof_action", "Action to take when encountering EOF from secondary input ",
        OFFSET(opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
        EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
        { "repeat", "Repeat the previous frame.",   0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
        { "endall", "End both streams.",            0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
        { "pass",   "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS },   .flags = FLAGS, "eof_action" },
    { "shortest", "force termination when the shortest input terminates", OFFSET(opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { "repeatlast", "extend last frame of secondary streams beyond EOF", OFFSET(opt_repeatlast), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
    { NULL }
};

static const AVClass framesync_class = {
    .version                   = LIBAVUTIL_VERSION_INT,
    .class_name                = "framesync",
    .item_name                 = framesync_name,
    .category                  = AV_CLASS_CATEGORY_FILTER,
    .option                    = framesync_options,
    .parent_log_context_offset = OFFSET(parent),
};

const AVClass *ff_framesync_child_class_iterate(void **iter)
{
    const AVClass *c = *iter ? NULL : &framesync_class;
    *iter = (void *)(uintptr_t)c;
    return c;
}

enum {
    STATE_BOF,
    STATE_RUN,
    STATE_EOF,
};

static int consume_from_fifos(FFFrameSync *fs);

const AVClass *ff_framesync_get_class(void)
{
    return &framesync_class;
}

void ff_framesync_preinit(FFFrameSync *fs)
{
    if (fs->class)
        return;
    fs->class = &framesync_class;
    av_opt_set_defaults(fs);
}

int ff_framesync_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
{
    /* For filters with several outputs, we will not be able to assume which
       output is relevant for ff_outlink_frame_wanted() and
       ff_outlink_set_status(). To be designed when needed. */
    av_assert0(parent->nb_outputs == 1);

    ff_framesync_preinit(fs);
    fs->parent = parent;
    fs->nb_in  = nb_in;

    fs->in = av_calloc(nb_in, sizeof(*fs->in));
    if (!fs->in)
        return AVERROR(ENOMEM);
    return 0;
}

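/* Mark the whole synchronization as finished and propagate EOF on the
   parent filter's single output. */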
static void framesync_eof(FFFrameSync *fs)
{
    fs->eof = 1;
    fs->frame_ready = 0;
    ff_outlink_set_status(fs->parent->outputs[0], AVERROR_EOF, AV_NOPTS_VALUE);
}

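/* Recompute the current sync level as the highest sync value among the
   inputs that have not reached EOF; when no such input is left, the whole
   framesync reaches EOF. */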
static void framesync_sync_level_update(FFFrameSync *fs)
{
    unsigned i, level = 0;

    for (i = 0; i < fs->nb_in; i++)
        if (fs->in[i].state != STATE_EOF)
            level = FFMAX(level, fs->in[i].sync);
    av_assert0(level <= fs->sync_level);
    if (level < fs->sync_level)
        av_log(fs, AV_LOG_VERBOSE, "Sync level %u\n", level);
    if (level)
        fs->sync_level = level;
    else
        framesync_eof(fs);
}

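/* Resolve the interaction between the eof_action, shortest and repeatlast
   options, derive a common internal time base from the sync inputs with
   av_gcd_q() unless the caller already set one, and reset the per-input
   timestamps and the sync level. */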
int ff_framesync_configure(FFFrameSync *fs)
{
    unsigned i;

    if (!fs->opt_repeatlast || fs->opt_eof_action == EOF_ACTION_PASS) {
        fs->opt_repeatlast = 0;
        fs->opt_eof_action = EOF_ACTION_PASS;
    }
    if (fs->opt_shortest || fs->opt_eof_action == EOF_ACTION_ENDALL) {
        fs->opt_shortest = 1;
        fs->opt_eof_action = EOF_ACTION_ENDALL;
    }
    if (!fs->opt_repeatlast) {
        for (i = 1; i < fs->nb_in; i++) {
            fs->in[i].after = EXT_NULL;
            fs->in[i].sync  = 0;
        }
    }
    if (fs->opt_shortest) {
        for (i = 0; i < fs->nb_in; i++)
            fs->in[i].after = EXT_STOP;
    }

    if (!fs->time_base.num) {
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].sync) {
                if (fs->time_base.num) {
                    fs->time_base = av_gcd_q(fs->time_base, fs->in[i].time_base,
                                             AV_TIME_BASE / 2, AV_TIME_BASE_Q);
                } else {
                    fs->time_base = fs->in[i].time_base;
                }
            }
        }
        if (!fs->time_base.num) {
            av_log(fs, AV_LOG_ERROR, "Impossible to set time base\n");
            return AVERROR(EINVAL);
        }
        av_log(fs, AV_LOG_VERBOSE, "Selected %d/%d time base\n",
               fs->time_base.num, fs->time_base.den);
    }

    for (i = 0; i < fs->nb_in; i++)
        fs->in[i].pts = fs->in[i].pts_next = AV_NOPTS_VALUE;
    fs->sync_level = UINT_MAX;
    framesync_sync_level_update(fs);

    return 0;
}

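/* Advance the synchronization state: consume pending frames or statuses
   from the inputs, pick the lowest upcoming timestamp, promote the
   corresponding next frames to current, and mark an event as ready when an
   input at the current sync level carries a frame, unless some input with
   before=EXT_STOP has not produced its first frame yet. */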
static int framesync_advance(FFFrameSync *fs)
{
    unsigned i;
    int64_t pts;
    int ret;

    while (!(fs->frame_ready || fs->eof)) {
        ret = consume_from_fifos(fs);
        if (ret <= 0)
            return ret;

        pts = INT64_MAX;
        for (i = 0; i < fs->nb_in; i++)
            if (fs->in[i].have_next && fs->in[i].pts_next < pts)
                pts = fs->in[i].pts_next;
        if (pts == INT64_MAX) {
            framesync_eof(fs);
            break;
        }
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].pts_next == pts ||
                (fs->in[i].before == EXT_INFINITY &&
                 fs->in[i].state == STATE_BOF)) {
                av_frame_free(&fs->in[i].frame);
                fs->in[i].frame      = fs->in[i].frame_next;
                fs->in[i].pts        = fs->in[i].pts_next;
                fs->in[i].frame_next = NULL;
                fs->in[i].pts_next   = AV_NOPTS_VALUE;
                fs->in[i].have_next  = 0;
                fs->in[i].state      = fs->in[i].frame ? STATE_RUN : STATE_EOF;
                if (fs->in[i].sync == fs->sync_level && fs->in[i].frame)
                    fs->frame_ready = 1;
                if (fs->in[i].state == STATE_EOF &&
                    fs->in[i].after == EXT_STOP)
                    framesync_eof(fs);
            }
        }
        if (fs->frame_ready)
            for (i = 0; i < fs->nb_in; i++)
                if ((fs->in[i].state == STATE_BOF &&
                     fs->in[i].before == EXT_STOP))
                    fs->frame_ready = 0;
        fs->pts = pts;
    }
    return 0;
}

static int64_t framesync_pts_extrapolate(FFFrameSync *fs, unsigned in,
                                         int64_t pts)
{
    /* Possible enhancement: use the link's frame rate */
    return pts + 1;
}

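/* Queue a newly consumed frame as the next frame of input in, with its
   timestamp rescaled to the common time base. */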
static void framesync_inject_frame(FFFrameSync *fs, unsigned in, AVFrame *frame)
{
    int64_t pts;

    av_assert0(!fs->in[in].have_next);
    av_assert0(frame);
    pts = av_rescale_q(frame->pts, fs->in[in].time_base, fs->time_base);
    frame->pts = pts;
    fs->in[in].frame_next = frame;
    fs->in[in].pts_next   = pts;
    fs->in[in].have_next  = 1;
}

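/* Record end of stream on input in: the input stops taking part in the
   sync level (sync = 0) and a null next frame is queued whose timestamp
   marks when the current frame expires, either extrapolated or infinite
   depending on the "after" extrapolation mode. */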
static void framesync_inject_status(FFFrameSync *fs, unsigned in, int status, int64_t pts)
{
    av_assert0(!fs->in[in].have_next);
    pts = fs->in[in].state != STATE_RUN || fs->in[in].after == EXT_INFINITY
        ? INT64_MAX : framesync_pts_extrapolate(fs, in, fs->in[in].pts);
    fs->in[in].sync = 0;
    framesync_sync_level_update(fs);
    fs->in[in].frame_next = NULL;
    fs->in[in].pts_next   = pts;
    fs->in[in].have_next  = 1;
}

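/* Fetch the current frame of input in. When get is set, ownership is
   passed to the caller; the frame is cloned and made writable if another
   sync input may still need the original for a later event. */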
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
                           unsigned get)
{
    AVFrame *frame;
    unsigned need_copy = 0, i;
    int64_t pts_next;
    int ret;

    if (!fs->in[in].frame) {
        *rframe = NULL;
        return 0;
    }
    frame = fs->in[in].frame;
    if (get) {
        /* Find out if we need to copy the frame: is there another sync
           stream, and do we know if its current frame will outlast this one? */
        pts_next = fs->in[in].have_next ? fs->in[in].pts_next : INT64_MAX;
        for (i = 0; i < fs->nb_in && !need_copy; i++)
            if (i != in && fs->in[i].sync &&
                (!fs->in[i].have_next || fs->in[i].pts_next < pts_next))
                need_copy = 1;
        if (need_copy) {
            if (!(frame = av_frame_clone(frame)))
                return AVERROR(ENOMEM);
            if ((ret = av_frame_make_writable(frame)) < 0) {
                av_frame_free(&frame);
                return ret;
            }
        } else {
            fs->in[in].frame = NULL;
        }
        fs->frame_ready = 0;
    }
    *rframe = frame;
    return 0;
}

void ff_framesync_uninit(FFFrameSync *fs)
{
    unsigned i;

    for (i = 0; i < fs->nb_in; i++) {
        av_frame_free(&fs->in[i].frame);
        av_frame_free(&fs->in[i].frame_next);
    }

    av_freep(&fs->in);
}

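/* Fill the next-frame slot of every input that needs one. Return 1 when
   all inputs have either a queued next frame or reached EOF, 0 when frames
   were requested upstream and more input is needed, FFERROR_NOT_READY when
   nothing is available and the output is not asking for a frame, or a
   negative error code on failure. */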
static int consume_from_fifos(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *frame = NULL;
    int64_t pts;
    unsigned i, nb_active, nb_miss;
    int ret, status;

    nb_active = nb_miss = 0;
    for (i = 0; i < fs->nb_in; i++) {
        if (fs->in[i].have_next || fs->in[i].state == STATE_EOF)
            continue;
        nb_active++;
        ret = ff_inlink_consume_frame(ctx->inputs[i], &frame);
        if (ret < 0)
            return ret;
        if (ret) {
            av_assert0(frame);
            framesync_inject_frame(fs, i, frame);
        } else {
            ret = ff_inlink_acknowledge_status(ctx->inputs[i], &status, &pts);
            if (ret > 0) {
                framesync_inject_status(fs, i, status, pts);
            } else if (!ret) {
                nb_miss++;
            }
        }
    }
    if (nb_miss) {
        if (nb_miss == nb_active && !ff_outlink_frame_wanted(ctx->outputs[0]))
            return FFERROR_NOT_READY;
        for (i = 0; i < fs->nb_in; i++)
            if (!fs->in[i].have_next && fs->in[i].state != STATE_EOF)
                ff_inlink_request_frame(ctx->inputs[i]);
        return 0;
    }
    return 1;
}

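/* Entry point for the parent filter's activate callback: advance the sync
   machinery and, when an event is ready, invoke the user-supplied on_event
   callback exactly once. */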
int ff_framesync_activate(FFFrameSync *fs)
{
    int ret;

    ret = framesync_advance(fs);
    if (ret < 0)
        return ret;
    if (fs->eof || !fs->frame_ready)
        return 0;
    ret = fs->on_event(fs);
    if (ret < 0)
        return ret;
    fs->frame_ready = 0;

    return 0;
}

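/* Convenience setup for the common two-input case: input 0 is the main
   stream (higher sync priority, nothing is output before it starts),
   input 1 a secondary stream whose frames are NULL before it starts and
   whose last frame is extended after its EOF; ff_framesync_configure()
   may later adjust this according to the user options. */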
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
{
    int ret;

    ret = ff_framesync_init(fs, parent, 2);
    if (ret < 0)
        return ret;
    fs->in[0].time_base = parent->inputs[0]->time_base;
    fs->in[1].time_base = parent->inputs[1]->time_base;
    fs->in[0].sync   = 2;
    fs->in[0].before = EXT_STOP;
    fs->in[0].after  = EXT_INFINITY;
    fs->in[1].sync   = 1;
    fs->in[1].before = EXT_NULL;
    fs->in[1].after  = EXT_INFINITY;

    return 0;
}

int ff_framesync_dualinput_get(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *mainpic = NULL, *secondpic = NULL;
    int ret;

    if ((ret = ff_framesync_get_frame(fs, 0, &mainpic, 1)) < 0 ||
        (ret = ff_framesync_get_frame(fs, 1, &secondpic, 0)) < 0) {
        av_frame_free(&mainpic);
        return ret;
    }
    av_assert0(mainpic);
    mainpic->pts = av_rescale_q(fs->pts, fs->time_base, ctx->outputs[0]->time_base);
    if (ctx->is_disabled)
        secondpic = NULL;
    *f0 = mainpic;
    *f1 = secondpic;
    return 0;
}

int ff_framesync_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
{
    int ret;

    ret = ff_framesync_dualinput_get(fs, f0, f1);
    if (ret < 0)
        return ret;
    ret = ff_inlink_make_frame_writable(fs->parent->inputs[0], f0);
    if (ret < 0) {
        av_frame_free(f0);
        *f1 = NULL;
        return ret;
    }
    return 0;
}

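/* Illustrative sketch (not part of the original file): how a typical
 * two-input filter is expected to drive the API above. The names
 * BlendContext, blend_on_event, blend_config_output, blend_activate and
 * do_blend are hypothetical placeholders for a real filter's context,
 * callbacks and processing code; only the FFFrameSync and avfilter calls
 * themselves are real.
 *
 *     typedef struct BlendContext {
 *         const AVClass *class;
 *         FFFrameSync fs;              // sync state owned by the filter context
 *     } BlendContext;
 *
 *     static int blend_on_event(FFFrameSync *fs)
 *     {
 *         AVFilterContext *ctx = fs->parent;
 *         AVFrame *main_frame, *second_frame;
 *         int ret;
 *
 *         // Acquire the current pair of frames; the secondary may be NULL.
 *         ret = ff_framesync_dualinput_get(fs, &main_frame, &second_frame);
 *         if (ret < 0)
 *             return ret;
 *         if (second_frame)
 *             do_blend(ctx, main_frame, second_frame);   // hypothetical processing
 *         return ff_filter_frame(ctx->outputs[0], main_frame);
 *     }
 *
 *     static int blend_config_output(AVFilterLink *outlink)
 *     {
 *         AVFilterContext *ctx = outlink->src;
 *         BlendContext *s = ctx->priv;
 *         int ret;
 *
 *         ret = ff_framesync_init_dualinput(&s->fs, ctx);
 *         if (ret < 0)
 *             return ret;
 *         s->fs.on_event = blend_on_event;
 *         return ff_framesync_configure(&s->fs);
 *     }
 *
 *     static int blend_activate(AVFilterContext *ctx)
 *     {
 *         BlendContext *s = ctx->priv;
 *         return ff_framesync_activate(&s->fs);
 *     }
 *
 *     // ff_framesync_uninit(&s->fs) belongs in the filter's uninit callback.
 */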