You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

422 lines
13KB

  1. /*
  2. * Copyright (c) 2013 Nicolas George
  3. *
  4. * This file is part of FFmpeg.
  5. *
  6. * FFmpeg is free software; you can redistribute it and/or
  7. * modify it under the terms of the GNU Lesser General Public License
  8. * as published by the Free Software Foundation; either
  9. * version 2.1 of the License, or (at your option) any later version.
  10. *
  11. * FFmpeg is distributed in the hope that it will be useful,
  12. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. * GNU Lesser General Public License for more details.
  15. *
  16. * You should have received a copy of the GNU Lesser General Public License
  17. * along with FFmpeg; if not, write to the Free Software Foundation, Inc.,
  18. * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  19. */
  20. #include "libavutil/avassert.h"
  21. #include "libavutil/opt.h"
  22. #include "avfilter.h"
  23. #include "filters.h"
  24. #include "framesync2.h"
  25. #include "internal.h"
  26. #define OFFSET(member) offsetof(FFFrameSync, member)
  27. #define FLAGS AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM
/* Policy for the secondary input(s) once they reach EOF. */
enum EOFAction {
    EOF_ACTION_REPEAT, ///< repeat the last frame
    EOF_ACTION_ENDALL, ///< end all the streams
    EOF_ACTION_PASS    ///< pass the main input through
};
  33. static const char *framesync_name(void *ptr)
  34. {
  35. return "framesync";
  36. }
/* Options shared by every filter built on framesync. eof_action, shortest
   and repeatlast are redundant ways to express the same EOF policy; they
   are reconciled in ff_framesync2_configure(). */
static const AVOption framesync_options[] = {
    { "eof_action", "Action to take when encountering EOF from secondary input ",
        OFFSET(opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
        EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
        { "repeat", "Repeat the previous frame.",   0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
        { "endall", "End both streams.",            0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
        { "pass",   "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS },   .flags = FLAGS, "eof_action" },
    { "shortest", "force termination when the shortest input terminates", OFFSET(opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
    { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(opt_repeatlast), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, FLAGS },
    { NULL }
};
/* AVClass for option handling and logging; log messages are forwarded to
   the parent filter context through parent_log_context_offset. */
static const AVClass framesync_class = {
    .version                   = LIBAVUTIL_VERSION_INT,
    .class_name                = "framesync",
    .item_name                 = framesync_name,
    .category                  = AV_CLASS_CATEGORY_FILTER,
    .option                    = framesync_options,
    .parent_log_context_offset = OFFSET(parent),
};
/* Per-input life-cycle state. */
enum {
    STATE_BOF, ///< before the first frame
    STATE_RUN, ///< a current frame is available
    STATE_EOF, ///< after the last frame
};

static int consume_from_fifos(FFFrameSync *fs);
/* Return the AVClass associated with FFFrameSync contexts. */
const AVClass *framesync2_get_class(void)
{
    return &framesync_class;
}
  66. void ff_framesync2_preinit(FFFrameSync *fs)
  67. {
  68. if (fs->class)
  69. return;
  70. fs->class = &framesync_class;
  71. av_opt_set_defaults(fs);
  72. }
  73. int ff_framesync2_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
  74. {
  75. /* For filters with several outputs, we will not be able to assume which
  76. output is relevant for ff_outlink_frame_wanted() and
  77. ff_outlink_set_status(). To be designed when needed. */
  78. av_assert0(parent->nb_outputs == 1);
  79. ff_framesync2_preinit(fs);
  80. fs->parent = parent;
  81. fs->nb_in = nb_in;
  82. fs->in = av_calloc(nb_in, sizeof(*fs->in));
  83. if (!fs->in)
  84. return AVERROR(ENOMEM);
  85. return 0;
  86. }
/* Put the whole context at EOF and propagate the status to the output. */
static void framesync_eof(FFFrameSync *fs)
{
    fs->eof = 1;
    fs->frame_ready = 0;
    ff_outlink_set_status(fs->parent->outputs[0], AVERROR_EOF, AV_NOPTS_VALUE);
}
  93. static void framesync_sync_level_update(FFFrameSync *fs)
  94. {
  95. unsigned i, level = 0;
  96. for (i = 0; i < fs->nb_in; i++)
  97. if (fs->in[i].state != STATE_EOF)
  98. level = FFMAX(level, fs->in[i].sync);
  99. av_assert0(level <= fs->sync_level);
  100. if (level < fs->sync_level)
  101. av_log(fs, AV_LOG_VERBOSE, "Sync level %u\n", level);
  102. if (level)
  103. fs->sync_level = level;
  104. else
  105. framesync_eof(fs);
  106. }
/**
 * Finalize the configuration once all inputs have been described.
 *
 * Reconciles the redundant EOF options (repeatlast/shortest/eof_action),
 * derives the per-input extrapolation modes from them, and, if the caller
 * did not set a common time base, computes one able to represent the
 * timestamps of every sync input.
 *
 * Returns 0 on success, AVERROR(EINVAL) if no time base can be chosen.
 */
int ff_framesync2_configure(FFFrameSync *fs)
{
    unsigned i;
    int64_t gcd, lcm;

    /* repeatlast=0 and eof_action=pass imply each other. */
    if (!fs->opt_repeatlast || fs->opt_eof_action == EOF_ACTION_PASS) {
        fs->opt_repeatlast = 0;
        fs->opt_eof_action = EOF_ACTION_PASS;
    }
    /* shortest=1 and eof_action=endall imply each other. */
    if (fs->opt_shortest || fs->opt_eof_action == EOF_ACTION_ENDALL) {
        fs->opt_shortest = 1;
        fs->opt_eof_action = EOF_ACTION_ENDALL;
    }
    if (fs->opt_shortest) {
        for (i = 0; i < fs->nb_in; i++)
            fs->in[i].after = EXT_STOP;
    }
    if (!fs->opt_repeatlast) {
        /* Secondary inputs become NULL after EOF and stop syncing. */
        for (i = 1; i < fs->nb_in; i++) {
            fs->in[i].after = EXT_NULL;
            fs->in[i].sync = 0;
        }
    }
    if (!fs->time_base.num) {
        /* Merge the time bases of all sync inputs: gcd of numerators,
           lcm of denominators, falling back to 1/AV_TIME_BASE when the
           lcm grows too large to be practical. */
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].sync) {
                if (fs->time_base.num) {
                    gcd = av_gcd(fs->time_base.den, fs->in[i].time_base.den);
                    lcm = (fs->time_base.den / gcd) * fs->in[i].time_base.den;
                    if (lcm < AV_TIME_BASE / 2) {
                        fs->time_base.den = lcm;
                        fs->time_base.num = av_gcd(fs->time_base.num,
                                                   fs->in[i].time_base.num);
                    } else {
                        fs->time_base.num = 1;
                        fs->time_base.den = AV_TIME_BASE;
                        break;
                    }
                } else {
                    /* First sync input seen: start from its time base. */
                    fs->time_base = fs->in[i].time_base;
                }
            }
        }
        if (!fs->time_base.num) {
            av_log(fs, AV_LOG_ERROR, "Impossible to set time base\n");
            return AVERROR(EINVAL);
        }
        av_log(fs, AV_LOG_VERBOSE, "Selected %d/%d time base\n",
               fs->time_base.num, fs->time_base.den);
    }

    for (i = 0; i < fs->nb_in; i++)
        fs->in[i].pts = fs->in[i].pts_next = AV_NOPTS_VALUE;
    fs->sync_level = UINT_MAX;
    framesync_sync_level_update(fs);

    return 0;
}
/**
 * Advance the sync machinery until a frame event is ready or EOF.
 *
 * Each iteration fills the input queues, finds the lowest pending
 * timestamp, and promotes the queued frame of every input that reaches
 * that timestamp (or that is still before its first frame with
 * before == EXT_INFINITY) from "next" to "current".
 *
 * Returns 0 on success (fs->frame_ready / fs->eof reflect the outcome)
 * or a negative error code from consume_from_fifos().
 */
static int framesync_advance(FFFrameSync *fs)
{
    unsigned i;
    int64_t pts;
    int ret;

    while (!(fs->frame_ready || fs->eof)) {
        ret = consume_from_fifos(fs);
        if (ret <= 0)
            return ret;

        /* Earliest next timestamp among all inputs. */
        pts = INT64_MAX;
        for (i = 0; i < fs->nb_in; i++)
            if (fs->in[i].have_next && fs->in[i].pts_next < pts)
                pts = fs->in[i].pts_next;
        if (pts == INT64_MAX) {
            /* Nothing queued anywhere: global EOF. */
            framesync_eof(fs);
            break;
        }
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].pts_next == pts ||
                (fs->in[i].before == EXT_INFINITY &&
                 fs->in[i].state == STATE_BOF)) {
                /* Promote the queued frame (possibly NULL = EOF marker)
                   to current on this input. */
                av_frame_free(&fs->in[i].frame);
                fs->in[i].frame = fs->in[i].frame_next;
                fs->in[i].pts = fs->in[i].pts_next;
                fs->in[i].frame_next = NULL;
                fs->in[i].pts_next = AV_NOPTS_VALUE;
                fs->in[i].have_next = 0;
                fs->in[i].state = fs->in[i].frame ? STATE_RUN : STATE_EOF;
                /* A new frame at the current sync level triggers an event. */
                if (fs->in[i].sync == fs->sync_level && fs->in[i].frame)
                    fs->frame_ready = 1;
                if (fs->in[i].state == STATE_EOF &&
                    fs->in[i].after == EXT_STOP)
                    framesync_eof(fs);
            }
        }
        /* Suppress the event while an input with before == EXT_STOP has
           not produced its first frame yet. */
        if (fs->frame_ready)
            for (i = 0; i < fs->nb_in; i++)
                if ((fs->in[i].state == STATE_BOF &&
                     fs->in[i].before == EXT_STOP))
                    fs->frame_ready = 0;
        fs->pts = pts;
    }
    return 0;
}
/* Guess the timestamp of the frame that would follow the one at pts on
   input in. */
static int64_t framesync_pts_extrapolate(FFFrameSync *fs, unsigned in,
                                         int64_t pts)
{
    /* Possible enhancement: use the link's frame rate */
    return pts + 1;
}
/* Queue frame as the next frame on input in, rescaling its timestamp to
   the common time base. Ownership of frame is taken. */
static void framesync_inject_frame(FFFrameSync *fs, unsigned in, AVFrame *frame)
{
    int64_t pts;

    av_assert0(!fs->in[in].have_next);
    av_assert0(frame);
    pts = av_rescale_q(frame->pts, fs->in[in].time_base, fs->time_base);
    frame->pts = pts;
    fs->in[in].frame_next = frame;
    fs->in[in].pts_next = pts;
    fs->in[in].have_next = 1;
}
/* Queue an end-of-stream on input in: the "next frame" becomes NULL with
   a timestamp extrapolated from the current frame, or INT64_MAX if the
   input never started or extends to infinity. The status code and the
   incoming pts are currently unused (pts is overwritten below); the
   input also stops participating in sync. */
static void framesync_inject_status(FFFrameSync *fs, unsigned in, int status, int64_t pts)
{
    av_assert0(!fs->in[in].have_next);
    pts = fs->in[in].state != STATE_RUN || fs->in[in].after == EXT_INFINITY
        ? INT64_MAX : framesync_pts_extrapolate(fs, in, fs->in[in].pts);
    fs->in[in].sync = 0;
    framesync_sync_level_update(fs);
    fs->in[in].frame_next = NULL;
    fs->in[in].pts_next = pts;
    fs->in[in].have_next = 1;
}
/**
 * Retrieve the current frame of input in.
 *
 * With get=0 a borrowed reference still owned by the context is returned.
 * With get=1 ownership is transferred to the caller: the frame is handed
 * over directly when no other sync stream may still need it, otherwise a
 * writable clone is returned. *rframe is set to NULL when the input has
 * no current frame. Returns 0 or a negative AVERROR code.
 */
int ff_framesync2_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
                            unsigned get)
{
    AVFrame *frame;
    unsigned need_copy = 0, i;
    int64_t pts_next;
    int ret;

    if (!fs->in[in].frame) {
        *rframe = NULL;
        return 0;
    }
    frame = fs->in[in].frame;
    if (get) {
        /* Find out if we need to copy the frame: is there another sync
           stream, and do we know if its current frame will outlast this one? */
        pts_next = fs->in[in].have_next ? fs->in[in].pts_next : INT64_MAX;
        for (i = 0; i < fs->nb_in && !need_copy; i++)
            if (i != in && fs->in[i].sync &&
                (!fs->in[i].have_next || fs->in[i].pts_next < pts_next))
                need_copy = 1;
        if (need_copy) {
            if (!(frame = av_frame_clone(frame)))
                return AVERROR(ENOMEM);
            if ((ret = av_frame_make_writable(frame)) < 0) {
                av_frame_free(&frame);
                return ret;
            }
        } else {
            /* Nobody else can still need it: hand over our reference. */
            fs->in[in].frame = NULL;
        }
        fs->frame_ready = 0;
    }
    *rframe = frame;
    return 0;
}
  269. void ff_framesync2_uninit(FFFrameSync *fs)
  270. {
  271. unsigned i;
  272. for (i = 0; i < fs->nb_in; i++) {
  273. av_frame_free(&fs->in[i].frame);
  274. av_frame_free(&fs->in[i].frame_next);
  275. }
  276. av_freep(&fs->in);
  277. }
/**
 * Try to obtain a queued frame or status change for every input that
 * needs one.
 *
 * Returns 1 when all inputs have something queued (or are at EOF),
 * 0 when some input is still missing data (more frames were requested),
 * FFERROR_NOT_READY when nothing can progress because no output frame is
 * wanted, or a negative error code.
 */
static int consume_from_fifos(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *frame = NULL;
    int64_t pts;
    unsigned i, nb_active, nb_miss;
    int ret, status;

    nb_active = nb_miss = 0;
    for (i = 0; i < fs->nb_in; i++) {
        if (fs->in[i].have_next || fs->in[i].state == STATE_EOF)
            continue;
        nb_active++;
        ret = ff_inlink_consume_frame(ctx->inputs[i], &frame);
        if (ret < 0)
            return ret;
        if (ret) {
            av_assert0(frame);
            framesync_inject_frame(fs, i, frame);
        } else {
            /* No frame available: look for an EOF/error status instead. */
            ret = ff_inlink_acknowledge_status(ctx->inputs[i], &status, &pts);
            if (ret > 0) {
                framesync_inject_status(fs, i, status, pts);
            } else if (!ret) {
                nb_miss++;
            }
        }
    }
    if (nb_miss) {
        /* Only request more frames when the output actually wants one. */
        if (nb_miss == nb_active && !ff_outlink_frame_wanted(ctx->outputs[0]))
            return FFERROR_NOT_READY;
        for (i = 0; i < fs->nb_in; i++)
            if (!fs->in[i].have_next && fs->in[i].state != STATE_EOF)
                ff_inlink_request_frame(ctx->inputs[i]);
        return 0;
    }
    return 1;
}
  315. int ff_framesync2_activate(FFFrameSync *fs)
  316. {
  317. int ret;
  318. ret = framesync_advance(fs);
  319. if (ret < 0)
  320. return ret;
  321. if (fs->eof || !fs->frame_ready)
  322. return 0;
  323. ret = fs->on_event(fs);
  324. if (ret < 0)
  325. return ret;
  326. fs->frame_ready = 0;
  327. return 0;
  328. }
  329. int ff_framesync2_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
  330. {
  331. int ret;
  332. ret = ff_framesync2_init(fs, parent, 2);
  333. if (ret < 0)
  334. return ret;
  335. fs->in[0].time_base = parent->inputs[0]->time_base;
  336. fs->in[1].time_base = parent->inputs[1]->time_base;
  337. fs->in[0].sync = 2;
  338. fs->in[0].before = EXT_STOP;
  339. fs->in[0].after = EXT_INFINITY;
  340. fs->in[1].sync = 1;
  341. fs->in[1].before = EXT_NULL;
  342. fs->in[1].after = EXT_INFINITY;
  343. return 0;
  344. }
  345. int ff_framesync2_dualinput_get(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
  346. {
  347. AVFilterContext *ctx = fs->parent;
  348. AVFrame *mainpic = NULL, *secondpic = NULL;
  349. int ret = 0;
  350. if ((ret = ff_framesync2_get_frame(fs, 0, &mainpic, 1)) < 0 ||
  351. (ret = ff_framesync2_get_frame(fs, 1, &secondpic, 0)) < 0) {
  352. av_frame_free(&mainpic);
  353. return ret;
  354. }
  355. if (ret < 0)
  356. return ret;
  357. av_assert0(mainpic);
  358. mainpic->pts = av_rescale_q(fs->pts, fs->time_base, ctx->outputs[0]->time_base);
  359. if (ctx->is_disabled)
  360. secondpic = NULL;
  361. *f0 = mainpic;
  362. *f1 = secondpic;
  363. return 0;
  364. }
/**
 * Same as ff_framesync2_dualinput_get(), but ensure *f0 is writable.
 * On failure both frames are freed (and the pointers reset) before the
 * error code is returned.
 */
int ff_framesync2_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
{
    int ret;

    ret = ff_framesync2_dualinput_get(fs, f0, f1);
    if (ret < 0)
        return ret;
    ret = ff_inlink_make_frame_writable(fs->parent->inputs[0], f0);
    if (ret < 0) {
        av_frame_free(f0);
        av_frame_free(f1);
        return ret;
    }
    return 0;
}