/*
 * Copyright (c) 2013 Nicolas George
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/avassert.h"
#include "avfilter.h"
#include "filters.h"
#include "framesync2.h"
#include "internal.h"

#define OFFSET(member) offsetof(FFFrameSync, member)

static const char *framesync_name(void *ptr)
{
    return "framesync";
}

static const AVClass framesync_class = {
    .version                   = LIBAVUTIL_VERSION_INT,
    .class_name                = "framesync",
    .item_name                 = framesync_name,
    .category                  = AV_CLASS_CATEGORY_FILTER,
    .option                    = NULL,
    .parent_log_context_offset = OFFSET(parent),
};

enum {
    STATE_BOF,
    STATE_RUN,
    STATE_EOF,
};

static int consume_from_fifos(FFFrameSync *fs);
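
/* Initialize the sync structure for nb_in inputs and allocate the per-input
 * array. The per-input parameters still have to be set and
 * ff_framesync2_configure() called before the sync can be used. Filters with
 * more than one output are not supported yet (see the comment below). */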
int ff_framesync2_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in)
{
    /* For filters with several outputs, we will not be able to assume which
       output is relevant for ff_outlink_frame_wanted() and
       ff_outlink_set_status(). To be designed when needed. */
    av_assert0(parent->nb_outputs == 1);

    fs->class  = &framesync_class;
    fs->parent = parent;
    fs->nb_in  = nb_in;

    fs->in = av_calloc(nb_in, sizeof(*fs->in));
    if (!fs->in)
        return AVERROR(ENOMEM);
    return 0;
}
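
/* Flag the whole sync structure as finished and forward EOF to the single
 * output of the owning filter. */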
static void framesync_eof(FFFrameSync *fs)
{
    fs->eof = 1;
    fs->frame_ready = 0;
    ff_outlink_set_status(fs->parent->outputs[0], AVERROR_EOF, AV_NOPTS_VALUE);
}
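
/* Recompute the synchronization level: the highest sync value among inputs
 * that have not reached EOF. If no such input is left, the whole sync
 * reaches EOF. */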
static void framesync_sync_level_update(FFFrameSync *fs)
{
    unsigned i, level = 0;

    for (i = 0; i < fs->nb_in; i++)
        if (fs->in[i].state != STATE_EOF)
            level = FFMAX(level, fs->in[i].sync);
    av_assert0(level <= fs->sync_level);
    if (level < fs->sync_level)
        av_log(fs, AV_LOG_VERBOSE, "Sync level %u\n", level);
    if (level)
        fs->sync_level = level;
    else
        framesync_eof(fs);
}
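
/* Configure the sync once all input parameters are set: choose a common time
 * base that represents the sync inputs exactly when possible (combining the
 * denominators through their LCM), falling back to 1/AV_TIME_BASE when the
 * combined denominator grows too large, then reset the per-input timestamps
 * and the initial sync level. */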
int ff_framesync2_configure(FFFrameSync *fs)
{
    unsigned i;
    int64_t gcd, lcm;

    if (!fs->time_base.num) {
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].sync) {
                if (fs->time_base.num) {
                    gcd = av_gcd(fs->time_base.den, fs->in[i].time_base.den);
                    lcm = (fs->time_base.den / gcd) * fs->in[i].time_base.den;
                    if (lcm < AV_TIME_BASE / 2) {
                        fs->time_base.den = lcm;
                        fs->time_base.num = av_gcd(fs->time_base.num,
                                                   fs->in[i].time_base.num);
                    } else {
                        fs->time_base.num = 1;
                        fs->time_base.den = AV_TIME_BASE;
                        break;
                    }
                } else {
                    fs->time_base = fs->in[i].time_base;
                }
            }
        }
        if (!fs->time_base.num) {
            av_log(fs, AV_LOG_ERROR, "Impossible to set time base\n");
            return AVERROR(EINVAL);
        }
        av_log(fs, AV_LOG_VERBOSE, "Selected %d/%d time base\n",
               fs->time_base.num, fs->time_base.den);
    }

    for (i = 0; i < fs->nb_in; i++)
        fs->in[i].pts = fs->in[i].pts_next = AV_NOPTS_VALUE;
    fs->sync_level = UINT_MAX;
    framesync_sync_level_update(fs);

    return 0;
}
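
/* Advance the sync to the next frame event: consume queued frames from the
 * input FIFOs, pick the lowest next timestamp among them, step every input
 * whose next frame carries that timestamp (or which is still before its first
 * frame with before set to EXT_INFINITY), and mark the event as ready as soon
 * as an input at the current sync level has a frame. An input that is still
 * before its first frame with before == EXT_STOP cancels the event. */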
static int framesync_advance(FFFrameSync *fs)
{
    unsigned i;
    int64_t pts;
    int ret;

    while (!(fs->frame_ready || fs->eof)) {
        ret = consume_from_fifos(fs);
        if (ret <= 0)
            return ret;

        pts = INT64_MAX;
        for (i = 0; i < fs->nb_in; i++)
            if (fs->in[i].have_next && fs->in[i].pts_next < pts)
                pts = fs->in[i].pts_next;
        if (pts == INT64_MAX) {
            framesync_eof(fs);
            break;
        }
        for (i = 0; i < fs->nb_in; i++) {
            if (fs->in[i].pts_next == pts ||
                (fs->in[i].before == EXT_INFINITY &&
                 fs->in[i].state == STATE_BOF)) {
                av_frame_free(&fs->in[i].frame);
                fs->in[i].frame      = fs->in[i].frame_next;
                fs->in[i].pts        = fs->in[i].pts_next;
                fs->in[i].frame_next = NULL;
                fs->in[i].pts_next   = AV_NOPTS_VALUE;
                fs->in[i].have_next  = 0;
                fs->in[i].state      = fs->in[i].frame ? STATE_RUN : STATE_EOF;
                if (fs->in[i].sync == fs->sync_level && fs->in[i].frame)
                    fs->frame_ready = 1;
                if (fs->in[i].state == STATE_EOF &&
                    fs->in[i].after == EXT_STOP)
                    framesync_eof(fs);
            }
        }
        if (fs->frame_ready)
            for (i = 0; i < fs->nb_in; i++)
                if ((fs->in[i].state == STATE_BOF &&
                     fs->in[i].before == EXT_STOP))
                    fs->frame_ready = 0;
        fs->pts = pts;
    }
    return 0;
}

static int64_t framesync_pts_extrapolate(FFFrameSync *fs, unsigned in,
                                         int64_t pts)
{
    /* Possible enhancement: use the link's frame rate */
    return pts + 1;
}
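
/* Queue frame as the next frame on input in, rescaling its timestamp to the
 * common time base; the slot must be free. */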
static void framesync_inject_frame(FFFrameSync *fs, unsigned in, AVFrame *frame)
{
    int64_t pts;

    av_assert0(!fs->in[in].have_next);
    av_assert0(frame);
    pts = av_rescale_q(frame->pts, fs->in[in].time_base, fs->time_base);
    frame->pts = pts;
    fs->in[in].frame_next = frame;
    fs->in[in].pts_next   = pts;
    fs->in[in].have_next  = 1;
}
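
/* Queue an end-of-stream event on input in: the input no longer takes part in
 * the sync level, and its next timestamp is pushed to INT64_MAX so it never
 * becomes the earliest event, except for a running input with after !=
 * EXT_INFINITY, whose last timestamp is extrapolated one step forward. */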
static void framesync_inject_status(FFFrameSync *fs, unsigned in, int status, int64_t pts)
{
    av_assert0(!fs->in[in].have_next);
    pts = fs->in[in].state != STATE_RUN || fs->in[in].after == EXT_INFINITY
        ? INT64_MAX : framesync_pts_extrapolate(fs, in, fs->in[in].pts);
    fs->in[in].sync = 0;
    framesync_sync_level_update(fs);
    fs->in[in].frame_next = NULL;
    fs->in[in].pts_next   = pts;
    fs->in[in].have_next  = 1;
}
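
/* Get the frame on input in for the current event. With get set, ownership
 * is transferred to the caller: the frame is either detached from the input,
 * or cloned and made writable when another sync input may still need the
 * original for a later event. */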
int ff_framesync2_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
                            unsigned get)
{
    AVFrame *frame;
    unsigned need_copy = 0, i;
    int64_t pts_next;
    int ret;

    if (!fs->in[in].frame) {
        *rframe = NULL;
        return 0;
    }
    frame = fs->in[in].frame;
    if (get) {
        /* Find out if we need to copy the frame: is there another sync
           stream, and do we know if its current frame will outlast this one? */
        pts_next = fs->in[in].have_next ? fs->in[in].pts_next : INT64_MAX;
        for (i = 0; i < fs->nb_in && !need_copy; i++)
            if (i != in && fs->in[i].sync &&
                (!fs->in[i].have_next || fs->in[i].pts_next < pts_next))
                need_copy = 1;
        if (need_copy) {
            if (!(frame = av_frame_clone(frame)))
                return AVERROR(ENOMEM);
            if ((ret = av_frame_make_writable(frame)) < 0) {
                av_frame_free(&frame);
                return ret;
            }
        } else {
            fs->in[in].frame = NULL;
        }
        fs->frame_ready = 0;
    }
    *rframe = frame;
    return 0;
}
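
/* Free all queued frames and the per-input array. */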
void ff_framesync2_uninit(FFFrameSync *fs)
{
    unsigned i;

    for (i = 0; i < fs->nb_in; i++) {
        av_frame_free(&fs->in[i].frame);
        av_frame_free(&fs->in[i].frame_next);
    }

    av_freep(&fs->in);
}
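
/* Pull a frame or a status change from every input link that does not yet
 * have its next event queued. Returns 1 when all inputs have something
 * queued, 0 when frames are still missing and have been requested,
 * FFERROR_NOT_READY when frames are missing but none is wanted downstream,
 * or a negative error code. */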
static int consume_from_fifos(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *frame = NULL;
    int64_t pts;
    unsigned i, nb_active, nb_miss;
    int ret, status;

    nb_active = nb_miss = 0;
    for (i = 0; i < fs->nb_in; i++) {
        if (fs->in[i].have_next || fs->in[i].state == STATE_EOF)
            continue;
        nb_active++;
        ret = ff_inlink_consume_frame(ctx->inputs[i], &frame);
        if (ret < 0)
            return ret;
        if (ret) {
            av_assert0(frame);
            framesync_inject_frame(fs, i, frame);
        } else {
            ret = ff_inlink_acknowledge_status(ctx->inputs[i], &status, &pts);
            if (ret > 0) {
                framesync_inject_status(fs, i, status, pts);
            } else if (!ret) {
                nb_miss++;
            }
        }
    }
    if (nb_miss) {
        if (nb_miss == nb_active && !ff_outlink_frame_wanted(ctx->outputs[0]))
            return FFERROR_NOT_READY;
        for (i = 0; i < fs->nb_in; i++)
            if (!fs->in[i].have_next && fs->in[i].state != STATE_EOF)
                ff_inlink_request_frame(ctx->inputs[i]);
        return 0;
    }
    return 1;
}
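
/* Examine the frames in the filter's input FIFOs and call on_event() when a
 * new frame event is ready; meant to be called from the filter's activate
 * callback. */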
int ff_framesync2_activate(FFFrameSync *fs)
{
    int ret;

    ret = framesync_advance(fs);
    if (ret < 0)
        return ret;
    if (fs->eof || !fs->frame_ready)
        return 0;
    ret = fs->on_event(fs);
    if (ret < 0)
        return ret;
    fs->frame_ready = 0;

    return 0;
}
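
/* Initialize for the usual dual-input case: the first input is the main one
 * (sync 2, nothing is output before its first frame, its last frame is
 * extended after EOF), the second is secondary (sync 1, reported as NULL
 * before its first frame, extended after EOF). */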
int ff_framesync2_init_dualinput(FFFrameSync *fs, AVFilterContext *parent)
{
    int ret;

    ret = ff_framesync2_init(fs, parent, 2);
    if (ret < 0)
        return ret;
    fs->in[0].time_base = parent->inputs[0]->time_base;
    fs->in[1].time_base = parent->inputs[1]->time_base;
    fs->in[0].sync   = 2;
    fs->in[0].before = EXT_STOP;
    fs->in[0].after  = EXT_INFINITY;
    fs->in[1].sync   = 1;
    fs->in[1].before = EXT_NULL;
    fs->in[1].after  = EXT_INFINITY;
    return 0;
}
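
/* Fetch the current main and secondary frames for a dual-input filter. The
 * main frame is owned by the caller and its pts is rescaled to the output
 * time base; the secondary frame stays owned by the sync and may be NULL,
 * in particular when the filter is disabled. */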
int ff_framesync2_dualinput_get(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *mainpic = NULL, *secondpic = NULL;
    int ret = 0;

    if ((ret = ff_framesync2_get_frame(fs, 0, &mainpic,   1)) < 0 ||
        (ret = ff_framesync2_get_frame(fs, 1, &secondpic, 0)) < 0) {
        av_frame_free(&mainpic);
        return ret;
    }
    av_assert0(mainpic);
    mainpic->pts = av_rescale_q(fs->pts, fs->time_base, ctx->outputs[0]->time_base);
    if (ctx->is_disabled)
        secondpic = NULL;
    *f0 = mainpic;
    *f1 = secondpic;
    return 0;
}
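
/* Same as ff_framesync2_dualinput_get(), but also make the main frame
 * writable. */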
int ff_framesync2_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1)
{
    int ret;

    ret = ff_framesync2_dualinput_get(fs, f0, f1);
    if (ret < 0)
        return ret;
    ret = ff_inlink_make_frame_writable(fs->parent->inputs[0], f0);
    if (ret < 0) {
        av_frame_free(f0);
        av_frame_free(f1);
        return ret;
    }
    return 0;
}