You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

626 lines
21KB

  1. /*
  2. * BobWeaver Deinterlacing Filter
  3. * Copyright (C) 2016 Thomas Mundt <loudmax@yahoo.de>
  4. *
  5. * Based on YADIF (Yet Another Deinterlacing Filter)
  6. * Copyright (C) 2006-2011 Michael Niedermayer <michaelni@gmx.at>
  7. * 2010 James Darnley <james.darnley@gmail.com>
  8. *
  9. * With use of Weston 3 Field Deinterlacing Filter algorithm
  10. * Copyright (C) 2012 British Broadcasting Corporation, All Rights Reserved
  11. * Author of de-interlace algorithm: Jim Easterbrook for BBC R&D
  12. * Based on the process described by Martin Weston for BBC R&D
  13. *
  14. * This file is part of FFmpeg.
  15. *
  16. * FFmpeg is free software; you can redistribute it and/or
  17. * modify it under the terms of the GNU Lesser General Public
  18. * License as published by the Free Software Foundation; either
  19. * version 2.1 of the License, or (at your option) any later version.
  20. *
  21. * FFmpeg is distributed in the hope that it will be useful,
  22. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  23. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  24. * Lesser General Public License for more details.
  25. *
  26. * You should have received a copy of the GNU Lesser General Public
  27. * License along with FFmpeg; if not, write to the Free Software
  28. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  29. */
  30. #include "libavutil/avassert.h"
  31. #include "libavutil/common.h"
  32. #include "libavutil/opt.h"
  33. #include "libavutil/pixdesc.h"
  34. #include "libavutil/imgutils.h"
  35. #include "avfilter.h"
  36. #include "formats.h"
  37. #include "internal.h"
  38. #include "video.h"
/*
 * Filter coefficients coef_lf and coef_hf taken from BBC PH-2071 (Weston 3 Field Deinterlacer).
 * Used when there is spatial and temporal interpolation.
 * Filter coefficients coef_sp are used when there is spatial interpolation only.
 * Adjusted for matching visual sharpness impression of spatial and temporal interpolation.
 */
/* All three tables are fixed-point taps; every accumulated sum below is
 * normalized with a ">> 13" shift, so the taps are scaled by 2^13. */
static const uint16_t coef_lf[2] = { 4309, 213 };        /* low-frequency taps (temporal+spatial path) */
static const uint16_t coef_hf[3] = { 5570, 3801, 1016 }; /* high-frequency correction taps */
static const uint16_t coef_sp[2] = { 5077, 981 };        /* spatial-only taps */
/* Output cadence selected by the "mode" option. */
enum BWDIFMode {
    BWDIF_MODE_SEND_FRAME = 0, ///< send 1 frame for each frame
    BWDIF_MODE_SEND_FIELD = 1, ///< send 1 frame for each field
};

/* Assumed input field order selected by the "parity" option. */
enum BWDIFParity {
    BWDIF_PARITY_TFF  =  0, ///< top field first
    BWDIF_PARITY_BFF  =  1, ///< bottom field first
    BWDIF_PARITY_AUTO = -1, ///< auto detection
};

/* Frame selection policy chosen by the "deint" option. */
enum BWDIFDeint {
    BWDIF_DEINT_ALL        = 0, ///< deinterlace all frames
    BWDIF_DEINT_INTERLACED = 1, ///< only deinterlace frames marked as interlaced
};
typedef struct BWDIFContext {
    const AVClass *class;

    int mode;           ///< BWDIFMode
    int parity;         ///< BWDIFParity
    int deint;          ///< BWDIFDeint

    int frame_pending;  ///< set after the first field in send_field mode: the second field still has to be output

    /* Sliding three-frame window over the input stream. */
    AVFrame *cur;
    AVFrame *next;
    AVFrame *prev;
    AVFrame *out;       ///< output frame currently being built

    /* Per-bit-depth line filters; bound in config_props() to the 8- or
     * 16-bit implementations depending on the negotiated pixel format. */
    void (*filter_intra)(void *dst1, void *cur1, int w, int prefs, int mrefs,
                         int prefs3, int mrefs3, int parity, int clip_max);
    void (*filter_line)(void *dst, void *prev, void *cur, void *next,
                        int w, int prefs, int mrefs, int prefs2, int mrefs2,
                        int prefs3, int mrefs3, int prefs4, int mrefs4,
                        int parity, int clip_max);
    void (*filter_edge)(void *dst, void *prev, void *cur, void *next,
                        int w, int prefs, int mrefs, int prefs2, int mrefs2,
                        int parity, int clip_max, int spat);

    const AVPixFmtDescriptor *csp;  ///< descriptor for the negotiated pixel format

    /* 0: no valid temporal neighbours yet (first frame) -> spatial-only
     * filtering via filter_intra; 1: normal operation; -1: set while
     * flushing at EOF, cleared before the last second field.
     * NOTE(review): semantics inferred from the visible state transitions
     * in filter(), filter_frame(), return_frame() and request_frame(). */
    int inter_field;
    int eof;            ///< input stream exhausted (set in request_frame)
} BWDIFContext;

/* Per-plane work unit handed to filter_slice() by the thread executor. */
typedef struct ThreadData {
    AVFrame *frame;     ///< destination frame
    int plane;          ///< plane index to filter
    int w, h;           ///< plane width/height (chroma planes are subsampled)
    int parity;         ///< lines with (y ^ parity) & 1 are interpolated
    int tff;            ///< top-field-first flag for this output
} ThreadData;
/*
 * The per-pixel kernels are shared between the 8- and 16-bit functions via
 * these macros.  They expect fixed local names in the instantiating scope:
 * x, interpol, dst, cur (and for the temporal variants prev, next, prev2,
 * next2), plus the mrefs*/prefs* line offsets and clip_max.
 */

/* Spatial-only interpolation: 4-tap vertical filter over lines +/-1 and +/-3
 * of the current field.  Used when no temporal neighbours exist yet. */
#define FILTER_INTRA() \
    for (x = 0; x < w; x++) { \
        interpol = (coef_sp[0] * (cur[mrefs] + cur[prefs]) - coef_sp[1] * (cur[mrefs3] + cur[prefs3])) >> 13; \
        dst[0] = av_clip(interpol, 0, clip_max); \
 \
        dst++; \
        cur++; \
    }

/* First half of the temporal kernel: computes the temporal average d and the
 * neighbourhood differences, and stores d directly when there is no motion.
 * NOTE: opens the pixel loop and an "else {" block that FILTER_LINE or
 * FILTER_EDGE fills in and FILTER2 closes. */
#define FILTER1() \
    for (x = 0; x < w; x++) { \
        int c = cur[mrefs]; \
        int d = (prev2[0] + next2[0]) >> 1; \
        int e = cur[prefs]; \
        int temporal_diff0 = FFABS(prev2[0] - next2[0]); \
        int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e)) >> 1; \
        int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e)) >> 1; \
        int diff = FFMAX3(temporal_diff0 >> 1, temporal_diff1, temporal_diff2); \
 \
        if (!diff) { \
            dst[0] = d; \
        } else {

/* Spatial consistency check (as in yadif): widens diff using the vertical
 * neighbours two lines away so the clamp in FILTER2 tolerates real edges. */
#define SPAT_CHECK() \
            int b = ((prev2[mrefs2] + next2[mrefs2]) >> 1) - c; \
            int f = ((prev2[prefs2] + next2[prefs2]) >> 1) - e; \
            int dc = d - c; \
            int de = d - e; \
            int max = FFMAX3(de, dc, FFMIN(b, f)); \
            int min = FFMIN3(de, dc, FFMAX(b, f)); \
            diff = FFMAX3(diff, min, -max);

/* Full Weston 3-field interpolation: high-frequency temporal correction when
 * the vertical contrast exceeds the temporal difference, otherwise the
 * sharper spatial-only kernel. */
#define FILTER_LINE() \
            SPAT_CHECK() \
            if (FFABS(c - e) > temporal_diff0) { \
                interpol = (((coef_hf[0] * (prev2[0] + next2[0]) \
                    - coef_hf[1] * (prev2[mrefs2] + next2[mrefs2] + prev2[prefs2] + next2[prefs2]) \
                    + coef_hf[2] * (prev2[mrefs4] + next2[mrefs4] + prev2[prefs4] + next2[prefs4])) >> 2) \
                    + coef_lf[0] * (c + e) - coef_lf[1] * (cur[mrefs3] + cur[prefs3])) >> 13; \
            } else { \
                interpol = (coef_sp[0] * (c + e) - coef_sp[1] * (cur[mrefs3] + cur[prefs3])) >> 13; \
            }

/* Cheap 2-tap average for lines too close to the picture border for the
 * wide kernels; spat selects whether SPAT_CHECK is still applied. */
#define FILTER_EDGE() \
            if (spat) { \
                SPAT_CHECK() \
            } \
            interpol = (c + e) >> 1;

/* Second half: clamp the interpolated value to d +/- diff, store it, advance
 * all line pointers, and close the blocks opened by FILTER1. */
#define FILTER2() \
            if (interpol > d + diff) \
                interpol = d + diff; \
            else if (interpol < d - diff) \
                interpol = d - diff; \
 \
            dst[0] = av_clip(interpol, 0, clip_max); \
        } \
 \
        dst++; \
        cur++; \
        prev++; \
        next++; \
        prev2++; \
        next2++; \
    }
/**
 * Spatial-only interpolation of one 8-bit line (no temporal neighbours).
 *
 * @param dst1     destination line
 * @param cur1     current frame line (field containing the known lines)
 * @param w        width in samples
 * @param prefs    offset to the line below, mrefs to the line above
 * @param prefs3   offset to the line 3 below, mrefs3 to 3 above
 * @param parity   unused by this kernel (kept for a uniform signature)
 * @param clip_max maximum sample value for the bit depth
 */
static void filter_intra(void *dst1, void *cur1, int w, int prefs, int mrefs,
                         int prefs3, int mrefs3, int parity, int clip_max)
{
    uint8_t *dst = dst1;
    uint8_t *cur = cur1;
    int interpol, x;

    FILTER_INTRA()
}
/**
 * Full temporal+spatial interpolation of one 8-bit line.
 * prev2/next2 select, based on parity, which neighbour shares the field
 * phase of the line being rebuilt.
 */
static void filter_line(void *dst1, void *prev1, void *cur1, void *next1,
                        int w, int prefs, int mrefs, int prefs2, int mrefs2,
                        int prefs3, int mrefs3, int prefs4, int mrefs4,
                        int parity, int clip_max)
{
    uint8_t *dst   = dst1;
    uint8_t *prev  = prev1;
    uint8_t *cur   = cur1;
    uint8_t *next  = next1;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;
    int interpol, x;

    FILTER1()
    FILTER_LINE()
    FILTER2()
}
/**
 * Border variant for 8-bit lines near the top/bottom of the picture where
 * the wide vertical kernels would read outside the plane; uses a simple
 * 2-tap average, optionally (spat) with the spatial consistency clamp.
 */
static void filter_edge(void *dst1, void *prev1, void *cur1, void *next1,
                        int w, int prefs, int mrefs, int prefs2, int mrefs2,
                        int parity, int clip_max, int spat)
{
    uint8_t *dst   = dst1;
    uint8_t *prev  = prev1;
    uint8_t *cur   = cur1;
    uint8_t *next  = next1;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;
    int interpol, x;

    FILTER1()
    FILTER_EDGE()
    FILTER2()
}
/**
 * 16-bit variant of filter_intra(); identical kernel, uint16_t samples.
 * The mrefs/prefs offsets are in samples, not bytes (see filter_slice()).
 */
static void filter_intra_16bit(void *dst1, void *cur1, int w, int prefs, int mrefs,
                               int prefs3, int mrefs3, int parity, int clip_max)
{
    uint16_t *dst = dst1;
    uint16_t *cur = cur1;
    int interpol, x;

    FILTER_INTRA()
}
/**
 * 16-bit variant of filter_line(); identical kernel, uint16_t samples.
 */
static void filter_line_16bit(void *dst1, void *prev1, void *cur1, void *next1,
                              int w, int prefs, int mrefs, int prefs2, int mrefs2,
                              int prefs3, int mrefs3, int prefs4, int mrefs4,
                              int parity, int clip_max)
{
    uint16_t *dst   = dst1;
    uint16_t *prev  = prev1;
    uint16_t *cur   = cur1;
    uint16_t *next  = next1;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    int interpol, x;

    FILTER1()
    FILTER_LINE()
    FILTER2()
}
/**
 * 16-bit variant of filter_edge(); identical kernel, uint16_t samples.
 */
static void filter_edge_16bit(void *dst1, void *prev1, void *cur1, void *next1,
                              int w, int prefs, int mrefs, int prefs2, int mrefs2,
                              int parity, int clip_max, int spat)
{
    uint16_t *dst   = dst1;
    uint16_t *prev  = prev1;
    uint16_t *cur   = cur1;
    uint16_t *next  = next1;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    int interpol, x;

    FILTER1()
    FILTER_EDGE()
    FILTER2()
}
/**
 * Thread worker: deinterlace one horizontal slice of one plane.
 *
 * Lines whose parity matches the missing field are interpolated; lines of
 * the present field are copied through unchanged.  Near the picture borders
 * the line offsets are mirrored back inside the plane and the cheaper edge
 * filter is used.
 */
static int filter_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    BWDIFContext *s = ctx->priv;
    ThreadData *td  = arg;
    int linesize = s->cur->linesize[td->plane];
    int clip_max = (1 << (s->csp->comp[td->plane].depth)) - 1;
    int df = (s->csp->comp[td->plane].depth + 7) / 8;  /* bytes per sample */
    int refs = linesize / df;                          /* line stride in samples */
    int slice_start = (td->h *  jobnr   ) / nb_jobs;
    int slice_end   = (td->h * (jobnr+1)) / nb_jobs;
    int y;

    for (y = slice_start; y < slice_end; y++) {
        if ((y ^ td->parity) & 1) {
            /* Missing-field line: rebuild it. */
            uint8_t *prev = &s->prev->data[td->plane][y * linesize];
            uint8_t *cur  = &s->cur ->data[td->plane][y * linesize];
            uint8_t *next = &s->next->data[td->plane][y * linesize];
            uint8_t *dst  = &td->frame->data[td->plane][y * td->frame->linesize[td->plane]];
            if (!s->inter_field) {
                /* No temporal neighbours yet: spatial-only interpolation.
                 * Offsets flip sign (mirror) when a tap would leave the plane.
                 * NOTE(review): df is bytes/sample but y counts lines, so for
                 * 16-bit input tests like (y + df) < td->h mirror one line
                 * earlier than strictly necessary — confirm against upstream. */
                s->filter_intra(dst, cur, td->w, (y + df) < td->h ? refs : -refs,
                                y > (df - 1) ? -refs : refs,
                                (y + 3*df) < td->h ? 3 * refs : -refs,
                                y > (3*df - 1) ? -3 * refs : refs,
                                td->parity ^ td->tff, clip_max);
            } else if ((y < 4) || ((y + 5) > td->h)) {
                /* Within 4 lines of the top/bottom: edge filter; the spat
                 * check is disabled on the outermost two lines. */
                s->filter_edge(dst, prev, cur, next, td->w,
                               (y + df) < td->h ? refs : -refs,
                               y > (df - 1) ? -refs : refs,
                               refs << 1, -(refs << 1),
                               td->parity ^ td->tff, clip_max,
                               (y < 2) || ((y + 3) > td->h) ? 0 : 1);
            } else {
                /* Interior line: full Weston 3-field kernel. */
                s->filter_line(dst, prev, cur, next, td->w,
                               refs, -refs, refs << 1, -(refs << 1),
                               3 * refs, -3 * refs, refs << 2, -(refs << 2),
                               td->parity ^ td->tff, clip_max);
            }
        } else {
            /* Present-field line: copy as-is. */
            memcpy(&td->frame->data[td->plane][y * td->frame->linesize[td->plane]],
                   &s->cur->data[td->plane][y * linesize], td->w * df);
        }
    }
    return 0;
}
  272. static void filter(AVFilterContext *ctx, AVFrame *dstpic,
  273. int parity, int tff)
  274. {
  275. BWDIFContext *bwdif = ctx->priv;
  276. ThreadData td = { .frame = dstpic, .parity = parity, .tff = tff };
  277. int i;
  278. for (i = 0; i < bwdif->csp->nb_components; i++) {
  279. int w = dstpic->width;
  280. int h = dstpic->height;
  281. if (i == 1 || i == 2) {
  282. w = AV_CEIL_RSHIFT(w, bwdif->csp->log2_chroma_w);
  283. h = AV_CEIL_RSHIFT(h, bwdif->csp->log2_chroma_h);
  284. }
  285. td.w = w;
  286. td.h = h;
  287. td.plane = i;
  288. ctx->internal->execute(ctx, filter_slice, &td, NULL, FFMIN(h, ctx->graph->nb_threads));
  289. }
  290. if (!bwdif->inter_field) {
  291. bwdif->inter_field = 1;
  292. }
  293. emms_c();
  294. }
/**
 * Build and emit one output frame.
 *
 * @param is_second non-zero when emitting the second field of an input frame
 *                  (send_field mode); a fresh buffer is allocated for it.
 * @return ff_filter_frame() result.
 */
static int return_frame(AVFilterContext *ctx, int is_second)
{
    BWDIFContext *bwdif = ctx->priv;
    AVFilterLink *link  = ctx->outputs[0];
    int tff, ret;

    if (bwdif->parity == -1) {
        /* Auto parity: use the frame's flag, defaulting to TFF when the
         * frame is not marked interlaced. */
        tff = bwdif->cur->interlaced_frame ?
              bwdif->cur->top_field_first : 1;
    } else {
        tff = bwdif->parity ^ 1;
    }

    if (is_second) {
        /* The second field needs its own output buffer. */
        bwdif->out = ff_get_video_buffer(link, link->w, link->h);
        if (!bwdif->out)
            return AVERROR(ENOMEM);

        av_frame_copy_props(bwdif->out, bwdif->cur);
        bwdif->out->interlaced_frame = 0;
        /* EOF flush marker: clear it before filtering the final field. */
        if (bwdif->inter_field < 0)
            bwdif->inter_field = 0;
    }

    filter(ctx, bwdif->out, tff ^ !is_second, tff);

    if (is_second) {
        int64_t cur_pts  = bwdif->cur->pts;
        int64_t next_pts = bwdif->next->pts;

        if (next_pts != AV_NOPTS_VALUE && cur_pts != AV_NOPTS_VALUE) {
            /* Output timebase has a doubled den (config_props), so
             * cur + next is the midpoint between the two input frames. */
            bwdif->out->pts = cur_pts + next_pts;
        } else {
            bwdif->out->pts = AV_NOPTS_VALUE;
        }
    }
    ret = ff_filter_frame(ctx->outputs[0], bwdif->out);

    /* In send_field mode, the first field leaves the second one pending. */
    bwdif->frame_pending = (bwdif->mode&1) && !is_second;
    return ret;
}
  329. static int checkstride(BWDIFContext *bwdif, const AVFrame *a, const AVFrame *b)
  330. {
  331. int i;
  332. for (i = 0; i < bwdif->csp->nb_components; i++)
  333. if (a->linesize[i] != b->linesize[i])
  334. return 1;
  335. return 0;
  336. }
/**
 * Reallocate f with the link's default buffer layout, copying its pixels and
 * properties, so its strides match freshly allocated frames.
 *
 * On allocation failure it returns silently and leaves f unchanged; the
 * caller (filter_frame) re-runs checkstride() afterwards and reports the
 * failure itself.
 */
static void fixstride(AVFilterLink *link, AVFrame *f)
{
    AVFrame *dst = ff_default_get_video_buffer(link, f->width, f->height);

    if(!dst)
        return;

    av_frame_copy_props(dst, f);
    av_image_copy(dst->data, dst->linesize,
                  (const uint8_t **)f->data, f->linesize,
                  dst->format, dst->width, dst->height);
    av_frame_unref(f);
    av_frame_move_ref(f, dst);
    av_frame_free(&dst);
}
/**
 * Input callback: push frame into the prev/cur/next window and emit output.
 *
 * Takes ownership of frame.  Output is produced only once the window is
 * full; frames that should not be deinterlaced (per the deint option or
 * timeline disable) are passed through with their pts rescaled to the
 * doubled output timebase.
 */
static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    AVFilterContext *ctx = link->dst;
    BWDIFContext *bwdif = ctx->priv;

    av_assert0(frame);

    /* A second field from the previous frame is still owed: emit it first. */
    if (bwdif->frame_pending)
        return_frame(ctx, 1);

    /* Slide the window: prev <- cur <- next <- frame. */
    if (bwdif->prev)
        av_frame_free(&bwdif->prev);
    bwdif->prev = bwdif->cur;
    bwdif->cur  = bwdif->next;
    bwdif->next = frame;

    if (!bwdif->cur) {
        /* Very first frame: clone it into cur and flag that no real
         * temporal neighbours exist yet (spatial-only filtering). */
        bwdif->cur = av_frame_clone(bwdif->next);
        if (!bwdif->cur)
            return AVERROR(ENOMEM);
        bwdif->inter_field = 0;
    }

    /* The line filters walk prev/cur/next with a single stride, so all
     * three frames must share linesizes; reallocate any that differ. */
    if (checkstride(bwdif, bwdif->next, bwdif->cur)) {
        av_log(ctx, AV_LOG_VERBOSE, "Reallocating frame due to differing stride\n");
        fixstride(link, bwdif->next);
    }
    if (checkstride(bwdif, bwdif->next, bwdif->cur))
        fixstride(link, bwdif->cur);
    if (bwdif->prev && checkstride(bwdif, bwdif->next, bwdif->prev))
        fixstride(link, bwdif->prev);
    if (checkstride(bwdif, bwdif->next, bwdif->cur) || (bwdif->prev && checkstride(bwdif, bwdif->next, bwdif->prev))) {
        av_log(ctx, AV_LOG_ERROR, "Failed to reallocate frame\n");
        return -1;
    }

    /* Need a full prev/cur/next window before any output. */
    if (!bwdif->prev)
        return 0;

    /* Pass-through cases: progressive input (deint option), filter disabled
     * by the timeline, or telecine-style repeated progressive fields. */
    if ((bwdif->deint && !bwdif->cur->interlaced_frame) ||
        ctx->is_disabled ||
        (bwdif->deint && !bwdif->prev->interlaced_frame && bwdif->prev->repeat_pict) ||
        (bwdif->deint && !bwdif->next->interlaced_frame && bwdif->next->repeat_pict)
    ) {
        bwdif->out = av_frame_clone(bwdif->cur);
        if (!bwdif->out)
            return AVERROR(ENOMEM);

        av_frame_free(&bwdif->prev);
        /* Rescale to the doubled output timebase. */
        if (bwdif->out->pts != AV_NOPTS_VALUE)
            bwdif->out->pts *= 2;
        return ff_filter_frame(ctx->outputs[0], bwdif->out);
    }

    bwdif->out = ff_get_video_buffer(ctx->outputs[0], link->w, link->h);
    if (!bwdif->out)
        return AVERROR(ENOMEM);

    av_frame_copy_props(bwdif->out, bwdif->cur);
    bwdif->out->interlaced_frame = 0;

    if (bwdif->out->pts != AV_NOPTS_VALUE)
        bwdif->out->pts *= 2;

    return return_frame(ctx, 0);
}
  404. static int request_frame(AVFilterLink *link)
  405. {
  406. AVFilterContext *ctx = link->src;
  407. BWDIFContext *bwdif = ctx->priv;
  408. int ret;
  409. if (bwdif->frame_pending) {
  410. return_frame(ctx, 1);
  411. return 0;
  412. }
  413. if (bwdif->eof)
  414. return AVERROR_EOF;
  415. ret = ff_request_frame(link->src->inputs[0]);
  416. if (ret == AVERROR_EOF && bwdif->cur) {
  417. AVFrame *next = av_frame_clone(bwdif->next);
  418. if (!next)
  419. return AVERROR(ENOMEM);
  420. bwdif->inter_field = -1;
  421. next->pts = bwdif->next->pts * 2 - bwdif->cur->pts;
  422. filter_frame(link->src->inputs[0], next);
  423. bwdif->eof = 1;
  424. } else if (ret < 0) {
  425. return ret;
  426. }
  427. return 0;
  428. }
  429. static av_cold void uninit(AVFilterContext *ctx)
  430. {
  431. BWDIFContext *bwdif = ctx->priv;
  432. av_frame_free(&bwdif->prev);
  433. av_frame_free(&bwdif->cur );
  434. av_frame_free(&bwdif->next);
  435. }
/**
 * Advertise the supported pixel formats: planar YUV/YUVA, GBR(A) and gray
 * at 8–16 bits per component.  Only planar formats work because the line
 * filters address each plane independently with a single stride.
 */
static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV440P,  AV_PIX_FMT_YUV444P,
        AV_PIX_FMT_YUVJ411P, AV_PIX_FMT_YUVJ420P,
        AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_YUVJ444P,
        AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
        AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
        AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12,
        AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14,
        AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
        AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
        AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9,
        AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
        AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
        AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
        AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
        AV_PIX_FMT_GBRAP, AV_PIX_FMT_GBRAP16,
        AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY16,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}
/**
 * Configure the output link: the timebase is always doubled (so field-rate
 * pts stay integral); the frame rate is doubled only in send_field mode.
 * Also binds the 8- or 16-bit filter implementations from the pixel depth.
 */
static int config_props(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    BWDIFContext *s = link->src->priv;

    link->time_base.num = link->src->inputs[0]->time_base.num;
    link->time_base.den = link->src->inputs[0]->time_base.den * 2;
    link->w             = link->src->inputs[0]->w;
    link->h             = link->src->inputs[0]->h;

    if(s->mode&1)
        link->frame_rate = av_mul_q(link->src->inputs[0]->frame_rate, (AVRational){2,1});

    /* The narrowest kernels still reach one line up and down. */
    if (link->w < 3 || link->h < 3) {
        av_log(ctx, AV_LOG_ERROR, "Video of less than 3 columns or lines is not supported\n");
        return AVERROR(EINVAL);
    }

    s->csp = av_pix_fmt_desc_get(link->format);
    if (s->csp->comp[0].depth > 8) {
        s->filter_intra = filter_intra_16bit;
        s->filter_line  = filter_line_16bit;
        s->filter_edge  = filter_edge_16bit;
    } else {
        s->filter_intra = filter_intra;
        s->filter_line  = filter_line;
        s->filter_edge  = filter_edge;
    }

    return 0;
}
#define OFFSET(x) offsetof(BWDIFContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

/* Named constant entry for an option "unit" (enum-style choices). */
#define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, INT_MIN, INT_MAX, FLAGS, unit }

/* User options; defaults: send_field cadence, auto parity, deinterlace
 * only frames flagged as interlaced. */
static const AVOption bwdif_options[] = {
    { "mode",   "specify the interlacing mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=BWDIF_MODE_SEND_FIELD}, 0, 1, FLAGS, "mode"},
    CONST("send_frame", "send one frame for each frame", BWDIF_MODE_SEND_FRAME, "mode"),
    CONST("send_field", "send one frame for each field", BWDIF_MODE_SEND_FIELD, "mode"),

    { "parity", "specify the assumed picture field parity", OFFSET(parity), AV_OPT_TYPE_INT, {.i64=BWDIF_PARITY_AUTO}, -1, 1, FLAGS, "parity" },
    CONST("tff",  "assume top field first",    BWDIF_PARITY_TFF,  "parity"),
    CONST("bff",  "assume bottom field first", BWDIF_PARITY_BFF,  "parity"),
    CONST("auto", "auto detect parity",        BWDIF_PARITY_AUTO, "parity"),

    { "deint",  "specify which frames to deinterlace", OFFSET(deint), AV_OPT_TYPE_INT, {.i64=BWDIF_DEINT_INTERLACED}, 0, 1, FLAGS, "deint" },
    CONST("all",        "deinterlace all frames",                       BWDIF_DEINT_ALL,        "deint"),
    CONST("interlaced", "only deinterlace frames marked as interlaced", BWDIF_DEINT_INTERLACED, "deint"),

    { NULL }
};
AVFILTER_DEFINE_CLASS(bwdif);

/* Single video input; frames arrive via filter_frame(). */
static const AVFilterPad avfilter_vf_bwdif_inputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .filter_frame  = filter_frame,
    },
    { NULL }
};

/* Single video output; pulls input via request_frame(), link parameters
 * (timebase, frame rate, filter callbacks) set in config_props(). */
static const AVFilterPad avfilter_vf_bwdif_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};

/* Filter registration: slice-threaded, timeline-aware ("enable" expression
 * handled internally via ctx->is_disabled in filter_frame). */
AVFilter ff_vf_bwdif = {
    .name          = "bwdif",
    .description   = NULL_IF_CONFIG_SMALL("Deinterlace the input image."),
    .priv_size     = sizeof(BWDIFContext),
    .priv_class    = &bwdif_class,
    .uninit        = uninit,
    .query_formats = query_formats,
    .inputs        = avfilter_vf_bwdif_inputs,
    .outputs       = avfilter_vf_bwdif_outputs,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL | AVFILTER_FLAG_SLICE_THREADS,
};