/*
 * Copyright (c) 2007 Nicolas George <nicolas.george@normalesup.org>
 * Copyright (c) 2011 Stefano Sabatini
 * Copyright (c) 2012 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Misc test sources.
 *
 * testsrc is based on the test pattern generator demuxer by Nicolas George:
 * http://lists.ffmpeg.org/pipermail/ffmpeg-devel/2007-October/037845.html
 *
 * rgbtestsrc is ported from MPlayer libmpcodecs/vf_rgbtest.c by
 * Michael Niedermayer.
 *
 * allyuv, smptebars and smptehdbars are by Paul B Mahol.
 */

#include <float.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/parseutils.h"
#include "avfilter.h"
#include "drawutils.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
typedef struct TestSourceContext {
    const AVClass *class;
    int w, h;
    unsigned int nb_frame;
    AVRational time_base, frame_rate;
    int64_t pts;
    int64_t duration;           ///< duration expressed in microseconds
    AVRational sar;             ///< sample aspect ratio
    int draw_once;              ///< draw only the first frame, always put out the same picture
    int draw_once_reset;        ///< draw only the first frame or in case of reset
    AVFrame *picref;            ///< cached reference containing the painted picture

    void (* fill_picture_fn)(AVFilterContext *ctx, AVFrame *frame);

    /* only used by testsrc */
    int nb_decimals;

    /* only used by color */
    FFDrawContext draw;
    FFDrawColor color;
    uint8_t color_rgba[4];

    /* only used by rgbtest */
    uint8_t rgba_map[4];

    /* only used by haldclut */
    int level;
} TestSourceContext;
#define OFFSET(x) offsetof(TestSourceContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

#define SIZE_OPTIONS \
    { "size",     "set video size",     OFFSET(w),        AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\
    { "s",        "set video size",     OFFSET(w),        AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\

#define COMMON_OPTIONS_NOSIZE \
    { "rate",     "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, 0, FLAGS },\
    { "r",        "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, 0, FLAGS },\
    { "duration", "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "d",        "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "sar",      "set video sample aspect ratio", OFFSET(sar), AV_OPT_TYPE_RATIONAL, {.dbl= 1}, 0, INT_MAX, FLAGS },

#define COMMON_OPTIONS SIZE_OPTIONS COMMON_OPTIONS_NOSIZE

static const AVOption options[] = {
    COMMON_OPTIONS
    { NULL }
};
static av_cold int init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->time_base = av_inv_q(test->frame_rate);
    test->nb_frame = 0;
    test->pts = 0;

    av_log(ctx, AV_LOG_VERBOSE, "size:%dx%d rate:%d/%d duration:%f sar:%d/%d\n",
           test->w, test->h, test->frame_rate.num, test->frame_rate.den,
           test->duration < 0 ? -1 : (double)test->duration/1000000,
           test->sar.num, test->sar.den);
    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    av_frame_free(&test->picref);
}

static int config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    outlink->w = test->w;
    outlink->h = test->h;
    outlink->sample_aspect_ratio = test->sar;
    outlink->frame_rate = test->frame_rate;
    outlink->time_base = test->time_base;

    return 0;
}
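/* Produce one output frame per request: return EOF once the configured
 * duration has elapsed; for draw-once sources paint the picture only the
 * first time (or after a reset) and hand out clones of the cached frame,
 * otherwise repaint a freshly allocated buffer on every call. */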
static int request_frame(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;
    AVFrame *frame;

    if (test->duration >= 0 &&
        av_rescale_q(test->pts, test->time_base, AV_TIME_BASE_Q) >= test->duration)
        return AVERROR_EOF;

    if (test->draw_once) {
        if (test->draw_once_reset) {
            av_frame_free(&test->picref);
            test->draw_once_reset = 0;
        }
        if (!test->picref) {
            test->picref =
                ff_get_video_buffer(outlink, test->w, test->h);
            if (!test->picref)
                return AVERROR(ENOMEM);
            test->fill_picture_fn(outlink->src, test->picref);
        }
        frame = av_frame_clone(test->picref);
    } else
        frame = ff_get_video_buffer(outlink, test->w, test->h);

    if (!frame)
        return AVERROR(ENOMEM);
    frame->pts                 = test->pts;
    frame->key_frame           = 1;
    frame->interlaced_frame    = 0;
    frame->pict_type           = AV_PICTURE_TYPE_I;
    frame->sample_aspect_ratio = test->sar;
    if (!test->draw_once)
        test->fill_picture_fn(outlink->src, frame);

    test->pts++;
    test->nb_frame++;

    return ff_filter_frame(outlink, frame);
}
#if CONFIG_COLOR_FILTER

static const AVOption color_options[] = {
    { "color", "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "c",     "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, CHAR_MIN, CHAR_MAX, FLAGS },
    COMMON_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(color);

static void color_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    ff_fill_rectangle(&test->draw, &test->color,
                      picref->data, picref->linesize,
                      0, 0, test->w, test->h);
}

static av_cold int color_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;
    test->fill_picture_fn = color_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

static int color_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}

static int color_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    TestSourceContext *test = ctx->priv;
    int ret;

    ff_draw_init(&test->draw, inlink->format, 0);
    ff_draw_color(&test->draw, &test->color, test->color_rgba);

    test->w = ff_draw_round_to_sub(&test->draw, 0, -1, test->w);
    test->h = ff_draw_round_to_sub(&test->draw, 1, -1, test->h);
    if (av_image_check_size(test->w, test->h, 0, ctx) < 0)
        return AVERROR(EINVAL);

    if ((ret = config_props(inlink)) < 0)
        return ret;

    return 0;
}

static int color_process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                                 char *res, int res_len, int flags)
{
    TestSourceContext *test = ctx->priv;
    int ret;

    if (!strcmp(cmd, "color") || !strcmp(cmd, "c")) {
        uint8_t color_rgba[4];

        ret = av_parse_color(color_rgba, args, -1, ctx);
        if (ret < 0)
            return ret;

        memcpy(test->color_rgba, color_rgba, sizeof(color_rgba));
        ff_draw_color(&test->draw, &test->color, test->color_rgba);
        test->draw_once_reset = 1;
        return 0;
    }

    return AVERROR(ENOSYS);
}

static const AVFilterPad color_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = color_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_color = {
    .name            = "color",
    .description     = NULL_IF_CONFIG_SMALL("Provide an uniformly colored input."),
    .priv_class      = &color_class,
    .priv_size       = sizeof(TestSourceContext),
    .init            = color_init,
    .uninit          = uninit,
    .query_formats   = color_query_formats,
    .inputs          = NULL,
    .outputs         = color_outputs,
    .process_command = color_process_command,
};

#endif /* CONFIG_COLOR_FILTER */
#if CONFIG_HALDCLUTSRC_FILTER

static const AVOption haldclutsrc_options[] = {
    { "level", "set level", OFFSET(level), AV_OPT_TYPE_INT, {.i64 = 6}, 2, 8, FLAGS },
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(haldclutsrc);
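/* An identity Hald CLUT of level N is a square frame of N*N*N x N*N*N pixels
 * holding every combination of (N*N)-step R, G and B ramps; the nested loops
 * below iterate blue, then green, then red, writing one ramp entry per pixel. */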
static void haldclutsrc_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    int i, j, k, x = 0, y = 0, is16bit = 0, step;
    uint32_t alpha = 0;
    const TestSourceContext *hc = ctx->priv;
    int level = hc->level;
    float scale;
    const int w = frame->width;
    const int h = frame->height;
    const uint8_t *data = frame->data[0];
    const int linesize  = frame->linesize[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t rgba_map[4];

    av_assert0(w == h && w == level*level*level);

    ff_fill_rgba_map(rgba_map, frame->format);

    switch (frame->format) {
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_BGR48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_BGRA64:
        is16bit = 1;
        alpha = 0xffff;
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        alpha = 0xff;
        break;
    }

    step  = av_get_padded_bits_per_pixel(desc) >> (3 + is16bit);
    scale = ((float)(1 << (8*(is16bit+1))) - 1) / (level*level - 1);

#define LOAD_CLUT(nbits) do {                                                   \
    uint##nbits##_t *dst = ((uint##nbits##_t *)(data + y*linesize)) + x*step;   \
    dst[rgba_map[0]] = av_clip_uint##nbits(i * scale);                          \
    dst[rgba_map[1]] = av_clip_uint##nbits(j * scale);                          \
    dst[rgba_map[2]] = av_clip_uint##nbits(k * scale);                          \
    if (step == 4)                                                              \
        dst[rgba_map[3]] = alpha;                                               \
} while (0)

    level *= level;
    for (k = 0; k < level; k++) {
        for (j = 0; j < level; j++) {
            for (i = 0; i < level; i++) {
                if (!is16bit)
                    LOAD_CLUT(8);
                else
                    LOAD_CLUT(16);
                if (++x == w) {
                    x = 0;
                    y++;
                }
            }
        }
    }
}
static av_cold int haldclutsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *hc = ctx->priv;
    hc->fill_picture_fn = haldclutsrc_fill_picture;
    hc->draw_once = 1;
    return init(ctx);
}

static int haldclutsrc_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24,  AV_PIX_FMT_BGR24,
        AV_PIX_FMT_RGBA,   AV_PIX_FMT_BGRA,
        AV_PIX_FMT_ARGB,   AV_PIX_FMT_ABGR,
        AV_PIX_FMT_0RGB,   AV_PIX_FMT_0BGR,
        AV_PIX_FMT_RGB0,   AV_PIX_FMT_BGR0,
        AV_PIX_FMT_RGB48,  AV_PIX_FMT_BGR48,
        AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int haldclutsrc_config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    TestSourceContext *hc = ctx->priv;

    hc->w = hc->h = hc->level * hc->level * hc->level;
    return config_props(outlink);
}

static const AVFilterPad haldclutsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = haldclutsrc_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_haldclutsrc = {
    .name          = "haldclutsrc",
    .description   = NULL_IF_CONFIG_SMALL("Provide an identity Hald CLUT."),
    .priv_class    = &haldclutsrc_class,
    .priv_size     = sizeof(TestSourceContext),
    .init          = haldclutsrc_init,
    .uninit        = uninit,
    .query_formats = haldclutsrc_query_formats,
    .inputs        = NULL,
    .outputs       = haldclutsrc_outputs,
};
#endif /* CONFIG_HALDCLUTSRC_FILTER */
#if CONFIG_NULLSRC_FILTER

#define nullsrc_options options
AVFILTER_DEFINE_CLASS(nullsrc);

static void nullsrc_fill_picture(AVFilterContext *ctx, AVFrame *picref) { }

static av_cold int nullsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = nullsrc_fill_picture;
    return init(ctx);
}

static const AVFilterPad nullsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL },
};

AVFilter ff_vsrc_nullsrc = {
    .name        = "nullsrc",
    .description = NULL_IF_CONFIG_SMALL("Null video source, return unprocessed video frames."),
    .init        = nullsrc_init,
    .uninit      = uninit,
    .priv_size   = sizeof(TestSourceContext),
    .priv_class  = &nullsrc_class,
    .inputs      = NULL,
    .outputs     = nullsrc_outputs,
};

#endif /* CONFIG_NULLSRC_FILTER */
#if CONFIG_TESTSRC_FILTER

static const AVOption testsrc_options[] = {
    COMMON_OPTIONS
    { "decimals", "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
    { "n",        "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc);
/**
 * Fill a rectangle with value val.
 *
 * @param val the RGB value to set
 * @param dst pointer to the destination buffer to fill
 * @param dst_linesize linesize of destination
 * @param segment_width width of the segment
 * @param x horizontal coordinate where to draw the rectangle in the destination buffer
 * @param y vertical coordinate where to draw the rectangle in the destination buffer
 * @param w width of the rectangle to draw, expressed as a number of segment_width units
 * @param h height of the rectangle to draw, expressed as a number of segment_width units
 */
static void draw_rectangle(unsigned val, uint8_t *dst, int dst_linesize, int segment_width,
                           int x, int y, int w, int h)
{
    int i;
    int step = 3;

    dst += segment_width * (step * x + y * dst_linesize);
    w *= segment_width * step;
    h *= segment_width;
    for (i = 0; i < h; i++) {
        memset(dst, val, w);
        dst += dst_linesize;
    }
}
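/* Render a single decimal digit as a seven-segment figure: the digit's mask
 * selects which of the bars listed in segments[] are drawn, in units of
 * segment_width, on a cleared 8x13 cell. */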
static void draw_digit(int digit, uint8_t *dst, int dst_linesize,
                       int segment_width)
{
#define TOP_HBAR        1
#define MID_HBAR        2
#define BOT_HBAR        4
#define LEFT_TOP_VBAR   8
#define LEFT_BOT_VBAR  16
#define RIGHT_TOP_VBAR 32
#define RIGHT_BOT_VBAR 64
    struct segments {
        int x, y, w, h;
    } segments[] = {
        { 1,  0, 5, 1 }, /* TOP_HBAR */
        { 1,  6, 5, 1 }, /* MID_HBAR */
        { 1, 12, 5, 1 }, /* BOT_HBAR */
        { 0,  1, 1, 5 }, /* LEFT_TOP_VBAR */
        { 0,  7, 1, 5 }, /* LEFT_BOT_VBAR */
        { 6,  1, 1, 5 }, /* RIGHT_TOP_VBAR */
        { 6,  7, 1, 5 }  /* RIGHT_BOT_VBAR */
    };
    static const unsigned char masks[10] = {
        /* 0 */ TOP_HBAR         |BOT_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 1 */                                                        RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 2 */ TOP_HBAR|MID_HBAR|BOT_HBAR|LEFT_BOT_VBAR              |RIGHT_TOP_VBAR,
        /* 3 */ TOP_HBAR|MID_HBAR|BOT_HBAR                            |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 4 */          MID_HBAR         |LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 5 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR                             |RIGHT_BOT_VBAR,
        /* 6 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR               |RIGHT_BOT_VBAR,
        /* 7 */ TOP_HBAR                                              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 8 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 9 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
    };
    unsigned mask = masks[digit];
    int i;

    draw_rectangle(0, dst, dst_linesize, segment_width, 0, 0, 8, 13);
    for (i = 0; i < FF_ARRAY_ELEMS(segments); i++)
        if (mask & (1<<i))
            draw_rectangle(255, dst, dst_linesize, segment_width,
                           segments[i].x, segments[i].y, segments[i].w, segments[i].h);
}
#define GRADIENT_SIZE (6 * 256)
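/* testsrc frame layout: vertical color bars crossed by a circle that inverts
 * their colors, a sliding RGB gradient band starting at 3/4 of the height and
 * repeated over height/8 rows, and the current timestamp drawn with
 * seven-segment digits around the vertical center of the frame. */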
static void test_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    uint8_t *p, *p0;
    int x, y;
    int color, color_rest;
    int icolor;
    int radius;
    int quad0, quad;
    int dquad_x, dquad_y;
    int grad, dgrad, rgrad, drgrad;
    int seg_size;
    int second;
    int i;
    uint8_t *data = frame->data[0];
    int width  = frame->width;
    int height = frame->height;

    /* draw colored bars and circle */
    radius = (width + height) / 4;
    quad0 = width * width / 4 + height * height / 4 - radius * radius;
    dquad_y = 1 - height;
    p0 = data;
    for (y = 0; y < height; y++) {
        p = p0;
        color = 0;
        color_rest = 0;
        quad = quad0;
        dquad_x = 1 - width;
        for (x = 0; x < width; x++) {
            icolor = color;
            if (quad < 0)
                icolor ^= 7;
            quad += dquad_x;
            dquad_x += 2;
            *(p++) = icolor & 1 ? 255 : 0;
            *(p++) = icolor & 2 ? 255 : 0;
            *(p++) = icolor & 4 ? 255 : 0;
            color_rest += 8;
            if (color_rest >= width) {
                color_rest -= width;
                color++;
            }
        }
        quad0 += dquad_y;
        dquad_y += 2;
        p0 += frame->linesize[0];
    }

    /* draw sliding color line */
    p0 = p = data + frame->linesize[0] * (height * 3/4);
    grad = (256 * test->nb_frame * test->time_base.num / test->time_base.den) %
        GRADIENT_SIZE;
    rgrad = 0;
    dgrad = GRADIENT_SIZE / width;
    drgrad = GRADIENT_SIZE % width;
    for (x = 0; x < width; x++) {
        *(p++) =
            grad < 256 || grad >= 5 * 256 ? 255 :
            grad >= 2 * 256 && grad < 4 * 256 ? 0 :
            grad < 2 * 256 ? 2 * 256 - 1 - grad : grad - 4 * 256;
        *(p++) =
            grad >= 4 * 256 ? 0 :
            grad >= 1 * 256 && grad < 3 * 256 ? 255 :
            grad < 1 * 256 ? grad : 4 * 256 - 1 - grad;
        *(p++) =
            grad < 2 * 256 ? 0 :
            grad >= 3 * 256 && grad < 5 * 256 ? 255 :
            grad < 3 * 256 ? grad - 2 * 256 : 6 * 256 - 1 - grad;
        grad += dgrad;
        rgrad += drgrad;
        if (rgrad >= GRADIENT_SIZE) {
            grad++;
            rgrad -= GRADIENT_SIZE;
        }
        if (grad >= GRADIENT_SIZE)
            grad -= GRADIENT_SIZE;
    }
    p = p0;
    for (y = height / 8; y > 0; y--) {
        memcpy(p+frame->linesize[0], p, 3 * width);
        p += frame->linesize[0];
    }

    /* draw digits */
    seg_size = width / 80;
    if (seg_size >= 1 && height >= 13 * seg_size) {
        int64_t p10decimals = 1;
        double time = av_q2d(test->time_base) * test->nb_frame *
                      pow(10, test->nb_decimals);
        if (time >= INT_MAX)
            return;

        for (x = 0; x < test->nb_decimals; x++)
            p10decimals *= 10;

        second = av_rescale_rnd(test->nb_frame * test->time_base.num, p10decimals, test->time_base.den, AV_ROUND_ZERO);
        x = width - (width - seg_size * 64) / 2;
        y = (height - seg_size * 13) / 2;
        p = data + (x*3 + y * frame->linesize[0]);
        for (i = 0; i < 8; i++) {
            p -= 3 * 8 * seg_size;
            draw_digit(second % 10, p, frame->linesize[0], seg_size);
            second /= 10;
            if (second == 0)
                break;
        }
    }
}

static av_cold int test_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = test_fill_picture;
    return init(ctx);
}
static int test_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_testsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_testsrc = {
    .name          = "testsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &testsrc_class,
    .init          = test_init,
    .uninit        = uninit,
    .query_formats = test_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_testsrc_outputs,
};

#endif /* CONFIG_TESTSRC_FILTER */
#if CONFIG_RGBTESTSRC_FILTER

#define rgbtestsrc_options options
AVFILTER_DEFINE_CLASS(rgbtestsrc);

#define R 0
#define G 1
#define B 2
#define A 3

static void rgbtest_put_pixel(uint8_t *dst, int dst_linesize,
                              int x, int y, int r, int g, int b, enum AVPixelFormat fmt,
                              uint8_t rgba_map[4])
{
    int32_t v;
    uint8_t *p;

    switch (fmt) {
    case AV_PIX_FMT_BGR444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r >> 4) << 8) | ((g >> 4) << 4) | (b >> 4); break;
    case AV_PIX_FMT_RGB444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b >> 4) << 8) | ((g >> 4) << 4) | (r >> 4); break;
    case AV_PIX_FMT_BGR555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<10) | ((g>>3)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<10) | ((g>>3)<<5) | (r>>3); break;
    case AV_PIX_FMT_BGR565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<11) | ((g>>2)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<11) | ((g>>2)<<5) | (r>>3); break;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8));
        p = dst + 3*x + y*dst_linesize;
        AV_WL24(p, v);
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8)) + (255 << (rgba_map[A]*8));
        p = dst + 4*x + y*dst_linesize;
        AV_WL32(p, v);
        break;
    }
}
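/* The rgbtest pattern is three horizontal ramps: red over the top third of
 * the frame, green over the middle third and blue over the bottom third,
 * each brightening from left to right. */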
static void rgbtest_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    int x, y, w = frame->width, h = frame->height;

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = 256*x/w;
            int r = 0, g = 0, b = 0;

            if      (3*y < h  ) r = c;
            else if (3*y < 2*h) g = c;
            else                b = c;

            rgbtest_put_pixel(frame->data[0], frame->linesize[0], x, y, r, g, b,
                              ctx->outputs[0]->format, test->rgba_map);
        }
    }
}

static av_cold int rgbtest_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->draw_once = 1;
    test->fill_picture_fn = rgbtest_fill_picture;
    return init(ctx);
}

static int rgbtest_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGBA, AV_PIX_FMT_ARGB, AV_PIX_FMT_BGRA, AV_PIX_FMT_ABGR,
        AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB24,
        AV_PIX_FMT_RGB444, AV_PIX_FMT_BGR444,
        AV_PIX_FMT_RGB565, AV_PIX_FMT_BGR565,
        AV_PIX_FMT_RGB555, AV_PIX_FMT_BGR555,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int rgbtest_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    ff_fill_rgba_map(test->rgba_map, outlink->format);
    return config_props(outlink);
}

static const AVFilterPad avfilter_vsrc_rgbtestsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = rgbtest_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_rgbtestsrc = {
    .name          = "rgbtestsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate RGB test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &rgbtestsrc_class,
    .init          = rgbtest_init,
    .uninit        = uninit,
    .query_formats = rgbtest_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_rgbtestsrc_outputs,
};

#endif /* CONFIG_RGBTESTSRC_FILTER */
#if CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER
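/* All bar colors below are 8-bit limited-range Y', Cb, Cr, A quadruples;
 * draw_bar() writes one component per plane of the planar YUV output. */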
static const uint8_t rainbow[7][4] = {
    { 180, 128, 128, 255 },     /* 75% white */
    { 161,  44, 141, 255 },     /* 75% yellow */
    { 131, 156,  44, 255 },     /* 75% cyan */
    { 112,  72,  57, 255 },     /* 75% green */
    {  83, 183, 198, 255 },     /* 75% magenta */
    {  65,  99, 212, 255 },     /* 75% red */
    {  34, 212, 114, 255 },     /* 75% blue */
};

static const uint8_t rainbowhd[7][4] = {
    { 180, 128, 128, 255 },     /* 75% white */
    { 168,  44, 136, 255 },     /* 75% yellow */
    { 145, 147,  44, 255 },     /* 75% cyan */
    { 133,  63,  52, 255 },     /* 75% green */
    {  63, 193, 204, 255 },     /* 75% magenta */
    {  51, 109, 212, 255 },     /* 75% red */
    {  28, 212, 120, 255 },     /* 75% blue */
};

static const uint8_t wobnair[7][4] = {
    {  34, 212, 114, 255 },     /* 75% blue */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    {  83, 183, 198, 255 },     /* 75% magenta */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    { 131, 156,  44, 255 },     /* 75% cyan */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    { 180, 128, 128, 255 },     /* 75% white */
};

static const uint8_t white[4] = { 235, 128, 128, 255 };

/* pluge pulses */
static const uint8_t neg4ire[4] = {  7, 128, 128, 255 };
static const uint8_t pos4ire[4] = { 24, 128, 128, 255 };

/* fudged Q/-I */
static const uint8_t i_pixel[4] = { 57, 156,  97, 255 };
static const uint8_t q_pixel[4] = { 44, 171, 147, 255 };

static const uint8_t gray40[4] = { 104, 128, 128, 255 };
static const uint8_t gray15[4] = {  49, 128, 128, 255 };
static const uint8_t   cyan[4] = { 188, 154,  16, 255 };
static const uint8_t yellow[4] = { 219,  16, 138, 255 };
static const uint8_t   blue[4] = {  32, 240, 118, 255 };
static const uint8_t    red[4] = {  63, 102, 240, 255 };
static const uint8_t black0[4] = {  16, 128, 128, 255 };
static const uint8_t black2[4] = {  20, 128, 128, 255 };
static const uint8_t black4[4] = {  25, 128, 128, 255 };
static const uint8_t   neg2[4] = {  12, 128, 128, 255 };
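/* Fill a solid rectangle of the given Y'CbCrA color, clipping it against the
 * frame and rounding position and size on the chroma planes according to the
 * pixel format's subsampling. */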
static void draw_bar(TestSourceContext *test, const uint8_t color[4],
                     int x, int y, int w, int h,
                     AVFrame *frame)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t *p, *p0;
    int plane;

    x = FFMIN(x, test->w - 1);
    y = FFMIN(y, test->h - 1);
    w = FFMIN(w, test->w - x);
    h = FFMIN(h, test->h - y);

    av_assert0(x + w <= test->w);
    av_assert0(y + h <= test->h);

    for (plane = 0; frame->data[plane]; plane++) {
        const int c = color[plane];
        const int linesize = frame->linesize[plane];
        int i, px, py, pw, ph;

        if (plane == 1 || plane == 2) {
            px = x >> desc->log2_chroma_w;
            pw = FF_CEIL_RSHIFT(w, desc->log2_chroma_w);
            py = y >> desc->log2_chroma_h;
            ph = FF_CEIL_RSHIFT(h, desc->log2_chroma_h);
        } else {
            px = x;
            pw = w;
            py = y;
            ph = h;
        }

        p0 = p = frame->data[plane] + py * linesize + px;
        memset(p, c, pw);
        p += linesize;
        for (i = 1; i < ph; i++, p += linesize)
            memcpy(p, p0, pw);
    }
}
static int smptebars_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
        AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
        AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad smptebars_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};
#if CONFIG_SMPTEBARS_FILTER

#define smptebars_options options
AVFILTER_DEFINE_CLASS(smptebars);

static void smptebars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, r_h, w_h, p_w, p_h, i, tmp, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    av_frame_set_colorspace(picref, AVCOL_SPC_BT470BG);

    r_w = FFALIGN((test->w + 6) / 7, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 2 / 3, 1 << pixdesc->log2_chroma_h);
    w_h = FFALIGN(test->h * 3 / 4 - r_h, 1 << pixdesc->log2_chroma_h);
    p_w = FFALIGN(r_w * 5 / 4, 1 << pixdesc->log2_chroma_w);
    p_h = test->h - w_h - r_h;

    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbow[i], x, 0,   r_w, r_h, picref);
        draw_bar(test, wobnair[i], x, r_h, r_w, w_h, picref);
        x += r_w;
    }
    x = 0;
    draw_bar(test, i_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, white, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, q_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    tmp = FFALIGN(5 * r_w - x, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, pos4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, test->w - x, p_h, picref);
}

static av_cold int smptebars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = smptebars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_smptebars = {
    .name          = "smptebars",
    .description   = NULL_IF_CONFIG_SMALL("Generate SMPTE color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &smptebars_class,
    .init          = smptebars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .inputs        = NULL,
    .outputs       = smptebars_outputs,
};

#endif /* CONFIG_SMPTEBARS_FILTER */
#if CONFIG_SMPTEHDBARS_FILTER

#define smptehdbars_options options
AVFILTER_DEFINE_CLASS(smptehdbars);

static void smptehdbars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int d_w, r_w, r_h, l_w, i, tmp, x = 0, y = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    av_frame_set_colorspace(picref, AVCOL_SPC_BT709);

    d_w = FFALIGN(test->w / 8, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 7 / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, gray40, x, 0, d_w, r_h, picref);
    x += d_w;

    r_w = FFALIGN((((test->w + 3) / 4) * 3) / 7, 1 << pixdesc->log2_chroma_w);
    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbowhd[i], x, 0, r_w, r_h, picref);
        x += r_w;
    }
    draw_bar(test, gray40, x, 0, test->w - x, r_h, picref);
    y = r_h;
    r_h = FFALIGN(test->h / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, cyan, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, i_pixel, x, y, r_w, r_h, picref);
    x += r_w;
    tmp = r_w * 6;
    draw_bar(test, rainbowhd[0], x, y, tmp, r_h, picref);
    x += tmp;
    l_w = x;
    draw_bar(test, blue, x, y, test->w - x, r_h, picref);
    y += r_h;
    draw_bar(test, yellow, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, q_pixel, x, y, r_w, r_h, picref);
    x += r_w;

    for (i = 0; i < tmp; i += 1 << pixdesc->log2_chroma_w) {
        uint8_t yramp[4] = {0};

        yramp[0] = i * 255 / tmp;
        yramp[1] = 128;
        yramp[2] = 128;
        yramp[3] = 255;

        draw_bar(test, yramp, x, y, 1 << pixdesc->log2_chroma_w, r_h, picref);
        x += 1 << pixdesc->log2_chroma_w;
    }

    draw_bar(test, red, x, y, test->w - x, r_h, picref);
    y += r_h;
    draw_bar(test, gray15, 0, y, d_w, test->h - y, picref);
    x = d_w;
    tmp = FFALIGN(r_w * 3 / 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, white, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 5 / 6, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black4, x, y, tmp, test->h - y, picref);
    x += tmp;
    r_w = l_w - x;
    draw_bar(test, black0, x, y, r_w, test->h - y, picref);
    x += r_w;
    draw_bar(test, gray15, x, y, test->w - x, test->h - y, picref);
}

static av_cold int smptehdbars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = smptehdbars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_smptehdbars = {
    .name          = "smptehdbars",
    .description   = NULL_IF_CONFIG_SMALL("Generate SMPTE HD color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &smptehdbars_class,
    .init          = smptehdbars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .inputs        = NULL,
    .outputs       = smptebars_outputs,
};

#endif /* CONFIG_SMPTEHDBARS_FILTER */
#endif /* CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER */
#if CONFIG_ALLYUV_FILTER

static const AVOption allyuv_options[] = {
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(allyuv);
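/* A single 4096x4096 frame covers every 8-bit Y/U/V combination: Y ramps
 * along x (mirrored in the right half), U depends on the 8-pixel block and
 * the row modulo 16, and V steps up once every 16 rows. */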
static void allyuv_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    const int ys = frame->linesize[0];
    const int us = frame->linesize[1];
    const int vs = frame->linesize[2];
    int x, y, j;

    for (y = 0; y < 4096; y++) {
        for (x = 0; x < 2048; x++) {
            frame->data[0][y * ys + x] = ((x / 8) % 256);
            frame->data[0][y * ys + 4095 - x] = ((x / 8) % 256);
        }

        for (x = 0; x < 2048; x+=8) {
            for (j = 0; j < 8; j++) {
                frame->data[1][vs * y + x + j] = (y%16 + (j % 8) * 16);
                frame->data[1][vs * y + 4095 - x - j] = (128 + y%16 + (j % 8) * 16);
            }
        }

        for (x = 0; x < 4096; x++)
            frame->data[2][y * us + x] = 256 * y / 4096;
    }
}

static av_cold int allyuv_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->w = test->h = 4096;
    test->draw_once = 1;
    test->fill_picture_fn = allyuv_fill_picture;

    return init(ctx);
}

static int allyuv_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_GBRP,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_allyuv_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_allyuv = {
    .name          = "allyuv",
    .description   = NULL_IF_CONFIG_SMALL("Generate all yuv colors."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &allyuv_class,
    .init          = allyuv_init,
    .uninit        = uninit,
    .query_formats = allyuv_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_allyuv_outputs,
};

#endif /* CONFIG_ALLYUV_FILTER */
#if CONFIG_ALLRGB_FILTER

static const AVOption allrgb_options[] = {
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(allrgb);
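/* A 4096x4096 RGB24 frame holds exactly 2^24 pixels, one per possible color:
 * the low 8 bits of x and y become R and G, and their high nibbles are packed
 * together into B. */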
static void allrgb_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    unsigned x, y;
    const int linesize = frame->linesize[0];
    uint8_t *line = frame->data[0];

    for (y = 0; y < 4096; y++) {
        uint8_t *dst = line;

        for (x = 0; x < 4096; x++) {
            *dst++ = x;
            *dst++ = y;
            *dst++ = (x >> 8) | ((y >> 8) << 4);
        }
        line += linesize;
    }
}

static av_cold int allrgb_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->w = test->h = 4096;
    test->draw_once = 1;
    test->fill_picture_fn = allrgb_fill_picture;

    return init(ctx);
}

static int allrgb_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    ff_fill_rgba_map(test->rgba_map, outlink->format);

    return config_props(outlink);
}

static int allrgb_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };

    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_allrgb_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = allrgb_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_allrgb = {
    .name          = "allrgb",
    .description   = NULL_IF_CONFIG_SMALL("Generate all RGB colors."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &allrgb_class,
    .init          = allrgb_init,
    .uninit        = uninit,
    .query_formats = allrgb_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_allrgb_outputs,
};

#endif /* CONFIG_ALLRGB_FILTER */