/*
 * Copyright (c) 2007 Nicolas George <nicolas.george@normalesup.org>
 * Copyright (c) 2011 Stefano Sabatini
 * Copyright (c) 2012 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Misc test sources.
 *
 * testsrc is based on the test pattern generator demuxer by Nicolas George:
 * http://lists.ffmpeg.org/pipermail/ffmpeg-devel/2007-October/037845.html
 *
 * rgbtestsrc is ported from MPlayer libmpcodecs/vf_rgbtest.c by
 * Michael Niedermayer.
 *
 * allyuv, smptebars and smptehdbars are by Paul B Mahol.
 */
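
/*
 * Typical usage (illustrative only): these sources are normally instantiated
 * through lavfi, for example
 *
 *   ffmpeg -f lavfi -i testsrc=size=320x240:rate=25:duration=5 out.mp4
 *
 * where "size", "rate" and "duration" are the option names declared in the
 * option tables below.
 */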
#include <float.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/ffmath.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/parseutils.h"
#include "libavutil/xga_font_data.h"
#include "avfilter.h"
#include "drawutils.h"
#include "filters.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

typedef struct TestSourceContext {
    const AVClass *class;
    int w, h;
    unsigned int nb_frame;
    AVRational time_base, frame_rate;
    int64_t pts;
    int64_t duration;           ///< duration expressed in microseconds
    AVRational sar;             ///< sample aspect ratio
    int draw_once;              ///< draw only the first frame, always put out the same picture
    int draw_once_reset;        ///< draw only the first frame or in case of reset
    AVFrame *picref;            ///< cached reference containing the painted picture

    void (* fill_picture_fn)(AVFilterContext *ctx, AVFrame *frame);

    /* only used by testsrc */
    int nb_decimals;

    /* only used by testsrc2 */
    int alpha;

    /* only used by color */
    FFDrawContext draw;
    FFDrawColor color;
    uint8_t color_rgba[4];

    /* only used by rgbtest */
    uint8_t rgba_map[4];
    int complement;
    int depth;

    /* only used by haldclut */
    int level;
} TestSourceContext;

#define OFFSET(x) offsetof(TestSourceContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
#define FLAGSR AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM

#define SIZE_OPTIONS \
    { "size",     "set video size",     OFFSET(w),          AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\
    { "s",        "set video size",     OFFSET(w),          AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\

#define COMMON_OPTIONS_NOSIZE \
    { "rate",     "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
    { "r",        "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
    { "duration", "set video duration", OFFSET(duration),   AV_OPT_TYPE_DURATION,   {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "d",        "set video duration", OFFSET(duration),   AV_OPT_TYPE_DURATION,   {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "sar",      "set video sample aspect ratio", OFFSET(sar), AV_OPT_TYPE_RATIONAL, {.dbl= 1}, 0, INT_MAX, FLAGS },

#define COMMON_OPTIONS SIZE_OPTIONS COMMON_OPTIONS_NOSIZE

#define NOSIZE_OPTIONS_OFFSET 2
/* Filters using COMMON_OPTIONS_NOSIZE also use the following options
 * via &options[NOSIZE_OPTIONS_OFFSET]. So don't break it. */
static const AVOption options[] = {
    COMMON_OPTIONS
    { NULL }
};

static av_cold int init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->time_base = av_inv_q(test->frame_rate);
    test->nb_frame = 0;
    test->pts = 0;

    av_log(ctx, AV_LOG_VERBOSE, "size:%dx%d rate:%d/%d duration:%f sar:%d/%d\n",
           test->w, test->h, test->frame_rate.num, test->frame_rate.den,
           test->duration < 0 ? -1 : (double)test->duration/1000000,
           test->sar.num, test->sar.den);
    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    av_frame_free(&test->picref);
}

static int config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    outlink->w = test->w;
    outlink->h = test->h;
    outlink->sample_aspect_ratio = test->sar;
    outlink->frame_rate = test->frame_rate;
    outlink->time_base = test->time_base;

    return 0;
}
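
/* Request-driven frame production: a frame is produced only when the output
 * link asks for one, and EOF is signalled once the configured duration has
 * been reached.  Sources that always emit the same picture (draw_once)
 * render it once into picref and hand out clones of it on every activation. */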
static int activate(AVFilterContext *ctx)
{
    AVFilterLink *outlink = ctx->outputs[0];
    TestSourceContext *test = ctx->priv;
    AVFrame *frame;

    if (!ff_outlink_frame_wanted(outlink))
        return FFERROR_NOT_READY;
    if (test->duration >= 0 &&
        av_rescale_q(test->pts, test->time_base, AV_TIME_BASE_Q) >= test->duration) {
        ff_outlink_set_status(outlink, AVERROR_EOF, test->pts);
        return 0;
    }

    if (test->draw_once) {
        if (test->draw_once_reset) {
            av_frame_free(&test->picref);
            test->draw_once_reset = 0;
        }
        if (!test->picref) {
            test->picref =
                ff_get_video_buffer(outlink, test->w, test->h);
            if (!test->picref)
                return AVERROR(ENOMEM);
            test->fill_picture_fn(outlink->src, test->picref);
        }
        frame = av_frame_clone(test->picref);
    } else
        frame = ff_get_video_buffer(outlink, test->w, test->h);

    if (!frame)
        return AVERROR(ENOMEM);
    frame->pts                 = test->pts;
    frame->key_frame           = 1;
    frame->interlaced_frame    = 0;
    frame->pict_type           = AV_PICTURE_TYPE_I;
    frame->sample_aspect_ratio = test->sar;
    if (!test->draw_once)
        test->fill_picture_fn(outlink->src, frame);

    test->pts++;
    test->nb_frame++;

    return ff_filter_frame(outlink, frame);
}

#if CONFIG_COLOR_FILTER

static const AVOption color_options[] = {
    { "color", "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, 0, 0, FLAGSR },
    { "c",     "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, 0, 0, FLAGSR },
    COMMON_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(color);

static void color_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    ff_fill_rectangle(&test->draw, &test->color,
                      picref->data, picref->linesize,
                      0, 0, test->w, test->h);
}

static av_cold int color_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;
    test->fill_picture_fn = color_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

static int color_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}

static int color_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    TestSourceContext *test = ctx->priv;
    int ret;

    ff_draw_init(&test->draw, inlink->format, 0);
    ff_draw_color(&test->draw, &test->color, test->color_rgba);

    test->w = ff_draw_round_to_sub(&test->draw, 0, -1, test->w);
    test->h = ff_draw_round_to_sub(&test->draw, 1, -1, test->h);
    if (av_image_check_size(test->w, test->h, 0, ctx) < 0)
        return AVERROR(EINVAL);

    if ((ret = config_props(inlink)) < 0)
        return ret;

    return 0;
}

static int color_process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                                 char *res, int res_len, int flags)
{
    TestSourceContext *test = ctx->priv;
    int ret;

    ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);
    if (ret < 0)
        return ret;

    ff_draw_color(&test->draw, &test->color, test->color_rgba);
    test->draw_once_reset = 1;
    return 0;
}

static const AVFilterPad color_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = color_config_props,
    },
    { NULL }
};
AVFilter ff_vsrc_color = {
    .name            = "color",
    .description     = NULL_IF_CONFIG_SMALL("Provide a uniformly colored input."),
    .priv_class      = &color_class,
    .priv_size       = sizeof(TestSourceContext),
    .init            = color_init,
    .uninit          = uninit,
    .activate        = activate,
    .query_formats   = color_query_formats,
    .inputs          = NULL,
    .outputs         = color_outputs,
    .process_command = color_process_command,
};

#endif /* CONFIG_COLOR_FILTER */

#if CONFIG_HALDCLUTSRC_FILTER

static const AVOption haldclutsrc_options[] = {
    { "level", "set level", OFFSET(level), AV_OPT_TYPE_INT, {.i64 = 6}, 2, 16, FLAGS },
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(haldclutsrc);
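
/* Identity Hald CLUT: the frame is a level^3 x level^3 square whose R, G and
 * B components enumerate every (i, j, k) point of a cube with level^2 steps
 * per axis, scaled to the full 8- or 16-bit range, so feeding the result back
 * through the haldclut filter leaves an input unchanged. */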
static void haldclutsrc_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    int i, j, k, x = 0, y = 0, is16bit = 0, step;
    uint32_t alpha = 0;
    const TestSourceContext *hc = ctx->priv;
    int level = hc->level;
    float scale;
    const int w = frame->width;
    const int h = frame->height;
    const uint8_t *data = frame->data[0];
    const int linesize  = frame->linesize[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t rgba_map[4];

    av_assert0(w == h && w == level*level*level);

    ff_fill_rgba_map(rgba_map, frame->format);

    switch (frame->format) {
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_BGR48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_BGRA64:
        is16bit = 1;
        alpha = 0xffff;
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        alpha = 0xff;
        break;
    }

    step  = av_get_padded_bits_per_pixel(desc) >> (3 + is16bit);
    scale = ((float)(1 << (8*(is16bit+1))) - 1) / (level*level - 1);

#define LOAD_CLUT(nbits) do {                                                   \
    uint##nbits##_t *dst = ((uint##nbits##_t *)(data + y*linesize)) + x*step;   \
    dst[rgba_map[0]] = av_clip_uint##nbits(i * scale);                          \
    dst[rgba_map[1]] = av_clip_uint##nbits(j * scale);                          \
    dst[rgba_map[2]] = av_clip_uint##nbits(k * scale);                          \
    if (step == 4)                                                              \
        dst[rgba_map[3]] = alpha;                                               \
} while (0)

    level *= level;

    for (k = 0; k < level; k++) {
        for (j = 0; j < level; j++) {
            for (i = 0; i < level; i++) {
                if (!is16bit)
                    LOAD_CLUT(8);
                else
                    LOAD_CLUT(16);
                if (++x == w) {
                    x = 0;
                    y++;
                }
            }
        }
    }
}

static av_cold int haldclutsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *hc = ctx->priv;
    hc->fill_picture_fn = haldclutsrc_fill_picture;
    hc->draw_once = 1;
    return init(ctx);
}

static int haldclutsrc_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24,  AV_PIX_FMT_BGR24,
        AV_PIX_FMT_RGBA,   AV_PIX_FMT_BGRA,
        AV_PIX_FMT_ARGB,   AV_PIX_FMT_ABGR,
        AV_PIX_FMT_0RGB,   AV_PIX_FMT_0BGR,
        AV_PIX_FMT_RGB0,   AV_PIX_FMT_BGR0,
        AV_PIX_FMT_RGB48,  AV_PIX_FMT_BGR48,
        AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int haldclutsrc_config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    TestSourceContext *hc = ctx->priv;

    hc->w = hc->h = hc->level * hc->level * hc->level;
    return config_props(outlink);
}

static const AVFilterPad haldclutsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = haldclutsrc_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_haldclutsrc = {
    .name          = "haldclutsrc",
    .description   = NULL_IF_CONFIG_SMALL("Provide an identity Hald CLUT."),
    .priv_class    = &haldclutsrc_class,
    .priv_size     = sizeof(TestSourceContext),
    .init          = haldclutsrc_init,
    .uninit        = uninit,
    .query_formats = haldclutsrc_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = haldclutsrc_outputs,
};
#endif /* CONFIG_HALDCLUTSRC_FILTER */

#if CONFIG_NULLSRC_FILTER

#define nullsrc_options options
AVFILTER_DEFINE_CLASS(nullsrc);

static void nullsrc_fill_picture(AVFilterContext *ctx, AVFrame *picref) { }

static av_cold int nullsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = nullsrc_fill_picture;
    return init(ctx);
}

static const AVFilterPad nullsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_props,
    },
    { NULL },
};

AVFilter ff_vsrc_nullsrc = {
    .name          = "nullsrc",
    .description   = NULL_IF_CONFIG_SMALL("Null video source, return unprocessed video frames."),
    .init          = nullsrc_init,
    .uninit        = uninit,
    .activate      = activate,
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &nullsrc_class,
    .inputs        = NULL,
    .outputs       = nullsrc_outputs,
};

#endif /* CONFIG_NULLSRC_FILTER */

#if CONFIG_TESTSRC_FILTER

static const AVOption testsrc_options[] = {
    COMMON_OPTIONS
    { "decimals", "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
    { "n",        "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc);
/**
 * Fill a rectangle with value val.
 *
 * @param val the RGB value to set
 * @param dst pointer to the destination buffer to fill
 * @param dst_linesize linesize of destination
 * @param segment_width width of the segment
 * @param x horizontal coordinate where to draw the rectangle in the destination buffer
 * @param y vertical coordinate where to draw the rectangle in the destination buffer
 * @param w width of the rectangle to draw, expressed as a number of segment_width units
 * @param h height of the rectangle to draw, expressed as a number of segment_width units
 */
static void draw_rectangle(unsigned val, uint8_t *dst, int dst_linesize, int segment_width,
                           int x, int y, int w, int h)
{
    int i;
    int step = 3;

    dst += segment_width * (step * x + y * dst_linesize);
    w *= segment_width * step;
    h *= segment_width;
    for (i = 0; i < h; i++) {
        memset(dst, val, w);
        dst += dst_linesize;
    }
}
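
/* Digits are drawn as a classic seven-segment display on an 8x13 grid of
 * segment_width units; masks[] below selects which of the seven segments
 * (three horizontal bars, four vertical half-bars) are lit for each decimal
 * digit. */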
static void draw_digit(int digit, uint8_t *dst, int dst_linesize,
                       int segment_width)
{
#define TOP_HBAR        1
#define MID_HBAR        2
#define BOT_HBAR        4
#define LEFT_TOP_VBAR   8
#define LEFT_BOT_VBAR  16
#define RIGHT_TOP_VBAR 32
#define RIGHT_BOT_VBAR 64
    struct segments {
        int x, y, w, h;
    } segments[] = {
        { 1,  0, 5, 1 }, /* TOP_HBAR */
        { 1,  6, 5, 1 }, /* MID_HBAR */
        { 1, 12, 5, 1 }, /* BOT_HBAR */
        { 0,  1, 1, 5 }, /* LEFT_TOP_VBAR */
        { 0,  7, 1, 5 }, /* LEFT_BOT_VBAR */
        { 6,  1, 1, 5 }, /* RIGHT_TOP_VBAR */
        { 6,  7, 1, 5 }  /* RIGHT_BOT_VBAR */
    };
    static const unsigned char masks[10] = {
        /* 0 */ TOP_HBAR         |BOT_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 1 */                                                        RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 2 */ TOP_HBAR|MID_HBAR|BOT_HBAR|LEFT_BOT_VBAR              |RIGHT_TOP_VBAR,
        /* 3 */ TOP_HBAR|MID_HBAR|BOT_HBAR                            |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 4 */          MID_HBAR         |LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 5 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR              |RIGHT_BOT_VBAR,
        /* 6 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_BOT_VBAR,
        /* 7 */ TOP_HBAR                                              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 8 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 9 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
    };
    unsigned mask = masks[digit];
    int i;

    draw_rectangle(0, dst, dst_linesize, segment_width, 0, 0, 8, 13);
    for (i = 0; i < FF_ARRAY_ELEMS(segments); i++)
        if (mask & (1<<i))
            draw_rectangle(255, dst, dst_linesize, segment_width,
                           segments[i].x, segments[i].y, segments[i].w, segments[i].h);
}

#define GRADIENT_SIZE (6 * 256)

static void test_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    uint8_t *p, *p0;
    int x, y;
    int color, color_rest;
    int icolor;
    int radius;
    int quad0, quad;
    int dquad_x, dquad_y;
    int grad, dgrad, rgrad, drgrad;
    int seg_size;
    int second;
    int i;
    uint8_t *data = frame->data[0];
    int width  = frame->width;
    int height = frame->height;

    /* draw colored bars and circle */
    radius = (width + height) / 4;
    quad0 = width * width / 4 + height * height / 4 - radius * radius;
    dquad_y = 1 - height;
    p0 = data;
    for (y = 0; y < height; y++) {
        p = p0;
        color = 0;
        color_rest = 0;
        quad = quad0;
        dquad_x = 1 - width;
        for (x = 0; x < width; x++) {
            icolor = color;
            if (quad < 0)
                icolor ^= 7;
            quad += dquad_x;
            dquad_x += 2;
            *(p++) = icolor & 1 ? 255 : 0;
            *(p++) = icolor & 2 ? 255 : 0;
            *(p++) = icolor & 4 ? 255 : 0;
            color_rest += 8;
            if (color_rest >= width) {
                color_rest -= width;
                color++;
            }
        }
        quad0 += dquad_y;
        dquad_y += 2;
        p0 += frame->linesize[0];
    }

    /* draw sliding color line */
    p0 = p = data + frame->linesize[0] * (height * 3/4);
    grad = (256 * test->nb_frame * test->time_base.num / test->time_base.den) %
        GRADIENT_SIZE;
    rgrad = 0;
    dgrad = GRADIENT_SIZE / width;
    drgrad = GRADIENT_SIZE % width;
    for (x = 0; x < width; x++) {
        *(p++) =
            grad < 256 || grad >= 5 * 256 ? 255 :
            grad >= 2 * 256 && grad < 4 * 256 ? 0 :
            grad < 2 * 256 ? 2 * 256 - 1 - grad : grad - 4 * 256;
        *(p++) =
            grad >= 4 * 256 ? 0 :
            grad >= 1 * 256 && grad < 3 * 256 ? 255 :
            grad < 1 * 256 ? grad : 4 * 256 - 1 - grad;
        *(p++) =
            grad < 2 * 256 ? 0 :
            grad >= 3 * 256 && grad < 5 * 256 ? 255 :
            grad < 3 * 256 ? grad - 2 * 256 : 6 * 256 - 1 - grad;
        grad += dgrad;
        rgrad += drgrad;
        if (rgrad >= GRADIENT_SIZE) {
            grad++;
            rgrad -= GRADIENT_SIZE;
        }
        if (grad >= GRADIENT_SIZE)
            grad -= GRADIENT_SIZE;
    }
    p = p0;
    for (y = height / 8; y > 0; y--) {
        memcpy(p+frame->linesize[0], p, 3 * width);
        p += frame->linesize[0];
    }

    /* draw digits */
    seg_size = width / 80;
    if (seg_size >= 1 && height >= 13 * seg_size) {
        int64_t p10decimals = 1;
        double time = av_q2d(test->time_base) * test->nb_frame *
                      ff_exp10(test->nb_decimals);
        if (time >= INT_MAX)
            return;

        for (x = 0; x < test->nb_decimals; x++)
            p10decimals *= 10;

        second = av_rescale_rnd(test->nb_frame * test->time_base.num, p10decimals, test->time_base.den, AV_ROUND_ZERO);
        x = width - (width - seg_size * 64) / 2;
        y = (height - seg_size * 13) / 2;
        p = data + (x*3 + y * frame->linesize[0]);
        for (i = 0; i < 8; i++) {
            p -= 3 * 8 * seg_size;
            draw_digit(second % 10, p, frame->linesize[0], seg_size);
            second /= 10;
            if (second == 0)
                break;
        }
    }
}

static av_cold int test_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = test_fill_picture;
    return init(ctx);
}

static int test_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_testsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_testsrc = {
    .name          = "testsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &testsrc_class,
    .init          = test_init,
    .uninit        = uninit,
    .query_formats = test_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_testsrc_outputs,
};

#endif /* CONFIG_TESTSRC_FILTER */

#if CONFIG_TESTSRC2_FILTER

static const AVOption testsrc2_options[] = {
    COMMON_OPTIONS
    { "alpha", "set global alpha (opacity)", OFFSET(alpha), AV_OPT_TYPE_INT, {.i64 = 255}, 0, 255, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc2);

static void set_color(TestSourceContext *s, FFDrawColor *color, uint32_t argb)
{
    uint8_t rgba[4] = { (argb >> 16) & 0xFF,
                        (argb >>  8) & 0xFF,
                        (argb >>  0) & 0xFF,
                        (argb >> 24) & 0xFF, };
    ff_draw_color(&s->draw, color, rgba);
}
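
/* Map an index in [0, 6*256) onto a smooth RGB hue cycle
 * (red -> yellow -> green -> cyan -> blue -> magenta -> red). */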
static uint32_t color_gradient(unsigned index)
{
    unsigned si = index & 0xFF, sd = 0xFF - si;
    switch (index >> 8) {
    case 0: return 0xFF0000 + (si <<  8);
    case 1: return 0x00FF00 + (sd << 16);
    case 2: return 0x00FF00 + (si <<  0);
    case 3: return 0x0000FF + (sd <<  8);
    case 4: return 0x0000FF + (si << 16);
    case 5: return 0xFF0000 + (sd <<  0);
    }
    av_assert0(0);
}

static void draw_text(TestSourceContext *s, AVFrame *frame, FFDrawColor *color,
                      int x0, int y0, const uint8_t *text)
{
    int x = x0;

    for (; *text; text++) {
        if (*text == '\n') {
            x = x0;
            y0 += 16;
            continue;
        }
        ff_blend_mask(&s->draw, color, frame->data, frame->linesize,
                      frame->width, frame->height,
                      avpriv_vga16_font + *text * 16, 1, 8, 16, 0, 0, x, y0);
        x += 8;
    }
}

static void test2_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *s = ctx->priv;
    FFDrawColor color;
    unsigned alpha = (uint32_t)s->alpha << 24;

    /* colored background */
    {
        unsigned i, x = 0, x2;

        x = 0;
        for (i = 1; i < 7; i++) {
            x2 = av_rescale(i, s->w, 6);
            x2 = ff_draw_round_to_sub(&s->draw, 0, 0, x2);
            set_color(s, &color, ((i & 1) ? 0xFF0000 : 0) |
                                 ((i & 2) ? 0x00FF00 : 0) |
                                 ((i & 4) ? 0x0000FF : 0) |
                                 alpha);
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x, 0, x2 - x, frame->height);
            x = x2;
        }
    }

    /* oblique gradient */
    /* note: too slow if using blending */
    if (s->h >= 64) {
        unsigned x, dx, y0, y, g0, g;

        dx = ff_draw_round_to_sub(&s->draw, 0, +1, 1);
        y0 = av_rescale_q(s->pts, s->time_base, av_make_q(2, s->h - 16));
        g0 = av_rescale_q(s->pts, s->time_base, av_make_q(1, 128));
        for (x = 0; x < s->w; x += dx) {
            g = (av_rescale(x, 6 * 256, s->w) + g0) % (6 * 256);
            set_color(s, &color, color_gradient(g) | alpha);
            y = y0 + av_rescale(x, s->h / 2, s->w);
            y %= 2 * (s->h - 16);
            if (y > s->h - 16)
                y = 2 * (s->h - 16) - y;
            y = ff_draw_round_to_sub(&s->draw, 1, 0, y);
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x, y, dx, 16);
        }
    }

    /* top right: draw clock hands */
    if (s->w >= 64 && s->h >= 64) {
        int l = (FFMIN(s->w, s->h) - 32) >> 1;
        int steps = FFMAX(4, l >> 5);
        int xc = (s->w >> 2) + (s->w >> 1);
        int yc = (s->h >> 2);
        int cycle = l << 2;
        int pos, xh, yh;
        int c, i;

        for (c = 0; c < 3; c++) {
            set_color(s, &color, (0xBBBBBB ^ (0xFF << (c << 3))) | alpha);
            pos = av_rescale_q(s->pts, s->time_base, av_make_q(64 >> (c << 1), cycle)) % cycle;
            xh = pos < 1 * l ? pos :
                 pos < 2 * l ? l :
                 pos < 3 * l ? 3 * l - pos : 0;
            yh = pos < 1 * l ? 0 :
                 pos < 2 * l ? pos - l :
                 pos < 3 * l ? l :
                               cycle - pos;
            xh -= l >> 1;
            yh -= l >> 1;
            for (i = 1; i <= steps; i++) {
                int x = av_rescale(xh, i, steps) + xc;
                int y = av_rescale(yh, i, steps) + yc;

                x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
                y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
                ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                                  x, y, 8, 8);
            }
        }
    }

    /* bottom left: beating rectangles */
    if (s->w >= 64 && s->h >= 64) {
        int l = (FFMIN(s->w, s->h) - 16) >> 2;
        int cycle = l << 3;
        int xc = (s->w >> 2);
        int yc = (s->h >> 2) + (s->h >> 1);
        int xm1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 8);
        int xm2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 8);
        int ym1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 8);
        int ym2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 8);
        int size, step, x1, x2, y1, y2;

        size = av_rescale_q(s->pts, s->time_base, av_make_q(4, cycle));
        step = size / l;
        size %= l;
        if (step & 1)
            size = l - size;
        step = (step >> 1) & 3;
        set_color(s, &color, 0xFF808080);
        x1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 4 - size);
        x2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 4 + size);
        y1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 4 - size);
        y2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 4 + size);
        if (step == 0 || step == 2)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x1, ym1, x2 - x1, ym2 - ym1);
        if (step == 1 || step == 2)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              xm1, y1, xm2 - xm1, y2 - y1);
        if (step == 3)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x1, y1, x2 - x1, y2 - y1);
    }

    /* bottom right: checker with random noise */
    {
        unsigned xmin = av_rescale(5, s->w, 8);
        unsigned xmax = av_rescale(7, s->w, 8);
        unsigned ymin = av_rescale(5, s->h, 8);
        unsigned ymax = av_rescale(7, s->h, 8);
        unsigned x, y, i, r;
        uint8_t alpha[256];

        r = s->pts;
        for (y = ymin; y + 15 < ymax; y += 16) {
            for (x = xmin; x + 15 < xmax; x += 16) {
                if ((x ^ y) & 16)
                    continue;
                for (i = 0; i < 256; i++) {
                    r = r * 1664525 + 1013904223;
                    alpha[i] = r >> 24;
                }
                set_color(s, &color, 0xFF00FF80);
                ff_blend_mask(&s->draw, &color, frame->data, frame->linesize,
                              frame->width, frame->height,
                              alpha, 16, 16, 16, 3, 0, x, y);
            }
        }
    }

    /* bouncing square */
    if (s->w >= 16 && s->h >= 16) {
        unsigned w = s->w - 8;
        unsigned h = s->h - 8;
        unsigned x = av_rescale_q(s->pts, s->time_base, av_make_q(233, 55 * w)) % (w << 1);
        unsigned y = av_rescale_q(s->pts, s->time_base, av_make_q(233, 89 * h)) % (h << 1);

        if (x > w)
            x = (w << 1) - x;
        if (y > h)
            y = (h << 1) - y;
        x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
        y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
        set_color(s, &color, 0xFF8000FF);
        ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                          x, y, 8, 8);
    }

    /* top right: draw frame time and frame number */
    {
        char buf[256];
        unsigned time;

        time = av_rescale_q(s->pts, s->time_base, av_make_q(1, 1000)) % 86400000;
        set_color(s, &color, 0xC0000000);
        ff_blend_rectangle(&s->draw, &color, frame->data, frame->linesize,
                           frame->width, frame->height,
                           2, 2, 100, 36);
        set_color(s, &color, 0xFFFF8000);
        snprintf(buf, sizeof(buf), "%02d:%02d:%02d.%03d\n%12"PRIi64,
                 time / 3600000, (time / 60000) % 60, (time / 1000) % 60,
                 time % 1000, s->pts);
        draw_text(s, frame, &color, 4, 4, buf);
    }
}

static av_cold int test2_init(AVFilterContext *ctx)
{
    TestSourceContext *s = ctx->priv;

    s->fill_picture_fn = test2_fill_picture;
    return init(ctx);
}

static int test2_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}

static int test2_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    TestSourceContext *s = ctx->priv;

    av_assert0(ff_draw_init(&s->draw, inlink->format, 0) >= 0);
    s->w = ff_draw_round_to_sub(&s->draw, 0, -1, s->w);
    s->h = ff_draw_round_to_sub(&s->draw, 1, -1, s->h);
    if (av_image_check_size(s->w, s->h, 0, ctx) < 0)
        return AVERROR(EINVAL);
    return config_props(inlink);
}

static const AVFilterPad avfilter_vsrc_testsrc2_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = test2_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_testsrc2 = {
    .name          = "testsrc2",
    .description   = NULL_IF_CONFIG_SMALL("Generate another test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &testsrc2_class,
    .init          = test2_init,
    .uninit        = uninit,
    .query_formats = test2_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_testsrc2_outputs,
};

#endif /* CONFIG_TESTSRC2_FILTER */

#if CONFIG_RGBTESTSRC_FILTER

static const AVOption rgbtestsrc_options[] = {
    COMMON_OPTIONS
    { "complement", "set complement colors", OFFSET(complement), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
    { "co",         "set complement colors", OFFSET(complement), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(rgbtestsrc);

#define R 0
#define G 1
#define B 2
#define A 3
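
/* Write a single (r, g, b) value at (x, y), dispatching on the output pixel
 * format: packed 4-, 5- and 6-bit-per-component formats are assembled by
 * hand, 24/32-bit packed RGB uses the rgba_map component order, and planar
 * GBR formats store each component in its own plane. */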
static void rgbtest_put_pixel(uint8_t *dstp[4], int dst_linesizep[4],
                              int x, int y, unsigned r, unsigned g, unsigned b, enum AVPixelFormat fmt,
                              uint8_t rgba_map[4])
{
    uint8_t *dst = dstp[0];
    int dst_linesize = dst_linesizep[0];
    uint32_t v;
    uint8_t *p;
    uint16_t *p16;

    switch (fmt) {
    case AV_PIX_FMT_BGR444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r >> 4) << 8) | ((g >> 4) << 4) | (b >> 4); break;
    case AV_PIX_FMT_RGB444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b >> 4) << 8) | ((g >> 4) << 4) | (r >> 4); break;
    case AV_PIX_FMT_BGR555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<10) | ((g>>3)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<10) | ((g>>3)<<5) | (r>>3); break;
    case AV_PIX_FMT_BGR565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<11) | ((g>>2)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<11) | ((g>>2)<<5) | (r>>3); break;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8));
        p = dst + 3*x + y*dst_linesize;
        AV_WL24(p, v);
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8)) + (255U << (rgba_map[A]*8));
        p = dst + 4*x + y*dst_linesize;
        AV_WL32(p, v);
        break;
    case AV_PIX_FMT_GBRP:
        p = dstp[0] + x + y * dst_linesizep[0];
        p[0] = g;
        p = dstp[1] + x + y * dst_linesizep[1];
        p[0] = b;
        p = dstp[2] + x + y * dst_linesizep[2];
        p[0] = r;
        break;
    case AV_PIX_FMT_GBRP9:
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_GBRP14:
    case AV_PIX_FMT_GBRP16:
        p16 = (uint16_t *)(dstp[0] + x*2 + y * dst_linesizep[0]);
        p16[0] = g;
        p16 = (uint16_t *)(dstp[1] + x*2 + y * dst_linesizep[1]);
        p16[0] = b;
        p16 = (uint16_t *)(dstp[2] + x*2 + y * dst_linesizep[2]);
        p16[0] = r;
        break;
    }
}

static void rgbtest_fill_picture_complement(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    int x, y, w = frame->width, h = frame->height;

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = (1 << FFMAX(test->depth, 8))*x/w;
            int r = 0, g = 0, b = 0;

            if      (6*y < h  ) r = c;
            else if (6*y < 2*h) g = c, b = c;
            else if (6*y < 3*h) g = c;
            else if (6*y < 4*h) r = c, b = c;
            else if (6*y < 5*h) b = c;
            else                r = c, g = c;

            rgbtest_put_pixel(frame->data, frame->linesize, x, y, r, g, b,
                              ctx->outputs[0]->format, test->rgba_map);
        }
    }
}

static void rgbtest_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    int x, y, w = frame->width, h = frame->height;

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = (1 << FFMAX(test->depth, 8))*x/w;
            int r = 0, g = 0, b = 0;

            if      (3*y < h  ) r = c;
            else if (3*y < 2*h) g = c;
            else                b = c;

            rgbtest_put_pixel(frame->data, frame->linesize, x, y, r, g, b,
                              ctx->outputs[0]->format, test->rgba_map);
        }
    }
}

static av_cold int rgbtest_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->draw_once = 1;
    test->fill_picture_fn = test->complement ? rgbtest_fill_picture_complement : rgbtest_fill_picture;
    return init(ctx);
}

static int rgbtest_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGBA, AV_PIX_FMT_ARGB, AV_PIX_FMT_BGRA, AV_PIX_FMT_ABGR,
        AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB24,
        AV_PIX_FMT_RGB444, AV_PIX_FMT_BGR444,
        AV_PIX_FMT_RGB565, AV_PIX_FMT_BGR565,
        AV_PIX_FMT_RGB555, AV_PIX_FMT_BGR555,
        AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
        AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int rgbtest_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);

    test->depth = desc->comp[0].depth;
    ff_fill_rgba_map(test->rgba_map, outlink->format);
    return config_props(outlink);
}

static const AVFilterPad avfilter_vsrc_rgbtestsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = rgbtest_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_rgbtestsrc = {
    .name          = "rgbtestsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate RGB test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &rgbtestsrc_class,
    .init          = rgbtest_init,
    .uninit        = uninit,
    .query_formats = rgbtest_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_rgbtestsrc_outputs,
};

#endif /* CONFIG_RGBTESTSRC_FILTER */

#if CONFIG_YUVTESTSRC_FILTER

#define yuvtestsrc_options options
AVFILTER_DEFINE_CLASS(yuvtestsrc);
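
/* Three horizontal bands of equal height: the top band ramps Y from 0 to
 * full scale with U/V held at mid level, the middle band ramps U, and the
 * bottom band ramps V.  The 8- and 16-bit variants differ only in the sample
 * type used to address the planes. */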
static void yuvtest_fill_picture8(AVFilterContext *ctx, AVFrame *frame)
{
    int x, y, w = frame->width, h = frame->height / 3;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    const int factor = 1 << desc->comp[0].depth;
    const int mid = 1 << (desc->comp[0].depth - 1);
    uint8_t *ydst = frame->data[0];
    uint8_t *udst = frame->data[1];
    uint8_t *vdst = frame->data[2];
    int ylinesize = frame->linesize[0];
    int ulinesize = frame->linesize[1];
    int vlinesize = frame->linesize[2];

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = c;
            udst[x] = mid;
            vdst[x] = mid;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    h += h;
    for (; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = c;
            vdst[x] = mid;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    for (; y < frame->height; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = mid;
            vdst[x] = c;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }
}

static void yuvtest_fill_picture16(AVFilterContext *ctx, AVFrame *frame)
{
    int x, y, w = frame->width, h = frame->height / 3;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    const int factor = 1 << desc->comp[0].depth;
    const int mid = 1 << (desc->comp[0].depth - 1);
    uint16_t *ydst = (uint16_t *)frame->data[0];
    uint16_t *udst = (uint16_t *)frame->data[1];
    uint16_t *vdst = (uint16_t *)frame->data[2];
    int ylinesize = frame->linesize[0] / 2;
    int ulinesize = frame->linesize[1] / 2;
    int vlinesize = frame->linesize[2] / 2;

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = c;
            udst[x] = mid;
            vdst[x] = mid;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    h += h;
    for (; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = c;
            vdst[x] = mid;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    for (; y < frame->height; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = mid;
            vdst[x] = c;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }
}

static av_cold int yuvtest_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->draw_once = 1;
    return init(ctx);
}

static int yuvtest_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P,   AV_PIX_FMT_YUVJ444P,
        AV_PIX_FMT_YUV444P9,  AV_PIX_FMT_YUV444P10,
        AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV444P14,
        AV_PIX_FMT_YUV444P16,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int yuvtest_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);

    test->fill_picture_fn = desc->comp[0].depth > 8 ? yuvtest_fill_picture16 : yuvtest_fill_picture8;
    return config_props(outlink);
}

static const AVFilterPad avfilter_vsrc_yuvtestsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = yuvtest_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_yuvtestsrc = {
    .name          = "yuvtestsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate YUV test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &yuvtestsrc_class,
    .init          = yuvtest_init,
    .uninit        = uninit,
    .query_formats = yuvtest_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_yuvtestsrc_outputs,
};

#endif /* CONFIG_YUVTESTSRC_FILTER */

#if CONFIG_PAL75BARS_FILTER || CONFIG_PAL100BARS_FILTER || CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER
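
/* The bar colors below are 8-bit limited-range ("TV" range) Y'CbCr triplets
 * plus an alpha of 255: luma stays within 16..235 and 128 is the neutral
 * chroma value. */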
static const uint8_t rainbow[7][4] = {
    { 180, 128, 128, 255 },     /* 75% white */
    { 162,  44, 142, 255 },     /* 75% yellow */
    { 131, 156,  44, 255 },     /* 75% cyan */
    { 112,  72,  58, 255 },     /* 75% green */
    {  84, 184, 198, 255 },     /* 75% magenta */
    {  65, 100, 212, 255 },     /* 75% red */
    {  35, 212, 114, 255 },     /* 75% blue */
};

static const uint8_t rainbow100[7][4] = {
    { 235, 128, 128, 255 },     /* 100% white */
    { 210,  16, 146, 255 },     /* 100% yellow */
    { 170, 166,  16, 255 },     /* 100% cyan */
    { 145,  54,  34, 255 },     /* 100% green */
    { 106, 202, 222, 255 },     /* 100% magenta */
    {  81,  90, 240, 255 },     /* 100% red */
    {  41, 240, 110, 255 },     /* 100% blue */
};

static const uint8_t rainbowhd[7][4] = {
    { 180, 128, 128, 255 },     /* 75% white */
    { 168,  44, 136, 255 },     /* 75% yellow */
    { 145, 147,  44, 255 },     /* 75% cyan */
    { 133,  63,  52, 255 },     /* 75% green */
    {  63, 193, 204, 255 },     /* 75% magenta */
    {  51, 109, 212, 255 },     /* 75% red */
    {  28, 212, 120, 255 },     /* 75% blue */
};

static const uint8_t wobnair[7][4] = {
    {  35, 212, 114, 255 },     /* 75% blue */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    {  84, 184, 198, 255 },     /* 75% magenta */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    { 131, 156,  44, 255 },     /* 75% cyan */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    { 180, 128, 128, 255 },     /* 75% white */
};

static const uint8_t white[4] = { 235, 128, 128, 255 };

/* pluge pulses */
static const uint8_t neg4ire[4] = {  7, 128, 128, 255 };
static const uint8_t pos4ire[4] = { 24, 128, 128, 255 };

/* fudged Q/-I */
static const uint8_t i_pixel[4] = { 57, 156, 97, 255 };
static const uint8_t q_pixel[4] = { 44, 171, 147, 255 };

static const uint8_t gray40[4] = { 104, 128, 128, 255 };
static const uint8_t gray15[4] = {  49, 128, 128, 255 };
static const uint8_t   cyan[4] = { 188, 154,  16, 255 };
static const uint8_t yellow[4] = { 219,  16, 138, 255 };
static const uint8_t   blue[4] = {  32, 240, 118, 255 };
static const uint8_t    red[4] = {  63, 102, 240, 255 };
static const uint8_t black0[4] = {  16, 128, 128, 255 };
static const uint8_t black2[4] = {  20, 128, 128, 255 };
static const uint8_t black4[4] = {  25, 128, 128, 255 };
static const uint8_t   neg2[4] = {  12, 128, 128, 255 };
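
/* Fill an axis-aligned rectangle of the frame with one of the constant
 * colors above, clipping it against the frame borders and shifting the
 * coordinates by the chroma subsampling factors on the chroma planes.
 * All the *bars sources below are built from calls to this helper. */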
static void draw_bar(TestSourceContext *test, const uint8_t color[4],
                     int x, int y, int w, int h,
                     AVFrame *frame)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t *p, *p0;
    int plane;

    x = FFMIN(x, test->w - 1);
    y = FFMIN(y, test->h - 1);
    w = FFMAX(FFMIN(w, test->w - x), 0);
    h = FFMAX(FFMIN(h, test->h - y), 0);

    av_assert0(x + w <= test->w);
    av_assert0(y + h <= test->h);

    for (plane = 0; frame->data[plane]; plane++) {
        const int c = color[plane];
        const int linesize = frame->linesize[plane];
        int i, px, py, pw, ph;

        if (plane == 1 || plane == 2) {
            px = x >> desc->log2_chroma_w;
            pw = AV_CEIL_RSHIFT(w, desc->log2_chroma_w);
            py = y >> desc->log2_chroma_h;
            ph = AV_CEIL_RSHIFT(h, desc->log2_chroma_h);
        } else {
            px = x;
            pw = w;
            py = y;
            ph = h;
        }

        p0 = p = frame->data[plane] + py * linesize + px;
        memset(p, c, pw);
        p += linesize;
        for (i = 1; i < ph; i++, p += linesize)
            memcpy(p, p0, pw);
    }
}

static int smptebars_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
        AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
        AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad smptebars_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_props,
    },
    { NULL }
};

#if CONFIG_PAL75BARS_FILTER

#define pal75bars_options options
AVFILTER_DEFINE_CLASS(pal75bars);

static void pal75bars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, i, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->color_range = AVCOL_RANGE_MPEG;
    picref->colorspace = AVCOL_SPC_BT470BG;

    r_w = FFALIGN((test->w + 7) / 8, 1 << pixdesc->log2_chroma_w);

    draw_bar(test, white, x, 0, r_w, test->h, picref);
    x += r_w;
    for (i = 1; i < 7; i++) {
        draw_bar(test, rainbow[i], x, 0, r_w, test->h, picref);
        x += r_w;
    }
    draw_bar(test, black0, x, 0, r_w, test->h, picref);
}

static av_cold int pal75bars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = pal75bars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_pal75bars = {
    .name          = "pal75bars",
    .description   = NULL_IF_CONFIG_SMALL("Generate PAL 75% color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &pal75bars_class,
    .init          = pal75bars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = smptebars_outputs,
};

#endif  /* CONFIG_PAL75BARS_FILTER */

#if CONFIG_PAL100BARS_FILTER

#define pal100bars_options options
AVFILTER_DEFINE_CLASS(pal100bars);

static void pal100bars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, i, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->color_range = AVCOL_RANGE_MPEG;
    picref->colorspace = AVCOL_SPC_BT470BG;

    r_w = FFALIGN((test->w + 7) / 8, 1 << pixdesc->log2_chroma_w);

    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbow100[i], x, 0, r_w, test->h, picref);
        x += r_w;
    }
    draw_bar(test, black0, x, 0, r_w, test->h, picref);
}

static av_cold int pal100bars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = pal100bars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_pal100bars = {
    .name          = "pal100bars",
    .description   = NULL_IF_CONFIG_SMALL("Generate PAL 100% color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &pal100bars_class,
    .init          = pal100bars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = smptebars_outputs,
};

#endif  /* CONFIG_PAL100BARS_FILTER */

#if CONFIG_SMPTEBARS_FILTER

#define smptebars_options options
AVFILTER_DEFINE_CLASS(smptebars);

static void smptebars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, r_h, w_h, p_w, p_h, i, tmp, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->colorspace = AVCOL_SPC_BT470BG;

    r_w = FFALIGN((test->w + 6) / 7, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 2 / 3, 1 << pixdesc->log2_chroma_h);
    w_h = FFALIGN(test->h * 3 / 4 - r_h, 1 << pixdesc->log2_chroma_h);
    p_w = FFALIGN(r_w * 5 / 4, 1 << pixdesc->log2_chroma_w);
    p_h = test->h - w_h - r_h;

    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbow[i], x, 0,   r_w, r_h, picref);
        draw_bar(test, wobnair[i], x, r_h, r_w, w_h, picref);
        x += r_w;
    }
    x = 0;
    draw_bar(test, i_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, white, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, q_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    tmp = FFALIGN(5 * r_w - x, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, pos4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, test->w - x, p_h, picref);
}

static av_cold int smptebars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = smptebars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_smptebars = {
    .name          = "smptebars",
    .description   = NULL_IF_CONFIG_SMALL("Generate SMPTE color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &smptebars_class,
    .init          = smptebars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = smptebars_outputs,
};

#endif  /* CONFIG_SMPTEBARS_FILTER */

#if CONFIG_SMPTEHDBARS_FILTER

#define smptehdbars_options options
AVFILTER_DEFINE_CLASS(smptehdbars);

static void smptehdbars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int d_w, r_w, r_h, l_w, i, tmp, x = 0, y = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->colorspace = AVCOL_SPC_BT709;

    d_w = FFALIGN(test->w / 8, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 7 / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, gray40, x, 0, d_w, r_h, picref);
    x += d_w;

    r_w = FFALIGN((((test->w + 3) / 4) * 3) / 7, 1 << pixdesc->log2_chroma_w);
    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbowhd[i], x, 0, r_w, r_h, picref);
        x += r_w;
    }
    draw_bar(test, gray40, x, 0, test->w - x, r_h, picref);
    y = r_h;
    r_h = FFALIGN(test->h / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, cyan, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, i_pixel, x, y, r_w, r_h, picref);
    x += r_w;
    tmp = r_w * 6;
    draw_bar(test, rainbowhd[0], x, y, tmp, r_h, picref);
    x += tmp;
    l_w = x;
    draw_bar(test, blue, x, y, test->w - x, r_h, picref);
    y += r_h;
    draw_bar(test, yellow, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, q_pixel, x, y, r_w, r_h, picref);
    x += r_w;

    for (i = 0; i < tmp; i += 1 << pixdesc->log2_chroma_w) {
        uint8_t yramp[4] = {0};

        yramp[0] = i * 255 / tmp;
        yramp[1] = 128;
        yramp[2] = 128;
        yramp[3] = 255;

        draw_bar(test, yramp, x, y, 1 << pixdesc->log2_chroma_w, r_h, picref);
        x += 1 << pixdesc->log2_chroma_w;
    }
    draw_bar(test, red, x, y, test->w - x, r_h, picref);
    y += r_h;
    draw_bar(test, gray15, 0, y, d_w, test->h - y, picref);
    x = d_w;
    tmp = FFALIGN(r_w * 3 / 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, white, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 5 / 6, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black4, x, y, tmp, test->h - y, picref);
    x += tmp;
    r_w = l_w - x;
    draw_bar(test, black0, x, y, r_w, test->h - y, picref);
    x += r_w;
    draw_bar(test, gray15, x, y, test->w - x, test->h - y, picref);
}

static av_cold int smptehdbars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = smptehdbars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_smptehdbars = {
    .name          = "smptehdbars",
    .description   = NULL_IF_CONFIG_SMALL("Generate SMPTE HD color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &smptehdbars_class,
    .init          = smptehdbars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = smptebars_outputs,
};

#endif  /* CONFIG_SMPTEHDBARS_FILTER */
#endif  /* CONFIG_PAL75BARS_FILTER || CONFIG_PAL100BARS_FILTER || CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER */

#if CONFIG_ALLYUV_FILTER

#define allyuv_options &options[NOSIZE_OPTIONS_OFFSET]
AVFILTER_DEFINE_CLASS(allyuv);

static void allyuv_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    const int ys = frame->linesize[0];
    const int us = frame->linesize[1];
    const int vs = frame->linesize[2];
    int x, y, j;

    for (y = 0; y < 4096; y++) {
        for (x = 0; x < 2048; x++) {
            frame->data[0][y * ys + x] = ((x / 8) % 256);
            frame->data[0][y * ys + 4095 - x] = ((x / 8) % 256);
        }

        for (x = 0; x < 2048; x+=8) {
            for (j = 0; j < 8; j++) {
                frame->data[1][vs * y + x + j] = (y%16 + (j % 8) * 16);
                frame->data[1][vs * y + 4095 - x - j] = (128 + y%16 + (j % 8) * 16);
            }
        }

        for (x = 0; x < 4096; x++)
            frame->data[2][y * us + x] = 256 * y / 4096;
    }
}

static av_cold int allyuv_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->w = test->h = 4096;
    test->draw_once = 1;
    test->fill_picture_fn = allyuv_fill_picture;
    return init(ctx);
}

static int allyuv_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_GBRP,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_allyuv_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_allyuv = {
    .name          = "allyuv",
    .description   = NULL_IF_CONFIG_SMALL("Generate all yuv colors."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &allyuv_class,
    .init          = allyuv_init,
    .uninit        = uninit,
    .query_formats = allyuv_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_allyuv_outputs,
};

#endif /* CONFIG_ALLYUV_FILTER */

#if CONFIG_ALLRGB_FILTER

#define allrgb_options &options[NOSIZE_OPTIONS_OFFSET]
AVFILTER_DEFINE_CLASS(allrgb);
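
/* One 4096x4096 RGB24 frame holds exactly 2^24 pixels, so every 24-bit RGB
 * value appears exactly once: R is the low byte of x, G the low byte of y,
 * and B combines the high nibbles of x and y. */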
static void allrgb_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    unsigned x, y;
    const int linesize = frame->linesize[0];
    uint8_t *line = frame->data[0];

    for (y = 0; y < 4096; y++) {
        uint8_t *dst = line;

        for (x = 0; x < 4096; x++) {
            *dst++ = x;
            *dst++ = y;
            *dst++ = (x >> 8) | ((y >> 8) << 4);
        }
        line += linesize;
    }
}

static av_cold int allrgb_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->w = test->h = 4096;
    test->draw_once = 1;
    test->fill_picture_fn = allrgb_fill_picture;
    return init(ctx);
}

static int allrgb_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    ff_fill_rgba_map(test->rgba_map, outlink->format);
    return config_props(outlink);
}

static int allrgb_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_allrgb_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = allrgb_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_allrgb = {
    .name          = "allrgb",
    .description   = NULL_IF_CONFIG_SMALL("Generate all RGB colors."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &allrgb_class,
    .init          = allrgb_init,
    .uninit        = uninit,
    .query_formats = allrgb_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_allrgb_outputs,
};

#endif /* CONFIG_ALLRGB_FILTER */