You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

1810 lines
56KB

  1. /*
  2. * Copyright (c) 2007 Nicolas George <nicolas.george@normalesup.org>
  3. * Copyright (c) 2011 Stefano Sabatini
  4. * Copyright (c) 2012 Paul B Mahol
  5. *
  6. * This file is part of FFmpeg.
  7. *
  8. * FFmpeg is free software; you can redistribute it and/or
  9. * modify it under the terms of the GNU Lesser General Public
  10. * License as published by the Free Software Foundation; either
  11. * version 2.1 of the License, or (at your option) any later version.
  12. *
  13. * FFmpeg is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  16. * Lesser General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU Lesser General Public
  19. * License along with FFmpeg; if not, write to the Free Software
  20. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  21. */
  22. /**
  23. * @file
  24. * Misc test sources.
  25. *
  26. * testsrc is based on the test pattern generator demuxer by Nicolas George:
  27. * http://lists.ffmpeg.org/pipermail/ffmpeg-devel/2007-October/037845.html
  28. *
  29. * rgbtestsrc is ported from MPlayer libmpcodecs/vf_rgbtest.c by
  30. * Michael Niedermayer.
  31. *
  32. * allyuv, smptebars and smptehdbars are by Paul B Mahol.
  33. */
  34. #include <float.h>
  35. #include "libavutil/avassert.h"
  36. #include "libavutil/common.h"
  37. #include "libavutil/ffmath.h"
  38. #include "libavutil/opt.h"
  39. #include "libavutil/imgutils.h"
  40. #include "libavutil/intreadwrite.h"
  41. #include "libavutil/parseutils.h"
  42. #include "libavutil/xga_font_data.h"
  43. #include "avfilter.h"
  44. #include "drawutils.h"
  45. #include "filters.h"
  46. #include "formats.h"
  47. #include "internal.h"
  48. #include "video.h"
/* Private context shared by every test source in this file; each source
 * uses the common fields plus the subset noted below. */
typedef struct TestSourceContext {
    const AVClass *class;
    int w, h;                   ///< output frame dimensions
    unsigned int nb_frame;      ///< number of frames emitted so far
    AVRational time_base, frame_rate;
    int64_t pts;                ///< PTS of the next frame, in time_base units
    int64_t duration;           ///< duration expressed in microseconds
    AVRational sar;             ///< sample aspect ratio
    int draw_once;              ///< draw only the first frame, always put out the same picture
    int draw_once_reset;        ///< draw only the first frame or in case of reset
    AVFrame *picref;            ///< cached reference containing the painted picture
    void (* fill_picture_fn)(AVFilterContext *ctx, AVFrame *frame);

    /* only used by testsrc */
    int nb_decimals;            ///< number of fractional-second digits to draw

    /* only used by testsrc2 */
    int alpha;                  ///< global opacity applied to all drawing

    /* only used by color */
    FFDrawContext draw;
    FFDrawColor color;
    uint8_t color_rgba[4];      ///< fill color as RGBA bytes

    /* only used by rgbtest */
    uint8_t rgba_map[4];        ///< component order for the output pixel format

    /* only used by haldclut */
    int level;                  ///< Hald CLUT level; frame side length is level^3
} TestSourceContext;
#define OFFSET(x) offsetof(TestSourceContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
/* FLAGSR additionally allows changing the option at runtime via commands */
#define FLAGSR AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM

#define SIZE_OPTIONS \
    { "size",     "set video size",     OFFSET(w),        AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\
    { "s",        "set video size",     OFFSET(w),        AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\

#define COMMON_OPTIONS_NOSIZE \
    { "rate",     "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
    { "r",        "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
    { "duration", "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "d",        "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "sar",      "set video sample aspect ratio", OFFSET(sar), AV_OPT_TYPE_RATIONAL, {.dbl = 1}, 0, INT_MAX, FLAGS },

#define COMMON_OPTIONS SIZE_OPTIONS COMMON_OPTIONS_NOSIZE

#define NOSIZE_OPTIONS_OFFSET 2
/* Filters using COMMON_OPTIONS_NOSIZE also use the following options
 * via &options[NOSIZE_OPTIONS_OFFSET]. So don't break it. */
static const AVOption options[] = {
    COMMON_OPTIONS
    { NULL }
};
  94. static av_cold int init(AVFilterContext *ctx)
  95. {
  96. TestSourceContext *test = ctx->priv;
  97. test->time_base = av_inv_q(test->frame_rate);
  98. test->nb_frame = 0;
  99. test->pts = 0;
  100. av_log(ctx, AV_LOG_VERBOSE, "size:%dx%d rate:%d/%d duration:%f sar:%d/%d\n",
  101. test->w, test->h, test->frame_rate.num, test->frame_rate.den,
  102. test->duration < 0 ? -1 : (double)test->duration/1000000,
  103. test->sar.num, test->sar.den);
  104. return 0;
  105. }
/* Release the cached draw-once picture, if any (av_frame_free(NULL*) is safe). */
static av_cold void uninit(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    av_frame_free(&test->picref);
}
  111. static int config_props(AVFilterLink *outlink)
  112. {
  113. TestSourceContext *test = outlink->src->priv;
  114. outlink->w = test->w;
  115. outlink->h = test->h;
  116. outlink->sample_aspect_ratio = test->sar;
  117. outlink->frame_rate = test->frame_rate;
  118. outlink->time_base = test->time_base;
  119. return 0;
  120. }
/* Produce one frame per call when the downstream filter requests output.
 * Handles the configured duration, the draw-once caching scheme and
 * per-frame metadata. */
static int activate(AVFilterContext *ctx)
{
    AVFilterLink *outlink = ctx->outputs[0];
    TestSourceContext *test = ctx->priv;
    AVFrame *frame;

    /* only work when the next filter has asked for a frame */
    if (!ff_outlink_frame_wanted(outlink))
        return FFERROR_NOT_READY;
    /* signal EOF once the requested duration (AV_TIME_BASE units) has elapsed */
    if (test->duration >= 0 &&
        av_rescale_q(test->pts, test->time_base, AV_TIME_BASE_Q) >= test->duration) {
        ff_outlink_set_status(outlink, AVERROR_EOF, test->pts);
        return 0;
    }
    if (test->draw_once) {
        /* static sources paint one cached picture and clone it per frame */
        if (test->draw_once_reset) {
            /* e.g. the color source changed its color via a command */
            av_frame_free(&test->picref);
            test->draw_once_reset = 0;
        }
        if (!test->picref) {
            test->picref =
                ff_get_video_buffer(outlink, test->w, test->h);
            if (!test->picref)
                return AVERROR(ENOMEM);
            test->fill_picture_fn(outlink->src, test->picref);
        }
        frame = av_frame_clone(test->picref);
    } else
        frame = ff_get_video_buffer(outlink, test->w, test->h);

    if (!frame)
        return AVERROR(ENOMEM);
    frame->pts                 = test->pts;
    frame->key_frame           = 1;
    frame->interlaced_frame    = 0;
    frame->pict_type           = AV_PICTURE_TYPE_I;
    frame->sample_aspect_ratio = test->sar;
    /* animated sources repaint every frame */
    if (!test->draw_once)
        test->fill_picture_fn(outlink->src, frame);
    test->pts++;
    test->nb_frame++;

    return ff_filter_frame(outlink, frame);
}
  161. #if CONFIG_COLOR_FILTER
/* "color"/"c" may be changed at runtime (FLAGSR) via process_command. */
static const AVOption color_options[] = {
    { "color", "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, 0, 0, FLAGSR },
    { "c",     "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, 0, 0, FLAGSR },
    COMMON_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(color);
  169. static void color_fill_picture(AVFilterContext *ctx, AVFrame *picref)
  170. {
  171. TestSourceContext *test = ctx->priv;
  172. ff_fill_rectangle(&test->draw, &test->color,
  173. picref->data, picref->linesize,
  174. 0, 0, test->w, test->h);
  175. }
  176. static av_cold int color_init(AVFilterContext *ctx)
  177. {
  178. TestSourceContext *test = ctx->priv;
  179. test->fill_picture_fn = color_fill_picture;
  180. test->draw_once = 1;
  181. return init(ctx);
  182. }
/* Accept every pixel format FFDraw can render into. */
static int color_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}
  187. static int color_config_props(AVFilterLink *inlink)
  188. {
  189. AVFilterContext *ctx = inlink->src;
  190. TestSourceContext *test = ctx->priv;
  191. int ret;
  192. ff_draw_init(&test->draw, inlink->format, 0);
  193. ff_draw_color(&test->draw, &test->color, test->color_rgba);
  194. test->w = ff_draw_round_to_sub(&test->draw, 0, -1, test->w);
  195. test->h = ff_draw_round_to_sub(&test->draw, 1, -1, test->h);
  196. if (av_image_check_size(test->w, test->h, 0, ctx) < 0)
  197. return AVERROR(EINVAL);
  198. if ((ret = config_props(inlink)) < 0)
  199. return ret;
  200. return 0;
  201. }
  202. static int color_process_command(AVFilterContext *ctx, const char *cmd, const char *args,
  203. char *res, int res_len, int flags)
  204. {
  205. TestSourceContext *test = ctx->priv;
  206. int ret;
  207. ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);
  208. if (ret < 0)
  209. return ret;
  210. ff_draw_color(&test->draw, &test->color, test->color_rgba);
  211. test->draw_once_reset = 1;
  212. return 0;
  213. }
static const AVFilterPad color_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = color_config_props,
    },
    { NULL }
};

/* Source filter: no inputs, one video output. */
AVFilter ff_vsrc_color = {
    .name            = "color",
    .description     = NULL_IF_CONFIG_SMALL("Provide an uniformly colored input."),
    .priv_class      = &color_class,
    .priv_size       = sizeof(TestSourceContext),
    .init            = color_init,
    .uninit          = uninit,
    .activate        = activate,
    .query_formats   = color_query_formats,
    .inputs          = NULL,
    .outputs         = color_outputs,
    .process_command = color_process_command,
};
  235. #endif /* CONFIG_COLOR_FILTER */
  236. #if CONFIG_HALDCLUTSRC_FILTER
/* "level" selects the CLUT density; the (square) frame side is level^3. */
static const AVOption haldclutsrc_options[] = {
    { "level", "set level", OFFSET(level), AV_OPT_TYPE_INT, {.i64 = 6}, 2, 16, FLAGS },
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(haldclutsrc);
/* Paint an identity Hald CLUT: each pixel encodes its own position in the
 * (i, j, k) color lattice, scanned row by row into a level^3 x level^3 frame. */
static void haldclutsrc_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    int i, j, k, x = 0, y = 0, is16bit = 0, step;
    uint32_t alpha = 0;
    const TestSourceContext *hc = ctx->priv;
    int level = hc->level;
    float scale;
    const int w = frame->width;
    const int h = frame->height;
    const uint8_t *data = frame->data[0];
    const int linesize = frame->linesize[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t rgba_map[4];

    /* haldclutsrc_config_props guarantees this geometry */
    av_assert0(w == h && w == level*level*level);

    ff_fill_rgba_map(rgba_map, frame->format);

    /* pick component depth and the opaque alpha value for the format */
    switch (frame->format) {
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_BGR48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_BGRA64:
        is16bit = 1;
        alpha = 0xffff;
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        alpha = 0xff;
        break;
    }

    /* step = components per pixel; scale maps lattice index -> full range */
    step  = av_get_padded_bits_per_pixel(desc) >> (3 + is16bit);
    scale = ((float)(1 << (8*(is16bit+1))) - 1) / (level*level - 1);

/* Store one lattice point at the current (x, y), honoring component order. */
#define LOAD_CLUT(nbits) do { \
    uint##nbits##_t *dst = ((uint##nbits##_t *)(data + y*linesize)) + x*step; \
    dst[rgba_map[0]] = av_clip_uint##nbits(i * scale); \
    dst[rgba_map[1]] = av_clip_uint##nbits(j * scale); \
    dst[rgba_map[2]] = av_clip_uint##nbits(k * scale); \
    if (step == 4) \
        dst[rgba_map[3]] = alpha; \
} while (0)

    /* each lattice axis has level^2 steps */
    level *= level;
    for (k = 0; k < level; k++) {
        for (j = 0; j < level; j++) {
            for (i = 0; i < level; i++) {
                if (!is16bit)
                    LOAD_CLUT(8);
                else
                    LOAD_CLUT(16);
                /* advance in raster order across the square frame */
                if (++x == w) {
                    x = 0;
                    y++;
                }
            }
        }
    }
}
  299. static av_cold int haldclutsrc_init(AVFilterContext *ctx)
  300. {
  301. TestSourceContext *hc = ctx->priv;
  302. hc->fill_picture_fn = haldclutsrc_fill_picture;
  303. hc->draw_once = 1;
  304. return init(ctx);
  305. }
static int haldclutsrc_query_formats(AVFilterContext *ctx)
{
    /* packed RGB formats only: the fill loop addresses components via rgba_map */
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24,  AV_PIX_FMT_BGR24,
        AV_PIX_FMT_RGBA,   AV_PIX_FMT_BGRA,
        AV_PIX_FMT_ARGB,   AV_PIX_FMT_ABGR,
        AV_PIX_FMT_0RGB,   AV_PIX_FMT_0BGR,
        AV_PIX_FMT_RGB0,   AV_PIX_FMT_BGR0,
        AV_PIX_FMT_RGB48,  AV_PIX_FMT_BGR48,
        AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);

    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}
  323. static int haldclutsrc_config_props(AVFilterLink *outlink)
  324. {
  325. AVFilterContext *ctx = outlink->src;
  326. TestSourceContext *hc = ctx->priv;
  327. hc->w = hc->h = hc->level * hc->level * hc->level;
  328. return config_props(outlink);
  329. }
static const AVFilterPad haldclutsrc_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = haldclutsrc_config_props,
    },
    { NULL }
};

/* Source filter emitting an identity Hald CLUT picture. */
AVFilter ff_vsrc_haldclutsrc = {
    .name          = "haldclutsrc",
    .description   = NULL_IF_CONFIG_SMALL("Provide an identity Hald CLUT."),
    .priv_class    = &haldclutsrc_class,
    .priv_size     = sizeof(TestSourceContext),
    .init          = haldclutsrc_init,
    .uninit        = uninit,
    .query_formats = haldclutsrc_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = haldclutsrc_outputs,
};
  350. #endif /* CONFIG_HALDCLUTSRC_FILTER */
  351. #if CONFIG_NULLSRC_FILTER
  352. #define nullsrc_options options
  353. AVFILTER_DEFINE_CLASS(nullsrc);
/* nullsrc deliberately leaves the frame buffer unpainted. */
static void nullsrc_fill_picture(AVFilterContext *ctx, AVFrame *picref) { }

static av_cold int nullsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = nullsrc_fill_picture;
    return init(ctx);
}
static const AVFilterPad nullsrc_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props,
    },
    { NULL },
};

/* Source filter emitting frames whose contents are left untouched. */
AVFilter ff_vsrc_nullsrc = {
    .name        = "nullsrc",
    .description = NULL_IF_CONFIG_SMALL("Null video source, return unprocessed video frames."),
    .init        = nullsrc_init,
    .uninit      = uninit,
    .activate    = activate,
    .priv_size   = sizeof(TestSourceContext),
    .priv_class  = &nullsrc_class,
    .inputs      = NULL,
    .outputs     = nullsrc_outputs,
};
  380. #endif /* CONFIG_NULLSRC_FILTER */
  381. #if CONFIG_TESTSRC_FILTER
static const AVOption testsrc_options[] = {
    COMMON_OPTIONS
    /* number of fractional-second digits appended to the on-screen counter */
    { "decimals", "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
    { "n",        "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc);
  389. /**
  390. * Fill a rectangle with value val.
  391. *
  392. * @param val the RGB value to set
  393. * @param dst pointer to the destination buffer to fill
  394. * @param dst_linesize linesize of destination
  395. * @param segment_width width of the segment
  396. * @param x horizontal coordinate where to draw the rectangle in the destination buffer
  397. * @param y vertical coordinate where to draw the rectangle in the destination buffer
  398. * @param w width of the rectangle to draw, expressed as a number of segment_width units
  399. * @param h height of the rectangle to draw, expressed as a number of segment_width units
  400. */
  401. static void draw_rectangle(unsigned val, uint8_t *dst, int dst_linesize, int segment_width,
  402. int x, int y, int w, int h)
  403. {
  404. int i;
  405. int step = 3;
  406. dst += segment_width * (step * x + y * dst_linesize);
  407. w *= segment_width * step;
  408. h *= segment_width;
  409. for (i = 0; i < h; i++) {
  410. memset(dst, val, w);
  411. dst += dst_linesize;
  412. }
  413. }
/* Draw one decimal digit as a seven-segment figure on a 7x13 grid of
 * segment_width units, white segments on a black background. */
static void draw_digit(int digit, uint8_t *dst, int dst_linesize,
                       int segment_width)
{
#define TOP_HBAR        1
#define MID_HBAR        2
#define BOT_HBAR        4
#define LEFT_TOP_VBAR   8
#define LEFT_BOT_VBAR  16
#define RIGHT_TOP_VBAR 32
#define RIGHT_BOT_VBAR 64
    /* segment geometry; array order matches the bit values above */
    struct segments {
        int x, y, w, h;
    } segments[] = {
        { 1,  0, 5, 1 }, /* TOP_HBAR */
        { 1,  6, 5, 1 }, /* MID_HBAR */
        { 1, 12, 5, 1 }, /* BOT_HBAR */
        { 0,  1, 1, 5 }, /* LEFT_TOP_VBAR */
        { 0,  7, 1, 5 }, /* LEFT_BOT_VBAR */
        { 6,  1, 1, 5 }, /* RIGHT_TOP_VBAR */
        { 6,  7, 1, 5 }  /* RIGHT_BOT_VBAR */
    };
    /* which segments are lit for each digit 0-9 */
    static const unsigned char masks[10] = {
        /* 0 */ TOP_HBAR         |BOT_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 1 */                                                        RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 2 */ TOP_HBAR|MID_HBAR|BOT_HBAR|LEFT_BOT_VBAR              |RIGHT_TOP_VBAR,
        /* 3 */ TOP_HBAR|MID_HBAR|BOT_HBAR                            |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 4 */          MID_HBAR         |LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 5 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR                             |RIGHT_BOT_VBAR,
        /* 6 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR               |RIGHT_BOT_VBAR,
        /* 7 */ TOP_HBAR                                              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 8 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 9 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
    };
    unsigned mask = masks[digit];
    int i;

    /* clear the digit cell, then draw the lit segments */
    draw_rectangle(0, dst, dst_linesize, segment_width, 0, 0, 8, 13);
    for (i = 0; i < FF_ARRAY_ELEMS(segments); i++)
        if (mask & (1<<i))
            draw_rectangle(255, dst, dst_linesize, segment_width,
                           segments[i].x, segments[i].y, segments[i].w, segments[i].h);
}
/* gradient period: 6 ramps of 256 steps (see the ternary chains below) */
#define GRADIENT_SIZE (6 * 256)

/* Paint the classic testsrc pattern into an RGB24 frame: colored bars with
 * an inverted circle, a sliding color gradient strip, and a timestamp
 * rendered with seven-segment digits. */
static void test_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    uint8_t *p, *p0;
    int x, y;
    int color, color_rest;
    int icolor;
    int radius;
    int quad0, quad;
    int dquad_x, dquad_y;
    int grad, dgrad, rgrad, drgrad;
    int seg_size;
    int second;
    int i;
    uint8_t *data = frame->data[0];
    int width  = frame->width;
    int height = frame->height;

    /* draw colored bars and circle */
    /* quad tracks x^2 + y^2 - radius^2 incrementally (dquad_* are the
     * first differences), so no multiply is needed per pixel */
    radius = (width + height) / 4;
    quad0 = width * width / 4 + height * height / 4 - radius * radius;
    dquad_y = 1 - height;
    p0 = data;
    for (y = 0; y < height; y++) {
        p = p0;
        color = 0;
        color_rest = 0;
        quad = quad0;
        dquad_x = 1 - width;
        for (x = 0; x < width; x++) {
            icolor = color;
            if (quad < 0)
                icolor ^= 7; /* invert the color inside the circle */
            quad += dquad_x;
            dquad_x += 2;
            /* bits of icolor select the R/G/B channels of the bar color */
            *(p++) = icolor & 1 ? 255 : 0;
            *(p++) = icolor & 2 ? 255 : 0;
            *(p++) = icolor & 4 ? 255 : 0;
            /* advance to the next of 8 bars via fixed-point accumulation */
            color_rest += 8;
            if (color_rest >= width) {
                color_rest -= width;
                color++;
            }
        }
        quad0 += dquad_y;
        dquad_y += 2;
        p0 += frame->linesize[0];
    }

    /* draw sliding color line */
    /* the gradient phase advances with the frame count; grad/rgrad form a
     * fixed-point step of GRADIENT_SIZE/width per pixel */
    p0 = p = data + frame->linesize[0] * (height * 3/4);
    grad = (256 * test->nb_frame * test->time_base.num / test->time_base.den) %
        GRADIENT_SIZE;
    rgrad = 0;
    dgrad = GRADIENT_SIZE / width;
    drgrad = GRADIENT_SIZE % width;
    for (x = 0; x < width; x++) {
        /* piecewise-linear R, G and B ramps over the 6*256 period */
        *(p++) =
            grad < 256 || grad >= 5 * 256 ? 255 :
            grad >= 2 * 256 && grad < 4 * 256 ? 0 :
            grad < 2 * 256 ? 2 * 256 - 1 - grad : grad - 4 * 256;
        *(p++) =
            grad >= 4 * 256 ? 0 :
            grad >= 1 * 256 && grad < 3 * 256 ? 255 :
            grad < 1 * 256 ? grad : 4 * 256 - 1 - grad;
        *(p++) =
            grad < 2 * 256 ? 0 :
            grad >= 3 * 256 && grad < 5 * 256 ? 255 :
            grad < 3 * 256 ? grad - 2 * 256 : 6 * 256 - 1 - grad;
        grad += dgrad;
        rgrad += drgrad;
        if (rgrad >= GRADIENT_SIZE) {
            grad++;
            rgrad -= GRADIENT_SIZE;
        }
        if (grad >= GRADIENT_SIZE)
            grad -= GRADIENT_SIZE;
    }
    /* replicate the gradient row over height/8 lines */
    p = p0;
    for (y = height / 8; y > 0; y--) {
        memcpy(p+frame->linesize[0], p, 3 * width);
        p += frame->linesize[0];
    }

    /* draw digits */
    seg_size = width / 80;
    if (seg_size >= 1 && height >= 13 * seg_size) {
        int64_t p10decimals = 1;
        double time = av_q2d(test->time_base) * test->nb_frame *
                      ff_exp10(test->nb_decimals);
        /* skip drawing once the scaled time no longer fits in an int */
        if (time >= INT_MAX)
            return;

        for (x = 0; x < test->nb_decimals; x++)
            p10decimals *= 10;

        /* elapsed time in units of 10^-nb_decimals seconds */
        second = av_rescale_rnd(test->nb_frame * test->time_base.num, p10decimals, test->time_base.den, AV_ROUND_ZERO);
        x = width - (width - seg_size * 64) / 2;
        y = (height - seg_size * 13) / 2;
        p = data + (x*3 + y * frame->linesize[0]);
        /* draw up to 8 digits, right to left, least significant first */
        for (i = 0; i < 8; i++) {
            p -= 3 * 8 * seg_size;
            draw_digit(second % 10, p, frame->linesize[0], seg_size);
            second /= 10;
            if (second == 0)
                break;
        }
    }
}
  560. static av_cold int test_init(AVFilterContext *ctx)
  561. {
  562. TestSourceContext *test = ctx->priv;
  563. test->fill_picture_fn = test_fill_picture;
  564. return init(ctx);
  565. }
static int test_query_formats(AVFilterContext *ctx)
{
    /* the pattern is painted directly as packed RGB24 */
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);

    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}
static const AVFilterPad avfilter_vsrc_testsrc_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props,
    },
    { NULL }
};

/* Source filter for the classic test pattern. */
AVFilter ff_vsrc_testsrc = {
    .name          = "testsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &testsrc_class,
    .init          = test_init,
    .uninit        = uninit,
    .query_formats = test_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_testsrc_outputs,
};
  596. #endif /* CONFIG_TESTSRC_FILTER */
  597. #if CONFIG_TESTSRC2_FILTER
static const AVOption testsrc2_options[] = {
    COMMON_OPTIONS
    /* global opacity applied to everything testsrc2 draws */
    { "alpha", "set global alpha (opacity)", OFFSET(alpha), AV_OPT_TYPE_INT, {.i64 = 255}, 0, 255, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc2);
/* Resolve a packed 0xAARRGGBB value into an FFDrawColor for s->draw. */
static void set_color(TestSourceContext *s, FFDrawColor *color, uint32_t argb)
{
    uint8_t rgba[4] = { (argb >> 16) & 0xFF,    /* R */
                        (argb >>  8) & 0xFF,    /* G */
                        (argb >>  0) & 0xFF,    /* B */
                        (argb >> 24) & 0xFF, }; /* A */

    ff_draw_color(&s->draw, color, rgba);
}
  612. static uint32_t color_gradient(unsigned index)
  613. {
  614. unsigned si = index & 0xFF, sd = 0xFF - si;
  615. switch (index >> 8) {
  616. case 0: return 0xFF0000 + (si << 8);
  617. case 1: return 0x00FF00 + (sd << 16);
  618. case 2: return 0x00FF00 + (si << 0);
  619. case 3: return 0x0000FF + (sd << 8);
  620. case 4: return 0x0000FF + (si << 16);
  621. case 5: return 0xFF0000 + (sd << 0);
  622. }
  623. av_assert0(0);
  624. }
/* Render a NUL-terminated string at (x0, y0) using the built-in 8x16 VGA
 * bitmap font; '\n' returns to x0 and moves down one 16-pixel text row. */
static void draw_text(TestSourceContext *s, AVFrame *frame, FFDrawColor *color,
                      int x0, int y0, const uint8_t *text)
{
    int x = x0;

    for (; *text; text++) {
        if (*text == '\n') {
            x = x0;
            y0 += 16;
            continue;
        }
        /* each glyph is 16 bytes of 1-bit-per-pixel 8x16 mask data */
        ff_blend_mask(&s->draw, color, frame->data, frame->linesize,
                      frame->width, frame->height,
                      avpriv_vga16_font + *text * 16, 1, 8, 16, 0, 0, x, y0);
        x += 8;
    }
}
/* Paint the testsrc2 pattern: colored background bars, a moving oblique
 * gradient band, animated clock hands, beating rectangles, a noisy
 * checkerboard, a bouncing square and a time/frame-number overlay.
 * All drawing goes through FFDraw so any supported format works. */
static void test2_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *s = ctx->priv;
    FFDrawColor color;
    unsigned alpha = (uint32_t)s->alpha << 24; /* global opacity in the A byte */

    /* colored background */
    {
        unsigned i, x = 0, x2;

        x = 0;
        /* six vertical bands; bits of i select the R/G/B channels */
        for (i = 1; i < 7; i++) {
            x2 = av_rescale(i, s->w, 6);
            x2 = ff_draw_round_to_sub(&s->draw, 0, 0, x2);
            set_color(s, &color, ((i & 1) ? 0xFF0000 : 0) |
                                 ((i & 2) ? 0x00FF00 : 0) |
                                 ((i & 4) ? 0x0000FF : 0) |
                                 alpha);
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x, 0, x2 - x, frame->height);
            x = x2;
        }
    }

    /* oblique gradient */
    /* note: too slow if using blending */
    if (s->h >= 64) {
        unsigned x, dx, y0, y, g0, g;

        dx = ff_draw_round_to_sub(&s->draw, 0, +1, 1);
        /* band offset and hue phase both advance with the pts */
        y0 = av_rescale_q(s->pts, s->time_base, av_make_q(2, s->h - 16));
        g0 = av_rescale_q(s->pts, s->time_base, av_make_q(1, 128));
        for (x = 0; x < s->w; x += dx) {
            g = (av_rescale(x, 6 * 256, s->w) + g0) % (6 * 256);
            set_color(s, &color, color_gradient(g) | alpha);
            /* triangle-wave vertical placement of the 16-pixel band */
            y = y0 + av_rescale(x, s->h / 2, s->w);
            y %= 2 * (s->h - 16);
            if (y > s->h - 16)
                y = 2 * (s->h - 16) - y;
            y = ff_draw_round_to_sub(&s->draw, 1, 0, y);
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x, y, dx, 16);
        }
    }

    /* top right: draw clock hands */
    if (s->w >= 64 && s->h >= 64) {
        int l = (FFMIN(s->w, s->h) - 32) >> 1;
        int steps = FFMAX(4, l >> 5);
        int xc = (s->w >> 2) + (s->w >> 1);
        int yc = (s->h >> 2);
        int cycle = l << 2;
        int pos, xh, yh;
        int c, i;

        /* three hands, one per color channel, each at a different speed */
        for (c = 0; c < 3; c++) {
            set_color(s, &color, (0xBBBBBB ^ (0xFF << (c << 3))) | alpha);
            pos = av_rescale_q(s->pts, s->time_base, av_make_q(64 >> (c << 1), cycle)) % cycle;
            /* trace the perimeter of an l-sized square around the center */
            xh = pos < 1 * l ? pos :
                 pos < 2 * l ? l :
                 pos < 3 * l ? 3 * l - pos : 0;
            yh = pos < 1 * l ? 0 :
                 pos < 2 * l ? pos - l :
                 pos < 3 * l ? l :
                               cycle - pos;
            xh -= l >> 1;
            yh -= l >> 1;
            /* draw the hand as a row of 8x8 squares from center to tip */
            for (i = 1; i <= steps; i++) {
                int x = av_rescale(xh, i, steps) + xc;
                int y = av_rescale(yh, i, steps) + yc;

                x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
                y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
                ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                                  x, y, 8, 8);
            }
        }
    }

    /* bottom left: beating rectangles */
    if (s->w >= 64 && s->h >= 64) {
        int l = (FFMIN(s->w, s->h) - 16) >> 2;
        int cycle = l << 3;
        int xc = (s->w >> 2);
        int yc = (s->h >> 2) + (s->h >> 1);
        int xm1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 8);
        int xm2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 8);
        int ym1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 8);
        int ym2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 8);
        int size, step, x1, x2, y1, y2;

        /* size sweeps up and down; step selects which rectangle(s) beat */
        size = av_rescale_q(s->pts, s->time_base, av_make_q(4, cycle));
        step = size / l;
        size %= l;
        if (step & 1)
            size = l - size;
        step = (step >> 1) & 3;
        set_color(s, &color, 0xFF808080);
        x1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 4 - size);
        x2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 4 + size);
        y1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 4 - size);
        y2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 4 + size);
        if (step == 0 || step == 2)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x1, ym1, x2 - x1, ym2 - ym1);
        if (step == 1 || step == 2)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              xm1, y1, xm2 - xm1, y2 - y1);
        if (step == 3)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x1, y1, x2 - x1, y2 - y1);
    }

    /* bottom right: checker with random noise */
    {
        unsigned xmin = av_rescale(5, s->w, 8);
        unsigned xmax = av_rescale(7, s->w, 8);
        unsigned ymin = av_rescale(5, s->h, 8);
        unsigned ymax = av_rescale(7, s->h, 8);
        unsigned x, y, i, r;
        uint8_t alpha[256];

        /* seed a simple LCG with the pts so the noise is per-frame */
        r = s->pts;
        for (y = ymin; y + 15 < ymax; y += 16) {
            for (x = xmin; x + 15 < xmax; x += 16) {
                if ((x ^ y) & 16)
                    continue; /* skip every other 16x16 cell */
                for (i = 0; i < 256; i++) {
                    r = r * 1664525 + 1013904223;
                    alpha[i] = r >> 24;
                }
                set_color(s, &color, 0xFF00FF80);
                ff_blend_mask(&s->draw, &color, frame->data, frame->linesize,
                              frame->width, frame->height,
                              alpha, 16, 16, 16, 3, 0, x, y);
            }
        }
    }

    /* bouncing square */
    if (s->w >= 16 && s->h >= 16) {
        unsigned w = s->w - 8;
        unsigned h = s->h - 8;
        /* triangle-wave trajectories with incommensurate periods */
        unsigned x = av_rescale_q(s->pts, s->time_base, av_make_q(233, 55 * w)) % (w << 1);
        unsigned y = av_rescale_q(s->pts, s->time_base, av_make_q(233, 89 * h)) % (h << 1);

        if (x > w)
            x = (w << 1) - x;
        if (y > h)
            y = (h << 1) - y;
        x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
        y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
        set_color(s, &color, 0xFF8000FF);
        ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                          x, y, 8, 8);
    }

    /* top right: draw frame time and frame number */
    {
        char buf[256];
        unsigned time;

        /* time of day in milliseconds, wrapping every 24h */
        time = av_rescale_q(s->pts, s->time_base, av_make_q(1, 1000)) % 86400000;
        set_color(s, &color, 0xC0000000);
        ff_blend_rectangle(&s->draw, &color, frame->data, frame->linesize,
                           frame->width, frame->height,
                           2, 2, 100, 36);
        set_color(s, &color, 0xFFFF8000);
        snprintf(buf, sizeof(buf), "%02d:%02d:%02d.%03d\n%12"PRIi64,
                 time / 3600000, (time / 60000) % 60, (time / 1000) % 60,
                 time % 1000, s->pts);
        draw_text(s, frame, &color, 4, 4, buf);
    }
}
  800. static av_cold int test2_init(AVFilterContext *ctx)
  801. {
  802. TestSourceContext *s = ctx->priv;
  803. s->fill_picture_fn = test2_fill_picture;
  804. return init(ctx);
  805. }
/* testsrc2 draws entirely through FFDraw, so accept whatever it supports. */
static int test2_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}
static int test2_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    TestSourceContext *s = ctx->priv;

    /* query_formats only offered FFDraw-supported formats, so this
     * initialization is expected to always succeed */
    av_assert0(ff_draw_init(&s->draw, inlink->format, 0) >= 0);
    /* round the requested size down to the chroma subsampling grid */
    s->w = ff_draw_round_to_sub(&s->draw, 0, -1, s->w);
    s->h = ff_draw_round_to_sub(&s->draw, 1, -1, s->h);
    if (av_image_check_size(s->w, s->h, 0, ctx) < 0)
        return AVERROR(EINVAL);
    return config_props(inlink);
}
static const AVFilterPad avfilter_vsrc_testsrc2_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = test2_config_props,
    },
    { NULL }
};

/* Source filter for the second, FFDraw-based test pattern. */
AVFilter ff_vsrc_testsrc2 = {
    .name          = "testsrc2",
    .description   = NULL_IF_CONFIG_SMALL("Generate another test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &testsrc2_class,
    .init          = test2_init,
    .uninit        = uninit,
    .query_formats = test2_query_formats,
    .activate      = activate,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_testsrc2_outputs,
};
  841. #endif /* CONFIG_TESTSRC2_FILTER */
  842. #if CONFIG_RGBTESTSRC_FILTER
/* rgbtestsrc reuses the common size/rate/duration/sar options */
#define rgbtestsrc_options options
AVFILTER_DEFINE_CLASS(rgbtestsrc);

/* indexes into the rgba_map component-order table */
#define R 0
#define G 1
#define B 2
#define A 3
/* Write one (r, g, b) pixel at (x, y), packing the 8-bit components as
 * required by fmt (truncating for the 16-bit packed formats). For the
 * 24/32-bit formats rgba_map supplies the component byte order; formats
 * with alpha get fully opaque alpha. */
static void rgbtest_put_pixel(uint8_t *dst, int dst_linesize,
                              int x, int y, unsigned r, unsigned g, unsigned b, enum AVPixelFormat fmt,
                              uint8_t rgba_map[4])
{
    uint32_t v;
    uint8_t *p;

    switch (fmt) {
    case AV_PIX_FMT_BGR444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r >> 4) << 8) | ((g >> 4) << 4) | (b >> 4); break;
    case AV_PIX_FMT_RGB444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b >> 4) << 8) | ((g >> 4) << 4) | (r >> 4); break;
    case AV_PIX_FMT_BGR555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<10) | ((g>>3)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<10) | ((g>>3)<<5) | (r>>3); break;
    case AV_PIX_FMT_BGR565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<11) | ((g>>2)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<11) | ((g>>2)<<5) | (r>>3); break;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
        /* build the packed value with each component shifted to its slot */
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8));
        p = dst + 3*x + y*dst_linesize;
        AV_WL24(p, v);
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8)) + (255U << (rgba_map[A]*8));
        p = dst + 4*x + y*dst_linesize;
        AV_WL32(p, v);
        break;
    }
}
  878. static void rgbtest_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  879. {
  880. TestSourceContext *test = ctx->priv;
  881. int x, y, w = frame->width, h = frame->height;
  882. for (y = 0; y < h; y++) {
  883. for (x = 0; x < w; x++) {
  884. int c = 256*x/w;
  885. int r = 0, g = 0, b = 0;
  886. if (3*y < h ) r = c;
  887. else if (3*y < 2*h) g = c;
  888. else b = c;
  889. rgbtest_put_pixel(frame->data[0], frame->linesize[0], x, y, r, g, b,
  890. ctx->outputs[0]->format, test->rgba_map);
  891. }
  892. }
  893. }
  894. static av_cold int rgbtest_init(AVFilterContext *ctx)
  895. {
  896. TestSourceContext *test = ctx->priv;
  897. test->draw_once = 1;
  898. test->fill_picture_fn = rgbtest_fill_picture;
  899. return init(ctx);
  900. }
  901. static int rgbtest_query_formats(AVFilterContext *ctx)
  902. {
  903. static const enum AVPixelFormat pix_fmts[] = {
  904. AV_PIX_FMT_RGBA, AV_PIX_FMT_ARGB, AV_PIX_FMT_BGRA, AV_PIX_FMT_ABGR,
  905. AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB24,
  906. AV_PIX_FMT_RGB444, AV_PIX_FMT_BGR444,
  907. AV_PIX_FMT_RGB565, AV_PIX_FMT_BGR565,
  908. AV_PIX_FMT_RGB555, AV_PIX_FMT_BGR555,
  909. AV_PIX_FMT_NONE
  910. };
  911. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  912. if (!fmts_list)
  913. return AVERROR(ENOMEM);
  914. return ff_set_common_formats(ctx, fmts_list);
  915. }
  916. static int rgbtest_config_props(AVFilterLink *outlink)
  917. {
  918. TestSourceContext *test = outlink->src->priv;
  919. ff_fill_rgba_map(test->rgba_map, outlink->format);
  920. return config_props(outlink);
  921. }
/* Single video output; rgba_map is filled in rgbtest_config_props(). */
static const AVFilterPad avfilter_vsrc_rgbtestsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = rgbtest_config_props,
    },
    { NULL }
};
AVFilter ff_vsrc_rgbtestsrc = {
    .name          = "rgbtestsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate RGB test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &rgbtestsrc_class,
    .init          = rgbtest_init,
    .uninit        = uninit,
    .query_formats = rgbtest_query_formats,
    .activate      = activate,
    .inputs        = NULL,   /* pure source: no inputs */
    .outputs       = avfilter_vsrc_rgbtestsrc_outputs,
};

#endif /* CONFIG_RGBTESTSRC_FILTER */

#if CONFIG_YUVTESTSRC_FILTER

#define yuvtestsrc_options options
AVFILTER_DEFINE_CLASS(yuvtestsrc);
  946. static void yuvtest_fill_picture8(AVFilterContext *ctx, AVFrame *frame)
  947. {
  948. int x, y, w = frame->width, h = frame->height / 3;
  949. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
  950. const int factor = 1 << desc->comp[0].depth;
  951. const int mid = 1 << (desc->comp[0].depth - 1);
  952. uint8_t *ydst = frame->data[0];
  953. uint8_t *udst = frame->data[1];
  954. uint8_t *vdst = frame->data[2];
  955. int ylinesize = frame->linesize[0];
  956. int ulinesize = frame->linesize[1];
  957. int vlinesize = frame->linesize[2];
  958. for (y = 0; y < h; y++) {
  959. for (x = 0; x < w; x++) {
  960. int c = factor * x / w;
  961. ydst[x] = c;
  962. udst[x] = mid;
  963. vdst[x] = mid;
  964. }
  965. ydst += ylinesize;
  966. udst += ulinesize;
  967. vdst += vlinesize;
  968. }
  969. h += h;
  970. for (; y < h; y++) {
  971. for (x = 0; x < w; x++) {
  972. int c = factor * x / w;
  973. ydst[x] = mid;
  974. udst[x] = c;
  975. vdst[x] = mid;
  976. }
  977. ydst += ylinesize;
  978. udst += ulinesize;
  979. vdst += vlinesize;
  980. }
  981. for (; y < frame->height; y++) {
  982. for (x = 0; x < w; x++) {
  983. int c = factor * x / w;
  984. ydst[x] = mid;
  985. udst[x] = mid;
  986. vdst[x] = c;
  987. }
  988. ydst += ylinesize;
  989. udst += ulinesize;
  990. vdst += vlinesize;
  991. }
  992. }
  993. static void yuvtest_fill_picture16(AVFilterContext *ctx, AVFrame *frame)
  994. {
  995. int x, y, w = frame->width, h = frame->height / 3;
  996. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
  997. const int factor = 1 << desc->comp[0].depth;
  998. const int mid = 1 << (desc->comp[0].depth - 1);
  999. uint16_t *ydst = (uint16_t *)frame->data[0];
  1000. uint16_t *udst = (uint16_t *)frame->data[1];
  1001. uint16_t *vdst = (uint16_t *)frame->data[2];
  1002. int ylinesize = frame->linesize[0] / 2;
  1003. int ulinesize = frame->linesize[1] / 2;
  1004. int vlinesize = frame->linesize[2] / 2;
  1005. for (y = 0; y < h; y++) {
  1006. for (x = 0; x < w; x++) {
  1007. int c = factor * x / w;
  1008. ydst[x] = c;
  1009. udst[x] = mid;
  1010. vdst[x] = mid;
  1011. }
  1012. ydst += ylinesize;
  1013. udst += ulinesize;
  1014. vdst += vlinesize;
  1015. }
  1016. h += h;
  1017. for (; y < h; y++) {
  1018. for (x = 0; x < w; x++) {
  1019. int c = factor * x / w;
  1020. ydst[x] = mid;
  1021. udst[x] = c;
  1022. vdst[x] = mid;
  1023. }
  1024. ydst += ylinesize;
  1025. udst += ulinesize;
  1026. vdst += vlinesize;
  1027. }
  1028. for (; y < frame->height; y++) {
  1029. for (x = 0; x < w; x++) {
  1030. int c = factor * x / w;
  1031. ydst[x] = mid;
  1032. udst[x] = mid;
  1033. vdst[x] = c;
  1034. }
  1035. ydst += ylinesize;
  1036. udst += ulinesize;
  1037. vdst += vlinesize;
  1038. }
  1039. }
static av_cold int yuvtest_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    /* Static pattern: render one frame and keep serving copies.
     * fill_picture_fn is chosen later, in yuvtest_config_props(),
     * once the output bit depth is known. */
    test->draw_once = 1;
    return init(ctx);
}
  1046. static int yuvtest_query_formats(AVFilterContext *ctx)
  1047. {
  1048. static const enum AVPixelFormat pix_fmts[] = {
  1049. AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVJ444P,
  1050. AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV444P10,
  1051. AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV444P14,
  1052. AV_PIX_FMT_YUV444P16,
  1053. AV_PIX_FMT_NONE
  1054. };
  1055. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  1056. if (!fmts_list)
  1057. return AVERROR(ENOMEM);
  1058. return ff_set_common_formats(ctx, fmts_list);
  1059. }
  1060. static int yuvtest_config_props(AVFilterLink *outlink)
  1061. {
  1062. TestSourceContext *test = outlink->src->priv;
  1063. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);
  1064. test->fill_picture_fn = desc->comp[0].depth > 8 ? yuvtest_fill_picture16 : yuvtest_fill_picture8;
  1065. return config_props(outlink);
  1066. }
/* Single video output; the fill function is selected per-depth in
 * yuvtest_config_props(). */
static const AVFilterPad avfilter_vsrc_yuvtestsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = yuvtest_config_props,
    },
    { NULL }
};
AVFilter ff_vsrc_yuvtestsrc = {
    .name          = "yuvtestsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate YUV test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &yuvtestsrc_class,
    .init          = yuvtest_init,
    .uninit        = uninit,
    .query_formats = yuvtest_query_formats,
    .activate      = activate,
    .inputs        = NULL,   /* pure source: no inputs */
    .outputs       = avfilter_vsrc_yuvtestsrc_outputs,
};

#endif /* CONFIG_YUVTESTSRC_FILTER */
#if CONFIG_PAL75BARS_FILTER || CONFIG_PAL100BARS_FILTER || CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER

/*
 * Colour tables shared by the bar generators. Each entry is one byte
 * per plane, { Y, U, V, A }, consumed plane-by-plane by draw_bar().
 */

/* 75%-amplitude bars, left-to-right order of the classic pattern */
static const uint8_t rainbow[7][4] = {
    { 180, 128, 128, 255 },     /* 75% white */
    { 162,  44, 142, 255 },     /* 75% yellow */
    { 131, 156,  44, 255 },     /* 75% cyan */
    { 112,  72,  58, 255 },     /* 75% green */
    {  84, 184, 198, 255 },     /* 75% magenta */
    {  65, 100, 212, 255 },     /* 75% red */
    {  35, 212, 114, 255 },     /* 75% blue */
};

/* 100%-amplitude bars, used by pal100bars */
static const uint8_t rainbow100[7][4] = {
    { 235, 128, 128, 255 },     /* 100% white */
    { 210,  16, 146, 255 },     /* 100% yellow */
    { 170, 166,  16, 255 },     /* 100% cyan */
    { 145,  54,  34, 255 },     /* 100% green */
    { 106, 202, 222, 255 },     /* 100% magenta */
    {  81,  90, 240, 255 },     /* 100% red */
    {  41, 240, 110, 255 },     /* 100% blue */
};

/* 75% bars in BT.709 coefficients, used by smptehdbars */
static const uint8_t rainbowhd[7][4] = {
    { 180, 128, 128, 255 },     /* 75% white */
    { 168,  44, 136, 255 },     /* 75% yellow */
    { 145, 147,  44, 255 },     /* 75% cyan */
    { 133,  63,  52, 255 },     /* 75% green */
    {  63, 193, 204, 255 },     /* 75% magenta */
    {  51, 109, 212, 255 },     /* 75% red */
    {  28, 212, 120, 255 },     /* 75% blue */
};

/* reverse-order strip under the main SMPTE bars */
static const uint8_t wobnair[7][4] = {
    {  35, 212, 114, 255 },     /* 75% blue */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    {  84, 184, 198, 255 },     /* 75% magenta */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    { 131, 156,  44, 255 },     /* 75% cyan */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    { 180, 128, 128, 255 },     /* 75% white */
};

static const uint8_t white[4] = { 235, 128, 128, 255 };

/* pluge pulses */
static const uint8_t neg4ire[4] = {  7, 128, 128, 255 };
static const uint8_t pos4ire[4] = { 24, 128, 128, 255 };

/* fudged Q/-I */
static const uint8_t i_pixel[4] = { 57, 156,  97, 255 };
static const uint8_t q_pixel[4] = { 44, 171, 147, 255 };

static const uint8_t gray40[4] = { 104, 128, 128, 255 };
static const uint8_t gray15[4] = {  49, 128, 128, 255 };
static const uint8_t   cyan[4] = { 188, 154,  16, 255 };
static const uint8_t yellow[4] = { 219,  16, 138, 255 };
static const uint8_t   blue[4] = {  32, 240, 118, 255 };
static const uint8_t    red[4] = {  63, 102, 240, 255 };
static const uint8_t black0[4] = {  16, 128, 128, 255 };   /* 0% black */
static const uint8_t black2[4] = {  20, 128, 128, 255 };   /* +2% black */
static const uint8_t black4[4] = {  25, 128, 128, 255 };   /* +4% black */
static const uint8_t   neg2[4] = {  12, 128, 128, 255 };   /* -2% black */
/**
 * Fill an axis-aligned rectangle of the frame with a solid colour.
 *
 * @param test  filter context, provides the output dimensions for clipping
 * @param color one byte per plane ({ Y, U, V, A } for the planar YUV
 *              formats this is used with)
 * @param x,y   top-left corner, in luma pixels
 * @param w,h   rectangle size in luma pixels; clipped against the frame
 * @param frame destination frame
 */
static void draw_bar(TestSourceContext *test, const uint8_t color[4],
                     int x, int y, int w, int h,
                     AVFrame *frame)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t *p, *p0;
    int plane;

    /* clip the rectangle to the frame; w/h may collapse to 0 */
    x = FFMIN(x, test->w - 1);
    y = FFMIN(y, test->h - 1);
    w = FFMAX(FFMIN(w, test->w - x), 0);
    h = FFMAX(FFMIN(h, test->h - y), 0);

    av_assert0(x + w <= test->w);
    av_assert0(y + h <= test->h);

    for (plane = 0; frame->data[plane]; plane++) {
        const int c = color[plane];
        const int linesize = frame->linesize[plane];
        int i, px, py, pw, ph;

        if (plane == 1 || plane == 2) {
            /* chroma planes: scale the rectangle down by the
             * subsampling factors, rounding the size up */
            px = x >> desc->log2_chroma_w;
            pw = AV_CEIL_RSHIFT(w, desc->log2_chroma_w);
            py = y >> desc->log2_chroma_h;
            ph = AV_CEIL_RSHIFT(h, desc->log2_chroma_h);
        } else {
            px = x;
            pw = w;
            py = y;
            ph = h;
        }

        /* fill the first row, then replicate it downwards */
        p0 = p = frame->data[plane] + py * linesize + px;
        memset(p, c, pw);
        p += linesize;
        for (i = 1; i < ph; i++, p += linesize)
            memcpy(p, p0, pw);
    }
}
  1177. static int smptebars_query_formats(AVFilterContext *ctx)
  1178. {
  1179. static const enum AVPixelFormat pix_fmts[] = {
  1180. AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
  1181. AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
  1182. AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
  1183. AV_PIX_FMT_NONE,
  1184. };
  1185. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  1186. if (!fmts_list)
  1187. return AVERROR(ENOMEM);
  1188. return ff_set_common_formats(ctx, fmts_list);
  1189. }
/* Single video output, shared by all the bar generators below. */
static const AVFilterPad smptebars_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_props,
    },
    { NULL }
};

#if CONFIG_PAL75BARS_FILTER

#define pal75bars_options options
AVFILTER_DEFINE_CLASS(pal75bars);
  1201. static void pal75bars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
  1202. {
  1203. TestSourceContext *test = ctx->priv;
  1204. int r_w, i, x = 0;
  1205. const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);
  1206. picref->color_range = AVCOL_RANGE_MPEG;
  1207. picref->colorspace = AVCOL_SPC_BT470BG;
  1208. r_w = FFALIGN((test->w + 7) / 8, 1 << pixdesc->log2_chroma_w);
  1209. draw_bar(test, white, x, 0, r_w, test->h, picref);
  1210. x += r_w;
  1211. for (i = 1; i < 7; i++) {
  1212. draw_bar(test, rainbow[i], x, 0, r_w, test->h, picref);
  1213. x += r_w;
  1214. }
  1215. draw_bar(test, black0, x, 0, r_w, test->h, picref);
  1216. }
  1217. static av_cold int pal75bars_init(AVFilterContext *ctx)
  1218. {
  1219. TestSourceContext *test = ctx->priv;
  1220. test->fill_picture_fn = pal75bars_fill_picture;
  1221. test->draw_once = 1;
  1222. return init(ctx);
  1223. }
AVFilter ff_vsrc_pal75bars = {
    .name          = "pal75bars",
    .description   = NULL_IF_CONFIG_SMALL("Generate PAL 75% color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &pal75bars_class,
    .init          = pal75bars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .activate      = activate,
    .inputs        = NULL,   /* pure source: no inputs */
    .outputs       = smptebars_outputs,
};

#endif /* CONFIG_PAL75BARS_FILTER */

#if CONFIG_PAL100BARS_FILTER

#define pal100bars_options options
AVFILTER_DEFINE_CLASS(pal100bars);
  1240. static void pal100bars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
  1241. {
  1242. TestSourceContext *test = ctx->priv;
  1243. int r_w, i, x = 0;
  1244. const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);
  1245. picref->color_range = AVCOL_RANGE_MPEG;
  1246. picref->colorspace = AVCOL_SPC_BT470BG;
  1247. r_w = FFALIGN((test->w + 7) / 8, 1 << pixdesc->log2_chroma_w);
  1248. for (i = 0; i < 7; i++) {
  1249. draw_bar(test, rainbow100[i], x, 0, r_w, test->h, picref);
  1250. x += r_w;
  1251. }
  1252. draw_bar(test, black0, x, 0, r_w, test->h, picref);
  1253. }
  1254. static av_cold int pal100bars_init(AVFilterContext *ctx)
  1255. {
  1256. TestSourceContext *test = ctx->priv;
  1257. test->fill_picture_fn = pal100bars_fill_picture;
  1258. test->draw_once = 1;
  1259. return init(ctx);
  1260. }
AVFilter ff_vsrc_pal100bars = {
    .name          = "pal100bars",
    .description   = NULL_IF_CONFIG_SMALL("Generate PAL 100% color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &pal100bars_class,
    .init          = pal100bars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .activate      = activate,
    .inputs        = NULL,   /* pure source: no inputs */
    .outputs       = smptebars_outputs,
};

#endif /* CONFIG_PAL100BARS_FILTER */

#if CONFIG_SMPTEBARS_FILTER

#define smptebars_options options
AVFILTER_DEFINE_CLASS(smptebars);
/* Draw the classic SMPTE colour bars:
 *  - top 2/3:      seven 75% colour bars
 *  - middle strip: "wobnair" row (reverse-order colours and black)
 *  - bottom band:  -I / white / +Q patches followed by the PLUGE pulses */
static void smptebars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, r_h, w_h, p_w, p_h, i, tmp, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->colorspace = AVCOL_SPC_BT470BG;

    /* all widths/heights are aligned to the chroma subsampling grid */
    r_w = FFALIGN((test->w + 6) / 7, 1 << pixdesc->log2_chroma_w);     /* main bar width */
    r_h = FFALIGN(test->h * 2 / 3, 1 << pixdesc->log2_chroma_h);       /* main bar height */
    w_h = FFALIGN(test->h * 3 / 4 - r_h, 1 << pixdesc->log2_chroma_h); /* wobnair height */
    p_w = FFALIGN(r_w * 5 / 4, 1 << pixdesc->log2_chroma_w);           /* bottom patch width */
    p_h = test->h - w_h - r_h;                                         /* bottom band height */

    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbow[i], x, 0, r_w, r_h, picref);
        draw_bar(test, wobnair[i], x, r_h, r_w, w_h, picref);
        x += r_w;
    }

    /* bottom band: -I, white, +Q, black, then the -4/0/+4 IRE pulses */
    x = 0;
    draw_bar(test, i_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, white, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, q_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    tmp = FFALIGN(5 * r_w - x, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, pos4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, test->w - x, p_h, picref);
}
  1312. static av_cold int smptebars_init(AVFilterContext *ctx)
  1313. {
  1314. TestSourceContext *test = ctx->priv;
  1315. test->fill_picture_fn = smptebars_fill_picture;
  1316. test->draw_once = 1;
  1317. return init(ctx);
  1318. }
AVFilter ff_vsrc_smptebars = {
    .name          = "smptebars",
    .description   = NULL_IF_CONFIG_SMALL("Generate SMPTE color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &smptebars_class,
    .init          = smptebars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .activate      = activate,
    .inputs        = NULL,   /* pure source: no inputs */
    .outputs       = smptebars_outputs,
};

#endif /* CONFIG_SMPTEBARS_FILTER */

#if CONFIG_SMPTEHDBARS_FILTER

#define smptehdbars_options options
AVFILTER_DEFINE_CLASS(smptehdbars);
/* Draw HD colour bars (SMPTE RP 219-style layout — TODO confirm exact
 * conformance): top 7/12 of the height is the 75% bars with 40% grey
 * flanks, followed by two thin strips (-I patch / 75% white / blue, and
 * +Q patch / luma ramp / red), and a bottom band with black/white
 * blocks and -2%/0%/+2%/0%/+4% PLUGE steps. */
static void smptehdbars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int d_w, r_w, r_h, l_w, i, tmp, x = 0, y = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->colorspace = AVCOL_SPC_BT709;

    /* flank width and main bar height, aligned to the chroma grid */
    d_w = FFALIGN(test->w / 8, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 7 / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, gray40, x, 0, d_w, r_h, picref);
    x += d_w;

    /* seven colour bars spanning the middle 3/4 of the width */
    r_w = FFALIGN((((test->w + 3) / 4) * 3) / 7, 1 << pixdesc->log2_chroma_w);
    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbowhd[i], x, 0, r_w, r_h, picref);
        x += r_w;
    }
    draw_bar(test, gray40, x, 0, test->w - x, r_h, picref);

    /* first thin strip: cyan flank, -I patch, 75% white, blue flank */
    y = r_h;
    r_h = FFALIGN(test->h / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, cyan, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, i_pixel, x, y, r_w, r_h, picref);
    x += r_w;
    tmp = r_w * 6;
    draw_bar(test, rainbowhd[0], x, y, tmp, r_h, picref);
    x += tmp;
    l_w = x;   /* remember the right edge of the ramp area for the bottom band */
    draw_bar(test, blue, x, y, test->w - x, r_h, picref);
    y += r_h;

    /* second thin strip: yellow flank, +Q patch, luma ramp, red flank */
    draw_bar(test, yellow, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, q_pixel, x, y, r_w, r_h, picref);
    x += r_w;
    for (i = 0; i < tmp; i += 1 << pixdesc->log2_chroma_w) {
        /* one chroma-aligned column of the 0..255 luma ramp at a time */
        uint8_t yramp[4] = {0};

        yramp[0] = i * 255 / tmp;
        yramp[1] = 128;
        yramp[2] = 128;
        yramp[3] = 255;
        draw_bar(test, yramp, x, y, 1 << pixdesc->log2_chroma_w, r_h, picref);
        x += 1 << pixdesc->log2_chroma_w;
    }
    draw_bar(test, red, x, y, test->w - x, r_h, picref);
    y += r_h;

    /* bottom band: 15% grey flanks, black/white blocks, PLUGE steps */
    draw_bar(test, gray15, 0, y, d_w, test->h - y, picref);
    x = d_w;
    tmp = FFALIGN(r_w * 3 / 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, white, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 5 / 6, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black4, x, y, tmp, test->h - y, picref);
    x += tmp;
    r_w = l_w - x;
    draw_bar(test, black0, x, y, r_w, test->h - y, picref);
    x += r_w;
    draw_bar(test, gray15, x, y, test->w - x, test->h - y, picref);
}
  1405. static av_cold int smptehdbars_init(AVFilterContext *ctx)
  1406. {
  1407. TestSourceContext *test = ctx->priv;
  1408. test->fill_picture_fn = smptehdbars_fill_picture;
  1409. test->draw_once = 1;
  1410. return init(ctx);
  1411. }
AVFilter ff_vsrc_smptehdbars = {
    .name          = "smptehdbars",
    .description   = NULL_IF_CONFIG_SMALL("Generate SMPTE HD color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &smptehdbars_class,
    .init          = smptehdbars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .activate      = activate,
    .inputs        = NULL,   /* pure source: no inputs */
    .outputs       = smptebars_outputs,
};

#endif /* CONFIG_SMPTEHDBARS_FILTER */
#endif /* CONFIG_PAL75BARS_FILTER || CONFIG_PAL100BARS_FILTER || CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER */

#if CONFIG_ALLYUV_FILTER

#define allyuv_options &options[NOSIZE_OPTIONS_OFFSET]
AVFILTER_DEFINE_CLASS(allyuv);
  1429. static void allyuv_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  1430. {
  1431. const int ys = frame->linesize[0];
  1432. const int us = frame->linesize[1];
  1433. const int vs = frame->linesize[2];
  1434. int x, y, j;
  1435. for (y = 0; y < 4096; y++) {
  1436. for (x = 0; x < 2048; x++) {
  1437. frame->data[0][y * ys + x] = ((x / 8) % 256);
  1438. frame->data[0][y * ys + 4095 - x] = ((x / 8) % 256);
  1439. }
  1440. for (x = 0; x < 2048; x+=8) {
  1441. for (j = 0; j < 8; j++) {
  1442. frame->data[1][vs * y + x + j] = (y%16 + (j % 8) * 16);
  1443. frame->data[1][vs * y + 4095 - x - j] = (128 + y%16 + (j % 8) * 16);
  1444. }
  1445. }
  1446. for (x = 0; x < 4096; x++)
  1447. frame->data[2][y * us + x] = 256 * y / 4096;
  1448. }
  1449. }
  1450. static av_cold int allyuv_init(AVFilterContext *ctx)
  1451. {
  1452. TestSourceContext *test = ctx->priv;
  1453. test->w = test->h = 4096;
  1454. test->draw_once = 1;
  1455. test->fill_picture_fn = allyuv_fill_picture;
  1456. return init(ctx);
  1457. }
  1458. static int allyuv_query_formats(AVFilterContext *ctx)
  1459. {
  1460. static const enum AVPixelFormat pix_fmts[] = {
  1461. AV_PIX_FMT_YUV444P, AV_PIX_FMT_GBRP,
  1462. AV_PIX_FMT_NONE
  1463. };
  1464. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  1465. if (!fmts_list)
  1466. return AVERROR(ENOMEM);
  1467. return ff_set_common_formats(ctx, fmts_list);
  1468. }
/* Single video output; frame size is fixed to 4096x4096 in allyuv_init(). */
static const AVFilterPad avfilter_vsrc_allyuv_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_props,
    },
    { NULL }
};
AVFilter ff_vsrc_allyuv = {
    .name          = "allyuv",
    .description   = NULL_IF_CONFIG_SMALL("Generate all yuv colors."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &allyuv_class,
    .init          = allyuv_init,
    .uninit        = uninit,
    .query_formats = allyuv_query_formats,
    .activate      = activate,
    .inputs        = NULL,   /* pure source: no inputs */
    .outputs       = avfilter_vsrc_allyuv_outputs,
};

#endif /* CONFIG_ALLYUV_FILTER */

#if CONFIG_ALLRGB_FILTER

#define allrgb_options &options[NOSIZE_OPTIONS_OFFSET]
AVFILTER_DEFINE_CLASS(allrgb);
  1493. static void allrgb_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  1494. {
  1495. unsigned x, y;
  1496. const int linesize = frame->linesize[0];
  1497. uint8_t *line = frame->data[0];
  1498. for (y = 0; y < 4096; y++) {
  1499. uint8_t *dst = line;
  1500. for (x = 0; x < 4096; x++) {
  1501. *dst++ = x;
  1502. *dst++ = y;
  1503. *dst++ = (x >> 8) | ((y >> 8) << 4);
  1504. }
  1505. line += linesize;
  1506. }
  1507. }
  1508. static av_cold int allrgb_init(AVFilterContext *ctx)
  1509. {
  1510. TestSourceContext *test = ctx->priv;
  1511. test->w = test->h = 4096;
  1512. test->draw_once = 1;
  1513. test->fill_picture_fn = allrgb_fill_picture;
  1514. return init(ctx);
  1515. }
  1516. static int allrgb_config_props(AVFilterLink *outlink)
  1517. {
  1518. TestSourceContext *test = outlink->src->priv;
  1519. ff_fill_rgba_map(test->rgba_map, outlink->format);
  1520. return config_props(outlink);
  1521. }
  1522. static int allrgb_query_formats(AVFilterContext *ctx)
  1523. {
  1524. static const enum AVPixelFormat pix_fmts[] = {
  1525. AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
  1526. };
  1527. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  1528. if (!fmts_list)
  1529. return AVERROR(ENOMEM);
  1530. return ff_set_common_formats(ctx, fmts_list);
  1531. }
/* Single video output; frame size is fixed to 4096x4096 in allrgb_init(). */
static const AVFilterPad avfilter_vsrc_allrgb_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = allrgb_config_props,
    },
    { NULL }
};
AVFilter ff_vsrc_allrgb = {
    .name          = "allrgb",
    .description   = NULL_IF_CONFIG_SMALL("Generate all RGB colors."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &allrgb_class,
    .init          = allrgb_init,
    .uninit        = uninit,
    .query_formats = allrgb_query_formats,
    .activate      = activate,
    .inputs        = NULL,   /* pure source: no inputs */
    .outputs       = avfilter_vsrc_allrgb_outputs,
};

#endif /* CONFIG_ALLRGB_FILTER */