  1. /*
  2. * Copyright (c) 2007 Nicolas George <nicolas.george@normalesup.org>
  3. * Copyright (c) 2011 Stefano Sabatini
  4. * Copyright (c) 2012 Paul B Mahol
  5. *
  6. * This file is part of FFmpeg.
  7. *
  8. * FFmpeg is free software; you can redistribute it and/or
  9. * modify it under the terms of the GNU Lesser General Public
  10. * License as published by the Free Software Foundation; either
  11. * version 2.1 of the License, or (at your option) any later version.
  12. *
  13. * FFmpeg is distributed in the hope that it will be useful,
  14. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  16. * Lesser General Public License for more details.
  17. *
  18. * You should have received a copy of the GNU Lesser General Public
  19. * License along with FFmpeg; if not, write to the Free Software
  20. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  21. */
  22. /**
  23. * @file
  24. * Misc test sources.
  25. *
  26. * testsrc is based on the test pattern generator demuxer by Nicolas George:
  27. * http://lists.ffmpeg.org/pipermail/ffmpeg-devel/2007-October/037845.html
  28. *
  29. * rgbtestsrc is ported from MPlayer libmpcodecs/vf_rgbtest.c by
  30. * Michael Niedermayer.
  31. *
  32. * allyuv, smptebars and smptehdbars are by Paul B Mahol.
  33. */
  34. #include <float.h>
  35. #include "libavutil/avassert.h"
  36. #include "libavutil/common.h"
  37. #include "libavutil/ffmath.h"
  38. #include "libavutil/opt.h"
  39. #include "libavutil/imgutils.h"
  40. #include "libavutil/intreadwrite.h"
  41. #include "libavutil/parseutils.h"
  42. #include "libavutil/xga_font_data.h"
  43. #include "avfilter.h"
  44. #include "drawutils.h"
  45. #include "formats.h"
  46. #include "internal.h"
  47. #include "video.h"
  48. typedef struct TestSourceContext {
  49. const AVClass *class;
  50. int w, h;
  51. unsigned int nb_frame;
  52. AVRational time_base, frame_rate;
  53. int64_t pts;
  54. int64_t duration; ///< duration expressed in microseconds
  55. AVRational sar; ///< sample aspect ratio
  56. int draw_once; ///< draw only the first frame, always put out the same picture
  57. int draw_once_reset; ///< draw only the first frame or in case of reset
  58. AVFrame *picref; ///< cached reference containing the painted picture
  59. void (* fill_picture_fn)(AVFilterContext *ctx, AVFrame *frame);
  60. /* only used by testsrc */
  61. int nb_decimals;
  62. /* only used by testsrc2 */
  63. int alpha;
  64. /* only used by color */
  65. FFDrawContext draw;
  66. FFDrawColor color;
  67. uint8_t color_rgba[4];
  68. /* only used by rgbtest */
  69. uint8_t rgba_map[4];
  70. /* only used by haldclut */
  71. int level;
  72. } TestSourceContext;
  73. #define OFFSET(x) offsetof(TestSourceContext, x)
  74. #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
  75. #define FLAGSR AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
  76. #define SIZE_OPTIONS \
  77. { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\
  78. { "s", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\
  79. #define COMMON_OPTIONS_NOSIZE \
  80. { "rate", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
  81. { "r", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
  82. { "duration", "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS },\
  83. { "d", "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS },\
  84. { "sar", "set video sample aspect ratio", OFFSET(sar), AV_OPT_TYPE_RATIONAL, {.dbl= 1}, 0, INT_MAX, FLAGS },
  85. #define COMMON_OPTIONS SIZE_OPTIONS COMMON_OPTIONS_NOSIZE
  86. static const AVOption options[] = {
  87. COMMON_OPTIONS
  88. { NULL }
  89. };
  90. static av_cold int init(AVFilterContext *ctx)
  91. {
  92. TestSourceContext *test = ctx->priv;
  93. test->time_base = av_inv_q(test->frame_rate);
  94. test->nb_frame = 0;
  95. test->pts = 0;
  96. av_log(ctx, AV_LOG_VERBOSE, "size:%dx%d rate:%d/%d duration:%f sar:%d/%d\n",
  97. test->w, test->h, test->frame_rate.num, test->frame_rate.den,
  98. test->duration < 0 ? -1 : (double)test->duration/1000000,
  99. test->sar.num, test->sar.den);
  100. return 0;
  101. }
  102. static av_cold void uninit(AVFilterContext *ctx)
  103. {
  104. TestSourceContext *test = ctx->priv;
  105. av_frame_free(&test->picref);
  106. }
  107. static int config_props(AVFilterLink *outlink)
  108. {
  109. TestSourceContext *test = outlink->src->priv;
  110. outlink->w = test->w;
  111. outlink->h = test->h;
  112. outlink->sample_aspect_ratio = test->sar;
  113. outlink->frame_rate = test->frame_rate;
  114. outlink->time_base = test->time_base;
  115. return 0;
  116. }
  117. static int request_frame(AVFilterLink *outlink)
  118. {
  119. TestSourceContext *test = outlink->src->priv;
  120. AVFrame *frame;
  121. if (test->duration >= 0 &&
  122. av_rescale_q(test->pts, test->time_base, AV_TIME_BASE_Q) >= test->duration)
  123. return AVERROR_EOF;
  124. if (test->draw_once) {
  125. if (test->draw_once_reset) {
  126. av_frame_free(&test->picref);
  127. test->draw_once_reset = 0;
  128. }
  129. if (!test->picref) {
  130. test->picref =
  131. ff_get_video_buffer(outlink, test->w, test->h);
  132. if (!test->picref)
  133. return AVERROR(ENOMEM);
  134. test->fill_picture_fn(outlink->src, test->picref);
  135. }
  136. frame = av_frame_clone(test->picref);
  137. } else
  138. frame = ff_get_video_buffer(outlink, test->w, test->h);
  139. if (!frame)
  140. return AVERROR(ENOMEM);
  141. frame->pts = test->pts;
  142. frame->key_frame = 1;
  143. frame->interlaced_frame = 0;
  144. frame->pict_type = AV_PICTURE_TYPE_I;
  145. frame->sample_aspect_ratio = test->sar;
  146. if (!test->draw_once)
  147. test->fill_picture_fn(outlink->src, frame);
  148. test->pts++;
  149. test->nb_frame++;
  150. return ff_filter_frame(outlink, frame);
  151. }
  152. #if CONFIG_COLOR_FILTER
  153. static const AVOption color_options[] = {
  154. { "color", "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, CHAR_MIN, CHAR_MAX, FLAGSR },
  155. { "c", "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, CHAR_MIN, CHAR_MAX, FLAGSR },
  156. COMMON_OPTIONS
  157. { NULL }
  158. };
  159. AVFILTER_DEFINE_CLASS(color);
  160. static void color_fill_picture(AVFilterContext *ctx, AVFrame *picref)
  161. {
  162. TestSourceContext *test = ctx->priv;
  163. ff_fill_rectangle(&test->draw, &test->color,
  164. picref->data, picref->linesize,
  165. 0, 0, test->w, test->h);
  166. }
  167. static av_cold int color_init(AVFilterContext *ctx)
  168. {
  169. TestSourceContext *test = ctx->priv;
  170. test->fill_picture_fn = color_fill_picture;
  171. test->draw_once = 1;
  172. return init(ctx);
  173. }
  174. static int color_query_formats(AVFilterContext *ctx)
  175. {
  176. return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
  177. }
  178. static int color_config_props(AVFilterLink *inlink)
  179. {
  180. AVFilterContext *ctx = inlink->src;
  181. TestSourceContext *test = ctx->priv;
  182. int ret;
  183. ff_draw_init(&test->draw, inlink->format, 0);
  184. ff_draw_color(&test->draw, &test->color, test->color_rgba);
  185. test->w = ff_draw_round_to_sub(&test->draw, 0, -1, test->w);
  186. test->h = ff_draw_round_to_sub(&test->draw, 1, -1, test->h);
  187. if (av_image_check_size(test->w, test->h, 0, ctx) < 0)
  188. return AVERROR(EINVAL);
  189. if ((ret = config_props(inlink)) < 0)
  190. return ret;
  191. return 0;
  192. }
  193. static int color_process_command(AVFilterContext *ctx, const char *cmd, const char *args,
  194. char *res, int res_len, int flags)
  195. {
  196. TestSourceContext *test = ctx->priv;
  197. int ret;
  198. ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);
  199. if (ret < 0)
  200. return ret;
  201. ff_draw_color(&test->draw, &test->color, test->color_rgba);
  202. test->draw_once_reset = 1;
  203. return 0;
  204. }
  205. static const AVFilterPad color_outputs[] = {
  206. {
  207. .name = "default",
  208. .type = AVMEDIA_TYPE_VIDEO,
  209. .request_frame = request_frame,
  210. .config_props = color_config_props,
  211. },
  212. { NULL }
  213. };
  214. AVFilter ff_vsrc_color = {
  215. .name = "color",
216. .description = NULL_IF_CONFIG_SMALL("Provide a uniformly colored input."),
  217. .priv_class = &color_class,
  218. .priv_size = sizeof(TestSourceContext),
  219. .init = color_init,
  220. .uninit = uninit,
  221. .query_formats = color_query_formats,
  222. .inputs = NULL,
  223. .outputs = color_outputs,
  224. .process_command = color_process_command,
  225. };
  226. #endif /* CONFIG_COLOR_FILTER */
  227. #if CONFIG_HALDCLUTSRC_FILTER
  228. static const AVOption haldclutsrc_options[] = {
  229. { "level", "set level", OFFSET(level), AV_OPT_TYPE_INT, {.i64 = 6}, 2, 16, FLAGS },
  230. COMMON_OPTIONS_NOSIZE
  231. { NULL }
  232. };
  233. AVFILTER_DEFINE_CLASS(haldclutsrc);
  234. static void haldclutsrc_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  235. {
  236. int i, j, k, x = 0, y = 0, is16bit = 0, step;
  237. uint32_t alpha = 0;
  238. const TestSourceContext *hc = ctx->priv;
  239. int level = hc->level;
  240. float scale;
  241. const int w = frame->width;
  242. const int h = frame->height;
  243. const uint8_t *data = frame->data[0];
  244. const int linesize = frame->linesize[0];
  245. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
  246. uint8_t rgba_map[4];
  247. av_assert0(w == h && w == level*level*level);
  248. ff_fill_rgba_map(rgba_map, frame->format);
  249. switch (frame->format) {
  250. case AV_PIX_FMT_RGB48:
  251. case AV_PIX_FMT_BGR48:
  252. case AV_PIX_FMT_RGBA64:
  253. case AV_PIX_FMT_BGRA64:
  254. is16bit = 1;
  255. alpha = 0xffff;
  256. break;
  257. case AV_PIX_FMT_RGBA:
  258. case AV_PIX_FMT_BGRA:
  259. case AV_PIX_FMT_ARGB:
  260. case AV_PIX_FMT_ABGR:
  261. alpha = 0xff;
  262. break;
  263. }
  264. step = av_get_padded_bits_per_pixel(desc) >> (3 + is16bit);
  265. scale = ((float)(1 << (8*(is16bit+1))) - 1) / (level*level - 1);
  266. #define LOAD_CLUT(nbits) do { \
  267. uint##nbits##_t *dst = ((uint##nbits##_t *)(data + y*linesize)) + x*step; \
  268. dst[rgba_map[0]] = av_clip_uint##nbits(i * scale); \
  269. dst[rgba_map[1]] = av_clip_uint##nbits(j * scale); \
  270. dst[rgba_map[2]] = av_clip_uint##nbits(k * scale); \
  271. if (step == 4) \
  272. dst[rgba_map[3]] = alpha; \
  273. } while (0)
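/* After squaring 'level', the nested loops below write an identity CLUT: red (i) varies fastest, then green (j), then blue (k), each ramping from 0 to full scale over level^2 steps, laid out row by row across the level^3 x level^3 frame. */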
  274. level *= level;
  275. for (k = 0; k < level; k++) {
  276. for (j = 0; j < level; j++) {
  277. for (i = 0; i < level; i++) {
  278. if (!is16bit)
  279. LOAD_CLUT(8);
  280. else
  281. LOAD_CLUT(16);
  282. if (++x == w) {
  283. x = 0;
  284. y++;
  285. }
  286. }
  287. }
  288. }
  289. }
  290. static av_cold int haldclutsrc_init(AVFilterContext *ctx)
  291. {
  292. TestSourceContext *hc = ctx->priv;
  293. hc->fill_picture_fn = haldclutsrc_fill_picture;
  294. hc->draw_once = 1;
  295. return init(ctx);
  296. }
  297. static int haldclutsrc_query_formats(AVFilterContext *ctx)
  298. {
  299. static const enum AVPixelFormat pix_fmts[] = {
  300. AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24,
  301. AV_PIX_FMT_RGBA, AV_PIX_FMT_BGRA,
  302. AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR,
  303. AV_PIX_FMT_0RGB, AV_PIX_FMT_0BGR,
  304. AV_PIX_FMT_RGB0, AV_PIX_FMT_BGR0,
  305. AV_PIX_FMT_RGB48, AV_PIX_FMT_BGR48,
  306. AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64,
  307. AV_PIX_FMT_NONE,
  308. };
  309. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  310. if (!fmts_list)
  311. return AVERROR(ENOMEM);
  312. return ff_set_common_formats(ctx, fmts_list);
  313. }
  314. static int haldclutsrc_config_props(AVFilterLink *outlink)
  315. {
  316. AVFilterContext *ctx = outlink->src;
  317. TestSourceContext *hc = ctx->priv;
  318. hc->w = hc->h = hc->level * hc->level * hc->level;
  319. return config_props(outlink);
  320. }
  321. static const AVFilterPad haldclutsrc_outputs[] = {
  322. {
  323. .name = "default",
  324. .type = AVMEDIA_TYPE_VIDEO,
  325. .request_frame = request_frame,
  326. .config_props = haldclutsrc_config_props,
  327. },
  328. { NULL }
  329. };
  330. AVFilter ff_vsrc_haldclutsrc = {
  331. .name = "haldclutsrc",
  332. .description = NULL_IF_CONFIG_SMALL("Provide an identity Hald CLUT."),
  333. .priv_class = &haldclutsrc_class,
  334. .priv_size = sizeof(TestSourceContext),
  335. .init = haldclutsrc_init,
  336. .uninit = uninit,
  337. .query_formats = haldclutsrc_query_formats,
  338. .inputs = NULL,
  339. .outputs = haldclutsrc_outputs,
  340. };
  341. #endif /* CONFIG_HALDCLUTSRC_FILTER */
  342. #if CONFIG_NULLSRC_FILTER
  343. #define nullsrc_options options
  344. AVFILTER_DEFINE_CLASS(nullsrc);
  345. static void nullsrc_fill_picture(AVFilterContext *ctx, AVFrame *picref) { }
  346. static av_cold int nullsrc_init(AVFilterContext *ctx)
  347. {
  348. TestSourceContext *test = ctx->priv;
  349. test->fill_picture_fn = nullsrc_fill_picture;
  350. return init(ctx);
  351. }
  352. static const AVFilterPad nullsrc_outputs[] = {
  353. {
  354. .name = "default",
  355. .type = AVMEDIA_TYPE_VIDEO,
  356. .request_frame = request_frame,
  357. .config_props = config_props,
  358. },
  359. { NULL },
  360. };
  361. AVFilter ff_vsrc_nullsrc = {
  362. .name = "nullsrc",
363. .description = NULL_IF_CONFIG_SMALL("Null video source: returns unprocessed video frames."),
  364. .init = nullsrc_init,
  365. .uninit = uninit,
  366. .priv_size = sizeof(TestSourceContext),
  367. .priv_class = &nullsrc_class,
  368. .inputs = NULL,
  369. .outputs = nullsrc_outputs,
  370. };
  371. #endif /* CONFIG_NULLSRC_FILTER */
  372. #if CONFIG_TESTSRC_FILTER
  373. static const AVOption testsrc_options[] = {
  374. COMMON_OPTIONS
  375. { "decimals", "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
  376. { "n", "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
  377. { NULL }
  378. };
  379. AVFILTER_DEFINE_CLASS(testsrc);
  380. /**
  381. * Fill a rectangle with value val.
  382. *
383. * @param val the value used to fill the rectangle (written to all three RGB components)
  384. * @param dst pointer to the destination buffer to fill
  385. * @param dst_linesize linesize of destination
  386. * @param segment_width width of the segment
  387. * @param x horizontal coordinate where to draw the rectangle in the destination buffer
388. * @param y vertical coordinate where to draw the rectangle in the destination buffer
  389. * @param w width of the rectangle to draw, expressed as a number of segment_width units
  390. * @param h height of the rectangle to draw, expressed as a number of segment_width units
  391. */
  392. static void draw_rectangle(unsigned val, uint8_t *dst, int dst_linesize, int segment_width,
  393. int x, int y, int w, int h)
  394. {
  395. int i;
  396. int step = 3;
  397. dst += segment_width * (step * x + y * dst_linesize);
  398. w *= segment_width * step;
  399. h *= segment_width;
  400. for (i = 0; i < h; i++) {
  401. memset(dst, val, w);
  402. dst += dst_linesize;
  403. }
  404. }
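/* Render a decimal digit as a seven-segment figure: each bit of masks[digit] selects one of the segment rectangles defined below. */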
  405. static void draw_digit(int digit, uint8_t *dst, int dst_linesize,
  406. int segment_width)
  407. {
  408. #define TOP_HBAR 1
  409. #define MID_HBAR 2
  410. #define BOT_HBAR 4
  411. #define LEFT_TOP_VBAR 8
  412. #define LEFT_BOT_VBAR 16
  413. #define RIGHT_TOP_VBAR 32
  414. #define RIGHT_BOT_VBAR 64
  415. struct segments {
  416. int x, y, w, h;
  417. } segments[] = {
  418. { 1, 0, 5, 1 }, /* TOP_HBAR */
  419. { 1, 6, 5, 1 }, /* MID_HBAR */
  420. { 1, 12, 5, 1 }, /* BOT_HBAR */
  421. { 0, 1, 1, 5 }, /* LEFT_TOP_VBAR */
  422. { 0, 7, 1, 5 }, /* LEFT_BOT_VBAR */
  423. { 6, 1, 1, 5 }, /* RIGHT_TOP_VBAR */
  424. { 6, 7, 1, 5 } /* RIGHT_BOT_VBAR */
  425. };
  426. static const unsigned char masks[10] = {
  427. /* 0 */ TOP_HBAR |BOT_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
  428. /* 1 */ RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
  429. /* 2 */ TOP_HBAR|MID_HBAR|BOT_HBAR|LEFT_BOT_VBAR |RIGHT_TOP_VBAR,
  430. /* 3 */ TOP_HBAR|MID_HBAR|BOT_HBAR |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
  431. /* 4 */ MID_HBAR |LEFT_TOP_VBAR |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
  432. /* 5 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR |RIGHT_BOT_VBAR,
  433. /* 6 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR |RIGHT_BOT_VBAR,
  434. /* 7 */ TOP_HBAR |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
  435. /* 8 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
  436. /* 9 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
  437. };
  438. unsigned mask = masks[digit];
  439. int i;
  440. draw_rectangle(0, dst, dst_linesize, segment_width, 0, 0, 8, 13);
  441. for (i = 0; i < FF_ARRAY_ELEMS(segments); i++)
  442. if (mask & (1<<i))
  443. draw_rectangle(255, dst, dst_linesize, segment_width,
  444. segments[i].x, segments[i].y, segments[i].w, segments[i].h);
  445. }
  446. #define GRADIENT_SIZE (6 * 256)
  447. static void test_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  448. {
  449. TestSourceContext *test = ctx->priv;
  450. uint8_t *p, *p0;
  451. int x, y;
  452. int color, color_rest;
  453. int icolor;
  454. int radius;
  455. int quad0, quad;
  456. int dquad_x, dquad_y;
  457. int grad, dgrad, rgrad, drgrad;
  458. int seg_size;
  459. int second;
  460. int i;
  461. uint8_t *data = frame->data[0];
  462. int width = frame->width;
  463. int height = frame->height;
  464. /* draw colored bars and circle */
  465. radius = (width + height) / 4;
  466. quad0 = width * width / 4 + height * height / 4 - radius * radius;
  467. dquad_y = 1 - height;
  468. p0 = data;
  469. for (y = 0; y < height; y++) {
  470. p = p0;
  471. color = 0;
  472. color_rest = 0;
  473. quad = quad0;
  474. dquad_x = 1 - width;
  475. for (x = 0; x < width; x++) {
  476. icolor = color;
  477. if (quad < 0)
  478. icolor ^= 7;
  479. quad += dquad_x;
  480. dquad_x += 2;
  481. *(p++) = icolor & 1 ? 255 : 0;
  482. *(p++) = icolor & 2 ? 255 : 0;
  483. *(p++) = icolor & 4 ? 255 : 0;
  484. color_rest += 8;
  485. if (color_rest >= width) {
  486. color_rest -= width;
  487. color++;
  488. }
  489. }
  490. quad0 += dquad_y;
  491. dquad_y += 2;
  492. p0 += frame->linesize[0];
  493. }
  494. /* draw sliding color line */
  495. p0 = p = data + frame->linesize[0] * (height * 3/4);
  496. grad = (256 * test->nb_frame * test->time_base.num / test->time_base.den) %
  497. GRADIENT_SIZE;
  498. rgrad = 0;
  499. dgrad = GRADIENT_SIZE / width;
  500. drgrad = GRADIENT_SIZE % width;
  501. for (x = 0; x < width; x++) {
  502. *(p++) =
  503. grad < 256 || grad >= 5 * 256 ? 255 :
  504. grad >= 2 * 256 && grad < 4 * 256 ? 0 :
  505. grad < 2 * 256 ? 2 * 256 - 1 - grad : grad - 4 * 256;
  506. *(p++) =
  507. grad >= 4 * 256 ? 0 :
  508. grad >= 1 * 256 && grad < 3 * 256 ? 255 :
  509. grad < 1 * 256 ? grad : 4 * 256 - 1 - grad;
  510. *(p++) =
  511. grad < 2 * 256 ? 0 :
  512. grad >= 3 * 256 && grad < 5 * 256 ? 255 :
  513. grad < 3 * 256 ? grad - 2 * 256 : 6 * 256 - 1 - grad;
  514. grad += dgrad;
  515. rgrad += drgrad;
  516. if (rgrad >= GRADIENT_SIZE) {
  517. grad++;
  518. rgrad -= GRADIENT_SIZE;
  519. }
  520. if (grad >= GRADIENT_SIZE)
  521. grad -= GRADIENT_SIZE;
  522. }
  523. p = p0;
  524. for (y = height / 8; y > 0; y--) {
  525. memcpy(p+frame->linesize[0], p, 3 * width);
  526. p += frame->linesize[0];
  527. }
  528. /* draw digits */
  529. seg_size = width / 80;
  530. if (seg_size >= 1 && height >= 13 * seg_size) {
  531. int64_t p10decimals = 1;
  532. double time = av_q2d(test->time_base) * test->nb_frame *
  533. ff_exp10(test->nb_decimals);
  534. if (time >= INT_MAX)
  535. return;
  536. for (x = 0; x < test->nb_decimals; x++)
  537. p10decimals *= 10;
  538. second = av_rescale_rnd(test->nb_frame * test->time_base.num, p10decimals, test->time_base.den, AV_ROUND_ZERO);
  539. x = width - (width - seg_size * 64) / 2;
  540. y = (height - seg_size * 13) / 2;
  541. p = data + (x*3 + y * frame->linesize[0]);
  542. for (i = 0; i < 8; i++) {
  543. p -= 3 * 8 * seg_size;
  544. draw_digit(second % 10, p, frame->linesize[0], seg_size);
  545. second /= 10;
  546. if (second == 0)
  547. break;
  548. }
  549. }
  550. }
  551. static av_cold int test_init(AVFilterContext *ctx)
  552. {
  553. TestSourceContext *test = ctx->priv;
  554. test->fill_picture_fn = test_fill_picture;
  555. return init(ctx);
  556. }
  557. static int test_query_formats(AVFilterContext *ctx)
  558. {
  559. static const enum AVPixelFormat pix_fmts[] = {
  560. AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
  561. };
  562. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  563. if (!fmts_list)
  564. return AVERROR(ENOMEM);
  565. return ff_set_common_formats(ctx, fmts_list);
  566. }
  567. static const AVFilterPad avfilter_vsrc_testsrc_outputs[] = {
  568. {
  569. .name = "default",
  570. .type = AVMEDIA_TYPE_VIDEO,
  571. .request_frame = request_frame,
  572. .config_props = config_props,
  573. },
  574. { NULL }
  575. };
  576. AVFilter ff_vsrc_testsrc = {
  577. .name = "testsrc",
  578. .description = NULL_IF_CONFIG_SMALL("Generate test pattern."),
  579. .priv_size = sizeof(TestSourceContext),
  580. .priv_class = &testsrc_class,
  581. .init = test_init,
  582. .uninit = uninit,
  583. .query_formats = test_query_formats,
  584. .inputs = NULL,
  585. .outputs = avfilter_vsrc_testsrc_outputs,
  586. };
  587. #endif /* CONFIG_TESTSRC_FILTER */
  588. #if CONFIG_TESTSRC2_FILTER
  589. static const AVOption testsrc2_options[] = {
  590. COMMON_OPTIONS
  591. { "alpha", "set global alpha (opacity)", OFFSET(alpha), AV_OPT_TYPE_INT, {.i64 = 255}, 0, 255, FLAGS },
  592. { NULL }
  593. };
  594. AVFILTER_DEFINE_CLASS(testsrc2);
  595. static void set_color(TestSourceContext *s, FFDrawColor *color, uint32_t argb)
  596. {
  597. uint8_t rgba[4] = { (argb >> 16) & 0xFF,
  598. (argb >> 8) & 0xFF,
  599. (argb >> 0) & 0xFF,
  600. (argb >> 24) & 0xFF, };
  601. ff_draw_color(&s->draw, color, rgba);
  602. }
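/* Map an index in [0, 6*256) to a saturated RGB color sweeping the hue wheel: red -> yellow -> green -> cyan -> blue -> magenta -> red. */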
  603. static uint32_t color_gradient(unsigned index)
  604. {
  605. unsigned si = index & 0xFF, sd = 0xFF - si;
  606. switch (index >> 8) {
  607. case 0: return 0xFF0000 + (si << 8);
  608. case 1: return 0x00FF00 + (sd << 16);
  609. case 2: return 0x00FF00 + (si << 0);
  610. case 3: return 0x0000FF + (sd << 8);
  611. case 4: return 0x0000FF + (si << 16);
  612. case 5: return 0xFF0000 + (sd << 0);
  613. }
  614. av_assert0(0);
  615. }
  616. static void draw_text(TestSourceContext *s, AVFrame *frame, FFDrawColor *color,
  617. int x0, int y0, const uint8_t *text)
  618. {
  619. int x = x0;
  620. for (; *text; text++) {
  621. if (*text == '\n') {
  622. x = x0;
  623. y0 += 16;
  624. continue;
  625. }
  626. ff_blend_mask(&s->draw, color, frame->data, frame->linesize,
  627. frame->width, frame->height,
  628. avpriv_vga16_font + *text * 16, 1, 8, 16, 0, 0, x, y0);
  629. x += 8;
  630. }
  631. }
  632. static void test2_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  633. {
  634. TestSourceContext *s = ctx->priv;
  635. FFDrawColor color;
  636. unsigned alpha = (uint32_t)s->alpha << 24;
  637. /* colored background */
  638. {
  639. unsigned i, x = 0, x2;
  640. x = 0;
  641. for (i = 1; i < 7; i++) {
  642. x2 = av_rescale(i, s->w, 6);
  643. x2 = ff_draw_round_to_sub(&s->draw, 0, 0, x2);
  644. set_color(s, &color, ((i & 1) ? 0xFF0000 : 0) |
  645. ((i & 2) ? 0x00FF00 : 0) |
  646. ((i & 4) ? 0x0000FF : 0) |
  647. alpha);
  648. ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
  649. x, 0, x2 - x, frame->height);
  650. x = x2;
  651. }
  652. }
  653. /* oblique gradient */
  654. /* note: too slow if using blending */
  655. if (s->h >= 64) {
  656. unsigned x, dx, y0, y, g0, g;
  657. dx = ff_draw_round_to_sub(&s->draw, 0, +1, 1);
  658. y0 = av_rescale_q(s->pts, s->time_base, av_make_q(2, s->h - 16));
  659. g0 = av_rescale_q(s->pts, s->time_base, av_make_q(1, 128));
  660. for (x = 0; x < s->w; x += dx) {
  661. g = (av_rescale(x, 6 * 256, s->w) + g0) % (6 * 256);
  662. set_color(s, &color, color_gradient(g) | alpha);
  663. y = y0 + av_rescale(x, s->h / 2, s->w);
  664. y %= 2 * (s->h - 16);
  665. if (y > s->h - 16)
  666. y = 2 * (s->h - 16) - y;
  667. y = ff_draw_round_to_sub(&s->draw, 1, 0, y);
  668. ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
  669. x, y, dx, 16);
  670. }
  671. }
  672. /* top right: draw clock hands */
  673. if (s->w >= 64 && s->h >= 64) {
  674. int l = (FFMIN(s->w, s->h) - 32) >> 1;
  675. int steps = FFMAX(4, l >> 5);
  676. int xc = (s->w >> 2) + (s->w >> 1);
  677. int yc = (s->h >> 2);
  678. int cycle = l << 2;
  679. int pos, xh, yh;
  680. int c, i;
  681. for (c = 0; c < 3; c++) {
  682. set_color(s, &color, (0xBBBBBB ^ (0xFF << (c << 3))) | alpha);
  683. pos = av_rescale_q(s->pts, s->time_base, av_make_q(64 >> (c << 1), cycle)) % cycle;
  684. xh = pos < 1 * l ? pos :
  685. pos < 2 * l ? l :
  686. pos < 3 * l ? 3 * l - pos : 0;
  687. yh = pos < 1 * l ? 0 :
  688. pos < 2 * l ? pos - l :
  689. pos < 3 * l ? l :
  690. cycle - pos;
  691. xh -= l >> 1;
  692. yh -= l >> 1;
  693. for (i = 1; i <= steps; i++) {
  694. int x = av_rescale(xh, i, steps) + xc;
  695. int y = av_rescale(yh, i, steps) + yc;
  696. x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
  697. y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
  698. ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
  699. x, y, 8, 8);
  700. }
  701. }
  702. }
  703. /* bottom left: beating rectangles */
  704. if (s->w >= 64 && s->h >= 64) {
  705. int l = (FFMIN(s->w, s->h) - 16) >> 2;
  706. int cycle = l << 3;
  707. int xc = (s->w >> 2);
  708. int yc = (s->h >> 2) + (s->h >> 1);
  709. int xm1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 8);
  710. int xm2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 8);
  711. int ym1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 8);
  712. int ym2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 8);
  713. int size, step, x1, x2, y1, y2;
  714. size = av_rescale_q(s->pts, s->time_base, av_make_q(4, cycle));
  715. step = size / l;
  716. size %= l;
  717. if (step & 1)
  718. size = l - size;
  719. step = (step >> 1) & 3;
  720. set_color(s, &color, 0xFF808080);
  721. x1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 4 - size);
  722. x2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 4 + size);
  723. y1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 4 - size);
  724. y2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 4 + size);
  725. if (step == 0 || step == 2)
  726. ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
  727. x1, ym1, x2 - x1, ym2 - ym1);
  728. if (step == 1 || step == 2)
  729. ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
  730. xm1, y1, xm2 - xm1, y2 - y1);
  731. if (step == 3)
  732. ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
  733. x1, y1, x2 - x1, y2 - y1);
  734. }
  735. /* bottom right: checker with random noise */
  736. {
  737. unsigned xmin = av_rescale(5, s->w, 8);
  738. unsigned xmax = av_rescale(7, s->w, 8);
  739. unsigned ymin = av_rescale(5, s->h, 8);
  740. unsigned ymax = av_rescale(7, s->h, 8);
  741. unsigned x, y, i, r;
  742. uint8_t alpha[256];
  743. r = s->pts;
  744. for (y = ymin; y + 15 < ymax; y += 16) {
  745. for (x = xmin; x + 15 < xmax; x += 16) {
  746. if ((x ^ y) & 16)
  747. continue;
  748. for (i = 0; i < 256; i++) {
  749. r = r * 1664525 + 1013904223;
  750. alpha[i] = r >> 24;
  751. }
  752. set_color(s, &color, 0xFF00FF80);
  753. ff_blend_mask(&s->draw, &color, frame->data, frame->linesize,
  754. frame->width, frame->height,
  755. alpha, 16, 16, 16, 3, 0, x, y);
  756. }
  757. }
  758. }
  759. /* bouncing square */
  760. if (s->w >= 16 && s->h >= 16) {
  761. unsigned w = s->w - 8;
  762. unsigned h = s->h - 8;
  763. unsigned x = av_rescale_q(s->pts, s->time_base, av_make_q(233, 55 * w)) % (w << 1);
  764. unsigned y = av_rescale_q(s->pts, s->time_base, av_make_q(233, 89 * h)) % (h << 1);
  765. if (x > w)
  766. x = (w << 1) - x;
  767. if (y > h)
  768. y = (h << 1) - y;
  769. x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
  770. y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
  771. set_color(s, &color, 0xFF8000FF);
  772. ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
  773. x, y, 8, 8);
  774. }
775. /* top left: draw frame time and frame number */
  776. {
  777. char buf[256];
  778. unsigned time;
  779. time = av_rescale_q(s->pts, s->time_base, av_make_q(1, 1000)) % 86400000;
  780. set_color(s, &color, 0xC0000000);
  781. ff_blend_rectangle(&s->draw, &color, frame->data, frame->linesize,
  782. frame->width, frame->height,
  783. 2, 2, 100, 36);
  784. set_color(s, &color, 0xFFFF8000);
  785. snprintf(buf, sizeof(buf), "%02d:%02d:%02d.%03d\n%12"PRIi64,
  786. time / 3600000, (time / 60000) % 60, (time / 1000) % 60,
  787. time % 1000, s->pts);
  788. draw_text(s, frame, &color, 4, 4, buf);
  789. }
  790. }
  791. static av_cold int test2_init(AVFilterContext *ctx)
  792. {
  793. TestSourceContext *s = ctx->priv;
  794. s->fill_picture_fn = test2_fill_picture;
  795. return init(ctx);
  796. }
  797. static int test2_query_formats(AVFilterContext *ctx)
  798. {
  799. return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
  800. }
  801. static int test2_config_props(AVFilterLink *inlink)
  802. {
  803. AVFilterContext *ctx = inlink->src;
  804. TestSourceContext *s = ctx->priv;
  805. av_assert0(ff_draw_init(&s->draw, inlink->format, 0) >= 0);
  806. s->w = ff_draw_round_to_sub(&s->draw, 0, -1, s->w);
  807. s->h = ff_draw_round_to_sub(&s->draw, 1, -1, s->h);
  808. if (av_image_check_size(s->w, s->h, 0, ctx) < 0)
  809. return AVERROR(EINVAL);
  810. return config_props(inlink);
  811. }
  812. static const AVFilterPad avfilter_vsrc_testsrc2_outputs[] = {
  813. {
  814. .name = "default",
  815. .type = AVMEDIA_TYPE_VIDEO,
  816. .request_frame = request_frame,
  817. .config_props = test2_config_props,
  818. },
  819. { NULL }
  820. };
  821. AVFilter ff_vsrc_testsrc2 = {
  822. .name = "testsrc2",
  823. .description = NULL_IF_CONFIG_SMALL("Generate another test pattern."),
  824. .priv_size = sizeof(TestSourceContext),
  825. .priv_class = &testsrc2_class,
  826. .init = test2_init,
  827. .uninit = uninit,
  828. .query_formats = test2_query_formats,
  829. .inputs = NULL,
  830. .outputs = avfilter_vsrc_testsrc2_outputs,
  831. };
  832. #endif /* CONFIG_TESTSRC2_FILTER */
  833. #if CONFIG_RGBTESTSRC_FILTER
  834. #define rgbtestsrc_options options
  835. AVFILTER_DEFINE_CLASS(rgbtestsrc);
  836. #define R 0
  837. #define G 1
  838. #define B 2
  839. #define A 3
  840. static void rgbtest_put_pixel(uint8_t *dst, int dst_linesize,
  841. int x, int y, unsigned r, unsigned g, unsigned b, enum AVPixelFormat fmt,
  842. uint8_t rgba_map[4])
  843. {
  844. uint32_t v;
  845. uint8_t *p;
  846. switch (fmt) {
  847. case AV_PIX_FMT_BGR444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r >> 4) << 8) | ((g >> 4) << 4) | (b >> 4); break;
  848. case AV_PIX_FMT_RGB444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b >> 4) << 8) | ((g >> 4) << 4) | (r >> 4); break;
  849. case AV_PIX_FMT_BGR555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<10) | ((g>>3)<<5) | (b>>3); break;
  850. case AV_PIX_FMT_RGB555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<10) | ((g>>3)<<5) | (r>>3); break;
  851. case AV_PIX_FMT_BGR565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<11) | ((g>>2)<<5) | (b>>3); break;
  852. case AV_PIX_FMT_RGB565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<11) | ((g>>2)<<5) | (r>>3); break;
  853. case AV_PIX_FMT_RGB24:
  854. case AV_PIX_FMT_BGR24:
  855. v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8));
  856. p = dst + 3*x + y*dst_linesize;
  857. AV_WL24(p, v);
  858. break;
  859. case AV_PIX_FMT_RGBA:
  860. case AV_PIX_FMT_BGRA:
  861. case AV_PIX_FMT_ARGB:
  862. case AV_PIX_FMT_ABGR:
  863. v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8)) + (255U << (rgba_map[A]*8));
  864. p = dst + 4*x + y*dst_linesize;
  865. AV_WL32(p, v);
  866. break;
  867. }
  868. }
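/* Horizontal ramps: red across the top third of the frame, green across the middle third, blue across the bottom third. */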
  869. static void rgbtest_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  870. {
  871. TestSourceContext *test = ctx->priv;
  872. int x, y, w = frame->width, h = frame->height;
  873. for (y = 0; y < h; y++) {
  874. for (x = 0; x < w; x++) {
  875. int c = 256*x/w;
  876. int r = 0, g = 0, b = 0;
  877. if (3*y < h ) r = c;
  878. else if (3*y < 2*h) g = c;
  879. else b = c;
  880. rgbtest_put_pixel(frame->data[0], frame->linesize[0], x, y, r, g, b,
  881. ctx->outputs[0]->format, test->rgba_map);
  882. }
  883. }
  884. }
  885. static av_cold int rgbtest_init(AVFilterContext *ctx)
  886. {
  887. TestSourceContext *test = ctx->priv;
  888. test->draw_once = 1;
  889. test->fill_picture_fn = rgbtest_fill_picture;
  890. return init(ctx);
  891. }
  892. static int rgbtest_query_formats(AVFilterContext *ctx)
  893. {
  894. static const enum AVPixelFormat pix_fmts[] = {
  895. AV_PIX_FMT_RGBA, AV_PIX_FMT_ARGB, AV_PIX_FMT_BGRA, AV_PIX_FMT_ABGR,
  896. AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB24,
  897. AV_PIX_FMT_RGB444, AV_PIX_FMT_BGR444,
  898. AV_PIX_FMT_RGB565, AV_PIX_FMT_BGR565,
  899. AV_PIX_FMT_RGB555, AV_PIX_FMT_BGR555,
  900. AV_PIX_FMT_NONE
  901. };
  902. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  903. if (!fmts_list)
  904. return AVERROR(ENOMEM);
  905. return ff_set_common_formats(ctx, fmts_list);
  906. }
  907. static int rgbtest_config_props(AVFilterLink *outlink)
  908. {
  909. TestSourceContext *test = outlink->src->priv;
  910. ff_fill_rgba_map(test->rgba_map, outlink->format);
  911. return config_props(outlink);
  912. }
  913. static const AVFilterPad avfilter_vsrc_rgbtestsrc_outputs[] = {
  914. {
  915. .name = "default",
  916. .type = AVMEDIA_TYPE_VIDEO,
  917. .request_frame = request_frame,
  918. .config_props = rgbtest_config_props,
  919. },
  920. { NULL }
  921. };
  922. AVFilter ff_vsrc_rgbtestsrc = {
  923. .name = "rgbtestsrc",
  924. .description = NULL_IF_CONFIG_SMALL("Generate RGB test pattern."),
  925. .priv_size = sizeof(TestSourceContext),
  926. .priv_class = &rgbtestsrc_class,
  927. .init = rgbtest_init,
  928. .uninit = uninit,
  929. .query_formats = rgbtest_query_formats,
  930. .inputs = NULL,
  931. .outputs = avfilter_vsrc_rgbtestsrc_outputs,
  932. };
  933. #endif /* CONFIG_RGBTESTSRC_FILTER */
  934. #if CONFIG_YUVTESTSRC_FILTER
  935. #define yuvtestsrc_options options
  936. AVFILTER_DEFINE_CLASS(yuvtestsrc);
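/* Top third: luma ramp; middle third: U (Cb) ramp; bottom third: V (Cr) ramp; the other two components are held at the mid-scale value. The 16-bit variant below is identical apart from the sample type. */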
  937. static void yuvtest_fill_picture8(AVFilterContext *ctx, AVFrame *frame)
  938. {
  939. int x, y, w = frame->width, h = frame->height / 3;
  940. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
  941. const int factor = 1 << desc->comp[0].depth;
  942. const int mid = 1 << (desc->comp[0].depth - 1);
  943. uint8_t *ydst = frame->data[0];
  944. uint8_t *udst = frame->data[1];
  945. uint8_t *vdst = frame->data[2];
  946. int ylinesize = frame->linesize[0];
  947. int ulinesize = frame->linesize[1];
  948. int vlinesize = frame->linesize[2];
  949. for (y = 0; y < h; y++) {
  950. for (x = 0; x < w; x++) {
  951. int c = factor * x / w;
  952. ydst[x] = c;
  953. udst[x] = mid;
  954. vdst[x] = mid;
  955. }
  956. ydst += ylinesize;
  957. udst += ulinesize;
  958. vdst += vlinesize;
  959. }
  960. h += h;
  961. for (; y < h; y++) {
  962. for (x = 0; x < w; x++) {
  963. int c = factor * x / w;
  964. ydst[x] = mid;
  965. udst[x] = c;
  966. vdst[x] = mid;
  967. }
  968. ydst += ylinesize;
  969. udst += ulinesize;
  970. vdst += vlinesize;
  971. }
  972. for (; y < frame->height; y++) {
  973. for (x = 0; x < w; x++) {
  974. int c = factor * x / w;
  975. ydst[x] = mid;
  976. udst[x] = mid;
  977. vdst[x] = c;
  978. }
  979. ydst += ylinesize;
  980. udst += ulinesize;
  981. vdst += vlinesize;
  982. }
  983. }
  984. static void yuvtest_fill_picture16(AVFilterContext *ctx, AVFrame *frame)
  985. {
  986. int x, y, w = frame->width, h = frame->height / 3;
  987. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
  988. const int factor = 1 << desc->comp[0].depth;
  989. const int mid = 1 << (desc->comp[0].depth - 1);
  990. uint16_t *ydst = (uint16_t *)frame->data[0];
  991. uint16_t *udst = (uint16_t *)frame->data[1];
  992. uint16_t *vdst = (uint16_t *)frame->data[2];
  993. int ylinesize = frame->linesize[0] / 2;
  994. int ulinesize = frame->linesize[1] / 2;
  995. int vlinesize = frame->linesize[2] / 2;
  996. for (y = 0; y < h; y++) {
  997. for (x = 0; x < w; x++) {
  998. int c = factor * x / w;
  999. ydst[x] = c;
  1000. udst[x] = mid;
  1001. vdst[x] = mid;
  1002. }
  1003. ydst += ylinesize;
  1004. udst += ulinesize;
  1005. vdst += vlinesize;
  1006. }
  1007. h += h;
  1008. for (; y < h; y++) {
  1009. for (x = 0; x < w; x++) {
  1010. int c = factor * x / w;
  1011. ydst[x] = mid;
  1012. udst[x] = c;
  1013. vdst[x] = mid;
  1014. }
  1015. ydst += ylinesize;
  1016. udst += ulinesize;
  1017. vdst += vlinesize;
  1018. }
  1019. for (; y < frame->height; y++) {
  1020. for (x = 0; x < w; x++) {
  1021. int c = factor * x / w;
  1022. ydst[x] = mid;
  1023. udst[x] = mid;
  1024. vdst[x] = c;
  1025. }
  1026. ydst += ylinesize;
  1027. udst += ulinesize;
  1028. vdst += vlinesize;
  1029. }
  1030. }
  1031. static av_cold int yuvtest_init(AVFilterContext *ctx)
  1032. {
  1033. TestSourceContext *test = ctx->priv;
  1034. test->draw_once = 1;
  1035. return init(ctx);
  1036. }
  1037. static int yuvtest_query_formats(AVFilterContext *ctx)
  1038. {
  1039. static const enum AVPixelFormat pix_fmts[] = {
  1040. AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVJ444P,
  1041. AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV444P10,
  1042. AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV444P14,
  1043. AV_PIX_FMT_YUV444P16,
  1044. AV_PIX_FMT_NONE
  1045. };
  1046. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  1047. if (!fmts_list)
  1048. return AVERROR(ENOMEM);
  1049. return ff_set_common_formats(ctx, fmts_list);
  1050. }
  1051. static int yuvtest_config_props(AVFilterLink *outlink)
  1052. {
  1053. TestSourceContext *test = outlink->src->priv;
  1054. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);
  1055. test->fill_picture_fn = desc->comp[0].depth > 8 ? yuvtest_fill_picture16 : yuvtest_fill_picture8;
  1056. return config_props(outlink);
  1057. }
  1058. static const AVFilterPad avfilter_vsrc_yuvtestsrc_outputs[] = {
  1059. {
  1060. .name = "default",
  1061. .type = AVMEDIA_TYPE_VIDEO,
  1062. .request_frame = request_frame,
  1063. .config_props = yuvtest_config_props,
  1064. },
  1065. { NULL }
  1066. };
  1067. AVFilter ff_vsrc_yuvtestsrc = {
  1068. .name = "yuvtestsrc",
  1069. .description = NULL_IF_CONFIG_SMALL("Generate YUV test pattern."),
  1070. .priv_size = sizeof(TestSourceContext),
  1071. .priv_class = &yuvtestsrc_class,
  1072. .init = yuvtest_init,
  1073. .uninit = uninit,
  1074. .query_formats = yuvtest_query_formats,
  1075. .inputs = NULL,
  1076. .outputs = avfilter_vsrc_yuvtestsrc_outputs,
  1077. };
  1078. #endif /* CONFIG_YUVTESTSRC_FILTER */
  1079. #if CONFIG_PAL75BARS_FILTER || CONFIG_PAL100BARS_FILTER || CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER
  1080. static const uint8_t rainbow[7][4] = {
  1081. { 180, 128, 128, 255 }, /* 75% white */
  1082. { 162, 44, 142, 255 }, /* 75% yellow */
  1083. { 131, 156, 44, 255 }, /* 75% cyan */
  1084. { 112, 72, 58, 255 }, /* 75% green */
  1085. { 84, 184, 198, 255 }, /* 75% magenta */
  1086. { 65, 100, 212, 255 }, /* 75% red */
  1087. { 35, 212, 114, 255 }, /* 75% blue */
  1088. };
  1089. static const uint8_t rainbow100[7][4] = {
  1090. { 235, 128, 128, 255 }, /* 100% white */
  1091. { 210, 16, 146, 255 }, /* 100% yellow */
  1092. { 170, 166, 16, 255 }, /* 100% cyan */
  1093. { 145, 54, 34, 255 }, /* 100% green */
  1094. { 106, 202, 222, 255 }, /* 100% magenta */
  1095. { 81, 90, 240, 255 }, /* 100% red */
  1096. { 41, 240, 110, 255 }, /* 100% blue */
  1097. };
  1098. static const uint8_t rainbowhd[7][4] = {
  1099. { 180, 128, 128, 255 }, /* 75% white */
  1100. { 168, 44, 136, 255 }, /* 75% yellow */
  1101. { 145, 147, 44, 255 }, /* 75% cyan */
  1102. { 133, 63, 52, 255 }, /* 75% green */
  1103. { 63, 193, 204, 255 }, /* 75% magenta */
  1104. { 51, 109, 212, 255 }, /* 75% red */
  1105. { 28, 212, 120, 255 }, /* 75% blue */
  1106. };
  1107. static const uint8_t wobnair[7][4] = {
  1108. { 35, 212, 114, 255 }, /* 75% blue */
  1109. { 19, 128, 128, 255 }, /* 7.5% intensity black */
  1110. { 84, 184, 198, 255 }, /* 75% magenta */
  1111. { 19, 128, 128, 255 }, /* 7.5% intensity black */
  1112. { 131, 156, 44, 255 }, /* 75% cyan */
  1113. { 19, 128, 128, 255 }, /* 7.5% intensity black */
  1114. { 180, 128, 128, 255 }, /* 75% white */
  1115. };
  1116. static const uint8_t white[4] = { 235, 128, 128, 255 };
  1117. /* pluge pulses */
  1118. static const uint8_t neg4ire[4] = { 7, 128, 128, 255 };
  1119. static const uint8_t pos4ire[4] = { 24, 128, 128, 255 };
  1120. /* fudged Q/-I */
  1121. static const uint8_t i_pixel[4] = { 57, 156, 97, 255 };
  1122. static const uint8_t q_pixel[4] = { 44, 171, 147, 255 };
  1123. static const uint8_t gray40[4] = { 104, 128, 128, 255 };
  1124. static const uint8_t gray15[4] = { 49, 128, 128, 255 };
  1125. static const uint8_t cyan[4] = { 188, 154, 16, 255 };
  1126. static const uint8_t yellow[4] = { 219, 16, 138, 255 };
  1127. static const uint8_t blue[4] = { 32, 240, 118, 255 };
  1128. static const uint8_t red[4] = { 63, 102, 240, 255 };
  1129. static const uint8_t black0[4] = { 16, 128, 128, 255 };
  1130. static const uint8_t black2[4] = { 20, 128, 128, 255 };
  1131. static const uint8_t black4[4] = { 25, 128, 128, 255 };
  1132. static const uint8_t neg2[4] = { 12, 128, 128, 255 };
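/* Fill a w x h bar at (x, y) with the given planar YUV color in every plane, clipping to the frame and accounting for chroma subsampling on planes 1 and 2. */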
  1133. static void draw_bar(TestSourceContext *test, const uint8_t color[4],
  1134. int x, int y, int w, int h,
  1135. AVFrame *frame)
  1136. {
  1137. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
  1138. uint8_t *p, *p0;
  1139. int plane;
  1140. x = FFMIN(x, test->w - 1);
  1141. y = FFMIN(y, test->h - 1);
  1142. w = FFMAX(FFMIN(w, test->w - x), 0);
  1143. h = FFMAX(FFMIN(h, test->h - y), 0);
  1144. av_assert0(x + w <= test->w);
  1145. av_assert0(y + h <= test->h);
  1146. for (plane = 0; frame->data[plane]; plane++) {
  1147. const int c = color[plane];
  1148. const int linesize = frame->linesize[plane];
  1149. int i, px, py, pw, ph;
  1150. if (plane == 1 || plane == 2) {
  1151. px = x >> desc->log2_chroma_w;
  1152. pw = AV_CEIL_RSHIFT(w, desc->log2_chroma_w);
  1153. py = y >> desc->log2_chroma_h;
  1154. ph = AV_CEIL_RSHIFT(h, desc->log2_chroma_h);
  1155. } else {
  1156. px = x;
  1157. pw = w;
  1158. py = y;
  1159. ph = h;
  1160. }
  1161. p0 = p = frame->data[plane] + py * linesize + px;
  1162. memset(p, c, pw);
  1163. p += linesize;
  1164. for (i = 1; i < ph; i++, p += linesize)
  1165. memcpy(p, p0, pw);
  1166. }
  1167. }
  1168. static int smptebars_query_formats(AVFilterContext *ctx)
  1169. {
  1170. static const enum AVPixelFormat pix_fmts[] = {
  1171. AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
  1172. AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
  1173. AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
  1174. AV_PIX_FMT_NONE,
  1175. };
  1176. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  1177. if (!fmts_list)
  1178. return AVERROR(ENOMEM);
  1179. return ff_set_common_formats(ctx, fmts_list);
  1180. }
  1181. static const AVFilterPad smptebars_outputs[] = {
  1182. {
  1183. .name = "default",
  1184. .type = AVMEDIA_TYPE_VIDEO,
  1185. .request_frame = request_frame,
  1186. .config_props = config_props,
  1187. },
  1188. { NULL }
  1189. };
  1190. #if CONFIG_PAL75BARS_FILTER
  1191. #define pal75bars_options options
  1192. AVFILTER_DEFINE_CLASS(pal75bars);
  1193. static void pal75bars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
  1194. {
  1195. TestSourceContext *test = ctx->priv;
  1196. int r_w, i, x = 0;
  1197. const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);
  1198. picref->color_range = AVCOL_RANGE_MPEG;
  1199. picref->colorspace = AVCOL_SPC_BT470BG;
  1200. r_w = FFALIGN((test->w + 7) / 8, 1 << pixdesc->log2_chroma_w);
  1201. draw_bar(test, white, x, 0, r_w, test->h, picref);
  1202. x += r_w;
  1203. for (i = 1; i < 7; i++) {
  1204. draw_bar(test, rainbow[i], x, 0, r_w, test->h, picref);
  1205. x += r_w;
  1206. }
  1207. draw_bar(test, black0, x, 0, r_w, test->h, picref);
  1208. }
  1209. static av_cold int pal75bars_init(AVFilterContext *ctx)
  1210. {
  1211. TestSourceContext *test = ctx->priv;
  1212. test->fill_picture_fn = pal75bars_fill_picture;
  1213. test->draw_once = 1;
  1214. return init(ctx);
  1215. }
  1216. AVFilter ff_vsrc_pal75bars = {
  1217. .name = "pal75bars",
  1218. .description = NULL_IF_CONFIG_SMALL("Generate PAL 75% color bars."),
  1219. .priv_size = sizeof(TestSourceContext),
  1220. .priv_class = &pal75bars_class,
  1221. .init = pal75bars_init,
  1222. .uninit = uninit,
  1223. .query_formats = smptebars_query_formats,
  1224. .inputs = NULL,
  1225. .outputs = smptebars_outputs,
  1226. };
  1227. #endif /* CONFIG_PAL75BARS_FILTER */
  1228. #if CONFIG_PAL100BARS_FILTER
  1229. #define pal100bars_options options
  1230. AVFILTER_DEFINE_CLASS(pal100bars);
  1231. static void pal100bars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
  1232. {
  1233. TestSourceContext *test = ctx->priv;
  1234. int r_w, i, x = 0;
  1235. const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);
  1236. picref->color_range = AVCOL_RANGE_MPEG;
  1237. picref->colorspace = AVCOL_SPC_BT470BG;
  1238. r_w = FFALIGN((test->w + 7) / 8, 1 << pixdesc->log2_chroma_w);
  1239. for (i = 0; i < 7; i++) {
  1240. draw_bar(test, rainbow100[i], x, 0, r_w, test->h, picref);
  1241. x += r_w;
  1242. }
  1243. draw_bar(test, black0, x, 0, r_w, test->h, picref);
  1244. }
  1245. static av_cold int pal100bars_init(AVFilterContext *ctx)
  1246. {
  1247. TestSourceContext *test = ctx->priv;
  1248. test->fill_picture_fn = pal100bars_fill_picture;
  1249. test->draw_once = 1;
  1250. return init(ctx);
  1251. }
  1252. AVFilter ff_vsrc_pal100bars = {
  1253. .name = "pal100bars",
  1254. .description = NULL_IF_CONFIG_SMALL("Generate PAL 100% color bars."),
  1255. .priv_size = sizeof(TestSourceContext),
  1256. .priv_class = &pal100bars_class,
  1257. .init = pal100bars_init,
  1258. .uninit = uninit,
  1259. .query_formats = smptebars_query_formats,
  1260. .inputs = NULL,
  1261. .outputs = smptebars_outputs,
  1262. };
  1263. #endif /* CONFIG_PAL100BARS_FILTER */
  1264. #if CONFIG_SMPTEBARS_FILTER
  1265. #define smptebars_options options
  1266. AVFILTER_DEFINE_CLASS(smptebars);
  1267. static void smptebars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
  1268. {
  1269. TestSourceContext *test = ctx->priv;
  1270. int r_w, r_h, w_h, p_w, p_h, i, tmp, x = 0;
  1271. const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);
  1272. picref->colorspace = AVCOL_SPC_BT470BG;
  1273. r_w = FFALIGN((test->w + 6) / 7, 1 << pixdesc->log2_chroma_w);
  1274. r_h = FFALIGN(test->h * 2 / 3, 1 << pixdesc->log2_chroma_h);
  1275. w_h = FFALIGN(test->h * 3 / 4 - r_h, 1 << pixdesc->log2_chroma_h);
  1276. p_w = FFALIGN(r_w * 5 / 4, 1 << pixdesc->log2_chroma_w);
  1277. p_h = test->h - w_h - r_h;
  1278. for (i = 0; i < 7; i++) {
  1279. draw_bar(test, rainbow[i], x, 0, r_w, r_h, picref);
  1280. draw_bar(test, wobnair[i], x, r_h, r_w, w_h, picref);
  1281. x += r_w;
  1282. }
  1283. x = 0;
  1284. draw_bar(test, i_pixel, x, r_h + w_h, p_w, p_h, picref);
  1285. x += p_w;
  1286. draw_bar(test, white, x, r_h + w_h, p_w, p_h, picref);
  1287. x += p_w;
  1288. draw_bar(test, q_pixel, x, r_h + w_h, p_w, p_h, picref);
  1289. x += p_w;
  1290. tmp = FFALIGN(5 * r_w - x, 1 << pixdesc->log2_chroma_w);
  1291. draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
  1292. x += tmp;
  1293. tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
  1294. draw_bar(test, neg4ire, x, r_h + w_h, tmp, p_h, picref);
  1295. x += tmp;
  1296. draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
  1297. x += tmp;
  1298. draw_bar(test, pos4ire, x, r_h + w_h, tmp, p_h, picref);
  1299. x += tmp;
  1300. draw_bar(test, black0, x, r_h + w_h, test->w - x, p_h, picref);
  1301. }
  1302. static av_cold int smptebars_init(AVFilterContext *ctx)
  1303. {
  1304. TestSourceContext *test = ctx->priv;
  1305. test->fill_picture_fn = smptebars_fill_picture;
  1306. test->draw_once = 1;
  1307. return init(ctx);
  1308. }
  1309. AVFilter ff_vsrc_smptebars = {
  1310. .name = "smptebars",
  1311. .description = NULL_IF_CONFIG_SMALL("Generate SMPTE color bars."),
  1312. .priv_size = sizeof(TestSourceContext),
  1313. .priv_class = &smptebars_class,
  1314. .init = smptebars_init,
  1315. .uninit = uninit,
  1316. .query_formats = smptebars_query_formats,
  1317. .inputs = NULL,
  1318. .outputs = smptebars_outputs,
  1319. };
  1320. #endif /* CONFIG_SMPTEBARS_FILTER */
  1321. #if CONFIG_SMPTEHDBARS_FILTER
  1322. #define smptehdbars_options options
  1323. AVFILTER_DEFINE_CLASS(smptehdbars);
  1324. static void smptehdbars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
  1325. {
  1326. TestSourceContext *test = ctx->priv;
  1327. int d_w, r_w, r_h, l_w, i, tmp, x = 0, y = 0;
  1328. const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);
  1329. picref->colorspace = AVCOL_SPC_BT709;
  1330. d_w = FFALIGN(test->w / 8, 1 << pixdesc->log2_chroma_w);
  1331. r_h = FFALIGN(test->h * 7 / 12, 1 << pixdesc->log2_chroma_h);
  1332. draw_bar(test, gray40, x, 0, d_w, r_h, picref);
  1333. x += d_w;
  1334. r_w = FFALIGN((((test->w + 3) / 4) * 3) / 7, 1 << pixdesc->log2_chroma_w);
  1335. for (i = 0; i < 7; i++) {
  1336. draw_bar(test, rainbowhd[i], x, 0, r_w, r_h, picref);
  1337. x += r_w;
  1338. }
  1339. draw_bar(test, gray40, x, 0, test->w - x, r_h, picref);
  1340. y = r_h;
  1341. r_h = FFALIGN(test->h / 12, 1 << pixdesc->log2_chroma_h);
  1342. draw_bar(test, cyan, 0, y, d_w, r_h, picref);
  1343. x = d_w;
  1344. draw_bar(test, i_pixel, x, y, r_w, r_h, picref);
  1345. x += r_w;
  1346. tmp = r_w * 6;
  1347. draw_bar(test, rainbowhd[0], x, y, tmp, r_h, picref);
  1348. x += tmp;
  1349. l_w = x;
  1350. draw_bar(test, blue, x, y, test->w - x, r_h, picref);
  1351. y += r_h;
  1352. draw_bar(test, yellow, 0, y, d_w, r_h, picref);
  1353. x = d_w;
  1354. draw_bar(test, q_pixel, x, y, r_w, r_h, picref);
  1355. x += r_w;
  1356. for (i = 0; i < tmp; i += 1 << pixdesc->log2_chroma_w) {
  1357. uint8_t yramp[4] = {0};
  1358. yramp[0] = i * 255 / tmp;
  1359. yramp[1] = 128;
  1360. yramp[2] = 128;
  1361. yramp[3] = 255;
  1362. draw_bar(test, yramp, x, y, 1 << pixdesc->log2_chroma_w, r_h, picref);
  1363. x += 1 << pixdesc->log2_chroma_w;
  1364. }
  1365. draw_bar(test, red, x, y, test->w - x, r_h, picref);
  1366. y += r_h;
  1367. draw_bar(test, gray15, 0, y, d_w, test->h - y, picref);
  1368. x = d_w;
  1369. tmp = FFALIGN(r_w * 3 / 2, 1 << pixdesc->log2_chroma_w);
  1370. draw_bar(test, black0, x, y, tmp, test->h - y, picref);
  1371. x += tmp;
  1372. tmp = FFALIGN(r_w * 2, 1 << pixdesc->log2_chroma_w);
  1373. draw_bar(test, white, x, y, tmp, test->h - y, picref);
  1374. x += tmp;
  1375. tmp = FFALIGN(r_w * 5 / 6, 1 << pixdesc->log2_chroma_w);
  1376. draw_bar(test, black0, x, y, tmp, test->h - y, picref);
  1377. x += tmp;
  1378. tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
  1379. draw_bar(test, neg2, x, y, tmp, test->h - y, picref);
  1380. x += tmp;
  1381. draw_bar(test, black0, x, y, tmp, test->h - y, picref);
  1382. x += tmp;
  1383. draw_bar(test, black2, x, y, tmp, test->h - y, picref);
  1384. x += tmp;
  1385. draw_bar(test, black0, x, y, tmp, test->h - y, picref);
  1386. x += tmp;
  1387. draw_bar(test, black4, x, y, tmp, test->h - y, picref);
  1388. x += tmp;
  1389. r_w = l_w - x;
  1390. draw_bar(test, black0, x, y, r_w, test->h - y, picref);
  1391. x += r_w;
  1392. draw_bar(test, gray15, x, y, test->w - x, test->h - y, picref);
  1393. }
  1394. static av_cold int smptehdbars_init(AVFilterContext *ctx)
  1395. {
  1396. TestSourceContext *test = ctx->priv;
  1397. test->fill_picture_fn = smptehdbars_fill_picture;
  1398. test->draw_once = 1;
  1399. return init(ctx);
  1400. }
  1401. AVFilter ff_vsrc_smptehdbars = {
  1402. .name = "smptehdbars",
  1403. .description = NULL_IF_CONFIG_SMALL("Generate SMPTE HD color bars."),
  1404. .priv_size = sizeof(TestSourceContext),
  1405. .priv_class = &smptehdbars_class,
  1406. .init = smptehdbars_init,
  1407. .uninit = uninit,
  1408. .query_formats = smptebars_query_formats,
  1409. .inputs = NULL,
  1410. .outputs = smptebars_outputs,
  1411. };
  1412. #endif /* CONFIG_SMPTEHDBARS_FILTER */
1413. #endif /* CONFIG_PAL75BARS_FILTER || CONFIG_PAL100BARS_FILTER || CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER */
  1414. #if CONFIG_ALLYUV_FILTER
  1415. static const AVOption allyuv_options[] = {
  1416. COMMON_OPTIONS_NOSIZE
  1417. { NULL }
  1418. };
  1419. AVFILTER_DEFINE_CLASS(allyuv);
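/* Fill the fixed 4096x4096 frame (2^24 pixels) with a pattern intended to cover the full range of 8-bit YUV combinations. */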
  1420. static void allyuv_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  1421. {
  1422. const int ys = frame->linesize[0];
  1423. const int us = frame->linesize[1];
  1424. const int vs = frame->linesize[2];
  1425. int x, y, j;
  1426. for (y = 0; y < 4096; y++) {
  1427. for (x = 0; x < 2048; x++) {
  1428. frame->data[0][y * ys + x] = ((x / 8) % 256);
  1429. frame->data[0][y * ys + 4095 - x] = ((x / 8) % 256);
  1430. }
  1431. for (x = 0; x < 2048; x+=8) {
  1432. for (j = 0; j < 8; j++) {
  1433. frame->data[1][vs * y + x + j] = (y%16 + (j % 8) * 16);
  1434. frame->data[1][vs * y + 4095 - x - j] = (128 + y%16 + (j % 8) * 16);
  1435. }
  1436. }
  1437. for (x = 0; x < 4096; x++)
  1438. frame->data[2][y * us + x] = 256 * y / 4096;
  1439. }
  1440. }
  1441. static av_cold int allyuv_init(AVFilterContext *ctx)
  1442. {
  1443. TestSourceContext *test = ctx->priv;
  1444. test->w = test->h = 4096;
  1445. test->draw_once = 1;
  1446. test->fill_picture_fn = allyuv_fill_picture;
  1447. return init(ctx);
  1448. }
  1449. static int allyuv_query_formats(AVFilterContext *ctx)
  1450. {
  1451. static const enum AVPixelFormat pix_fmts[] = {
  1452. AV_PIX_FMT_YUV444P, AV_PIX_FMT_GBRP,
  1453. AV_PIX_FMT_NONE
  1454. };
  1455. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  1456. if (!fmts_list)
  1457. return AVERROR(ENOMEM);
  1458. return ff_set_common_formats(ctx, fmts_list);
  1459. }
  1460. static const AVFilterPad avfilter_vsrc_allyuv_outputs[] = {
  1461. {
  1462. .name = "default",
  1463. .type = AVMEDIA_TYPE_VIDEO,
  1464. .request_frame = request_frame,
  1465. .config_props = config_props,
  1466. },
  1467. { NULL }
  1468. };
  1469. AVFilter ff_vsrc_allyuv = {
  1470. .name = "allyuv",
1471. .description = NULL_IF_CONFIG_SMALL("Generate all YUV colors."),
  1472. .priv_size = sizeof(TestSourceContext),
  1473. .priv_class = &allyuv_class,
  1474. .init = allyuv_init,
  1475. .uninit = uninit,
  1476. .query_formats = allyuv_query_formats,
  1477. .inputs = NULL,
  1478. .outputs = avfilter_vsrc_allyuv_outputs,
  1479. };
  1480. #endif /* CONFIG_ALLYUV_FILTER */
  1481. #if CONFIG_ALLRGB_FILTER
  1482. static const AVOption allrgb_options[] = {
  1483. COMMON_OPTIONS_NOSIZE
  1484. { NULL }
  1485. };
  1486. AVFILTER_DEFINE_CLASS(allrgb);
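/* 4096x4096 frame enumerating all 2^24 RGB triplets: R is the low byte of x, G the low byte of y, and B combines the high nibbles of x and y. */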
  1487. static void allrgb_fill_picture(AVFilterContext *ctx, AVFrame *frame)
  1488. {
  1489. unsigned x, y;
  1490. const int linesize = frame->linesize[0];
  1491. uint8_t *line = frame->data[0];
  1492. for (y = 0; y < 4096; y++) {
  1493. uint8_t *dst = line;
  1494. for (x = 0; x < 4096; x++) {
  1495. *dst++ = x;
  1496. *dst++ = y;
  1497. *dst++ = (x >> 8) | ((y >> 8) << 4);
  1498. }
  1499. line += linesize;
  1500. }
  1501. }
  1502. static av_cold int allrgb_init(AVFilterContext *ctx)
  1503. {
  1504. TestSourceContext *test = ctx->priv;
  1505. test->w = test->h = 4096;
  1506. test->draw_once = 1;
  1507. test->fill_picture_fn = allrgb_fill_picture;
  1508. return init(ctx);
  1509. }
  1510. static int allrgb_config_props(AVFilterLink *outlink)
  1511. {
  1512. TestSourceContext *test = outlink->src->priv;
  1513. ff_fill_rgba_map(test->rgba_map, outlink->format);
  1514. return config_props(outlink);
  1515. }
  1516. static int allrgb_query_formats(AVFilterContext *ctx)
  1517. {
  1518. static const enum AVPixelFormat pix_fmts[] = {
  1519. AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
  1520. };
  1521. AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
  1522. if (!fmts_list)
  1523. return AVERROR(ENOMEM);
  1524. return ff_set_common_formats(ctx, fmts_list);
  1525. }
  1526. static const AVFilterPad avfilter_vsrc_allrgb_outputs[] = {
  1527. {
  1528. .name = "default",
  1529. .type = AVMEDIA_TYPE_VIDEO,
  1530. .request_frame = request_frame,
  1531. .config_props = allrgb_config_props,
  1532. },
  1533. { NULL }
  1534. };
  1535. AVFilter ff_vsrc_allrgb = {
  1536. .name = "allrgb",
  1537. .description = NULL_IF_CONFIG_SMALL("Generate all RGB colors."),
  1538. .priv_size = sizeof(TestSourceContext),
  1539. .priv_class = &allrgb_class,
  1540. .init = allrgb_init,
  1541. .uninit = uninit,
  1542. .query_formats = allrgb_query_formats,
  1543. .inputs = NULL,
  1544. .outputs = avfilter_vsrc_allrgb_outputs,
  1545. };
  1546. #endif /* CONFIG_ALLRGB_FILTER */