/*
 * Copyright (c) 2007 Nicolas George <nicolas.george@normalesup.org>
 * Copyright (c) 2011 Stefano Sabatini
 * Copyright (c) 2012 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Misc test sources.
 *
 * testsrc is based on the test pattern generator demuxer by Nicolas George:
 * http://lists.ffmpeg.org/pipermail/ffmpeg-devel/2007-October/037845.html
 *
 * rgbtestsrc is ported from MPlayer libmpcodecs/vf_rgbtest.c by
 * Michael Niedermayer.
 *
 * allyuv, smptebars and smptehdbars are by Paul B Mahol.
 */

#include <float.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/ffmath.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/parseutils.h"
#include "libavutil/xga_font_data.h"
#include "avfilter.h"
#include "drawutils.h"
#include "filters.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

typedef struct TestSourceContext {
    const AVClass *class;
    int w, h;
    unsigned int nb_frame;
    AVRational time_base, frame_rate;
    int64_t pts;
    int64_t duration;    ///< duration expressed in microseconds
    AVRational sar;      ///< sample aspect ratio
    int draw_once;       ///< draw only the first frame, always put out the same picture
    int draw_once_reset; ///< draw only the first frame or in case of reset
    AVFrame *picref;     ///< cached reference containing the painted picture
    void (* fill_picture_fn)(AVFilterContext *ctx, AVFrame *frame);

    /* only used by testsrc */
    int nb_decimals;

    /* only used by testsrc2 */
    int alpha;

    /* only used by color */
    FFDrawContext draw;
    FFDrawColor color;
    uint8_t color_rgba[4];

    /* only used by rgbtest */
    uint8_t rgba_map[4];
    int depth;

    /* only used by haldclut */
    int level;
} TestSourceContext;

#define OFFSET(x) offsetof(TestSourceContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
#define FLAGSR AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM

#define SIZE_OPTIONS \
    { "size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\
    { "s",    "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\

#define COMMON_OPTIONS_NOSIZE \
    { "rate",     "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
    { "r",        "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
    { "duration", "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "d",        "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "sar",      "set video sample aspect ratio", OFFSET(sar), AV_OPT_TYPE_RATIONAL, {.dbl= 1}, 0, INT_MAX, FLAGS },

#define COMMON_OPTIONS SIZE_OPTIONS COMMON_OPTIONS_NOSIZE

#define NOSIZE_OPTIONS_OFFSET 2
/* Filters using COMMON_OPTIONS_NOSIZE also use the following options
 * via &options[NOSIZE_OPTIONS_OFFSET]. So don't break it. */
static const AVOption options[] = {
    COMMON_OPTIONS
    { NULL }
};
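
/* A source that has no "size"/"s" option can reuse the shared table above by
 * skipping its first two entries, e.g.
 *     #define allyuv_options &options[NOSIZE_OPTIONS_OFFSET]
 * as the allyuv and allrgb sources below do. */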

static av_cold int init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->time_base = av_inv_q(test->frame_rate);
    test->nb_frame = 0;
    test->pts = 0;

    av_log(ctx, AV_LOG_VERBOSE, "size:%dx%d rate:%d/%d duration:%f sar:%d/%d\n",
           test->w, test->h, test->frame_rate.num, test->frame_rate.den,
           test->duration < 0 ? -1 : (double)test->duration/1000000,
           test->sar.num, test->sar.den);
    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    av_frame_free(&test->picref);
}

static int config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    outlink->w = test->w;
    outlink->h = test->h;
    outlink->sample_aspect_ratio = test->sar;
    outlink->frame_rate = test->frame_rate;
    outlink->time_base = test->time_base;

    return 0;
}

static int activate(AVFilterContext *ctx)
{
    AVFilterLink *outlink = ctx->outputs[0];
    TestSourceContext *test = ctx->priv;
    AVFrame *frame;

    if (!ff_outlink_frame_wanted(outlink))
        return FFERROR_NOT_READY;
    if (test->duration >= 0 &&
        av_rescale_q(test->pts, test->time_base, AV_TIME_BASE_Q) >= test->duration) {
        ff_outlink_set_status(outlink, AVERROR_EOF, test->pts);
        return 0;
    }

    if (test->draw_once) {
        if (test->draw_once_reset) {
            av_frame_free(&test->picref);
            test->draw_once_reset = 0;
        }
        if (!test->picref) {
            test->picref =
                ff_get_video_buffer(outlink, test->w, test->h);
            if (!test->picref)
                return AVERROR(ENOMEM);
            test->fill_picture_fn(outlink->src, test->picref);
        }
        frame = av_frame_clone(test->picref);
    } else
        frame = ff_get_video_buffer(outlink, test->w, test->h);

    if (!frame)
        return AVERROR(ENOMEM);
    frame->pts = test->pts;
    frame->key_frame = 1;
    frame->interlaced_frame = 0;
    frame->pict_type = AV_PICTURE_TYPE_I;
    frame->sample_aspect_ratio = test->sar;
    if (!test->draw_once)
        test->fill_picture_fn(outlink->src, frame);

    test->pts++;
    test->nb_frame++;

    return ff_filter_frame(outlink, frame);
}

#if CONFIG_COLOR_FILTER

static const AVOption color_options[] = {
    { "color", "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, 0, 0, FLAGSR },
    { "c",     "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, 0, 0, FLAGSR },
    COMMON_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(color);

static void color_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    ff_fill_rectangle(&test->draw, &test->color,
                      picref->data, picref->linesize,
                      0, 0, test->w, test->h);
}

static av_cold int color_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;
    test->fill_picture_fn = color_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

static int color_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}

static int color_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    TestSourceContext *test = ctx->priv;
    int ret;

    ff_draw_init(&test->draw, inlink->format, 0);
    ff_draw_color(&test->draw, &test->color, test->color_rgba);

    test->w = ff_draw_round_to_sub(&test->draw, 0, -1, test->w);
    test->h = ff_draw_round_to_sub(&test->draw, 1, -1, test->h);
    if (av_image_check_size(test->w, test->h, 0, ctx) < 0)
        return AVERROR(EINVAL);

    if ((ret = config_props(inlink)) < 0)
        return ret;

    return 0;
}

static int color_process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                                 char *res, int res_len, int flags)
{
    TestSourceContext *test = ctx->priv;
    int ret;

    ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags);
    if (ret < 0)
        return ret;

    ff_draw_color(&test->draw, &test->color, test->color_rgba);
    test->draw_once_reset = 1;
    return 0;
}

static const AVFilterPad color_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = color_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_color = {
    .name = "color",
    .description = NULL_IF_CONFIG_SMALL("Provide an uniformly colored input."),
    .priv_class = &color_class,
    .priv_size = sizeof(TestSourceContext),
    .init = color_init,
    .uninit = uninit,
    .activate = activate,
    .query_formats = color_query_formats,
    .inputs = NULL,
    .outputs = color_outputs,
    .process_command = color_process_command,
};

#endif /* CONFIG_COLOR_FILTER */

#if CONFIG_HALDCLUTSRC_FILTER

static const AVOption haldclutsrc_options[] = {
    { "level", "set level", OFFSET(level), AV_OPT_TYPE_INT, {.i64 = 6}, 2, 16, FLAGS },
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(haldclutsrc);

static void haldclutsrc_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    int i, j, k, x = 0, y = 0, is16bit = 0, step;
    uint32_t alpha = 0;
    const TestSourceContext *hc = ctx->priv;
    int level = hc->level;
    float scale;
    const int w = frame->width;
    const int h = frame->height;
    const uint8_t *data = frame->data[0];
    const int linesize = frame->linesize[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t rgba_map[4];

    av_assert0(w == h && w == level*level*level);

    ff_fill_rgba_map(rgba_map, frame->format);

    switch (frame->format) {
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_BGR48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_BGRA64:
        is16bit = 1;
        alpha = 0xffff;
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        alpha = 0xff;
        break;
    }

    step = av_get_padded_bits_per_pixel(desc) >> (3 + is16bit);
    scale = ((float)(1 << (8*(is16bit+1))) - 1) / (level*level - 1);

#define LOAD_CLUT(nbits) do { \
    uint##nbits##_t *dst = ((uint##nbits##_t *)(data + y*linesize)) + x*step; \
    dst[rgba_map[0]] = av_clip_uint##nbits(i * scale); \
    dst[rgba_map[1]] = av_clip_uint##nbits(j * scale); \
    dst[rgba_map[2]] = av_clip_uint##nbits(k * scale); \
    if (step == 4) \
        dst[rgba_map[3]] = alpha; \
} while (0)
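
    /* An identity Hald CLUT of level N has an N*N-step ramp per channel,
     * (N*N)^3 entries in total, stored as an N^3 x N^3 image; level is
     * squared below so the i/j/k loops sweep the red, green and blue ramps
     * respectively, filling the frame row by row. */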
    level *= level;

    for (k = 0; k < level; k++) {
        for (j = 0; j < level; j++) {
            for (i = 0; i < level; i++) {
                if (!is16bit)
                    LOAD_CLUT(8);
                else
                    LOAD_CLUT(16);
                if (++x == w) {
                    x = 0;
                    y++;
                }
            }
        }
    }
}

static av_cold int haldclutsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *hc = ctx->priv;
    hc->fill_picture_fn = haldclutsrc_fill_picture;
    hc->draw_once = 1;
    return init(ctx);
}

static int haldclutsrc_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24,
        AV_PIX_FMT_RGBA, AV_PIX_FMT_BGRA,
        AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR,
        AV_PIX_FMT_0RGB, AV_PIX_FMT_0BGR,
        AV_PIX_FMT_RGB0, AV_PIX_FMT_BGR0,
        AV_PIX_FMT_RGB48, AV_PIX_FMT_BGR48,
        AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int haldclutsrc_config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    TestSourceContext *hc = ctx->priv;

    hc->w = hc->h = hc->level * hc->level * hc->level;
    return config_props(outlink);
}

static const AVFilterPad haldclutsrc_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = haldclutsrc_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_haldclutsrc = {
    .name = "haldclutsrc",
    .description = NULL_IF_CONFIG_SMALL("Provide an identity Hald CLUT."),
    .priv_class = &haldclutsrc_class,
    .priv_size = sizeof(TestSourceContext),
    .init = haldclutsrc_init,
    .uninit = uninit,
    .query_formats = haldclutsrc_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = haldclutsrc_outputs,
};
#endif /* CONFIG_HALDCLUTSRC_FILTER */

#if CONFIG_NULLSRC_FILTER

#define nullsrc_options options
AVFILTER_DEFINE_CLASS(nullsrc);

static void nullsrc_fill_picture(AVFilterContext *ctx, AVFrame *picref) { }

static av_cold int nullsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = nullsrc_fill_picture;
    return init(ctx);
}

static const AVFilterPad nullsrc_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props,
    },
    { NULL },
};

AVFilter ff_vsrc_nullsrc = {
    .name = "nullsrc",
    .description = NULL_IF_CONFIG_SMALL("Null video source, return unprocessed video frames."),
    .init = nullsrc_init,
    .uninit = uninit,
    .activate = activate,
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &nullsrc_class,
    .inputs = NULL,
    .outputs = nullsrc_outputs,
};

#endif /* CONFIG_NULLSRC_FILTER */

#if CONFIG_TESTSRC_FILTER

static const AVOption testsrc_options[] = {
    COMMON_OPTIONS
    { "decimals", "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
    { "n",        "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64=0}, 0, 17, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc);

/**
 * Fill a rectangle with value val.
 *
 * @param val the RGB value to set
 * @param dst pointer to the destination buffer to fill
 * @param dst_linesize linesize of destination
 * @param segment_width width of the segment
 * @param x horizontal coordinate where to draw the rectangle in the destination buffer
 * @param y vertical coordinate where to draw the rectangle in the destination buffer
 * @param w width of the rectangle to draw, expressed as a number of segment_width units
 * @param h height of the rectangle to draw, expressed as a number of segment_width units
 */
static void draw_rectangle(unsigned val, uint8_t *dst, int dst_linesize, int segment_width,
                           int x, int y, int w, int h)
{
    int i;
    int step = 3;

    dst += segment_width * (step * x + y * dst_linesize);
    w *= segment_width * step;
    h *= segment_width;
    for (i = 0; i < h; i++) {
        memset(dst, val, w);
        dst += dst_linesize;
    }
}

static void draw_digit(int digit, uint8_t *dst, int dst_linesize,
                       int segment_width)
{
#define TOP_HBAR 1
#define MID_HBAR 2
#define BOT_HBAR 4
#define LEFT_TOP_VBAR 8
#define LEFT_BOT_VBAR 16
#define RIGHT_TOP_VBAR 32
#define RIGHT_BOT_VBAR 64
    struct segments {
        int x, y, w, h;
    } segments[] = {
        { 1,  0, 5, 1 }, /* TOP_HBAR */
        { 1,  6, 5, 1 }, /* MID_HBAR */
        { 1, 12, 5, 1 }, /* BOT_HBAR */
        { 0,  1, 1, 5 }, /* LEFT_TOP_VBAR */
        { 0,  7, 1, 5 }, /* LEFT_BOT_VBAR */
        { 6,  1, 1, 5 }, /* RIGHT_TOP_VBAR */
        { 6,  7, 1, 5 }  /* RIGHT_BOT_VBAR */
    };
    static const unsigned char masks[10] = {
        /* 0 */ TOP_HBAR |BOT_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 1 */ RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 2 */ TOP_HBAR|MID_HBAR|BOT_HBAR|LEFT_BOT_VBAR |RIGHT_TOP_VBAR,
        /* 3 */ TOP_HBAR|MID_HBAR|BOT_HBAR |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 4 */ MID_HBAR |LEFT_TOP_VBAR |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 5 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR |RIGHT_BOT_VBAR,
        /* 6 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR |RIGHT_BOT_VBAR,
        /* 7 */ TOP_HBAR |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 8 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 9 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
    };
    unsigned mask = masks[digit];
    int i;

    draw_rectangle(0, dst, dst_linesize, segment_width, 0, 0, 8, 13);
    for (i = 0; i < FF_ARRAY_ELEMS(segments); i++)
        if (mask & (1<<i))
            draw_rectangle(255, dst, dst_linesize, segment_width,
                           segments[i].x, segments[i].y, segments[i].w, segments[i].h);
}

#define GRADIENT_SIZE (6 * 256)

static void test_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    uint8_t *p, *p0;
    int x, y;
    int color, color_rest;
    int icolor;
    int radius;
    int quad0, quad;
    int dquad_x, dquad_y;
    int grad, dgrad, rgrad, drgrad;
    int seg_size;
    int second;
    int i;
    uint8_t *data = frame->data[0];
    int width = frame->width;
    int height = frame->height;

    /* draw colored bars and circle */
    radius = (width + height) / 4;
    quad0 = width * width / 4 + height * height / 4 - radius * radius;
    dquad_y = 1 - height;
    p0 = data;
    for (y = 0; y < height; y++) {
        p = p0;
        color = 0;
        color_rest = 0;
        quad = quad0;
        dquad_x = 1 - width;
        for (x = 0; x < width; x++) {
            icolor = color;
            if (quad < 0)
                icolor ^= 7;
            quad += dquad_x;
            dquad_x += 2;
            *(p++) = icolor & 1 ? 255 : 0;
            *(p++) = icolor & 2 ? 255 : 0;
            *(p++) = icolor & 4 ? 255 : 0;
            color_rest += 8;
            if (color_rest >= width) {
                color_rest -= width;
                color++;
            }
        }
        quad0 += dquad_y;
        dquad_y += 2;
        p0 += frame->linesize[0];
    }

    /* draw sliding color line */
    p0 = p = data + frame->linesize[0] * (height * 3/4);
    grad = (256 * test->nb_frame * test->time_base.num / test->time_base.den) %
        GRADIENT_SIZE;
    rgrad = 0;
    dgrad = GRADIENT_SIZE / width;
    drgrad = GRADIENT_SIZE % width;
    for (x = 0; x < width; x++) {
        *(p++) =
            grad < 256 || grad >= 5 * 256 ? 255 :
            grad >= 2 * 256 && grad < 4 * 256 ? 0 :
            grad < 2 * 256 ? 2 * 256 - 1 - grad : grad - 4 * 256;
        *(p++) =
            grad >= 4 * 256 ? 0 :
            grad >= 1 * 256 && grad < 3 * 256 ? 255 :
            grad < 1 * 256 ? grad : 4 * 256 - 1 - grad;
        *(p++) =
            grad < 2 * 256 ? 0 :
            grad >= 3 * 256 && grad < 5 * 256 ? 255 :
            grad < 3 * 256 ? grad - 2 * 256 : 6 * 256 - 1 - grad;
        grad += dgrad;
        rgrad += drgrad;
        if (rgrad >= GRADIENT_SIZE) {
            grad++;
            rgrad -= GRADIENT_SIZE;
        }
        if (grad >= GRADIENT_SIZE)
            grad -= GRADIENT_SIZE;
    }
    p = p0;
    for (y = height / 8; y > 0; y--) {
        memcpy(p+frame->linesize[0], p, 3 * width);
        p += frame->linesize[0];
    }

    /* draw digits */
    seg_size = width / 80;
    if (seg_size >= 1 && height >= 13 * seg_size) {
        int64_t p10decimals = 1;
        double time = av_q2d(test->time_base) * test->nb_frame *
            ff_exp10(test->nb_decimals);
        if (time >= INT_MAX)
            return;

        for (x = 0; x < test->nb_decimals; x++)
            p10decimals *= 10;

        second = av_rescale_rnd(test->nb_frame * test->time_base.num, p10decimals, test->time_base.den, AV_ROUND_ZERO);
        x = width - (width - seg_size * 64) / 2;
        y = (height - seg_size * 13) / 2;
        p = data + (x*3 + y * frame->linesize[0]);
        for (i = 0; i < 8; i++) {
            p -= 3 * 8 * seg_size;
            draw_digit(second % 10, p, frame->linesize[0], seg_size);
            second /= 10;
            if (second == 0)
                break;
        }
    }
}

static av_cold int test_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = test_fill_picture;
    return init(ctx);
}

static int test_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_testsrc_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_testsrc = {
    .name = "testsrc",
    .description = NULL_IF_CONFIG_SMALL("Generate test pattern."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &testsrc_class,
    .init = test_init,
    .uninit = uninit,
    .query_formats = test_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = avfilter_vsrc_testsrc_outputs,
};

#endif /* CONFIG_TESTSRC_FILTER */

#if CONFIG_TESTSRC2_FILTER

static const AVOption testsrc2_options[] = {
    COMMON_OPTIONS
    { "alpha", "set global alpha (opacity)", OFFSET(alpha), AV_OPT_TYPE_INT, {.i64 = 255}, 0, 255, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc2);

static void set_color(TestSourceContext *s, FFDrawColor *color, uint32_t argb)
{
    uint8_t rgba[4] = { (argb >> 16) & 0xFF,
                        (argb >>  8) & 0xFF,
                        (argb >>  0) & 0xFF,
                        (argb >> 24) & 0xFF, };
    ff_draw_color(&s->draw, color, rgba);
}
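
/* Map an index in [0, 6*256) to a color on the RGB hue wheel:
 * red -> yellow -> green -> cyan -> blue -> magenta -> back to red. */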
static uint32_t color_gradient(unsigned index)
{
    unsigned si = index & 0xFF, sd = 0xFF - si;

    switch (index >> 8) {
    case 0: return 0xFF0000 + (si << 8);
    case 1: return 0x00FF00 + (sd << 16);
    case 2: return 0x00FF00 + (si << 0);
    case 3: return 0x0000FF + (sd << 8);
    case 4: return 0x0000FF + (si << 16);
    case 5: return 0xFF0000 + (sd << 0);
    }
    av_assert0(0);
}
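
/* Render text with the built-in 8x16 VGA font; a '\n' in the string starts
 * a new 16-pixel-high line at the original x0. */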
static void draw_text(TestSourceContext *s, AVFrame *frame, FFDrawColor *color,
                      int x0, int y0, const uint8_t *text)
{
    int x = x0;

    for (; *text; text++) {
        if (*text == '\n') {
            x = x0;
            y0 += 16;
            continue;
        }
        ff_blend_mask(&s->draw, color, frame->data, frame->linesize,
                      frame->width, frame->height,
                      avpriv_vga16_font + *text * 16, 1, 8, 16, 0, 0, x, y0);
        x += 8;
    }
}

static void test2_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *s = ctx->priv;
    FFDrawColor color;
    unsigned alpha = (uint32_t)s->alpha << 24;

    /* colored background */
    {
        unsigned i, x = 0, x2;

        x = 0;
        for (i = 1; i < 7; i++) {
            x2 = av_rescale(i, s->w, 6);
            x2 = ff_draw_round_to_sub(&s->draw, 0, 0, x2);
            set_color(s, &color, ((i & 1) ? 0xFF0000 : 0) |
                                 ((i & 2) ? 0x00FF00 : 0) |
                                 ((i & 4) ? 0x0000FF : 0) |
                                 alpha);
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x, 0, x2 - x, frame->height);
            x = x2;
        }
    }

    /* oblique gradient */
    /* note: too slow if using blending */
    if (s->h >= 64) {
        unsigned x, dx, y0, y, g0, g;

        dx = ff_draw_round_to_sub(&s->draw, 0, +1, 1);
        y0 = av_rescale_q(s->pts, s->time_base, av_make_q(2, s->h - 16));
        g0 = av_rescale_q(s->pts, s->time_base, av_make_q(1, 128));
        for (x = 0; x < s->w; x += dx) {
            g = (av_rescale(x, 6 * 256, s->w) + g0) % (6 * 256);
            set_color(s, &color, color_gradient(g) | alpha);
            y = y0 + av_rescale(x, s->h / 2, s->w);
            y %= 2 * (s->h - 16);
            if (y > s->h - 16)
                y = 2 * (s->h - 16) - y;
            y = ff_draw_round_to_sub(&s->draw, 1, 0, y);
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x, y, dx, 16);
        }
    }

    /* top right: draw clock hands */
    if (s->w >= 64 && s->h >= 64) {
        int l = (FFMIN(s->w, s->h) - 32) >> 1;
        int steps = FFMAX(4, l >> 5);
        int xc = (s->w >> 2) + (s->w >> 1);
        int yc = (s->h >> 2);
        int cycle = l << 2;
        int pos, xh, yh;
        int c, i;

        for (c = 0; c < 3; c++) {
            set_color(s, &color, (0xBBBBBB ^ (0xFF << (c << 3))) | alpha);
            pos = av_rescale_q(s->pts, s->time_base, av_make_q(64 >> (c << 1), cycle)) % cycle;
            xh = pos < 1 * l ? pos :
                 pos < 2 * l ? l :
                 pos < 3 * l ? 3 * l - pos : 0;
            yh = pos < 1 * l ? 0 :
                 pos < 2 * l ? pos - l :
                 pos < 3 * l ? l :
                               cycle - pos;
            xh -= l >> 1;
            yh -= l >> 1;
            for (i = 1; i <= steps; i++) {
                int x = av_rescale(xh, i, steps) + xc;
                int y = av_rescale(yh, i, steps) + yc;

                x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
                y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
                ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                                  x, y, 8, 8);
            }
        }
    }

    /* bottom left: beating rectangles */
    if (s->w >= 64 && s->h >= 64) {
        int l = (FFMIN(s->w, s->h) - 16) >> 2;
        int cycle = l << 3;
        int xc = (s->w >> 2);
        int yc = (s->h >> 2) + (s->h >> 1);
        int xm1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 8);
        int xm2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 8);
        int ym1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 8);
        int ym2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 8);
        int size, step, x1, x2, y1, y2;

        size = av_rescale_q(s->pts, s->time_base, av_make_q(4, cycle));
        step = size / l;
        size %= l;
        if (step & 1)
            size = l - size;
        step = (step >> 1) & 3;
        set_color(s, &color, 0xFF808080);
        x1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 4 - size);
        x2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 4 + size);
        y1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 4 - size);
        y2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 4 + size);
        if (step == 0 || step == 2)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x1, ym1, x2 - x1, ym2 - ym1);
        if (step == 1 || step == 2)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              xm1, y1, xm2 - xm1, y2 - y1);
        if (step == 3)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x1, y1, x2 - x1, y2 - y1);
    }

    /* bottom right: checker with random noise */
    {
        unsigned xmin = av_rescale(5, s->w, 8);
        unsigned xmax = av_rescale(7, s->w, 8);
        unsigned ymin = av_rescale(5, s->h, 8);
        unsigned ymax = av_rescale(7, s->h, 8);
        unsigned x, y, i, r;
        uint8_t alpha[256];

        r = s->pts;
        for (y = ymin; y + 15 < ymax; y += 16) {
            for (x = xmin; x + 15 < xmax; x += 16) {
                if ((x ^ y) & 16)
                    continue;
                for (i = 0; i < 256; i++) {
                    r = r * 1664525 + 1013904223;
                    alpha[i] = r >> 24;
                }
                set_color(s, &color, 0xFF00FF80);
                ff_blend_mask(&s->draw, &color, frame->data, frame->linesize,
                              frame->width, frame->height,
                              alpha, 16, 16, 16, 3, 0, x, y);
            }
        }
    }

    /* bouncing square */
    if (s->w >= 16 && s->h >= 16) {
        unsigned w = s->w - 8;
        unsigned h = s->h - 8;
        unsigned x = av_rescale_q(s->pts, s->time_base, av_make_q(233, 55 * w)) % (w << 1);
        unsigned y = av_rescale_q(s->pts, s->time_base, av_make_q(233, 89 * h)) % (h << 1);

        if (x > w)
            x = (w << 1) - x;
        if (y > h)
            y = (h << 1) - y;
        x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
        y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
        set_color(s, &color, 0xFF8000FF);
        ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                          x, y, 8, 8);
    }

    /* top right: draw frame time and frame number */
    {
        char buf[256];
        unsigned time;

        time = av_rescale_q(s->pts, s->time_base, av_make_q(1, 1000)) % 86400000;
        set_color(s, &color, 0xC0000000);
        ff_blend_rectangle(&s->draw, &color, frame->data, frame->linesize,
                           frame->width, frame->height,
                           2, 2, 100, 36);
        set_color(s, &color, 0xFFFF8000);
        snprintf(buf, sizeof(buf), "%02d:%02d:%02d.%03d\n%12"PRIi64,
                 time / 3600000, (time / 60000) % 60, (time / 1000) % 60,
                 time % 1000, s->pts);
        draw_text(s, frame, &color, 4, 4, buf);
    }
}

static av_cold int test2_init(AVFilterContext *ctx)
{
    TestSourceContext *s = ctx->priv;

    s->fill_picture_fn = test2_fill_picture;
    return init(ctx);
}

static int test2_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}

static int test2_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    TestSourceContext *s = ctx->priv;

    av_assert0(ff_draw_init(&s->draw, inlink->format, 0) >= 0);
    s->w = ff_draw_round_to_sub(&s->draw, 0, -1, s->w);
    s->h = ff_draw_round_to_sub(&s->draw, 1, -1, s->h);
    if (av_image_check_size(s->w, s->h, 0, ctx) < 0)
        return AVERROR(EINVAL);
    return config_props(inlink);
}

static const AVFilterPad avfilter_vsrc_testsrc2_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = test2_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_testsrc2 = {
    .name = "testsrc2",
    .description = NULL_IF_CONFIG_SMALL("Generate another test pattern."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &testsrc2_class,
    .init = test2_init,
    .uninit = uninit,
    .query_formats = test2_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = avfilter_vsrc_testsrc2_outputs,
};

#endif /* CONFIG_TESTSRC2_FILTER */

#if CONFIG_RGBTESTSRC_FILTER

#define rgbtestsrc_options options
AVFILTER_DEFINE_CLASS(rgbtestsrc);

#define R 0
#define G 1
#define B 2
#define A 3

static void rgbtest_put_pixel(uint8_t *dstp[4], int dst_linesizep[4],
                              int x, int y, unsigned r, unsigned g, unsigned b, enum AVPixelFormat fmt,
                              uint8_t rgba_map[4])
{
    uint8_t *dst = dstp[0];
    int dst_linesize = dst_linesizep[0];
    uint32_t v;
    uint8_t *p;
    uint16_t *p16;

    switch (fmt) {
    case AV_PIX_FMT_BGR444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r >> 4) << 8) | ((g >> 4) << 4) | (b >> 4); break;
    case AV_PIX_FMT_RGB444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b >> 4) << 8) | ((g >> 4) << 4) | (r >> 4); break;
    case AV_PIX_FMT_BGR555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<10) | ((g>>3)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<10) | ((g>>3)<<5) | (r>>3); break;
    case AV_PIX_FMT_BGR565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<11) | ((g>>2)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<11) | ((g>>2)<<5) | (r>>3); break;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8));
        p = dst + 3*x + y*dst_linesize;
        AV_WL24(p, v);
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8)) + (255U << (rgba_map[A]*8));
        p = dst + 4*x + y*dst_linesize;
        AV_WL32(p, v);
        break;
    case AV_PIX_FMT_GBRP:
        p = dstp[0] + x + y * dst_linesizep[0];
        p[0] = g;
        p = dstp[1] + x + y * dst_linesizep[1];
        p[0] = b;
        p = dstp[2] + x + y * dst_linesizep[2];
        p[0] = r;
        break; /* without this break, the 8-bit GBRP case would fall through to the 16-bit stores below */
    case AV_PIX_FMT_GBRP9:
    case AV_PIX_FMT_GBRP10:
    case AV_PIX_FMT_GBRP12:
    case AV_PIX_FMT_GBRP14:
    case AV_PIX_FMT_GBRP16:
        p16 = (uint16_t *)(dstp[0] + x*2 + y * dst_linesizep[0]);
        p16[0] = g;
        p16 = (uint16_t *)(dstp[1] + x*2 + y * dst_linesizep[1]);
        p16[0] = b;
        p16 = (uint16_t *)(dstp[2] + x*2 + y * dst_linesizep[2]);
        p16[0] = r;
        break;
    }
}

static void rgbtest_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    int x, y, w = frame->width, h = frame->height;

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = (1 << FFMAX(test->depth, 8))*x/w;
            int r = 0, g = 0, b = 0;

            if (3*y < h) r = c;
            else if (3*y < 2*h) g = c;
            else b = c;

            rgbtest_put_pixel(frame->data, frame->linesize, x, y, r, g, b,
                              ctx->outputs[0]->format, test->rgba_map);
        }
    }
}

static av_cold int rgbtest_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->draw_once = 1;
    test->fill_picture_fn = rgbtest_fill_picture;
    return init(ctx);
}

static int rgbtest_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGBA, AV_PIX_FMT_ARGB, AV_PIX_FMT_BGRA, AV_PIX_FMT_ABGR,
        AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB24,
        AV_PIX_FMT_RGB444, AV_PIX_FMT_BGR444,
        AV_PIX_FMT_RGB565, AV_PIX_FMT_BGR565,
        AV_PIX_FMT_RGB555, AV_PIX_FMT_BGR555,
        AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
        AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int rgbtest_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);

    test->depth = desc->comp[0].depth;
    ff_fill_rgba_map(test->rgba_map, outlink->format);
    return config_props(outlink);
}

static const AVFilterPad avfilter_vsrc_rgbtestsrc_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = rgbtest_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_rgbtestsrc = {
    .name = "rgbtestsrc",
    .description = NULL_IF_CONFIG_SMALL("Generate RGB test pattern."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &rgbtestsrc_class,
    .init = rgbtest_init,
    .uninit = uninit,
    .query_formats = rgbtest_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = avfilter_vsrc_rgbtestsrc_outputs,
};

#endif /* CONFIG_RGBTESTSRC_FILTER */

#if CONFIG_YUVTESTSRC_FILTER

#define yuvtestsrc_options options
AVFILTER_DEFINE_CLASS(yuvtestsrc);

static void yuvtest_fill_picture8(AVFilterContext *ctx, AVFrame *frame)
{
    int x, y, w = frame->width, h = frame->height / 3;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    const int factor = 1 << desc->comp[0].depth;
    const int mid = 1 << (desc->comp[0].depth - 1);
    uint8_t *ydst = frame->data[0];
    uint8_t *udst = frame->data[1];
    uint8_t *vdst = frame->data[2];
    int ylinesize = frame->linesize[0];
    int ulinesize = frame->linesize[1];
    int vlinesize = frame->linesize[2];

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = c;
            udst[x] = mid;
            vdst[x] = mid;
        }
        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    h += h;
    for (; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = c;
            vdst[x] = mid;
        }
        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    for (; y < frame->height; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = mid;
            vdst[x] = c;
        }
        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }
}

static void yuvtest_fill_picture16(AVFilterContext *ctx, AVFrame *frame)
{
    int x, y, w = frame->width, h = frame->height / 3;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    const int factor = 1 << desc->comp[0].depth;
    const int mid = 1 << (desc->comp[0].depth - 1);
    uint16_t *ydst = (uint16_t *)frame->data[0];
    uint16_t *udst = (uint16_t *)frame->data[1];
    uint16_t *vdst = (uint16_t *)frame->data[2];
    int ylinesize = frame->linesize[0] / 2;
    int ulinesize = frame->linesize[1] / 2;
    int vlinesize = frame->linesize[2] / 2;

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = c;
            udst[x] = mid;
            vdst[x] = mid;
        }
        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    h += h;
    for (; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = c;
            vdst[x] = mid;
        }
        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    for (; y < frame->height; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = mid;
            vdst[x] = c;
        }
        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }
}

static av_cold int yuvtest_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->draw_once = 1;
    return init(ctx);
}

static int yuvtest_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVJ444P,
        AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV444P10,
        AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV444P14,
        AV_PIX_FMT_YUV444P16,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int yuvtest_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);

    test->fill_picture_fn = desc->comp[0].depth > 8 ? yuvtest_fill_picture16 : yuvtest_fill_picture8;
    return config_props(outlink);
}

static const AVFilterPad avfilter_vsrc_yuvtestsrc_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = yuvtest_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_yuvtestsrc = {
    .name = "yuvtestsrc",
    .description = NULL_IF_CONFIG_SMALL("Generate YUV test pattern."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &yuvtestsrc_class,
    .init = yuvtest_init,
    .uninit = uninit,
    .query_formats = yuvtest_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = avfilter_vsrc_yuvtestsrc_outputs,
};

#endif /* CONFIG_YUVTESTSRC_FILTER */

#if CONFIG_PAL75BARS_FILTER || CONFIG_PAL100BARS_FILTER || CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER
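
/* The bar colors below are limited-range 8-bit YCbCr triplets plus alpha
 * (Y, Cb, Cr, A); draw_bar() writes one component per plane. */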
static const uint8_t rainbow[7][4] = {
    { 180, 128, 128, 255 }, /* 75% white */
    { 162,  44, 142, 255 }, /* 75% yellow */
    { 131, 156,  44, 255 }, /* 75% cyan */
    { 112,  72,  58, 255 }, /* 75% green */
    {  84, 184, 198, 255 }, /* 75% magenta */
    {  65, 100, 212, 255 }, /* 75% red */
    {  35, 212, 114, 255 }, /* 75% blue */
};

static const uint8_t rainbow100[7][4] = {
    { 235, 128, 128, 255 }, /* 100% white */
    { 210,  16, 146, 255 }, /* 100% yellow */
    { 170, 166,  16, 255 }, /* 100% cyan */
    { 145,  54,  34, 255 }, /* 100% green */
    { 106, 202, 222, 255 }, /* 100% magenta */
    {  81,  90, 240, 255 }, /* 100% red */
    {  41, 240, 110, 255 }, /* 100% blue */
};

static const uint8_t rainbowhd[7][4] = {
    { 180, 128, 128, 255 }, /* 75% white */
    { 168,  44, 136, 255 }, /* 75% yellow */
    { 145, 147,  44, 255 }, /* 75% cyan */
    { 133,  63,  52, 255 }, /* 75% green */
    {  63, 193, 204, 255 }, /* 75% magenta */
    {  51, 109, 212, 255 }, /* 75% red */
    {  28, 212, 120, 255 }, /* 75% blue */
};

static const uint8_t wobnair[7][4] = {
    {  35, 212, 114, 255 }, /* 75% blue */
    {  19, 128, 128, 255 }, /* 7.5% intensity black */
    {  84, 184, 198, 255 }, /* 75% magenta */
    {  19, 128, 128, 255 }, /* 7.5% intensity black */
    { 131, 156,  44, 255 }, /* 75% cyan */
    {  19, 128, 128, 255 }, /* 7.5% intensity black */
    { 180, 128, 128, 255 }, /* 75% white */
};

static const uint8_t white[4] = { 235, 128, 128, 255 };

/* pluge pulses */
static const uint8_t neg4ire[4] = { 7, 128, 128, 255 };
static const uint8_t pos4ire[4] = { 24, 128, 128, 255 };

/* fudged Q/-I */
static const uint8_t i_pixel[4] = { 57, 156, 97, 255 };
static const uint8_t q_pixel[4] = { 44, 171, 147, 255 };

static const uint8_t gray40[4] = { 104, 128, 128, 255 };
static const uint8_t gray15[4] = { 49, 128, 128, 255 };
static const uint8_t cyan[4] = { 188, 154, 16, 255 };
static const uint8_t yellow[4] = { 219, 16, 138, 255 };
static const uint8_t blue[4] = { 32, 240, 118, 255 };
static const uint8_t red[4] = { 63, 102, 240, 255 };
static const uint8_t black0[4] = { 16, 128, 128, 255 };
static const uint8_t black2[4] = { 20, 128, 128, 255 };
static const uint8_t black4[4] = { 25, 128, 128, 255 };
static const uint8_t neg2[4] = { 12, 128, 128, 255 };

static void draw_bar(TestSourceContext *test, const uint8_t color[4],
                     int x, int y, int w, int h,
                     AVFrame *frame)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t *p, *p0;
    int plane;

    x = FFMIN(x, test->w - 1);
    y = FFMIN(y, test->h - 1);
    w = FFMAX(FFMIN(w, test->w - x), 0);
    h = FFMAX(FFMIN(h, test->h - y), 0);

    av_assert0(x + w <= test->w);
    av_assert0(y + h <= test->h);

    for (plane = 0; frame->data[plane]; plane++) {
        const int c = color[plane];
        const int linesize = frame->linesize[plane];
        int i, px, py, pw, ph;

        if (plane == 1 || plane == 2) {
            px = x >> desc->log2_chroma_w;
            pw = AV_CEIL_RSHIFT(w, desc->log2_chroma_w);
            py = y >> desc->log2_chroma_h;
            ph = AV_CEIL_RSHIFT(h, desc->log2_chroma_h);
        } else {
            px = x;
            pw = w;
            py = y;
            ph = h;
        }

        p0 = p = frame->data[plane] + py * linesize + px;
        memset(p, c, pw);
        p += linesize;
        for (i = 1; i < ph; i++, p += linesize)
            memcpy(p, p0, pw);
    }
}

static int smptebars_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
        AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
        AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad smptebars_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props,
    },
    { NULL }
};

#if CONFIG_PAL75BARS_FILTER

#define pal75bars_options options
AVFILTER_DEFINE_CLASS(pal75bars);

static void pal75bars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, i, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->color_range = AVCOL_RANGE_MPEG;
    picref->colorspace = AVCOL_SPC_BT470BG;

    r_w = FFALIGN((test->w + 7) / 8, 1 << pixdesc->log2_chroma_w);

    draw_bar(test, white, x, 0, r_w, test->h, picref);
    x += r_w;
    for (i = 1; i < 7; i++) {
        draw_bar(test, rainbow[i], x, 0, r_w, test->h, picref);
        x += r_w;
    }
    draw_bar(test, black0, x, 0, r_w, test->h, picref);
}

static av_cold int pal75bars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = pal75bars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_pal75bars = {
    .name = "pal75bars",
    .description = NULL_IF_CONFIG_SMALL("Generate PAL 75% color bars."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &pal75bars_class,
    .init = pal75bars_init,
    .uninit = uninit,
    .query_formats = smptebars_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = smptebars_outputs,
};

#endif /* CONFIG_PAL75BARS_FILTER */

#if CONFIG_PAL100BARS_FILTER

#define pal100bars_options options
AVFILTER_DEFINE_CLASS(pal100bars);

static void pal100bars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, i, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->color_range = AVCOL_RANGE_MPEG;
    picref->colorspace = AVCOL_SPC_BT470BG;

    r_w = FFALIGN((test->w + 7) / 8, 1 << pixdesc->log2_chroma_w);

    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbow100[i], x, 0, r_w, test->h, picref);
        x += r_w;
    }
    draw_bar(test, black0, x, 0, r_w, test->h, picref);
}

static av_cold int pal100bars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = pal100bars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_pal100bars = {
    .name = "pal100bars",
    .description = NULL_IF_CONFIG_SMALL("Generate PAL 100% color bars."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &pal100bars_class,
    .init = pal100bars_init,
    .uninit = uninit,
    .query_formats = smptebars_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = smptebars_outputs,
};

#endif /* CONFIG_PAL100BARS_FILTER */

#if CONFIG_SMPTEBARS_FILTER

#define smptebars_options options
AVFILTER_DEFINE_CLASS(smptebars);

static void smptebars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, r_h, w_h, p_w, p_h, i, tmp, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->colorspace = AVCOL_SPC_BT470BG;

    r_w = FFALIGN((test->w + 6) / 7, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 2 / 3, 1 << pixdesc->log2_chroma_h);
    w_h = FFALIGN(test->h * 3 / 4 - r_h, 1 << pixdesc->log2_chroma_h);
    p_w = FFALIGN(r_w * 5 / 4, 1 << pixdesc->log2_chroma_w);
    p_h = test->h - w_h - r_h;

    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbow[i], x, 0, r_w, r_h, picref);
        draw_bar(test, wobnair[i], x, r_h, r_w, w_h, picref);
        x += r_w;
    }
    x = 0;
    draw_bar(test, i_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, white, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, q_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    tmp = FFALIGN(5 * r_w - x, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, pos4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, test->w - x, p_h, picref);
}

static av_cold int smptebars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = smptebars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_smptebars = {
    .name = "smptebars",
    .description = NULL_IF_CONFIG_SMALL("Generate SMPTE color bars."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &smptebars_class,
    .init = smptebars_init,
    .uninit = uninit,
    .query_formats = smptebars_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = smptebars_outputs,
};

#endif /* CONFIG_SMPTEBARS_FILTER */

#if CONFIG_SMPTEHDBARS_FILTER

#define smptehdbars_options options
AVFILTER_DEFINE_CLASS(smptehdbars);

static void smptehdbars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int d_w, r_w, r_h, l_w, i, tmp, x = 0, y = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->colorspace = AVCOL_SPC_BT709;

    d_w = FFALIGN(test->w / 8, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 7 / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, gray40, x, 0, d_w, r_h, picref);
    x += d_w;

    r_w = FFALIGN((((test->w + 3) / 4) * 3) / 7, 1 << pixdesc->log2_chroma_w);
    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbowhd[i], x, 0, r_w, r_h, picref);
        x += r_w;
    }
    draw_bar(test, gray40, x, 0, test->w - x, r_h, picref);
    y = r_h;
    r_h = FFALIGN(test->h / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, cyan, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, i_pixel, x, y, r_w, r_h, picref);
    x += r_w;
    tmp = r_w * 6;
    draw_bar(test, rainbowhd[0], x, y, tmp, r_h, picref);
    x += tmp;
    l_w = x;
    draw_bar(test, blue, x, y, test->w - x, r_h, picref);
    y += r_h;
    draw_bar(test, yellow, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, q_pixel, x, y, r_w, r_h, picref);
    x += r_w;

    for (i = 0; i < tmp; i += 1 << pixdesc->log2_chroma_w) {
        uint8_t yramp[4] = {0};

        yramp[0] = i * 255 / tmp;
        yramp[1] = 128;
        yramp[2] = 128;
        yramp[3] = 255;

        draw_bar(test, yramp, x, y, 1 << pixdesc->log2_chroma_w, r_h, picref);
        x += 1 << pixdesc->log2_chroma_w;
    }

    draw_bar(test, red, x, y, test->w - x, r_h, picref);
    y += r_h;
    draw_bar(test, gray15, 0, y, d_w, test->h - y, picref);
    x = d_w;
    tmp = FFALIGN(r_w * 3 / 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, white, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 5 / 6, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black4, x, y, tmp, test->h - y, picref);
    x += tmp;
    r_w = l_w - x;
    draw_bar(test, black0, x, y, r_w, test->h - y, picref);
    x += r_w;
    draw_bar(test, gray15, x, y, test->w - x, test->h - y, picref);
}

static av_cold int smptehdbars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = smptehdbars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_smptehdbars = {
    .name = "smptehdbars",
    .description = NULL_IF_CONFIG_SMALL("Generate SMPTE HD color bars."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &smptehdbars_class,
    .init = smptehdbars_init,
    .uninit = uninit,
    .query_formats = smptebars_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = smptebars_outputs,
};

#endif /* CONFIG_SMPTEHDBARS_FILTER */
#endif /* CONFIG_PAL75BARS_FILTER || CONFIG_PAL100BARS_FILTER || CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER */

#if CONFIG_ALLYUV_FILTER

#define allyuv_options &options[NOSIZE_OPTIONS_OFFSET]
AVFILTER_DEFINE_CLASS(allyuv);

static void allyuv_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    const int ys = frame->linesize[0];
    const int us = frame->linesize[1];
    const int vs = frame->linesize[2];
    int x, y, j;

    for (y = 0; y < 4096; y++) {
        for (x = 0; x < 2048; x++) {
            frame->data[0][y * ys + x] = ((x / 8) % 256);
            frame->data[0][y * ys + 4095 - x] = ((x / 8) % 256);
        }

        for (x = 0; x < 2048; x+=8) {
            for (j = 0; j < 8; j++) {
                frame->data[1][vs * y + x + j] = (y%16 + (j % 8) * 16);
                frame->data[1][vs * y + 4095 - x - j] = (128 + y%16 + (j % 8) * 16);
            }
        }

        for (x = 0; x < 4096; x++)
            frame->data[2][y * us + x] = 256 * y / 4096;
    }
}

static av_cold int allyuv_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->w = test->h = 4096;
    test->draw_once = 1;
    test->fill_picture_fn = allyuv_fill_picture;
    return init(ctx);
}

static int allyuv_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_GBRP,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_allyuv_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_allyuv = {
    .name = "allyuv",
    .description = NULL_IF_CONFIG_SMALL("Generate all yuv colors."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &allyuv_class,
    .init = allyuv_init,
    .uninit = uninit,
    .query_formats = allyuv_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = avfilter_vsrc_allyuv_outputs,
};

#endif /* CONFIG_ALLYUV_FILTER */

#if CONFIG_ALLRGB_FILTER

#define allrgb_options &options[NOSIZE_OPTIONS_OFFSET]
AVFILTER_DEFINE_CLASS(allrgb);

static void allrgb_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    unsigned x, y;
    const int linesize = frame->linesize[0];
    uint8_t *line = frame->data[0];

    for (y = 0; y < 4096; y++) {
        uint8_t *dst = line;

        for (x = 0; x < 4096; x++) {
            *dst++ = x;
            *dst++ = y;
            *dst++ = (x >> 8) | ((y >> 8) << 4);
        }
        line += linesize;
    }
}

static av_cold int allrgb_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->w = test->h = 4096;
    test->draw_once = 1;
    test->fill_picture_fn = allrgb_fill_picture;
    return init(ctx);
}

static int allrgb_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    ff_fill_rgba_map(test->rgba_map, outlink->format);
    return config_props(outlink);
}

static int allrgb_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_allrgb_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = allrgb_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_allrgb = {
    .name = "allrgb",
    .description = NULL_IF_CONFIG_SMALL("Generate all RGB colors."),
    .priv_size = sizeof(TestSourceContext),
    .priv_class = &allrgb_class,
    .init = allrgb_init,
    .uninit = uninit,
    .query_formats = allrgb_query_formats,
    .activate = activate,
    .inputs = NULL,
    .outputs = avfilter_vsrc_allrgb_outputs,
};

#endif /* CONFIG_ALLRGB_FILTER */