/*
 * Copyright (c) 2007 Nicolas George <nicolas.george@normalesup.org>
 * Copyright (c) 2011 Stefano Sabatini
 * Copyright (c) 2012 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Misc test sources.
 *
 * testsrc is based on the test pattern generator demuxer by Nicolas George:
 * http://lists.ffmpeg.org/pipermail/ffmpeg-devel/2007-October/037845.html
 *
 * rgbtestsrc is ported from MPlayer libmpcodecs/vf_rgbtest.c by
 * Michael Niedermayer.
 *
 * allyuv, smptebars and smptehdbars are by Paul B Mahol.
 */
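
/*
 * Illustrative usage (not part of this file): the sources below are usually
 * instantiated through the lavfi input device, with option names matching
 * the AVOption tables in this file, e.g.
 *   ffmpeg -f lavfi -i testsrc=size=1280x720:rate=30:duration=5 out.mp4
 *   ffmpeg -f lavfi -i smptehdbars=size=1920x1080 -frames:v 1 bars.png
 */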
#include <float.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/ffmath.h"
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/parseutils.h"
#include "libavutil/xga_font_data.h"
#include "avfilter.h"
#include "drawutils.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
typedef struct TestSourceContext {
    const AVClass *class;
    int w, h;
    unsigned int nb_frame;
    AVRational time_base, frame_rate;
    int64_t pts;
    int64_t duration;           ///< duration expressed in microseconds
    AVRational sar;             ///< sample aspect ratio
    int draw_once;              ///< draw only the first frame, always put out the same picture
    int draw_once_reset;        ///< draw only the first frame or in case of reset
    AVFrame *picref;            ///< cached reference containing the painted picture

    void (* fill_picture_fn)(AVFilterContext *ctx, AVFrame *frame);

    /* only used by testsrc */
    int nb_decimals;

    /* only used by color */
    FFDrawContext draw;
    FFDrawColor color;
    uint8_t color_rgba[4];

    /* only used by rgbtest */
    uint8_t rgba_map[4];

    /* only used by haldclut */
    int level;
} TestSourceContext;

#define OFFSET(x) offsetof(TestSourceContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

#define SIZE_OPTIONS \
    { "size",     "set video size",     OFFSET(w),  AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\
    { "s",        "set video size",     OFFSET(w),  AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS },\

#define COMMON_OPTIONS_NOSIZE \
    { "rate",     "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
    { "r",        "set video rate",     OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS },\
    { "duration", "set video duration", OFFSET(duration),   AV_OPT_TYPE_DURATION,   {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "d",        "set video duration", OFFSET(duration),   AV_OPT_TYPE_DURATION,   {.i64 = -1}, -1, INT64_MAX, FLAGS },\
    { "sar",      "set video sample aspect ratio", OFFSET(sar), AV_OPT_TYPE_RATIONAL, {.dbl = 1}, 0, INT_MAX, FLAGS },

#define COMMON_OPTIONS SIZE_OPTIONS COMMON_OPTIONS_NOSIZE

static const AVOption options[] = {
    COMMON_OPTIONS
    { NULL }
};
static av_cold int init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->time_base = av_inv_q(test->frame_rate);
    test->nb_frame = 0;
    test->pts = 0;

    av_log(ctx, AV_LOG_VERBOSE, "size:%dx%d rate:%d/%d duration:%f sar:%d/%d\n",
           test->w, test->h, test->frame_rate.num, test->frame_rate.den,
           test->duration < 0 ? -1 : (double)test->duration/1000000,
           test->sar.num, test->sar.den);
    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    av_frame_free(&test->picref);
}

static int config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    outlink->w = test->w;
    outlink->h = test->h;
    outlink->sample_aspect_ratio = test->sar;
    outlink->frame_rate = test->frame_rate;
    outlink->time_base = test->time_base;

    return 0;
}

static int request_frame(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;
    AVFrame *frame;

    if (test->duration >= 0 &&
        av_rescale_q(test->pts, test->time_base, AV_TIME_BASE_Q) >= test->duration)
        return AVERROR_EOF;

    if (test->draw_once) {
        if (test->draw_once_reset) {
            av_frame_free(&test->picref);
            test->draw_once_reset = 0;
        }
        if (!test->picref) {
            test->picref =
                ff_get_video_buffer(outlink, test->w, test->h);
            if (!test->picref)
                return AVERROR(ENOMEM);
            test->fill_picture_fn(outlink->src, test->picref);
        }
        frame = av_frame_clone(test->picref);
    } else
        frame = ff_get_video_buffer(outlink, test->w, test->h);

    if (!frame)
        return AVERROR(ENOMEM);
    frame->pts                 = test->pts;
    frame->key_frame           = 1;
    frame->interlaced_frame    = 0;
    frame->pict_type           = AV_PICTURE_TYPE_I;
    frame->sample_aspect_ratio = test->sar;
    if (!test->draw_once)
        test->fill_picture_fn(outlink->src, frame);

    test->pts++;
    test->nb_frame++;

    return ff_filter_frame(outlink, frame);
}
#if CONFIG_COLOR_FILTER

static const AVOption color_options[] = {
    { "color", "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "c",     "set color", OFFSET(color_rgba), AV_OPT_TYPE_COLOR, {.str = "black"}, CHAR_MIN, CHAR_MAX, FLAGS },
    COMMON_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(color);

static void color_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    ff_fill_rectangle(&test->draw, &test->color,
                      picref->data, picref->linesize,
                      0, 0, test->w, test->h);
}

static av_cold int color_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;
    test->fill_picture_fn = color_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

static int color_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}

static int color_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    TestSourceContext *test = ctx->priv;
    int ret;

    ff_draw_init(&test->draw, inlink->format, 0);
    ff_draw_color(&test->draw, &test->color, test->color_rgba);

    test->w = ff_draw_round_to_sub(&test->draw, 0, -1, test->w);
    test->h = ff_draw_round_to_sub(&test->draw, 1, -1, test->h);
    if (av_image_check_size(test->w, test->h, 0, ctx) < 0)
        return AVERROR(EINVAL);

    if ((ret = config_props(inlink)) < 0)
        return ret;

    return 0;
}

static int color_process_command(AVFilterContext *ctx, const char *cmd, const char *args,
                                 char *res, int res_len, int flags)
{
    TestSourceContext *test = ctx->priv;
    int ret;

    if (!strcmp(cmd, "color") || !strcmp(cmd, "c")) {
        uint8_t color_rgba[4];

        ret = av_parse_color(color_rgba, args, -1, ctx);
        if (ret < 0)
            return ret;

        memcpy(test->color_rgba, color_rgba, sizeof(color_rgba));
        ff_draw_color(&test->draw, &test->color, test->color_rgba);
        test->draw_once_reset = 1;
        return 0;
    }

    return AVERROR(ENOSYS);
}

static const AVFilterPad color_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = color_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_color = {
    .name            = "color",
    .description     = NULL_IF_CONFIG_SMALL("Provide a uniformly colored input."),
    .priv_class      = &color_class,
    .priv_size       = sizeof(TestSourceContext),
    .init            = color_init,
    .uninit          = uninit,
    .query_formats   = color_query_formats,
    .inputs          = NULL,
    .outputs         = color_outputs,
    .process_command = color_process_command,
};

#endif /* CONFIG_COLOR_FILTER */
#if CONFIG_HALDCLUTSRC_FILTER

static const AVOption haldclutsrc_options[] = {
    { "level", "set level", OFFSET(level), AV_OPT_TYPE_INT, {.i64 = 6}, 2, 8, FLAGS },
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(haldclutsrc);

static void haldclutsrc_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    int i, j, k, x = 0, y = 0, is16bit = 0, step;
    uint32_t alpha = 0;
    const TestSourceContext *hc = ctx->priv;
    int level = hc->level;
    float scale;
    const int w = frame->width;
    const int h = frame->height;
    const uint8_t *data = frame->data[0];
    const int linesize  = frame->linesize[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t rgba_map[4];

    av_assert0(w == h && w == level*level*level);

    ff_fill_rgba_map(rgba_map, frame->format);

    switch (frame->format) {
    case AV_PIX_FMT_RGB48:
    case AV_PIX_FMT_BGR48:
    case AV_PIX_FMT_RGBA64:
    case AV_PIX_FMT_BGRA64:
        is16bit = 1;
        alpha = 0xffff;
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        alpha = 0xff;
        break;
    }

    step  = av_get_padded_bits_per_pixel(desc) >> (3 + is16bit);
    scale = ((float)(1 << (8*(is16bit+1))) - 1) / (level*level - 1);
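
    /* Layout note: an identity Hald CLUT of level N is an N^3 x N^3 image
     * holding N^2 sample values per channel; every pixel stores its own
     * (i, j, k) lattice coordinates scaled to full range, so applying the
     * CLUT leaves colors unchanged. */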
#define LOAD_CLUT(nbits) do {                                                   \
    uint##nbits##_t *dst = ((uint##nbits##_t *)(data + y*linesize)) + x*step;   \
    dst[rgba_map[0]] = av_clip_uint##nbits(i * scale);                          \
    dst[rgba_map[1]] = av_clip_uint##nbits(j * scale);                          \
    dst[rgba_map[2]] = av_clip_uint##nbits(k * scale);                          \
    if (step == 4)                                                              \
        dst[rgba_map[3]] = alpha;                                               \
} while (0)

    level *= level;
    for (k = 0; k < level; k++) {
        for (j = 0; j < level; j++) {
            for (i = 0; i < level; i++) {
                if (!is16bit)
                    LOAD_CLUT(8);
                else
                    LOAD_CLUT(16);
                if (++x == w) {
                    x = 0;
                    y++;
                }
            }
        }
    }
}

static av_cold int haldclutsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *hc = ctx->priv;
    hc->fill_picture_fn = haldclutsrc_fill_picture;
    hc->draw_once = 1;
    return init(ctx);
}

static int haldclutsrc_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24,  AV_PIX_FMT_BGR24,
        AV_PIX_FMT_RGBA,   AV_PIX_FMT_BGRA,
        AV_PIX_FMT_ARGB,   AV_PIX_FMT_ABGR,
        AV_PIX_FMT_0RGB,   AV_PIX_FMT_0BGR,
        AV_PIX_FMT_RGB0,   AV_PIX_FMT_BGR0,
        AV_PIX_FMT_RGB48,  AV_PIX_FMT_BGR48,
        AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int haldclutsrc_config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    TestSourceContext *hc = ctx->priv;

    hc->w = hc->h = hc->level * hc->level * hc->level;
    return config_props(outlink);
}

static const AVFilterPad haldclutsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = haldclutsrc_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_haldclutsrc = {
    .name          = "haldclutsrc",
    .description   = NULL_IF_CONFIG_SMALL("Provide an identity Hald CLUT."),
    .priv_class    = &haldclutsrc_class,
    .priv_size     = sizeof(TestSourceContext),
    .init          = haldclutsrc_init,
    .uninit        = uninit,
    .query_formats = haldclutsrc_query_formats,
    .inputs        = NULL,
    .outputs       = haldclutsrc_outputs,
};
#endif /* CONFIG_HALDCLUTSRC_FILTER */
#if CONFIG_NULLSRC_FILTER

#define nullsrc_options options
AVFILTER_DEFINE_CLASS(nullsrc);

static void nullsrc_fill_picture(AVFilterContext *ctx, AVFrame *picref) { }

static av_cold int nullsrc_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = nullsrc_fill_picture;
    return init(ctx);
}

static const AVFilterPad nullsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL },
};

AVFilter ff_vsrc_nullsrc = {
    .name        = "nullsrc",
    .description = NULL_IF_CONFIG_SMALL("Null video source, return unprocessed video frames."),
    .init        = nullsrc_init,
    .uninit      = uninit,
    .priv_size   = sizeof(TestSourceContext),
    .priv_class  = &nullsrc_class,
    .inputs      = NULL,
    .outputs     = nullsrc_outputs,
};

#endif /* CONFIG_NULLSRC_FILTER */
#if CONFIG_TESTSRC_FILTER

static const AVOption testsrc_options[] = {
    COMMON_OPTIONS
    { "decimals", "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 17, FLAGS },
    { "n",        "set number of decimals to show", OFFSET(nb_decimals), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 17, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc);

/**
 * Fill a rectangle with value val.
 *
 * @param val the RGB value to set
 * @param dst pointer to the destination buffer to fill
 * @param dst_linesize linesize of destination
 * @param segment_width width of the segment
 * @param x horizontal coordinate where to draw the rectangle in the destination buffer
 * @param y vertical coordinate where to draw the rectangle in the destination buffer
 * @param w width of the rectangle to draw, expressed as a number of segment_width units
 * @param h height of the rectangle to draw, expressed as a number of segment_width units
 */
static void draw_rectangle(unsigned val, uint8_t *dst, int dst_linesize, int segment_width,
                           int x, int y, int w, int h)
{
    int i;
    int step = 3;

    dst += segment_width * (step * x + y * dst_linesize);
    w *= segment_width * step;
    h *= segment_width;
    for (i = 0; i < h; i++) {
        memset(dst, val, w);
        dst += dst_linesize;
    }
}

static void draw_digit(int digit, uint8_t *dst, int dst_linesize,
                       int segment_width)
{
#define TOP_HBAR        1
#define MID_HBAR        2
#define BOT_HBAR        4
#define LEFT_TOP_VBAR   8
#define LEFT_BOT_VBAR  16
#define RIGHT_TOP_VBAR 32
#define RIGHT_BOT_VBAR 64
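    /* Digits are rendered as a classic seven-segment display on an 8x13 grid
     * of segment_width units; each bit above selects one segment. */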
    struct segments {
        int x, y, w, h;
    } segments[] = {
        { 1,  0, 5, 1 }, /* TOP_HBAR */
        { 1,  6, 5, 1 }, /* MID_HBAR */
        { 1, 12, 5, 1 }, /* BOT_HBAR */
        { 0,  1, 1, 5 }, /* LEFT_TOP_VBAR */
        { 0,  7, 1, 5 }, /* LEFT_BOT_VBAR */
        { 6,  1, 1, 5 }, /* RIGHT_TOP_VBAR */
        { 6,  7, 1, 5 }  /* RIGHT_BOT_VBAR */
    };
    static const unsigned char masks[10] = {
        /* 0 */ TOP_HBAR         |BOT_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 1 */                                                        RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 2 */ TOP_HBAR|MID_HBAR|BOT_HBAR|LEFT_BOT_VBAR                             |RIGHT_TOP_VBAR,
        /* 3 */ TOP_HBAR|MID_HBAR|BOT_HBAR                            |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 4 */          MID_HBAR         |LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 5 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR                             |RIGHT_BOT_VBAR,
        /* 6 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR               |RIGHT_BOT_VBAR,
        /* 7 */ TOP_HBAR                                              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 8 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 9 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
    };
    unsigned mask = masks[digit];
    int i;

    draw_rectangle(0, dst, dst_linesize, segment_width, 0, 0, 8, 13);
    for (i = 0; i < FF_ARRAY_ELEMS(segments); i++)
        if (mask & (1<<i))
            draw_rectangle(255, dst, dst_linesize, segment_width,
                           segments[i].x, segments[i].y, segments[i].w, segments[i].h);
}

#define GRADIENT_SIZE (6 * 256)

static void test_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    uint8_t *p, *p0;
    int x, y;
    int color, color_rest;
    int icolor;
    int radius;
    int quad0, quad;
    int dquad_x, dquad_y;
    int grad, dgrad, rgrad, drgrad;
    int seg_size;
    int second;
    int i;
    uint8_t *data = frame->data[0];
    int width  = frame->width;
    int height = frame->height;

    /* draw colored bars and circle */
    radius = (width + height) / 4;
    quad0 = width * width / 4 + height * height / 4 - radius * radius;
    dquad_y = 1 - height;
    p0 = data;
    for (y = 0; y < height; y++) {
        p = p0;
        color = 0;
        color_rest = 0;
        quad = quad0;
        dquad_x = 1 - width;
        for (x = 0; x < width; x++) {
            icolor = color;
            if (quad < 0)
                icolor ^= 7;
            quad += dquad_x;
            dquad_x += 2;
            *(p++) = icolor & 1 ? 255 : 0;
            *(p++) = icolor & 2 ? 255 : 0;
            *(p++) = icolor & 4 ? 255 : 0;
            color_rest += 8;
            if (color_rest >= width) {
                color_rest -= width;
                color++;
            }
        }
        quad0 += dquad_y;
        dquad_y += 2;
        p0 += frame->linesize[0];
    }

    /* draw sliding color line */
    p0 = p = data + frame->linesize[0] * (height * 3/4);
    grad = (256 * test->nb_frame * test->time_base.num / test->time_base.den) %
        GRADIENT_SIZE;
    rgrad = 0;
    dgrad = GRADIENT_SIZE / width;
    drgrad = GRADIENT_SIZE % width;
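    /* dgrad/drgrad step through the 6*256-entry gradient Bresenham-style:
     * rgrad accumulates the remainder so the full gradient spans the frame
     * width exactly, whatever the size. */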
    for (x = 0; x < width; x++) {
        *(p++) =
            grad < 256 || grad >= 5 * 256 ? 255 :
            grad >= 2 * 256 && grad < 4 * 256 ? 0 :
            grad < 2 * 256 ? 2 * 256 - 1 - grad : grad - 4 * 256;
        *(p++) =
            grad >= 4 * 256 ? 0 :
            grad >= 1 * 256 && grad < 3 * 256 ? 255 :
            grad < 1 * 256 ? grad : 4 * 256 - 1 - grad;
        *(p++) =
            grad < 2 * 256 ? 0 :
            grad >= 3 * 256 && grad < 5 * 256 ? 255 :
            grad < 3 * 256 ? grad - 2 * 256 : 6 * 256 - 1 - grad;
        grad += dgrad;
        rgrad += drgrad;
        if (rgrad >= GRADIENT_SIZE) {
            grad++;
            rgrad -= GRADIENT_SIZE;
        }
        if (grad >= GRADIENT_SIZE)
            grad -= GRADIENT_SIZE;
    }
    p = p0;
    for (y = height / 8; y > 0; y--) {
        memcpy(p+frame->linesize[0], p, 3 * width);
        p += frame->linesize[0];
    }

    /* draw digits */
    seg_size = width / 80;
    if (seg_size >= 1 && height >= 13 * seg_size) {
        int64_t p10decimals = 1;
        double time = av_q2d(test->time_base) * test->nb_frame *
                      ff_exp10(test->nb_decimals);
        if (time >= INT_MAX)
            return;

        for (x = 0; x < test->nb_decimals; x++)
            p10decimals *= 10;

        second = av_rescale_rnd(test->nb_frame * test->time_base.num, p10decimals, test->time_base.den, AV_ROUND_ZERO);
        x = width - (width - seg_size * 64) / 2;
        y = (height - seg_size * 13) / 2;
        p = data + (x*3 + y * frame->linesize[0]);
        for (i = 0; i < 8; i++) {
            p -= 3 * 8 * seg_size;
            draw_digit(second % 10, p, frame->linesize[0], seg_size);
            second /= 10;
            if (second == 0)
                break;
        }
    }
}

static av_cold int test_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = test_fill_picture;
    return init(ctx);
}

static int test_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_testsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_testsrc = {
    .name          = "testsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &testsrc_class,
    .init          = test_init,
    .uninit        = uninit,
    .query_formats = test_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_testsrc_outputs,
};

#endif /* CONFIG_TESTSRC_FILTER */
#if CONFIG_TESTSRC2_FILTER

static const AVOption testsrc2_options[] = {
    COMMON_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(testsrc2);

static void set_color(TestSourceContext *s, FFDrawColor *color, uint32_t argb)
{
    uint8_t rgba[4] = { (argb >> 16) & 0xFF,
                        (argb >>  8) & 0xFF,
                        (argb >>  0) & 0xFF,
                        (argb >> 24) & 0xFF, };
    ff_draw_color(&s->draw, color, rgba);
}
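
/* Map an index in [0, 6*256) onto a smooth red -> yellow -> green -> cyan ->
 * blue -> magenta -> red color wheel; used below for the oblique gradient. */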
static uint32_t color_gradient(unsigned index)
{
    unsigned si = index & 0xFF, sd = 0xFF - si;

    switch (index >> 8) {
    case 0: return 0xFF0000 + (si <<  8);
    case 1: return 0x00FF00 + (sd << 16);
    case 2: return 0x00FF00 + (si <<  0);
    case 3: return 0x0000FF + (sd <<  8);
    case 4: return 0x0000FF + (si << 16);
    case 5: return 0xFF0000 + (sd <<  0);
    }
    av_assert0(0);
}

static void draw_text(TestSourceContext *s, AVFrame *frame, FFDrawColor *color,
                      int x0, int y0, const uint8_t *text)
{
    int x = x0;

    for (; *text; text++) {
        if (*text == '\n') {
            x = x0;
            y0 += 16;
            continue;
        }
        ff_blend_mask(&s->draw, color, frame->data, frame->linesize,
                      frame->width, frame->height,
                      avpriv_vga16_font + *text * 16, 1, 8, 16, 0, 0, x, y0);
        x += 8;
    }
}

static void test2_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *s = ctx->priv;
    FFDrawColor color;

    /* colored background */
    {
        unsigned i, x = 0, x2;

        x = 0;
        for (i = 1; i < 7; i++) {
            x2 = av_rescale(i, s->w, 6);
            x2 = ff_draw_round_to_sub(&s->draw, 0, 0, x2);
            set_color(s, &color, ((i & 1) ? 0xFF0000 : 0) |
                                 ((i & 2) ? 0x00FF00 : 0) |
                                 ((i & 4) ? 0x0000FF : 0));
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x, 0, x2 - x, frame->height);
            x = x2;
        }
    }

    /* oblique gradient */
    /* note: too slow if using blending */
    if (s->h >= 64) {
        unsigned x, dx, y0, y, g0, g;

        dx = ff_draw_round_to_sub(&s->draw, 0, +1, 1);
        y0 = av_rescale_q(s->pts, s->time_base, av_make_q(2, s->h - 16));
        g0 = av_rescale_q(s->pts, s->time_base, av_make_q(1, 128));
        for (x = 0; x < s->w; x += dx) {
            g = (av_rescale(x, 6 * 256, s->w) + g0) % (6 * 256);
            set_color(s, &color, color_gradient(g));
            y = y0 + av_rescale(x, s->h / 2, s->w);
            y %= 2 * (s->h - 16);
            if (y > s->h - 16)
                y = 2 * (s->h - 16) - y;
            y = ff_draw_round_to_sub(&s->draw, 1, 0, y);
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x, y, dx, 16);
        }
    }

    /* top right: draw clock hands */
    if (s->w >= 64 && s->h >= 64) {
        int l = (FFMIN(s->w, s->h) - 32) >> 1;
        int steps = FFMAX(4, l >> 5);
        int xc = (s->w >> 2) + (s->w >> 1);
        int yc = (s->h >> 2);
        int cycle = l << 2;
        int pos, xh, yh;
        int c, i;

        for (c = 0; c < 3; c++) {
            set_color(s, &color, 0xBBBBBB ^ (0xFF << (c << 3)));
            pos = av_rescale_q(s->pts, s->time_base, av_make_q(64 >> (c << 1), cycle)) % cycle;
            xh = pos < 1 * l ? pos :
                 pos < 2 * l ? l :
                 pos < 3 * l ? 3 * l - pos : 0;
            yh = pos < 1 * l ? 0 :
                 pos < 2 * l ? pos - l :
                 pos < 3 * l ? l :
                               cycle - pos;
            xh -= l >> 1;
            yh -= l >> 1;
            for (i = 1; i <= steps; i++) {
                int x = av_rescale(xh, i, steps) + xc;
                int y = av_rescale(yh, i, steps) + yc;

                x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
                y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
                ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                                  x, y, 8, 8);
            }
        }
    }

    /* bottom left: beating rectangles */
    if (s->w >= 64 && s->h >= 64) {
        int l = (FFMIN(s->w, s->h) - 16) >> 2;
        int cycle = l << 3;
        int xc = (s->w >> 2);
        int yc = (s->h >> 2) + (s->h >> 1);
        int xm1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 8);
        int xm2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 8);
        int ym1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 8);
        int ym2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 8);
        int size, step, x1, x2, y1, y2;

        size = av_rescale_q(s->pts, s->time_base, av_make_q(4, cycle));
        step = size / l;
        size %= l;
        if (step & 1)
            size = l - size;
        step = (step >> 1) & 3;
        set_color(s, &color, 0xFF808080);
        x1 = ff_draw_round_to_sub(&s->draw, 0, -1, xc - 4 - size);
        x2 = ff_draw_round_to_sub(&s->draw, 0, +1, xc + 4 + size);
        y1 = ff_draw_round_to_sub(&s->draw, 1, -1, yc - 4 - size);
        y2 = ff_draw_round_to_sub(&s->draw, 1, +1, yc + 4 + size);
        if (step == 0 || step == 2)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x1, ym1, x2 - x1, ym2 - ym1);
        if (step == 1 || step == 2)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              xm1, y1, xm2 - xm1, y2 - y1);
        if (step == 3)
            ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                              x1, y1, x2 - x1, y2 - y1);
    }

    /* bottom right: checker with random noise */
    {
        unsigned xmin = av_rescale(5, s->w, 8);
        unsigned xmax = av_rescale(7, s->w, 8);
        unsigned ymin = av_rescale(5, s->h, 8);
        unsigned ymax = av_rescale(7, s->h, 8);
        unsigned x, y, i, r;
        uint8_t alpha[256];
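
        /* pseudorandom noise: a Numerical-Recipes-style LCG seeded from the
         * frame pts, so the pattern is reproducible for a given frame */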
        r = s->pts;
        for (y = ymin; y < ymax - 15; y += 16) {
            for (x = xmin; x < xmax - 15; x += 16) {
                if ((x ^ y) & 16)
                    continue;
                for (i = 0; i < 256; i++) {
                    r = r * 1664525 + 1013904223;
                    alpha[i] = r >> 24;
                }
                set_color(s, &color, 0xFF00FF80);
                ff_blend_mask(&s->draw, &color, frame->data, frame->linesize,
                              frame->width, frame->height,
                              alpha, 16, 16, 16, 3, 0, x, y);
            }
        }
    }

    /* bouncing square */
    if (s->w >= 16 && s->h >= 16) {
        unsigned w = s->w - 8;
        unsigned h = s->h - 8;
        unsigned x = av_rescale_q(s->pts, s->time_base, av_make_q(233, 55 * w)) % (w << 1);
        unsigned y = av_rescale_q(s->pts, s->time_base, av_make_q(233, 89 * h)) % (h << 1);

        if (x > w)
            x = (w << 1) - x;
        if (y > h)
            y = (h << 1) - y;
        x = ff_draw_round_to_sub(&s->draw, 0, -1, x);
        y = ff_draw_round_to_sub(&s->draw, 1, -1, y);
        set_color(s, &color, 0xFF8000FF);
        ff_fill_rectangle(&s->draw, &color, frame->data, frame->linesize,
                          x, y, 8, 8);
    }

    /* top left: draw frame time and frame number */
    {
        char buf[256];
        unsigned time;

        time = av_rescale_q(s->pts, s->time_base, av_make_q(1, 1000)) % 86400000;
        set_color(s, &color, 0xC0000000);
        ff_blend_rectangle(&s->draw, &color, frame->data, frame->linesize,
                           frame->width, frame->height,
                           2, 2, 100, 36);
        set_color(s, &color, 0xFFFF8000);
        snprintf(buf, sizeof(buf), "%02d:%02d:%02d.%03d\n%12"PRIi64,
                 time / 3600000, (time / 60000) % 60, (time / 1000) % 60,
                 time % 1000, s->pts);
        draw_text(s, frame, &color, 4, 4, buf);
    }
}

static av_cold int test2_init(AVFilterContext *ctx)
{
    TestSourceContext *s = ctx->priv;

    s->fill_picture_fn = test2_fill_picture;
    return init(ctx);
}

static int test2_query_formats(AVFilterContext *ctx)
{
    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
}

static int test2_config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    TestSourceContext *s = ctx->priv;

    av_assert0(ff_draw_init(&s->draw, inlink->format, 0) >= 0);
    s->w = ff_draw_round_to_sub(&s->draw, 0, -1, s->w);
    s->h = ff_draw_round_to_sub(&s->draw, 1, -1, s->h);
    if (av_image_check_size(s->w, s->h, 0, ctx) < 0)
        return AVERROR(EINVAL);
    return config_props(inlink);
}

static const AVFilterPad avfilter_vsrc_testsrc2_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = test2_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_testsrc2 = {
    .name          = "testsrc2",
    .description   = NULL_IF_CONFIG_SMALL("Generate another test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &testsrc2_class,
    .init          = test2_init,
    .uninit        = uninit,
    .query_formats = test2_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_testsrc2_outputs,
};

#endif /* CONFIG_TESTSRC2_FILTER */
#if CONFIG_RGBTESTSRC_FILTER

#define rgbtestsrc_options options
AVFILTER_DEFINE_CLASS(rgbtestsrc);

#define R 0
#define G 1
#define B 2
#define A 3

static void rgbtest_put_pixel(uint8_t *dst, int dst_linesize,
                              int x, int y, int r, int g, int b, enum AVPixelFormat fmt,
                              uint8_t rgba_map[4])
{
    int32_t v;
    uint8_t *p;

    switch (fmt) {
    case AV_PIX_FMT_BGR444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r >> 4) << 8) | ((g >> 4) << 4) | (b >> 4); break;
    case AV_PIX_FMT_RGB444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b >> 4) << 8) | ((g >> 4) << 4) | (r >> 4); break;
    case AV_PIX_FMT_BGR555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<10) | ((g>>3)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<10) | ((g>>3)<<5) | (r>>3); break;
    case AV_PIX_FMT_BGR565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<11) | ((g>>2)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<11) | ((g>>2)<<5) | (r>>3); break;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8));
        p = dst + 3*x + y*dst_linesize;
        AV_WL24(p, v);
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8)) + (255 << (rgba_map[A]*8));
        p = dst + 4*x + y*dst_linesize;
        AV_WL32(p, v);
        break;
    }
}

static void rgbtest_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    TestSourceContext *test = ctx->priv;
    int x, y, w = frame->width, h = frame->height;

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = 256*x/w;
            int r = 0, g = 0, b = 0;

            if      (3*y < h  ) r = c;
            else if (3*y < 2*h) g = c;
            else                b = c;

            rgbtest_put_pixel(frame->data[0], frame->linesize[0], x, y, r, g, b,
                              ctx->outputs[0]->format, test->rgba_map);
        }
    }
}

static av_cold int rgbtest_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->draw_once = 1;
    test->fill_picture_fn = rgbtest_fill_picture;
    return init(ctx);
}

static int rgbtest_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGBA, AV_PIX_FMT_ARGB, AV_PIX_FMT_BGRA, AV_PIX_FMT_ABGR,
        AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB24,
        AV_PIX_FMT_RGB444, AV_PIX_FMT_BGR444,
        AV_PIX_FMT_RGB565, AV_PIX_FMT_BGR565,
        AV_PIX_FMT_RGB555, AV_PIX_FMT_BGR555,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int rgbtest_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    ff_fill_rgba_map(test->rgba_map, outlink->format);
    return config_props(outlink);
}

static const AVFilterPad avfilter_vsrc_rgbtestsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = rgbtest_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_rgbtestsrc = {
    .name          = "rgbtestsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate RGB test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &rgbtestsrc_class,
    .init          = rgbtest_init,
    .uninit        = uninit,
    .query_formats = rgbtest_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_rgbtestsrc_outputs,
};

#endif /* CONFIG_RGBTESTSRC_FILTER */
#if CONFIG_YUVTESTSRC_FILTER

#define yuvtestsrc_options options
AVFILTER_DEFINE_CLASS(yuvtestsrc);
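
/* Each yuvtest frame is split into three horizontal bands: a luma ramp, a
 * U ramp and a V ramp, with the other two components held at mid level. */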
static void yuvtest_fill_picture8(AVFilterContext *ctx, AVFrame *frame)
{
    int x, y, w = frame->width, h = frame->height / 3;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    const int factor = 1 << desc->comp[0].depth;
    const int mid = 1 << (desc->comp[0].depth - 1);
    uint8_t *ydst = frame->data[0];
    uint8_t *udst = frame->data[1];
    uint8_t *vdst = frame->data[2];
    int ylinesize = frame->linesize[0];
    int ulinesize = frame->linesize[1];
    int vlinesize = frame->linesize[2];

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = c;
            udst[x] = mid;
            vdst[x] = mid;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    h += h;
    for (; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = c;
            vdst[x] = mid;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    for (; y < frame->height; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = mid;
            vdst[x] = c;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }
}

static void yuvtest_fill_picture16(AVFilterContext *ctx, AVFrame *frame)
{
    int x, y, w = frame->width, h = frame->height / 3;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    const int factor = 1 << desc->comp[0].depth;
    const int mid = 1 << (desc->comp[0].depth - 1);
    uint16_t *ydst = (uint16_t *)frame->data[0];
    uint16_t *udst = (uint16_t *)frame->data[1];
    uint16_t *vdst = (uint16_t *)frame->data[2];
    int ylinesize = frame->linesize[0] / 2;
    int ulinesize = frame->linesize[1] / 2;
    int vlinesize = frame->linesize[2] / 2;

    for (y = 0; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = c;
            udst[x] = mid;
            vdst[x] = mid;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    h += h;
    for (; y < h; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = c;
            vdst[x] = mid;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }

    for (; y < frame->height; y++) {
        for (x = 0; x < w; x++) {
            int c = factor * x / w;

            ydst[x] = mid;
            udst[x] = mid;
            vdst[x] = c;
        }

        ydst += ylinesize;
        udst += ulinesize;
        vdst += vlinesize;
    }
}

static av_cold int yuvtest_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->draw_once = 1;
    return init(ctx);
}

static int yuvtest_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P,   AV_PIX_FMT_YUVJ444P,
        AV_PIX_FMT_YUV444P9,  AV_PIX_FMT_YUV444P10,
        AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV444P14,
        AV_PIX_FMT_YUV444P16,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static int yuvtest_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);

    test->fill_picture_fn = desc->comp[0].depth > 8 ? yuvtest_fill_picture16 : yuvtest_fill_picture8;
    return config_props(outlink);
}

static const AVFilterPad avfilter_vsrc_yuvtestsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = yuvtest_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_yuvtestsrc = {
    .name          = "yuvtestsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate YUV test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &yuvtestsrc_class,
    .init          = yuvtest_init,
    .uninit        = uninit,
    .query_formats = yuvtest_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_yuvtestsrc_outputs,
};

#endif /* CONFIG_YUVTESTSRC_FILTER */
#if CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER

static const uint8_t rainbow[7][4] = {
    { 180, 128, 128, 255 },     /* 75% white */
    { 162,  44, 142, 255 },     /* 75% yellow */
    { 131, 156,  44, 255 },     /* 75% cyan */
    { 112,  72,  58, 255 },     /* 75% green */
    {  84, 184, 198, 255 },     /* 75% magenta */
    {  65, 100, 212, 255 },     /* 75% red */
    {  35, 212, 114, 255 },     /* 75% blue */
};

static const uint8_t rainbowhd[7][4] = {
    { 180, 128, 128, 255 },     /* 75% white */
    { 168,  44, 136, 255 },     /* 75% yellow */
    { 145, 147,  44, 255 },     /* 75% cyan */
    { 133,  63,  52, 255 },     /* 75% green */
    {  63, 193, 204, 255 },     /* 75% magenta */
    {  51, 109, 212, 255 },     /* 75% red */
    {  28, 212, 120, 255 },     /* 75% blue */
};

static const uint8_t wobnair[7][4] = {
    {  35, 212, 114, 255 },     /* 75% blue */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    {  84, 184, 198, 255 },     /* 75% magenta */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    { 131, 156,  44, 255 },     /* 75% cyan */
    {  19, 128, 128, 255 },     /* 7.5% intensity black */
    { 180, 128, 128, 255 },     /* 75% white */
};

static const uint8_t white[4] = { 235, 128, 128, 255 };

/* pluge pulses */
static const uint8_t neg4ire[4] = {  7, 128, 128, 255 };
static const uint8_t pos4ire[4] = { 24, 128, 128, 255 };

/* fudged Q/-I */
static const uint8_t i_pixel[4] = { 57, 156, 97, 255 };
static const uint8_t q_pixel[4] = { 44, 171, 147, 255 };

static const uint8_t gray40[4] = { 104, 128, 128, 255 };
static const uint8_t gray15[4] = {  49, 128, 128, 255 };
static const uint8_t   cyan[4] = { 188, 154,  16, 255 };
static const uint8_t yellow[4] = { 219,  16, 138, 255 };
static const uint8_t   blue[4] = {  32, 240, 118, 255 };
static const uint8_t    red[4] = {  63, 102, 240, 255 };
static const uint8_t black0[4] = {  16, 128, 128, 255 };
static const uint8_t black2[4] = {  20, 128, 128, 255 };
static const uint8_t black4[4] = {  25, 128, 128, 255 };
static const uint8_t   neg2[4] = {  12, 128, 128, 255 };

static void draw_bar(TestSourceContext *test, const uint8_t color[4],
                     int x, int y, int w, int h,
                     AVFrame *frame)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
    uint8_t *p, *p0;
    int plane;

    x = FFMIN(x, test->w - 1);
    y = FFMIN(y, test->h - 1);
    w = FFMAX(FFMIN(w, test->w - x), 0);
    h = FFMAX(FFMIN(h, test->h - y), 0);

    av_assert0(x + w <= test->w);
    av_assert0(y + h <= test->h);

    for (plane = 0; frame->data[plane]; plane++) {
        const int c = color[plane];
        const int linesize = frame->linesize[plane];
        int i, px, py, pw, ph;

        if (plane == 1 || plane == 2) {
            px = x >> desc->log2_chroma_w;
            pw = AV_CEIL_RSHIFT(w, desc->log2_chroma_w);
            py = y >> desc->log2_chroma_h;
            ph = AV_CEIL_RSHIFT(h, desc->log2_chroma_h);
        } else {
            px = x;
            pw = w;
            py = y;
            ph = h;
        }

        p0 = p = frame->data[plane] + py * linesize + px;
        memset(p, c, pw);
        p += linesize;
        for (i = 1; i < ph; i++, p += linesize)
            memcpy(p, p0, pw);
    }
}

static int smptebars_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P,
        AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV444P,
        AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_NONE,
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad smptebars_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};

#if CONFIG_SMPTEBARS_FILTER

#define smptebars_options options
AVFILTER_DEFINE_CLASS(smptebars);

static void smptebars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int r_w, r_h, w_h, p_w, p_h, i, tmp, x = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->colorspace = AVCOL_SPC_BT470BG;

    r_w = FFALIGN((test->w + 6) / 7, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 2 / 3, 1 << pixdesc->log2_chroma_h);
    w_h = FFALIGN(test->h * 3 / 4 - r_h, 1 << pixdesc->log2_chroma_h);
    p_w = FFALIGN(r_w * 5 / 4, 1 << pixdesc->log2_chroma_w);
    p_h = test->h - w_h - r_h;
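
    /* three horizontal bands, top to bottom: the 75% color bars, the
     * reverse-order "wobnair" strip, and the -I / white / +Q plus PLUGE row */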
    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbow[i], x, 0, r_w, r_h, picref);
        draw_bar(test, wobnair[i], x, r_h, r_w, w_h, picref);
        x += r_w;
    }
    x = 0;
    draw_bar(test, i_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, white, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    draw_bar(test, q_pixel, x, r_h + w_h, p_w, p_h, picref);
    x += p_w;
    tmp = FFALIGN(5 * r_w - x, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, neg4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, pos4ire, x, r_h + w_h, tmp, p_h, picref);
    x += tmp;
    draw_bar(test, black0, x, r_h + w_h, test->w - x, p_h, picref);
}

static av_cold int smptebars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = smptebars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_smptebars = {
    .name          = "smptebars",
    .description   = NULL_IF_CONFIG_SMALL("Generate SMPTE color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &smptebars_class,
    .init          = smptebars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .inputs        = NULL,
    .outputs       = smptebars_outputs,
};

#endif  /* CONFIG_SMPTEBARS_FILTER */

#if CONFIG_SMPTEHDBARS_FILTER

#define smptehdbars_options options
AVFILTER_DEFINE_CLASS(smptehdbars);

static void smptehdbars_fill_picture(AVFilterContext *ctx, AVFrame *picref)
{
    TestSourceContext *test = ctx->priv;
    int d_w, r_w, r_h, l_w, i, tmp, x = 0, y = 0;
    const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(picref->format);

    picref->colorspace = AVCOL_SPC_BT709;

    d_w = FFALIGN(test->w / 8, 1 << pixdesc->log2_chroma_w);
    r_h = FFALIGN(test->h * 7 / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, gray40, x, 0, d_w, r_h, picref);
    x += d_w;

    r_w = FFALIGN((((test->w + 3) / 4) * 3) / 7, 1 << pixdesc->log2_chroma_w);
    for (i = 0; i < 7; i++) {
        draw_bar(test, rainbowhd[i], x, 0, r_w, r_h, picref);
        x += r_w;
    }
    draw_bar(test, gray40, x, 0, test->w - x, r_h, picref);
    y = r_h;
    r_h = FFALIGN(test->h / 12, 1 << pixdesc->log2_chroma_h);
    draw_bar(test, cyan, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, i_pixel, x, y, r_w, r_h, picref);
    x += r_w;
    tmp = r_w * 6;
    draw_bar(test, rainbowhd[0], x, y, tmp, r_h, picref);
    x += tmp;
    l_w = x;
    draw_bar(test, blue, x, y, test->w - x, r_h, picref);
    y += r_h;
    draw_bar(test, yellow, 0, y, d_w, r_h, picref);
    x = d_w;
    draw_bar(test, q_pixel, x, y, r_w, r_h, picref);
    x += r_w;

    for (i = 0; i < tmp; i += 1 << pixdesc->log2_chroma_w) {
        uint8_t yramp[4] = {0};

        yramp[0] = i * 255 / tmp;
        yramp[1] = 128;
        yramp[2] = 128;
        yramp[3] = 255;

        draw_bar(test, yramp, x, y, 1 << pixdesc->log2_chroma_w, r_h, picref);
        x += 1 << pixdesc->log2_chroma_w;
    }
    draw_bar(test, red, x, y, test->w - x, r_h, picref);
    y += r_h;
    draw_bar(test, gray15, 0, y, d_w, test->h - y, picref);
    x = d_w;
    tmp = FFALIGN(r_w * 3 / 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 2, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, white, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w * 5 / 6, 1 << pixdesc->log2_chroma_w);
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    tmp = FFALIGN(r_w / 3, 1 << pixdesc->log2_chroma_w);
    draw_bar(test,   neg2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black2, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black0, x, y, tmp, test->h - y, picref);
    x += tmp;
    draw_bar(test, black4, x, y, tmp, test->h - y, picref);
    x += tmp;
    r_w = l_w - x;
    draw_bar(test, black0, x, y, r_w, test->h - y, picref);
    x += r_w;
    draw_bar(test, gray15, x, y, test->w - x, test->h - y, picref);
}

static av_cold int smptehdbars_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->fill_picture_fn = smptehdbars_fill_picture;
    test->draw_once = 1;
    return init(ctx);
}

AVFilter ff_vsrc_smptehdbars = {
    .name          = "smptehdbars",
    .description   = NULL_IF_CONFIG_SMALL("Generate SMPTE HD color bars."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &smptehdbars_class,
    .init          = smptehdbars_init,
    .uninit        = uninit,
    .query_formats = smptebars_query_formats,
    .inputs        = NULL,
    .outputs       = smptebars_outputs,
};

#endif  /* CONFIG_SMPTEHDBARS_FILTER */
#endif  /* CONFIG_SMPTEBARS_FILTER || CONFIG_SMPTEHDBARS_FILTER */
#if CONFIG_ALLYUV_FILTER

static const AVOption allyuv_options[] = {
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(allyuv);
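
/* The fixed 4096x4096 frame holds 2^24 pixels, enough for every possible
 * 8-bit (Y, U, V) combination to appear once. */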
static void allyuv_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    const int ys = frame->linesize[0];
    const int us = frame->linesize[1];
    const int vs = frame->linesize[2];
    int x, y, j;

    for (y = 0; y < 4096; y++) {
        for (x = 0; x < 2048; x++) {
            frame->data[0][y * ys + x] = ((x / 8) % 256);
            frame->data[0][y * ys + 4095 - x] = ((x / 8) % 256);
        }

        for (x = 0; x < 2048; x+=8) {
            for (j = 0; j < 8; j++) {
                frame->data[1][vs * y + x + j]        = (y % 16 + (j % 8) * 16);
                frame->data[1][vs * y + 4095 - x - j] = (128 + y % 16 + (j % 8) * 16);
            }
        }

        for (x = 0; x < 4096; x++)
            frame->data[2][y * us + x] = 256 * y / 4096;
    }
}

static av_cold int allyuv_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->w = test->h = 4096;
    test->draw_once = 1;
    test->fill_picture_fn = allyuv_fill_picture;
    return init(ctx);
}

static int allyuv_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_GBRP,
        AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_allyuv_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_allyuv = {
    .name          = "allyuv",
    .description   = NULL_IF_CONFIG_SMALL("Generate all yuv colors."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &allyuv_class,
    .init          = allyuv_init,
    .uninit        = uninit,
    .query_formats = allyuv_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_allyuv_outputs,
};

#endif /* CONFIG_ALLYUV_FILTER */
#if CONFIG_ALLRGB_FILTER

static const AVOption allrgb_options[] = {
    COMMON_OPTIONS_NOSIZE
    { NULL }
};

AVFILTER_DEFINE_CLASS(allrgb);

static void allrgb_fill_picture(AVFilterContext *ctx, AVFrame *frame)
{
    unsigned x, y;
    const int linesize = frame->linesize[0];
    uint8_t *line = frame->data[0];
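
    /* every 24-bit RGB triple appears exactly once in the 4096x4096 frame:
     * R is the low byte of x, G the low byte of y, and B packs the high
     * nibbles of x and y */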
    for (y = 0; y < 4096; y++) {
        uint8_t *dst = line;

        for (x = 0; x < 4096; x++) {
            *dst++ = x;
            *dst++ = y;
            *dst++ = (x >> 8) | ((y >> 8) << 4);
        }
        line += linesize;
    }
}

static av_cold int allrgb_init(AVFilterContext *ctx)
{
    TestSourceContext *test = ctx->priv;

    test->w = test->h = 4096;
    test->draw_once = 1;
    test->fill_picture_fn = allrgb_fill_picture;
    return init(ctx);
}

static int allrgb_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    ff_fill_rgba_map(test->rgba_map, outlink->format);
    return config_props(outlink);
}

static int allrgb_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static const AVFilterPad avfilter_vsrc_allrgb_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = allrgb_config_props,
    },
    { NULL }
};

AVFilter ff_vsrc_allrgb = {
    .name          = "allrgb",
    .description   = NULL_IF_CONFIG_SMALL("Generate all RGB colors."),
    .priv_size     = sizeof(TestSourceContext),
    .priv_class    = &allrgb_class,
    .init          = allrgb_init,
    .uninit        = uninit,
    .query_formats = allrgb_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_allrgb_outputs,
};

#endif /* CONFIG_ALLRGB_FILTER */