/*
 * Copyright (c) 2007 Nicolas George <nicolas.george@normalesup.org>
 * Copyright (c) 2011 Stefano Sabatini
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Misc test sources.
 *
 * testsrc is based on the test pattern generator demuxer by Nicolas George:
 * http://lists.ffmpeg.org/pipermail/ffmpeg-devel/2007-October/037845.html
 *
 * rgbtestsrc is ported from MPlayer libmpcodecs/vf_rgbtest.c by
 * Michael Niedermayer.
 */

#include <float.h>

#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/parseutils.h"
#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
typedef struct {
    const AVClass *class;
    int h, w;
    unsigned int nb_frame;
    AVRational time_base;
    int64_t pts, max_pts;
    char *size;                 ///< video frame size
    char *rate;                 ///< video frame rate
    char *duration;             ///< total duration of the generated video
    AVRational sar;             ///< sample aspect ratio
    void (* fill_picture_fn)(AVFilterContext *ctx, AVFilterBufferRef *picref);

    /* only used by rgbtest */
    int rgba_map[4];
} TestSourceContext;
#define OFFSET(x) offsetof(TestSourceContext, x)

static const AVOption testsrc_options[] = {
    { "size",     "set video size",     OFFSET(size),     AV_OPT_TYPE_STRING, {.str = "320x240"}},
    { "s",        "set video size",     OFFSET(size),     AV_OPT_TYPE_STRING, {.str = "320x240"}},
    { "rate",     "set video rate",     OFFSET(rate),     AV_OPT_TYPE_STRING, {.str = "25"}, },
    { "r",        "set video rate",     OFFSET(rate),     AV_OPT_TYPE_STRING, {.str = "25"}, },
    { "duration", "set video duration", OFFSET(duration), AV_OPT_TYPE_STRING, {.str = NULL}, },
    { "sar",      "set video sample aspect ratio", OFFSET(sar), AV_OPT_TYPE_RATIONAL, {.dbl = 1}, 0, INT_MAX },
    { NULL },
};
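
/*
 * The options above are parsed from the filter argument string by
 * init_common() via av_set_options_string(), i.e. as ':'-separated
 * key=value pairs. A hedged, illustrative example (not taken from this
 * file): an argument string such as "size=640x480:rate=30:duration=5:sar=4/3"
 * would set all four options at once.
 */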
static av_cold int init_common(AVFilterContext *ctx, const char *args)
{
    TestSourceContext *test = ctx->priv;
    AVRational frame_rate_q;
    int64_t duration = -1;
    int ret = 0;

    av_opt_set_defaults(test);

    if ((ret = (av_set_options_string(test, args, "=", ":"))) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error parsing options string: '%s'\n", args);
        return ret;
    }

    if ((ret = av_parse_video_size(&test->w, &test->h, test->size)) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame size: '%s'\n", test->size);
        return ret;
    }

    if ((ret = av_parse_video_rate(&frame_rate_q, test->rate)) < 0 ||
        frame_rate_q.den <= 0 || frame_rate_q.num <= 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid frame rate: '%s'\n", test->rate);
        return ret;
    }

    if ((test->duration) && (ret = av_parse_time(&duration, test->duration, 1)) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Invalid duration: '%s'\n", test->duration);
        return ret;
    }

    test->time_base.num = frame_rate_q.den;
    test->time_base.den = frame_rate_q.num;
    test->max_pts = duration >= 0 ?
        av_rescale_q(duration, AV_TIME_BASE_Q, test->time_base) : -1;
    test->nb_frame = 0;
    test->pts = 0;

    av_log(ctx, AV_LOG_DEBUG, "size:%dx%d rate:%d/%d duration:%f sar:%d/%d\n",
           test->w, test->h, frame_rate_q.num, frame_rate_q.den,
           duration < 0 ? -1 : test->max_pts * av_q2d(test->time_base),
           test->sar.num, test->sar.den);
    return 0;
}
static int config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    outlink->w = test->w;
    outlink->h = test->h;
    outlink->sample_aspect_ratio = test->sar;
    outlink->time_base = test->time_base;

    return 0;
}
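
/*
 * Produce one frame per call: stop with EOF once the requested duration
 * (stored as max_pts in the output time base) has been reached, grab a
 * writable buffer, stamp it with a monotonically increasing pts, let the
 * per-filter fill_picture_fn() paint the pattern, and push the result
 * downstream via start_frame/draw_slice/end_frame.
 */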
static int request_frame(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;
    AVFilterBufferRef *picref;
    int ret;

    if (test->max_pts >= 0 && test->pts > test->max_pts)
        return AVERROR_EOF;
    picref = ff_get_video_buffer(outlink, AV_PERM_WRITE, test->w, test->h);
    if (!picref)
        return AVERROR(ENOMEM);

    picref->pts = test->pts++;
    picref->pos = -1;
    picref->video->key_frame = 1;
    picref->video->interlaced = 0;
    picref->video->pict_type = AV_PICTURE_TYPE_I;
    picref->video->pixel_aspect = test->sar;
    test->nb_frame++;
    test->fill_picture_fn(outlink->src, picref);

    if ((ret = ff_start_frame(outlink, picref)) < 0 ||
        (ret = ff_draw_slice(outlink, 0, test->h, 1)) < 0 ||
        (ret = ff_end_frame(outlink)) < 0)
        return ret;

    return 0;
}
#if CONFIG_TESTSRC_FILTER

static const char *testsrc_get_name(void *ctx)
{
    return "testsrc";
}

static const AVClass testsrc_class = {
    .class_name = "TestSourceContext",
    .item_name  = testsrc_get_name,
    .option     = testsrc_options,
};
/**
 * Fill a rectangle with value val.
 *
 * @param val the RGB value to set
 * @param dst pointer to the destination buffer to fill
 * @param dst_linesize linesize of destination
 * @param segment_width width of the segment
 * @param x horizontal coordinate where to draw the rectangle in the destination buffer
 * @param y vertical coordinate where to draw the rectangle in the destination buffer
 * @param w width of the rectangle to draw, expressed as a number of segment_width units
 * @param h height of the rectangle to draw, expressed as a number of segment_width units
 */
static void draw_rectangle(unsigned val, uint8_t *dst, int dst_linesize, unsigned segment_width,
                           unsigned x, unsigned y, unsigned w, unsigned h)
{
    int i;
    int step = 3;

    dst += segment_width * (step * x + y * dst_linesize);
    w *= segment_width * step;
    h *= segment_width;
    for (i = 0; i < h; i++) {
        memset(dst, val, w);
        dst += dst_linesize;
    }
}
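
/*
 * Draw a decimal digit as a seven-segment figure. Each digit occupies an
 * 8x13 grid of segment_width-sized cells (cleared to black first); the
 * masks[] table below selects which of the seven bars to light for 0-9.
 */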
static void draw_digit(int digit, uint8_t *dst, unsigned dst_linesize,
                       unsigned segment_width)
{
#define TOP_HBAR        1
#define MID_HBAR        2
#define BOT_HBAR        4
#define LEFT_TOP_VBAR   8
#define LEFT_BOT_VBAR  16
#define RIGHT_TOP_VBAR 32
#define RIGHT_BOT_VBAR 64
    struct {
        int x, y, w, h;
    } segments[] = {
        { 1,  0, 5, 1 }, /* TOP_HBAR */
        { 1,  6, 5, 1 }, /* MID_HBAR */
        { 1, 12, 5, 1 }, /* BOT_HBAR */
        { 0,  1, 1, 5 }, /* LEFT_TOP_VBAR */
        { 0,  7, 1, 5 }, /* LEFT_BOT_VBAR */
        { 6,  1, 1, 5 }, /* RIGHT_TOP_VBAR */
        { 6,  7, 1, 5 }  /* RIGHT_BOT_VBAR */
    };
    static const unsigned char masks[10] = {
        /* 0 */ TOP_HBAR         |BOT_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 1 */                                                        RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 2 */ TOP_HBAR|MID_HBAR|BOT_HBAR|LEFT_BOT_VBAR              |RIGHT_TOP_VBAR,
        /* 3 */ TOP_HBAR|MID_HBAR|BOT_HBAR                            |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 4 */          MID_HBAR         |LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 5 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR                             |RIGHT_BOT_VBAR,
        /* 6 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR               |RIGHT_BOT_VBAR,
        /* 7 */ TOP_HBAR                                              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 8 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR|LEFT_BOT_VBAR|RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
        /* 9 */ TOP_HBAR|BOT_HBAR|MID_HBAR|LEFT_TOP_VBAR              |RIGHT_TOP_VBAR|RIGHT_BOT_VBAR,
    };
    unsigned mask = masks[digit];
    int i;

    draw_rectangle(0, dst, dst_linesize, segment_width, 0, 0, 8, 13);
    for (i = 0; i < FF_ARRAY_ELEMS(segments); i++)
        if (mask & (1<<i))
            draw_rectangle(255, dst, dst_linesize, segment_width,
                           segments[i].x, segments[i].y, segments[i].w, segments[i].h);
}
#define GRADIENT_SIZE (6 * 256)
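
/*
 * Paint the classic test pattern: eight vertical color bars, inverted inside
 * a centered circle (the circle test is evaluated incrementally through the
 * quad/dquad terms rather than with per-pixel multiplications), a sliding
 * RGB gradient strip in the lower part of the frame, and, when the frame is
 * large enough, a seven-segment second counter derived from nb_frame and the
 * time base.
 */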
static void test_fill_picture(AVFilterContext *ctx, AVFilterBufferRef *picref)
{
    TestSourceContext *test = ctx->priv;
    uint8_t *p, *p0;
    int x, y;
    int color, color_rest;
    int icolor;
    int radius;
    int quad0, quad;
    int dquad_x, dquad_y;
    int grad, dgrad, rgrad, drgrad;
    int seg_size;
    int second;
    int i;
    uint8_t *data = picref->data[0];
    int width  = picref->video->w;
    int height = picref->video->h;

    /* draw colored bars and circle */
    radius = (width + height) / 4;
    quad0 = width * width / 4 + height * height / 4 - radius * radius;
    dquad_y = 1 - height;
    p0 = data;
    for (y = 0; y < height; y++) {
        p = p0;
        color = 0;
        color_rest = 0;
        quad = quad0;
        dquad_x = 1 - width;
        for (x = 0; x < width; x++) {
            icolor = color;
            if (quad < 0)
                icolor ^= 7;
            quad += dquad_x;
            dquad_x += 2;
            *(p++) = icolor & 1 ? 255 : 0;
            *(p++) = icolor & 2 ? 255 : 0;
            *(p++) = icolor & 4 ? 255 : 0;
            color_rest += 8;
            if (color_rest >= width) {
                color_rest -= width;
                color++;
            }
        }
        quad0 += dquad_y;
        dquad_y += 2;
        p0 += picref->linesize[0];
    }

    /* draw sliding color line */
    p = data + picref->linesize[0] * height * 3/4;
    grad = (256 * test->nb_frame * test->time_base.num / test->time_base.den) %
        GRADIENT_SIZE;
    rgrad = 0;
    dgrad = GRADIENT_SIZE / width;
    drgrad = GRADIENT_SIZE % width;
    for (x = 0; x < width; x++) {
        *(p++) =
            grad < 256 || grad >= 5 * 256 ? 255 :
            grad >= 2 * 256 && grad < 4 * 256 ? 0 :
            grad < 2 * 256 ? 2 * 256 - 1 - grad : grad - 4 * 256;
        *(p++) =
            grad >= 4 * 256 ? 0 :
            grad >= 1 * 256 && grad < 3 * 256 ? 255 :
            grad < 1 * 256 ? grad : 4 * 256 - 1 - grad;
        *(p++) =
            grad < 2 * 256 ? 0 :
            grad >= 3 * 256 && grad < 5 * 256 ? 255 :
            grad < 3 * 256 ? grad - 2 * 256 : 6 * 256 - 1 - grad;
        grad += dgrad;
        rgrad += drgrad;
        if (rgrad >= GRADIENT_SIZE) {
            grad++;
            rgrad -= GRADIENT_SIZE;
        }
        if (grad >= GRADIENT_SIZE)
            grad -= GRADIENT_SIZE;
    }
    for (y = height / 8; y > 0; y--) {
        memcpy(p, p - picref->linesize[0], 3 * width);
        p += picref->linesize[0];
    }

    /* draw digits */
    seg_size = width / 80;
    if (seg_size >= 1 && height >= 13 * seg_size) {
        second = test->nb_frame * test->time_base.num / test->time_base.den;
        x = width - (width - seg_size * 64) / 2;
        y = (height - seg_size * 13) / 2;
        p = data + (x*3 + y * picref->linesize[0]);
        for (i = 0; i < 8; i++) {
            p -= 3 * 8 * seg_size;
            draw_digit(second % 10, p, picref->linesize[0], seg_size);
            second /= 10;
            if (second == 0)
                break;
        }
    }
}
static av_cold int test_init(AVFilterContext *ctx, const char *args)
{
    TestSourceContext *test = ctx->priv;

    test->class = &testsrc_class;
    test->fill_picture_fn = test_fill_picture;
    return init_common(ctx, args);
}

static int test_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
    };
    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
    return 0;
}
static const AVFilterPad avfilter_vsrc_testsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};

AVFilter avfilter_vsrc_testsrc = {
    .name          = "testsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .init          = test_init,
    .query_formats = test_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_testsrc_outputs,
};
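
/*
 * A hedged usage sketch (not part of this file): with a build that exposes
 * libavfilter sources through the lavfi input device, a command along the
 * lines of
 *     ffmpeg -f lavfi -i "testsrc=size=320x240:rate=25:duration=5" out.nut
 * would render five seconds of this pattern; the exact tool name and the
 * availability of that device depend on the Libav/FFmpeg version in use.
 */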
#endif /* CONFIG_TESTSRC_FILTER */

#if CONFIG_RGBTESTSRC_FILTER

static const char *rgbtestsrc_get_name(void *ctx)
{
    return "rgbtestsrc";
}

static const AVClass rgbtestsrc_class = {
    .class_name = "RGBTestSourceContext",
    .item_name  = rgbtestsrc_get_name,
    .option     = testsrc_options,
};

#define R 0
#define G 1
#define B 2
#define A 3
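
/*
 * rgba_map[] holds the byte position of each component within a packed
 * pixel for the negotiated output format (filled in by
 * rgbtest_config_props() below); rgbtest_put_pixel() uses it to shift the
 * R, G and B values into place before writing 24- or 32-bit words.
 */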
static void rgbtest_put_pixel(uint8_t *dst, int dst_linesize,
                              int x, int y, int r, int g, int b, enum AVPixelFormat fmt,
                              int rgba_map[4])
{
    int32_t v;
    uint8_t *p;

    switch (fmt) {
    case AV_PIX_FMT_BGR444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r >> 4) << 8) | ((g >> 4) << 4) | (b >> 4); break;
    case AV_PIX_FMT_RGB444: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b >> 4) << 8) | ((g >> 4) << 4) | (r >> 4); break;
    case AV_PIX_FMT_BGR555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<10) | ((g>>3)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB555: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<10) | ((g>>3)<<5) | (r>>3); break;
    case AV_PIX_FMT_BGR565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((r>>3)<<11) | ((g>>2)<<5) | (b>>3); break;
    case AV_PIX_FMT_RGB565: ((uint16_t*)(dst + y*dst_linesize))[x] = ((b>>3)<<11) | ((g>>2)<<5) | (r>>3); break;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8));
        p = dst + 3*x + y*dst_linesize;
        AV_WL24(p, v);
        break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_ARGB:
    case AV_PIX_FMT_ABGR:
        v = (r << (rgba_map[R]*8)) + (g << (rgba_map[G]*8)) + (b << (rgba_map[B]*8));
        p = dst + 4*x + y*dst_linesize;
        AV_WL32(p, v);
        break;
    }
}
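
/*
 * Fill the frame with the rgbtest layout: a horizontal 0..255 intensity ramp
 * in each third of the picture, red in the top third, green in the middle
 * and blue at the bottom, so swapped channels or endianness bugs show up at
 * a glance.
 */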
static void rgbtest_fill_picture(AVFilterContext *ctx, AVFilterBufferRef *picref)
{
    TestSourceContext *test = ctx->priv;
    int x, y, w = picref->video->w, h = picref->video->h;

    for (y = 0; y < h; y++) {
        for (x = 0; x < picref->video->w; x++) {
            int c = 256*x/w;
            int r = 0, g = 0, b = 0;

            if      (3*y < h  ) r = c;
            else if (3*y < 2*h) g = c;
            else                b = c;

            rgbtest_put_pixel(picref->data[0], picref->linesize[0], x, y, r, g, b,
                              ctx->outputs[0]->format, test->rgba_map);
        }
    }
}

static av_cold int rgbtest_init(AVFilterContext *ctx, const char *args)
{
    TestSourceContext *test = ctx->priv;

    test->class = &rgbtestsrc_class;
    test->fill_picture_fn = rgbtest_fill_picture;
    return init_common(ctx, args);
}

static int rgbtest_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGBA, AV_PIX_FMT_ARGB, AV_PIX_FMT_BGRA, AV_PIX_FMT_ABGR,
        AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB24,
        AV_PIX_FMT_RGB444, AV_PIX_FMT_BGR444,
        AV_PIX_FMT_RGB565, AV_PIX_FMT_BGR565,
        AV_PIX_FMT_RGB555, AV_PIX_FMT_BGR555,
        AV_PIX_FMT_NONE
    };
    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
    return 0;
}

static int rgbtest_config_props(AVFilterLink *outlink)
{
    TestSourceContext *test = outlink->src->priv;

    switch (outlink->format) {
    case AV_PIX_FMT_ARGB:  test->rgba_map[A] = 0; test->rgba_map[R] = 1; test->rgba_map[G] = 2; test->rgba_map[B] = 3; break;
    case AV_PIX_FMT_ABGR:  test->rgba_map[A] = 0; test->rgba_map[B] = 1; test->rgba_map[G] = 2; test->rgba_map[R] = 3; break;
    case AV_PIX_FMT_RGBA:
    case AV_PIX_FMT_RGB24: test->rgba_map[R] = 0; test->rgba_map[G] = 1; test->rgba_map[B] = 2; test->rgba_map[A] = 3; break;
    case AV_PIX_FMT_BGRA:
    case AV_PIX_FMT_BGR24: test->rgba_map[B] = 0; test->rgba_map[G] = 1; test->rgba_map[R] = 2; test->rgba_map[A] = 3; break;
    }

    return config_props(outlink);
}

static const AVFilterPad avfilter_vsrc_rgbtestsrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = rgbtest_config_props,
    },
    { NULL }
};

AVFilter avfilter_vsrc_rgbtestsrc = {
    .name          = "rgbtestsrc",
    .description   = NULL_IF_CONFIG_SMALL("Generate RGB test pattern."),
    .priv_size     = sizeof(TestSourceContext),
    .init          = rgbtest_init,
    .query_formats = rgbtest_query_formats,
    .inputs        = NULL,
    .outputs       = avfilter_vsrc_rgbtestsrc_outputs,
};

#endif /* CONFIG_RGBTESTSRC_FILTER */