You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

327 lines
17KB

  1. /*
  2. * Copyright (c) 2016 Paul B Mahol
  3. *
  4. * Permission is hereby granted, free of charge, to any person obtaining a copy
  5. * of this software and associated documentation files (the "Software"), to deal
  6. * in the Software without restriction, including without limitation the rights
  7. * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
  8. * copies of the Software, and to permit persons to whom the Software is
  9. * furnished to do so, subject to the following conditions:
  10. *
  11. * The above copyright notice and this permission notice shall be included in
  12. * all copies or substantial portions of the Software.
  13. *
  14. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15. * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16. * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
  17. * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
  18. * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
  19. * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  20. * SOFTWARE.
  21. */
  22. #include "libavutil/imgutils.h"
  23. #include "libavutil/opt.h"
  24. #include "libavutil/pixdesc.h"
  25. #include "avfilter.h"
  26. #include "formats.h"
  27. #include "internal.h"
  28. #include "video.h"
/* Filter state for the average-blur filter. */
typedef struct AverageBlurContext {
    const AVClass *class;
    int radius;          // horizontal blur radius ("sizeX" option)
    int radiusV;         // vertical blur radius ("sizeY" option); 0 means "same as radius"
    int planes;          // bitmask selecting which planes to filter
    int depth;           // bits per component of the input pixel format
    int planewidth[4];   // per-plane width in pixels
    int planeheight[4];  // per-plane height in pixels
    float *buffer;       // intermediate w*h float plane (horizontal-pass output)
    int nb_planes;       // number of planes in the input pixel format

    // Sample-size specific slice workers, chosen in config_input()
    int (*filter_horizontally)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
    int (*filter_vertically)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} AverageBlurContext;
#define OFFSET(x) offsetof(AverageBlurContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

/* User-visible options; sizeY defaults to 0 = "follow sizeX" (see config_input). */
static const AVOption avgblur_options[] = {
    { "sizeX",  "set horizontal size",  OFFSET(radius),  AV_OPT_TYPE_INT, {.i64=1},   1, 1024, FLAGS },
    { "planes", "set planes to filter", OFFSET(planes),  AV_OPT_TYPE_INT, {.i64=0xF}, 0, 0xF,  FLAGS },
    { "sizeY",  "set vertical size",    OFFSET(radiusV), AV_OPT_TYPE_INT, {.i64=0},   0, 1024, FLAGS },
    { NULL }
};

AVFILTER_DEFINE_CLASS(avgblur);
/* Per-plane job context handed to the slice-threaded filter passes. */
typedef struct ThreadData {
    int height;    // plane height in pixels
    int width;     // plane width in pixels
    uint8_t *ptr;  // plane base: pass input for the horizontal stage, output for the vertical stage
    int linesize;  // plane stride in bytes
} ThreadData;
  57. #define HORIZONTAL_FILTER(name, type) \
  58. static int filter_horizontally_##name(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)\
  59. { \
  60. AverageBlurContext *s = ctx->priv; \
  61. ThreadData *td = arg; \
  62. const int height = td->height; \
  63. const int width = td->width; \
  64. const int slice_start = (height * jobnr ) / nb_jobs; \
  65. const int slice_end = (height * (jobnr+1)) / nb_jobs; \
  66. const int radius = FFMIN(s->radius, width / 2); \
  67. const int linesize = td->linesize / sizeof(type); \
  68. float *buffer = s->buffer; \
  69. const type *src; \
  70. float *ptr; \
  71. int y, x; \
  72. \
  73. /* Filter horizontally along each row */ \
  74. for (y = slice_start; y < slice_end; y++) { \
  75. float acc = 0; \
  76. int count = 0; \
  77. \
  78. src = (const type *)td->ptr + linesize * y; \
  79. ptr = buffer + width * y; \
  80. \
  81. for (x = 0; x < radius; x++) { \
  82. acc += src[x]; \
  83. } \
  84. count += radius; \
  85. \
  86. for (x = 0; x <= radius; x++) { \
  87. acc += src[x + radius]; \
  88. count++; \
  89. ptr[x] = acc / count; \
  90. } \
  91. \
  92. for (; x < width - radius; x++) { \
  93. acc += src[x + radius] - src[x - radius - 1]; \
  94. ptr[x] = acc / count; \
  95. } \
  96. \
  97. for (; x < width; x++) { \
  98. acc -= src[x - radius]; \
  99. count--; \
  100. ptr[x] = acc / count; \
  101. } \
  102. } \
  103. \
  104. return 0; \
  105. }
  106. HORIZONTAL_FILTER(8, uint8_t)
  107. HORIZONTAL_FILTER(16, uint16_t)
  108. #define VERTICAL_FILTER(name, type) \
  109. static int filter_vertically_##name(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) \
  110. { \
  111. AverageBlurContext *s = ctx->priv; \
  112. ThreadData *td = arg; \
  113. const int height = td->height; \
  114. const int width = td->width; \
  115. const int slice_start = (width * jobnr ) / nb_jobs; \
  116. const int slice_end = (width * (jobnr+1)) / nb_jobs; \
  117. const int radius = FFMIN(s->radiusV, height / 2); \
  118. const int linesize = td->linesize / sizeof(type); \
  119. type *buffer = (type *)td->ptr; \
  120. const float *src; \
  121. type *ptr; \
  122. int i, x; \
  123. \
  124. /* Filter vertically along each column */ \
  125. for (x = slice_start; x < slice_end; x++) { \
  126. float acc = 0; \
  127. int count = 0; \
  128. \
  129. ptr = buffer + x; \
  130. src = s->buffer + x; \
  131. \
  132. for (i = 0; i < radius; i++) { \
  133. acc += src[0]; \
  134. src += width; \
  135. } \
  136. count += radius; \
  137. \
  138. src = s->buffer + x; \
  139. ptr = buffer + x; \
  140. for (i = 0; i <= radius; i++) { \
  141. acc += src[(i + radius) * width]; \
  142. count++; \
  143. ptr[i * linesize] = acc / count; \
  144. } \
  145. \
  146. for (; i < height - radius; i++) { \
  147. acc += src[(i + radius) * width] - src[(i - radius - 1) * width]; \
  148. ptr[i * linesize] = acc / count; \
  149. } \
  150. \
  151. for (; i < height; i++) { \
  152. acc -= src[(i - radius) * width]; \
  153. count--; \
  154. ptr[i * linesize] = acc / count; \
  155. } \
  156. } \
  157. \
  158. return 0; \
  159. }
  160. VERTICAL_FILTER(8, uint8_t)
  161. VERTICAL_FILTER(16, uint16_t)
  162. static int config_input(AVFilterLink *inlink)
  163. {
  164. const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
  165. AverageBlurContext *s = inlink->dst->priv;
  166. s->depth = desc->comp[0].depth;
  167. s->planewidth[1] = s->planewidth[2] = AV_CEIL_RSHIFT(inlink->w, desc->log2_chroma_w);
  168. s->planewidth[0] = s->planewidth[3] = inlink->w;
  169. s->planeheight[1] = s->planeheight[2] = AV_CEIL_RSHIFT(inlink->h, desc->log2_chroma_h);
  170. s->planeheight[0] = s->planeheight[3] = inlink->h;
  171. s->nb_planes = av_pix_fmt_count_planes(inlink->format);
  172. s->buffer = av_malloc_array(inlink->w, inlink->h * sizeof(*s->buffer));
  173. if (!s->buffer)
  174. return AVERROR(ENOMEM);
  175. if (s->radiusV <= 0) {
  176. s->radiusV = s->radius;
  177. }
  178. if (s->depth == 8) {
  179. s->filter_horizontally = filter_horizontally_8;
  180. s->filter_vertically = filter_vertically_8;
  181. } else {
  182. s->filter_horizontally = filter_horizontally_16;
  183. s->filter_vertically = filter_vertically_16;
  184. }
  185. return 0;
  186. }
  187. static void averageiir2d(AVFilterContext *ctx, AVFrame *in, AVFrame *out, int plane)
  188. {
  189. AverageBlurContext *s = ctx->priv;
  190. const int width = s->planewidth[plane];
  191. const int height = s->planeheight[plane];
  192. const int nb_threads = ff_filter_get_nb_threads(ctx);
  193. ThreadData td;
  194. td.width = width;
  195. td.height = height;
  196. td.ptr = in->data[plane];
  197. td.linesize = in->linesize[plane];
  198. ctx->internal->execute(ctx, s->filter_horizontally, &td, NULL, FFMIN(height, nb_threads));
  199. td.ptr = out->data[plane];
  200. td.linesize = out->linesize[plane];
  201. ctx->internal->execute(ctx, s->filter_vertically, &td, NULL, FFMIN(width, nb_threads));
  202. }
  203. static int query_formats(AVFilterContext *ctx)
  204. {
  205. static const enum AVPixelFormat pix_fmts[] = {
  206. AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV440P,
  207. AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P,
  208. AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUV420P,
  209. AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P,
  210. AV_PIX_FMT_YUVJ411P, AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P,
  211. AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
  212. AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
  213. AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV440P12,
  214. AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14,
  215. AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
  216. AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9,
  217. AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
  218. AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
  219. AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
  220. AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
  221. AV_PIX_FMT_GBRAP, AV_PIX_FMT_GBRAP10, AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16,
  222. AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY9, AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY16,
  223. AV_PIX_FMT_NONE
  224. };
  225. return ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
  226. }
  227. static int filter_frame(AVFilterLink *inlink, AVFrame *in)
  228. {
  229. AVFilterContext *ctx = inlink->dst;
  230. AverageBlurContext *s = ctx->priv;
  231. AVFilterLink *outlink = ctx->outputs[0];
  232. AVFrame *out;
  233. int plane;
  234. if (av_frame_is_writable(in)) {
  235. out = in;
  236. } else {
  237. out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
  238. if (!out) {
  239. av_frame_free(&in);
  240. return AVERROR(ENOMEM);
  241. }
  242. av_frame_copy_props(out, in);
  243. }
  244. for (plane = 0; plane < s->nb_planes; plane++) {
  245. const int height = s->planeheight[plane];
  246. const int width = s->planewidth[plane];
  247. if (!(s->planes & (1 << plane))) {
  248. if (out != in)
  249. av_image_copy_plane(out->data[plane], out->linesize[plane],
  250. in->data[plane], in->linesize[plane],
  251. width * ((s->depth + 7) / 8), height);
  252. continue;
  253. }
  254. averageiir2d(ctx, in, out, plane);
  255. }
  256. if (out != in)
  257. av_frame_free(&in);
  258. return ff_filter_frame(outlink, out);
  259. }
/* Filter teardown: release the float intermediate buffer. */
static av_cold void uninit(AVFilterContext *ctx)
{
    AverageBlurContext *s = ctx->priv;

    av_freep(&s->buffer);
}
/* Single video input; config_input sets up buffers, filter_frame does the work. */
static const AVFilterPad avgblur_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_input,
        .filter_frame = filter_frame,
    },
    { NULL }
};

/* Single pass-through video output. */
static const AVFilterPad avgblur_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};
/* Filter registration: slice-threaded, timeline-enabled average blur. */
AVFilter ff_vf_avgblur = {
    .name          = "avgblur",
    .description   = NULL_IF_CONFIG_SMALL("Apply Average Blur filter."),
    .priv_size     = sizeof(AverageBlurContext),
    .priv_class    = &avgblur_class,
    .uninit        = uninit,
    .query_formats = query_formats,
    .inputs        = avgblur_inputs,
    .outputs       = avgblur_outputs,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
};