/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "libavutil/buffer.h"
#include "libavutil/hwcontext.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

typedef struct HWMapContext {
    const AVClass *class;

    AVBufferRef   *hwframes_ref;       // hardware frames context used on the output link

    int            mode;               // AV_HWFRAME_MAP_* flags for the mapping
    char          *derive_device_type; // name of a device type to derive, if any
    int            reverse;            // allocate in the sink and map backwards
} HWMapContext;

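// Accept any video format on both links; the combinations that can actually
// be mapped are checked when the output link is configured below.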
static int hwmap_query_formats(AVFilterContext *avctx)
{
    ff_formats_ref(ff_all_formats(AVMEDIA_TYPE_VIDEO),
                   &avctx->inputs[0]->out_formats);
    ff_formats_ref(ff_all_formats(AVMEDIA_TYPE_VIDEO),
                   &avctx->outputs[0]->in_formats);

    return 0;
}

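// Decide how frames will be mapped from the formats on the two links:
// hardware -> hardware (forwards or in reverse), hardware -> software
// (or undoing such a mapping), or software -> hardware with reverse
// allocation when only a device is available.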
static int hwmap_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    HWMapContext      *ctx = avctx->priv;
    AVFilterLink   *inlink = avctx->inputs[0];
    AVHWFramesContext *hwfc;
    AVBufferRef *device;
    const AVPixFmtDescriptor *desc;
    int err;

    av_log(avctx, AV_LOG_DEBUG, "Configure hwmap %s -> %s.\n",
           av_get_pix_fmt_name(inlink->format),
           av_get_pix_fmt_name(outlink->format));

    av_buffer_unref(&ctx->hwframes_ref);

    device = avctx->hw_device_ctx;

    if (inlink->hw_frames_ctx) {
        hwfc = (AVHWFramesContext*)inlink->hw_frames_ctx->data;

        if (ctx->derive_device_type) {
            enum AVHWDeviceType type;

            type = av_hwdevice_find_type_by_name(ctx->derive_device_type);
            if (type == AV_HWDEVICE_TYPE_NONE) {
                av_log(avctx, AV_LOG_ERROR, "Invalid device type.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwdevice_ctx_create_derived(&device, type,
                                                 hwfc->device_ref, 0);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create derived "
                       "device context: %d.\n", err);
                goto fail;
            }
        }

        desc = av_pix_fmt_desc_get(outlink->format);
        if (!desc) {
            err = AVERROR(EINVAL);
            goto fail;
        }

        if (inlink->format == hwfc->format &&
            (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
            !ctx->reverse) {
            // Map between two hardware formats (including the case of
            // undoing an existing mapping).

            if (!device) {
                av_log(avctx, AV_LOG_ERROR, "A device reference is "
                       "required to map to a hardware format.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&ctx->hwframes_ref,
                                                outlink->format,
                                                device,
                                                inlink->hw_frames_ctx, 0);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create derived "
                       "frames context: %d.\n", err);
                goto fail;
            }

        } else if (inlink->format == hwfc->format &&
                   (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
                   ctx->reverse) {
            // Map between two hardware formats, but do it in reverse.
            // Make a new hwframe context for the target type, and then
            // overwrite the input hwframe context with a derived context
            // mapped from that back to the source type.
            AVBufferRef *source;
            AVHWFramesContext *frames;

            ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
            frames = (AVHWFramesContext*)ctx->hwframes_ref->data;

            frames->format    = outlink->format;
            frames->sw_format = hwfc->sw_format;
            frames->width     = hwfc->width;
            frames->height    = hwfc->height;
            frames->initial_pool_size = 64;

            err = av_hwframe_ctx_init(ctx->hwframes_ref);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to initialise "
                       "target frames context: %d.\n", err);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&source,
                                                inlink->format,
                                                hwfc->device_ref,
                                                ctx->hwframes_ref,
                                                ctx->mode);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create "
                       "derived source frames context: %d.\n", err);
                goto fail;
            }

            // Here is the naughty bit. This overwriting changes what
            // ff_get_video_buffer() in the previous filter returns -
            // it will now give a frame allocated here mapped back to
            // the format it expects. If there were any additional
            // constraints on the output frames there then this may
            // break nastily.
            av_buffer_unref(&inlink->hw_frames_ctx);
            inlink->hw_frames_ctx = source;

        } else if ((outlink->format == hwfc->format &&
                    inlink->format  == hwfc->sw_format) ||
                   inlink->format == hwfc->format) {
            // Map from a hardware format to a software format, or
            // undo an existing such mapping.

            ctx->hwframes_ref = av_buffer_ref(inlink->hw_frames_ctx);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

        } else {
            // Non-matching formats - not supported.

            av_log(avctx, AV_LOG_ERROR, "Unsupported formats for "
                   "hwmap: from %s (%s) to %s.\n",
                   av_get_pix_fmt_name(inlink->format),
                   av_get_pix_fmt_name(hwfc->format),
                   av_get_pix_fmt_name(outlink->format));
            err = AVERROR(EINVAL);
            goto fail;
        }

    } else if (avctx->hw_device_ctx) {
        // Map from a software format to a hardware format. This
        // creates a new hwframe context like hwupload, but then
        // returns frames mapped from that to the previous link in
        // order to fill them without an additional copy.

        if (!device) {
            av_log(avctx, AV_LOG_ERROR, "A device reference is "
                   "required to create new frames with reverse "
                   "mapping.\n");
            err = AVERROR(EINVAL);
            goto fail;
        }

        ctx->reverse = 1;

        ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
        if (!ctx->hwframes_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        hwfc = (AVHWFramesContext*)ctx->hwframes_ref->data;

        hwfc->format    = outlink->format;
        hwfc->sw_format = inlink->format;
        hwfc->width     = inlink->w;
        hwfc->height    = inlink->h;

        err = av_hwframe_ctx_init(ctx->hwframes_ref);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create frame "
                   "context for reverse mapping: %d.\n", err);
            goto fail;
        }

    } else {
        av_log(avctx, AV_LOG_ERROR, "Mapping requires a hardware "
               "context (a device, or frames on input).\n");
        return AVERROR(EINVAL);
    }

    outlink->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    return 0;

fail:
    av_buffer_unref(&ctx->hwframes_ref);
    return err;
}

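// In reverse mode this allocates the buffer in the output hardware frames
// context and hands the previous filter a software frame mapped from it;
// otherwise it falls back to the default buffer allocation.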
static AVFrame *hwmap_get_buffer(AVFilterLink *inlink, int w, int h)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;

    if (ctx->reverse && !inlink->hw_frames_ctx) {
        AVFrame *src, *dst;
        int err;

        src = ff_get_video_buffer(outlink, w, h);
        if (!src) {
            av_log(avctx, AV_LOG_ERROR, "Failed to allocate source "
                   "frame for software mapping.\n");
            return NULL;
        }

        dst = av_frame_alloc();
        if (!dst) {
            av_frame_free(&src);
            return NULL;
        }

        err = av_hwframe_map(dst, src, ctx->mode);
        if (err) {
            av_log(avctx, AV_LOG_ERROR, "Failed to map frame to "
                   "software: %d.\n", err);
            av_frame_free(&src);
            av_frame_free(&dst);
            return NULL;
        }

        av_frame_free(&src);
        return dst;
    } else {
        return ff_default_get_video_buffer(inlink, w, h);
    }
}

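// Map each incoming frame into the output format: the mapped frame
// references the output hardware frames context and inherits the input
// frame's properties before being passed on.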
static int hwmap_filter_frame(AVFilterLink *link, AVFrame *input)
{
    AVFilterContext *avctx = link->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;
    AVFrame *map = NULL;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input->format),
           input->width, input->height, input->pts);

    map = av_frame_alloc();
    if (!map) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    map->format = outlink->format;
    map->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!map->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->reverse && !input->hw_frames_ctx) {
        // If we mapped backwards from hardware to software, we need
        // to attach the hardware frame context to the input frame to
        // make the mapping visible to av_hwframe_map().
        input->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
        if (!input->hw_frames_ctx) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    err = av_hwframe_map(map, input, ctx->mode);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to map frame: %d.\n", err);
        goto fail;
    }

    err = av_frame_copy_props(map, input);
    if (err < 0)
        goto fail;

    av_frame_free(&input);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(map->format),
           map->width, map->height, map->pts);

    return ff_filter_frame(outlink, map);

fail:
    av_frame_free(&input);
    av_frame_free(&map);
    return err;
}

static av_cold void hwmap_uninit(AVFilterContext *avctx)
{
    HWMapContext *ctx = avctx->priv;

    av_buffer_unref(&ctx->hwframes_ref);
}

#define OFFSET(x) offsetof(HWMapContext, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption hwmap_options[] = {
    { "mode", "Frame mapping mode",
      OFFSET(mode), AV_OPT_TYPE_FLAGS,
      { .i64 = AV_HWFRAME_MAP_READ | AV_HWFRAME_MAP_WRITE },
      0, INT_MAX, FLAGS, "mode" },

    { "read", "Mapping should be readable",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_READ },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "write", "Mapping should be writeable",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_WRITE },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "overwrite", "Mapping will always overwrite the entire frame",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_OVERWRITE },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "direct", "Mapping should not involve any copying",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_DIRECT },
      INT_MIN, INT_MAX, FLAGS, "mode" },

    { "derive_device", "Derive a new device of this type",
      OFFSET(derive_device_type), AV_OPT_TYPE_STRING,
      { .str = NULL }, 0, 0, FLAGS },

    { "reverse", "Map in reverse (create and allocate in the sink)",
      OFFSET(reverse), AV_OPT_TYPE_INT,
      { .i64 = 0 }, 0, 1, FLAGS },

    { NULL },
};

static const AVClass hwmap_class = {
    .class_name = "hwmap",
    .item_name  = av_default_item_name,
    .option     = hwmap_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVFilterPad hwmap_inputs[] = {
    {
        .name             = "default",
        .type             = AVMEDIA_TYPE_VIDEO,
        .get_video_buffer = &hwmap_get_buffer,
        .filter_frame     = &hwmap_filter_frame,
    },
    { NULL }
};

static const AVFilterPad hwmap_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &hwmap_config_output,
    },
    { NULL }
};

AVFilter ff_vf_hwmap = {
    .name           = "hwmap",
    .description    = NULL_IF_CONFIG_SMALL("Map hardware frames"),
    .uninit         = &hwmap_uninit,
    .priv_size      = sizeof(HWMapContext),
    .priv_class     = &hwmap_class,
    .query_formats  = &hwmap_query_formats,
    .inputs         = hwmap_inputs,
    .outputs        = hwmap_outputs,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};