/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
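
/*
 * The hwmap filter maps frames between hardware and software memory, or
 * between two hardware APIs, using the av_hwframe_map() machinery in
 * libavutil, avoiding copies where the underlying APIs allow it.
 *
 * Illustrative command-line sketches (assumptions by the editor, not taken
 * from this file; the exact device setup depends on platform and build):
 *
 *   Map decoded VAAPI frames to an OpenCL device, filter, then map back:
 *     -vf hwmap=derive_device=opencl,...,hwmap=derive_device=vaapi:reverse=1
 *
 *   Map hardware frames into system memory, choosing the software format
 *   with a following format filter:
 *     -vf hwmap,format=nv12
 */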

#include "libavutil/buffer.h"
#include "libavutil/hwcontext.h"
#include "libavutil/log.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
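
/**
 * Private context for the hwmap filter.  hwframes_ref holds the frames
 * context that output frames will be attached to; mode, derive_device_type
 * and reverse mirror the AVOptions below (reverse is also forced on
 * internally for software-to-hardware mapping).
 */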
typedef struct HWMapContext {
    const AVClass *class;

    AVBufferRef   *hwframes_ref;

    int            mode;
    char          *derive_device_type;
    int            reverse;
} HWMapContext;
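
/**
 * Format negotiation: accept every video pixel format on both the input
 * and the output; the real compatibility check happens later in
 * hwmap_config_output() once the hardware frames context is known.
 */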
static int hwmap_query_formats(AVFilterContext *avctx)
{
    int ret;

    if ((ret = ff_formats_ref(ff_all_formats(AVMEDIA_TYPE_VIDEO),
                              &avctx->inputs[0]->out_formats)) < 0 ||
        (ret = ff_formats_ref(ff_all_formats(AVMEDIA_TYPE_VIDEO),
                              &avctx->outputs[0]->in_formats)) < 0)
        return ret;

    return 0;
}
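
/**
 * Link configuration.  Depending on the input/output formats and the
 * presence of a hardware frames or device context, this sets up one of:
 *   - a derived frames context for hardware-to-hardware mapping,
 *   - a reverse hardware-to-hardware mapping (frames allocated here and
 *     mapped back to the previous filter),
 *   - a hardware-to-software mapping (or undoing such a mapping), or
 *   - a software-to-hardware mapping using reverse-mapped upload frames.
 * Unsupported combinations are rejected with AVERROR(EINVAL).
 */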
static int hwmap_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    HWMapContext      *ctx = avctx->priv;
    AVFilterLink   *inlink = avctx->inputs[0];
    AVHWFramesContext *hwfc;
    AVBufferRef *device;
    const AVPixFmtDescriptor *desc;
    int err, device_is_derived;

    av_log(avctx, AV_LOG_DEBUG, "Configure hwmap %s -> %s.\n",
           av_get_pix_fmt_name(inlink->format),
           av_get_pix_fmt_name(outlink->format));

    av_buffer_unref(&ctx->hwframes_ref);

    device = avctx->hw_device_ctx;
    device_is_derived = 0;

    if (inlink->hw_frames_ctx) {
        hwfc = (AVHWFramesContext*)inlink->hw_frames_ctx->data;

        if (ctx->derive_device_type) {
            enum AVHWDeviceType type;

            type = av_hwdevice_find_type_by_name(ctx->derive_device_type);
            if (type == AV_HWDEVICE_TYPE_NONE) {
                av_log(avctx, AV_LOG_ERROR, "Invalid device type.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwdevice_ctx_create_derived(&device, type,
                                                 hwfc->device_ref, 0);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create derived "
                       "device context: %d.\n", err);
                goto fail;
            }
            device_is_derived = 1;
        }

        desc = av_pix_fmt_desc_get(outlink->format);
        if (!desc) {
            err = AVERROR(EINVAL);
            goto fail;
        }

        if (inlink->format == hwfc->format &&
            (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
            !ctx->reverse) {
            // Map between two hardware formats (including the case of
            // undoing an existing mapping).

            if (!device) {
                av_log(avctx, AV_LOG_ERROR, "A device reference is "
                       "required to map to a hardware format.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&ctx->hwframes_ref,
                                                outlink->format,
                                                device,
                                                inlink->hw_frames_ctx, 0);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create derived "
                       "frames context: %d.\n", err);
                goto fail;
            }

        } else if (inlink->format == hwfc->format &&
                   (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
                   ctx->reverse) {
            // Map between two hardware formats, but do it in reverse.
            // Make a new hwframe context for the target type, and then
            // overwrite the input hwframe context with a derived context
            // mapped from that back to the source type.
            AVBufferRef *source;
            AVHWFramesContext *frames;

            ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
            frames = (AVHWFramesContext*)ctx->hwframes_ref->data;

            frames->format    = outlink->format;
            frames->sw_format = hwfc->sw_format;
            frames->width     = hwfc->width;
            frames->height    = hwfc->height;
            frames->initial_pool_size = 64;

            err = av_hwframe_ctx_init(ctx->hwframes_ref);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to initialise "
                       "target frames context: %d.\n", err);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&source,
                                                inlink->format,
                                                hwfc->device_ref,
                                                ctx->hwframes_ref,
                                                ctx->mode);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create "
                       "derived source frames context: %d.\n", err);
                goto fail;
            }

            // Here is the naughty bit.  This overwriting changes what
            // ff_get_video_buffer() in the previous filter returns -
            // it will now give a frame allocated here, mapped back to
            // the format it expects.  If there were any additional
            // constraints on the output frames there, then this may
            // break nastily.
            av_buffer_unref(&inlink->hw_frames_ctx);
            inlink->hw_frames_ctx = source;

        } else if ((outlink->format == hwfc->format &&
                    inlink->format  == hwfc->sw_format) ||
                   inlink->format == hwfc->format) {
            // Map from a hardware format to a software format, or
            // undo an existing such mapping.

            ctx->hwframes_ref = av_buffer_ref(inlink->hw_frames_ctx);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

        } else {
            // Non-matching formats - not supported.

            av_log(avctx, AV_LOG_ERROR, "Unsupported formats for "
                   "hwmap: from %s (%s) to %s.\n",
                   av_get_pix_fmt_name(inlink->format),
                   av_get_pix_fmt_name(hwfc->format),
                   av_get_pix_fmt_name(outlink->format));
            err = AVERROR(EINVAL);
            goto fail;
        }

    } else if (avctx->hw_device_ctx) {
        // Map from a software format to a hardware format.  This
        // creates a new hwframe context like hwupload, but then
        // returns frames mapped from that to the previous link in
        // order to fill them without an additional copy.

        if (!device) {
            av_log(avctx, AV_LOG_ERROR, "A device reference is "
                   "required to create new frames with reverse "
                   "mapping.\n");
            err = AVERROR(EINVAL);
            goto fail;
        }

        ctx->reverse = 1;

        ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
        if (!ctx->hwframes_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        hwfc = (AVHWFramesContext*)ctx->hwframes_ref->data;

        hwfc->format    = outlink->format;
        hwfc->sw_format = inlink->format;
        hwfc->width     = inlink->w;
        hwfc->height    = inlink->h;

        err = av_hwframe_ctx_init(ctx->hwframes_ref);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create frame "
                   "context for reverse mapping: %d.\n", err);
            goto fail;
        }

    } else {
        av_log(avctx, AV_LOG_ERROR, "Mapping requires a hardware "
               "context (a device, or frames on input).\n");
        return AVERROR(EINVAL);
    }

    outlink->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    if (device_is_derived)
        av_buffer_unref(&device);
    return 0;

fail:
    if (device_is_derived)
        av_buffer_unref(&device);
    av_buffer_unref(&ctx->hwframes_ref);
    return err;
}
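
/**
 * Buffer allocation callback for the input link.  When reverse mapping
 * from software input, allocate the frame on the output link (in hardware)
 * and hand the previous filter a software mapping of it, so that writing
 * into the buffer fills the hardware frame without an extra copy.
 * Otherwise fall back to the default allocator.
 */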
static AVFrame *hwmap_get_buffer(AVFilterLink *inlink, int w, int h)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;

    if (ctx->reverse && !inlink->hw_frames_ctx) {
        AVFrame *src, *dst;
        int err;

        src = ff_get_video_buffer(outlink, w, h);
        if (!src) {
            av_log(avctx, AV_LOG_ERROR, "Failed to allocate source "
                   "frame for software mapping.\n");
            return NULL;
        }

        dst = av_frame_alloc();
        if (!dst) {
            av_frame_free(&src);
            return NULL;
        }

        err = av_hwframe_map(dst, src, ctx->mode);
        if (err) {
            av_log(avctx, AV_LOG_ERROR, "Failed to map frame to "
                   "software: %d.\n", err);
            av_frame_free(&src);
            av_frame_free(&dst);
            return NULL;
        }

        av_frame_free(&src);
        return dst;
    } else {
        return ff_default_get_video_buffer(inlink, w, h);
    }
}
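
/**
 * Per-frame processing: allocate a frame descriptor with the output format
 * and frames context, map the input frame onto it with av_hwframe_map(),
 * copy the frame properties across and forward the result downstream.
 */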
static int hwmap_filter_frame(AVFilterLink *link, AVFrame *input)
{
    AVFilterContext *avctx = link->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;
    AVFrame *map = NULL;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input->format),
           input->width, input->height, input->pts);

    map = av_frame_alloc();
    if (!map) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    map->format = outlink->format;
    map->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!map->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->reverse && !input->hw_frames_ctx) {
        // If we mapped backwards from hardware to software, we need
        // to attach the hardware frame context to the input frame to
        // make the mapping visible to av_hwframe_map().
        input->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
        if (!input->hw_frames_ctx) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    err = av_hwframe_map(map, input, ctx->mode);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to map frame: %d.\n", err);
        goto fail;
    }

    err = av_frame_copy_props(map, input);
    if (err < 0)
        goto fail;

    av_frame_free(&input);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(map->format),
           map->width, map->height, map->pts);

    return ff_filter_frame(outlink, map);

fail:
    av_frame_free(&input);
    av_frame_free(&map);
    return err;
}
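
/**
 * Release the frames context reference held by the filter.
 */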
static av_cold void hwmap_uninit(AVFilterContext *avctx)
{
    HWMapContext *ctx = avctx->priv;

    av_buffer_unref(&ctx->hwframes_ref);
}
#define OFFSET(x) offsetof(HWMapContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption hwmap_options[] = {
    { "mode", "Frame mapping mode",
      OFFSET(mode), AV_OPT_TYPE_FLAGS,
      { .i64 = AV_HWFRAME_MAP_READ | AV_HWFRAME_MAP_WRITE },
      0, INT_MAX, FLAGS, "mode" },

    { "read", "Mapping should be readable",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_READ },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "write", "Mapping should be writeable",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_WRITE },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "overwrite", "Mapping will always overwrite the entire frame",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_OVERWRITE },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "direct", "Mapping should not involve any copying",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_DIRECT },
      INT_MIN, INT_MAX, FLAGS, "mode" },

    { "derive_device", "Derive a new device of this type",
      OFFSET(derive_device_type), AV_OPT_TYPE_STRING,
      { .str = NULL }, 0, 0, FLAGS },
    { "reverse", "Map in reverse (create and allocate in the sink)",
      OFFSET(reverse), AV_OPT_TYPE_INT,
      { .i64 = 0 }, 0, 1, FLAGS },

    { NULL }
};

AVFILTER_DEFINE_CLASS(hwmap);

static const AVFilterPad hwmap_inputs[] = {
    {
        .name             = "default",
        .type             = AVMEDIA_TYPE_VIDEO,
        .get_video_buffer = hwmap_get_buffer,
        .filter_frame     = hwmap_filter_frame,
    },
    { NULL }
};

static const AVFilterPad hwmap_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = hwmap_config_output,
    },
    { NULL }
};

AVFilter ff_vf_hwmap = {
    .name           = "hwmap",
    .description    = NULL_IF_CONFIG_SMALL("Map hardware frames"),
    .uninit         = hwmap_uninit,
    .priv_size      = sizeof(HWMapContext),
    .priv_class     = &hwmap_class,
    .query_formats  = hwmap_query_formats,
    .inputs         = hwmap_inputs,
    .outputs        = hwmap_outputs,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};