/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

  18. #include "libavutil/buffer.h"
  19. #include "libavutil/hwcontext.h"
  20. #include "libavutil/log.h"
  21. #include "libavutil/opt.h"
  22. #include "libavutil/pixdesc.h"
  23. #include "avfilter.h"
  24. #include "formats.h"
  25. #include "internal.h"
  26. #include "video.h"
typedef struct HWMapContext {
    const AVClass *class;

    AVBufferRef   *hwframes_ref;

    int            mode;
    char          *derive_device_type;
    int            reverse;
} HWMapContext;

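/* Both links accept any video pixel format here; the real constraints on
 * usable formats come from the hardware frames contexts that are set up
 * in hwmap_config_output(). */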
static int hwmap_query_formats(AVFilterContext *avctx)
{
    ff_formats_ref(ff_all_formats(AVMEDIA_TYPE_VIDEO),
                   &avctx->inputs[0]->out_formats);
    ff_formats_ref(ff_all_formats(AVMEDIA_TYPE_VIDEO),
                   &avctx->outputs[0]->in_formats);

    return 0;
}

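/* Decide, from the input/output formats and any hardware device or frames
 * context available, which kind of mapping is being performed, and create
 * or derive the AVHWFramesContext that output frames will belong to. */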
static int hwmap_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    HWMapContext      *ctx = avctx->priv;
    AVFilterLink   *inlink = avctx->inputs[0];
    AVHWFramesContext *hwfc;
    AVBufferRef *device;
    const AVPixFmtDescriptor *desc;
    int err, device_is_derived;

    av_log(avctx, AV_LOG_DEBUG, "Configure hwmap %s -> %s.\n",
           av_get_pix_fmt_name(inlink->format),
           av_get_pix_fmt_name(outlink->format));

    av_buffer_unref(&ctx->hwframes_ref);

    device = avctx->hw_device_ctx;
    device_is_derived = 0;

    if (inlink->hw_frames_ctx) {
        hwfc = (AVHWFramesContext*)inlink->hw_frames_ctx->data;

        if (ctx->derive_device_type) {
            enum AVHWDeviceType type;

            type = av_hwdevice_find_type_by_name(ctx->derive_device_type);
            if (type == AV_HWDEVICE_TYPE_NONE) {
                av_log(avctx, AV_LOG_ERROR, "Invalid device type.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwdevice_ctx_create_derived(&device, type,
                                                 hwfc->device_ref, 0);
            if (err < 0) {
  70. av_log(avctx, AV_LOG_ERROR, "Failed to created derived "
  71. "device context: %d.\n", err);
                goto fail;
            }
            device_is_derived = 1;
        }

        desc = av_pix_fmt_desc_get(outlink->format);
        if (!desc) {
            err = AVERROR(EINVAL);
            goto fail;
        }

        if (inlink->format == hwfc->format &&
            (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
            !ctx->reverse) {
            // Map between two hardware formats (including the case of
            // undoing an existing mapping).

            if (!device) {
                av_log(avctx, AV_LOG_ERROR, "A device reference is "
                       "required to map to a hardware format.\n");
                err = AVERROR(EINVAL);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&ctx->hwframes_ref,
                                                outlink->format,
                                                device,
                                                inlink->hw_frames_ctx, 0);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create derived "
                       "frames context: %d.\n", err);
                goto fail;
            }

        } else if (inlink->format == hwfc->format &&
                   (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) &&
                   ctx->reverse) {
            // Map between two hardware formats, but do it in reverse.
            // Make a new hwframe context for the target type, and then
            // overwrite the input hwframe context with a derived context
            // mapped from that back to the source type.
            AVBufferRef *source;
            AVHWFramesContext *frames;

            ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }
            frames = (AVHWFramesContext*)ctx->hwframes_ref->data;

            frames->format    = outlink->format;
            frames->sw_format = hwfc->sw_format;
            frames->width     = hwfc->width;
            frames->height    = hwfc->height;

            if (avctx->extra_hw_frames >= 0)
                frames->initial_pool_size = 2 + avctx->extra_hw_frames;

            err = av_hwframe_ctx_init(ctx->hwframes_ref);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to initialise "
                       "target frames context: %d.\n", err);
                goto fail;
            }

            err = av_hwframe_ctx_create_derived(&source,
                                                inlink->format,
                                                hwfc->device_ref,
                                                ctx->hwframes_ref,
                                                ctx->mode);
            if (err < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to create "
                       "derived source frames context: %d.\n", err);
                goto fail;
            }

            // Here is the naughty bit. This overwriting changes what
            // ff_get_video_buffer() in the previous filter returns -
            // it will now give a frame allocated here mapped back to
            // the format it expects. If there were any additional
            // constraints on the output frames there then this may
            // break nastily.
            av_buffer_unref(&inlink->hw_frames_ctx);
            inlink->hw_frames_ctx = source;

        } else if ((outlink->format == hwfc->format &&
                    inlink->format  == hwfc->sw_format) ||
                   inlink->format == hwfc->format) {
            // Map from a hardware format to a software format, or
            // undo an existing such mapping.

            ctx->hwframes_ref = av_buffer_ref(inlink->hw_frames_ctx);
            if (!ctx->hwframes_ref) {
                err = AVERROR(ENOMEM);
                goto fail;
            }

        } else {
            // Non-matching formats - not supported.

            av_log(avctx, AV_LOG_ERROR, "Unsupported formats for "
                   "hwmap: from %s (%s) to %s.\n",
                   av_get_pix_fmt_name(inlink->format),
                   av_get_pix_fmt_name(hwfc->format),
                   av_get_pix_fmt_name(outlink->format));
            err = AVERROR(EINVAL);
            goto fail;
        }

    } else if (avctx->hw_device_ctx) {
        // Map from a software format to a hardware format. This
        // creates a new hwframe context like hwupload, but then
        // returns frames mapped from that to the previous link in
        // order to fill them without an additional copy.

        if (!device) {
            av_log(avctx, AV_LOG_ERROR, "A device reference is "
                   "required to create new frames with reverse "
                   "mapping.\n");
            err = AVERROR(EINVAL);
            goto fail;
        }

        ctx->reverse = 1;

        ctx->hwframes_ref = av_hwframe_ctx_alloc(device);
        if (!ctx->hwframes_ref) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
        hwfc = (AVHWFramesContext*)ctx->hwframes_ref->data;

        hwfc->format    = outlink->format;
        hwfc->sw_format = inlink->format;
        hwfc->width     = inlink->w;
        hwfc->height    = inlink->h;

        if (avctx->extra_hw_frames >= 0)
            hwfc->initial_pool_size = 2 + avctx->extra_hw_frames;

        err = av_hwframe_ctx_init(ctx->hwframes_ref);
        if (err < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create frame "
                   "context for reverse mapping: %d.\n", err);
            goto fail;
        }

    } else {
        av_log(avctx, AV_LOG_ERROR, "Mapping requires a hardware "
               "context (a device, or frames on input).\n");
        return AVERROR(EINVAL);
    }

    outlink->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    if (device_is_derived)
        av_buffer_unref(&device);
    return 0;

fail:
    if (device_is_derived)
        av_buffer_unref(&device);
    av_buffer_unref(&ctx->hwframes_ref);
    return err;
}

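/* Custom buffer allocator for the input link. In reverse mode with a
 * software input, a hardware frame is allocated on the output link and
 * mapped back to software with av_hwframe_map(), so the previous filter
 * fills mapped hardware memory directly rather than needing an extra copy. */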
static AVFrame *hwmap_get_buffer(AVFilterLink *inlink, int w, int h)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;

    if (ctx->reverse && !inlink->hw_frames_ctx) {
        AVFrame *src, *dst;
        int err;

        src = ff_get_video_buffer(outlink, w, h);
        if (!src) {
            av_log(avctx, AV_LOG_ERROR, "Failed to allocate source "
                   "frame for software mapping.\n");
            return NULL;
        }

        dst = av_frame_alloc();
        if (!dst) {
            av_frame_free(&src);
            return NULL;
        }

        err = av_hwframe_map(dst, src, ctx->mode);
        if (err) {
            av_log(avctx, AV_LOG_ERROR, "Failed to map frame to "
                   "software: %d.\n", err);
            av_frame_free(&src);
            av_frame_free(&dst);
            return NULL;
        }

        av_frame_free(&src);
        return dst;
    } else {
        return ff_default_get_video_buffer(inlink, w, h);
    }
}

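/* Map each incoming frame to the output format using the frames context
 * prepared in hwmap_config_output(), copy the frame properties across,
 * and pass the mapped frame downstream. */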
static int hwmap_filter_frame(AVFilterLink *link, AVFrame *input)
{
    AVFilterContext *avctx = link->dst;
    AVFilterLink  *outlink = avctx->outputs[0];
    HWMapContext      *ctx = avctx->priv;
    AVFrame *map = NULL;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input->format),
           input->width, input->height, input->pts);

    map = av_frame_alloc();
    if (!map) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    map->format = outlink->format;
    map->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
    if (!map->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->reverse && !input->hw_frames_ctx) {
        // If we mapped backwards from hardware to software, we need
        // to attach the hardware frame context to the input frame to
        // make the mapping visible to av_hwframe_map().
        input->hw_frames_ctx = av_buffer_ref(ctx->hwframes_ref);
        if (!input->hw_frames_ctx) {
            err = AVERROR(ENOMEM);
            goto fail;
        }
    }

    err = av_hwframe_map(map, input, ctx->mode);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to map frame: %d.\n", err);
        goto fail;
    }

    err = av_frame_copy_props(map, input);
    if (err < 0)
        goto fail;

    av_frame_free(&input);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(map->format),
           map->width, map->height, map->pts);

    return ff_filter_frame(outlink, map);

fail:
    av_frame_free(&input);
    av_frame_free(&map);
    return err;
}

static av_cold void hwmap_uninit(AVFilterContext *avctx)
{
    HWMapContext *ctx = avctx->priv;

    av_buffer_unref(&ctx->hwframes_ref);
}

#define OFFSET(x) offsetof(HWMapContext, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM)
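/* The "mode" option is a bitmask of AV_HWFRAME_MAP_* flags; by default the
 * mapping is requested to be both readable and writeable. */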
static const AVOption hwmap_options[] = {
    { "mode", "Frame mapping mode",
      OFFSET(mode), AV_OPT_TYPE_FLAGS,
      { .i64 = AV_HWFRAME_MAP_READ | AV_HWFRAME_MAP_WRITE },
      0, INT_MAX, FLAGS, "mode" },

    { "read", "Mapping should be readable",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_READ },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "write", "Mapping should be writeable",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_WRITE },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "overwrite", "Mapping will always overwrite the entire frame",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_OVERWRITE },
      INT_MIN, INT_MAX, FLAGS, "mode" },
    { "direct", "Mapping should not involve any copying",
      0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_DIRECT },
      INT_MIN, INT_MAX, FLAGS, "mode" },

    { "derive_device", "Derive a new device of this type",
      OFFSET(derive_device_type), AV_OPT_TYPE_STRING,
      { .str = NULL }, 0, 0, FLAGS },
    { "reverse", "Map in reverse (create and allocate in the sink)",
      OFFSET(reverse), AV_OPT_TYPE_INT,
      { .i64 = 0 }, 0, 1, FLAGS },

    { NULL },
};

static const AVClass hwmap_class = {
    .class_name = "hwmap",
    .item_name  = av_default_item_name,
    .option     = hwmap_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVFilterPad hwmap_inputs[] = {
    {
        .name             = "default",
        .type             = AVMEDIA_TYPE_VIDEO,
        .get_video_buffer = &hwmap_get_buffer,
        .filter_frame     = &hwmap_filter_frame,
    },
    { NULL }
};

static const AVFilterPad hwmap_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &hwmap_config_output,
    },
    { NULL }
};

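/* Illustrative filtergraph fragments (a sketch only, assuming a suitable
 * hardware device and surrounding format constraints are in place):
 *
 *     hwmap                       map hardware input frames to system
 *                                 memory, or the reverse, depending on
 *                                 the neighbouring formats
 *     hwmap=derive_device=vaapi   map frames onto a VAAPI device derived
 *                                 from the device of the input frames
 *     hwmap=mode=read+write       request a mapping that is both readable
 *                                 and writeable (the default)
 */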
AVFilter ff_vf_hwmap = {
    .name           = "hwmap",
    .description    = NULL_IF_CONFIG_SMALL("Map hardware frames"),
    .uninit         = &hwmap_uninit,
    .priv_size      = sizeof(HWMapContext),
    .priv_class     = &hwmap_class,
    .query_formats  = &hwmap_query_formats,
    .inputs         = hwmap_inputs,
    .outputs        = hwmap_outputs,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};