You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1132 lines
35KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include <stdint.h>
  19. #include <string.h>
  20. #include <mfx/mfxvideo.h>
  21. #include "config.h"
  22. #if CONFIG_VAAPI
  23. #include "hwcontext_vaapi.h"
  24. #endif
  25. #if CONFIG_DXVA2
  26. #include "hwcontext_dxva2.h"
  27. #endif
  28. #include "buffer.h"
  29. #include "common.h"
  30. #include "hwcontext.h"
  31. #include "hwcontext_internal.h"
  32. #include "hwcontext_qsv.h"
  33. #include "mem.h"
  34. #include "pixfmt.h"
  35. #include "pixdesc.h"
  36. #include "time.h"
  37. typedef struct QSVDevicePriv {
  38. AVBufferRef *child_device_ctx;
  39. } QSVDevicePriv;
  40. typedef struct QSVDeviceContext {
  41. mfxHDL handle;
  42. mfxHandleType handle_type;
  43. mfxVersion ver;
  44. mfxIMPL impl;
  45. enum AVHWDeviceType child_device_type;
  46. enum AVPixelFormat child_pix_fmt;
  47. } QSVDeviceContext;
  48. typedef struct QSVFramesContext {
  49. mfxSession session_download;
  50. mfxSession session_upload;
  51. AVBufferRef *child_frames_ref;
  52. mfxFrameSurface1 *surfaces_internal;
  53. int nb_surfaces_used;
  54. // used in the frame allocator for non-opaque surfaces
  55. mfxMemId *mem_ids;
  56. // used in the opaque alloc request for opaque surfaces
  57. mfxFrameSurface1 **surface_ptrs;
  58. mfxExtOpaqueSurfaceAlloc opaque_alloc;
  59. mfxExtBuffer *ext_buffers[1];
  60. } QSVFramesContext;
  61. static const struct {
  62. mfxHandleType handle_type;
  63. enum AVHWDeviceType device_type;
  64. enum AVPixelFormat pix_fmt;
  65. } supported_handle_types[] = {
  66. #if CONFIG_VAAPI
  67. { MFX_HANDLE_VA_DISPLAY, AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
  68. #endif
  69. #if CONFIG_DXVA2
  70. { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
  71. #endif
  72. { 0 },
  73. };
  74. static const struct {
  75. enum AVPixelFormat pix_fmt;
  76. uint32_t fourcc;
  77. } supported_pixel_formats[] = {
  78. { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
  79. { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
  80. { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
  81. };
  82. static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
  83. {
  84. int i;
  85. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
  86. if (supported_pixel_formats[i].pix_fmt == pix_fmt)
  87. return supported_pixel_formats[i].fourcc;
  88. }
  89. return 0;
  90. }
  91. static int qsv_device_init(AVHWDeviceContext *ctx)
  92. {
  93. AVQSVDeviceContext *hwctx = ctx->hwctx;
  94. QSVDeviceContext *s = ctx->internal->priv;
  95. mfxStatus err;
  96. int i;
  97. for (i = 0; supported_handle_types[i].handle_type; i++) {
  98. err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
  99. &s->handle);
  100. if (err == MFX_ERR_NONE) {
  101. s->handle_type = supported_handle_types[i].handle_type;
  102. s->child_device_type = supported_handle_types[i].device_type;
  103. s->child_pix_fmt = supported_handle_types[i].pix_fmt;
  104. break;
  105. }
  106. }
  107. if (!s->handle) {
  108. av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
  109. "from the session\n");
  110. }
  111. err = MFXQueryIMPL(hwctx->session, &s->impl);
  112. if (err == MFX_ERR_NONE)
  113. err = MFXQueryVersion(hwctx->session, &s->ver);
  114. if (err != MFX_ERR_NONE) {
  115. av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
  116. return AVERROR_UNKNOWN;
  117. }
  118. return 0;
  119. }
  120. static void qsv_frames_uninit(AVHWFramesContext *ctx)
  121. {
  122. QSVFramesContext *s = ctx->internal->priv;
  123. if (s->session_download) {
  124. MFXVideoVPP_Close(s->session_download);
  125. MFXClose(s->session_download);
  126. }
  127. s->session_download = NULL;
  128. if (s->session_upload) {
  129. MFXVideoVPP_Close(s->session_upload);
  130. MFXClose(s->session_upload);
  131. }
  132. s->session_upload = NULL;
  133. av_freep(&s->mem_ids);
  134. av_freep(&s->surface_ptrs);
  135. av_freep(&s->surfaces_internal);
  136. av_buffer_unref(&s->child_frames_ref);
  137. }
  138. static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
  139. {
  140. }
  141. static AVBufferRef *qsv_pool_alloc(void *opaque, int size)
  142. {
  143. AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
  144. QSVFramesContext *s = ctx->internal->priv;
  145. AVQSVFramesContext *hwctx = ctx->hwctx;
  146. if (s->nb_surfaces_used < hwctx->nb_surfaces) {
  147. s->nb_surfaces_used++;
  148. return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
  149. sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
  150. }
  151. return NULL;
  152. }
  153. static int qsv_init_child_ctx(AVHWFramesContext *ctx)
  154. {
  155. AVQSVFramesContext *hwctx = ctx->hwctx;
  156. QSVFramesContext *s = ctx->internal->priv;
  157. QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  158. AVBufferRef *child_device_ref = NULL;
  159. AVBufferRef *child_frames_ref = NULL;
  160. AVHWDeviceContext *child_device_ctx;
  161. AVHWFramesContext *child_frames_ctx;
  162. int i, ret = 0;
  163. if (!device_priv->handle) {
  164. av_log(ctx, AV_LOG_ERROR,
  165. "Cannot create a non-opaque internal surface pool without "
  166. "a hardware handle\n");
  167. return AVERROR(EINVAL);
  168. }
  169. child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
  170. if (!child_device_ref)
  171. return AVERROR(ENOMEM);
  172. child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
  173. #if CONFIG_VAAPI
  174. if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
  175. AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  176. child_device_hwctx->display = (VADisplay)device_priv->handle;
  177. }
  178. #endif
  179. #if CONFIG_DXVA2
  180. if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
  181. AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  182. child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
  183. }
  184. #endif
  185. ret = av_hwdevice_ctx_init(child_device_ref);
  186. if (ret < 0) {
  187. av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
  188. goto fail;
  189. }
  190. child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
  191. if (!child_frames_ref) {
  192. ret = AVERROR(ENOMEM);
  193. goto fail;
  194. }
  195. child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
  196. child_frames_ctx->format = device_priv->child_pix_fmt;
  197. child_frames_ctx->sw_format = ctx->sw_format;
  198. child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
  199. child_frames_ctx->width = ctx->width;
  200. child_frames_ctx->height = ctx->height;
  201. #if CONFIG_DXVA2
  202. if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
  203. AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
  204. if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
  205. child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
  206. else
  207. child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
  208. }
  209. #endif
  210. ret = av_hwframe_ctx_init(child_frames_ref);
  211. if (ret < 0) {
  212. av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
  213. goto fail;
  214. }
  215. #if CONFIG_VAAPI
  216. if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
  217. AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
  218. for (i = 0; i < ctx->initial_pool_size; i++)
  219. s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
  220. hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  221. }
  222. #endif
  223. #if CONFIG_DXVA2
  224. if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
  225. AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
  226. for (i = 0; i < ctx->initial_pool_size; i++)
  227. s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
  228. if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
  229. hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
  230. else
  231. hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  232. }
  233. #endif
  234. s->child_frames_ref = child_frames_ref;
  235. child_frames_ref = NULL;
  236. fail:
  237. av_buffer_unref(&child_device_ref);
  238. av_buffer_unref(&child_frames_ref);
  239. return ret;
  240. }
  241. static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
  242. {
  243. const AVPixFmtDescriptor *desc;
  244. uint32_t fourcc;
  245. desc = av_pix_fmt_desc_get(ctx->sw_format);
  246. if (!desc)
  247. return AVERROR(EINVAL);
  248. fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
  249. if (!fourcc)
  250. return AVERROR(EINVAL);
  251. surf->Info.BitDepthLuma = desc->comp[0].depth;
  252. surf->Info.BitDepthChroma = desc->comp[0].depth;
  253. surf->Info.Shift = desc->comp[0].depth > 8;
  254. if (desc->log2_chroma_w && desc->log2_chroma_h)
  255. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
  256. else if (desc->log2_chroma_w)
  257. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
  258. else
  259. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
  260. surf->Info.FourCC = fourcc;
  261. surf->Info.Width = ctx->width;
  262. surf->Info.CropW = ctx->width;
  263. surf->Info.Height = ctx->height;
  264. surf->Info.CropH = ctx->height;
  265. surf->Info.FrameRateExtN = 25;
  266. surf->Info.FrameRateExtD = 1;
  267. return 0;
  268. }
  269. static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
  270. {
  271. QSVFramesContext *s = ctx->internal->priv;
  272. AVQSVFramesContext *frames_hwctx = ctx->hwctx;
  273. int i, ret = 0;
  274. if (ctx->initial_pool_size <= 0) {
  275. av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
  276. return AVERROR(EINVAL);
  277. }
  278. s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
  279. sizeof(*s->surfaces_internal));
  280. if (!s->surfaces_internal)
  281. return AVERROR(ENOMEM);
  282. for (i = 0; i < ctx->initial_pool_size; i++) {
  283. ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
  284. if (ret < 0)
  285. return ret;
  286. }
  287. if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
  288. ret = qsv_init_child_ctx(ctx);
  289. if (ret < 0)
  290. return ret;
  291. }
  292. ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
  293. ctx, qsv_pool_alloc, NULL);
  294. if (!ctx->internal->pool_internal)
  295. return AVERROR(ENOMEM);
  296. frames_hwctx->surfaces = s->surfaces_internal;
  297. frames_hwctx->nb_surfaces = ctx->initial_pool_size;
  298. return 0;
  299. }
  300. static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
  301. mfxFrameAllocResponse *resp)
  302. {
  303. AVHWFramesContext *ctx = pthis;
  304. QSVFramesContext *s = ctx->internal->priv;
  305. AVQSVFramesContext *hwctx = ctx->hwctx;
  306. mfxFrameInfo *i = &req->Info;
  307. mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
  308. if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
  309. !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
  310. !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
  311. return MFX_ERR_UNSUPPORTED;
  312. if (i->Width != i1->Width || i->Height != i1->Height ||
  313. i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
  314. av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
  315. "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
  316. i->Width, i->Height, i->FourCC, i->ChromaFormat,
  317. i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
  318. return MFX_ERR_UNSUPPORTED;
  319. }
  320. resp->mids = s->mem_ids;
  321. resp->NumFrameActual = hwctx->nb_surfaces;
  322. return MFX_ERR_NONE;
  323. }
  324. static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
  325. {
  326. return MFX_ERR_NONE;
  327. }
  328. static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
  329. {
  330. return MFX_ERR_UNSUPPORTED;
  331. }
  332. static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
  333. {
  334. return MFX_ERR_UNSUPPORTED;
  335. }
  336. static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
  337. {
  338. *hdl = mid;
  339. return MFX_ERR_NONE;
  340. }
  341. static int qsv_init_internal_session(AVHWFramesContext *ctx,
  342. mfxSession *session, int upload)
  343. {
  344. QSVFramesContext *s = ctx->internal->priv;
  345. AVQSVFramesContext *frames_hwctx = ctx->hwctx;
  346. QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  347. int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
  348. mfxFrameAllocator frame_allocator = {
  349. .pthis = ctx,
  350. .Alloc = frame_alloc,
  351. .Lock = frame_lock,
  352. .Unlock = frame_unlock,
  353. .GetHDL = frame_get_hdl,
  354. .Free = frame_free,
  355. };
  356. mfxVideoParam par;
  357. mfxStatus err;
  358. err = MFXInit(device_priv->impl, &device_priv->ver, session);
  359. if (err != MFX_ERR_NONE) {
  360. av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
  361. return AVERROR_UNKNOWN;
  362. }
  363. if (device_priv->handle) {
  364. err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
  365. device_priv->handle);
  366. if (err != MFX_ERR_NONE)
  367. return AVERROR_UNKNOWN;
  368. }
  369. if (!opaque) {
  370. err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
  371. if (err != MFX_ERR_NONE)
  372. return AVERROR_UNKNOWN;
  373. }
  374. memset(&par, 0, sizeof(par));
  375. if (opaque) {
  376. par.ExtParam = s->ext_buffers;
  377. par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
  378. par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
  379. MFX_IOPATTERN_IN_OPAQUE_MEMORY;
  380. } else {
  381. par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
  382. MFX_IOPATTERN_IN_VIDEO_MEMORY;
  383. }
  384. par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
  385. MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  386. par.AsyncDepth = 1;
  387. par.vpp.In = frames_hwctx->surfaces[0].Info;
  388. /* Apparently VPP requires the frame rate to be set to some value, otherwise
  389. * init will fail (probably for the framerate conversion filter). Since we
  390. * are only doing data upload/download here, we just invent an arbitrary
  391. * value */
  392. par.vpp.In.FrameRateExtN = 25;
  393. par.vpp.In.FrameRateExtD = 1;
  394. par.vpp.Out = par.vpp.In;
  395. err = MFXVideoVPP_Init(*session, &par);
  396. if (err != MFX_ERR_NONE) {
  397. av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
  398. "Surface upload/download will not be possible\n");
  399. MFXClose(*session);
  400. *session = NULL;
  401. }
  402. return 0;
  403. }
  404. static int qsv_frames_init(AVHWFramesContext *ctx)
  405. {
  406. QSVFramesContext *s = ctx->internal->priv;
  407. AVQSVFramesContext *frames_hwctx = ctx->hwctx;
  408. int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
  409. uint32_t fourcc;
  410. int i, ret;
  411. fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
  412. if (!fourcc) {
  413. av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
  414. return AVERROR(ENOSYS);
  415. }
  416. if (!ctx->pool) {
  417. ret = qsv_init_pool(ctx, fourcc);
  418. if (ret < 0) {
  419. av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
  420. return ret;
  421. }
  422. }
  423. if (opaque) {
  424. s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
  425. sizeof(*s->surface_ptrs));
  426. if (!s->surface_ptrs)
  427. return AVERROR(ENOMEM);
  428. for (i = 0; i < frames_hwctx->nb_surfaces; i++)
  429. s->surface_ptrs[i] = frames_hwctx->surfaces + i;
  430. s->opaque_alloc.In.Surfaces = s->surface_ptrs;
  431. s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
  432. s->opaque_alloc.In.Type = frames_hwctx->frame_type;
  433. s->opaque_alloc.Out = s->opaque_alloc.In;
  434. s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
  435. s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
  436. s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
  437. } else {
  438. s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
  439. if (!s->mem_ids)
  440. return AVERROR(ENOMEM);
  441. for (i = 0; i < frames_hwctx->nb_surfaces; i++)
  442. s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
  443. }
  444. ret = qsv_init_internal_session(ctx, &s->session_download, 0);
  445. if (ret < 0)
  446. return ret;
  447. ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
  448. if (ret < 0)
  449. return ret;
  450. return 0;
  451. }
  452. static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
  453. {
  454. frame->buf[0] = av_buffer_pool_get(ctx->pool);
  455. if (!frame->buf[0])
  456. return AVERROR(ENOMEM);
  457. frame->data[3] = frame->buf[0]->data;
  458. frame->format = AV_PIX_FMT_QSV;
  459. frame->width = ctx->width;
  460. frame->height = ctx->height;
  461. return 0;
  462. }
  463. static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
  464. enum AVHWFrameTransferDirection dir,
  465. enum AVPixelFormat **formats)
  466. {
  467. enum AVPixelFormat *fmts;
  468. fmts = av_malloc_array(2, sizeof(*fmts));
  469. if (!fmts)
  470. return AVERROR(ENOMEM);
  471. fmts[0] = ctx->sw_format;
  472. fmts[1] = AV_PIX_FMT_NONE;
  473. *formats = fmts;
  474. return 0;
  475. }
  476. static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
  477. AVHWFramesContext *src_ctx, int flags)
  478. {
  479. AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
  480. int i;
  481. switch (dst_ctx->device_ctx->type) {
  482. #if CONFIG_VAAPI
  483. case AV_HWDEVICE_TYPE_VAAPI:
  484. {
  485. AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
  486. dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
  487. sizeof(*dst_hwctx->surface_ids));
  488. if (!dst_hwctx->surface_ids)
  489. return AVERROR(ENOMEM);
  490. for (i = 0; i < src_hwctx->nb_surfaces; i++)
  491. dst_hwctx->surface_ids[i] =
  492. *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
  493. dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
  494. }
  495. break;
  496. #endif
  497. #if CONFIG_DXVA2
  498. case AV_HWDEVICE_TYPE_DXVA2:
  499. {
  500. AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
  501. dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
  502. sizeof(*dst_hwctx->surfaces));
  503. if (!dst_hwctx->surfaces)
  504. return AVERROR(ENOMEM);
  505. for (i = 0; i < src_hwctx->nb_surfaces; i++)
  506. dst_hwctx->surfaces[i] =
  507. (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
  508. dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
  509. if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
  510. dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
  511. else
  512. dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
  513. }
  514. break;
  515. #endif
  516. default:
  517. return AVERROR(ENOSYS);
  518. }
  519. return 0;
  520. }
  521. static int qsv_map_from(AVHWFramesContext *ctx,
  522. AVFrame *dst, const AVFrame *src, int flags)
  523. {
  524. QSVFramesContext *s = ctx->internal->priv;
  525. mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
  526. AVHWFramesContext *child_frames_ctx;
  527. const AVPixFmtDescriptor *desc;
  528. uint8_t *child_data;
  529. AVFrame *dummy;
  530. int ret = 0;
  531. if (!s->child_frames_ref)
  532. return AVERROR(ENOSYS);
  533. child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
  534. switch (child_frames_ctx->device_ctx->type) {
  535. #if CONFIG_VAAPI
  536. case AV_HWDEVICE_TYPE_VAAPI:
  537. child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
  538. break;
  539. #endif
  540. #if CONFIG_DXVA2
  541. case AV_HWDEVICE_TYPE_DXVA2:
  542. child_data = surf->Data.MemId;
  543. break;
  544. #endif
  545. default:
  546. return AVERROR(ENOSYS);
  547. }
  548. if (dst->format == child_frames_ctx->format) {
  549. ret = ff_hwframe_map_create(s->child_frames_ref,
  550. dst, src, NULL, NULL);
  551. if (ret < 0)
  552. return ret;
  553. dst->width = src->width;
  554. dst->height = src->height;
  555. dst->data[3] = child_data;
  556. return 0;
  557. }
  558. desc = av_pix_fmt_desc_get(dst->format);
  559. if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
  560. // This only supports mapping to software.
  561. return AVERROR(ENOSYS);
  562. }
  563. dummy = av_frame_alloc();
  564. if (!dummy)
  565. return AVERROR(ENOMEM);
  566. dummy->buf[0] = av_buffer_ref(src->buf[0]);
  567. dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
  568. if (!dummy->buf[0] || !dummy->hw_frames_ctx)
  569. goto fail;
  570. dummy->format = child_frames_ctx->format;
  571. dummy->width = src->width;
  572. dummy->height = src->height;
  573. dummy->data[3] = child_data;
  574. ret = av_hwframe_map(dst, dummy, flags);
  575. fail:
  576. av_frame_free(&dummy);
  577. return ret;
  578. }
  579. static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
  580. const AVFrame *src)
  581. {
  582. QSVFramesContext *s = ctx->internal->priv;
  583. AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
  584. int download = !!src->hw_frames_ctx;
  585. mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
  586. AVFrame *dummy;
  587. int ret;
  588. dummy = av_frame_alloc();
  589. if (!dummy)
  590. return AVERROR(ENOMEM);
  591. dummy->format = child_frames_ctx->format;
  592. dummy->width = src->width;
  593. dummy->height = src->height;
  594. dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
  595. dummy->data[3] = surf->Data.MemId;
  596. dummy->hw_frames_ctx = s->child_frames_ref;
  597. ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
  598. av_hwframe_transfer_data(dummy, src, 0);
  599. dummy->buf[0] = NULL;
  600. dummy->data[3] = NULL;
  601. dummy->hw_frames_ctx = NULL;
  602. av_frame_free(&dummy);
  603. return ret;
  604. }
  605. static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
  606. const AVFrame *src)
  607. {
  608. QSVFramesContext *s = ctx->internal->priv;
  609. mfxFrameSurface1 out = {{ 0 }};
  610. mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
  611. mfxSyncPoint sync = NULL;
  612. mfxStatus err;
  613. if (!s->session_download) {
  614. if (s->child_frames_ref)
  615. return qsv_transfer_data_child(ctx, dst, src);
  616. av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
  617. return AVERROR(ENOSYS);
  618. }
  619. out.Info = in->Info;
  620. out.Data.PitchLow = dst->linesize[0];
  621. out.Data.Y = dst->data[0];
  622. out.Data.U = dst->data[1];
  623. out.Data.V = dst->data[2];
  624. out.Data.A = dst->data[3];
  625. do {
  626. err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
  627. if (err == MFX_WRN_DEVICE_BUSY)
  628. av_usleep(1);
  629. } while (err == MFX_WRN_DEVICE_BUSY);
  630. if (err < 0 || !sync) {
  631. av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
  632. return AVERROR_UNKNOWN;
  633. }
  634. do {
  635. err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
  636. } while (err == MFX_WRN_IN_EXECUTION);
  637. if (err < 0) {
  638. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
  639. return AVERROR_UNKNOWN;
  640. }
  641. return 0;
  642. }
  643. static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
  644. const AVFrame *src)
  645. {
  646. QSVFramesContext *s = ctx->internal->priv;
  647. mfxFrameSurface1 in = {{ 0 }};
  648. mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
  649. mfxSyncPoint sync = NULL;
  650. mfxStatus err;
  651. if (!s->session_upload) {
  652. if (s->child_frames_ref)
  653. return qsv_transfer_data_child(ctx, dst, src);
  654. av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
  655. return AVERROR(ENOSYS);
  656. }
  657. in.Info = out->Info;
  658. in.Data.PitchLow = src->linesize[0];
  659. in.Data.Y = src->data[0];
  660. in.Data.U = src->data[1];
  661. in.Data.V = src->data[2];
  662. in.Data.A = src->data[3];
  663. do {
  664. err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
  665. if (err == MFX_WRN_DEVICE_BUSY)
  666. av_usleep(1);
  667. } while (err == MFX_WRN_DEVICE_BUSY);
  668. if (err < 0 || !sync) {
  669. av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
  670. return AVERROR_UNKNOWN;
  671. }
  672. do {
  673. err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
  674. } while (err == MFX_WRN_IN_EXECUTION);
  675. if (err < 0) {
  676. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
  677. return AVERROR_UNKNOWN;
  678. }
  679. return 0;
  680. }
  681. static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
  682. AVHWFramesContext *src_ctx, int flags)
  683. {
  684. QSVFramesContext *s = dst_ctx->internal->priv;
  685. AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
  686. int i;
  687. switch (src_ctx->device_ctx->type) {
  688. #if CONFIG_VAAPI
  689. case AV_HWDEVICE_TYPE_VAAPI:
  690. {
  691. AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
  692. s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
  693. sizeof(*s->surfaces_internal));
  694. if (!s->surfaces_internal)
  695. return AVERROR(ENOMEM);
  696. for (i = 0; i < src_hwctx->nb_surfaces; i++) {
  697. qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
  698. s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
  699. }
  700. dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
  701. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  702. }
  703. break;
  704. #endif
  705. #if CONFIG_DXVA2
  706. case AV_HWDEVICE_TYPE_DXVA2:
  707. {
  708. AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
  709. s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
  710. sizeof(*s->surfaces_internal));
  711. if (!s->surfaces_internal)
  712. return AVERROR(ENOMEM);
  713. for (i = 0; i < src_hwctx->nb_surfaces; i++) {
  714. qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
  715. s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
  716. }
  717. dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
  718. if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
  719. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
  720. else
  721. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  722. }
  723. break;
  724. #endif
  725. default:
  726. return AVERROR(ENOSYS);
  727. }
  728. dst_hwctx->surfaces = s->surfaces_internal;
  729. return 0;
  730. }
  731. static int qsv_map_to(AVHWFramesContext *dst_ctx,
  732. AVFrame *dst, const AVFrame *src, int flags)
  733. {
  734. AVQSVFramesContext *hwctx = dst_ctx->hwctx;
  735. int i, err;
  736. for (i = 0; i < hwctx->nb_surfaces; i++) {
  737. #if CONFIG_VAAPI
  738. if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
  739. (VASurfaceID)(uintptr_t)src->data[3])
  740. break;
  741. #endif
  742. #if CONFIG_DXVA2
  743. if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
  744. (IDirect3DSurface9*)(uintptr_t)src->data[3])
  745. break;
  746. #endif
  747. }
  748. if (i >= hwctx->nb_surfaces) {
  749. av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
  750. "is not in the mapped frames context.\n");
  751. return AVERROR(EINVAL);
  752. }
  753. err = ff_hwframe_map_create(dst->hw_frames_ctx,
  754. dst, src, NULL, NULL);
  755. if (err)
  756. return err;
  757. dst->width = src->width;
  758. dst->height = src->height;
  759. dst->data[3] = (uint8_t*)&hwctx->surfaces[i];
  760. return 0;
  761. }
  762. static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
  763. const void *hwconfig,
  764. AVHWFramesConstraints *constraints)
  765. {
  766. int i;
  767. constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
  768. sizeof(*constraints->valid_sw_formats));
  769. if (!constraints->valid_sw_formats)
  770. return AVERROR(ENOMEM);
  771. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
  772. constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
  773. constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
  774. constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
  775. if (!constraints->valid_hw_formats)
  776. return AVERROR(ENOMEM);
  777. constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
  778. constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
  779. return 0;
  780. }
  781. static void qsv_device_free(AVHWDeviceContext *ctx)
  782. {
  783. AVQSVDeviceContext *hwctx = ctx->hwctx;
  784. QSVDevicePriv *priv = ctx->user_opaque;
  785. if (hwctx->session)
  786. MFXClose(hwctx->session);
  787. av_buffer_unref(&priv->child_device_ctx);
  788. av_freep(&priv);
  789. }
  790. static mfxIMPL choose_implementation(const char *device)
  791. {
  792. static const struct {
  793. const char *name;
  794. mfxIMPL impl;
  795. } impl_map[] = {
  796. { "auto", MFX_IMPL_AUTO },
  797. { "sw", MFX_IMPL_SOFTWARE },
  798. { "hw", MFX_IMPL_HARDWARE },
  799. { "auto_any", MFX_IMPL_AUTO_ANY },
  800. { "hw_any", MFX_IMPL_HARDWARE_ANY },
  801. { "hw2", MFX_IMPL_HARDWARE2 },
  802. { "hw3", MFX_IMPL_HARDWARE3 },
  803. { "hw4", MFX_IMPL_HARDWARE4 },
  804. };
  805. mfxIMPL impl = MFX_IMPL_AUTO_ANY;
  806. int i;
  807. if (device) {
  808. for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
  809. if (!strcmp(device, impl_map[i].name)) {
  810. impl = impl_map[i].impl;
  811. break;
  812. }
  813. if (i == FF_ARRAY_ELEMS(impl_map))
  814. impl = strtol(device, NULL, 0);
  815. }
  816. return impl;
  817. }
  818. static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
  819. mfxIMPL implementation,
  820. AVHWDeviceContext *child_device_ctx,
  821. int flags)
  822. {
  823. AVQSVDeviceContext *hwctx = ctx->hwctx;
  824. QSVDeviceContext *s = ctx->internal->priv;
  825. mfxVersion ver = { { 3, 1 } };
  826. mfxHDL handle;
  827. mfxHandleType handle_type;
  828. mfxStatus err;
  829. int ret;
  830. switch (child_device_ctx->type) {
  831. #if CONFIG_VAAPI
  832. case AV_HWDEVICE_TYPE_VAAPI:
  833. {
  834. AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  835. handle_type = MFX_HANDLE_VA_DISPLAY;
  836. handle = (mfxHDL)child_device_hwctx->display;
  837. }
  838. break;
  839. #endif
  840. #if CONFIG_DXVA2
  841. case AV_HWDEVICE_TYPE_DXVA2:
  842. {
  843. AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  844. handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
  845. handle = (mfxHDL)child_device_hwctx->devmgr;
  846. }
  847. break;
  848. #endif
  849. default:
  850. ret = AVERROR(ENOSYS);
  851. goto fail;
  852. }
  853. err = MFXInit(implementation, &ver, &hwctx->session);
  854. if (err != MFX_ERR_NONE) {
  855. av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
  856. "%d.\n", err);
  857. ret = AVERROR_UNKNOWN;
  858. goto fail;
  859. }
  860. err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
  861. if (err != MFX_ERR_NONE) {
  862. av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
  863. "%d\n", err);
  864. ret = AVERROR_UNKNOWN;
  865. goto fail;
  866. }
  867. ret = qsv_device_init(ctx);
  868. if (ret < 0)
  869. goto fail;
  870. if (s->handle_type != handle_type) {
  871. av_log(ctx, AV_LOG_ERROR, "Error in child device handle setup: "
  872. "type mismatch (%d != %d).\n", s->handle_type, handle_type);
  873. err = AVERROR_UNKNOWN;
  874. goto fail;
  875. }
  876. return 0;
  877. fail:
  878. if (hwctx->session)
  879. MFXClose(hwctx->session);
  880. return ret;
  881. }
  882. static int qsv_device_derive(AVHWDeviceContext *ctx,
  883. AVHWDeviceContext *child_device_ctx, int flags)
  884. {
  885. return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
  886. child_device_ctx, flags);
  887. }
  888. static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
  889. AVDictionary *opts, int flags)
  890. {
  891. QSVDevicePriv *priv;
  892. enum AVHWDeviceType child_device_type;
  893. AVHWDeviceContext *child_device;
  894. AVDictionaryEntry *e;
  895. mfxIMPL impl;
  896. int ret;
  897. priv = av_mallocz(sizeof(*priv));
  898. if (!priv)
  899. return AVERROR(ENOMEM);
  900. ctx->user_opaque = priv;
  901. ctx->free = qsv_device_free;
  902. e = av_dict_get(opts, "child_device", NULL, 0);
  903. if (CONFIG_VAAPI)
  904. child_device_type = AV_HWDEVICE_TYPE_VAAPI;
  905. else if (CONFIG_DXVA2)
  906. child_device_type = AV_HWDEVICE_TYPE_DXVA2;
  907. else {
  908. av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
  909. return AVERROR(ENOSYS);
  910. }
  911. ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
  912. e ? e->value : NULL, NULL, 0);
  913. if (ret < 0)
  914. return ret;
  915. child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
  916. impl = choose_implementation(device);
  917. return qsv_device_derive_from_child(ctx, impl, child_device, 0);
  918. }
  919. const HWContextType ff_hwcontext_type_qsv = {
  920. .type = AV_HWDEVICE_TYPE_QSV,
  921. .name = "QSV",
  922. .device_hwctx_size = sizeof(AVQSVDeviceContext),
  923. .device_priv_size = sizeof(QSVDeviceContext),
  924. .frames_hwctx_size = sizeof(AVQSVFramesContext),
  925. .frames_priv_size = sizeof(QSVFramesContext),
  926. .device_create = qsv_device_create,
  927. .device_derive = qsv_device_derive,
  928. .device_init = qsv_device_init,
  929. .frames_get_constraints = qsv_frames_get_constraints,
  930. .frames_init = qsv_frames_init,
  931. .frames_uninit = qsv_frames_uninit,
  932. .frames_get_buffer = qsv_get_buffer,
  933. .transfer_get_formats = qsv_transfer_get_formats,
  934. .transfer_data_to = qsv_transfer_data_to,
  935. .transfer_data_from = qsv_transfer_data_from,
  936. .map_to = qsv_map_to,
  937. .map_from = qsv_map_from,
  938. .frames_derive_to = qsv_frames_derive_to,
  939. .frames_derive_from = qsv_frames_derive_from,
  940. .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
  941. };