You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1301 lines
40KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include <stdint.h>
  19. #include <string.h>
  20. #include <mfx/mfxvideo.h>
  21. #include "config.h"
  22. #if HAVE_PTHREADS
  23. #include <pthread.h>
  24. #endif
  25. #if CONFIG_VAAPI
  26. #include "hwcontext_vaapi.h"
  27. #endif
  28. #if CONFIG_DXVA2
  29. #include "hwcontext_dxva2.h"
  30. #endif
  31. #include "buffer.h"
  32. #include "common.h"
  33. #include "hwcontext.h"
  34. #include "hwcontext_internal.h"
  35. #include "hwcontext_qsv.h"
  36. #include "mem.h"
  37. #include "pixfmt.h"
  38. #include "pixdesc.h"
  39. #include "time.h"
  40. #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
  41. (MFX_VERSION_MAJOR > (MAJOR) || \
  42. MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
/**
 * Private data of a QSV device.  Holds a reference to the child
 * (platform) device so it stays alive as long as the QSV device does.
 * NOTE(review): the code creating/freeing this struct is outside this
 * chunk — confirm ownership semantics there.
 */
typedef struct QSVDevicePriv {
    AVBufferRef *child_device_ctx;
} QSVDevicePriv;
/**
 * Per-device state cached from the mfxSession in qsv_device_init().
 */
typedef struct QSVDeviceContext {
    mfxHDL handle;              ///< native device handle retrieved from the session
    mfxHandleType handle_type;  ///< type of 'handle' (VA display / D3D9 device manager)
    mfxVersion ver;             ///< libmfx runtime version, used when creating internal sessions
    mfxIMPL impl;               ///< libmfx implementation of the session

    enum AVHWDeviceType child_device_type; ///< AVHWDeviceType matching handle_type
    enum AVPixelFormat child_pix_fmt;      ///< hw pixel format of the child device
} QSVDeviceContext;
/**
 * Private state of a QSV frames context.
 */
typedef struct QSVFramesContext {
    /* Internal sessions used only for surface download/upload through VPP.
     * They are created lazily on first transfer; the *_init flags record
     * that creation was attempted (the session pointer may still be NULL
     * if VPP init failed). */
    mfxSession session_download;
    int session_download_init;
    mfxSession session_upload;
    int session_upload_init;
#if HAVE_PTHREADS
    /* Serialize lazy session creation across threads. */
    pthread_mutex_t session_lock;
    pthread_cond_t session_cond;
#endif

    /* Child (VAAPI/DXVA2) frames context backing an internally allocated,
     * non-opaque surface pool; NULL otherwise. */
    AVBufferRef *child_frames_ref;
    mfxFrameSurface1 *surfaces_internal;
    int nb_surfaces_used;

    // used in the frame allocator for non-opaque surfaces
    mfxMemId *mem_ids;
    // used in the opaque alloc request for opaque surfaces
    mfxFrameSurface1 **surface_ptrs;

    /* Opaque-surface allocation request attached to internal sessions
     * via ext_buffers when MFX_MEMTYPE_OPAQUE_FRAME is used. */
    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];
} QSVFramesContext;
/* Native handle types a session may expose, with the matching child
 * device type and hw pixel format.  Terminated by a zeroed entry so it
 * can be iterated until handle_type == 0 (see qsv_device_init()). */
static const struct {
    mfxHandleType handle_type;
    enum AVHWDeviceType device_type;
    enum AVPixelFormat  pix_fmt;
} supported_handle_types[] = {
#if CONFIG_VAAPI
    { MFX_HANDLE_VA_DISPLAY,          AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
#endif
#if CONFIG_DXVA2
    { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
#endif
    { 0 },
};
/* Software pixel formats usable with QSV surfaces and their libmfx
 * FourCC equivalents.  Iterated by element count (no terminator). */
static const struct {
    enum AVPixelFormat pix_fmt;
    uint32_t           fourcc;
} supported_pixel_formats[] = {
    { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
    { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
    { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
    { AV_PIX_FMT_PAL8, MFX_FOURCC_P8   },
#if CONFIG_VAAPI
    { AV_PIX_FMT_YUYV422,
                       MFX_FOURCC_YUY2 },
#if QSV_VERSION_ATLEAST(1, 27)
    /* Y210 requires a libmfx runtime of at least 1.27. */
    { AV_PIX_FMT_Y210,
                       MFX_FOURCC_Y210 },
#endif
#endif
};
  103. static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
  104. {
  105. int i;
  106. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
  107. if (supported_pixel_formats[i].pix_fmt == pix_fmt)
  108. return supported_pixel_formats[i].fourcc;
  109. }
  110. return 0;
  111. }
/**
 * Device init: probe the session for a native handle (first matching
 * entry of supported_handle_types wins) and cache the implementation
 * and runtime version needed to spawn internal sessions later.
 *
 * A missing handle is not fatal here — it is only required for the
 * non-opaque internal surface pool (see qsv_init_child_ctx()).
 *
 * @return 0 on success, AVERROR_UNKNOWN if querying impl/version fails.
 */
static int qsv_device_init(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDeviceContext *s = ctx->internal->priv;

    mfxStatus err;
    int i;

    for (i = 0; supported_handle_types[i].handle_type; i++) {
        err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
                                     &s->handle);
        if (err == MFX_ERR_NONE) {
            s->handle_type       = supported_handle_types[i].handle_type;
            s->child_device_type = supported_handle_types[i].device_type;
            s->child_pix_fmt     = supported_handle_types[i].pix_fmt;
            break;
        }
    }
    if (!s->handle) {
        /* Only a verbose message: opaque-memory operation can still work. */
        av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }

    err = MFXQueryIMPL(hwctx->session, &s->impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(hwctx->session, &s->ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
/**
 * Free all frames-context private state: the internal VPP transfer
 * sessions, allocator tables, the internal surface array and the
 * reference to the child frames context.
 */
static void qsv_frames_uninit(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;

    if (s->session_download) {
        MFXVideoVPP_Close(s->session_download);
        MFXClose(s->session_download);
    }
    s->session_download = NULL;
    s->session_download_init = 0;

    if (s->session_upload) {
        MFXVideoVPP_Close(s->session_upload);
        MFXClose(s->session_upload);
    }
    s->session_upload = NULL;
    s->session_upload_init = 0;

#if HAVE_PTHREADS
    pthread_mutex_destroy(&s->session_lock);
    pthread_cond_destroy(&s->session_cond);
#endif

    av_freep(&s->mem_ids);
    av_freep(&s->surface_ptrs);
    av_freep(&s->surfaces_internal);
    av_buffer_unref(&s->child_frames_ref);
}
/* Release callback for pool buffers: intentionally a no-op, since the
 * surfaces live in s->surfaces_internal and are freed all at once in
 * qsv_frames_uninit(). */
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}
  168. static AVBufferRef *qsv_pool_alloc(void *opaque, buffer_size_t size)
  169. {
  170. AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
  171. QSVFramesContext *s = ctx->internal->priv;
  172. AVQSVFramesContext *hwctx = ctx->hwctx;
  173. if (s->nb_surfaces_used < hwctx->nb_surfaces) {
  174. s->nb_surfaces_used++;
  175. return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
  176. sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
  177. }
  178. return NULL;
  179. }
/**
 * Create the child (VAAPI or DXVA2) device and frames context that
 * provide the real video memory behind a non-opaque internal pool, and
 * point each preallocated mfxFrameSurface1's Data.MemId at the matching
 * child surface.
 *
 * On success, s->child_frames_ref keeps the child frames context; the
 * temporary device reference is always released (the frames context
 * holds its own reference to the device).
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
{
    AVQSVFramesContext *hwctx = ctx->hwctx;
    QSVFramesContext *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    /* The native device handle from qsv_device_init() is mandatory for
     * creating a child device. */
    if (!device_priv->handle) {
        av_log(ctx, AV_LOG_ERROR,
               "Cannot create a non-opaque internal surface pool without "
               "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        /* Reuse the display already owned by the MFX session. */
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    child_frames_ctx->format            = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format         = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    /* libmfx requires 16-aligned surface dimensions. */
    child_frames_ctx->width             = FFALIGN(ctx->width, 16);
    child_frames_ctx->height            = FFALIGN(ctx->height, 16);

#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

    /* Wire each internal mfx surface to the child surface with the same
     * index; also normalize hwctx->frame_type to the value matching the
     * child pool. */
#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    s->child_frames_ref = child_frames_ref;
    child_frames_ref    = NULL;

fail:
    /* Success also passes through here: child_frames_ref was NULLed
     * above, so only the device reference is dropped. */
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}
/**
 * Fill the mfxFrameInfo of one surface from the frames context:
 * FourCC, bit depth, chroma format and 16-aligned dimensions, with
 * crop set to the real width/height.
 *
 * @return 0 on success, AVERROR(EINVAL) if sw_format is unknown or
 *         has no libmfx FourCC mapping.
 */
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
{
    const AVPixFmtDescriptor *desc;
    uint32_t fourcc;

    desc = av_pix_fmt_desc_get(ctx->sw_format);
    if (!desc)
        return AVERROR(EINVAL);

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc)
        return AVERROR(EINVAL);

    surf->Info.BitDepthLuma   = desc->comp[0].depth;
    surf->Info.BitDepthChroma = desc->comp[0].depth;
    /* Shift is set for >8-bit formats (e.g. P010) — presumably because
     * the data is MSB-aligned in that case; confirm against the libmfx
     * docs for each FourCC. */
    surf->Info.Shift          = desc->comp[0].depth > 8;

    if (desc->log2_chroma_w && desc->log2_chroma_h)
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
    else if (desc->log2_chroma_w)
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
    else
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;

    surf->Info.FourCC         = fourcc;
    surf->Info.Width          = FFALIGN(ctx->width, 16);
    surf->Info.CropW          = ctx->width;
    surf->Info.Height         = FFALIGN(ctx->height, 16);
    surf->Info.CropH          = ctx->height;
    /* Arbitrary frame rate; libmfx wants one set (see the comment in
     * qsv_init_internal_session()). */
    surf->Info.FrameRateExtN  = 25;
    surf->Info.FrameRateExtD  = 1;
    surf->Info.PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;

    return 0;
}
/**
 * Create the internal, fixed-size surface pool: allocate and describe
 * ctx->initial_pool_size surfaces, back them with a child frames
 * context for non-opaque memory, and install the buffer pool that
 * hands the surfaces out.
 *
 * NOTE(review): the fourcc parameter is not used here — the format was
 * already validated by the caller (qsv_frames_init()).
 *
 * Partial allocations on the error paths are cleaned up later by
 * qsv_frames_uninit().
 */
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int i, ret = 0;

    /* Dynamic pools are not supported: QSV needs all surfaces up front. */
    if (ctx->initial_pool_size <= 0) {
        av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
        return AVERROR(EINVAL);
    }

    s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
                                            sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
        if (ret < 0)
            return ret;
    }

    /* Opaque surfaces need no real video memory; otherwise attach a
     * child (VAAPI/DXVA2) pool. */
    if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
        ret = qsv_init_child_ctx(ctx);
        if (ret < 0)
            return ret;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
                                                        ctx, qsv_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces    = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}
/**
 * mfxFrameAllocator.Alloc callback for the internal transfer sessions.
 * Nothing is actually allocated: only external VPP-in/out requests
 * matching the surfaces this context already owns are served, and the
 * preexisting mem_ids table is returned.
 */
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    AVHWFramesContext    *ctx = pthis;
    QSVFramesContext       *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;
    mfxFrameInfo *i  = &req->Info;
    mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;

    /* Refuse anything that is not an external VPP surface request. */
    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    /* The request must describe the surfaces we already have. */
    if (i->Width  > i1->Width  || i->Height > i1->Height ||
        i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
        av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
               "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
               i->Width,  i->Height,  i->FourCC,  i->ChromaFormat,
               i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
        return MFX_ERR_UNSUPPORTED;
    }

    resp->mids           = s->mem_ids;
    resp->NumFrameActual = hwctx->nb_surfaces;

    return MFX_ERR_NONE;
}
/* Free callback: nothing to do — the surfaces belong to the frames
 * context, not to libmfx. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
/* Lock callback: CPU mapping of these video surfaces is not provided
 * through this allocator. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* Unlock callback: see frame_lock() — mapping is unsupported. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* GetHDL callback: the stored MemId already is the native handle. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
/**
 * Create an internal MFX session used only for uploading (upload=1,
 * system -> video memory) or downloading (upload=0) surfaces via VPP.
 *
 * On return *session is either a fully initialized session, or NULL
 * when VPP init failed — that case is non-fatal (returns 0) and the
 * callers fall back to transfers through the child frames context.
 *
 * NOTE(review): the SetHandle/SetFrameAllocator failure paths return
 * AVERROR_UNKNOWN while leaving *session set (it is closed later by
 * qsv_frames_uninit()); this is asymmetric with the VPP-init failure
 * path which closes and NULLs it.  Changing it would interact with the
 * cond-wait protocol in the transfer functions — confirm before touching.
 */
static int qsv_init_internal_session(AVHWFramesContext *ctx,
                                     mfxSession *session, int upload)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;
    QSVDeviceContext    *device_priv = ctx->device_ctx->internal->priv;
    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxFrameAllocator frame_allocator = {
        .pthis  = ctx,
        .Alloc  = frame_alloc,
        .Lock   = frame_lock,
        .Unlock = frame_unlock,
        .GetHDL = frame_get_hdl,
        .Free   = frame_free,
    };

    mfxVideoParam par;
    mfxStatus err;

    err = MFXInit(device_priv->impl, &device_priv->ver, session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
        return AVERROR_UNKNOWN;
    }

    if (device_priv->handle) {
        err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
                                     device_priv->handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (!opaque) {
        /* Non-opaque surfaces are served through our no-op allocator. */
        err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    if (opaque) {
        par.ExtParam    = s->ext_buffers;
        par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
        par.IOPattern   = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
                                   MFX_IOPATTERN_IN_OPAQUE_MEMORY;
    } else {
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
                                 MFX_IOPATTERN_IN_VIDEO_MEMORY;
    }

    /* The other side of the transfer is always system memory. */
    par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
                              MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    par.AsyncDepth = 1;

    par.vpp.In = frames_hwctx->surfaces[0].Info;

    /* Apparently VPP requires the frame rate to be set to some value, otherwise
     * init will fail (probably for the framerate conversion filter). Since we
     * are only doing data upload/download here, we just invent an arbitrary
     * value */
    par.vpp.In.FrameRateExtN = 25;
    par.vpp.In.FrameRateExtD = 1;
    par.vpp.Out = par.vpp.In;

    err = MFXVideoVPP_Init(*session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
               "Surface upload/download will not be possible\n");
        MFXClose(*session);
        *session = NULL;
    }

    return 0;
}
/**
 * Frames-context init: validate the sw format, create the internal
 * pool if the user did not supply one, and prepare either the opaque
 * allocation request (opaque surfaces) or the mem_ids table (video
 * memory) used by the internal transfer sessions, which themselves are
 * created lazily on first transfer.
 */
static int qsv_frames_init(AVHWFramesContext *ctx)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    uint32_t fourcc;
    int i, ret;

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ret = qsv_init_pool(ctx, fourcc);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    if (opaque) {
        /* Opaque path: libmfx gets an array of surface pointers via the
         * ext buffer attached in qsv_init_internal_session(). */
        s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = frames_hwctx->surfaces + i;

        s->opaque_alloc.In.Surfaces   = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
        s->opaque_alloc.In.Type       = frames_hwctx->frame_type;

        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
    } else {
        /* Video-memory path: frame_alloc() returns these mem ids. */
        s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
    }

    s->session_download = NULL;
    s->session_upload   = NULL;

    s->session_download_init = 0;
    s->session_upload_init   = 0;

#if HAVE_PTHREADS
    pthread_mutex_init(&s->session_lock, NULL);
    pthread_cond_init(&s->session_cond, NULL);
#endif

    return 0;
}
  482. static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
  483. {
  484. frame->buf[0] = av_buffer_pool_get(ctx->pool);
  485. if (!frame->buf[0])
  486. return AVERROR(ENOMEM);
  487. frame->data[3] = frame->buf[0]->data;
  488. frame->format = AV_PIX_FMT_QSV;
  489. frame->width = ctx->width;
  490. frame->height = ctx->height;
  491. return 0;
  492. }
  493. static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
  494. enum AVHWFrameTransferDirection dir,
  495. enum AVPixelFormat **formats)
  496. {
  497. enum AVPixelFormat *fmts;
  498. fmts = av_malloc_array(2, sizeof(*fmts));
  499. if (!fmts)
  500. return AVERROR(ENOMEM);
  501. fmts[0] = ctx->sw_format;
  502. fmts[1] = AV_PIX_FMT_NONE;
  503. *formats = fmts;
  504. return 0;
  505. }
/**
 * Derive a child (VAAPI/DXVA2) frames context from a QSV one by
 * extracting the native surfaces hidden behind each Data.MemId.
 * The destination hwctx gets freshly allocated surface arrays; it does
 * not take ownership of the QSV surfaces themselves.
 */
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
                                  AVHWFramesContext *src_ctx, int flags)
{
    AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
    int i;

    switch (dst_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
                                                      sizeof(*dst_hwctx->surface_ids));
            if (!dst_hwctx->surface_ids)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                /* MemId points at a VASurfaceID for VAAPI-backed pools. */
                dst_hwctx->surface_ids[i] =
                    *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
                                                   sizeof(*dst_hwctx->surfaces));
            if (!dst_hwctx->surfaces)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                /* MemId is the IDirect3DSurface9 pointer itself. */
                dst_hwctx->surfaces[i] =
                    (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
            else
                dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    return 0;
}
  551. static int qsv_map_from(AVHWFramesContext *ctx,
  552. AVFrame *dst, const AVFrame *src, int flags)
  553. {
  554. QSVFramesContext *s = ctx->internal->priv;
  555. mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
  556. AVHWFramesContext *child_frames_ctx;
  557. const AVPixFmtDescriptor *desc;
  558. uint8_t *child_data;
  559. AVFrame *dummy;
  560. int ret = 0;
  561. if (!s->child_frames_ref)
  562. return AVERROR(ENOSYS);
  563. child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
  564. switch (child_frames_ctx->device_ctx->type) {
  565. #if CONFIG_VAAPI
  566. case AV_HWDEVICE_TYPE_VAAPI:
  567. child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
  568. break;
  569. #endif
  570. #if CONFIG_DXVA2
  571. case AV_HWDEVICE_TYPE_DXVA2:
  572. child_data = surf->Data.MemId;
  573. break;
  574. #endif
  575. default:
  576. return AVERROR(ENOSYS);
  577. }
  578. if (dst->format == child_frames_ctx->format) {
  579. ret = ff_hwframe_map_create(s->child_frames_ref,
  580. dst, src, NULL, NULL);
  581. if (ret < 0)
  582. return ret;
  583. dst->width = src->width;
  584. dst->height = src->height;
  585. dst->data[3] = child_data;
  586. return 0;
  587. }
  588. desc = av_pix_fmt_desc_get(dst->format);
  589. if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
  590. // This only supports mapping to software.
  591. return AVERROR(ENOSYS);
  592. }
  593. dummy = av_frame_alloc();
  594. if (!dummy)
  595. return AVERROR(ENOMEM);
  596. dummy->buf[0] = av_buffer_ref(src->buf[0]);
  597. dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
  598. if (!dummy->buf[0] || !dummy->hw_frames_ctx)
  599. goto fail;
  600. dummy->format = child_frames_ctx->format;
  601. dummy->width = src->width;
  602. dummy->height = src->height;
  603. dummy->data[3] = child_data;
  604. ret = av_hwframe_map(dst, dummy, flags);
  605. fail:
  606. av_frame_free(&dummy);
  607. return ret;
  608. }
/**
 * Up/download through the child frames context, used when no internal
 * VPP session is available.  Direction is inferred from which side is
 * the hw frame (src->hw_frames_ctx set => download).
 *
 * The temporary frame borrows src/dst buffers and the child frames
 * reference WITHOUT taking new references, so those fields are cleared
 * again before av_frame_free() to avoid unreferencing them.
 */
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
                                   const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
    int download = !!src->hw_frames_ctx;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);

    AVFrame *dummy;
    int ret;

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    dummy->format        = child_frames_ctx->format;
    dummy->width         = src->width;
    dummy->height        = src->height;
    dummy->buf[0]        = download ? src->buf[0] : dst->buf[0];
    dummy->data[3]       = surf->Data.MemId;
    dummy->hw_frames_ctx = s->child_frames_ref;

    ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
                     av_hwframe_transfer_data(dummy, src, 0);

    /* Detach borrowed references before freeing. */
    dummy->buf[0]        = NULL;
    dummy->data[3]       = NULL;
    dummy->hw_frames_ctx = NULL;

    av_frame_free(&dummy);

    return ret;
}
/**
 * Point an mfxFrameSurface1's Data plane pointers at the planes of a
 * system-memory AVFrame, for use as the system side of a VPP transfer.
 *
 * NOTE(review): returns MFX_ERR_UNSUPPORTED (an mfxStatus, not an
 * AVERROR) for unhandled formats, and the callers in this file do not
 * check the return value — worth confirming upstream intent.
 */
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_P010:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;
    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;
    case AV_PIX_FMT_BGRA:
        /* Packed BGRA: component pointers are byte offsets into plane 0. */
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
#if CONFIG_VAAPI
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;
    case AV_PIX_FMT_Y210:
        /* 16-bit packed layout: offsets are in mfxU16 units. */
        surface->Data.Y16 = (mfxU16 *)frame->data[0];
        surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
        surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
        break;
#endif
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch     = frame->linesize[0];
    surface->Data.TimeStamp = frame->pts;

    return 0;
}
/**
 * Download a QSV surface into a system-memory frame via the internal
 * VPP session, falling back to the child frames context when that
 * session could not be created.
 *
 * Lazy session init: the first thread to grab session_lock (trylock)
 * creates the session; concurrent threads block on session_cond until
 * session_download_init is set.  Without pthreads the loop body simply
 * runs once.
 */
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    QSVFramesContext  *s = ctx->internal->priv;
    mfxFrameSurface1 out = {{ 0 }};
    mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret = 0;

    while (!s->session_download_init && !s->session_download && !ret) {
#if HAVE_PTHREADS
        if (pthread_mutex_trylock(&s->session_lock) == 0) {
#endif
            if (!s->session_download_init) {
                ret = qsv_init_internal_session(ctx, &s->session_download, 0);
                if (s->session_download)
                    s->session_download_init = 1;
            }
#if HAVE_PTHREADS
            pthread_mutex_unlock(&s->session_lock);
            pthread_cond_signal(&s->session_cond);
        } else {
            /* Another thread is initializing: wait for it to finish. */
            pthread_mutex_lock(&s->session_lock);
            while (!s->session_download_init && !s->session_download) {
                pthread_cond_wait(&s->session_cond, &s->session_lock);
            }
            pthread_mutex_unlock(&s->session_lock);
        }
#endif
    }

    if (ret < 0)
        return ret;

    if (!s->session_download) {
        /* VPP session unavailable: transfer through the child context. */
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst, src);

        av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
        return AVERROR(ENOSYS);
    }

    out.Info = in->Info;
    /* NOTE(review): return value ignored; dst format was constrained by
     * qsv_transfer_get_formats(), but an explicit check would be safer. */
    map_frame_to_surface(dst, &out);

    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
        return AVERROR_UNKNOWN;
    }

    /* Wait (1000 ms per iteration) until the async operation completes. */
    do {
        err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    return 0;
}
  731. static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
  732. const AVFrame *src)
  733. {
  734. QSVFramesContext *s = ctx->internal->priv;
  735. mfxFrameSurface1 in = {{ 0 }};
  736. mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
  737. mfxSyncPoint sync = NULL;
  738. mfxStatus err;
  739. int ret = 0;
  740. /* make a copy if the input is not padded as libmfx requires */
  741. AVFrame tmp_frame;
  742. const AVFrame *src_frame;
  743. int realigned = 0;
  744. while (!s->session_upload_init && !s->session_upload && !ret) {
  745. #if HAVE_PTHREADS
  746. if (pthread_mutex_trylock(&s->session_lock) == 0) {
  747. #endif
  748. if (!s->session_upload_init) {
  749. ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
  750. if (s->session_upload)
  751. s->session_upload_init = 1;
  752. }
  753. #if HAVE_PTHREADS
  754. pthread_mutex_unlock(&s->session_lock);
  755. pthread_cond_signal(&s->session_cond);
  756. } else {
  757. pthread_mutex_lock(&s->session_lock);
  758. while (!s->session_upload_init && !s->session_upload) {
  759. pthread_cond_wait(&s->session_cond, &s->session_lock);
  760. }
  761. pthread_mutex_unlock(&s->session_lock);
  762. }
  763. #endif
  764. }
  765. if (ret < 0)
  766. return ret;
  767. if (src->height & 15 || src->linesize[0] & 15) {
  768. realigned = 1;
  769. memset(&tmp_frame, 0, sizeof(tmp_frame));
  770. tmp_frame.format = src->format;
  771. tmp_frame.width = FFALIGN(src->width, 16);
  772. tmp_frame.height = FFALIGN(src->height, 16);
  773. ret = av_frame_get_buffer(&tmp_frame, 0);
  774. if (ret < 0)
  775. return ret;
  776. ret = av_frame_copy(&tmp_frame, src);
  777. if (ret < 0) {
  778. av_frame_unref(&tmp_frame);
  779. return ret;
  780. }
  781. }
  782. src_frame = realigned ? &tmp_frame : src;
  783. if (!s->session_upload) {
  784. if (s->child_frames_ref)
  785. return qsv_transfer_data_child(ctx, dst, src_frame);
  786. av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
  787. return AVERROR(ENOSYS);
  788. }
  789. in.Info = out->Info;
  790. map_frame_to_surface(src_frame, &in);
  791. do {
  792. err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
  793. if (err == MFX_WRN_DEVICE_BUSY)
  794. av_usleep(1);
  795. } while (err == MFX_WRN_DEVICE_BUSY);
  796. if (err < 0 || !sync) {
  797. av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
  798. return AVERROR_UNKNOWN;
  799. }
  800. do {
  801. err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
  802. } while (err == MFX_WRN_IN_EXECUTION);
  803. if (err < 0) {
  804. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
  805. return AVERROR_UNKNOWN;
  806. }
  807. if (realigned)
  808. av_frame_unref(&tmp_frame);
  809. return 0;
  810. }
/**
 * Derive a QSV frames context from a child (VAAPI/DXVA2) one: allocate
 * mfx surfaces wrapping the source's native surfaces and expose them
 * through the destination's public hwctx.
 *
 * NOTE(review): qsv_init_surface() return values are not checked here,
 * unlike in qsv_init_pool() — the formats were presumably validated
 * earlier; confirm before relying on it.
 */
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
                                AVHWFramesContext *src_ctx, int flags)
{
    QSVFramesContext *s = dst_ctx->internal->priv;
    AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
    int i;

    switch (src_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
            s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
                                                    sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                /* MemId stores a pointer into the source's surface ID array. */
                s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            dst_hwctx->frame_type  = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
            s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
                                                    sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                /* MemId is the IDirect3DSurface9 pointer itself. */
                s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
                dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
            else
                dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    dst_hwctx->surfaces = s->surfaces_internal;

    return 0;
}
/**
 * Map a child-device frame (VAAPI or DXVA2) to a QSV frame.
 *
 * The source frame must belong to the same surface pool this QSV frames
 * context was derived from: the function searches the pool for the surface
 * whose MemId matches src->data[3] and fails if none is found.
 *
 * Returns 0 on success, a negative AVERROR code on failure.
 */
static int qsv_map_to(AVHWFramesContext *dst_ctx,
                      AVFrame *dst, const AVFrame *src, int flags)
{
    AVQSVFramesContext *hwctx = dst_ctx->hwctx;
    int i, err;

    /* Locate the pool entry wrapping the child surface carried in
     * src->data[3].  For VAAPI, MemId points at a VASurfaceID; for DXVA2 it
     * is the IDirect3DSurface9 pointer itself. */
    for (i = 0; i < hwctx->nb_surfaces; i++) {
#if CONFIG_VAAPI
        if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
            (VASurfaceID)(uintptr_t)src->data[3])
            break;
#endif
#if CONFIG_DXVA2
        if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
            (IDirect3DSurface9*)(uintptr_t)src->data[3])
            break;
#endif
    }
    if (i >= hwctx->nb_surfaces) {
        av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
               "is not in the mapped frames context.\n");
        return AVERROR(EINVAL);
    }

    /* Tie dst's lifetime to src so the underlying surface stays valid. */
    err = ff_hwframe_map_create(dst->hw_frames_ctx,
                                dst, src, NULL, NULL);
    if (err)
        return err;

    dst->width   = src->width;
    dst->height  = src->height;
    /* AV_PIX_FMT_QSV stores the mfxFrameSurface1 pointer in data[3] */
    dst->data[3] = (uint8_t*)&hwctx->surfaces[i];

    return 0;
}
  892. static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
  893. const void *hwconfig,
  894. AVHWFramesConstraints *constraints)
  895. {
  896. int i;
  897. constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
  898. sizeof(*constraints->valid_sw_formats));
  899. if (!constraints->valid_sw_formats)
  900. return AVERROR(ENOMEM);
  901. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
  902. constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
  903. constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
  904. constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
  905. if (!constraints->valid_hw_formats)
  906. return AVERROR(ENOMEM);
  907. constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
  908. constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
  909. return 0;
  910. }
  911. static void qsv_device_free(AVHWDeviceContext *ctx)
  912. {
  913. AVQSVDeviceContext *hwctx = ctx->hwctx;
  914. QSVDevicePriv *priv = ctx->user_opaque;
  915. if (hwctx->session)
  916. MFXClose(hwctx->session);
  917. av_buffer_unref(&priv->child_device_ctx);
  918. av_freep(&priv);
  919. }
  920. static mfxIMPL choose_implementation(const char *device)
  921. {
  922. static const struct {
  923. const char *name;
  924. mfxIMPL impl;
  925. } impl_map[] = {
  926. { "auto", MFX_IMPL_AUTO },
  927. { "sw", MFX_IMPL_SOFTWARE },
  928. { "hw", MFX_IMPL_HARDWARE },
  929. { "auto_any", MFX_IMPL_AUTO_ANY },
  930. { "hw_any", MFX_IMPL_HARDWARE_ANY },
  931. { "hw2", MFX_IMPL_HARDWARE2 },
  932. { "hw3", MFX_IMPL_HARDWARE3 },
  933. { "hw4", MFX_IMPL_HARDWARE4 },
  934. };
  935. mfxIMPL impl = MFX_IMPL_AUTO_ANY;
  936. int i;
  937. if (device) {
  938. for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
  939. if (!strcmp(device, impl_map[i].name)) {
  940. impl = impl_map[i].impl;
  941. break;
  942. }
  943. if (i == FF_ARRAY_ELEMS(impl_map))
  944. impl = strtol(device, NULL, 0);
  945. }
  946. return impl;
  947. }
  948. static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
  949. mfxIMPL implementation,
  950. AVHWDeviceContext *child_device_ctx,
  951. int flags)
  952. {
  953. AVQSVDeviceContext *hwctx = ctx->hwctx;
  954. mfxVersion ver = { { 3, 1 } };
  955. mfxHDL handle;
  956. mfxHandleType handle_type;
  957. mfxStatus err;
  958. int ret;
  959. switch (child_device_ctx->type) {
  960. #if CONFIG_VAAPI
  961. case AV_HWDEVICE_TYPE_VAAPI:
  962. {
  963. AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  964. handle_type = MFX_HANDLE_VA_DISPLAY;
  965. handle = (mfxHDL)child_device_hwctx->display;
  966. }
  967. break;
  968. #endif
  969. #if CONFIG_DXVA2
  970. case AV_HWDEVICE_TYPE_DXVA2:
  971. {
  972. AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  973. handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
  974. handle = (mfxHDL)child_device_hwctx->devmgr;
  975. }
  976. break;
  977. #endif
  978. default:
  979. ret = AVERROR(ENOSYS);
  980. goto fail;
  981. }
  982. err = MFXInit(implementation, &ver, &hwctx->session);
  983. if (err != MFX_ERR_NONE) {
  984. av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
  985. "%d.\n", err);
  986. ret = AVERROR_UNKNOWN;
  987. goto fail;
  988. }
  989. err = MFXQueryVersion(hwctx->session, &ver);
  990. if (err != MFX_ERR_NONE) {
  991. av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
  992. ret = AVERROR_UNKNOWN;
  993. goto fail;
  994. }
  995. av_log(ctx, AV_LOG_VERBOSE,
  996. "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
  997. MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
  998. MFXClose(hwctx->session);
  999. err = MFXInit(implementation, &ver, &hwctx->session);
  1000. if (err != MFX_ERR_NONE) {
  1001. av_log(ctx, AV_LOG_ERROR,
  1002. "Error initializing an MFX session: %d.\n", err);
  1003. ret = AVERROR_UNKNOWN;
  1004. goto fail;
  1005. }
  1006. err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
  1007. if (err != MFX_ERR_NONE) {
  1008. av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
  1009. "%d\n", err);
  1010. ret = AVERROR_UNKNOWN;
  1011. goto fail;
  1012. }
  1013. return 0;
  1014. fail:
  1015. if (hwctx->session)
  1016. MFXClose(hwctx->session);
  1017. return ret;
  1018. }
/**
 * device_derive callback: derive a QSV device from an existing child device
 * (VAAPI or DXVA2), always using MFX_IMPL_HARDWARE_ANY.  The opts dictionary
 * is unused.
 */
static int qsv_device_derive(AVHWDeviceContext *ctx,
                             AVHWDeviceContext *child_device_ctx,
                             AVDictionary *opts, int flags)
{
    return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
                                        child_device_ctx, flags);
}
  1026. static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
  1027. AVDictionary *opts, int flags)
  1028. {
  1029. QSVDevicePriv *priv;
  1030. enum AVHWDeviceType child_device_type;
  1031. AVHWDeviceContext *child_device;
  1032. AVDictionary *child_device_opts;
  1033. AVDictionaryEntry *e;
  1034. mfxIMPL impl;
  1035. int ret;
  1036. priv = av_mallocz(sizeof(*priv));
  1037. if (!priv)
  1038. return AVERROR(ENOMEM);
  1039. ctx->user_opaque = priv;
  1040. ctx->free = qsv_device_free;
  1041. e = av_dict_get(opts, "child_device", NULL, 0);
  1042. child_device_opts = NULL;
  1043. if (CONFIG_VAAPI) {
  1044. child_device_type = AV_HWDEVICE_TYPE_VAAPI;
  1045. // libmfx does not actually implement VAAPI properly, rather it
  1046. // depends on the specific behaviour of a matching iHD driver when
  1047. // used on recent Intel hardware. Set options to the VAAPI device
  1048. // creation so that we should pick a usable setup by default if
  1049. // possible, even when multiple devices and drivers are available.
  1050. av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
  1051. av_dict_set(&child_device_opts, "driver", "iHD", 0);
  1052. } else if (CONFIG_DXVA2)
  1053. child_device_type = AV_HWDEVICE_TYPE_DXVA2;
  1054. else {
  1055. av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
  1056. return AVERROR(ENOSYS);
  1057. }
  1058. ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
  1059. e ? e->value : NULL, child_device_opts, 0);
  1060. av_dict_free(&child_device_opts);
  1061. if (ret < 0)
  1062. return ret;
  1063. child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
  1064. impl = choose_implementation(device);
  1065. return qsv_device_derive_from_child(ctx, impl, child_device, 0);
  1066. }
/* HWContextType descriptor registering all QSV device/frames callbacks
 * defined in this file with the generic hwcontext layer. */
const HWContextType ff_hwcontext_type_qsv = {
    .type                   = AV_HWDEVICE_TYPE_QSV,
    .name                   = "QSV",

    .device_hwctx_size      = sizeof(AVQSVDeviceContext),
    .device_priv_size       = sizeof(QSVDeviceContext),
    .frames_hwctx_size      = sizeof(AVQSVFramesContext),
    .frames_priv_size       = sizeof(QSVFramesContext),

    .device_create          = qsv_device_create,
    .device_derive          = qsv_device_derive,
    .device_init            = qsv_device_init,
    .frames_get_constraints = qsv_frames_get_constraints,
    .frames_init            = qsv_frames_init,
    .frames_uninit          = qsv_frames_uninit,
    .frames_get_buffer      = qsv_get_buffer,
    .transfer_get_formats   = qsv_transfer_get_formats,
    .transfer_data_to       = qsv_transfer_data_to,
    .transfer_data_from     = qsv_transfer_data_from,
    .map_to                 = qsv_map_to,
    .map_from               = qsv_map_from,
    .frames_derive_to       = qsv_frames_derive_to,
    .frames_derive_from     = qsv_frames_derive_from,

    /* AV_PIX_FMT_QSV is the only hardware format exposed */
    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
};