You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

793 lines
25KB

  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include <stdint.h>
  19. #include <string.h>
  20. #include <mfx/mfxvideo.h>
  21. #include "config.h"
  22. #if CONFIG_VAAPI
  23. #include "hwcontext_vaapi.h"
  24. #endif
  25. #if CONFIG_DXVA2
  26. #include "hwcontext_dxva2.h"
  27. #endif
  28. #include "buffer.h"
  29. #include "common.h"
  30. #include "hwcontext.h"
  31. #include "hwcontext_internal.h"
  32. #include "hwcontext_qsv.h"
  33. #include "mem.h"
  34. #include "pixfmt.h"
  35. #include "pixdesc.h"
  36. #include "time.h"
  37. typedef struct QSVDevicePriv {
  38. AVBufferRef *child_device_ctx;
  39. } QSVDevicePriv;
  40. typedef struct QSVDeviceContext {
  41. mfxHDL handle;
  42. mfxHandleType handle_type;
  43. mfxVersion ver;
  44. mfxIMPL impl;
  45. enum AVHWDeviceType child_device_type;
  46. enum AVPixelFormat child_pix_fmt;
  47. } QSVDeviceContext;
  48. typedef struct QSVFramesContext {
  49. mfxSession session_download;
  50. mfxSession session_upload;
  51. AVBufferRef *child_frames_ref;
  52. mfxFrameSurface1 *surfaces_internal;
  53. int nb_surfaces_used;
  54. // used in the frame allocator for non-opaque surfaces
  55. mfxMemId *mem_ids;
  56. // used in the opaque alloc request for opaque surfaces
  57. mfxFrameSurface1 **surface_ptrs;
  58. mfxExtOpaqueSurfaceAlloc opaque_alloc;
  59. mfxExtBuffer *ext_buffers[1];
  60. } QSVFramesContext;
  61. static const struct {
  62. mfxHandleType handle_type;
  63. enum AVHWDeviceType device_type;
  64. enum AVPixelFormat pix_fmt;
  65. } supported_handle_types[] = {
  66. #if CONFIG_VAAPI
  67. { MFX_HANDLE_VA_DISPLAY, AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
  68. #endif
  69. #if CONFIG_DXVA2
  70. { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
  71. #endif
  72. { 0 },
  73. };
  74. static const struct {
  75. enum AVPixelFormat pix_fmt;
  76. uint32_t fourcc;
  77. } supported_pixel_formats[] = {
  78. { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
  79. { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
  80. };
  81. static int qsv_device_init(AVHWDeviceContext *ctx)
  82. {
  83. AVQSVDeviceContext *hwctx = ctx->hwctx;
  84. QSVDeviceContext *s = ctx->internal->priv;
  85. mfxStatus err;
  86. int i;
  87. for (i = 0; supported_handle_types[i].handle_type; i++) {
  88. err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
  89. &s->handle);
  90. if (err == MFX_ERR_NONE) {
  91. s->handle_type = supported_handle_types[i].handle_type;
  92. s->child_device_type = supported_handle_types[i].device_type;
  93. s->child_pix_fmt = supported_handle_types[i].pix_fmt;
  94. break;
  95. }
  96. }
  97. if (!s->handle) {
  98. av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
  99. "from the session\n");
  100. }
  101. err = MFXQueryIMPL(hwctx->session, &s->impl);
  102. if (err == MFX_ERR_NONE)
  103. err = MFXQueryVersion(hwctx->session, &s->ver);
  104. if (err != MFX_ERR_NONE) {
  105. av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
  106. return AVERROR_UNKNOWN;
  107. }
  108. return 0;
  109. }
  110. static void qsv_frames_uninit(AVHWFramesContext *ctx)
  111. {
  112. QSVFramesContext *s = ctx->internal->priv;
  113. if (s->session_download) {
  114. MFXVideoVPP_Close(s->session_download);
  115. MFXClose(s->session_download);
  116. }
  117. s->session_download = NULL;
  118. if (s->session_upload) {
  119. MFXVideoVPP_Close(s->session_upload);
  120. MFXClose(s->session_upload);
  121. }
  122. s->session_upload = NULL;
  123. av_freep(&s->mem_ids);
  124. av_freep(&s->surface_ptrs);
  125. av_freep(&s->surfaces_internal);
  126. av_buffer_unref(&s->child_frames_ref);
  127. }
  128. static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
  129. {
  130. }
  131. static AVBufferRef *qsv_pool_alloc(void *opaque, int size)
  132. {
  133. AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
  134. QSVFramesContext *s = ctx->internal->priv;
  135. AVQSVFramesContext *hwctx = ctx->hwctx;
  136. if (s->nb_surfaces_used < hwctx->nb_surfaces) {
  137. s->nb_surfaces_used++;
  138. return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
  139. sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
  140. }
  141. return NULL;
  142. }
  143. static int qsv_init_child_ctx(AVHWFramesContext *ctx)
  144. {
  145. AVQSVFramesContext *hwctx = ctx->hwctx;
  146. QSVFramesContext *s = ctx->internal->priv;
  147. QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  148. AVBufferRef *child_device_ref = NULL;
  149. AVBufferRef *child_frames_ref = NULL;
  150. AVHWDeviceContext *child_device_ctx;
  151. AVHWFramesContext *child_frames_ctx;
  152. int i, ret = 0;
  153. if (!device_priv->handle) {
  154. av_log(ctx, AV_LOG_ERROR,
  155. "Cannot create a non-opaque internal surface pool without "
  156. "a hardware handle\n");
  157. return AVERROR(EINVAL);
  158. }
  159. child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
  160. if (!child_device_ref)
  161. return AVERROR(ENOMEM);
  162. child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
  163. #if CONFIG_VAAPI
  164. if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
  165. AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  166. child_device_hwctx->display = (VADisplay)device_priv->handle;
  167. }
  168. #endif
  169. #if CONFIG_DXVA2
  170. if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
  171. AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  172. child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
  173. }
  174. #endif
  175. ret = av_hwdevice_ctx_init(child_device_ref);
  176. if (ret < 0) {
  177. av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
  178. goto fail;
  179. }
  180. child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
  181. if (!child_frames_ref) {
  182. ret = AVERROR(ENOMEM);
  183. goto fail;
  184. }
  185. child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
  186. child_frames_ctx->format = device_priv->child_pix_fmt;
  187. child_frames_ctx->sw_format = ctx->sw_format;
  188. child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
  189. child_frames_ctx->width = ctx->width;
  190. child_frames_ctx->height = ctx->height;
  191. #if CONFIG_DXVA2
  192. if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
  193. AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
  194. if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
  195. child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
  196. else
  197. child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
  198. }
  199. #endif
  200. ret = av_hwframe_ctx_init(child_frames_ref);
  201. if (ret < 0) {
  202. av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
  203. goto fail;
  204. }
  205. #if CONFIG_VAAPI
  206. if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
  207. AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
  208. for (i = 0; i < ctx->initial_pool_size; i++)
  209. s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
  210. hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  211. }
  212. #endif
  213. #if CONFIG_DXVA2
  214. if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
  215. AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
  216. for (i = 0; i < ctx->initial_pool_size; i++)
  217. s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
  218. if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
  219. hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
  220. else
  221. hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  222. }
  223. #endif
  224. s->child_frames_ref = child_frames_ref;
  225. child_frames_ref = NULL;
  226. fail:
  227. av_buffer_unref(&child_device_ref);
  228. av_buffer_unref(&child_frames_ref);
  229. return ret;
  230. }
  231. static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
  232. {
  233. QSVFramesContext *s = ctx->internal->priv;
  234. AVQSVFramesContext *frames_hwctx = ctx->hwctx;
  235. const AVPixFmtDescriptor *desc;
  236. int i, ret = 0;
  237. desc = av_pix_fmt_desc_get(ctx->sw_format);
  238. if (!desc)
  239. return AVERROR_BUG;
  240. if (ctx->initial_pool_size <= 0) {
  241. av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
  242. return AVERROR(EINVAL);
  243. }
  244. s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
  245. sizeof(*s->surfaces_internal));
  246. if (!s->surfaces_internal)
  247. return AVERROR(ENOMEM);
  248. for (i = 0; i < ctx->initial_pool_size; i++) {
  249. mfxFrameSurface1 *surf = &s->surfaces_internal[i];
  250. surf->Info.BitDepthLuma = desc->comp[0].depth;
  251. surf->Info.BitDepthChroma = desc->comp[0].depth;
  252. surf->Info.Shift = desc->comp[0].depth > 8;
  253. if (desc->log2_chroma_w && desc->log2_chroma_h)
  254. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
  255. else if (desc->log2_chroma_w)
  256. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
  257. else
  258. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
  259. surf->Info.FourCC = fourcc;
  260. surf->Info.Width = ctx->width;
  261. surf->Info.CropW = ctx->width;
  262. surf->Info.Height = ctx->height;
  263. surf->Info.CropH = ctx->height;
  264. surf->Info.FrameRateExtN = 25;
  265. surf->Info.FrameRateExtD = 1;
  266. }
  267. if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
  268. ret = qsv_init_child_ctx(ctx);
  269. if (ret < 0)
  270. return ret;
  271. }
  272. ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
  273. ctx, qsv_pool_alloc, NULL);
  274. if (!ctx->internal->pool_internal)
  275. return AVERROR(ENOMEM);
  276. frames_hwctx->surfaces = s->surfaces_internal;
  277. frames_hwctx->nb_surfaces = ctx->initial_pool_size;
  278. return 0;
  279. }
  280. static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
  281. mfxFrameAllocResponse *resp)
  282. {
  283. AVHWFramesContext *ctx = pthis;
  284. QSVFramesContext *s = ctx->internal->priv;
  285. AVQSVFramesContext *hwctx = ctx->hwctx;
  286. mfxFrameInfo *i = &req->Info;
  287. mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
  288. if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
  289. !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
  290. !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
  291. return MFX_ERR_UNSUPPORTED;
  292. if (i->Width != i1->Width || i->Height != i1->Height ||
  293. i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
  294. av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
  295. "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
  296. i->Width, i->Height, i->FourCC, i->ChromaFormat,
  297. i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
  298. return MFX_ERR_UNSUPPORTED;
  299. }
  300. resp->mids = s->mem_ids;
  301. resp->NumFrameActual = hwctx->nb_surfaces;
  302. return MFX_ERR_NONE;
  303. }
  304. static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
  305. {
  306. return MFX_ERR_NONE;
  307. }
  308. static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
  309. {
  310. return MFX_ERR_UNSUPPORTED;
  311. }
  312. static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
  313. {
  314. return MFX_ERR_UNSUPPORTED;
  315. }
  316. static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
  317. {
  318. *hdl = mid;
  319. return MFX_ERR_NONE;
  320. }
  321. static int qsv_init_internal_session(AVHWFramesContext *ctx,
  322. mfxSession *session, int upload)
  323. {
  324. QSVFramesContext *s = ctx->internal->priv;
  325. AVQSVFramesContext *frames_hwctx = ctx->hwctx;
  326. QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  327. int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
  328. mfxFrameAllocator frame_allocator = {
  329. .pthis = ctx,
  330. .Alloc = frame_alloc,
  331. .Lock = frame_lock,
  332. .Unlock = frame_unlock,
  333. .GetHDL = frame_get_hdl,
  334. .Free = frame_free,
  335. };
  336. mfxVideoParam par;
  337. mfxStatus err;
  338. err = MFXInit(device_priv->impl, &device_priv->ver, session);
  339. if (err != MFX_ERR_NONE) {
  340. av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
  341. return AVERROR_UNKNOWN;
  342. }
  343. if (device_priv->handle) {
  344. err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
  345. device_priv->handle);
  346. if (err != MFX_ERR_NONE)
  347. return AVERROR_UNKNOWN;
  348. }
  349. if (!opaque) {
  350. err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
  351. if (err != MFX_ERR_NONE)
  352. return AVERROR_UNKNOWN;
  353. }
  354. memset(&par, 0, sizeof(par));
  355. if (opaque) {
  356. par.ExtParam = s->ext_buffers;
  357. par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
  358. par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
  359. MFX_IOPATTERN_IN_OPAQUE_MEMORY;
  360. } else {
  361. par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
  362. MFX_IOPATTERN_IN_VIDEO_MEMORY;
  363. }
  364. par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
  365. MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  366. par.AsyncDepth = 1;
  367. par.vpp.In = frames_hwctx->surfaces[0].Info;
  368. /* Apparently VPP requires the frame rate to be set to some value, otherwise
  369. * init will fail (probably for the framerate conversion filter). Since we
  370. * are only doing data upload/download here, we just invent an arbitrary
  371. * value */
  372. par.vpp.In.FrameRateExtN = 25;
  373. par.vpp.In.FrameRateExtD = 1;
  374. par.vpp.Out = par.vpp.In;
  375. err = MFXVideoVPP_Init(*session, &par);
  376. if (err != MFX_ERR_NONE) {
  377. av_log(ctx, AV_LOG_ERROR, "Error opening the internal VPP session\n");
  378. return AVERROR_UNKNOWN;
  379. }
  380. return 0;
  381. }
  382. static int qsv_frames_init(AVHWFramesContext *ctx)
  383. {
  384. QSVFramesContext *s = ctx->internal->priv;
  385. AVQSVFramesContext *frames_hwctx = ctx->hwctx;
  386. int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
  387. uint32_t fourcc = 0;
  388. int i, ret;
  389. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
  390. if (supported_pixel_formats[i].pix_fmt == ctx->sw_format) {
  391. fourcc = supported_pixel_formats[i].fourcc;
  392. break;
  393. }
  394. }
  395. if (!fourcc) {
  396. av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
  397. return AVERROR(ENOSYS);
  398. }
  399. if (!ctx->pool) {
  400. ret = qsv_init_pool(ctx, fourcc);
  401. if (ret < 0) {
  402. av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
  403. return ret;
  404. }
  405. }
  406. if (opaque) {
  407. s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
  408. sizeof(*s->surface_ptrs));
  409. if (!s->surface_ptrs)
  410. return AVERROR(ENOMEM);
  411. for (i = 0; i < frames_hwctx->nb_surfaces; i++)
  412. s->surface_ptrs[i] = frames_hwctx->surfaces + i;
  413. s->opaque_alloc.In.Surfaces = s->surface_ptrs;
  414. s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
  415. s->opaque_alloc.In.Type = frames_hwctx->frame_type;
  416. s->opaque_alloc.Out = s->opaque_alloc.In;
  417. s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
  418. s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
  419. s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
  420. } else {
  421. s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
  422. if (!s->mem_ids)
  423. return AVERROR(ENOMEM);
  424. for (i = 0; i < frames_hwctx->nb_surfaces; i++)
  425. s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
  426. }
  427. ret = qsv_init_internal_session(ctx, &s->session_download, 0);
  428. if (ret < 0)
  429. return ret;
  430. ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
  431. if (ret < 0)
  432. return ret;
  433. return 0;
  434. }
  435. static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
  436. {
  437. frame->buf[0] = av_buffer_pool_get(ctx->pool);
  438. if (!frame->buf[0])
  439. return AVERROR(ENOMEM);
  440. frame->data[3] = frame->buf[0]->data;
  441. frame->format = AV_PIX_FMT_QSV;
  442. frame->width = ctx->width;
  443. frame->height = ctx->height;
  444. return 0;
  445. }
  446. static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
  447. enum AVHWFrameTransferDirection dir,
  448. enum AVPixelFormat **formats)
  449. {
  450. enum AVPixelFormat *fmts;
  451. fmts = av_malloc_array(2, sizeof(*fmts));
  452. if (!fmts)
  453. return AVERROR(ENOMEM);
  454. fmts[0] = ctx->sw_format;
  455. fmts[1] = AV_PIX_FMT_NONE;
  456. *formats = fmts;
  457. return 0;
  458. }
  459. static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
  460. const AVFrame *src)
  461. {
  462. QSVFramesContext *s = ctx->internal->priv;
  463. mfxFrameSurface1 out = {{ 0 }};
  464. mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
  465. mfxSyncPoint sync = NULL;
  466. mfxStatus err;
  467. out.Info = in->Info;
  468. out.Data.PitchLow = dst->linesize[0];
  469. out.Data.Y = dst->data[0];
  470. out.Data.U = dst->data[1];
  471. out.Data.V = dst->data[2];
  472. out.Data.A = dst->data[3];
  473. do {
  474. err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
  475. if (err == MFX_WRN_DEVICE_BUSY)
  476. av_usleep(1);
  477. } while (err == MFX_WRN_DEVICE_BUSY);
  478. if (err < 0 || !sync) {
  479. av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
  480. return AVERROR_UNKNOWN;
  481. }
  482. do {
  483. err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
  484. } while (err == MFX_WRN_IN_EXECUTION);
  485. if (err < 0) {
  486. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
  487. return AVERROR_UNKNOWN;
  488. }
  489. return 0;
  490. }
  491. static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
  492. const AVFrame *src)
  493. {
  494. QSVFramesContext *s = ctx->internal->priv;
  495. mfxFrameSurface1 in = {{ 0 }};
  496. mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
  497. mfxSyncPoint sync = NULL;
  498. mfxStatus err;
  499. in.Info = out->Info;
  500. in.Data.PitchLow = src->linesize[0];
  501. in.Data.Y = src->data[0];
  502. in.Data.U = src->data[1];
  503. in.Data.V = src->data[2];
  504. in.Data.A = src->data[3];
  505. do {
  506. err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
  507. if (err == MFX_WRN_DEVICE_BUSY)
  508. av_usleep(1);
  509. } while (err == MFX_WRN_DEVICE_BUSY);
  510. if (err < 0 || !sync) {
  511. av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
  512. return AVERROR_UNKNOWN;
  513. }
  514. do {
  515. err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
  516. } while (err == MFX_WRN_IN_EXECUTION);
  517. if (err < 0) {
  518. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
  519. return AVERROR_UNKNOWN;
  520. }
  521. return 0;
  522. }
  523. static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
  524. const void *hwconfig,
  525. AVHWFramesConstraints *constraints)
  526. {
  527. int i;
  528. constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
  529. sizeof(*constraints->valid_sw_formats));
  530. if (!constraints->valid_sw_formats)
  531. return AVERROR(ENOMEM);
  532. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
  533. constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
  534. constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
  535. constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
  536. if (!constraints->valid_hw_formats)
  537. return AVERROR(ENOMEM);
  538. constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
  539. constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
  540. return 0;
  541. }
  542. static void qsv_device_free(AVHWDeviceContext *ctx)
  543. {
  544. AVQSVDeviceContext *hwctx = ctx->hwctx;
  545. QSVDevicePriv *priv = ctx->user_opaque;
  546. if (hwctx->session)
  547. MFXClose(hwctx->session);
  548. av_buffer_unref(&priv->child_device_ctx);
  549. av_freep(&priv);
  550. }
  551. static mfxIMPL choose_implementation(const char *device)
  552. {
  553. static const struct {
  554. const char *name;
  555. mfxIMPL impl;
  556. } impl_map[] = {
  557. { "auto", MFX_IMPL_AUTO },
  558. { "sw", MFX_IMPL_SOFTWARE },
  559. { "hw", MFX_IMPL_HARDWARE },
  560. { "auto_any", MFX_IMPL_AUTO_ANY },
  561. { "hw_any", MFX_IMPL_HARDWARE_ANY },
  562. { "hw2", MFX_IMPL_HARDWARE2 },
  563. { "hw3", MFX_IMPL_HARDWARE3 },
  564. { "hw4", MFX_IMPL_HARDWARE4 },
  565. };
  566. mfxIMPL impl = MFX_IMPL_AUTO_ANY;
  567. int i;
  568. if (device) {
  569. for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
  570. if (!strcmp(device, impl_map[i].name)) {
  571. impl = impl_map[i].impl;
  572. break;
  573. }
  574. if (i == FF_ARRAY_ELEMS(impl_map))
  575. impl = strtol(device, NULL, 0);
  576. }
  577. return impl;
  578. }
  579. static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
  580. AVDictionary *opts, int flags)
  581. {
  582. AVQSVDeviceContext *hwctx = ctx->hwctx;
  583. QSVDevicePriv *priv;
  584. enum AVHWDeviceType child_device_type;
  585. AVDictionaryEntry *e;
  586. mfxVersion ver = { { 3, 1 } };
  587. mfxIMPL impl;
  588. mfxHDL handle;
  589. mfxHandleType handle_type;
  590. mfxStatus err;
  591. int ret;
  592. priv = av_mallocz(sizeof(*priv));
  593. if (!priv)
  594. return AVERROR(ENOMEM);
  595. ctx->user_opaque = priv;
  596. ctx->free = qsv_device_free;
  597. e = av_dict_get(opts, "child_device", NULL, 0);
  598. if (CONFIG_VAAPI)
  599. child_device_type = AV_HWDEVICE_TYPE_VAAPI;
  600. else if (CONFIG_DXVA2)
  601. child_device_type = AV_HWDEVICE_TYPE_DXVA2;
  602. else {
  603. av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
  604. return AVERROR(ENOSYS);
  605. }
  606. ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
  607. e ? e->value : NULL, NULL, 0);
  608. if (ret < 0)
  609. return ret;
  610. {
  611. AVHWDeviceContext *child_device_ctx = (AVHWDeviceContext*)priv->child_device_ctx->data;
  612. #if CONFIG_VAAPI
  613. AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  614. handle_type = MFX_HANDLE_VA_DISPLAY;
  615. handle = (mfxHDL)child_device_hwctx->display;
  616. #elif CONFIG_DXVA2
  617. AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  618. handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
  619. handle = (mfxHDL)child_device_hwctx->devmgr;
  620. #endif
  621. }
  622. impl = choose_implementation(device);
  623. err = MFXInit(impl, &ver, &hwctx->session);
  624. if (err != MFX_ERR_NONE) {
  625. av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
  626. return AVERROR_UNKNOWN;
  627. }
  628. err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
  629. if (err != MFX_ERR_NONE)
  630. return AVERROR_UNKNOWN;
  631. return 0;
  632. }
  633. const HWContextType ff_hwcontext_type_qsv = {
  634. .type = AV_HWDEVICE_TYPE_QSV,
  635. .name = "QSV",
  636. .device_hwctx_size = sizeof(AVQSVDeviceContext),
  637. .device_priv_size = sizeof(QSVDeviceContext),
  638. .frames_hwctx_size = sizeof(AVQSVFramesContext),
  639. .frames_priv_size = sizeof(QSVFramesContext),
  640. .device_create = qsv_device_create,
  641. .device_init = qsv_device_init,
  642. .frames_get_constraints = qsv_frames_get_constraints,
  643. .frames_init = qsv_frames_init,
  644. .frames_uninit = qsv_frames_uninit,
  645. .frames_get_buffer = qsv_get_buffer,
  646. .transfer_get_formats = qsv_transfer_get_formats,
  647. .transfer_data_to = qsv_transfer_data_to,
  648. .transfer_data_from = qsv_transfer_data_from,
  649. .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
  650. };