You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

1219 lines
38KB

  1. /*
  2. * This file is part of Libav.
  3. *
  4. * Libav is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * Libav is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with Libav; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include <stdint.h>
  19. #include <string.h>
  20. #include <mfx/mfxvideo.h>
  21. #include "config.h"
  22. #if HAVE_PTHREADS
  23. #include <pthread.h>
  24. #endif
  25. #if CONFIG_VAAPI
  26. #include "hwcontext_vaapi.h"
  27. #endif
  28. #if CONFIG_DXVA2
  29. #include "hwcontext_dxva2.h"
  30. #endif
  31. #include "buffer.h"
  32. #include "common.h"
  33. #include "hwcontext.h"
  34. #include "hwcontext_internal.h"
  35. #include "hwcontext_qsv.h"
  36. #include "mem.h"
  37. #include "pixfmt.h"
  38. #include "pixdesc.h"
  39. #include "time.h"
/**
 * Private data stored in AVHWDeviceContext.user_opaque; holds a reference
 * to the child (backing) device context so it outlives the QSV device.
 * Released in qsv_device_free().
 */
typedef struct QSVDevicePriv {
    AVBufferRef *child_device_ctx;
} QSVDevicePriv;
/**
 * Per-device state, queried from the MFX session in qsv_device_init().
 */
typedef struct QSVDeviceContext {
    /* native device handle retrieved from the session, or NULL if none */
    mfxHDL handle;
    mfxHandleType handle_type;
    /* session runtime version and implementation, needed to create the
     * internal upload/download sessions later */
    mfxVersion ver;
    mfxIMPL impl;

    /* child hwdevice type / pixel format matching handle_type */
    enum AVHWDeviceType child_device_type;
    enum AVPixelFormat child_pix_fmt;
} QSVDeviceContext;
/**
 * Per-frames-context state (ctx->internal->priv).
 */
typedef struct QSVFramesContext {
    /* Internal VPP sessions for copying to/from system memory, created
     * lazily on first transfer. The *_init flags record that creation was
     * attempted, even when it failed and the session stayed NULL. */
    mfxSession session_download;
    int session_download_init;
    mfxSession session_upload;
    int session_upload_init;

#if HAVE_PTHREADS
    /* serialize the lazy session creation across threads */
    pthread_mutex_t session_lock;
    pthread_cond_t session_cond;
#endif

    /* child (VAAPI/DXVA2) frames context backing the internal pool */
    AVBufferRef *child_frames_ref;
    mfxFrameSurface1 *surfaces_internal;
    int nb_surfaces_used;

    // used in the frame allocator for non-opaque surfaces
    mfxMemId *mem_ids;
    // used in the opaque alloc request for opaque surfaces
    mfxFrameSurface1 **surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];
} QSVFramesContext;
/* Map from the mfx handle types we can retrieve from a session to the
 * corresponding child hwdevice type and pixel format.
 * Terminated by a zeroed sentinel entry. */
static const struct {
    mfxHandleType handle_type;
    enum AVHWDeviceType device_type;
    enum AVPixelFormat pix_fmt;
} supported_handle_types[] = {
#if CONFIG_VAAPI
    { MFX_HANDLE_VA_DISPLAY,          AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
#endif
#if CONFIG_DXVA2
    { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
#endif
    { 0 },
};
/* Software pixel formats usable with QSV surfaces and their mfx FourCC
 * equivalents. Iterated with FF_ARRAY_ELEMS (no sentinel). */
static const struct {
    enum AVPixelFormat pix_fmt;
    uint32_t fourcc;
} supported_pixel_formats[] = {
    { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
    { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
    { AV_PIX_FMT_PAL8, MFX_FOURCC_P8   },
};
  91. static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
  92. {
  93. int i;
  94. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
  95. if (supported_pixel_formats[i].pix_fmt == pix_fmt)
  96. return supported_pixel_formats[i].fourcc;
  97. }
  98. return 0;
  99. }
/**
 * Device init: retrieve a native hw handle from the session (if any) and
 * query the implementation and runtime version, which are required to
 * create the internal transfer sessions later.
 */
static int qsv_device_init(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDeviceContext       *s = ctx->internal->priv;

    mfxStatus err;
    int i;

    /* Try each known handle type until one can be retrieved; record the
     * matching child device type / pixel format alongside it. */
    for (i = 0; supported_handle_types[i].handle_type; i++) {
        err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
                                     &s->handle);
        if (err == MFX_ERR_NONE) {
            s->handle_type       = supported_handle_types[i].handle_type;
            s->child_device_type = supported_handle_types[i].device_type;
            s->child_pix_fmt     = supported_handle_types[i].pix_fmt;
            break;
        }
    }
    if (!s->handle) {
        /* Not fatal: a handle is only needed for non-opaque surface pools
         * and for attaching internal sessions to the device. */
        av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }

    err = MFXQueryIMPL(hwctx->session, &s->impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(hwctx->session, &s->ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
/**
 * Free all per-frames-context resources: the internal transfer sessions
 * (VPP component closed before the session itself), the synchronization
 * primitives, the allocator tables and the child frames context.
 */
static void qsv_frames_uninit(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;

    if (s->session_download) {
        MFXVideoVPP_Close(s->session_download);
        MFXClose(s->session_download);
    }
    s->session_download = NULL;
    s->session_download_init = 0;

    if (s->session_upload) {
        MFXVideoVPP_Close(s->session_upload);
        MFXClose(s->session_upload);
    }
    s->session_upload = NULL;
    s->session_upload_init = 0;

#if HAVE_PTHREADS
    pthread_mutex_destroy(&s->session_lock);
    pthread_cond_destroy(&s->session_cond);
#endif

    av_freep(&s->mem_ids);
    av_freep(&s->surface_ptrs);
    av_freep(&s->surfaces_internal);
    av_buffer_unref(&s->child_frames_ref);
}
/* No-op buffer free callback: pool buffers wrap entries of
 * s->surfaces_internal, which is owned and freed by the frames context. */
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}
  156. static AVBufferRef *qsv_pool_alloc(void *opaque, int size)
  157. {
  158. AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
  159. QSVFramesContext *s = ctx->internal->priv;
  160. AVQSVFramesContext *hwctx = ctx->hwctx;
  161. if (s->nb_surfaces_used < hwctx->nb_surfaces) {
  162. s->nb_surfaces_used++;
  163. return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
  164. sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
  165. }
  166. return NULL;
  167. }
/**
 * Create the child (VAAPI or DXVA2) device and frames contexts that back a
 * non-opaque internal surface pool, and point the MemId of each internal
 * mfx surface at the corresponding child surface.
 *
 * Requires s->surfaces_internal to be allocated already (qsv_init_pool())
 * and the device to expose a native handle.
 */
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
{
    AVQSVFramesContext *hwctx = ctx->hwctx;
    QSVFramesContext *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    if (!device_priv->handle) {
        av_log(ctx, AV_LOG_ERROR,
               "Cannot create a non-opaque internal surface pool without "
               "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;

    /* The child device wraps the native handle already owned by the QSV
     * session instead of opening a device of its own. */
#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    child_frames_ctx->format            = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format         = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    /* match the 16-aligned dimensions used for the mfx surfaces */
    child_frames_ctx->width             = FFALIGN(ctx->width, 16);
    child_frames_ctx->height            = FFALIGN(ctx->height, 16);

#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

    /* Wire each internal mfx surface to the corresponding child surface
     * and record the resulting mfx memory type. */
#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    s->child_frames_ref = child_frames_ref;
    child_frames_ref    = NULL;

fail:
    /* The child frames context holds its own device reference, so the
     * local device ref is released on both success and failure. */
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}
  256. static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
  257. {
  258. const AVPixFmtDescriptor *desc;
  259. uint32_t fourcc;
  260. desc = av_pix_fmt_desc_get(ctx->sw_format);
  261. if (!desc)
  262. return AVERROR(EINVAL);
  263. fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
  264. if (!fourcc)
  265. return AVERROR(EINVAL);
  266. surf->Info.BitDepthLuma = desc->comp[0].depth;
  267. surf->Info.BitDepthChroma = desc->comp[0].depth;
  268. surf->Info.Shift = desc->comp[0].depth > 8;
  269. if (desc->log2_chroma_w && desc->log2_chroma_h)
  270. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
  271. else if (desc->log2_chroma_w)
  272. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
  273. else
  274. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
  275. surf->Info.FourCC = fourcc;
  276. surf->Info.Width = FFALIGN(ctx->width, 16);
  277. surf->Info.CropW = ctx->width;
  278. surf->Info.Height = FFALIGN(ctx->height, 16);
  279. surf->Info.CropH = ctx->height;
  280. surf->Info.FrameRateExtN = 25;
  281. surf->Info.FrameRateExtD = 1;
  282. surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
  283. return 0;
  284. }
/**
 * Create the internal surface pool: allocate and initialize the array of
 * mfxFrameSurface1 and, for non-opaque memory, the child (VAAPI/DXVA2)
 * frames context providing the backing video memory. A fixed
 * initial_pool_size is required.
 *
 * NOTE(review): the fourcc parameter is unused here; each surface's FourCC
 * is derived again from ctx->sw_format inside qsv_init_surface().
 */
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int i, ret = 0;

    if (ctx->initial_pool_size <= 0) {
        av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
        return AVERROR(EINVAL);
    }

    s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
                                            sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
        if (ret < 0)
            return ret;
    }

    /* opaque surfaces have no child memory to allocate */
    if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
        ret = qsv_init_child_ctx(ctx);
        if (ret < 0)
            return ret;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
                                                        ctx, qsv_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces    = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}
/* mfxFrameAllocator.Alloc: serve allocation requests from the internal VPP
 * sessions out of the pre-existing surface pool instead of allocating.
 * Only external VPP video-memory requests whose geometry matches the
 * existing surfaces are accepted. */
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    AVHWFramesContext    *ctx = pthis;
    QSVFramesContext       *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;
    mfxFrameInfo *i  = &req->Info;
    mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    if (i->Width  != i1->Width  || i->Height       != i1->Height ||
        i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
        av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
               "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
               i->Width,  i->Height,  i->FourCC,  i->ChromaFormat,
               i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
        return MFX_ERR_UNSUPPORTED;
    }

    resp->mids           = s->mem_ids;
    resp->NumFrameActual = hwctx->nb_surfaces;

    return MFX_ERR_NONE;
}
/* mfxFrameAllocator.Free: nothing to free — the surfaces are owned by the
 * frames context, not the allocator. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
/* mfxFrameAllocator.Lock: mapping video memory to system memory is not
 * supported through this allocator. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* mfxFrameAllocator.Unlock: unsupported, matching frame_lock(). */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* mfxFrameAllocator.GetHDL: the MemId stored in each surface is the native
 * handle itself, so just pass it through. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
  357. static int qsv_init_internal_session(AVHWFramesContext *ctx,
  358. mfxSession *session, int upload)
  359. {
  360. QSVFramesContext *s = ctx->internal->priv;
  361. AVQSVFramesContext *frames_hwctx = ctx->hwctx;
  362. QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  363. int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
  364. mfxFrameAllocator frame_allocator = {
  365. .pthis = ctx,
  366. .Alloc = frame_alloc,
  367. .Lock = frame_lock,
  368. .Unlock = frame_unlock,
  369. .GetHDL = frame_get_hdl,
  370. .Free = frame_free,
  371. };
  372. mfxVideoParam par;
  373. mfxStatus err;
  374. err = MFXInit(device_priv->impl, &device_priv->ver, session);
  375. if (err != MFX_ERR_NONE) {
  376. av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
  377. return AVERROR_UNKNOWN;
  378. }
  379. if (device_priv->handle) {
  380. err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
  381. device_priv->handle);
  382. if (err != MFX_ERR_NONE)
  383. return AVERROR_UNKNOWN;
  384. }
  385. if (!opaque) {
  386. err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
  387. if (err != MFX_ERR_NONE)
  388. return AVERROR_UNKNOWN;
  389. }
  390. memset(&par, 0, sizeof(par));
  391. if (opaque) {
  392. par.ExtParam = s->ext_buffers;
  393. par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
  394. par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
  395. MFX_IOPATTERN_IN_OPAQUE_MEMORY;
  396. } else {
  397. par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
  398. MFX_IOPATTERN_IN_VIDEO_MEMORY;
  399. }
  400. par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
  401. MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  402. par.AsyncDepth = 1;
  403. par.vpp.In = frames_hwctx->surfaces[0].Info;
  404. /* Apparently VPP requires the frame rate to be set to some value, otherwise
  405. * init will fail (probably for the framerate conversion filter). Since we
  406. * are only doing data upload/download here, we just invent an arbitrary
  407. * value */
  408. par.vpp.In.FrameRateExtN = 25;
  409. par.vpp.In.FrameRateExtD = 1;
  410. par.vpp.Out = par.vpp.In;
  411. err = MFXVideoVPP_Init(*session, &par);
  412. if (err != MFX_ERR_NONE) {
  413. av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
  414. "Surface upload/download will not be possible\n");
  415. MFXClose(*session);
  416. *session = NULL;
  417. }
  418. return 0;
  419. }
/**
 * Frames-context init: validate the software format, create the internal
 * pool if the caller did not supply one, and build the tables consumed by
 * the internal transfer sessions (opaque surface pointers or MemIds).
 */
static int qsv_frames_init(AVHWFramesContext *ctx)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    uint32_t fourcc;
    int i, ret;

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ret = qsv_init_pool(ctx, fourcc);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    if (opaque) {
        /* Opaque memory: collect surface pointers for the opaque-alloc
         * ext buffer passed to the internal sessions. */
        s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = frames_hwctx->surfaces + i;

        s->opaque_alloc.In.Surfaces   = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
        s->opaque_alloc.In.Type       = frames_hwctx->frame_type;

        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
    } else {
        /* Non-opaque: collect the MemIds handed out by frame_alloc(). */
        s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
    }

    /* the transfer sessions are created lazily on first use */
    s->session_download = NULL;
    s->session_upload   = NULL;

    s->session_download_init = 0;
    s->session_upload_init   = 0;

#if HAVE_PTHREADS
    pthread_mutex_init(&s->session_lock, NULL);
    pthread_cond_init(&s->session_cond, NULL);
#endif

    return 0;
}
  470. static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
  471. {
  472. frame->buf[0] = av_buffer_pool_get(ctx->pool);
  473. if (!frame->buf[0])
  474. return AVERROR(ENOMEM);
  475. frame->data[3] = frame->buf[0]->data;
  476. frame->format = AV_PIX_FMT_QSV;
  477. frame->width = ctx->width;
  478. frame->height = ctx->height;
  479. return 0;
  480. }
  481. static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
  482. enum AVHWFrameTransferDirection dir,
  483. enum AVPixelFormat **formats)
  484. {
  485. enum AVPixelFormat *fmts;
  486. fmts = av_malloc_array(2, sizeof(*fmts));
  487. if (!fmts)
  488. return AVERROR(ENOMEM);
  489. fmts[0] = ctx->sw_format;
  490. fmts[1] = AV_PIX_FMT_NONE;
  491. *formats = fmts;
  492. return 0;
  493. }
/**
 * Fill a derived child-type (VAAPI/DXVA2) frames context with the native
 * surfaces wrapped by an existing QSV frames context (each mfx MemId holds
 * the child surface).
 */
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
                                  AVHWFramesContext *src_ctx, int flags)
{
    AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
    int i;

    switch (dst_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
                                                      sizeof(*dst_hwctx->surface_ids));
            if (!dst_hwctx->surface_ids)
                return AVERROR(ENOMEM);
            /* for VAAPI the MemId points at the VASurfaceID */
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                dst_hwctx->surface_ids[i] =
                    *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
                                                   sizeof(*dst_hwctx->surfaces));
            if (!dst_hwctx->surfaces)
                return AVERROR(ENOMEM);
            /* for DXVA2 the MemId is the IDirect3DSurface9 pointer itself */
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                dst_hwctx->surfaces[i] =
                    (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
            else
                dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    return 0;
}
  539. static int qsv_map_from(AVHWFramesContext *ctx,
  540. AVFrame *dst, const AVFrame *src, int flags)
  541. {
  542. QSVFramesContext *s = ctx->internal->priv;
  543. mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
  544. AVHWFramesContext *child_frames_ctx;
  545. const AVPixFmtDescriptor *desc;
  546. uint8_t *child_data;
  547. AVFrame *dummy;
  548. int ret = 0;
  549. if (!s->child_frames_ref)
  550. return AVERROR(ENOSYS);
  551. child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
  552. switch (child_frames_ctx->device_ctx->type) {
  553. #if CONFIG_VAAPI
  554. case AV_HWDEVICE_TYPE_VAAPI:
  555. child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
  556. break;
  557. #endif
  558. #if CONFIG_DXVA2
  559. case AV_HWDEVICE_TYPE_DXVA2:
  560. child_data = surf->Data.MemId;
  561. break;
  562. #endif
  563. default:
  564. return AVERROR(ENOSYS);
  565. }
  566. if (dst->format == child_frames_ctx->format) {
  567. ret = ff_hwframe_map_create(s->child_frames_ref,
  568. dst, src, NULL, NULL);
  569. if (ret < 0)
  570. return ret;
  571. dst->width = src->width;
  572. dst->height = src->height;
  573. dst->data[3] = child_data;
  574. return 0;
  575. }
  576. desc = av_pix_fmt_desc_get(dst->format);
  577. if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
  578. // This only supports mapping to software.
  579. return AVERROR(ENOSYS);
  580. }
  581. dummy = av_frame_alloc();
  582. if (!dummy)
  583. return AVERROR(ENOMEM);
  584. dummy->buf[0] = av_buffer_ref(src->buf[0]);
  585. dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
  586. if (!dummy->buf[0] || !dummy->hw_frames_ctx)
  587. goto fail;
  588. dummy->format = child_frames_ctx->format;
  589. dummy->width = src->width;
  590. dummy->height = src->height;
  591. dummy->data[3] = child_data;
  592. ret = av_hwframe_map(dst, dummy, flags);
  593. fail:
  594. av_frame_free(&dummy);
  595. return ret;
  596. }
/**
 * Upload/download through the child (VAAPI/DXVA2) frames context when no
 * internal VPP session is available. The direction is inferred from which
 * side is the hw frame (src->hw_frames_ctx set => download).
 */
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
                                   const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
    int download = !!src->hw_frames_ctx;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);

    AVFrame *dummy;
    int ret;

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    /* The dummy frame only borrows these pointers (buffer, child MemId,
     * frames ref); they are cleared again below, before av_frame_free(),
     * so the dummy never unrefs what it does not own. */
    dummy->format        = child_frames_ctx->format;
    dummy->width         = src->width;
    dummy->height        = src->height;
    dummy->buf[0]        = download ? src->buf[0] : dst->buf[0];
    dummy->data[3]       = surf->Data.MemId;
    dummy->hw_frames_ctx = s->child_frames_ref;

    ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
                     av_hwframe_transfer_data(dummy, src, 0);

    dummy->buf[0]        = NULL;
    dummy->data[3]       = NULL;
    dummy->hw_frames_ctx = NULL;

    av_frame_free(&dummy);

    return ret;
}
/**
 * Download a hw surface into a system-memory dst frame via the internal
 * download VPP session, falling back to a transfer through the child
 * frames context when no session could be created.
 */
static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    QSVFramesContext  *s = ctx->internal->priv;
    mfxFrameSurface1 out = {{ 0 }};
    mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret = 0;

    /* Lazily create the download session: the thread winning the trylock
     * performs the init, the others block on the condvar until the
     * session_download_init flag is set. */
    while (!s->session_download_init && !s->session_download && !ret) {
#if HAVE_PTHREADS
        if (pthread_mutex_trylock(&s->session_lock) == 0) {
#endif
            if (!s->session_download_init) {
                ret = qsv_init_internal_session(ctx, &s->session_download, 0);
                if (s->session_download)
                    s->session_download_init = 1;
            }
#if HAVE_PTHREADS
            pthread_mutex_unlock(&s->session_lock);
            pthread_cond_signal(&s->session_cond);
        } else {
            pthread_mutex_lock(&s->session_lock);
            while (!s->session_download_init && !s->session_download) {
                pthread_cond_wait(&s->session_cond, &s->session_lock);
            }
            pthread_mutex_unlock(&s->session_lock);
        }
#endif
    }
    if (ret < 0)
        return ret;

    if (!s->session_download) {
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst, src);

        av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
        return AVERROR(ENOSYS);
    }

    /* describe the destination system-memory frame as an mfx surface */
    out.Info          = in->Info;
    out.Data.PitchLow = dst->linesize[0];
    out.Data.Y        = dst->data[0];
    out.Data.U        = dst->data[1];
    out.Data.V        = dst->data[2];
    out.Data.A        = dst->data[3];

    /* retry while the device is busy, yielding briefly each time */
    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    return 0;
}
/**
 * Upload a system-memory src frame into a hw surface via the internal
 * upload VPP session, falling back to a transfer through the child frames
 * context when no session could be created. Mirrors
 * qsv_transfer_data_from().
 */
static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                const AVFrame *src)
{
    QSVFramesContext   *s = ctx->internal->priv;
    mfxFrameSurface1   in = {{ 0 }};
    mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret = 0;

    /* Lazily create the upload session: the trylock winner performs the
     * init, the others wait on the condvar for session_upload_init. */
    while (!s->session_upload_init && !s->session_upload && !ret) {
#if HAVE_PTHREADS
        if (pthread_mutex_trylock(&s->session_lock) == 0) {
#endif
            if (!s->session_upload_init) {
                ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
                if (s->session_upload)
                    s->session_upload_init = 1;
            }
#if HAVE_PTHREADS
            pthread_mutex_unlock(&s->session_lock);
            pthread_cond_signal(&s->session_cond);
        } else {
            pthread_mutex_lock(&s->session_lock);
            while (!s->session_upload_init && !s->session_upload) {
                pthread_cond_wait(&s->session_cond, &s->session_lock);
            }
            pthread_mutex_unlock(&s->session_lock);
        }
#endif
    }
    if (ret < 0)
        return ret;

    if (!s->session_upload) {
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst, src);

        av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
        return AVERROR(ENOSYS);
    }

    /* describe the source system-memory frame as an mfx surface */
    in.Info          = out->Info;
    in.Data.PitchLow = src->linesize[0];
    in.Data.Y        = src->data[0];
    in.Data.U        = src->data[1];
    in.Data.V        = src->data[2];
    in.Data.A        = src->data[3];

    /* retry while the device is busy, yielding briefly each time */
    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
/**
 * Derive a QSV frames context from a child (VAAPI/DXVA2) frames context:
 * wrap each native child surface in an internal mfxFrameSurface1 whose
 * MemId holds the child surface.
 */
static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
                                AVHWFramesContext *src_ctx, int flags)
{
    QSVFramesContext      *s = dst_ctx->internal->priv;
    AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
    int i;

    switch (src_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
            s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
                                                    sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                /* NOTE(review): the return value of qsv_init_surface() is
                 * ignored here — an unsupported sw format would go
                 * unnoticed; consider propagating it. */
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            dst_hwctx->frame_type  = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
            s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
                                                    sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
                dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
            else
                dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    dst_hwctx->surfaces = s->surfaces_internal;

    return 0;
}
/**
 * Map a child-format (VAAPI/DXVA2) frame to QSV: locate the mfx surface in
 * the destination frames context whose MemId corresponds to the native
 * surface in src->data[3].
 *
 * NOTE(review): if CONFIG_VAAPI and CONFIG_DXVA2 were both enabled, both
 * comparisons below would run for every surface regardless of the actual
 * child type — this relies on only one backend being compiled in per
 * build; confirm.
 */
static int qsv_map_to(AVHWFramesContext *dst_ctx,
                      AVFrame *dst, const AVFrame *src, int flags)
{
    AVQSVFramesContext *hwctx = dst_ctx->hwctx;
    int i, err;

    for (i = 0; i < hwctx->nb_surfaces; i++) {
#if CONFIG_VAAPI
        /* VAAPI: MemId points at the VASurfaceID */
        if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
            (VASurfaceID)(uintptr_t)src->data[3])
            break;
#endif
#if CONFIG_DXVA2
        /* DXVA2: MemId is the IDirect3DSurface9 pointer itself */
        if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
            (IDirect3DSurface9*)(uintptr_t)src->data[3])
            break;
#endif
    }
    if (i >= hwctx->nb_surfaces) {
        av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
               "is not in the mapped frames context.\n");
        return AVERROR(EINVAL);
    }

    err = ff_hwframe_map_create(dst->hw_frames_ctx,
                                dst, src, NULL, NULL);
    if (err)
        return err;

    dst->width   = src->width;
    dst->height  = src->height;
    dst->data[3] = (uint8_t*)&hwctx->surfaces[i];

    return 0;
}
  828. static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
  829. const void *hwconfig,
  830. AVHWFramesConstraints *constraints)
  831. {
  832. int i;
  833. constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
  834. sizeof(*constraints->valid_sw_formats));
  835. if (!constraints->valid_sw_formats)
  836. return AVERROR(ENOMEM);
  837. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
  838. constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
  839. constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
  840. constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
  841. if (!constraints->valid_hw_formats)
  842. return AVERROR(ENOMEM);
  843. constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
  844. constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
  845. return 0;
  846. }
  847. static void qsv_device_free(AVHWDeviceContext *ctx)
  848. {
  849. AVQSVDeviceContext *hwctx = ctx->hwctx;
  850. QSVDevicePriv *priv = ctx->user_opaque;
  851. if (hwctx->session)
  852. MFXClose(hwctx->session);
  853. av_buffer_unref(&priv->child_device_ctx);
  854. av_freep(&priv);
  855. }
  856. static mfxIMPL choose_implementation(const char *device)
  857. {
  858. static const struct {
  859. const char *name;
  860. mfxIMPL impl;
  861. } impl_map[] = {
  862. { "auto", MFX_IMPL_AUTO },
  863. { "sw", MFX_IMPL_SOFTWARE },
  864. { "hw", MFX_IMPL_HARDWARE },
  865. { "auto_any", MFX_IMPL_AUTO_ANY },
  866. { "hw_any", MFX_IMPL_HARDWARE_ANY },
  867. { "hw2", MFX_IMPL_HARDWARE2 },
  868. { "hw3", MFX_IMPL_HARDWARE3 },
  869. { "hw4", MFX_IMPL_HARDWARE4 },
  870. };
  871. mfxIMPL impl = MFX_IMPL_AUTO_ANY;
  872. int i;
  873. if (device) {
  874. for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
  875. if (!strcmp(device, impl_map[i].name)) {
  876. impl = impl_map[i].impl;
  877. break;
  878. }
  879. if (i == FF_ARRAY_ELEMS(impl_map))
  880. impl = strtol(device, NULL, 0);
  881. }
  882. return impl;
  883. }
  884. static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
  885. mfxIMPL implementation,
  886. AVHWDeviceContext *child_device_ctx,
  887. int flags)
  888. {
  889. AVQSVDeviceContext *hwctx = ctx->hwctx;
  890. mfxVersion ver = { { 3, 1 } };
  891. mfxHDL handle;
  892. mfxHandleType handle_type;
  893. mfxStatus err;
  894. int ret;
  895. switch (child_device_ctx->type) {
  896. #if CONFIG_VAAPI
  897. case AV_HWDEVICE_TYPE_VAAPI:
  898. {
  899. AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  900. handle_type = MFX_HANDLE_VA_DISPLAY;
  901. handle = (mfxHDL)child_device_hwctx->display;
  902. }
  903. break;
  904. #endif
  905. #if CONFIG_DXVA2
  906. case AV_HWDEVICE_TYPE_DXVA2:
  907. {
  908. AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  909. handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
  910. handle = (mfxHDL)child_device_hwctx->devmgr;
  911. }
  912. break;
  913. #endif
  914. default:
  915. ret = AVERROR(ENOSYS);
  916. goto fail;
  917. }
  918. err = MFXInit(implementation, &ver, &hwctx->session);
  919. if (err != MFX_ERR_NONE) {
  920. av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
  921. "%d.\n", err);
  922. ret = AVERROR_UNKNOWN;
  923. goto fail;
  924. }
  925. err = MFXQueryVersion(hwctx->session, &ver);
  926. if (err != MFX_ERR_NONE) {
  927. av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
  928. ret = AVERROR_UNKNOWN;
  929. goto fail;
  930. }
  931. av_log(ctx, AV_LOG_VERBOSE,
  932. "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
  933. MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
  934. MFXClose(hwctx->session);
  935. err = MFXInit(implementation, &ver, &hwctx->session);
  936. if (err != MFX_ERR_NONE) {
  937. av_log(ctx, AV_LOG_ERROR,
  938. "Error initializing an MFX session: %d.\n", err);
  939. ret = AVERROR_UNKNOWN;
  940. goto fail;
  941. }
  942. err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
  943. if (err != MFX_ERR_NONE) {
  944. av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
  945. "%d\n", err);
  946. ret = AVERROR_UNKNOWN;
  947. goto fail;
  948. }
  949. ret = MFXQueryVersion(hwctx->session,&ver);
  950. if (ret == MFX_ERR_NONE) {
  951. av_log(ctx, AV_LOG_VERBOSE, "MFX compile/runtime API: %d.%d/%d.%d\n",
  952. MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
  953. }
  954. return 0;
  955. fail:
  956. if (hwctx->session)
  957. MFXClose(hwctx->session);
  958. return ret;
  959. }
  960. static int qsv_device_derive(AVHWDeviceContext *ctx,
  961. AVHWDeviceContext *child_device_ctx, int flags)
  962. {
  963. return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
  964. child_device_ctx, flags);
  965. }
  966. static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
  967. AVDictionary *opts, int flags)
  968. {
  969. QSVDevicePriv *priv;
  970. enum AVHWDeviceType child_device_type;
  971. AVHWDeviceContext *child_device;
  972. AVDictionaryEntry *e;
  973. mfxIMPL impl;
  974. int ret;
  975. priv = av_mallocz(sizeof(*priv));
  976. if (!priv)
  977. return AVERROR(ENOMEM);
  978. ctx->user_opaque = priv;
  979. ctx->free = qsv_device_free;
  980. e = av_dict_get(opts, "child_device", NULL, 0);
  981. if (CONFIG_VAAPI)
  982. child_device_type = AV_HWDEVICE_TYPE_VAAPI;
  983. else if (CONFIG_DXVA2)
  984. child_device_type = AV_HWDEVICE_TYPE_DXVA2;
  985. else {
  986. av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
  987. return AVERROR(ENOSYS);
  988. }
  989. ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
  990. e ? e->value : NULL, NULL, 0);
  991. if (ret < 0)
  992. return ret;
  993. child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
  994. impl = choose_implementation(device);
  995. return qsv_device_derive_from_child(ctx, impl, child_device, 0);
  996. }
/* Registration table for the QSV hwcontext: wires the callbacks defined in
 * this file into the generic AVHWDeviceContext/AVHWFramesContext
 * machinery. The only hardware pixel format exposed is AV_PIX_FMT_QSV. */
const HWContextType ff_hwcontext_type_qsv = {
    .type                   = AV_HWDEVICE_TYPE_QSV,
    .name                   = "QSV",

    .device_hwctx_size      = sizeof(AVQSVDeviceContext),
    .device_priv_size       = sizeof(QSVDeviceContext),
    .frames_hwctx_size      = sizeof(AVQSVFramesContext),
    .frames_priv_size       = sizeof(QSVFramesContext),

    .device_create          = qsv_device_create,
    .device_derive          = qsv_device_derive,
    .device_init            = qsv_device_init,
    .frames_get_constraints = qsv_frames_get_constraints,
    .frames_init            = qsv_frames_init,
    .frames_uninit          = qsv_frames_uninit,
    .frames_get_buffer      = qsv_get_buffer,
    .transfer_get_formats   = qsv_transfer_get_formats,
    .transfer_data_to       = qsv_transfer_data_to,
    .transfer_data_from     = qsv_transfer_data_from,
    .map_to                 = qsv_map_to,
    .map_from               = qsv_map_from,
    .frames_derive_to       = qsv_frames_derive_to,
    .frames_derive_from     = qsv_frames_derive_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
};