/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <windows.h>

// Include thread.h before redefining _WIN32_WINNT, to get
// the right implementation for AVOnce
#include "thread.h"

#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0600
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0600
#endif
#define COBJMACROS

#include <initguid.h>
#include <d3d11.h>
#include <dxgi1_2.h>

#if HAVE_DXGIDEBUG_H
#include <dxgidebug.h>
#endif

#include "avassert.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_d3d11va.h"
#include "hwcontext_internal.h"
#include "imgutils.h"
#include "pixdesc.h"
#include "pixfmt.h"

typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);

static AVOnce functions_loaded = AV_ONCE_INIT;

static PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
static PFN_D3D11_CREATE_DEVICE mD3D11CreateDevice;
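
// Resolve D3D11CreateDevice and CreateDXGIFactory dynamically at runtime;
// on UWP, where LoadLibrary is unavailable, the statically linked entry
// points are used instead.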
static av_cold void load_functions(void)
{
#if !HAVE_UWP
    // We let these "leak" - this is fine, as unloading has no great benefit, and
    // Windows will mark a DLL as loaded forever if its internal refcount overflows
    // from too many LoadLibrary calls.
    HANDLE d3dlib, dxgilib;

    d3dlib  = LoadLibrary("d3d11.dll");
    dxgilib = LoadLibrary("dxgi.dll");
    if (!d3dlib || !dxgilib)
        return;

    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory");
#else
    // In UWP (which lacks LoadLibrary), CreateDXGIFactory isn't available,
    // only CreateDXGIFactory1
    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) D3D11CreateDevice;
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) CreateDXGIFactory1;
#endif
}

typedef struct D3D11VAFramesContext {
    int nb_surfaces_used;

    DXGI_FORMAT format;

    ID3D11Texture2D *staging_texture;
} D3D11VAFramesContext;

static const struct {
    DXGI_FORMAT d3d_format;
    enum AVPixelFormat pix_fmt;
} supported_formats[] = {
    { DXGI_FORMAT_NV12,       AV_PIX_FMT_NV12 },
    { DXGI_FORMAT_P010,       AV_PIX_FMT_P010 },
    // Special opaque formats. The pix_fmt is merely a placeholder, as the
    // opaque format cannot be accessed directly.
    { DXGI_FORMAT_420_OPAQUE, AV_PIX_FMT_YUV420P },
};
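
// Default device lock/unlock callbacks, used when the caller does not supply
// its own; ctx is the Win32 mutex created in d3d11va_device_init().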
static void d3d11va_default_lock(void *ctx)
{
    WaitForSingleObjectEx(ctx, INFINITE, FALSE);
}

static void d3d11va_default_unlock(void *ctx)
{
    ReleaseMutex(ctx);
}

static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
{
    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;

    if (frames_hwctx->texture)
        ID3D11Texture2D_Release(frames_hwctx->texture);
    frames_hwctx->texture = NULL;

    if (s->staging_texture)
        ID3D11Texture2D_Release(s->staging_texture);
    s->staging_texture = NULL;
}

static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx,
                                          const void *hwconfig,
                                          AVHWFramesConstraints *constraints)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
    int nb_sw_formats = 0;
    HRESULT hr;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_formats) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        UINT format_support = 0;
        hr = ID3D11Device_CheckFormatSupport(device_hwctx->device, supported_formats[i].d3d_format, &format_support);
        if (SUCCEEDED(hr) && (format_support & D3D11_FORMAT_SUPPORT_TEXTURE2D))
            constraints->valid_sw_formats[nb_sw_formats++] = supported_formats[i].pix_fmt;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_D3D11;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static void free_texture(void *opaque, uint8_t *data)
{
    ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
    av_free(data);
}
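
// Wrap a texture (plus its array slice index) into an AVD3D11FrameDescriptor
// owned by an AVBufferRef; freeing the buffer releases the texture reference.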
static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
{
    AVBufferRef *buf;
    AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
    if (!desc) {
        ID3D11Texture2D_Release(tex);
        return NULL;
    }

    desc->texture = tex;
    desc->index   = index;

    buf = av_buffer_create((uint8_t *)desc, sizeof(desc), free_texture, tex, 0);
    if (!buf) {
        ID3D11Texture2D_Release(tex);
        av_free(desc);
        return NULL;
    }

    return buf;
}
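
// Allocate a standalone single-slice texture for one frame; used by the pool
// allocator when no shared texture array is present (hwctx->texture is NULL).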
static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
{
    D3D11VAFramesContext       *s = ctx->internal->priv;
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    HRESULT hr;
    ID3D11Texture2D *tex;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = 1,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
        return NULL;
    }

    return wrap_texture_buf(tex, 0);
}
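
// Buffer pool allocator: hand out the next unused slice of the shared texture
// array, or fall back to allocating an individual texture per frame.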
static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext        *ctx = (AVHWFramesContext*)opaque;
    D3D11VAFramesContext       *s = ctx->internal->priv;
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    D3D11_TEXTURE2D_DESC  texDesc;

    if (!hwctx->texture)
        return d3d11va_alloc_single(ctx);

    ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);

    if (s->nb_surfaces_used >= texDesc.ArraySize) {
        av_log(ctx, AV_LOG_ERROR, "Static surface pool size exceeded.\n");
        return NULL;
    }

    ID3D11Texture2D_AddRef(hwctx->texture);
    return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
}
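
// Map sw_format to a DXGI format, validate or create the shared texture array,
// and set up the internal buffer pool.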
static int d3d11va_frames_init(AVHWFramesContext *ctx)
{
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext       *s = ctx->internal->priv;

    int i;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc;

    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        if (ctx->sw_format == supported_formats[i].pix_fmt) {
            s->format = supported_formats[i].d3d_format;
            break;
        }
    }
    if (i == FF_ARRAY_ELEMS(supported_formats)) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(EINVAL);
    }

    texDesc = (D3D11_TEXTURE2D_DESC){
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = ctx->initial_pool_size,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    if (hwctx->texture) {
        D3D11_TEXTURE2D_DESC texDesc2;
        ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);

        if (texDesc.Width != texDesc2.Width ||
            texDesc.Height != texDesc2.Height ||
            texDesc.Format != texDesc2.Format) {
            av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
            return AVERROR(EINVAL);
        }
    } else if (texDesc.ArraySize > 0) {
        hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
            return AVERROR_UNKNOWN;
        }
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
                                                        ctx, d3d11va_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    return 0;
}
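
// Fill an AVFrame from the pool: for AV_PIX_FMT_D3D11, data[0] holds the
// ID3D11Texture2D and data[1] the texture array index.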
static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    AVD3D11FrameDescriptor *desc;

    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    desc = (AVD3D11FrameDescriptor *)frame->buf[0]->data;

    frame->data[0] = (uint8_t *)desc->texture;
    frame->data[1] = (uint8_t *)desc->index;

    frame->format  = AV_PIX_FMT_D3D11;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}
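
// Transfers to/from system memory only support the frame context's sw_format;
// opaque decoder surfaces cannot be transferred at all.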
static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
                                        enum AVHWFrameTransferDirection dir,
                                        enum AVPixelFormat **formats)
{
    D3D11VAFramesContext *s = ctx->internal->priv;
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    // Don't signal support for opaque formats. Actual access would fail.
    if (s->format == DXGI_FORMAT_420_OPAQUE)
        fmts[0] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}
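
// Lazily create a single CPU-accessible staging texture that upload/download
// copies go through (D3D11_USAGE_DEFAULT textures cannot be mapped directly).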
static int d3d11va_create_staging_texture(AVHWFramesContext *ctx)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext       *s = ctx->internal->priv;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width          = ctx->width,
        .Height         = ctx->height,
        .MipLevels      = 1,
        .Format         = s->format,
        .SampleDesc     = { .Count = 1 },
        .ArraySize      = 1,
        .Usage          = D3D11_USAGE_STAGING,
        .CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    return 0;
}
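
// Derive per-plane data pointers and linesizes for a mapped staging texture,
// using the sw_format plane layout and the mapped RowPitch.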
static void fill_texture_ptrs(uint8_t *data[4], int linesize[4],
                              AVHWFramesContext *ctx,
                              D3D11_TEXTURE2D_DESC *desc,
                              D3D11_MAPPED_SUBRESOURCE *map)
{
    int i;

    for (i = 0; i < 4; i++)
        linesize[i] = map->RowPitch;

    av_image_fill_pointers(data, ctx->sw_format, desc->Height,
                           (uint8_t*)map->pData, linesize);
}
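
// Upload/download between a D3D11 frame and system memory: copy through the
// staging texture, which is mapped for CPU access while the device lock is held.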
static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
                                 const AVFrame *src)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext       *s = ctx->internal->priv;
    int download = src->format == AV_PIX_FMT_D3D11;
    const AVFrame *frame = download ? src : dst;
    const AVFrame *other = download ? dst : src;
    // (The interface types are compatible.)
    ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
    int index = (intptr_t)frame->data[1];
    ID3D11Resource *staging;
    int w = FFMIN(dst->width,  src->width);
    int h = FFMIN(dst->height, src->height);
    uint8_t *map_data[4];
    int map_linesize[4];
    D3D11_TEXTURE2D_DESC desc;
    D3D11_MAPPED_SUBRESOURCE map;
    HRESULT hr;

    if (frame->hw_frames_ctx->data != (uint8_t *)ctx || other->format != ctx->sw_format)
        return AVERROR(EINVAL);

    device_hwctx->lock(device_hwctx->lock_ctx);

    if (!s->staging_texture) {
        int res = d3d11va_create_staging_texture(ctx);
        if (res < 0) {
            // Release the device lock on the error path as well.
            device_hwctx->unlock(device_hwctx->lock_ctx);
            return res;
        }
    }

    staging = (ID3D11Resource *)s->staging_texture;

    ID3D11Texture2D_GetDesc(s->staging_texture, &desc);

    if (download) {
        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  staging, 0, 0, 0, 0,
                                                  texture, index, NULL);

        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_READ, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy(dst->data, dst->linesize, map_data, map_linesize,
                      ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
    } else {
        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_WRITE, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy(map_data, map_linesize, src->data, src->linesize,
                      ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);

        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  texture, index, 0, 0, 0,
                                                  staging, 0, NULL);
    }

    device_hwctx->unlock(device_hwctx->lock_ctx);
    return 0;

map_failed:
    av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface (%lx)\n", (long)hr);
    device_hwctx->unlock(device_hwctx->lock_ctx);
    return AVERROR_UNKNOWN;
}
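
// Finish initializing a device context: install default locking if needed and
// derive the immediate context, video device and video context interfaces.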
static int d3d11va_device_init(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
    HRESULT hr;

    if (!device_hwctx->lock) {
        device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
        if (device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {
            av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
            return AVERROR(EINVAL);
        }
        device_hwctx->lock   = d3d11va_default_lock;
        device_hwctx->unlock = d3d11va_default_unlock;
    }

    if (!device_hwctx->device_context) {
        ID3D11Device_GetImmediateContext(device_hwctx->device, &device_hwctx->device_context);
        if (!device_hwctx->device_context)
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_device) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
                                                (void **)&device_hwctx->video_device);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_context) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
                                                (void **)&device_hwctx->video_context);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    return 0;
}
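
// Release all interfaces owned by the device context and tear down the default
// mutex if it was created by d3d11va_device_init().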
static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;

    if (device_hwctx->device) {
        ID3D11Device_Release(device_hwctx->device);
        device_hwctx->device = NULL;
    }

    if (device_hwctx->device_context) {
        ID3D11DeviceContext_Release(device_hwctx->device_context);
        device_hwctx->device_context = NULL;
    }

    if (device_hwctx->video_device) {
        ID3D11VideoDevice_Release(device_hwctx->video_device);
        device_hwctx->video_device = NULL;
    }

    if (device_hwctx->video_context) {
        ID3D11VideoContext_Release(device_hwctx->video_context);
        device_hwctx->video_context = NULL;
    }

    if (device_hwctx->lock == d3d11va_default_lock) {
        CloseHandle(device_hwctx->lock_ctx);
        device_hwctx->lock_ctx = INVALID_HANDLE_VALUE;
        device_hwctx->lock = NULL;
    }
}
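
// Create a new D3D11 device with video support; "device" selects a DXGI
// adapter by index, and the "debug" option enables the D3D11/DXGI debug layers.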
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
                                 AVDictionary *opts, int flags)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;

    HRESULT hr;
    IDXGIAdapter      *pAdapter = NULL;
    ID3D10Multithread *pMultithread;
    UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
    int is_debug       = !!av_dict_get(opts, "debug", NULL, 0);
    int ret;

    // (On UWP we can't check this.)
#if !HAVE_UWP
    if (!LoadLibrary("d3d11_1sdklayers.dll"))
        is_debug = 0;
#endif

    if (is_debug)
        creationFlags |= D3D11_CREATE_DEVICE_DEBUG;

    if ((ret = ff_thread_once(&functions_loaded, load_functions)) != 0)
        return AVERROR_UNKNOWN;
    if (!mD3D11CreateDevice || !mCreateDXGIFactory) {
        av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library or its functions\n");
        return AVERROR_UNKNOWN;
    }

    if (device) {
        IDXGIFactory2 *pDXGIFactory;
        hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
        if (SUCCEEDED(hr)) {
            int adapter = atoi(device);
            if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
                pAdapter = NULL;
            IDXGIFactory2_Release(pDXGIFactory);
        }
    }

    hr = mD3D11CreateDevice(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
                            D3D11_SDK_VERSION, &device_hwctx->device, NULL, NULL);
    if (pAdapter) {
        // Use a separate HRESULT here so that a failure of GetDesc (or its
        // success) does not clobber the D3D11CreateDevice result checked below.
        DXGI_ADAPTER_DESC desc;
        HRESULT hr2 = IDXGIAdapter_GetDesc(pAdapter, &desc);
        if (!FAILED(hr2)) {
            av_log(ctx, AV_LOG_INFO, "Using device %04x:%04x (%ls).\n",
                   desc.VendorId, desc.DeviceId, desc.Description);
        }
        IDXGIAdapter_Release(pAdapter);
    }
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread, (void **)&pMultithread);
    if (SUCCEEDED(hr)) {
        ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
        ID3D10Multithread_Release(pMultithread);
    }

#if !HAVE_UWP && HAVE_DXGIDEBUG_H
    if (is_debug) {
        HANDLE dxgidebug_dll = LoadLibrary("dxgidebug.dll");
        if (dxgidebug_dll) {
            HRESULT (WINAPI * pf_DXGIGetDebugInterface)(const GUID *riid, void **ppDebug)
                = (void *)GetProcAddress(dxgidebug_dll, "DXGIGetDebugInterface");
            if (pf_DXGIGetDebugInterface) {
                IDXGIDebug *dxgi_debug = NULL;
                hr = pf_DXGIGetDebugInterface(&IID_IDXGIDebug, (void**)&dxgi_debug);
                if (SUCCEEDED(hr) && dxgi_debug)
                    IDXGIDebug_ReportLiveObjects(dxgi_debug, DXGI_DEBUG_ALL, DXGI_DEBUG_RLO_ALL);
            }
        }
    }
#endif

    return 0;
}

const HWContextType ff_hwcontext_type_d3d11va = {
    .type                   = AV_HWDEVICE_TYPE_D3D11VA,
    .name                   = "D3D11VA",

    .device_hwctx_size      = sizeof(AVD3D11VADeviceContext),
    .frames_hwctx_size      = sizeof(AVD3D11VAFramesContext),
    .frames_priv_size       = sizeof(D3D11VAFramesContext),

    .device_create          = d3d11va_device_create,
    .device_init            = d3d11va_device_init,
    .device_uninit          = d3d11va_device_uninit,
    .frames_get_constraints = d3d11va_frames_get_constraints,
    .frames_init            = d3d11va_frames_init,
    .frames_uninit          = d3d11va_frames_uninit,
    .frames_get_buffer      = d3d11va_get_buffer,
    .transfer_get_formats   = d3d11va_transfer_get_formats,
    .transfer_data_to       = d3d11va_transfer_data,
    .transfer_data_from     = d3d11va_transfer_data,

    .pix_fmts               = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11, AV_PIX_FMT_NONE },
};