/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <windows.h>

#define COBJMACROS

#include <initguid.h>
#include <d3d11.h>
#include <dxgi1_2.h>

#if HAVE_DXGIDEBUG_H
#include <dxgidebug.h>
#endif

#include "avassert.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_d3d11va.h"
#include "hwcontext_internal.h"
#include "imgutils.h"
#include "pixdesc.h"
#include "pixfmt.h"
#include "thread.h"

#include "compat/w32dlfcn.h"

typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);

static AVOnce functions_loaded = AV_ONCE_INIT;

static PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
static PFN_D3D11_CREATE_DEVICE mD3D11CreateDevice;
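
/* Resolve D3D11CreateDevice and CreateDXGIFactory at runtime so that the
 * library does not hard-link against d3d11.dll/dxgi.dll on desktop Windows;
 * on UWP the entry points are linked directly instead. */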
static av_cold void load_functions(void)
{
#if !HAVE_UWP
    // We let these "leak" - this is fine, as unloading has no great benefit, and
    // Windows will mark a DLL as loaded forever if its internal refcount overflows
    // from too many LoadLibrary calls.
    HANDLE d3dlib, dxgilib;

    d3dlib  = dlopen("d3d11.dll", 0);
    dxgilib = dlopen("dxgi.dll", 0);
    if (!d3dlib || !dxgilib)
        return;

    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory");
#else
    // In UWP (which lacks LoadLibrary), CreateDXGIFactory isn't available,
    // only CreateDXGIFactory1
    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) D3D11CreateDevice;
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) CreateDXGIFactory1;
#endif
}

typedef struct D3D11VAFramesContext {
    int nb_surfaces_used;

    DXGI_FORMAT format;

    ID3D11Texture2D *staging_texture;
} D3D11VAFramesContext;

static const struct {
    DXGI_FORMAT d3d_format;
    enum AVPixelFormat pix_fmt;
} supported_formats[] = {
    { DXGI_FORMAT_NV12,       AV_PIX_FMT_NV12 },
    { DXGI_FORMAT_P010,       AV_PIX_FMT_P010 },
    // Special opaque formats. The pix_fmt is merely a place holder, as the
    // opaque format cannot be accessed directly.
    { DXGI_FORMAT_420_OPAQUE, AV_PIX_FMT_YUV420P },
};
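
/* Default lock/unlock callbacks, backed by a Win32 mutex created in
 * d3d11va_device_init() when the caller does not provide its own locking. */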
static void d3d11va_default_lock(void *ctx)
{
    WaitForSingleObjectEx(ctx, INFINITE, FALSE);
}

static void d3d11va_default_unlock(void *ctx)
{
    ReleaseMutex(ctx);
}

static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
{
    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;

    if (frames_hwctx->texture)
        ID3D11Texture2D_Release(frames_hwctx->texture);
    frames_hwctx->texture = NULL;

    if (s->staging_texture)
        ID3D11Texture2D_Release(s->staging_texture);
    s->staging_texture = NULL;
}
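
/* Report the supported software and hardware formats: each entry of
 * supported_formats is probed with CheckFormatSupport, and the only hardware
 * format is AV_PIX_FMT_D3D11. */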
static int d3d11va_frames_get_constraints(AVHWDeviceContext *ctx,
                                          const void *hwconfig,
                                          AVHWFramesConstraints *constraints)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
    int nb_sw_formats = 0;
    HRESULT hr;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_formats) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        UINT format_support = 0;
        hr = ID3D11Device_CheckFormatSupport(device_hwctx->device, supported_formats[i].d3d_format, &format_support);
        if (SUCCEEDED(hr) && (format_support & D3D11_FORMAT_SUPPORT_TEXTURE2D))
            constraints->valid_sw_formats[nb_sw_formats++] = supported_formats[i].pix_fmt;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_D3D11;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static void free_texture(void *opaque, uint8_t *data)
{
    ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
    av_free(data);
}
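
/* Wrap a texture and its array slice index in an AVD3D11FrameDescriptor and
 * return it as an AVBufferRef; the buffer's free callback releases the
 * texture reference and frees the descriptor. On failure, the texture
 * reference passed in is released. */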
static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
{
    AVBufferRef *buf;
    AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));
    if (!desc) {
        ID3D11Texture2D_Release(tex);
        return NULL;
    }

    desc->texture = tex;
    desc->index   = index;

    buf = av_buffer_create((uint8_t *)desc, sizeof(desc), free_texture, tex, 0);
    if (!buf) {
        ID3D11Texture2D_Release(tex);
        av_free(desc);
        return NULL;
    }

    return buf;
}

static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
{
    D3D11VAFramesContext         *s = ctx->internal->priv;
    AVD3D11VAFramesContext   *hwctx = ctx->hwctx;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    HRESULT hr;
    ID3D11Texture2D *tex;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = 1,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
        return NULL;
    }

    return wrap_texture_buf(tex, 0);
}
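
/* Pool allocator: with a fixed-size pool, frames are handed out as slices of
 * the single texture array in hwctx->texture; otherwise each frame gets its
 * own standalone texture via d3d11va_alloc_single(). */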
static AVBufferRef *d3d11va_pool_alloc(void *opaque, buffer_size_t size)
{
    AVHWFramesContext        *ctx = (AVHWFramesContext*)opaque;
    D3D11VAFramesContext       *s = ctx->internal->priv;
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    D3D11_TEXTURE2D_DESC  texDesc;

    if (!hwctx->texture)
        return d3d11va_alloc_single(ctx);

    ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);

    if (s->nb_surfaces_used >= texDesc.ArraySize) {
        av_log(ctx, AV_LOG_ERROR, "Static surface pool size exceeded.\n");
        return NULL;
    }

    ID3D11Texture2D_AddRef(hwctx->texture);
    return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
}
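
/* Map the requested sw_format to a DXGI format, validate any user-provided
 * texture against the frame parameters, and otherwise create the texture
 * array that backs a fixed-size pool (ArraySize = initial_pool_size). */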
static int d3d11va_frames_init(AVHWFramesContext *ctx)
{
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext       *s = ctx->internal->priv;

    int i;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc;

    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        if (ctx->sw_format == supported_formats[i].pix_fmt) {
            s->format = supported_formats[i].d3d_format;
            break;
        }
    }
    if (i == FF_ARRAY_ELEMS(supported_formats)) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(EINVAL);
    }

    texDesc = (D3D11_TEXTURE2D_DESC){
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = ctx->initial_pool_size,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    if (hwctx->texture) {
        D3D11_TEXTURE2D_DESC texDesc2;
        ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);

        if (texDesc.Width != texDesc2.Width ||
            texDesc.Height != texDesc2.Height ||
            texDesc.Format != texDesc2.Format) {
            av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
            return AVERROR(EINVAL);
        }
    } else if (texDesc.ArraySize > 0) {
        hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
            return AVERROR_UNKNOWN;
        }
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
                                                        ctx, d3d11va_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    return 0;
}
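
/* Export a pooled surface as an AV_PIX_FMT_D3D11 frame: data[0] carries the
 * ID3D11Texture2D pointer and data[1] the texture array index. */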
static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    AVD3D11FrameDescriptor *desc;

    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    desc = (AVD3D11FrameDescriptor *)frame->buf[0]->data;

    frame->data[0] = (uint8_t *)desc->texture;
    frame->data[1] = (uint8_t *)desc->index;
    frame->format  = AV_PIX_FMT_D3D11;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
                                        enum AVHWFrameTransferDirection dir,
                                        enum AVPixelFormat **formats)
{
    D3D11VAFramesContext *s = ctx->internal->priv;
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    // Don't signal support for opaque formats. Actual access would fail.
    if (s->format == DXGI_FORMAT_420_OPAQUE)
        fmts[0] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}
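
/* Lazily create a single-slice staging texture (a CPU-readable/writable copy
 * target) used for uploads and downloads in d3d11va_transfer_data(). */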
static int d3d11va_create_staging_texture(AVHWFramesContext *ctx)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext       *s = ctx->internal->priv;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width          = ctx->width,
        .Height         = ctx->height,
        .MipLevels      = 1,
        .Format         = s->format,
        .SampleDesc     = { .Count = 1 },
        .ArraySize      = 1,
        .Usage          = D3D11_USAGE_STAGING,
        .CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static void fill_texture_ptrs(uint8_t *data[4], int linesize[4],
                              AVHWFramesContext *ctx,
                              D3D11_TEXTURE2D_DESC *desc,
                              D3D11_MAPPED_SUBRESOURCE *map)
{
    int i;

    for (i = 0; i < 4; i++)
        linesize[i] = map->RowPitch;

    av_image_fill_pointers(data, ctx->sw_format, desc->Height,
                           (uint8_t*)map->pData, linesize);
}
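
/* Copy between a D3D11 surface and a software frame. Downloads go
 * GPU texture -> staging texture -> mapped CPU memory; uploads go the other
 * way around. The device context is locked for the duration of the transfer. */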
static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
                                 const AVFrame *src)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext       *s = ctx->internal->priv;
    int download = src->format == AV_PIX_FMT_D3D11;
    const AVFrame *frame = download ? src : dst;
    const AVFrame *other = download ? dst : src;
    // (The interface types are compatible.)
    ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
    int index = (intptr_t)frame->data[1];
    ID3D11Resource *staging;
    int w = FFMIN(dst->width,  src->width);
    int h = FFMIN(dst->height, src->height);
    uint8_t *map_data[4];
    int map_linesize[4];
    D3D11_TEXTURE2D_DESC desc;
    D3D11_MAPPED_SUBRESOURCE map;
    HRESULT hr;

    if (frame->hw_frames_ctx->data != (uint8_t *)ctx || other->format != ctx->sw_format)
        return AVERROR(EINVAL);

    device_hwctx->lock(device_hwctx->lock_ctx);

    if (!s->staging_texture) {
        int res = d3d11va_create_staging_texture(ctx);
        if (res < 0) {
            // Release the device lock on the error path as well.
            device_hwctx->unlock(device_hwctx->lock_ctx);
            return res;
        }
    }

    staging = (ID3D11Resource *)s->staging_texture;

    ID3D11Texture2D_GetDesc(s->staging_texture, &desc);

    if (download) {
        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  staging, 0, 0, 0, 0,
                                                  texture, index, NULL);

        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_READ, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy(dst->data, dst->linesize, (const uint8_t **)map_data, map_linesize,
                      ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
    } else {
        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_WRITE, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy(map_data, map_linesize, (const uint8_t **)src->data, src->linesize,
                      ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);

        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  texture, index, 0, 0, 0,
                                                  staging, 0, NULL);
    }

    device_hwctx->unlock(device_hwctx->lock_ctx);
    return 0;

map_failed:
    av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface (%lx)\n", (long)hr);
    device_hwctx->unlock(device_hwctx->lock_ctx);
    return AVERROR_UNKNOWN;
}
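
/* Finish setting up a device context: install the default mutex-based lock
 * if none was provided, and fill in the immediate device context, video
 * device and video context interfaces if the caller did not set them. */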
static int d3d11va_device_init(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
    HRESULT hr;

    if (!device_hwctx->lock) {
        device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
        if (device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {
            av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
            return AVERROR(EINVAL);
        }
        device_hwctx->lock   = d3d11va_default_lock;
        device_hwctx->unlock = d3d11va_default_unlock;
    }

    if (!device_hwctx->device_context) {
        ID3D11Device_GetImmediateContext(device_hwctx->device, &device_hwctx->device_context);
        if (!device_hwctx->device_context)
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_device) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
                                                (void **)&device_hwctx->video_device);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_context) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
                                                (void **)&device_hwctx->video_context);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    return 0;
}

static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;

    if (device_hwctx->device) {
        ID3D11Device_Release(device_hwctx->device);
        device_hwctx->device = NULL;
    }

    if (device_hwctx->device_context) {
        ID3D11DeviceContext_Release(device_hwctx->device_context);
        device_hwctx->device_context = NULL;
    }

    if (device_hwctx->video_device) {
        ID3D11VideoDevice_Release(device_hwctx->video_device);
        device_hwctx->video_device = NULL;
    }

    if (device_hwctx->video_context) {
        ID3D11VideoContext_Release(device_hwctx->video_context);
        device_hwctx->video_context = NULL;
    }

    if (device_hwctx->lock == d3d11va_default_lock) {
        CloseHandle(device_hwctx->lock_ctx);
        device_hwctx->lock_ctx = INVALID_HANDLE_VALUE;
        device_hwctx->lock = NULL;
    }
}
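
/* Create an ID3D11Device for the adapter named by "device" (interpreted as a
 * DXGI adapter index), enable multithread protection on it, and optionally
 * enable the D3D11 debug layer when the "debug" option is set. */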
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
                                 AVDictionary *opts, int flags)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;

    HRESULT hr;
    IDXGIAdapter      *pAdapter = NULL;
    ID3D10Multithread *pMultithread;
    UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
    int is_debug       = !!av_dict_get(opts, "debug", NULL, 0);
    int ret;

    // (On UWP we can't check this.)
#if !HAVE_UWP
    if (!LoadLibrary("d3d11_1sdklayers.dll"))
        is_debug = 0;
#endif

    if (is_debug)
        creationFlags |= D3D11_CREATE_DEVICE_DEBUG;

    if ((ret = ff_thread_once(&functions_loaded, load_functions)) != 0)
        return AVERROR_UNKNOWN;
    if (!mD3D11CreateDevice || !mCreateDXGIFactory) {
        av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library or its functions\n");
        return AVERROR_UNKNOWN;
    }

    if (device) {
        IDXGIFactory2 *pDXGIFactory;
        hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
        if (SUCCEEDED(hr)) {
            int adapter = atoi(device);
            if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
                pAdapter = NULL;
            IDXGIFactory2_Release(pDXGIFactory);
        }
    }

    if (pAdapter) {
        DXGI_ADAPTER_DESC desc;
        hr = IDXGIAdapter2_GetDesc(pAdapter, &desc);
        if (!FAILED(hr)) {
            av_log(ctx, AV_LOG_INFO, "Using device %04x:%04x (%ls).\n",
                   desc.VendorId, desc.DeviceId, desc.Description);
        }
    }

    hr = mD3D11CreateDevice(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
                            D3D11_SDK_VERSION, &device_hwctx->device, NULL, NULL);
    if (pAdapter)
        IDXGIAdapter_Release(pAdapter);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread, (void **)&pMultithread);
    if (SUCCEEDED(hr)) {
        ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
        ID3D10Multithread_Release(pMultithread);
    }

#if !HAVE_UWP && HAVE_DXGIDEBUG_H
    if (is_debug) {
        HANDLE dxgidebug_dll = LoadLibrary("dxgidebug.dll");
        if (dxgidebug_dll) {
            HRESULT (WINAPI * pf_DXGIGetDebugInterface)(const GUID *riid, void **ppDebug)
                = (void *)GetProcAddress(dxgidebug_dll, "DXGIGetDebugInterface");
            if (pf_DXGIGetDebugInterface) {
                IDXGIDebug *dxgi_debug = NULL;
                hr = pf_DXGIGetDebugInterface(&IID_IDXGIDebug, (void**)&dxgi_debug);
                if (SUCCEEDED(hr) && dxgi_debug)
                    IDXGIDebug_ReportLiveObjects(dxgi_debug, DXGI_DEBUG_ALL, DXGI_DEBUG_RLO_ALL);
            }
        }
    }
#endif

    return 0;
}

const HWContextType ff_hwcontext_type_d3d11va = {
    .type                   = AV_HWDEVICE_TYPE_D3D11VA,
    .name                   = "D3D11VA",

    .device_hwctx_size      = sizeof(AVD3D11VADeviceContext),
    .frames_hwctx_size      = sizeof(AVD3D11VAFramesContext),
    .frames_priv_size       = sizeof(D3D11VAFramesContext),

    .device_create          = d3d11va_device_create,
    .device_init            = d3d11va_device_init,
    .device_uninit          = d3d11va_device_uninit,
    .frames_get_constraints = d3d11va_frames_get_constraints,
    .frames_init            = d3d11va_frames_init,
    .frames_uninit          = d3d11va_frames_uninit,
    .frames_get_buffer      = d3d11va_get_buffer,
    .transfer_get_formats   = d3d11va_transfer_get_formats,
    .transfer_data_to       = d3d11va_transfer_data,
    .transfer_data_from     = d3d11va_transfer_data,

    .pix_fmts               = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11, AV_PIX_FMT_NONE },
};