/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <windows.h>

// Include thread.h before redefining _WIN32_WINNT, to get
// the right implementation for AVOnce
#include "thread.h"

#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0600
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0600
#endif

#define COBJMACROS

#include <initguid.h>
#include <d3d11.h>
#include <dxgi1_2.h>

#if HAVE_DXGIDEBUG_H
#include <dxgidebug.h>
#endif

#include "avassert.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_d3d11va.h"
#include "hwcontext_internal.h"
#include "imgutils.h"
#include "pixdesc.h"
#include "pixfmt.h"

typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);

static AVOnce functions_loaded = AV_ONCE_INIT;

static PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
static PFN_D3D11_CREATE_DEVICE mD3D11CreateDevice;

static av_cold void load_functions(void)
{
#if HAVE_LOADLIBRARY
    // We let these "leak" - this is fine, as unloading has no great benefit, and
    // Windows will mark a DLL as loaded forever if its internal refcount overflows
    // from too many LoadLibrary calls.
    HANDLE d3dlib, dxgilib;

    d3dlib  = LoadLibrary("d3d11.dll");
    dxgilib = LoadLibrary("dxgi.dll");
    if (!d3dlib || !dxgilib)
        return;

    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) GetProcAddress(d3dlib, "D3D11CreateDevice");
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) GetProcAddress(dxgilib, "CreateDXGIFactory");
#else
    // In UWP (which lacks LoadLibrary), CreateDXGIFactory isn't available,
    // only CreateDXGIFactory1
    mD3D11CreateDevice = (PFN_D3D11_CREATE_DEVICE) D3D11CreateDevice;
    mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) CreateDXGIFactory1;
#endif
}

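// Per-frames-context state: how many slices of the shared array texture have
// been handed out, the DXGI format in use, and a lazily created staging
// texture used for CPU uploads/downloads.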
typedef struct D3D11VAFramesContext {
    int nb_surfaces_used;

    DXGI_FORMAT format;

    ID3D11Texture2D *staging_texture;
} D3D11VAFramesContext;

static const struct {
    DXGI_FORMAT d3d_format;
    enum AVPixelFormat pix_fmt;
} supported_formats[] = {
    { DXGI_FORMAT_NV12,       AV_PIX_FMT_NV12 },
    { DXGI_FORMAT_P010,       AV_PIX_FMT_P010 },
    // Special opaque formats. The pix_fmt is merely a placeholder, as the
    // opaque format cannot be accessed directly.
    { DXGI_FORMAT_420_OPAQUE, AV_PIX_FMT_YUV420P },
};

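// Default device lock/unlock callbacks, installed when the caller does not
// provide its own; lock_ctx is the Win32 mutex created in d3d11va_device_init().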
static void d3d11va_default_lock(void *ctx)
{
    WaitForSingleObjectEx(ctx, INFINITE, FALSE);
}

static void d3d11va_default_unlock(void *ctx)
{
    ReleaseMutex(ctx);
}

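// Drop the frames context's texture references: the shared array texture
// (user-provided or internally allocated) and the internal staging texture.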
static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
{
    AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;

    if (frames_hwctx->texture)
        ID3D11Texture2D_Release(frames_hwctx->texture);
    frames_hwctx->texture = NULL;

    if (s->staging_texture)
        ID3D11Texture2D_Release(s->staging_texture);
    s->staging_texture = NULL;
}

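// AVBufferRef machinery: wrap_texture_buf() packs a texture reference plus its
// array slice index into an AVD3D11FrameDescriptor; free_texture() is the
// buffer free callback that releases the texture reference again.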
static void free_texture(void *opaque, uint8_t *data)
{
    ID3D11Texture2D_Release((ID3D11Texture2D *)opaque);
    // data is the AVD3D11FrameDescriptor allocated in wrap_texture_buf();
    // with a custom free callback installed it must be freed here.
    av_free(data);
}

static AVBufferRef *wrap_texture_buf(ID3D11Texture2D *tex, int index)
{
    AVBufferRef *buf;
    AVD3D11FrameDescriptor *desc = av_mallocz(sizeof(*desc));

    if (!desc) {
        ID3D11Texture2D_Release(tex);
        return NULL;
    }

    desc->texture = tex;
    desc->index   = index;

    buf = av_buffer_create((uint8_t *)desc, sizeof(desc), free_texture, tex, 0);
    if (!buf) {
        ID3D11Texture2D_Release(tex);
        av_free(desc);
        return NULL;
    }

    return buf;
}

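// Allocate a standalone (non-array) texture. Used by the pool allocator when
// no shared array texture was set up, i.e. initial_pool_size was 0 and the
// user did not provide a texture of their own.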
static AVBufferRef *d3d11va_alloc_single(AVHWFramesContext *ctx)
{
    D3D11VAFramesContext *s = ctx->internal->priv;
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    HRESULT hr;
    ID3D11Texture2D *tex;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = 1,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &tex);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
        return NULL;
    }

    return wrap_texture_buf(tex, 0);
}

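// Pool allocator callback: hands out slices of the shared array texture if one
// exists, otherwise falls back to allocating standalone textures on demand.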
static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
    D3D11VAFramesContext *s = ctx->internal->priv;
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    D3D11_TEXTURE2D_DESC texDesc;

    if (!hwctx->texture)
        return d3d11va_alloc_single(ctx);

    ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc);

    if (s->nb_surfaces_used >= texDesc.ArraySize) {
        av_log(ctx, AV_LOG_ERROR, "Static surface pool size exceeded.\n");
        return NULL;
    }

    ID3D11Texture2D_AddRef(hwctx->texture);
    return wrap_texture_buf(hwctx->texture, s->nb_surfaces_used++);
}

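// Map sw_format to a DXGI format, validate (or create) the shared array
// texture, and set up the internal buffer pool of AVD3D11FrameDescriptor
// entries.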
static int d3d11va_frames_init(AVHWFramesContext *ctx)
{
    AVD3D11VAFramesContext *hwctx = ctx->hwctx;
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;

    int i;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc;

    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        if (ctx->sw_format == supported_formats[i].pix_fmt) {
            s->format = supported_formats[i].d3d_format;
            break;
        }
    }
    if (i == FF_ARRAY_ELEMS(supported_formats)) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(EINVAL);
    }

    texDesc = (D3D11_TEXTURE2D_DESC){
        .Width      = ctx->width,
        .Height     = ctx->height,
        .MipLevels  = 1,
        .Format     = s->format,
        .SampleDesc = { .Count = 1 },
        .ArraySize  = ctx->initial_pool_size,
        .Usage      = D3D11_USAGE_DEFAULT,
        .BindFlags  = hwctx->BindFlags,
        .MiscFlags  = hwctx->MiscFlags,
    };

    if (hwctx->texture) {
        D3D11_TEXTURE2D_DESC texDesc2;
        ID3D11Texture2D_GetDesc(hwctx->texture, &texDesc2);

        if (texDesc.Width != texDesc2.Width ||
            texDesc.Height != texDesc2.Height ||
            texDesc.Format != texDesc2.Format) {
            av_log(ctx, AV_LOG_ERROR, "User-provided texture has mismatching parameters\n");
            return AVERROR(EINVAL);
        }
    } else if (texDesc.ArraySize > 0) {
        hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &hwctx->texture);
        if (FAILED(hr)) {
            av_log(ctx, AV_LOG_ERROR, "Could not create the texture (%lx)\n", (long)hr);
            return AVERROR_UNKNOWN;
        }
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(AVD3D11FrameDescriptor),
                                                        ctx, d3d11va_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    return 0;
}

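// Export a pooled surface as an AV_PIX_FMT_D3D11 frame: data[0] carries the
// texture pointer, data[1] the array slice index.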
static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    AVD3D11FrameDescriptor *desc;

    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    desc = (AVD3D11FrameDescriptor *)frame->buf[0]->data;

    frame->data[0] = (uint8_t *)desc->texture;
    frame->data[1] = (uint8_t *)desc->index;
    frame->format  = AV_PIX_FMT_D3D11;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

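// Report which software formats transfers can use; opaque-only surfaces cannot
// be read or written by the CPU, so none are advertised for them.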
static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
                                        enum AVHWFrameTransferDirection dir,
                                        enum AVPixelFormat **formats)
{
    D3D11VAFramesContext *s = ctx->internal->priv;
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    // Don't signal support for opaque formats. Actual access would fail.
    if (s->format == DXGI_FORMAT_420_OPAQUE)
        fmts[0] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}

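// Create the CPU-accessible staging texture used as an intermediate for
// uploads and downloads.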
static int d3d11va_create_staging_texture(AVHWFramesContext *ctx)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;
    HRESULT hr;
    D3D11_TEXTURE2D_DESC texDesc = {
        .Width          = ctx->width,
        .Height         = ctx->height,
        .MipLevels      = 1,
        .Format         = s->format,
        .SampleDesc     = { .Count = 1 },
        .ArraySize      = 1,
        .Usage          = D3D11_USAGE_STAGING,
        .CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE,
    };

    hr = ID3D11Device_CreateTexture2D(device_hwctx->device, &texDesc, NULL, &s->staging_texture);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    return 0;
}

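// Derive per-plane data pointers and linesizes for a mapped staging texture.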
static void fill_texture_ptrs(uint8_t *data[4], int linesize[4],
                              AVHWFramesContext *ctx,
                              D3D11_TEXTURE2D_DESC *desc,
                              D3D11_MAPPED_SUBRESOURCE *map)
{
    int i;

    for (i = 0; i < 4; i++)
        linesize[i] = map->RowPitch;

    av_image_fill_pointers(data, ctx->sw_format, desc->Height,
                           (uint8_t*)map->pData, linesize);
}

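// Upload/download between system memory and a D3D11 surface, going through the
// staging texture and CopySubresourceRegion on the immediate context. The
// device lock is held for the whole operation.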
static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
                                 const AVFrame *src)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    D3D11VAFramesContext *s = ctx->internal->priv;
    int download = src->format == AV_PIX_FMT_D3D11;
    const AVFrame *frame = download ? src : dst;
    const AVFrame *other = download ? dst : src;
    // (The interface types are compatible.)
    ID3D11Resource *texture = (ID3D11Resource *)(ID3D11Texture2D *)frame->data[0];
    int index = (intptr_t)frame->data[1];
    ID3D11Resource *staging;
    int w = FFMIN(dst->width,  src->width);
    int h = FFMIN(dst->height, src->height);
    uint8_t *map_data[4];
    int map_linesize[4];
    D3D11_TEXTURE2D_DESC desc;
    D3D11_MAPPED_SUBRESOURCE map;
    HRESULT hr;

    if (frame->hw_frames_ctx->data != (uint8_t *)ctx || other->format != ctx->sw_format)
        return AVERROR(EINVAL);

    device_hwctx->lock(device_hwctx->lock_ctx);

    if (!s->staging_texture) {
        int res = d3d11va_create_staging_texture(ctx);
        if (res < 0) {
            device_hwctx->unlock(device_hwctx->lock_ctx);
            return res;
        }
    }

    staging = (ID3D11Resource *)s->staging_texture;

    ID3D11Texture2D_GetDesc(s->staging_texture, &desc);

    if (download) {
        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  staging, 0, 0, 0, 0,
                                                  texture, index, NULL);

        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_READ, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy(dst->data, dst->linesize, map_data, map_linesize,
                      ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);
    } else {
        hr = ID3D11DeviceContext_Map(device_hwctx->device_context,
                                     staging, 0, D3D11_MAP_WRITE, 0, &map);
        if (FAILED(hr))
            goto map_failed;

        fill_texture_ptrs(map_data, map_linesize, ctx, &desc, &map);

        av_image_copy(map_data, map_linesize, src->data, src->linesize,
                      ctx->sw_format, w, h);

        ID3D11DeviceContext_Unmap(device_hwctx->device_context, staging, 0);

        ID3D11DeviceContext_CopySubresourceRegion(device_hwctx->device_context,
                                                  texture, index, 0, 0, 0,
                                                  staging, 0, NULL);
    }

    device_hwctx->unlock(device_hwctx->lock_ctx);
    return 0;

map_failed:
    av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface (%lx)\n", (long)hr);
    device_hwctx->unlock(device_hwctx->lock_ctx);
    return AVERROR_UNKNOWN;
}

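// Fill in the derived parts of the device context (locking callbacks,
// immediate context, video device/context interfaces) that the user did not
// provide.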
static int d3d11va_device_init(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;
    HRESULT hr;

    if (!device_hwctx->lock) {
        device_hwctx->lock_ctx = CreateMutex(NULL, 0, NULL);
        if (device_hwctx->lock_ctx == INVALID_HANDLE_VALUE) {
            av_log(NULL, AV_LOG_ERROR, "Failed to create a mutex\n");
            return AVERROR(EINVAL);
        }
        device_hwctx->lock   = d3d11va_default_lock;
        device_hwctx->unlock = d3d11va_default_unlock;
    }

    if (!device_hwctx->device_context) {
        ID3D11Device_GetImmediateContext(device_hwctx->device, &device_hwctx->device_context);
        if (!device_hwctx->device_context)
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_device) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device, &IID_ID3D11VideoDevice,
                                                (void **)&device_hwctx->video_device);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    if (!device_hwctx->video_context) {
        hr = ID3D11DeviceContext_QueryInterface(device_hwctx->device_context, &IID_ID3D11VideoContext,
                                                (void **)&device_hwctx->video_context);
        if (FAILED(hr))
            return AVERROR_UNKNOWN;
    }

    return 0;
}

static void d3d11va_device_uninit(AVHWDeviceContext *hwdev)
{
    AVD3D11VADeviceContext *device_hwctx = hwdev->hwctx;

    if (device_hwctx->device)
        ID3D11Device_Release(device_hwctx->device);

    if (device_hwctx->device_context)
        ID3D11DeviceContext_Release(device_hwctx->device_context);

    if (device_hwctx->video_device)
        ID3D11VideoDevice_Release(device_hwctx->video_device);

    if (device_hwctx->video_context)
        ID3D11VideoContext_Release(device_hwctx->video_context);

    if (device_hwctx->lock == d3d11va_default_lock)
        CloseHandle(device_hwctx->lock_ctx);
}

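// Create a D3D11 device, optionally on a specific adapter ("device" is the
// adapter index) and with the debug layer if requested, then enable
// multithread protection on it.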
static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
                                 AVDictionary *opts, int flags)
{
    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;

    HRESULT hr;
    IDXGIAdapter *pAdapter = NULL;
    ID3D10Multithread *pMultithread;
    UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
    int is_debug       = !!av_dict_get(opts, "debug", NULL, 0);
    int ret;

    // (On UWP we can't check this.)
#if HAVE_LOADLIBRARY
    if (!LoadLibrary("d3d11_1sdklayers.dll"))
        is_debug = 0;
#endif

    if (is_debug)
        creationFlags |= D3D11_CREATE_DEVICE_DEBUG;

    if ((ret = ff_thread_once(&functions_loaded, load_functions)) != 0)
        return AVERROR_UNKNOWN;
    if (!mD3D11CreateDevice || !mCreateDXGIFactory) {
        av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library or its functions\n");
        return AVERROR_UNKNOWN;
    }

    if (device) {
        IDXGIFactory2 *pDXGIFactory;
        hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
        if (SUCCEEDED(hr)) {
            int adapter = atoi(device);
            if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
                pAdapter = NULL;
            IDXGIFactory2_Release(pDXGIFactory);
        }
    }

    hr = mD3D11CreateDevice(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
                            D3D11_SDK_VERSION, &device_hwctx->device, NULL, NULL);
    if (pAdapter)
        IDXGIAdapter_Release(pAdapter);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device (%lx)\n", (long)hr);
        return AVERROR_UNKNOWN;
    }

    hr = ID3D11Device_QueryInterface(device_hwctx->device, &IID_ID3D10Multithread, (void **)&pMultithread);
    if (SUCCEEDED(hr)) {
        ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
        ID3D10Multithread_Release(pMultithread);
    }

#if HAVE_LOADLIBRARY && HAVE_DXGIDEBUG_H
    if (is_debug) {
        HANDLE dxgidebug_dll = LoadLibrary("dxgidebug.dll");
        if (dxgidebug_dll) {
            HRESULT (WINAPI * pf_DXGIGetDebugInterface)(const GUID *riid, void **ppDebug)
                = (void *)GetProcAddress(dxgidebug_dll, "DXGIGetDebugInterface");
            if (pf_DXGIGetDebugInterface) {
                IDXGIDebug *dxgi_debug = NULL;
                hr = pf_DXGIGetDebugInterface(&IID_IDXGIDebug, (void**)&dxgi_debug);
                if (SUCCEEDED(hr) && dxgi_debug)
                    IDXGIDebug_ReportLiveObjects(dxgi_debug, DXGI_DEBUG_ALL, DXGI_DEBUG_RLO_ALL);
            }
        }
    }
#endif

    return 0;
}

const HWContextType ff_hwcontext_type_d3d11va = {
    .type                 = AV_HWDEVICE_TYPE_D3D11VA,
    .name                 = "D3D11VA",

    .device_hwctx_size    = sizeof(AVD3D11VADeviceContext),
    .frames_hwctx_size    = sizeof(AVD3D11VAFramesContext),
    .frames_priv_size     = sizeof(D3D11VAFramesContext),

    .device_create        = d3d11va_device_create,
    .device_init          = d3d11va_device_init,
    .device_uninit        = d3d11va_device_uninit,
    .frames_init          = d3d11va_frames_init,
    .frames_uninit        = d3d11va_frames_uninit,
    .frames_get_buffer    = d3d11va_get_buffer,
    .transfer_get_formats = d3d11va_transfer_get_formats,
    .transfer_data_to     = d3d11va_transfer_data,
    .transfer_data_from   = d3d11va_transfer_data,

    .pix_fmts             = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11, AV_PIX_FMT_NONE },
};