/* (removed: repository web-UI scrape artifacts — topic-selection text and file-size metadata) */
  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include "config.h"
  19. #include "libavutil/avassert.h"
  20. #include "libavutil/imgutils.h"
  21. #include "libavutil/hwcontext.h"
  22. #if CONFIG_D3D11VA
  23. #include "libavutil/hwcontext_d3d11va.h"
  24. #endif
  25. #if CONFIG_DXVA2
  26. #define COBJMACROS
  27. #include "libavutil/hwcontext_dxva2.h"
  28. #endif
  29. #include "libavutil/mem.h"
  30. #include "libavutil/pixdesc.h"
  31. #include "libavutil/time.h"
  32. #include "amfenc.h"
  33. #include "internal.h"
  34. #if CONFIG_D3D11VA
  35. #include <d3d11.h>
  36. #endif
  37. #ifdef _WIN32
  38. #include "compat/w32dlfcn.h"
  39. #else
  40. #include <dlfcn.h>
  41. #endif
  42. #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
  43. #define PTS_PROP L"PtsProp"
// Pixel formats accepted by the AMF encoders; the hardware formats are only
// offered when the corresponding hwcontext support is compiled in.
// Terminated by AV_PIX_FMT_NONE.
const enum AVPixelFormat ff_amf_pix_fmts[] = {
    AV_PIX_FMT_NV12,
    AV_PIX_FMT_YUV420P,
#if CONFIG_D3D11VA
    AV_PIX_FMT_D3D11,
#endif
#if CONFIG_DXVA2
    AV_PIX_FMT_DXVA2_VLD,
#endif
    AV_PIX_FMT_NONE
};
// One entry of the libavutil <-> AMF pixel-format translation table.
typedef struct FormatMap {
    enum AVPixelFormat av_format;
    enum AMF_SURFACE_FORMAT amf_format;
} FormatMap;

// Translation table used by amf_av_to_amf_format(); formats not listed here
// are reported as AMF_SURFACE_UNKNOWN.
static const FormatMap format_map[] =
{
    { AV_PIX_FMT_NONE,    AMF_SURFACE_UNKNOWN },
    { AV_PIX_FMT_NV12,    AMF_SURFACE_NV12 },
    { AV_PIX_FMT_BGR0,    AMF_SURFACE_BGRA },
    { AV_PIX_FMT_RGB0,    AMF_SURFACE_RGBA },
    { AV_PIX_FMT_GRAY8,   AMF_SURFACE_GRAY8 },
    { AV_PIX_FMT_YUV420P, AMF_SURFACE_YUV420P },
    { AV_PIX_FMT_YUYV422, AMF_SURFACE_YUY2 },
};
  69. static enum AMF_SURFACE_FORMAT amf_av_to_amf_format(enum AVPixelFormat fmt)
  70. {
  71. int i;
  72. for (i = 0; i < amf_countof(format_map); i++) {
  73. if (format_map[i].av_format == fmt) {
  74. return format_map[i].amf_format;
  75. }
  76. }
  77. return AMF_SURFACE_UNKNOWN;
  78. }
// AMFTraceWriter callback: forward AMF trace output to av_log using the
// AVCodecContext stored in the wrapping AmfTraceWriter.
static void AMF_CDECL_CALL AMFTraceWriter_Write(AMFTraceWriter *pThis,
                                                const wchar_t *scope, const wchar_t *message)
{
    AmfTraceWriter *tracer = (AmfTraceWriter*)pThis;
    av_log(tracer->avctx, AV_LOG_DEBUG, "%ls: %ls", scope, message); // \n is provided from AMF
}
// AMFTraceWriter callback: nothing is buffered on our side, so Flush is a no-op.
static void AMF_CDECL_CALL AMFTraceWriter_Flush(AMFTraceWriter *pThis)
{
}
// Vtable wiring the AMF trace-writer interface to the callbacks above.
static AMFTraceWriterVtbl tracer_vtbl =
{
    .Write = AMFTraceWriter_Write,
    .Flush = AMFTraceWriter_Flush,
};
// Load the AMF runtime DLL, resolve its entry points and obtain the AMF
// factory, trace and debug interfaces. Also allocates the encoder-side
// helper state (delayed frame, timestamp FIFO). Returns 0 on success or a
// negative AVERROR; partially acquired resources are released later by
// ff_amf_encode_close().
static int amf_load_library(AVCodecContext *avctx)
{
    AmfContext *ctx = avctx->priv_data;
    AMFInit_Fn init_fun;
    AMFQueryVersion_Fn version_fun;
    AMF_RESULT res;

    ctx->delayed_frame = av_frame_alloc();
    if (!ctx->delayed_frame) {
        return AVERROR(ENOMEM);
    }
    // hardcoded to current HW queue size - will realloc in timestamp_queue_enqueue() if too small
    ctx->timestamp_list = av_fifo_alloc((avctx->max_b_frames + 16) * sizeof(int64_t));
    if (!ctx->timestamp_list) {
        return AVERROR(ENOMEM);
    }
    ctx->dts_delay = 0;

    // dlopen maps to the w32dlfcn shim on Windows (see includes at top of file).
    ctx->library = dlopen(AMF_DLL_NAMEA, RTLD_NOW | RTLD_LOCAL);
    AMF_RETURN_IF_FALSE(ctx, ctx->library != NULL,
                        AVERROR_UNKNOWN, "DLL %s failed to open\n", AMF_DLL_NAMEA);

    init_fun = (AMFInit_Fn)dlsym(ctx->library, AMF_INIT_FUNCTION_NAME);
    AMF_RETURN_IF_FALSE(ctx, init_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_INIT_FUNCTION_NAME);

    version_fun = (AMFQueryVersion_Fn)dlsym(ctx->library, AMF_QUERY_VERSION_FUNCTION_NAME);
    AMF_RETURN_IF_FALSE(ctx, version_fun != NULL, AVERROR_UNKNOWN, "DLL %s failed to find function %s\n", AMF_DLL_NAMEA, AMF_QUERY_VERSION_FUNCTION_NAME);

    res = version_fun(&ctx->version);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_QUERY_VERSION_FUNCTION_NAME, res);
    res = init_fun(AMF_FULL_VERSION, &ctx->factory);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "%s failed with error %d\n", AMF_INIT_FUNCTION_NAME, res);
    res = ctx->factory->pVtbl->GetTrace(ctx->factory, &ctx->trace);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetTrace() failed with error %d\n", res);
    res = ctx->factory->pVtbl->GetDebug(ctx->factory, &ctx->debug);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetDebug() failed with error %d\n", res);
    return 0;
}
  126. #if CONFIG_D3D11VA
  127. static int amf_init_from_d3d11_device(AVCodecContext *avctx, AVD3D11VADeviceContext *hwctx)
  128. {
  129. AmfContext *ctx = avctx->priv_data;
  130. AMF_RESULT res;
  131. res = ctx->context->pVtbl->InitDX11(ctx->context, hwctx->device, AMF_DX11_1);
  132. if (res != AMF_OK) {
  133. if (res == AMF_NOT_SUPPORTED)
  134. av_log(avctx, AV_LOG_ERROR, "AMF via D3D11 is not supported on the given device.\n");
  135. else
  136. av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on the given D3D11 device: %d.\n", res);
  137. return AVERROR(ENODEV);
  138. }
  139. return 0;
  140. }
  141. #endif
#if CONFIG_DXVA2
// Bind the AMF context to a DXVA2 device. The IDirect3DDevice9 is fetched
// through the device manager: open a handle, lock it (which yields the
// device pointer), unlock immediately, and close the handle before handing
// the device to AMF. Returns 0 or a negative AVERROR.
static int amf_init_from_dxva2_device(AVCodecContext *avctx, AVDXVA2DeviceContext *hwctx)
{
    AmfContext *ctx = avctx->priv_data;
    HANDLE device_handle;
    IDirect3DDevice9 *device;
    HRESULT hr;
    AMF_RESULT res;
    int ret;

    hr = IDirect3DDeviceManager9_OpenDeviceHandle(hwctx->devmgr, &device_handle);
    if (FAILED(hr)) {
        av_log(avctx, AV_LOG_ERROR, "Failed to open device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
        return AVERROR_EXTERNAL;
    }

    hr = IDirect3DDeviceManager9_LockDevice(hwctx->devmgr, device_handle, &device, FALSE);
    if (SUCCEEDED(hr)) {
        IDirect3DDeviceManager9_UnlockDevice(hwctx->devmgr, device_handle, FALSE);
        ret = 0;
    } else {
        av_log(avctx, AV_LOG_ERROR, "Failed to lock device handle for Direct3D9 device: %lx.\n", (unsigned long)hr);
        ret = AVERROR_EXTERNAL;
    }

    // Close the handle regardless of whether locking succeeded.
    IDirect3DDeviceManager9_CloseDeviceHandle(hwctx->devmgr, device_handle);

    if (ret < 0)
        return ret;

    res = ctx->context->pVtbl->InitDX9(ctx->context, device);

    // Drop the local reference obtained via LockDevice.
    IDirect3DDevice9_Release(device);

    if (res != AMF_OK) {
        if (res == AMF_NOT_SUPPORTED)
            av_log(avctx, AV_LOG_ERROR, "AMF via D3D9 is not supported on the given device.\n");
        else
            av_log(avctx, AV_LOG_ERROR, "AMF failed to initialise on given D3D9 device: %d.\n", res);
        return AVERROR(ENODEV);
    }
    return 0;
}
#endif
// Create the AMF context and bind it to a device. Device selection order:
// the frames context attached to the encoder, then an explicit hw_device_ctx,
// and finally a standalone D3D11 initialisation with a D3D9 fallback.
// Also routes AMF trace output into av_log via the tracer registered here.
static int amf_init_context(AVCodecContext *avctx)
{
    AmfContext *ctx = avctx->priv_data;
    AMF_RESULT res;
    av_unused int ret;

    ctx->hwsurfaces_in_queue = 0;
    ctx->hwsurfaces_in_queue_max = 16;

    // configure AMF logger
    // the return of these functions indicates old state and do not affect behaviour
    ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, ctx->log_to_dbg != 0 );
    if (ctx->log_to_dbg)
        ctx->trace->pVtbl->SetWriterLevel(ctx->trace, AMF_TRACE_WRITER_DEBUG_OUTPUT, AMF_TRACE_TRACE);
    ctx->trace->pVtbl->EnableWriter(ctx->trace, AMF_TRACE_WRITER_CONSOLE, 0);
    ctx->trace->pVtbl->SetGlobalLevel(ctx->trace, AMF_TRACE_TRACE);

    // connect AMF logger to av_log
    ctx->tracer.vtbl = &tracer_vtbl;
    ctx->tracer.avctx = avctx;
    ctx->trace->pVtbl->RegisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID,(AMFTraceWriter*)&ctx->tracer, 1);
    ctx->trace->pVtbl->SetWriterLevel(ctx->trace, FFMPEG_AMF_WRITER_ID, AMF_TRACE_TRACE);

    res = ctx->factory->pVtbl->CreateContext(ctx->factory, &ctx->context);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "CreateContext() failed with error %d\n", res);

    // If a device was passed to the encoder, try to initialise from that.
    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;

        // Reject frames contexts whose software format AMF cannot consume.
        if (amf_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
            av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
                   av_get_pix_fmt_name(frames_ctx->sw_format));
            return AVERROR(EINVAL);
        }

        switch (frames_ctx->device_ctx->type) {
#if CONFIG_D3D11VA
        case AV_HWDEVICE_TYPE_D3D11VA:
            ret = amf_init_from_d3d11_device(avctx, frames_ctx->device_ctx->hwctx);
            if (ret < 0)
                return ret;
            break;
#endif
#if CONFIG_DXVA2
        case AV_HWDEVICE_TYPE_DXVA2:
            ret = amf_init_from_dxva2_device(avctx, frames_ctx->device_ctx->hwctx);
            if (ret < 0)
                return ret;
            break;
#endif
        default:
            av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s frames context is not supported.\n",
                   av_hwdevice_get_type_name(frames_ctx->device_ctx->type));
            return AVERROR(ENOSYS);
        }

        ctx->hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
        if (!ctx->hw_frames_ctx)
            return AVERROR(ENOMEM);

        // Cap in-flight HW surfaces below the fixed frame-pool size.
        if (frames_ctx->initial_pool_size > 0)
            ctx->hwsurfaces_in_queue_max = frames_ctx->initial_pool_size - 1;

    } else if (avctx->hw_device_ctx) {
        AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;

        switch (device_ctx->type) {
#if CONFIG_D3D11VA
        case AV_HWDEVICE_TYPE_D3D11VA:
            ret = amf_init_from_d3d11_device(avctx, device_ctx->hwctx);
            if (ret < 0)
                return ret;
            break;
#endif
#if CONFIG_DXVA2
        case AV_HWDEVICE_TYPE_DXVA2:
            ret = amf_init_from_dxva2_device(avctx, device_ctx->hwctx);
            if (ret < 0)
                return ret;
            break;
#endif
        default:
            av_log(avctx, AV_LOG_ERROR, "AMF initialisation from a %s device is not supported.\n",
                   av_hwdevice_get_type_name(device_ctx->type));
            return AVERROR(ENOSYS);
        }

        ctx->hw_device_ctx = av_buffer_ref(avctx->hw_device_ctx);
        if (!ctx->hw_device_ctx)
            return AVERROR(ENOMEM);

    } else {
        // No device supplied: let AMF pick one — D3D11 first, then D3D9.
        res = ctx->context->pVtbl->InitDX11(ctx->context, NULL, AMF_DX11_1);
        if (res == AMF_OK) {
            av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D11.\n");
        } else {
            res = ctx->context->pVtbl->InitDX9(ctx->context, NULL);
            if (res == AMF_OK) {
                av_log(avctx, AV_LOG_VERBOSE, "AMF initialisation succeeded via D3D9.\n");
            } else {
                av_log(avctx, AV_LOG_ERROR, "AMF initialisation failed via D3D9: error %d.\n", res);
                return AVERROR(ENOSYS);
            }
        }
    }
    return 0;
}
  274. static int amf_init_encoder(AVCodecContext *avctx)
  275. {
  276. AmfContext *ctx = avctx->priv_data;
  277. const wchar_t *codec_id = NULL;
  278. AMF_RESULT res;
  279. enum AVPixelFormat pix_fmt;
  280. switch (avctx->codec->id) {
  281. case AV_CODEC_ID_H264:
  282. codec_id = AMFVideoEncoderVCE_AVC;
  283. break;
  284. case AV_CODEC_ID_HEVC:
  285. codec_id = AMFVideoEncoder_HEVC;
  286. break;
  287. default:
  288. break;
  289. }
  290. AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
  291. if (ctx->hw_frames_ctx)
  292. pix_fmt = ((AVHWFramesContext*)ctx->hw_frames_ctx->data)->sw_format;
  293. else
  294. pix_fmt = avctx->pix_fmt;
  295. ctx->format = amf_av_to_amf_format(pix_fmt);
  296. AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
  297. "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
  298. res = ctx->factory->pVtbl->CreateComponent(ctx->factory, ctx->context, codec_id, &ctx->encoder);
  299. AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
  300. return 0;
  301. }
// Tear down the encoder: release any pending delayed surface, terminate and
// release the encoder and context, drop device references, unregister the
// tracer, unload the runtime and free helper state. Safe to call on a
// partially initialised context. Always returns 0.
int av_cold ff_amf_encode_close(AVCodecContext *avctx)
{
    AmfContext *ctx = avctx->priv_data;

    if (ctx->delayed_surface) {
        ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
        ctx->delayed_surface = NULL;
    }

    if (ctx->encoder) {
        ctx->encoder->pVtbl->Terminate(ctx->encoder);
        ctx->encoder->pVtbl->Release(ctx->encoder);
        ctx->encoder = NULL;
    }

    if (ctx->context) {
        ctx->context->pVtbl->Terminate(ctx->context);
        ctx->context->pVtbl->Release(ctx->context);
        ctx->context = NULL;
    }
    av_buffer_unref(&ctx->hw_device_ctx);
    av_buffer_unref(&ctx->hw_frames_ctx);

    if (ctx->trace) {
        ctx->trace->pVtbl->UnregisterWriter(ctx->trace, FFMPEG_AMF_WRITER_ID);
    }
    if (ctx->library) {
        dlclose(ctx->library);
        ctx->library = NULL;
    }
    // Drop cached pointers into the (now unloaded) runtime.
    ctx->trace = NULL;
    ctx->debug = NULL;
    ctx->factory = NULL;
    ctx->version = 0;
    ctx->delayed_drain = 0;
    av_frame_free(&ctx->delayed_frame);
    av_fifo_freep(&ctx->timestamp_list);

    return 0;
}
  337. static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
  338. AMFSurface* surface)
  339. {
  340. AMFPlane *plane;
  341. uint8_t *dst_data[4];
  342. int dst_linesize[4];
  343. int planes;
  344. int i;
  345. planes = surface->pVtbl->GetPlanesCount(surface);
  346. av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
  347. for (i = 0; i < planes; i++) {
  348. plane = surface->pVtbl->GetPlaneAt(surface, i);
  349. dst_data[i] = plane->pVtbl->GetNative(plane);
  350. dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
  351. }
  352. av_image_copy(dst_data, dst_linesize,
  353. (const uint8_t**)frame->data, frame->linesize, frame->format,
  354. avctx->width, avctx->height);
  355. return 0;
  356. }
// Append an input pts to the timestamp FIFO, growing the FIFO when it is
// full. Returns 0 or AVERROR(ENOMEM).
static inline int timestamp_queue_enqueue(AVCodecContext *avctx, int64_t timestamp)
{
    AmfContext *ctx = avctx->priv_data;

    if (av_fifo_space(ctx->timestamp_list) < sizeof(timestamp)) {
        if (av_fifo_grow(ctx->timestamp_list, sizeof(timestamp)) < 0) {
            return AVERROR(ENOMEM);
        }
    }
    av_fifo_generic_write(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
    return 0;
}
  368. static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
  369. {
  370. AmfContext *ctx = avctx->priv_data;
  371. int ret;
  372. AMFVariantStruct var = {0};
  373. int64_t timestamp = AV_NOPTS_VALUE;
  374. int64_t size = buffer->pVtbl->GetSize(buffer);
  375. if ((ret = av_new_packet(pkt, size)) < 0) {
  376. return ret;
  377. }
  378. memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);
  379. switch (avctx->codec->id) {
  380. case AV_CODEC_ID_H264:
  381. buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
  382. if(var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
  383. pkt->flags = AV_PKT_FLAG_KEY;
  384. }
  385. break;
  386. case AV_CODEC_ID_HEVC:
  387. buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
  388. if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
  389. pkt->flags = AV_PKT_FLAG_KEY;
  390. }
  391. break;
  392. default:
  393. break;
  394. }
  395. buffer->pVtbl->GetProperty(buffer, PTS_PROP, &var);
  396. pkt->pts = var.int64Value; // original pts
  397. AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN, "timestamp_list is empty\n");
  398. av_fifo_generic_read(ctx->timestamp_list, &timestamp, sizeof(timestamp), NULL);
  399. // calc dts shift if max_b_frames > 0
  400. if (avctx->max_b_frames > 0 && ctx->dts_delay == 0) {
  401. int64_t timestamp_last = AV_NOPTS_VALUE;
  402. AMF_RETURN_IF_FALSE(ctx, av_fifo_size(ctx->timestamp_list) > 0, AVERROR_UNKNOWN,
  403. "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
  404. av_fifo_generic_peek_at(
  405. ctx->timestamp_list,
  406. &timestamp_last,
  407. (av_fifo_size(ctx->timestamp_list) / sizeof(timestamp) - 1) * sizeof(timestamp_last),
  408. sizeof(timestamp_last),
  409. NULL);
  410. if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
  411. return AVERROR(ERANGE);
  412. }
  413. ctx->dts_delay = timestamp_last - timestamp;
  414. }
  415. pkt->dts = timestamp - ctx->dts_delay;
  416. return 0;
  417. }
  418. // amfenc API implementation
  419. int ff_amf_encode_init(AVCodecContext *avctx)
  420. {
  421. int ret;
  422. if ((ret = amf_load_library(avctx)) == 0) {
  423. if ((ret = amf_init_context(avctx)) == 0) {
  424. if ((ret = amf_init_encoder(avctx)) == 0) {
  425. return 0;
  426. }
  427. }
  428. }
  429. ff_amf_encode_close(avctx);
  430. return ret;
  431. }
// Attach an AMFBuffer to a surface as an interface-valued property under the
// given name. The buffer is queried for its generic AMFInterface, wrapped in
// a variant, and stored on the surface. Returns an AMF_RESULT.
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
{
    AMF_RESULT res;
    AMFVariantStruct var;
    res = AMFVariantInit(&var);
    if (res == AMF_OK) {
        AMFGuid guid_AMFInterface = IID_AMFInterface();
        AMFInterface *amf_interface;
        res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
        if (res == AMF_OK) {
            res = AMFVariantAssignInterface(&var, amf_interface);
            // Release the reference obtained from QueryInterface once the
            // variant holds the interface.
            amf_interface->pVtbl->Release(amf_interface);
        }
        if (res == AMF_OK) {
            res = object->pVtbl->SetProperty(object, name, var);
        }
        AMFVariantClear(&var);
    }
    return res;
}
// Retrieve an AMFBuffer previously stored as an interface-valued property.
// On success *val holds a new reference the caller must Release. Returns an
// AMF_RESULT; AMF_INVALID_DATA_TYPE when the property is not an interface.
static AMF_RESULT amf_get_property_buffer(AMFData *object, const wchar_t *name, AMFBuffer **val)
{
    AMF_RESULT res;
    AMFVariantStruct var;
    res = AMFVariantInit(&var);
    if (res == AMF_OK) {
        res = object->pVtbl->GetProperty(object, name, &var);
        if (res == AMF_OK) {
            if (var.type == AMF_VARIANT_INTERFACE) {
                AMFGuid guid_AMFBuffer = IID_AMFBuffer();
                AMFInterface *amf_interface = AMFVariantInterface(&var);
                res = amf_interface->pVtbl->QueryInterface(amf_interface, &guid_AMFBuffer, (void**)val);
            } else {
                res = AMF_INVALID_DATA_TYPE;
            }
        }
        AMFVariantClear(&var);
    }
    return res;
}
  472. static AMFBuffer *amf_create_buffer_with_frame_ref(const AVFrame *frame, AMFContext *context)
  473. {
  474. AVFrame *frame_ref;
  475. AMFBuffer *frame_ref_storage_buffer = NULL;
  476. AMF_RESULT res;
  477. res = context->pVtbl->AllocBuffer(context, AMF_MEMORY_HOST, sizeof(frame_ref), &frame_ref_storage_buffer);
  478. if (res == AMF_OK) {
  479. frame_ref = av_frame_clone(frame);
  480. if (frame_ref) {
  481. memcpy(frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), &frame_ref, sizeof(frame_ref));
  482. } else {
  483. frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
  484. frame_ref_storage_buffer = NULL;
  485. }
  486. }
  487. return frame_ref_storage_buffer;
  488. }
  489. static void amf_release_buffer_with_frame_ref(AMFBuffer *frame_ref_storage_buffer)
  490. {
  491. AVFrame *frame_ref;
  492. memcpy(&frame_ref, frame_ref_storage_buffer->pVtbl->GetNative(frame_ref_storage_buffer), sizeof(frame_ref));
  493. av_frame_free(&frame_ref);
  494. frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
  495. }
// Submit one frame to the AMF encoder, or start draining when frame == NULL.
// When the encoder input queue is full the surface (or the drain request) is
// parked in ctx->delayed_surface / ctx->delayed_drain and resubmitted by
// ff_amf_receive_packet(). Returns 0, AVERROR(EAGAIN) while a delayed
// surface is pending, AVERROR_EOF after drain was already requested, or a
// negative error.
int ff_amf_send_frame(AVCodecContext *avctx, const AVFrame *frame)
{
    AmfContext *ctx = avctx->priv_data;
    AMFSurface *surface;
    AMF_RESULT res;
    int ret;

    if (!ctx->encoder)
        return AVERROR(EINVAL);

    if (!frame) { // submit drain
        if (!ctx->eof) { // submit drain one time only
            if (ctx->delayed_surface != NULL) {
                ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
            } else if(!ctx->delayed_drain) {
                res = ctx->encoder->pVtbl->Drain(ctx->encoder);
                if (res == AMF_INPUT_FULL) {
                    ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in ff_amf_receive_packet
                } else {
                    if (res == AMF_OK) {
                        ctx->eof = 1; // drain started
                    }
                    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
                }
            }
        } else{
            return AVERROR_EOF;
        }
    } else { // submit frame
        int hw_surface = 0;

        if (ctx->delayed_surface != NULL) {
            return AVERROR(EAGAIN); // should not happen when called from ffmpeg, other clients may resubmit
        }
        // prepare surface from frame
        switch (frame->format) {
#if CONFIG_D3D11VA
        case AV_PIX_FMT_D3D11:
            {
                static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
                ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
                int index = (intptr_t)frame->data[1]; // index is a slice in texture array is - set to tell AMF which slice to use

                // The frame must come from the same frames context the
                // encoder was initialised with.
                av_assert0(frame->hw_frames_ctx && ctx->hw_frames_ctx &&
                           frame->hw_frames_ctx->data == ctx->hw_frames_ctx->data);

                texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);

                res = ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
                hw_surface = 1;
            }
            break;
#endif
#if CONFIG_DXVA2
        case AV_PIX_FMT_DXVA2_VLD:
            {
                IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture

                res = ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->context, texture, &surface, NULL); // wrap to AMF surface
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
                hw_surface = 1;
            }
            break;
#endif
        default:
            {
                // Software frame: allocate a host surface and copy the pixels.
                res = ctx->context->pVtbl->AllocSurface(ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
                amf_copy_surface(avctx, frame, surface);
            }
            break;
        }

        if (hw_surface) {
            AMFBuffer *frame_ref_storage_buffer;

            // input HW surfaces can be vertically aligned by 16; tell AMF the real size
            surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);

            // Attach a cloned frame reference so the texture stays alive until
            // the corresponding output packet is produced.
            frame_ref_storage_buffer = amf_create_buffer_with_frame_ref(frame, ctx->context);
            AMF_RETURN_IF_FALSE(ctx, frame_ref_storage_buffer != NULL, AVERROR(ENOMEM), "create_buffer_with_frame_ref() returned NULL\n");

            res = amf_set_property_buffer(surface, L"av_frame_ref", frame_ref_storage_buffer);
            AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_ref\" with error %d\n", res);
            ctx->hwsurfaces_in_queue++;
            frame_ref_storage_buffer->pVtbl->Release(frame_ref_storage_buffer);
        }

        surface->pVtbl->SetPts(surface, frame->pts);
        AMF_ASSIGN_PROPERTY_INT64(res, surface, PTS_PROP, frame->pts);

        switch (avctx->codec->id) {
        case AV_CODEC_ID_H264:
            AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
            break;
        case AV_CODEC_ID_HEVC:
            AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
            break;
        default:
            break;
        }

        // submit surface
        res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
        if (res == AMF_INPUT_FULL) { // handle full queue
            //store surface for later submission
            ctx->delayed_surface = surface;
            if (surface->pVtbl->GetMemoryType(surface) == AMF_MEMORY_DX11) {
                // Hold a reference on the source frame while its texture is
                // wrapped by the parked surface.
                av_frame_ref(ctx->delayed_frame, frame);
            }
        } else {
            surface->pVtbl->Release(surface);
            AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);

            if ((ret = timestamp_queue_enqueue(avctx, frame->pts)) < 0) {
                return ret;
            }
        }
    }
    return 0;
}
// Poll one encoded packet from the AMF encoder. While output must be waited
// for (a delayed surface or drain is pending, draining has not reached EOF,
// or too many HW surfaces are in flight) the function blocks, re-polling
// every millisecond. Returns 0 on success, AVERROR(EAGAIN) when no output is
// ready, AVERROR_EOF once draining completed, or a negative error.
int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
{
    int ret;
    AMF_RESULT res;
    AMF_RESULT res_query;
    AmfContext *ctx = avctx->priv_data;
    AMFData *data = NULL;
    int block_and_wait;

    if (!ctx->encoder)
        return AVERROR(EINVAL);

    do {
        block_and_wait = 0;
        // poll data
        res_query = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
        if (data) {
            // copy data to packet
            AMFBuffer* buffer;
            AMFGuid guid = IID_AMFBuffer();
            data->pVtbl->QueryInterface(data, &guid, (void**)&buffer); // query for buffer interface
            ret = amf_copy_buffer(avctx, avpkt, buffer);

            buffer->pVtbl->Release(buffer);

            if (data->pVtbl->HasProperty(data, L"av_frame_ref")) {
                // The input HW frame reference rode through the encoder on
                // the surface; drop it now that its packet is out.
                AMFBuffer *frame_ref_storage_buffer;
                res = amf_get_property_buffer(data, L"av_frame_ref", &frame_ref_storage_buffer);
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "GetProperty failed for \"av_frame_ref\" with error %d\n", res);
                amf_release_buffer_with_frame_ref(frame_ref_storage_buffer);
                ctx->hwsurfaces_in_queue--;
            }

            data->pVtbl->Release(data);

            AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);

            if (ctx->delayed_surface != NULL) { // try to resubmit frame
                res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)ctx->delayed_surface);
                if (res != AMF_INPUT_FULL) {
                    int64_t pts = ctx->delayed_surface->pVtbl->GetPts(ctx->delayed_surface);
                    ctx->delayed_surface->pVtbl->Release(ctx->delayed_surface);
                    ctx->delayed_surface = NULL;
                    av_frame_unref(ctx->delayed_frame);
                    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated SubmitInput() failed with error %d\n", res);

                    if ((ret = timestamp_queue_enqueue(avctx, pts)) < 0) {
                        return ret;
                    }
                } else {
                    av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed frame submission got AMF_INPUT_FULL- should not happen\n");
                }
            } else if (ctx->delayed_drain) { // try to resubmit drain
                res = ctx->encoder->pVtbl->Drain(ctx->encoder);
                if (res != AMF_INPUT_FULL) {
                    ctx->delayed_drain = 0;
                    ctx->eof = 1; // drain started
                    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
                } else {
                    av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL- should not happen\n");
                }
            }
        } else if (ctx->delayed_surface != NULL || ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max)) {
            block_and_wait = 1;
            av_usleep(1000); // wait and poll again
        }
    } while (block_and_wait);

    if (res_query == AMF_EOF) {
        ret = AVERROR_EOF;
    } else if (data == NULL) {
        ret = AVERROR(EAGAIN);
    } else {
        ret = 0;
    }
    return ret;
}